summaryrefslogtreecommitdiff
path: root/subversion/tests
diff options
context:
space:
mode:
authorJames McCoy <jamessan@debian.org>2018-07-31 22:26:52 -0400
committerJames McCoy <jamessan@debian.org>2018-07-31 22:26:52 -0400
commite20a507113ff1126aeb4a97b806390ea377fe292 (patch)
tree0260b3a40387d7f994fbadaf22f1e9d3c080b09f /subversion/tests
parentc64debffb81d2fa17e9a72af7199ccf88b3cc556 (diff)
New upstream version 1.10.2
Diffstat (limited to 'subversion/tests')
-rw-r--r--subversion/tests/README283
-rw-r--r--subversion/tests/afl/README31
-rw-r--r--subversion/tests/afl/afl-x509-testcase/test1bin0 -> 785 bytes
-rw-r--r--subversion/tests/afl/afl-x509.c75
-rw-r--r--subversion/tests/cmdline/README534
-rw-r--r--subversion/tests/cmdline/atomic-ra-revprop-change.c213
-rwxr-xr-xsubversion/tests/cmdline/authz_tests.py1711
-rwxr-xr-xsubversion/tests/cmdline/autoprop_tests.py767
-rwxr-xr-xsubversion/tests/cmdline/basic_tests.py3281
-rwxr-xr-xsubversion/tests/cmdline/blame_tests.py1094
-rwxr-xr-xsubversion/tests/cmdline/cat_tests.py276
-rwxr-xr-xsubversion/tests/cmdline/changelist_tests.py1213
-rwxr-xr-xsubversion/tests/cmdline/checkout_tests.py1213
-rwxr-xr-xsubversion/tests/cmdline/commit_tests.py3234
-rwxr-xr-xsubversion/tests/cmdline/copy_tests.py5965
-rwxr-xr-xsubversion/tests/cmdline/dav-mirror-autocheck.sh500
-rwxr-xr-xsubversion/tests/cmdline/davautocheck.sh791
-rwxr-xr-xsubversion/tests/cmdline/depth_tests.py3038
-rwxr-xr-xsubversion/tests/cmdline/diff_tests.py5267
-rw-r--r--subversion/tests/cmdline/diff_tests_data/3449_spurious_v131
-rw-r--r--subversion/tests/cmdline/diff_tests_data/3449_spurious_v232
-rw-r--r--subversion/tests/cmdline/diff_tests_data/3449_spurious_v331
-rw-r--r--subversion/tests/cmdline/entries-dump.c408
-rwxr-xr-xsubversion/tests/cmdline/entries_tests.py271
-rwxr-xr-xsubversion/tests/cmdline/export_tests.py1168
-rwxr-xr-xsubversion/tests/cmdline/externals_tests.py4524
-rw-r--r--subversion/tests/cmdline/externals_tests_data/invalid_uris_in_repo.dump142
-rwxr-xr-xsubversion/tests/cmdline/getopt_tests.py260
-rw-r--r--subversion/tests/cmdline/getopt_tests_data/svn--help_stderr0
-rw-r--r--subversion/tests/cmdline/getopt_tests_data/svn--help_stdout55
-rw-r--r--subversion/tests/cmdline/getopt_tests_data/svn--version--quiet_stderr0
-rw-r--r--subversion/tests/cmdline/getopt_tests_data/svn--version--quiet_stdout1
-rw-r--r--subversion/tests/cmdline/getopt_tests_data/svn--version--verbose_stderr0
-rw-r--r--subversion/tests/cmdline/getopt_tests_data/svn--version--verbose_stdout97
-rw-r--r--subversion/tests/cmdline/getopt_tests_data/svn--version_stderr0
-rw-r--r--subversion/tests/cmdline/getopt_tests_data/svn--version_stdout26
-rw-r--r--subversion/tests/cmdline/getopt_tests_data/svn_help--version_stderr0
-rw-r--r--subversion/tests/cmdline/getopt_tests_data/svn_help--version_stdout8
-rw-r--r--subversion/tests/cmdline/getopt_tests_data/svn_help_bogus-cmd_stderr2
-rw-r--r--subversion/tests/cmdline/getopt_tests_data/svn_help_bogus-cmd_stdout0
-rw-r--r--subversion/tests/cmdline/getopt_tests_data/svn_help_log_switch_stderr0
-rw-r--r--subversion/tests/cmdline/getopt_tests_data/svn_help_log_switch_stdout249
-rw-r--r--subversion/tests/cmdline/getopt_tests_data/svn_help_stderr0
-rw-r--r--subversion/tests/cmdline/getopt_tests_data/svn_help_stdout55
-rw-r--r--subversion/tests/cmdline/getopt_tests_data/svn_stderr1
-rw-r--r--subversion/tests/cmdline/getopt_tests_data/svn_stdout0
-rwxr-xr-xsubversion/tests/cmdline/history_tests.py217
-rwxr-xr-xsubversion/tests/cmdline/import_tests.py616
-rwxr-xr-xsubversion/tests/cmdline/info_tests.py778
-rwxr-xr-xsubversion/tests/cmdline/input_validation_tests.py333
-rwxr-xr-xsubversion/tests/cmdline/iprop_authz_tests.py130
-rwxr-xr-xsubversion/tests/cmdline/iprop_tests.py1692
-rwxr-xr-xsubversion/tests/cmdline/legacy/utf8_tests.py170
-rw-r--r--subversion/tests/cmdline/lock-helper.c75
-rwxr-xr-xsubversion/tests/cmdline/lock_tests.py2557
-rwxr-xr-xsubversion/tests/cmdline/log_tests.py2840
-rw-r--r--subversion/tests/cmdline/log_tests_data/merge_history_repo.pngbin0 -> 9885 bytes
-rw-r--r--subversion/tests/cmdline/log_tests_data/xml-invalid-chars.dump19
-rwxr-xr-xsubversion/tests/cmdline/merge_authz_tests.py918
-rwxr-xr-xsubversion/tests/cmdline/merge_automatic_tests.py1440
-rwxr-xr-xsubversion/tests/cmdline/merge_reintegrate_tests.py2893
-rwxr-xr-xsubversion/tests/cmdline/merge_tests.py18687
-rwxr-xr-xsubversion/tests/cmdline/merge_tree_conflict_tests.py2409
-rwxr-xr-xsubversion/tests/cmdline/mergeinfo_tests.py974
-rwxr-xr-xsubversion/tests/cmdline/mod_authz_svn_tests.py1069
-rw-r--r--subversion/tests/cmdline/mod_dav_svn_tests.py663
-rwxr-xr-xsubversion/tests/cmdline/move_tests.py1795
-rwxr-xr-xsubversion/tests/cmdline/patch_tests.py7884
-rwxr-xr-xsubversion/tests/cmdline/prop_tests.py2890
-rwxr-xr-xsubversion/tests/cmdline/redirect_tests.py283
-rwxr-xr-xsubversion/tests/cmdline/relocate_tests.py441
-rwxr-xr-xsubversion/tests/cmdline/resolve_tests.py690
-rwxr-xr-xsubversion/tests/cmdline/revert_tests.py1690
-rwxr-xr-xsubversion/tests/cmdline/schedule_tests.py755
-rwxr-xr-xsubversion/tests/cmdline/shelve_tests.py176
-rwxr-xr-xsubversion/tests/cmdline/special_tests.py1348
-rw-r--r--subversion/tests/cmdline/special_tests_data/bad-special-type.dump47
-rw-r--r--subversion/tests/cmdline/special_tests_data/symlink.dump58
-rwxr-xr-xsubversion/tests/cmdline/stat_tests.py2364
-rwxr-xr-xsubversion/tests/cmdline/svnadmin_tests.py3907
-rw-r--r--subversion/tests/cmdline/svnadmin_tests_data/load_txdelta.dump.gzbin0 -> 7077 bytes
-rw-r--r--subversion/tests/cmdline/svnadmin_tests_data/mergeinfo_included.dump434
-rw-r--r--subversion/tests/cmdline/svnadmin_tests_data/mergeinfo_included_full.dump713
-rw-r--r--subversion/tests/cmdline/svnadmin_tests_data/normalization_check.dump259
-rw-r--r--subversion/tests/cmdline/svnadmin_tests_data/skeleton_repos.dump207
-rwxr-xr-xsubversion/tests/cmdline/svnauthz_tests.py926
-rwxr-xr-xsubversion/tests/cmdline/svndumpfilter_tests.py788
-rw-r--r--subversion/tests/cmdline/svndumpfilter_tests_data/empty_revisions.dump94
-rw-r--r--subversion/tests/cmdline/svndumpfilter_tests_data/greek_tree.dump248
-rw-r--r--subversion/tests/cmdline/svndumpfilter_tests_data/mergeinfo_included_partial.dump769
-rw-r--r--subversion/tests/cmdline/svndumpfilter_tests_data/simple_v3.dumpbin0 -> 1420 bytes
-rw-r--r--subversion/tests/cmdline/svndumpfilter_tests_data/with_merges.dump346
-rw-r--r--subversion/tests/cmdline/svneditor.bat26
-rwxr-xr-xsubversion/tests/cmdline/svneditor.py78
-rwxr-xr-xsubversion/tests/cmdline/svnfsfs_tests.py338
-rwxr-xr-xsubversion/tests/cmdline/svnlook_tests.py748
-rwxr-xr-xsubversion/tests/cmdline/svnmover_tests.py1711
-rwxr-xr-xsubversion/tests/cmdline/svnmucc_tests.py607
-rwxr-xr-xsubversion/tests/cmdline/svnrdump_tests.py1053
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/add-multi-prop.dumpbin0 -> 830 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/copy-and-modify.dumpbin0 -> 1367 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/copy-bad-line-endings.dump49
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/copy-bad-line-endings.expected.dump50
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/copy-bad-line-endings2.dump141
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/copy-bad-line-endings2.expected.dumpbin0 -> 1832 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/copy-from-previous-version-and-modify.dumpbin0 -> 2474 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/copy-parent-modify-prop.dumpbin0 -> 1169 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/descend-into-replace.dumpbin0 -> 6316 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/descend-into-replace.expected.dumpbin0 -> 7175 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/dir-prop-change.dump48
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/mergeinfo-contains-r0.dumpbin0 -> 1012 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/mergeinfo-contains-r0.expected.dumpbin0 -> 1001 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/mergeinfo_included_full.dumpbin0 -> 12299 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/modified-in-place.dumpbin0 -> 1232 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/move-and-modify.dumpbin0 -> 3926 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/multi-prop-edits.dump298
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/no-author.dump38
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/partial_incremental.dumpbin0 -> 859 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/repo-with-copy-of-root-dir.dump39
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/revision-0.dump30
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/revprops.dumpbin0 -> 6172 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/root-range.expected.dumpbin0 -> 5278 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/skeleton.dumpbin0 -> 2582 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/sparse-propchanges.dumpbin0 -> 1576 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/tag-empty-trunk.dump75
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/tag-trunk-with-file.dumpbin0 -> 1721 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/tag-trunk-with-file2.dumpbin0 -> 1444 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/trunk-A-changes.dumpbin0 -> 7597 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/trunk-A-changes.expected.dumpbin0 -> 8367 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/trunk-A-range.expected.dumpbin0 -> 4770 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/trunk-only-range.expected.dumpbin0 -> 5186 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/trunk-only.dumpbin0 -> 6116 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/trunk-only.expected.dumpbin0 -> 5306 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/url-encoding-bug.dumpbin0 -> 1544 bytes
-rw-r--r--subversion/tests/cmdline/svnrdump_tests_data/with_merges.dumpbin0 -> 6111 bytes
-rwxr-xr-xsubversion/tests/cmdline/svnserveautocheck.sh159
-rwxr-xr-xsubversion/tests/cmdline/svnsync_authz_tests.py504
-rwxr-xr-xsubversion/tests/cmdline/svnsync_tests.py635
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/copy-and-modify.dump78
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/copy-bad-encoding.dump47
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/copy-bad-encoding.expected.dump47
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/copy-bad-line-endings.dump48
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/copy-bad-line-endings.expected.dump49
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/copy-bad-line-endings2.dump141
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/copy-bad-line-endings2.expected.dump143
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/copy-from-previous-version-and-modify.dump155
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/copy-from-previous-version.dump181
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/copy-parent-modify-prop.dump91
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/delete-revprops.dump45
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/delete-revprops.expected.dump41
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/delete-svn-props.dump76
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/descend-into-replace.dump386
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/descend-into-replace.expected.dump411
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/dir-prop-change.dump47
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/file-dir-file.dump77
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/largemods.dump4932
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/mergeinfo-contains-r0.dump28
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/modified-in-place.dump76
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/no-author.dump38
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/repo-with-copy-of-root-dir.dump39
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/revprops.dump338
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/svnsync-move-and-modify.dump288
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/svnsync-trunk-A-changes.dump449
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/svnsync-trunk-A-changes.expected.dump525
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/svnsync-trunk-only.dump316
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/svnsync-trunk-only.expected.dump317
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/tag-empty-trunk.dump75
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/tag-trunk-with-dir.dump84
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/tag-trunk-with-file.dump125
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/tag-trunk-with-file2.dump116
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/tag-with-modified-file.dump128
-rw-r--r--subversion/tests/cmdline/svnsync_tests_data/url-encoding-bug.dump107
-rw-r--r--subversion/tests/cmdline/svntest/__init__.py60
-rw-r--r--subversion/tests/cmdline/svntest/actions.py2355
-rw-r--r--subversion/tests/cmdline/svntest/deeptrees.py1197
-rw-r--r--subversion/tests/cmdline/svntest/err.py286
-rw-r--r--subversion/tests/cmdline/svntest/factory.py1919
-rw-r--r--subversion/tests/cmdline/svntest/main.py2531
-rwxr-xr-xsubversion/tests/cmdline/svntest/mergetrees.py507
-rw-r--r--subversion/tests/cmdline/svntest/objects.py337
-rw-r--r--subversion/tests/cmdline/svntest/sandbox.py614
-rw-r--r--subversion/tests/cmdline/svntest/testcase.py351
-rw-r--r--subversion/tests/cmdline/svntest/tree.py881
-rw-r--r--subversion/tests/cmdline/svntest/verify.py960
-rw-r--r--subversion/tests/cmdline/svntest/wc.py1205
-rwxr-xr-xsubversion/tests/cmdline/svnversion_tests.py402
-rwxr-xr-xsubversion/tests/cmdline/switch_tests.py2929
-rw-r--r--subversion/tests/cmdline/theta.binbin0 -> 1380 bytes
-rwxr-xr-xsubversion/tests/cmdline/trans_tests.py978
-rwxr-xr-xsubversion/tests/cmdline/tree_conflict_tests.py1544
-rw-r--r--subversion/tests/cmdline/tree_conflict_tests.txt161
-rwxr-xr-xsubversion/tests/cmdline/update_tests.py6956
-rw-r--r--subversion/tests/cmdline/update_tests_data/checkout_broken_eol.dump48
-rwxr-xr-xsubversion/tests/cmdline/upgrade_tests.py1559
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/README37
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/add_add_del_del_tc.tar.bz2bin0 -> 838 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/add_add_x2.tar.bz2bin0 -> 782 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/basic_upgrade.tar.bz2bin0 -> 1940 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/changelist_upgrade_1_6.tar.bz2bin0 -> 1821 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/delete-in-copy.tar.bz2bin0 -> 2341 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/depth_exclude.tar.bz2bin0 -> 636 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/depth_exclude_2.tar.bz2bin0 -> 527 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/dirs-only.tar.bz2bin0 -> 479 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/iprops_upgrade_nonroot.tar.bz2bin0 -> 3347 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/iprops_upgrade_nonroot1_6.tar.bz2bin0 -> 905 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/iprops_upgrade_root.tar.bz2bin0 -> 2711 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/iprops_upgrade_root1_6.tar.bz2bin0 -> 832 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/logs_left_1_5.tar.bz2bin0 -> 2484 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/missing-dirs.tar.bz2bin0 -> 1276 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/replaced-files.tar.bz2bin0 -> 1041 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/tree-replace1.tar.bz2bin0 -> 1492 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/tree-replace2.tar.bz2bin0 -> 1328 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/upgrade_1_0.tar.bz2bin0 -> 4241 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/upgrade_1_0_with_externals.tar.bz2bin0 -> 4512 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/upgrade_1_5.tar.bz2bin0 -> 1933 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/upgrade_1_7_dir_external.tar.bz2bin0 -> 3167 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/upgrade_absent.tar.bz2bin0 -> 1496 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/upgrade_absent_repos.tar.bz2bin0 -> 9597 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/upgrade_file_externals.tar.bz2bin0 -> 2379 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/upgrade_from_1_7_wc.tar.bz2bin0 -> 5942 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/upgrade_locked.tar.bz2bin0 -> 855 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/upgrade_missing_replaced.tar.bz2bin0 -> 1667 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/upgrade_not_present_replaced.tar.bz2bin0 -> 1717 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/upgrade_wcprops.tar.bz2bin0 -> 2210 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/upgrade_with_externals.tar.bz2bin0 -> 3475 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/upgrade_with_scheduled_change.tar.bz2bin0 -> 1928 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/wc-3x-1.4.0.tar.bz2bin0 -> 3392 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/wc-3x-1.4.6.tar.bz2bin0 -> 3435 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/wc-3x-1.6.12.tar.bz2bin0 -> 3302 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/wc-delete.tar.bz2bin0 -> 814 bytes
-rw-r--r--subversion/tests/cmdline/upgrade_tests_data/wc-without-stat1.tar.bz2bin0 -> 5342 bytes
-rwxr-xr-xsubversion/tests/cmdline/wc_tests.py394
-rw-r--r--subversion/tests/diacritical.txt41
-rw-r--r--subversion/tests/greek-tree.txt53
-rw-r--r--subversion/tests/libsvn_client/client-test.c1442
-rw-r--r--subversion/tests/libsvn_client/conflicts-test.c5275
-rw-r--r--subversion/tests/libsvn_client/mtcc-test.c848
-rw-r--r--subversion/tests/libsvn_delta/delta-window-test.h122
-rw-r--r--subversion/tests/libsvn_delta/random-test.c635
-rw-r--r--subversion/tests/libsvn_delta/range-index-test.h195
-rw-r--r--subversion/tests/libsvn_delta/svndiff-stream-test.c79
-rw-r--r--subversion/tests/libsvn_delta/svndiff-test.c108
-rw-r--r--subversion/tests/libsvn_delta/vdelta-test.c269
-rw-r--r--subversion/tests/libsvn_delta/vdelta_1.txt1
-rw-r--r--subversion/tests/libsvn_delta/window-test.c113
-rw-r--r--subversion/tests/libsvn_diff/diff-diff3-test.c3115
-rw-r--r--subversion/tests/libsvn_diff/parse-diff-test.c1138
-rw-r--r--subversion/tests/libsvn_fs/fs-sequential-test.c415
-rw-r--r--subversion/tests/libsvn_fs/fs-test.c7519
-rw-r--r--subversion/tests/libsvn_fs/locks-test.c1256
-rw-r--r--subversion/tests/libsvn_fs_base/changes-test.c926
-rw-r--r--subversion/tests/libsvn_fs_base/fs-base-test.c1552
-rw-r--r--subversion/tests/libsvn_fs_base/strings-reps-test.c759
-rw-r--r--subversion/tests/libsvn_fs_fs/fs-fs-fuzzy-test.c394
-rw-r--r--subversion/tests/libsvn_fs_fs/fs-fs-pack-test.c1948
-rw-r--r--subversion/tests/libsvn_fs_fs/fs-fs-private-test.c441
-rw-r--r--subversion/tests/libsvn_fs_x/fs-x-pack-test.c969
-rw-r--r--subversion/tests/libsvn_fs_x/string-table-test.c318
-rw-r--r--subversion/tests/libsvn_ra/ra-test.c1826
-rw-r--r--subversion/tests/libsvn_ra_local/ra-local-test.c302
-rw-r--r--subversion/tests/libsvn_repos/authz-test.c495
-rw-r--r--subversion/tests/libsvn_repos/authz.groups4
-rw-r--r--subversion/tests/libsvn_repos/authz.rules18
-rw-r--r--subversion/tests/libsvn_repos/dir-delta-editor.c314
-rw-r--r--subversion/tests/libsvn_repos/dir-delta-editor.h68
-rw-r--r--subversion/tests/libsvn_repos/dump-load-test.c290
-rw-r--r--subversion/tests/libsvn_repos/repos-test.c4543
-rw-r--r--subversion/tests/libsvn_subr/auth-test.c479
-rw-r--r--subversion/tests/libsvn_subr/bit-array-test.c168
-rw-r--r--subversion/tests/libsvn_subr/cache-test.c622
-rw-r--r--subversion/tests/libsvn_subr/checksum-test.c395
-rw-r--r--subversion/tests/libsvn_subr/compat-test.c227
-rw-r--r--subversion/tests/libsvn_subr/compress-test.c93
-rw-r--r--subversion/tests/libsvn_subr/config-test.c500
-rw-r--r--subversion/tests/libsvn_subr/config-test.cfg72
-rw-r--r--subversion/tests/libsvn_subr/crypto-test.c192
-rw-r--r--subversion/tests/libsvn_subr/dirent_uri-test.c3025
-rw-r--r--subversion/tests/libsvn_subr/error-code-test.c87
-rw-r--r--subversion/tests/libsvn_subr/error-test.c246
-rw-r--r--subversion/tests/libsvn_subr/hashdump-test.c258
-rw-r--r--subversion/tests/libsvn_subr/io-test.c1190
-rw-r--r--subversion/tests/libsvn_subr/mergeinfo-test.c1837
-rw-r--r--subversion/tests/libsvn_subr/opt-test.c208
-rw-r--r--subversion/tests/libsvn_subr/packed-data-test.c578
-rw-r--r--subversion/tests/libsvn_subr/path-test.c1766
-rw-r--r--subversion/tests/libsvn_subr/prefix-string-test.c154
-rw-r--r--subversion/tests/libsvn_subr/priority-queue-test.c240
-rw-r--r--subversion/tests/libsvn_subr/revision-test.c136
-rw-r--r--subversion/tests/libsvn_subr/root-pools-test.c137
-rw-r--r--subversion/tests/libsvn_subr/skel-test.c909
-rw-r--r--subversion/tests/libsvn_subr/spillbuf-test.c595
-rw-r--r--subversion/tests/libsvn_subr/sqlite-test.c186
-rw-r--r--subversion/tests/libsvn_subr/stream-test.c1043
-rw-r--r--subversion/tests/libsvn_subr/string-test.c1157
-rw-r--r--subversion/tests/libsvn_subr/subst_translate-test.c526
-rw-r--r--subversion/tests/libsvn_subr/time-test.c360
-rw-r--r--subversion/tests/libsvn_subr/translate-test.c1333
-rw-r--r--subversion/tests/libsvn_subr/utf-test.c1034
-rw-r--r--subversion/tests/libsvn_subr/x509-test.c905
-rw-r--r--subversion/tests/libsvn_subr/xml-test.c360
-rw-r--r--subversion/tests/libsvn_subr/zlib.deflatedbin0 -> 77174 bytes
-rw-r--r--subversion/tests/libsvn_wc/conflict-data-test.c981
-rwxr-xr-xsubversion/tests/libsvn_wc/create_wc_for_upgrade.sh108
-rw-r--r--subversion/tests/libsvn_wc/db-test.c1556
-rw-r--r--subversion/tests/libsvn_wc/entries-compat.c647
-rw-r--r--subversion/tests/libsvn_wc/op-depth-test.c12104
-rw-r--r--subversion/tests/libsvn_wc/pristine-store-test.c324
-rw-r--r--subversion/tests/libsvn_wc/utils.c707
-rw-r--r--subversion/tests/libsvn_wc/utils.h244
-rw-r--r--subversion/tests/libsvn_wc/wc-incomplete-tester.c97
-rw-r--r--subversion/tests/libsvn_wc/wc-lock-tester.c136
-rw-r--r--subversion/tests/libsvn_wc/wc-queries-test.c1068
-rw-r--r--subversion/tests/libsvn_wc/wc-test-queries.h112
-rw-r--r--subversion/tests/libsvn_wc/wc-test-queries.sql78
-rw-r--r--subversion/tests/libsvn_wc/wc-test.c521
-rw-r--r--subversion/tests/manual/README3
-rwxr-xr-xsubversion/tests/manual/tree-conflicts-add-vs-add.py423
-rw-r--r--subversion/tests/svn_test.h387
-rw-r--r--subversion/tests/svn_test_fs.c972
-rw-r--r--subversion/tests/svn_test_fs.h203
-rw-r--r--subversion/tests/svn_test_main.c1125
-rw-r--r--subversion/tests/templates/empty-fsfs-v1.zipbin0 -> 9707 bytes
-rw-r--r--subversion/tests/templates/empty-fsfs-v2.zipbin0 -> 16187 bytes
-rw-r--r--subversion/tests/templates/empty-fsfs-v3.zipbin0 -> 17653 bytes
-rw-r--r--subversion/tests/templates/empty-fsfs-v4.zipbin0 -> 19638 bytes
-rw-r--r--subversion/tests/templates/empty-fsfs-v6.zipbin0 -> 22422 bytes
-rw-r--r--subversion/tests/templates/empty-fsfs-v7.zipbin0 -> 27120 bytes
-rw-r--r--subversion/tests/templates/greek-fsfs-v1.zipbin0 -> 11426 bytes
-rw-r--r--subversion/tests/templates/greek-fsfs-v2.zipbin0 -> 17917 bytes
-rw-r--r--subversion/tests/templates/greek-fsfs-v3.zipbin0 -> 19392 bytes
-rw-r--r--subversion/tests/templates/greek-fsfs-v4.zipbin0 -> 22771 bytes
-rw-r--r--subversion/tests/templates/greek-fsfs-v6.zipbin0 -> 25498 bytes
-rw-r--r--subversion/tests/templates/greek-fsfs-v7.zipbin0 -> 30971 bytes
-rw-r--r--subversion/tests/templates/greek.dump260
-rw-r--r--subversion/tests/tests.conf36
335 files changed, 230222 insertions, 0 deletions
diff --git a/subversion/tests/README b/subversion/tests/README
new file mode 100644
index 0000000..9a506cd
--- /dev/null
+++ b/subversion/tests/README
@@ -0,0 +1,283 @@
+
+ ================================
+ A Subversion Testing Framework
+ ================================
+
+
+The three goals of Subversion's automated test-suite:
+
+ 1. It must be easy to run.
+ 2. It must be easy to understand the results.
+ 3. It must be easy to add new tests.
+
+
+
+Definition of an SVN "test program"
+-----------------------------------
+
+A Subversion test program is any executable that contains a number of
+sub-tests it can run. It has a standard interface:
+
+1. If run with a numeric argument N, the program runs sub-test N.
+
+2. If run with the argument `--list', it will list the names of all sub-tests.
+
+3. If run with no arguments, the program runs *all* sub-tests.
+
+4. The program returns either 0 (success) or 1 (if any sub-test failed).
+
+5. Upon finishing a test, the program reports the results in a format
+ which is both machine-readable (for the benefit of automatic
+ regression tracking scripts), and human-readable (for the sake of
+ painstaking grovelling by hand in the dead of night):
+
+ (PASS | FAIL): (argv[0]) (argv[1]): (description)
+
+For example,
+
+ [sussman@newton:~] ./frobtest 2
+ PASS: frobtest 2: frobnicating fragile data
+ [sussman@newton:~]
+
+Note that no particular programming language is required to write a
+set of tests; they just need to export this user interface.
+
+
+
+How to write new C tests
+------------------------
+
+The C test framework tests library APIs, both internal and external.
+
+All test programs use a standard `main' function. You write .c files
+that contain only test functions --- you should not define your own
+`main' function.
+
+Instead, your code should define an externally visible array
+`test_funcs', like this:
+
+ /* The test table. */
+ struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS(test_a),
+ SVN_TEST_PASS(test_b),
+ SVN_TEST_PASS(test_c),
+ SVN_TEST_NULL
+ };
+
+In this example, `test_a', `test_b', and `test_c' are the names of
+test functions. The first and last elements of the array must be
+SVN_TEST_NULL. The first SVN_TEST_NULL is there to leave room for
+Buddha. The standard `main' function searches for the final
+SVN_TEST_NULL to determine the size of the array.
+
+Instead of SVN_TEST_PASS, you can use SVN_TEST_XFAIL to declare that a
+test is expected to fail. The status of such tests is then no longer
+marked as PASS or FAIL, but rather as XFAIL (eXpected FAILure) or
+XPASS (uneXpected PASS).
+
+The purpose of XFAIL tests is to confirm that a known bug still
+exists. When you see such a test uneXpectedly PASS, you've probably
+fixed the bug it tests for, even if that wasn't your intention. :-)
+XFAIL is not to be used as a way of testing a deliberately invalid
+operation that is expected to fail when Subversion is working
+correctly, nor as a place-holder for a test that is not yet written.
+
+Each test function conforms to the svn_test_driver_t prototype:
+
+ svn_error_t *f (const char **MSG,
+                  svn_boolean_t MSG_ONLY,
+ apr_pool_t *POOL);
+
+When called, a test function should first set *MSG to a brief (as in,
+half-line) description of the test. Then, if MSG_ONLY is TRUE, the
+test should immediately return SVN_NO_ERROR. Else it should perform a
+test. If the test passes, the function should return SVN_NO_ERROR;
+otherwise, it should return an error object, built using the functions
+in svn_error.h.
+
+Once you've got a .c file with a bunch of tests and a `test_funcs'
+array, you should link it against the `libsvn_tests_main.la' libtool
+library, in this directory, `subversion/tests'. That library provides
+a `main' function which will check the command-line arguments, pick
+the appropriate tests to run from your `test_funcs' array, and print
+the results in the standard way.
+
+
+How to write new Python tests
+-----------------------------
+
+The python test framework exercises the command-line client as a
+"black box".
+
+To write python tests, please look at the README file inside the
+cmdline/ subdirectory.
+
+
+When to write new tests
+-----------------------
+
+In the world of CVS development, people have noticed that the same
+bugs tend to recur over and over. Thus the CVS community has adopted
+a hard-and-fast rule that whenever somebody fixes a bug, a *new* test
+is added to the suite to specifically check for it. It's a common
+case that in the process of fixing a bug, several old bugs are
+accidentally resurrected... and then quickly revealed by the test
+suite.
+
+This same rule applies to Subversion development: ** If you fix a
+bug, write a test for it. **
+
+
+When to file a related issue
+----------------------------
+
+By definition, if you write a new test which is set to XFail, then it
+is assumed that the test is for a known bug.  In these cases it is
+recommended that you associate an issue in the issue tracker with the
+XFailing test. This ensures that the issue tracker is the authoritative
+list of known bugs -- see http://subversion.tigris.org/issue-tracker.html.
+You may need to create a new issue if one doesn't already exist.
+
+For C tests simply add a comment noting any associated issue:
+
+ /* This is for issue #3234. */
+ static svn_error_t *
+ test_copy_crash(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+ {
+ apr_array_header_t *sources;
+ svn_opt_revision_t rev;
+ .
+ .
+
+For Python tests use the @Issue() decorator (a summary comment of the
+issue never hurts either):
+
+ #---------------------------------------------------------------------
+ # Test for issue #3657 'dav update report handler in skelta mode can
+ # cause spurious conflicts'.
+ @Issue(3657)
+ @XFail()
+ def dav_skelta_mode_causes_spurious_conflicts(sbox):
+ "dav skelta mode can cause spurious conflicts"
+ .
+ .
+
+Of course it isn't *always* necessary to create an associated issue.
+If the fix for a new XFailing test is imminent, you are probably
+better off simply fixing the bug and moving on. Use common sense, but
+when in doubt associate a new issue.
+
+
+What not to test
+----------------
+
+Regression tests are for testing interface promises. This might
+include semi-private interfaces (such as the non-public .h files
+inside module subdirs), but does not include implementation details
+behind the interfaces. For example, this is a good way to test
+svn_fs_txn_name:
+
+ /* Test that svn_fs_txn_name fulfills its promise. */
+ char *txn_name = NULL;
+  SVN_ERR (svn_fs_txn_name (&txn_name, txn, pool));
+ if (txn_name == NULL)
+ return fail();
+
+But this is not:
+
+ /* Test that the txn got id "0", since it's the first txn. */
+ char *txn_name = NULL;
+  SVN_ERR (svn_fs_txn_name (&txn_name, txn, pool));
+ if (txn_name && (strcmp (txn_name, "0") != 0))
+ return fail();
+
+During development, it may sometimes be very convenient to
+*temporarily* test implementation details via the regular test suite.
+It's okay to do that, but please remove the test when you're done and
+make sure it's clearly marked in the meantime. Since implementation
+details are not interface promises, they might legitimately change --
+and when they change, that test will break. At which point whoever
+encountered the problem will look into the test suite and find the
+temporary test you forgot to remove. As long as it's marked like
+this...
+
+ /* Temporary test for debugging only: Test that the txn got id
+ * "0", since it's the first txn.
+ * NOTE: If the test suite is failing because of this test, then
+ * just remove the test. It was written to help me debug an
+ * implementation detail that might have changed by now, so its
+ * failure does not necessarily mean there's anything wrong with
+ * Subversion. */
+ char *txn_name = NULL;
+  SVN_ERR (svn_fs_txn_name (&txn_name, txn, pool));
+ if (txn_name && (strcmp (txn_name, "0") != 0))
+ return fail();
+
+...then they won't have wasted much time.
+
+
+What's here
+-----------
+
+ * svn_test_main.c
+ [shared library "libsvn_tests_main"]
+ A standardized main() function to drive tests. Link this into
+ your automated test-programs.
+
+ * svn_test_editor.c
+ [shared library "libsvn_tests_editor"]
+ An editor for testing drivers of svn_delta_edit_fns_t. This
+ editor's functions simply print information to stdout.
+
+ * cmdline/
+ A collection of python scripts to test the command-line client.
+
+
+`make check`
+------------
+
+The file `build.conf' (at the top level of the tree) defines a
+[test-scripts] section. These are a list of scripts that will be run
+whenever someone types `make check`.
+
+Each script is expected to output sub-test information as described in
+the first section of this document; the `make check` rule scans for
+FAIL codes, and logs all the sub-test output into a top-level file
+called `tests.log'.
+
+If you write a new C executable that contains subtests, be sure to add
+a build "target" under the TESTING TARGETS section of build.conf.
+
+If you write a new python-script, be sure to add to the [test-scripts]
+section.
+
+
+Testing Over DAV
+----------------
+
+Please see subversion/tests/cmdline/README for how to run the
+command-line client test suite against a remote repository.
+
+Conclusion
+----------
+
+Our test suite...
+
+
+ 1. ...must be easy to run.
+
+ * run `make check`
+
+ 2. ...must be easy to understand the results.
+
+ * test programs output standardized messages
+ * all messages are logged
+ * `make check` only displays errors (not successes!)
+
+ 3. ...must be easy to add new tests.
+
+ * add your own sub-test to an existing test program, or
+ * add a new test program using template C or python code.
diff --git a/subversion/tests/afl/README b/subversion/tests/afl/README
new file mode 100644
index 0000000..d2015f7
--- /dev/null
+++ b/subversion/tests/afl/README
@@ -0,0 +1,31 @@
+American Fuzzy Lop (AFL) is available from all good distros, or from
+the upstream site: http://lcamtuf.coredump.cx/afl/
+
+Configure to build the instrumented binary:
+
+ CC=afl-gcc ../src/configure --disable-shared --enable-static
+
+Build the binary:
+
+ make clean
+ make afl-x509
+
+Run the fuzzer (you may need root to change the CPU governor):
+
+ mkdir afl-findings
+ afl-fuzz -i ../src/subversion/tests/afl/afl-x509-testcase \
+ -o afl-findings -M fuzzer01 \
+ subversion/tests/afl/afl-x509
+ afl-fuzz -i ../src/subversion/tests/afl/afl-x509-testcase \
+ -o afl-findings -S fuzzer02 \
+ subversion/tests/afl/afl-x509
+ afl-fuzz -i ../src/subversion/tests/afl/afl-x509-testcase \
+ -o afl-findings -S fuzzer03 \
+ subversion/tests/afl/afl-x509
+
+Wait, burning CPU...
+
+Testcases that cause SEGVs are easier to debug using a separate build
+without AFL instrumentation:
+
+ gdb --args subversion/tests/afl/afl-x509 afl-findings/fuzzer01/crashes/some-test-case
diff --git a/subversion/tests/afl/afl-x509-testcase/test1 b/subversion/tests/afl/afl-x509-testcase/test1
new file mode 100644
index 0000000..9fa6b8b
--- /dev/null
+++ b/subversion/tests/afl/afl-x509-testcase/test1
Binary files differ
diff --git a/subversion/tests/afl/afl-x509.c b/subversion/tests/afl/afl-x509.c
new file mode 100644
index 0000000..06f106a
--- /dev/null
+++ b/subversion/tests/afl/afl-x509.c
@@ -0,0 +1,75 @@
+/*
+ * afl-x509.c an American Fuzzy Lop test
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ *
+ */
+
+/* The input data can either be a file on disk or provided via stdin:
+
+ afl-x509 some-file
+ afl-x509 < some-file
+
+ In practice the file simply contains random binary data. The data
+ are interpreted as a (base64 decoded) x509 cert and a parse is
+ attempted. */
+
+#include "svn_x509.h"
+#include "svn_cmdline.h"
+#include "svn_pools.h"
+#include "svn_io.h"
+
+#include <stdlib.h>
+
+static svn_error_t *
+parse(const char *filename, apr_pool_t *pool)
+{
+ svn_stringbuf_t *buf;
+ svn_x509_certinfo_t *certinfo;
+
+ SVN_ERR(svn_stringbuf_from_file2(&buf, filename, pool));
+ SVN_ERR(svn_x509_parse_cert(&certinfo, buf->data, buf->len, pool, pool));
+
+ return SVN_NO_ERROR;
+}
+
+int main(int argc, char **argv)
+{
+ apr_pool_t *pool;
+ int exit_code = EXIT_SUCCESS;
+ svn_error_t *err;
+ const char *filename;
+
+ if (argc == 2)
+ filename = argv[1];
+ else
+ filename = "-";
+
+ if (svn_cmdline_init("afl-x509", stderr) != EXIT_SUCCESS)
+ return EXIT_FAILURE;
+ pool = apr_allocator_owner_get(svn_pool_create_allocator(FALSE));
+
+ err = parse(filename, pool);
+ if (err)
+ exit_code = EXIT_FAILURE;
+ svn_error_clear(err);
+ svn_pool_destroy(pool);
+ return exit_code;
+}
diff --git a/subversion/tests/cmdline/README b/subversion/tests/cmdline/README
new file mode 100644
index 0000000..65dcc28
--- /dev/null
+++ b/subversion/tests/cmdline/README
@@ -0,0 +1,534 @@
+ (-*- text -*-)
+
+ Subversion Commandline Client: Test Suite
+ ==========================================
+
+The cmdline client test suite doesn't use the C-level testing
+framework, but is structured similarly. Instead of testing library
+APIs, it drives the client just like a user would, examining the
+output and the on-disk results (i.e., the working copy) carefully as
+it goes. In other words, this is "black box" testing of the
+command-line client. It has no access to code internals; it never
+looks inside the .svn/ directory; it only performs actions that a
+human user would do.
+
+These tests require Python 2.7 or later.
+
+ [ For more general information on Subversion's testing system,
+ please read the README in subversion/tests/. ]
+
+
+How To Run The Tests
+====================
+
+To run a test script over ra_local, invoke it from THIS DIRECTORY.
+
+ $ cd subversion/tests/cmdline/
+
+Invoke the script with no arguments to run all the tests in that
+script:
+
+ $ ./basic_tests.py
+
+Invoke with one or more numeric arguments to run those particular tests:
+
+ $ ./basic_tests.py 7 13 17
+
+Invoke with one or more function names to run those particular tests:
+
+ $ ./basic_tests.py basic_mkdir_wc_with_parents basic_switch basic_import
+
+And invoke with the "--list" option to list information about some or
+all tests available in that script:
+
+ $ ./basic_tests.py --list 2 3 4
+ $ ./basic_tests.py --list
+
+Note: if you are building Subversion in a directory other than the source
+directory (q.v. INSTALL), you will have to invoke the tests from within
+the build directory:
+
+ $ cd obj/subversion/tests/cmdline
+ $ ../../../../svn/subversion/tests/cmdline/basic_tests.py
+
+
+Running over mod_dav_svn
+------------------------
+
+Running a script over mod_dav_svn is basically the same, but you have to
+set up httpd 2.0 first (on the same machine, since the tests create
+repositories on the fly), and pass a URL argument to the test scripts.
+
+Assuming you have httpd 2.0 installed in /usr/local/apache2, just add
+two Location directives to /usr/local/apache2/conf/httpd.conf, with
+paths adjusted appropriately:
+
+ <Location /svn-test-work/repositories>
+ DAV svn
+ SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/repositories
+ AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
+ AuthType Basic
+ AuthName "Subversion Repository"
+ AuthUserFile /usr/local/apache2/conf/users
+ Require valid-user
+ </Location>
+
+ <Location /svn-test-work/local_tmp/repos>
+ DAV svn
+ SVNPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp/repos
+ AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
+ AuthType Basic
+ AuthName "Subversion Repository"
+ AuthUserFile /usr/local/apache2/conf/users
+ Require valid-user
+ </Location>
+
+ <Location /authz-test-work/anon>
+ DAV svn
+ SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp
+ AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
+ SVNListParentPath On
+ # This may seem unnecessary but granting access to everyone here is necessary
+ # to exercise a bug with httpd 2.3.x+. The "Require all granted" syntax is
+ # new to 2.3.x+ which we can detect with the mod_authz_core.c module
+ # signature. Use the "Allow from all" syntax with older versions for symmetry.
+ <IfModule mod_authz_core.c>
+ Require all granted
+ </IfModule>
+ <IfModule !mod_authz_core.c>
+ Allow from all
+ </IfModule>
+ </Location>
+ <Location /authz-test-work/mixed>
+ DAV svn
+ SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp
+ AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
+ SVNListParentPath On
+ AuthType Basic
+ AuthName "Subversion Repository"
+ AuthUserFile /usr/local/apache2/conf/users
+ Require valid-user
+ Satisfy Any
+ </Location>
+ <Location /authz-test-work/mixed-noauthwhenanon>
+ DAV svn
+ SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp
+ AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
+ SVNListParentPath On
+ AuthType Basic
+ AuthName "Subversion Repository"
+ AuthUserFile /usr/local/apache2/conf/users
+ Require valid-user
+ AuthzSVNNoAuthWhenAnonymousAllowed On
+ </Location>
+ <Location /authz-test-work/authn>
+ DAV svn
+ SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp
+ AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
+ SVNListParentPath On
+ AuthType Basic
+ AuthName "Subversion Repository"
+ AuthUserFile /usr/local/apache2/conf/users
+ Require valid-user
+ </Location>
+ <Location /authz-test-work/authn-anonoff>
+ DAV svn
+ SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp
+ AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
+ SVNListParentPath On
+ AuthType Basic
+ AuthName "Subversion Repository"
+ AuthUserFile /usr/local/apache2/conf/users
+ Require valid-user
+ AuthzSVNAnonymous Off
+ </Location>
+ <Location /authz-test-work/authn-lcuser>
+ DAV svn
+ SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp
+ AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
+ SVNListParentPath On
+ AuthType Basic
+ AuthName "Subversion Repository"
+ AuthUserFile /usr/local/apache2/conf/users
+ Require valid-user
+ AuthzForceUsernameCase Lower
+ </Location>
+ <Location /authz-test-work/authn-lcuser>
+ DAV svn
+ SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp
+ AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
+ SVNListParentPath On
+ AuthType Basic
+ AuthName "Subversion Repository"
+ AuthUserFile /usr/local/apache2/conf/users
+ Require valid-user
+ AuthzForceUsernameCase Lower
+ </Location>
+ <Location /authz-test-work/authn-group>
+ DAV svn
+ SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp
+ AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
+ SVNListParentPath On
+ AuthType Basic
+ AuthName "Subversion Repository"
+ AuthUserFile /usr/local/apache2/conf/users
+ AuthGroupFile /usr/local/apache2/conf/groups
+ Require group random
+ AuthzSVNAuthoritative Off
+ </Location>
+ <IfModule mod_authz_core.c>
+ <Location /authz-test-work/sallrany>
+ DAV svn
+ SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp
+ AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
+ SVNListParentPath On
+ AuthType Basic
+ AuthName "Subversion Repository"
+ AuthUserFile /usr/local/apache2/conf/users
+ AuthzSendForbiddenOnFailure On
+ Satisfy All
+ <RequireAny>
+ Require valid-user
+ Require expr req('ALLOW') == '1'
+ </RequireAny>
+ </Location>
+ <Location /authz-test-work/sallrall>
+ DAV svn
+ SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp
+ AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz
+ SVNListParentPath On
+ AuthType Basic
+ AuthName "Subversion Repository"
+ AuthUserFile /usr/local/apache2/conf/users
+ AuthzSendForbiddenOnFailure On
+ Satisfy All
+ <RequireAll>
+ Require valid-user
+ Require expr req('ALLOW') == '1'
+ </RequireAll>
+ </Location>
+ </IfModule>
+
+
+ RedirectMatch permanent ^/svn-test-work/repositories/REDIRECT-PERM-(.*)$ /svn-test-work/repositories/$1
+ RedirectMatch ^/svn-test-work/repositories/REDIRECT-TEMP-(.*)$ /svn-test-work/repositories/$1
+
+Httpd should be running on port 80. You may also need to ensure that
+it's running as you, so it has read/write access to the repositories
+that are probably living in your Subversion working copy. To do this,
+set the User and Group directives in httpd.conf, something like this:
+
+ User yourusernamehere
+ Group users
+
+You need to run the tests over mod_dav_svn with authentication enabled, so
+just drop the following 2-line snippet into the
+/usr/local/apache2/conf/users file [1]:
+
+----------------------------
+jrandom:xCGl35kV9oWCY
+jconstant:xCGl35kV9oWCY
+JRANDOM:xCGl35kV9oWCY
+JCONSTANT:xCGl35kV9oWCY
+----------------------------
+
+and these lines into the
+/usr/local/apache2/conf/groups file:
+----------------------------
+random: jrandom
+constant: jconstant
+----------------------------
+
+Now, (re)start Apache and run the tests over mod_dav_svn.
+
+You can run a test script over DAV:
+
+ $ ./basic_tests.py --url http://localhost
+ $ ./basic_tests.py --url http://localhost 3
+
+or
+
+ $ ./basic_tests.py --url=http://localhost
+ $ ./basic_tests.py --url=http://localhost 3
+
+If you run httpd on a port other than 80, you can specify the port in
+the URL: "http://localhost:15835" for example.
+
+To run all tests over DAV, pass BASE_URL when running 'make check'
+from the top of the build dir:
+
+ $ make check BASE_URL=http://localhost
+
+BASE_URL=URL can also be used when running individual tests:
+
+ $ ./basic_tests.py BASE_URL=http://localhost
+ $ ./basic_tests.py BASE_URL=http://localhost 3
+
+
+Note [1]: It would be quite too much to expect those password entries
+ to work on Windows... Apache httpd on Windows doesn't
+ understand crypted passwords, but it does understand
+ MD5-hashed passwords. The correct password entries for
+ Windows are:
+
+ ----------------------------
+ jrandom:$apr1$3p1.....$FQW6RceW5QhJ2blWDQgKn0
+ jconstant:$apr1$jp1.....$Usrqji1c9H6AbOxOGAzzb0
+ JRANDOM:$apr1$3p1.....$FQW6RceW5QhJ2blWDQgKn0
+ JCONSTANT:$apr1$jp1.....$Usrqji1c9H6AbOxOGAzzb0
+ ----------------------------
+
+
+As a shorthand to all of the above, ./davautocheck.sh will generate
+an Apache configuration listening on a random port on localhost and
+run some tests. Without arguments, or when invoking 'make davautocheck'
+on the top-level Makefile, it will run all tests. With arguments,
+it will run just one suite or just one test:
+
+ $ ./davautocheck.sh
+ $ ./davautocheck.sh basic
+ $ ./davautocheck.sh basic 15
+
+With '--no-tests' argument, it will start httpd but not run any tests. This is
+useful for manual testing --- create repositories in
+./svn-test-work/repositories/<$repo> and they will be accessible at
+<URL>/svn-test-work/repositories/<$repo>. You can also run individual tests by passing the --url option to them, as described above.
+
+davautocheck.sh also respects some environment variables; see the comments at
+the top of the script for details.
+
+Running over ra_svn
+-------------------
+
+It's also easy to run the tests against a local svnserve:
+
+$ subversion/svnserve/svnserve -d -r `pwd`/subversion/tests/cmdline
+$ make check BASE_URL=svn://localhost
+
+or, to run individual tests,
+
+$ ./basic_tests.py --url=svn://localhost 3
+
+To enable Cyrus SASL on the server side you should either set the
+ENABLE_SASL variable when calling make:
+
+$ make check BASE_URL=svn://localhost ENABLE_SASL=true
+
+or if you're running an individual test,
+
+$ ./basic_tests.py --url=svn://localhost --enable-sasl 3
+
+Note that to do this you'll have to have a svn.conf file in your
+SASL lib dir (i.e. something like /usr/lib/sasl2/svn.conf), it
+should contain something like:
+
+pwcheck_method: auxprop
+mech_list: CRAM-MD5
+
+And then you'll have to add the users jrandom and jconstant to your
+SASL password db,
+
+$ saslpasswd2 -c -u svntest jrandom
+$ saslpasswd2 -c -u svntest jconstant
+
+As usual, both users should use the password 'rayjandom'.
+
+To enable DUMP_LOAD_CROSS_CHECK to work a third user is required,
+
+$ saslpasswd2 -c -u svntest __dumpster__
+
+with password '__loadster__'.
+
+The user running the tests will need read access to the sasl database
+and on some systems this can be arranged by adding the user to the sasl
+group.
+
+There are 'make svnserveautocheck' and ./svnserveautocheck.sh commands,
+analogous to davautocheck.sh documented above.
+
+
+Running tests in a RAM disk
+---------------------------
+
+Test execution can be dramatically sped up by keeping Subversion test
+data on a RAM disk. On a Linux system, you can mount a RAM disk on the
+fly with the command:
+
+ mount -t tmpfs tmpfs /path/to/src/subversion/tests/cmdline/svn-test-work \
+ -o uid=$USER,mode=770,size=32m
+
+Or, for a more permanent solution, add lines like the following in your
+/etc/fstab file:
+
+ tmpfs /path/to/src/svn/subversion/tests/cmdline/svn-test-work tmpfs defaults,user,noauto,exec,size=32m
+
+The minimum required size for testing ramdisk is approximately 700MB.
+However, flagging your test targets for cleanup dramatically reduces
+the space requirements (as shown in the example configuration above),
+and thus your memory usage. Cleanup means more I/O, but since test
+data is in-memory, there will be no performance degradation. Example:
+
+ make check CLEANUP=true
+
+See http://svn.haxx.se/dev/archive-2003-02/0068.shtml for the original
+authoritative discussion on use of RAM disks.
+
+
+Directory Contents
+==================
+
+ *.py The tests themselves.
+
+ svntest/ Python package, provides test suite framework
+
+ /main.py: Global vars, utility routines; exports
+ run_tests(), the main test routine.
+
+ /tree.py: Infrastructure for SVNTreeNode class.
+ - tree constructors, tree comparison routines.
+ - routines to parse subcommand output into
+ specific kinds of trees.
+ - routines to parse a working copy and
+ entries files into specific kinds of trees.
+
+ /wc.py: Functions for interacting with a working
+ copy, and converting to/from trees.
+
+ /actions.py: Main API for driving subversion client and
+ using trees to verify results.
+
+ /verify.py: Verifies output from Subversion.
+
+ /testcase.py: Control of test case execution - contains
+ decorators for expected failures and conditionally
+ executed tests.
+
+ /sandbox.py: Tools for manipulating a test's working area
+ ("a sandbox"), those are handy for most simple
+ actions a test might want to perform on a wc.
+
+ /objects.py: Objects that keep track of state during a test.
+ (not directly used by the test scripts.)
+
+ /mergetrees.py: Routines that create merge scenarios.
+
+ /factory.py: Automatically generate a (near-)complete new
+ cmdline test from a series of shell commands.
+
+ /error.py: Error codes as constants, for convenience.
+ (auto-generated by tools/dev/gen-py-error.py)
+
+
+What the Python Tests are Doing
+===============================
+
+I. Theory
+
+ A. Types of Verification
+
+ The point of this test system is that it's *automated*: that is,
+ each test can algorithmically verify the results and indicate "PASS"
+ or "FAIL".
+
+ We've identified two broad classes of verification:
+
+ 1. Verifying svn subcommand output.
+
+ Most important subcommands (co, up, ci, im, st) print results to
+ stdout as a list of paths. Even though the paths may be printed
+ out in an unpredictable order, we still want to make sure this
+ list is exactly the *set* of lines we expect to get.
+
+ 2. Verifying the working copy itself.
+
+ Every time a subcommand could potentially change something on
+ disk, we need to inspect the working copy. Specifically, this
+ means we need to make sure the working copy has exactly the
+ tree-structure we expect, and each file has exactly the contents
+ and properties we expect.
+
+
+II. Practice: Trees
+
+ Sam TH <sam@uchicago.edu> proposed and began work on a solution
+ whereby all important, inspectable information is parsed into a
+ general, in-memory tree representation. By comparing actual
+ vs. expected tree structures, we get automated verification.
+
+ A. Tree node structure
+
+ Each "tree node" in a tree has these fields:
+
+ - name : the name of the node
+ - children: list of child nodes (if the node is a dir)
+ - contents: textual contents (if the node is a file)
+ - properties: a hash to hold subversion props
+ - atts: a hash of meta-information about tree nodes themselves
+
+
+ B. Parsing subcommand output into a tree
+
+ Special parsers examine lines printed by subcommands, and
+ convert them into a tree of tree-nodes. The 'contents' and
+ 'properties' fields are empty; but depending on the subcommand,
+ specific attributes in the 'atts' field are set in tree-nodes:
+
+ - svn co/up: a 'status' attribute is set to a two-character
+ value from the set (A, D, G, U, C, _, ' ') or
+ a 'verb' attribute is set to ('Restored')
+
+ - svn status: a 'status' attribute (as above), plus 'wc_rev'
+ and 'repos_rev' attributes to hold the wc
+ and repos revision numbers.
+
+ - svn ci/im: a 'verb' attribute is set to one of
+ (Adding, Sending, Deleting)
+
+
+ C. Parsing a working copy into a tree
+
+ We also have a routine that walks a regular working copy and
+ returns a tree representing disk contents and props. In this
+ case the 'atts' hash in each node is empty, but the 'contents'
+ and 'props' fields are filled in.
+
+
+
+How to Write New Tests
+======================
+
+If you'd like to write a new python test, first decide which file it
+might fit into; test scripts each contain collections of tests grouped
+by rough categories. (Is it testing a new subcommand? New
+enhancement? Tricky use-case? Regression test?)
+
+Next, read the long documentation comment at the top of
+svntest/tree.py. It will explain the general API that most tests use.
+
+Finally, try copying-and-pasting a simple test and then edit from
+there. Don't forget to add your test to the 'test_list' variable at
+the bottom of the file. To avoid renumbering of existing tests, you
+should add new tests to the end of the list.
+
+
+Testing Compatibility With Previous Release
+===========================================
+
+You can run the Python test suite against older installed versions of
+the Subversion servers. This mail fragment introduces the ability:
+
+ Message-ID: <1ea387f60804091828q48c9d18ah7bf8d89ef7d39461@mail.gmail.com>
+ Date: Wed, 9 Apr 2008 18:28:40 -0700
+ From: "David Glasser" <glasser@davidglasser.net>
+ To: "Subversion Developers" <dev@subversion.tigris.org>
+ Subject: backwards-compatibility testing!
+
+ I've updated the expectations on trunk so that you can cleanly run the
+ test suite against 1.4.x svnserve or DAV. You do this by adding
+ SERVER_MINOR_VERSION=4 with make check, or --server-minor-version 4 to
+ run_tests.py or a specific Python test.
+
+ [...]
+
+We expect that post-1.5, this support will expand in the obvious ways
+(allowing "--server-minor-version 5" and SERVER_MINOR_VERSION=5).
diff --git a/subversion/tests/cmdline/atomic-ra-revprop-change.c b/subversion/tests/cmdline/atomic-ra-revprop-change.c
new file mode 100644
index 0000000..4f3d763
--- /dev/null
+++ b/subversion/tests/cmdline/atomic-ra-revprop-change.c
@@ -0,0 +1,213 @@
+/*
+ * atomic-ra-revprop-change.c : wrapper around svn_ra_change_rev_prop2()
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdlib.h>
+#include <stdio.h>
+
+#include <apr_pools.h>
+#include <apr_general.h>
+
+#include "svn_types.h"
+#include "svn_pools.h"
+#include "svn_dirent_uri.h"
+#include "svn_ra.h"
+#include "svn_cmdline.h"
+
+#include "private/svn_skel.h"
+
+#include "svn_private_config.h"
+
+
+#define KEY_OLD_PROPVAL "old_value_p"
+#define KEY_NEW_PROPVAL "value"
+
+#define USAGE_MSG \
+ "Usage: %s URL REVISION PROPNAME VALUES_SKEL WANT_ERROR CONFIG_DIR\n" \
+ "\n" \
+ "VALUES_SKEL is a proplist skel containing pseudo-properties '%s' \n" \
+ "and '%s'. A pseudo-property missing from the skel is interpreted \n" \
+ "as unset.\n" \
+ "\n" \
+ "WANT_ERROR is 1 if the propchange is expected to fail due to the atomicity,"\
+ "and 0 if it is expected to succeed. If the expectation matches reality," \
+ "the exit code shall be zero.\n"
+
+
+static svn_error_t *
+construct_auth_baton(svn_auth_baton_t **auth_baton_p,
+ const char *config_dir,
+ apr_pool_t *pool)
+{
+ SVN_ERR(svn_cmdline_create_auth_baton2(auth_baton_p,
+ TRUE /* non_interactive */,
+ "jrandom", "rayjandom",
+ config_dir,
+ TRUE /* no_auth_cache */,
+ FALSE /* trust_server_cert */,
+ FALSE, FALSE, FALSE, FALSE,
+ NULL, NULL, NULL, pool));
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+construct_config(apr_hash_t **config_p,
+ const char *config_dir,
+ apr_pool_t *pool)
+{
+ SVN_ERR(svn_config_get_config(config_p, config_dir, pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+change_rev_prop(const char *url,
+ svn_revnum_t revision,
+ const char *propname,
+ const svn_string_t *propval,
+ const svn_string_t *old_value,
+ svn_boolean_t want_error,
+ const char *config_dir,
+ apr_pool_t *pool)
+{
+ svn_ra_callbacks2_t *callbacks;
+ svn_ra_session_t *sess;
+ apr_hash_t *config;
+ svn_boolean_t capable;
+ svn_error_t *err;
+
+ SVN_ERR(svn_ra_create_callbacks(&callbacks, pool));
+ SVN_ERR(construct_auth_baton(&callbacks->auth_baton, config_dir, pool));
+ SVN_ERR(construct_config(&config, config_dir, pool));
+
+ SVN_ERR(svn_ra_open4(&sess, NULL, url, NULL, callbacks, NULL /* baton */,
+ config, pool));
+
+ SVN_ERR(svn_ra_has_capability(sess, &capable,
+ SVN_RA_CAPABILITY_ATOMIC_REVPROPS,
+ pool));
+ if (capable)
+ {
+ err = svn_ra_change_rev_prop2(sess, revision, propname,
+ &old_value, propval, pool);
+
+ if (want_error && err
+ && svn_error_find_cause(err, SVN_ERR_FS_PROP_BASEVALUE_MISMATCH))
+ {
+ /* Expectation was matched. Get out. */
+ svn_error_clear(err);
+ return SVN_NO_ERROR;
+ }
+ else if (! want_error && ! err)
+ /* Expectation was matched. Get out. */
+ return SVN_NO_ERROR;
+ else if (want_error && ! err)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "An error was expected but not seen");
+ else
+ /* A real (non-SVN_ERR_FS_PROP_BASEVALUE_MISMATCH) error. */
+ return svn_error_trace(err);
+ }
+ else
+ /* Running under --server-minor-version? */
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Server doesn't advertise "
+ "SVN_RA_CAPABILITY_ATOMIC_REVPROPS");
+}
+
+/* Parse SKEL_CSTR according to the description in USAGE_MSG. */
+static svn_error_t *
+extract_values_from_skel(svn_string_t **old_propval_p,
+ svn_string_t **propval_p,
+ const char *skel_cstr,
+ apr_pool_t *pool)
+{
+ apr_hash_t *proplist;
+ svn_skel_t *skel;
+
+ skel = svn_skel__parse(skel_cstr, strlen(skel_cstr), pool);
+ SVN_ERR(svn_skel__parse_proplist(&proplist, skel, pool));
+ *old_propval_p = apr_hash_get(proplist, KEY_OLD_PROPVAL, APR_HASH_KEY_STRING);
+ *propval_p = apr_hash_get(proplist, KEY_NEW_PROPVAL, APR_HASH_KEY_STRING);
+
+ return SVN_NO_ERROR;
+}
+
+int
+main(int argc, const char *argv[])
+{
+ apr_pool_t *pool;
+ int exit_code = EXIT_SUCCESS;
+ svn_error_t *err;
+ const char *url;
+ svn_revnum_t revision;
+ const char *propname;
+ svn_string_t *propval;
+ svn_string_t *old_propval;
+ char *digits_end = NULL;
+ svn_boolean_t want_error;
+ const char *config_dir;
+
+ if (argc != 7)
+ {
+ fprintf(stderr, USAGE_MSG, argv[0], KEY_OLD_PROPVAL, KEY_NEW_PROPVAL);
+ exit(1);
+ }
+
+ if (apr_initialize() != APR_SUCCESS)
+ {
+ fprintf(stderr, "apr_initialize() failed.\n");
+ exit(1);
+ }
+
+ /* set up the global pool */
+ pool = svn_pool_create(NULL);
+
+ /* Parse argv. */
+ url = svn_uri_canonicalize(argv[1], pool);
+ revision = strtol(argv[2], &digits_end, 10);
+ propname = argv[3];
+ SVN_INT_ERR(extract_values_from_skel(&old_propval, &propval, argv[4], pool));
+ want_error = !strcmp(argv[5], "1");
+ config_dir = svn_dirent_canonicalize(argv[6], pool);
+
+
+ if ((! SVN_IS_VALID_REVNUM(revision)) || (! digits_end) || *digits_end)
+ SVN_INT_ERR(svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ _("Invalid revision number supplied")));
+
+ /* Do something. */
+ err = change_rev_prop(url, revision, propname, propval, old_propval,
+ want_error, config_dir, pool);
+ if (err)
+ {
+ svn_handle_error2(err, stderr, FALSE, "atomic-ra-revprop-change: ");
+ svn_error_clear(err);
+ exit_code = EXIT_FAILURE;
+ }
+
+ /* Clean up, and get outta here */
+ svn_pool_destroy(pool);
+ apr_terminate();
+
+ return exit_code;
+}
diff --git a/subversion/tests/cmdline/authz_tests.py b/subversion/tests/cmdline/authz_tests.py
new file mode 100755
index 0000000..8878418
--- /dev/null
+++ b/subversion/tests/cmdline/authz_tests.py
@@ -0,0 +1,1711 @@
+#!/usr/bin/env python
+#
+# authz_tests.py: testing authorization.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os
+
+# Our testing module
+import svntest
+
+from svntest.main import write_restrictive_svnserve_conf
+from svntest.main import write_authz_file
+from svntest.main import server_authz_has_aliases
+from upgrade_tests import (replace_sbox_with_tarfile,
+ replace_sbox_repo_with_tarfile,
+ wc_is_too_old_regex)
+
+# (abbreviation)
+Item = svntest.wc.StateItem
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+SkipDumpLoadCrossCheck = svntest.testcase.SkipDumpLoadCrossCheck_deco
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+
+#----------------------------------------------------------------------
+
+# regression test for issue #2486 - part 1: open_root
+@Issue(2486)
+@Skip(svntest.main.is_ra_type_file)
+def authz_open_root(sbox):
+ "authz issue #2486 - open root"
+
+ sbox.build()
+
+ write_authz_file(sbox, {"/": "", "/A": "jrandom = rw"})
+
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ # we have write access in folder /A, but not in root. Test on too
+ # restrictive access needed in open_root by modifying a file in /A
+ wc_dir = sbox.wc_dir
+
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+ svntest.main.file_append(mu_path, "hi")
+
+ # Create expected output tree.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+
+ # Commit the one file.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ None,
+ [],
+ mu_path)
+
+#----------------------------------------------------------------------
+
+# regression test for issue #2486 - part 2: open_directory
+@Issue(2486)
+@Skip(svntest.main.is_ra_type_file)
+def authz_open_directory(sbox):
+ "authz issue #2486 - open directory"
+
+ sbox.build()
+
+ write_authz_file(sbox, {"/": "*=rw", "/A/B": "*=", "/A/B/E": "jrandom = rw"})
+
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ # we have write access in folder /A/B/E, but not in /A/B. Test on too
+ # restrictive access needed in open_directory by moving file /A/mu to
+ # /A/B/E
+ wc_dir = sbox.wc_dir
+
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+ E_path = os.path.join(wc_dir, 'A', 'B', 'E')
+
+ svntest.main.run_svn(None, 'mv', mu_path, E_path)
+
+ # Create expected output tree.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Deleting'),
+ 'A/B/E/mu' : Item(verb='Adding'),
+ })
+
+ # Commit the working copy.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ None)
+
+@Skip(svntest.main.is_ra_type_file)
+@SkipDumpLoadCrossCheck()
+def broken_authz_file(sbox):
+ "broken authz files cause errors"
+
+ sbox.build(create_wc = False)
+
+ # No characters but 'r', 'w', and whitespace are allowed as a value
+ # in an authz rule.
+ write_authz_file(sbox, {"/": "jrandom = rw # End-line comments disallowed"})
+
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ exit_code, out, err = svntest.main.run_svn(1,
+ "delete",
+ sbox.repo_url + "/A",
+ "-m", "a log message")
+ if out:
+ raise svntest.verify.SVNUnexpectedStdout(out)
+ if not err:
+ raise svntest.verify.SVNUnexpectedStderr("Missing stderr")
+
+# test whether read access is correctly granted and denied
+@Skip(svntest.main.is_ra_type_file)
+def authz_read_access(sbox):
+ "test authz for read operations"
+
+ sbox.build(create_wc = False)
+
+ root_url = sbox.repo_url
+ A_url = root_url + '/A'
+ B_url = A_url + '/B'
+ C_url = A_url + '/C'
+ E_url = B_url + '/E'
+ mu_url = A_url + '/mu'
+ iota_url = root_url + '/iota'
+ lambda_url = B_url + '/lambda'
+ alpha_url = E_url + '/alpha'
+ F_alpha_url = B_url + '/F/alpha'
+ D_url = A_url + '/D'
+ G_url = D_url + '/G'
+ pi_url = G_url + '/pi'
+ H_url = D_url + '/H'
+ chi_url = H_url + '/chi'
+ fws_url = B_url + '/folder with spaces'
+ fws_empty_folder_url = fws_url + '/empty folder'
+
+ if sbox.repo_url.startswith("http"):
+ expected_err = ".*svn: E175013: .*[Ff]orbidden.*"
+ else:
+ expected_err = ".*svn: E170001: Authorization failed.*"
+
+ # create some folders with spaces in their names
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'logmsg',
+ fws_url, fws_empty_folder_url)
+
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ write_authz_file(sbox, { "/": "* = r",
+ "/A/B": "* =",
+ "/A/B/F": "* = rw",
+ "/A/D": "* = rw",
+ "/A/D/G": ("* = rw\n" +
+ svntest.main.wc_author + " ="),
+ "/A/D/H": ("* = \n" +
+ svntest.main.wc_author + " = rw"),
+ "/A/B/folder with spaces":
+ (svntest.main.wc_author + " = r")})
+
+ # read a remote file
+ svntest.actions.run_and_verify_svn(["This is the file 'iota'.\n"],
+ [], 'cat',
+ iota_url)
+
+ # read a remote file, readably by user specific exception
+ svntest.actions.run_and_verify_svn(["This is the file 'chi'.\n"],
+ [], 'cat',
+ chi_url)
+
+ # read a remote file, unreadable: should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'cat',
+ lambda_url)
+
+ # read a remote file, unreadable through recursion: should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'cat',
+ alpha_url)
+
+ # read a remote file, user specific authorization is ignored because * = rw
+ svntest.actions.run_and_verify_svn(["This is the file 'pi'.\n"],
+ [], 'cat',
+ pi_url)
+ # open a remote folder(ls)
+ svntest.actions.run_and_verify_svn(["A/\n", "iota\n"],
+ [], 'ls',
+ root_url)
+
+ # open a remote folder(ls), unreadable: should fail
+ svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'ls',
+ B_url)
+
+ # open a remote folder(ls) with spaces, should succeed
+ svntest.actions.run_and_verify_svn(None, [], 'ls',
+ fws_empty_folder_url)
+
+ # open a remote folder(ls), unreadable through recursion: should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'ls',
+ E_url)
+
+ # copy a remote file
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ iota_url, D_url,
+ '-m', 'logmsg')
+
+ # copy a remote file, source is unreadable: should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'cp',
+ '-m', 'logmsg',
+ lambda_url, D_url)
+
+ # copy a remote folder
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ C_url, D_url,
+ '-m', 'logmsg')
+
+ # copy a remote folder, source is unreadable: should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'cp',
+ '-m', 'logmsg',
+ E_url, D_url)
+
+ # move a remote file, source/target ancestor is readonly: should fail
+ #
+ # Note: interesting, we deem it okay for someone to break this move
+ # into two operations, a committed copy followed by a committed
+ # deletion. But the editor drive required to do these atomically
+ # today is prohibitive.
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'mv', '-m', 'logmsg',
+ alpha_url, F_alpha_url)
+
+ ## copy a remote file, source/target ancestor is readonly
+ ## we fail here due to issue #3242.
+ #svntest.actions.run_and_verify_svn(# None, [],
+ # 'cp', '-m', 'logmsg',
+ # alpha_url, F_alpha_url)
+
+
+# test whether write access is correctly granted and denied
+@Skip(svntest.main.is_ra_type_file)
+def authz_write_access(sbox):
+ "test authz for write operations"
+
+ sbox.build(create_wc = False)
+
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ if sbox.repo_url.startswith('http'):
+ expected_err = ".*svn: E175013: .*[Ff]orbidden.*"
+ else:
+ expected_err = ".*svn: E220004: Access denied.*"
+
+ write_authz_file(sbox, { "/": "* = r",
+ "/A/B": "* = rw",
+ "/A/C": "* = rw"})
+
+ root_url = sbox.repo_url
+ A_url = root_url + '/A'
+ B_url = A_url + '/B'
+ C_url = A_url + '/C'
+ E_url = B_url + '/E'
+ mu_url = A_url + '/mu'
+ iota_url = root_url + '/iota'
+ lambda_url = B_url + '/lambda'
+ D_url = A_url + '/D'
+
+ # copy a remote file, target is readonly: should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'cp',
+ '-m', 'logmsg',
+ lambda_url, D_url)
+
+ # copy a remote folder, target is readonly: should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'cp',
+ '-m', 'logmsg',
+ E_url, D_url)
+
+ # delete a file, target is readonly: should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'rm',
+ '-m', 'logmsg',
+ iota_url)
+
+ # delete a folder, target is readonly: should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'rm',
+ '-m', 'logmsg',
+ D_url)
+
+ # create a folder, target is readonly: should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'mkdir',
+ '-m', 'logmsg',
+ A_url+'/newfolder')
+
+ # move a remote file, source is readonly: should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'mv',
+ '-m', 'logmsg',
+ mu_url, C_url)
+
+ # move a remote folder, source is readonly: should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'mv',
+ '-m', 'logmsg',
+ D_url, C_url)
+
+ # move a remote file, target is readonly: should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'mv',
+ '-m', 'logmsg',
+ lambda_url, D_url)
+
+ # move a remote folder, target is readonly: should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'mv',
+ '-m', 'logmsg',
+ B_url, D_url)
+
+#----------------------------------------------------------------------
+
+@Skip(svntest.main.is_ra_type_file)
+def authz_checkout_test(sbox):
+ "test authz for checkout"
+
+ sbox.build(create_wc = False)
+ local_dir = sbox.wc_dir
+
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ # 1st part: disable all read access, checkout should fail
+
+ # write an authz file with *= on /
+ if sbox.repo_url.startswith('http'):
+ expected_err = ".*svn: E175013: .*[Ff]orbidden.*"
+ else:
+ expected_err = ".*svn: E170001: Authorization failed.*"
+
+ write_authz_file(sbox, { "/": "* ="})
+
+ # checkout a working copy, should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'co', sbox.repo_url, local_dir)
+
+ # 2nd part: now enable read access
+
+ write_authz_file(sbox, { "/": "* = r"})
+
+ # checkout a working copy, should succeed because we have read access
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.wc_dir = local_dir
+ expected_output.tweak(status='A ', contents=None)
+
+ expected_wc = svntest.main.greek_state
+
+ svntest.actions.run_and_verify_checkout(sbox.repo_url,
+ local_dir,
+ expected_output,
+ expected_wc)
+
+@Skip(svntest.main.is_ra_type_file)
+def authz_checkout_and_update_test(sbox):
+ "test authz for checkout and update"
+
+ sbox.build(create_wc = False)
+ local_dir = sbox.wc_dir
+
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ # 1st part: disable read access on folder A/B, checkout should not
+ # download this folder
+
+ # write an authz file with *= on /A/B and /A/mu.
+ write_authz_file(sbox, { "/": "* = r",
+ "/A/B": "* =",
+ "/A/mu": "* =",
+ })
+
+ # checkout a working copy, should not dl /A/B or /A/mu.
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.wc_dir = local_dir
+ expected_output.tweak(status='A ', contents=None)
+ expected_output.remove('A/B', 'A/B/lambda', 'A/B/E', 'A/B/E/alpha',
+ 'A/B/E/beta', 'A/B/F', 'A/mu')
+
+ expected_wc = svntest.main.greek_state.copy()
+ expected_wc.remove('A/B', 'A/B/lambda', 'A/B/E', 'A/B/E/alpha',
+ 'A/B/E/beta', 'A/B/F', 'A/mu')
+
+ svntest.actions.run_and_verify_checkout(sbox.repo_url, local_dir,
+ expected_output,
+ expected_wc)
+
+ # 2nd part: now enable read access
+
+ # write an authz file with *=r on /. continue to exclude mu.
+ write_authz_file(sbox, { "/": "* = r",
+ "/A/mu": "* =",
+ })
+
+ # update the working copy, should download /A/B because we now have read
+ # access
+ expected_output = svntest.wc.State(local_dir, {
+ 'A/B' : Item(status='A '),
+ 'A/B/lambda' : Item(status='A '),
+ 'A/B/E' : Item(status='A '),
+ 'A/B/E/alpha' : Item(status='A '),
+ 'A/B/E/beta' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ })
+
+ expected_wc = svntest.main.greek_state.copy()
+ expected_wc.remove('A/mu')
+ expected_status = svntest.actions.get_virginal_state(local_dir, 1)
+ expected_status.remove('A/mu')
+
+ svntest.actions.run_and_verify_update(local_dir,
+ expected_output,
+ expected_wc,
+ expected_status,
+ [], True)
+
+@Skip(svntest.main.is_ra_type_file)
+def authz_partial_export_test(sbox):
+ "test authz for export with unreadable subfolder"
+
+ sbox.build(create_wc = False)
+ local_dir = sbox.wc_dir
+
+ # cleanup remains of a previous test run.
+ svntest.main.safe_rmtree(local_dir)
+
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ # 1st part: disable read access on folder A/B, export should not
+ # download this folder
+
+ # write an authz file with *= on /A/B
+ write_authz_file(sbox, { "/": "* = r", "/A/B": "* =" })
+
+ # export a working copy, should not dl /A/B
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.wc_dir = local_dir
+ expected_output.desc[''] = Item()
+ expected_output.tweak(status='A ', contents=None)
+ expected_output.remove('A/B', 'A/B/lambda', 'A/B/E', 'A/B/E/alpha',
+ 'A/B/E/beta', 'A/B/F')
+
+ expected_wc = svntest.main.greek_state.copy()
+ expected_wc.remove('A/B', 'A/B/lambda', 'A/B/E', 'A/B/E/alpha',
+ 'A/B/E/beta', 'A/B/F')
+
+ svntest.actions.run_and_verify_export(sbox.repo_url, local_dir,
+ expected_output,
+ expected_wc)
+
+#----------------------------------------------------------------------
+
+@Skip(svntest.main.is_ra_type_file)
+def authz_log_and_tracing_test(sbox):
+ "test authz for log and tracing path changes"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ # write an authz file with *=rw on /
+ if sbox.repo_url.startswith('http'):
+ expected_err = ".*svn: E175013: .*[Ff]orbidden.*"
+ else:
+ expected_err = ".*svn: E170001: Authorization failed.*"
+
+ write_authz_file(sbox, { "/": "* = rw\n" })
+
+ root_url = sbox.repo_url
+ D_url = root_url + '/A/D'
+ G_url = D_url + '/G'
+
+ # check if log doesn't spill any info on which you don't have read access
+ rho_path = os.path.join(wc_dir, 'A', 'D', 'G', 'rho')
+ svntest.main.file_append(rho_path, 'new appended text for rho')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'add file rho', sbox.wc_dir)
+
+ svntest.main.file_append(rho_path, 'extra change in rho')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'changed file rho',
+ sbox.wc_dir)
+
+ # copy a remote file
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ rho_path, D_url,
+ '-m', 'copy rho to readable area')
+
+ # now disable read access on the first version of rho, keep the copy in
+ # /A/D readable.
+ if sbox.repo_url.startswith('http'):
+ expected_err = ".*svn: E175013: .*[Ff]orbidden.*"
+ else:
+ expected_err = ".*svn: E170001: Authorization failed.*"
+
+ authz = { "/": "* = rw",
+ "/A/D/G": "* ="}
+ write_authz_file(sbox, authz)
+
+ ## log
+
+ # changed file in this rev. is not readable anymore, so author and date
+ # should be hidden, like this:
+ # r2 | (no author) | (no date) | 1 line
+ svntest.actions.run_and_verify_svn(".*(no author).*(no date).*|-+\n|\n", [],
+ 'log', '-r', '2', '--limit', '1',
+ wc_dir)
+
+ if sbox.repo_url.startswith('http'):
+ expected_err2 = expected_err
+ else:
+ expected_err2 = ".*svn: E220001: ((Unreadable path encountered; " \
+ "access denied)|(Item is not readable)).*"
+
+ # if we do the same thing directly on the unreadable file, we get:
+ # svn: Item is not readable
+ svntest.actions.run_and_verify_svn(None, expected_err2,
+ 'log', rho_path)
+
+ # while the HEAD rev of the copy is readable in /A/D, its parent in
+ # /A/D/G is not, so don't spill any info there either.
+ svntest.actions.run_and_verify_svn(".*(no author).*(no date).*|-+\n|\n", [],
+ 'log', '-r', '2', '--limit', '1', D_url)
+
+ # Test that only author/date are shown for partially visible revisions.
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+ write_authz_file(sbox, { "/": "* = rw"})
+ svntest.actions.run_and_verify_svn(
+ None, [], # expected_stdout, expected_stderr
+ 'ps', '--revprop', '-r1', 'foobar', 'foo bar', sbox.repo_url)
+ svntest.actions.run_and_verify_log_xml(
+ expected_revprops=[{'svn:author': svntest.main.wc_author, 'svn:date': '',
+ 'svn:log': 'Log message for revision 1.',
+ 'foobar': 'foo bar'}],
+ args=['--with-all-revprops', '-r1', sbox.repo_url])
+ write_authz_file(sbox, authz)
+ svntest.actions.run_and_verify_log_xml(
+ expected_revprops=[{'svn:author': svntest.main.wc_author, 'svn:date': ''}],
+ args=['--with-all-revprops', '-r1', sbox.repo_url])
+
+
+ ## cat
+
+ # now see if we can look at the older version of rho
+
+ expected_err2 = ".*svn: E195012: Unable to find repository location.*"
+
+ svntest.actions.run_and_verify_svn(None, expected_err2,
+ 'cat', '-r', '2', D_url+'/rho')
+
+ if sbox.repo_url.startswith('http'):
+ expected_err2 = expected_err
+ else:
+ expected_err2 = ".*svn: E220001: Unreadable path encountered; access denied.*"
+
+ svntest.actions.run_and_verify_svn(None, expected_err2,
+ 'cat', '-r', '2', G_url+'/rho')
+
+ ## diff
+
+ # we shouldn't see the diff of a file in an unreadable path
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'diff', '-r', 'HEAD', G_url+'/rho')
+
+ # diff treats the unreadable path as indicating an add so no error
+ svntest.actions.run_and_verify_svn(None, [],
+ 'diff', '-r', '2', D_url+'/rho')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'diff', '-r', '2:4', D_url+'/rho')
+
+# test whether read access is correctly granted and denied
+@SkipUnless(server_authz_has_aliases)
+@Skip(svntest.main.is_ra_type_file)
+def authz_aliases(sbox):
+ "test authz for aliases"
+
+ sbox.build(create_wc = False)
+
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ if sbox.repo_url.startswith("http"):
+ expected_err = ".*svn: E175013: .*[Ff]orbidden.*"
+ else:
+ expected_err = ".*svn: E170001: Authorization failed.*"
+
+ write_authz_file(sbox, { "/" : "* = r",
+ "/A/B" : "&jray = rw" },
+ { "aliases" : 'jray = jrandom' } )
+
+ root_url = sbox.repo_url
+ A_url = root_url + '/A'
+ B_url = A_url + '/B'
+ iota_url = root_url + '/iota'
+
+ # copy a remote file, target is readonly for jconstant: should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'cp',
+ '--username', svntest.main.wc_author2,
+ '-m', 'logmsg',
+ iota_url, B_url)
+
+ # try the same action, but as user jray (alias of jrandom), should work.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp',
+ '-m', 'logmsg',
+ iota_url, B_url)
+
+@Skip(svntest.main.is_ra_type_file)
+@Issue(2486)
+def authz_validate(sbox):
+ "test the authz validation rules"
+
+ sbox.build(create_wc = False)
+
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ A_url = sbox.repo_url + '/A'
+
+ # If any of the validate rules fail, the authz isn't loaded so there's no
+ # access at all to the repository.
+
+ # Test 1: Undefined group
+ write_authz_file(sbox, { "/" : "* = r",
+ "/A/B" : "@undefined_group = rw" })
+
+ if sbox.repo_url.startswith("http"):
+ expected_err = ".*svn: E175013: .*[Ff]orbidden.*"
+ elif sbox.repo_url.startswith("svn"):
+ expected_err = ".*Invalid authz configuration"
+ else:
+ expected_err = ".*@undefined_group.*"
+
+ # validation of this authz file should fail, so no repo access
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'ls',
+ A_url)
+
+ # Test 2: Circular dependency
+ write_authz_file(sbox, { "/" : "* = r" },
+ { "groups" : """admins = admin1, admin2, @devs
+devs1 = @admins, dev1
+devs2 = @admins, dev2
+devs = @devs1, dev3, dev4""" })
+
+ if sbox.repo_url.startswith("http"):
+ expected_err = ".*svn: E175013: .*[Ff]orbidden.*"
+ elif sbox.repo_url.startswith("svn"):
+ expected_err = ".*Invalid authz configuration"
+ else:
+ expected_err = ".*Circular dependency.*"
+
+ # validation of this authz file should fail, so no repo access
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'ls',
+ A_url)
+
+ # Test 3: Group including other group 2 times (issue 2684)
+ write_authz_file(sbox, { "/" : "* = r" },
+ { "groups" : """admins = admin1, admin2
+devs1 = @admins, dev1
+devs2 = @admins, dev2
+users = @devs1, @devs2, user1, user2""" })
+
+ # validation of this authz file should *not* fail (where formerly,
+ # it complained about circular dependencies that do not, in fact,
+ # exist), so this is business as usual.
+ svntest.actions.run_and_verify_svn(['B/\n', 'C/\n', 'D/\n', 'mu\n'],
+ [],
+ 'ls',
+ A_url)
+
+# test locking/unlocking with authz
+@Skip(svntest.main.is_ra_type_file)
+@Issue(2700)
+def authz_locking(sbox):
+ "test authz for locking"
+
+ sbox.build()
+
+ write_authz_file(sbox, {"/": "", "/A": "jrandom = rw"})
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ if sbox.repo_url.startswith('http'):
+ expected_err = ".*svn: E175013: .*[Ff]orbidden.*"
+ else:
+ expected_err = ".*svn: warning: W170001: Authorization failed.*"
+
+ root_url = sbox.repo_url
+ wc_dir = sbox.wc_dir
+ iota_url = root_url + '/iota'
+ iota_path = os.path.join(wc_dir, 'iota')
+ A_url = root_url + '/A'
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+
+ # lock a file url, target is readonly: should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'lock',
+ '-m', 'lock msg',
+ iota_url)
+
+ # lock a file path, target is readonly: should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'lock',
+ '-m', 'lock msg',
+ iota_path)
+
+ # Test for issue 2700: we have write access in folder /A, but not in root.
+ # Get a lock on /A/mu and try to commit it.
+
+ # lock a file path, target is writeable: should succeed
+ svntest.actions.run_and_verify_svn(None, [],
+ 'lock',
+ '-m', 'lock msg',
+ mu_path)
+
+ svntest.main.file_append(mu_path, "hi")
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ [],
+ [],
+ mu_path)
+
+ # Lock two paths one of which fails. First add read access to '/' so
+ # that OPTIONS on common ancestor works.
+ write_authz_file(sbox, {"/": "jrandom = r", "/A": "jrandom = rw"})
+
+ # Two unlocked paths
+ svntest.actions.run_and_verify_info([{'Lock Token' : None}],
+ sbox.ospath('iota'))
+ svntest.actions.run_and_verify_info([{'Lock Token' : None}],
+ sbox.ospath('A/mu'))
+
+ if sbox.repo_url.startswith('http'):
+ expected_err = ".*svn: warning: W160039: .*([Aa]uth.*perf|[Ff]orbidden).*"
+ else:
+ expected_err = ".*svn: warning: W170001: Authorization failed.*"
+
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'lock',
+ '-m', 'lock msg',
+ mu_path,
+ iota_path)
+
+ # One path locked, one still unlocked
+ svntest.actions.run_and_verify_info([{'Lock Token' : None}],
+ sbox.ospath('iota'))
+ svntest.actions.run_and_verify_info([{'Lock Token' : 'opaquelocktoken:.*'}],
+ sbox.ospath('A/mu'))
+
+
+
+# test for issue #2712: if anon-access == read, svnserve should also check
+# authz to determine whether a checkout/update is actually allowed for
+# anonymous users, and, if not, attempt authentication.
+@XFail()
+@Issue(2712)
+@SkipUnless(svntest.main.is_ra_type_svn)
+def authz_svnserve_anon_access_read(sbox):
+ "authz issue #2712"
+
+ sbox.build(create_wc = False)
+ svntest.main.safe_rmtree(sbox.wc_dir)
+ B_path = os.path.join(sbox.wc_dir, 'A', 'B')
+ other_B_path = B_path + '_other'
+ B_url = sbox.repo_url + '/A/B'
+ D_path = os.path.join(sbox.wc_dir, 'A', 'D')
+ D_url = sbox.repo_url + '/A/D'
+
+ # We want a svnserve.conf with anon-access = read.
+ write_restrictive_svnserve_conf(sbox.repo_dir, "read")
+
+ # Give jrandom read access to /A/B. Anonymous users can only
+ # access /A/D.
+ write_authz_file(sbox, { "/A/B" : "jrandom = rw",
+ "/A/D" : "* = r" })
+
+ # Perform a checkout of /A/B, expecting to see no errors.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ B_url, B_path)
+
+ # Anonymous users should be able to check out /A/D.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ D_url, D_path)
+
+ # Now try a switch.
+ svntest.main.safe_rmtree(D_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'switch', D_url, B_path)
+
+ # Check out /A/B with an unknown username, expect error.
+ svntest.actions.run_and_verify_svn(
+ None,
+ ".*Authentication error from server: Username not found.*",
+ 'checkout',
+ '--non-interactive',
+ '--username', 'losing_user',
+ B_url, B_path + '_unsuccessful')
+
+ # Check out a second copy of /A/B, make changes for later merge.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ B_url, other_B_path)
+ other_alpha_path = os.path.join(other_B_path, 'E', 'alpha')
+ svntest.main.file_append(other_alpha_path, "fish\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'commit', '-m', 'log msg',
+ other_B_path)
+
+ # Now try to merge. This is an atypical merge, since our "branch"
+ # is not really a branch (it's the same URL), but we only care about
+ # authz here, not the semantics of the merge. (Merges had been
+ # failing in authz, for the reasons summarized in
+ # http://subversion.tigris.org/issues/show_bug.cgi?id=2712#desc13.)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'merge', '-c', '2',
+ B_url, B_path)
+
+@XFail()
+@Issue(3796)
+@Skip(svntest.main.is_ra_type_file)
+def authz_switch_to_directory(sbox):
+ "switched to directory, no read access on parents"
+
+ sbox.build()
+
+ write_authz_file(sbox, {"/": "*=rw", "/A/B": "*=", "/A/B/E": "jrandom = rw"})
+
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ wc_dir = sbox.wc_dir
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+ F_path = os.path.join(wc_dir, 'A', 'B', 'F')
+ G_path = os.path.join(wc_dir, 'A', 'D', 'G')
+
+ # Switch /A/B/E to /A/B/F.
+ svntest.main.run_svn(None, 'switch', sbox.repo_url + "/A/B/E", G_path,
+ '--ignore-ancestry')
+
+# Test to reproduce the problem identified by Issue 3242 in which
+# Subversion's authz, as of Subversion 1.5, requires access to the
+# repository root for copy and move operations.
+@Skip(svntest.main.is_ra_type_file)
+@Issue(3242)
+def authz_access_required_at_repo_root(sbox):
+ "authz issue #3242 - access required at repo root"
+
+ sbox.build(create_wc = False)
+ root_url = sbox.repo_url
+
+ # Create a copy-level copy of A, just so we have something to work with.
+ svntest.main.run_svn(None, 'cp', '-m', 'logmsg',
+ root_url + '/A',
+ root_url + '/A-copy')
+
+ # Now we get all restrictive.
+ write_authz_file(sbox, {'/': '* =',
+ '/A': 'jrandom = rw',
+ '/A-copy': 'jrandom = rw'})
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ # Do some copies and moves where the common parents of the source(s)
+ # and destination(s) are unreadable. All we currently hope to support
+ # is the case where the sources are individually (and recursively)
+ # readable, and the destination tree is writable.
+
+ svntest.main.run_svn(None, 'cp',
+ '-m', 'copy in readable space',
+ root_url + '/A/B',
+ root_url + '/A/B-copy')
+ svntest.main.run_svn(None, 'cp',
+ '-m', 'copy across disjoint readable spaces',
+ root_url + '/A/B',
+ root_url + '/A-copy/B-copy')
+ svntest.main.run_svn(None, 'cp',
+ '-m', 'multi-copy across disjoint readable spaces',
+ root_url + '/A/B',
+ root_url + '/A/mu',
+ root_url + '/A-copy/C')
+ svntest.main.run_svn(None, 'cp',
+ '-m', 'copy from disjoint readable spaces',
+ root_url + '/A/B/E/alpha',
+ root_url + '/A-copy/B/E/beta',
+ root_url + '/A-copy/C')
+
+@Skip(svntest.main.is_ra_type_file)
+@Issue(3242)
+def authz_access_required_at_repo_root2(sbox):
+ "more authz issue #3242 - update to renamed file"
+
+ sbox.build(create_wc = False)
+ root_url = sbox.repo_url
+
+ # Now we get all restrictive.
+ write_authz_file(sbox, {'/': '* =',
+ '/A': 'jrandom = rw'})
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ # Rename a file.
+ svntest.main.run_svn(None, 'mv',
+ '-m', 'rename file in readable writable space',
+ root_url + '/A/B/E/alpha',
+ root_url + '/A/B/E/alpha-renamed')
+
+ # Check out original greek sub tree below /A/B/E
+ # and update it to the above rename.
+ wc_dir = sbox.add_wc_path('ABE')
+ os.mkdir(wc_dir)
+ svntest.main.run_svn(None, 'co', '-r', '1', root_url + '/A/B/E', wc_dir)
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+ # Rename a directory.
+ svntest.main.run_svn(None, 'mv',
+ '-m', 'rename diretory in readable writable space',
+ root_url + '/A/D/H',
+ root_url + '/A/D/a g e')
+
+ # Check out original greek sub tree below /A/D
+ # and update it to the above rename.
+ wc_dir = sbox.add_wc_path('AD')
+ os.mkdir(wc_dir)
+ svntest.main.run_svn(None, 'co', '-r', '1', root_url + '/A/D', wc_dir)
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+@Skip(svntest.main.is_ra_type_file)
+def multiple_matches(sbox):
+ "multiple lines matching a user"
+
+ sbox.build(create_wc = False)
+ root_url = sbox.repo_url
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+ if sbox.repo_url.startswith("http"):
+ expected_err = ".*svn: E175013: .*[Ff]orbidden.*"
+ else:
+ expected_err = ".*svn: E170001: Authorization failed.*"
+
+ # Prohibit access and commit fails
+ write_authz_file(sbox, {'/': 'jrandom ='})
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'cp', '-m', 'fail copy',
+ root_url, root_url + '/fail')
+
+ # At present if multiple lines match the permissions of all the
+ # matching lines are amalgamated. So jrandom gets access regardless
+ # of the line prohibiting access and regardless of the order of the
+ # lines. This might be a bug, but we probably can't simply fix it as
+ # that would change the behaviour of lots of existing authz files.
+
+ write_authz_file(sbox, {'/': 'jrandom =' + '\n' + '* = rw'})
+ svntest.main.run_svn(None, 'cp',
+ '-m', 'first copy',
+ root_url, root_url + '/first')
+
+ write_authz_file(sbox, {'/': '* = rw' + '\n' + 'jrandom ='})
+ svntest.main.run_svn(None, 'cp',
+ '-m', 'second copy',
+ root_url, root_url + '/second')
+
+@Issues(4025,4026)
+@Skip(svntest.main.is_ra_type_file)
+def wc_wc_copy_revert(sbox):
+ "wc-to-wc-copy with absent nodes and then revert"
+
+ sbox.build(create_wc = False)
+ local_dir = sbox.wc_dir
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ write_authz_file(sbox, {'/' : '* = r',
+ '/A/B/E' : '* =', })
+
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.wc_dir = local_dir
+ expected_output.tweak(status='A ', contents=None)
+ expected_output.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta')
+ expected_wc = svntest.main.greek_state.copy()
+ expected_wc.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta')
+
+ svntest.actions.run_and_verify_checkout(sbox.repo_url, local_dir,
+ expected_output,
+ expected_wc)
+
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ expected_status.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta')
+ svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status)
+
+ svntest.actions.run_and_verify_svn(None,
+ 'svn: E155035: Cannot copy.*excluded by server',
+ 'cp', sbox.ospath('A'), sbox.ospath('A2'))
+
+
+ # The copy failed and A2/B/E is incomplete. That means A2 and A2/B
+ # are complete, but for the other parts of A2 the status is undefined.
+ expected_output = svntest.verify.ExpectedOutput(
+ ['A + - 1 jrandom ' + sbox.ospath('A2') + '\n',
+ ' + - 1 jrandom ' + sbox.ospath('A2/B') + '\n',
+ '! - ? ? ' + sbox.ospath('A2/B/E') + '\n',
+ ])
+ expected_output.match_all = False
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'st', '--verbose', sbox.ospath('A2'))
+
+
+ # Issue 4025, info SEGV on incomplete working node
+ svntest.actions.run_and_verify_svn(None,
+ 'svn: E145000: .*unrecognized node kind',
+ 'info', sbox.ospath('A2/B/E'))
+
+ # Issue 4026, copy assertion on incomplete working node
+ svntest.actions.run_and_verify_svn(None,
+ 'svn: E145001: cannot handle node kind',
+ 'cp', sbox.ospath('A2/B'), sbox.ospath('B3'))
+
+ expected_output = svntest.verify.ExpectedOutput(
+ ['A + - 1 jrandom ' + sbox.ospath('B3') + '\n',
+ '! - ? ? ' + sbox.ospath('B3/E') + '\n',
+ ])
+ expected_output.match_all = False
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'st', '--verbose', sbox.ospath('B3'))
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '--recursive',
+ sbox.ospath('A2'), sbox.ospath('B3'))
+
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ expected_status.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta')
+ svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status)
+
+@Skip(svntest.main.is_ra_type_file)
+def authz_recursive_ls(sbox):
+ "recursive ls with private subtrees"
+
+ sbox.build(create_wc = False)
+ local_dir = sbox.wc_dir
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ write_authz_file(sbox, {'/' : '* = r',
+ '/A/B/E' : '* =',
+ '/A/mu' : '* =',
+ })
+ expected_entries = [
+ 'A/',
+ 'A/B/',
+ 'A/B/F/',
+ 'A/B/lambda',
+ 'A/C/',
+ 'A/D/',
+ 'A/D/G/',
+ 'A/D/G/pi',
+ 'A/D/G/rho',
+ 'A/D/G/tau',
+ 'A/D/H/',
+ 'A/D/H/chi',
+ 'A/D/H/omega',
+ 'A/D/H/psi',
+ 'A/D/gamma',
+ 'iota',
+ ]
+ with_newline = svntest.main.ensure_list(map(lambda x: x + '\n',
+ expected_entries))
+ svntest.actions.run_and_verify_svn(with_newline,
+ [], 'ls', '-R',
+ sbox.repo_url)
+
+@Issue(3781)
+@Skip(svntest.main.is_ra_type_file)
+def case_sensitive_authz(sbox):
+ "authz issue #3781, check case sensitivity"
+
+ sbox.build()
+
+ wc_dir = sbox.wc_dir
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+ mu_url = sbox.repo_url + '/A/mu'
+ mu_repo_path = sbox.repo_dir + "/A/mu"
+ svntest.main.file_append(mu_path, "hi")
+
+ # Create expected output tree.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+
+ # error messages
+ expected_error_for_commit = ".*Commit failed.*"
+
+ if sbox.repo_url.startswith("http"):
+ expected_error_for_cat = ".*[Ff]orbidden.*"
+ else:
+ expected_error_for_cat = ".*svn: E170001: Authorization failed.*"
+
+ # test the case-sensitivity of the path inside the repo
+ write_authz_file(sbox, {"/": "jrandom = r",
+ "/A/mu": "jrandom =", "/a/Mu": "jrandom = rw"})
+ svntest.actions.run_and_verify_svn2(None,
+ expected_error_for_cat,
+ 1, 'cat', mu_url)
+
+ write_authz_file(sbox, {"/": "jrandom = r",
+ "/A": "jrandom = r",
+ "/a/Mu": "jrandom = rw"})
+ # Commit the file.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ None,
+ None,
+ expected_error_for_commit,
+ mu_path)
+
+ def mixcases(repo_name):
+ mixed_repo_name = ''
+ for i in range(0, len(repo_name)):
+ if i % 2 == 0:
+ mixed_val = repo_name[i].upper()
+ mixed_repo_name = mixed_repo_name + mixed_val
+ else:
+ mixed_val = repo_name[i].lower()
+ mixed_repo_name = mixed_repo_name + mixed_val
+ return mixed_repo_name
+
+ mixed_case_repo_dir = mixcases(os.path.basename(sbox.repo_dir))
+
+ # test the case-sensitivity of the repo name
+ sec_mixed_case = {mixed_case_repo_dir + ":/": "jrandom = r",
+ mixed_case_repo_dir + ":/A": "jrandom = r",
+ os.path.basename(sbox.repo_dir) + ":/A/mu": "jrandom =",
+ mixed_case_repo_dir + ":/A/mu": "jrandom = rw"}
+ write_authz_file(sbox, {}, sec_mixed_case)
+ svntest.actions.run_and_verify_svn2(None,
+ expected_error_for_cat,
+ 1, 'cat', mu_url)
+
+ write_authz_file(sbox, {},
+ sections = {mixed_case_repo_dir + ":/": "jrandom = r",
+ mixed_case_repo_dir + ":/A": "jrandom = r",
+ mixed_case_repo_dir + ":/A/mu": "jrandom = rw"})
+
+ # Commit the file again.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ None,
+ None,
+ expected_error_for_commit,
+ mu_path)
+
+ # test the case-sensitivity
+ write_authz_file(sbox, {"/": "jrandom = r",
+ "/A": "jrandom = r", "/A/mu": "jrandom = rw"})
+
+ svntest.actions.run_and_verify_svn2(svntest.verify.AnyOutput, [],
+ 0, 'cat', mu_url)
+ # Commit the file.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ None,
+ [],
+ mu_path)
+
+@Skip(svntest.main.is_ra_type_file)
+def authz_tree_conflict(sbox):
+ "authz should notice a tree conflict"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ sbox.simple_rm('A/C')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ write_authz_file(sbox, {"/": "jrandom = rw", "/A/C": "*="})
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ # And now create an obstruction
+ sbox.simple_mkdir('A/C')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/C' : Item(status=' ', treeconflict='C'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/C', status='R ', treeconflict='C')
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ None,
+ expected_status,
+ [], False,
+ '-r', '1', wc_dir)
+
+@Issue(3900)
+@Skip(svntest.main.is_ra_type_file)
+def wc_delete(sbox):
+ "wc delete with absent nodes"
+
+ sbox.build(create_wc = False)
+ local_dir = sbox.wc_dir
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ write_authz_file(sbox, {'/' : '* = r',
+ '/A/B/E' : '* =', })
+
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.wc_dir = local_dir
+ expected_output.tweak(status='A ', contents=None)
+ expected_output.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta')
+ expected_wc = svntest.main.greek_state.copy()
+ expected_wc.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta')
+
+ svntest.actions.run_and_verify_checkout(sbox.repo_url, local_dir,
+ expected_output,
+ expected_wc)
+
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+
+ expected_err = ".*svn: E155035: .*excluded by server*"
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'rm', sbox.ospath('A/B/E'), '--force')
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'rm', sbox.ospath('A'))
+
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+
+
+@Skip(svntest.main.is_ra_type_file)
+def wc_commit_error_handling(sbox):
+ "verify commit error reporting"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ sbox.simple_mkdir('A/Z')
+
+ write_authz_file(sbox, {'/' : '* = r', })
+
+ # Creating editor fail: unfriendly error
+ expected_err = "(svn: E175013: .*[Ff]orbidden.*)|" + \
+ "(svn: E170001: Authorization failed)"
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'ci', wc_dir, '-m', '')
+
+ write_authz_file(sbox, {'/' : '* = rw',
+ '/A' : '* = r', })
+
+ # Allow the informative error for dav and the ra_svn specific one that is
+ # returned on editor->edit_close().
+ expected_err = "(svn: E195023: Changing directory '.*Z' is forbidden)|" + \
+ "(svn: E220004: Access denied)|" + \
+ "(svn: E175013: Access to '.*Z' forbidden)"
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'ci', wc_dir, '-m', '')
+
+ sbox.simple_revert('A/Z')
+
+ svntest.main.file_write(sbox.ospath('A/zeta'), "Zeta")
+ sbox.simple_add('A/zeta')
+
+ # Allow the informative error for dav and the ra_svn specific one that is
+ # returned on editor->edit_close().
+ expected_err = "(svn: E195023: Changing file '.*zeta' is forbidden)|" + \
+ "(svn: E220004: Access denied)|" + \
+ "(svn: E175013: Access to '.*zeta' forbidden)"
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'ci', wc_dir, '-m', '')
+ sbox.simple_revert('A/zeta')
+
+ sbox.simple_propset('a', 'b', 'A/D')
+
+ # Allow a generic dav error and the ra_svn specific one that is returned
+ # on editor->edit_close().
+ expected_err = "(svn: E175013: .*[Ff]orbidden.*)|" + \
+ "(svn: E220004: Access denied)"
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'ci', wc_dir, '-m', '')
+
+ sbox.simple_revert('A/D')
+
+ sbox.simple_propset('a', 'b', 'A/B/lambda')
+
+ # Allow the informative error for dav and the ra_svn specific one that is
+ # returned on editor->edit_close().
+ expected_err = "(svn: E195023: Changing file '.*lambda' is forbidden.*)|" + \
+ "(svn: E220004: Access denied)|" + \
+ "(svn: E175013: Access to '.*lambda' forbidden)"
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'ci', wc_dir, '-m', '')
+
+ sbox.simple_revert('A/B/lambda')
+
+ svntest.main.file_write(sbox.ospath('A/B/lambda'), "New lambda")
+ # Allow the informative error for dav and the ra_svn specific one that is
+ # returned on editor->edit_close().
+ expected_err = "(svn: E195023: Changing file '.*lambda' is forbidden.*)|" + \
+ "(svn: E220004: Access denied)|" + \
+ "(svn: E175013: Access to '.*lambda' forbidden)"
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'ci', wc_dir, '-m', '')
+
+ sbox.simple_revert('A/B/lambda')
+
+ sbox.simple_rm('A/B/F')
+ # Allow the informative error for dav and the ra_svn specific one that is
+ # returned on editor->edit_close().
+ expected_err = "(svn: E195023: Changing directory '.*F' is forbidden.*)|" + \
+ "(svn: E220004: Access denied)|" + \
+ "(svn: E175013: Access to '.*F' forbidden)"
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'ci', wc_dir, '-m', '')
+ sbox.simple_revert('A/B/F')
+
+ svntest.main.file_write(sbox.ospath('A/mu'), "Updated mu")
+ # Allow the informative error for dav and the ra_svn specific one that is
+ # returned on editor->edit_close().
+ expected_err = "(svn: E195023: Changing file '.*mu' is forbidden.*)|" + \
+ "(svn: E220004: Access denied)|" + \
+ "(svn: E175013: Access to '.*mu' forbidden)"
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'ci', wc_dir, '-m', '')
+
+
+@Skip(svntest.main.is_ra_type_file)
+def upgrade_absent(sbox):
+ "upgrade absent nodes to server-excluded"
+
+ # Install wc and repos
+ replace_sbox_with_tarfile(sbox, 'upgrade_absent.tar.bz2')
+ replace_sbox_repo_with_tarfile(sbox, 'upgrade_absent_repos.tar.bz2')
+
+ # Update config for authz
+ svntest.main.write_restrictive_svnserve_conf(sbox.repo_dir)
+ svntest.main.write_authz_file(sbox, { "/" : "*=rw",
+ "/A/B" : "*=",
+ "/A/B/E" : "jrandom = rw"})
+
+ # Attempt to use the working copy, this should give an error
+ expected_stderr = wc_is_too_old_regex
+ svntest.actions.run_and_verify_svn(None, expected_stderr,
+ 'info', sbox.wc_dir)
+
+ # Now upgrade the working copy
+ svntest.actions.run_and_verify_svn(None, [],
+ 'upgrade', sbox.wc_dir)
+
+ # Relocate to allow finding the repository
+ svntest.actions.run_and_verify_svn(None, [], 'relocate',
+ 'svn://127.0.0.1/authz_tests-2',
+ sbox.repo_url, sbox.wc_dir)
+
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ })
+
+ # Expect no changes and certainly no errors
+ svntest.actions.run_and_verify_update(sbox.wc_dir, expected_output,
+ None, None)
+
+@Issue(4183)
+@XFail()
+@Skip(svntest.main.is_ra_type_file)
+def remove_subdir_with_authz_and_tc(sbox):
+ "remove a subdir with authz file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_rm('A/B')
+ sbox.simple_commit()
+
+ svntest.main.write_restrictive_svnserve_conf(sbox.repo_dir)
+ svntest.main.write_authz_file(sbox, { "/" : "*=rw",
+ "/A/B/E" : "*="})
+
+ # Now update back to r1. This will reintroduce A/B except A/B/E.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ 'A/B/lambda' : Item(status='A '),
+ })
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ None,
+ expected_status,
+ [], False,
+ wc_dir, '-r', '1')
+
+ # Perform some edit operation to introduce a tree conflict
+ svntest.main.file_write(sbox.ospath('A/B/lambda'), 'qq')
+
+ # And now update to r2. This tries to delete A/B and causes a tree conflict
+  # ### But it also causes an error in creating the copied state
+ # ### svn: E220001: Cannot copy '<snip>\A\B\E' excluded by server
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B' : Item(status=' ', treeconflict='C'),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ None,
+ None)
+
+@SkipUnless(svntest.main.is_ra_type_svn)
+def authz_svnserve_groups(sbox):
+ "authz with configured global groups"
+
+ sbox.build(create_wc = False)
+
+ svntest.main.write_restrictive_svnserve_conf_with_groups(sbox.repo_dir)
+
+ svntest.main.write_authz_file(sbox, { "/A/B" : "@senate = r",
+ "/A/D" : "@senate = rw",
+ "/A/B/E" : "@senate = " })
+
+ svntest.main.write_groups_file(sbox, { "senate" : "jrandom" })
+
+ root_url = sbox.repo_url
+ A_url = root_url + '/A'
+ B_url = A_url + '/B'
+ E_url = B_url + '/E'
+ F_url = B_url + '/F'
+ D_url = A_url + '/D'
+ G_url = D_url + '/G'
+ lambda_url = B_url + '/lambda'
+ pi_url = G_url + '/pi'
+ alpha_url = E_url + '/alpha'
+
+ expected_err = ".*svn: E170001: Authorization failed.*"
+
+ # read a remote file
+ svntest.actions.run_and_verify_svn(["This is the file 'lambda'.\n"],
+ [], 'cat',
+ lambda_url)
+
+ # read a remote file
+ svntest.actions.run_and_verify_svn(["This is the file 'pi'.\n"],
+ [], 'cat',
+ pi_url)
+
+ # read a remote file, unreadable: should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'cat',
+ alpha_url)
+
+ # copy a remote file, source is unreadable: should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'cp',
+ '-m', 'logmsg',
+ alpha_url, B_url)
+
+ # copy a remote folder
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp',
+ '-m', 'logmsg',
+ F_url, D_url)
+
+ # copy a remote folder, source is unreadable: should fail
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'cp',
+ '-m', 'logmsg',
+ E_url, D_url)
+
+@Skip(svntest.main.is_ra_type_file)
+@Issue(4332)
+def authz_del_from_subdir(sbox):
+ "delete file without rights on the root"
+
+ sbox.build(create_wc = False)
+
+ write_authz_file(sbox, {"/": "* = ", "/A": "jrandom = rw"})
+
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', sbox.repo_url + '/A/mu',
+ '-m', '')
+
+
+@SkipUnless(svntest.main.is_ra_type_dav) # dontdothat is dav only
+def log_diff_dontdothat(sbox):
+ "log --diff on dontdothat"
+ sbox.build(create_wc = False)
+
+ ddt_url = sbox.repo_url.replace('/svn-test-work/', '/ddt-test-work/')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'log', sbox.repo_url,
+ '-c', 1, '--diff')
+
+ # We should expect a PASS or a proper error message instead of
+ # svn: E175009: XML parsing failed: (403 Forbidden)
+ expected_err = ".*E175013: Access to '.*authz_tests-28.*' forbidden"
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'log', ddt_url,
+ '-c', 1, '--diff')
+
+@Issue(4422)
+@Skip(svntest.main.is_ra_type_file)
+def authz_file_external_to_authz(sbox):
+ "replace file external with authz node"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ write_authz_file(sbox, {"/": "* = rw"})
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ sbox.simple_propset('svn:externals', 'Z ' + repo_url + '/iota', '')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('', status=' M')
+ expected_status.add({
+ 'Z' : Item(status=' ', wc_rev='1', switched='X'),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ None, None, expected_status)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', repo_url + '/A',
+ repo_url + '/Z',
+ '-m', 'Add Z')
+
+ write_authz_file(sbox, {"/": "* = rw", "/Z": "* = "})
+
+ expected_status.tweak(wc_rev=2)
+
+ # ### This used to assert with
+ # ### svn: E235000: In file 'update_editor.c' line 3043: assertion failed
+ # ### (status != svn_wc__db_status_normal)
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ None, None, expected_status)
+
+@Skip(svntest.main.is_ra_type_file)
+def authz_log_censor_revprops(sbox):
+ "log censors revprops for partially visible revs"
+
+ sbox.build(create_wc = False)
+
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+ write_authz_file(sbox, {"/" : "* = rw"})
+
+ # Add the revision property 's'.
+ svntest.actions.run_and_verify_svn(None, [], 'ps', '--revprop',
+ '-r1', 's', 'secret', sbox.repo_url)
+
+ # With blanket access, both 'svn:author' and 's' are a part of the output.
+ svntest.actions.run_and_verify_log_xml(
+ expected_revprops=[{'svn:author': svntest.main.wc_author, 's': 'secret'}],
+ args=['--with-revprop', 'svn:author', '--with-revprop', 's',
+ '-r1', sbox.repo_url])
+
+ # Make the revision partially visible, but ask for both 'svn:author' and
+ # 's'. The second revision property should be censored out, as we only
+ # allow 'svn:author' and 'svn:date' for partially visible revisions.
+ # This used to fail around trunk@1658379.
+ write_authz_file(sbox, {"/" : "* = rw", "/A/B" : "* = "})
+
+ svntest.actions.run_and_verify_log_xml(
+ expected_revprops=[{'svn:author': svntest.main.wc_author}],
+ args=['--with-revprop', 'svn:author', '--with-revprop', 's',
+ '-r1', sbox.repo_url])
+
+@Skip(svntest.main.is_ra_type_file)
+def remove_access_after_commit(sbox):
+ "remove a subdir with authz file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ svntest.main.write_restrictive_svnserve_conf(sbox.repo_dir)
+ svntest.main.write_authz_file(sbox, { "/" : "*=rw"})
+
+ # Modification in subtree
+ sbox.simple_append('A/B/E/alpha', 'appended\n')
+ sbox.simple_append('A/D/G/rho', 'appended\n')
+ sbox.simple_commit()
+
+ svntest.main.write_authz_file(sbox, { "/" : "*=rw",
+ "/A/B" : "*=",
+ "/A/D" : "*="})
+
+ # Local modification
+ sbox.simple_append('A/D/G/pi', 'appended\n')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B' : Item(status='D '),
+ 'A/D' : Item(status=' ', treeconflict='C'),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/D/G/rho',
+ contents="This is the file 'rho'.\nappended\n")
+ expected_disk.tweak('A/D/G/pi',
+ contents="This is the file 'pi'.\nappended\n")
+ expected_disk.remove('A/B', 'A/B/E', 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/B/F', 'A/B/lambda')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+
+ expected_status.tweak('A/D', status='R ',treeconflict='C', )
+ expected_status.tweak('A/D', 'A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau',
+ 'A/D/H', 'A/D/H/omega', 'A/D/H/chi', 'A/D/H/psi',
+ 'A/D/gamma', copied='+', wc_rev='-')
+ expected_status.tweak('A/D/G/pi', status='M ')
+ expected_status.remove('A/B', 'A/B/E', 'A/B/E/alpha', 'A/B/E/beta', 'A/B/F',
+ 'A/B/lambda')
+
+ # And expect a mixed rev copy
+ expected_status.tweak('A/D/G/rho', status='A ', entry_status=' ')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True)
+
+
+########################################################################
+# Run the tests
+
+# list all tests here, starting with None:
+test_list = [ None,
+ authz_open_root,
+ authz_open_directory,
+ broken_authz_file,
+ authz_read_access,
+ authz_write_access,
+ authz_checkout_test,
+ authz_log_and_tracing_test,
+ authz_checkout_and_update_test,
+ authz_partial_export_test,
+ authz_aliases,
+ authz_validate,
+ authz_locking,
+ authz_svnserve_anon_access_read,
+ authz_switch_to_directory,
+ authz_access_required_at_repo_root,
+ authz_access_required_at_repo_root2,
+ multiple_matches,
+ wc_wc_copy_revert,
+ authz_recursive_ls,
+ case_sensitive_authz,
+ authz_tree_conflict,
+ wc_delete,
+ wc_commit_error_handling,
+ upgrade_absent,
+ remove_subdir_with_authz_and_tc,
+ authz_svnserve_groups,
+ authz_del_from_subdir,
+ log_diff_dontdothat,
+ authz_file_external_to_authz,
+ authz_log_censor_revprops,
+ remove_access_after_commit,
+ ]
+serial_only = True
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list, serial_only = serial_only)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/autoprop_tests.py b/subversion/tests/cmdline/autoprop_tests.py
new file mode 100755
index 0000000..962d3ab
--- /dev/null
+++ b/subversion/tests/cmdline/autoprop_tests.py
@@ -0,0 +1,767 @@
+#!/usr/bin/env python
+#
+# autoprop_tests.py: testing automatic properties
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os, logging, stat
+
+logger = logging.getLogger()
+
+# Our testing module
+import svntest
+
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+from svntest.main import SVN_PROP_INHERITABLE_AUTOPROPS
+
+# Helper function
+def check_proplist(path, exp_out):
+ """Verify that property list on PATH has a value of EXP_OUT"""
+
+ props = svntest.tree.get_props([path]).get(path, {})
+ if props != exp_out:
+ logger.warn("Expected properties: %s", exp_out)
+ logger.warn("Actual properties: %s", props)
+ raise svntest.Failure
+
+
+######################################################################
+# Tests
+
+#----------------------------------------------------------------------
+
+def create_config(sbox, enable_flag):
+ "create config directories and files"
+
+ # contents of the file 'config'
+ config_contents = '''\
+[auth]
+password-stores =
+
+[miscellany]
+enable-auto-props = %s
+
+[auto-props]
+*.c = cfile=yes
+*.jpg = jpgfile=ja
+fubar* = tarfile=si
+foobar.lha = lhafile=da;lzhfile=niet
+spacetest = abc = def ; ghi = ; = j
+escapetest = myval=;;;;val;myprop=p
+quotetest = svn:keywords="Author Date Id Rev URL";
+* = auto=oui
+''' % (enable_flag and 'yes' or 'no')
+
+ return sbox.create_config_dir(config_contents)
+
+#----------------------------------------------------------------------
+
+def autoprops_test(sbox, cmd, cfgenable, clienable, subdir):
+ """configurable autoprops test.
+
+ CMD is the subcommand to test: 'import' or 'add'
+ if CFGENABLE is true, enable autoprops in the config file, else disable
+ if CLIENABLE == 1: --auto-props is added to the command line
+ 0: nothing is added
+ -1: --no-auto-props is added to command line
+  if string SUBDIR is not empty, files are created in that subdir and the
+ directory is added/imported"""
+
+ # Bootstrap
+ sbox.build()
+
+ # some directories
+ wc_dir = sbox.wc_dir
+ tmp_dir = os.path.abspath(sbox.add_wc_path('autoprops'))
+ os.makedirs(tmp_dir)
+ repos_url = sbox.repo_url
+
+ config_dir = create_config(sbox, cfgenable)
+
+ # initialize parameters
+ if cmd == 'import':
+ parameters = ['import', '-m', 'bla']
+ files_dir = tmp_dir
+ else:
+ parameters = ['add']
+ files_dir = wc_dir
+
+ parameters = parameters + ['--config-dir', config_dir]
+
+  # add command-line flags
+ if clienable == 1:
+ parameters = parameters + ['--auto-props']
+ enable_flag = 1
+ elif clienable == -1:
+ parameters = parameters + ['--no-auto-props']
+ enable_flag = 0
+ else:
+ enable_flag = cfgenable
+
+ # setup subdirectory if needed
+ if len(subdir) > 0:
+ files_dir = os.path.join(files_dir, subdir)
+ files_wc_dir = os.path.join(wc_dir, subdir)
+ os.makedirs(files_dir)
+ else:
+ files_wc_dir = wc_dir
+
+ # create test files
+ filenames = ['foo.h',
+ 'foo.c',
+ 'foo.jpg',
+ 'fubar.tar',
+ 'foobar.lha',
+ 'spacetest',
+ 'escapetest',
+ 'quotetest']
+ for filename in filenames:
+ svntest.main.file_write(os.path.join(files_dir, filename),
+ 'foo\nbar\nbaz\n')
+
+ if len(subdir) == 0:
+ # add/import the files
+ for filename in filenames:
+ path = os.path.join(files_dir, filename)
+ if cmd == 'import':
+ tmp_params = parameters + [path, repos_url + '/' + filename]
+ else:
+ tmp_params = parameters + [path]
+ svntest.main.run_svn(None, *tmp_params)
+ else:
+ # add/import subdirectory
+ if cmd == 'import':
+ parameters = parameters + [files_dir, repos_url]
+ else:
+ parameters = parameters + [files_wc_dir]
+ svntest.main.run_svn(None, *parameters)
+
+ # do an svn co if needed
+ if cmd == 'import':
+ svntest.main.run_svn(None, 'checkout', repos_url, files_wc_dir,
+ '--config-dir', config_dir)
+
+ # check the properties
+ if enable_flag:
+ filename = os.path.join(files_wc_dir, 'foo.h')
+ check_proplist(filename, {'auto':'oui'})
+ filename = os.path.join(files_wc_dir, 'foo.c')
+ check_proplist(filename, {'auto':'oui', 'cfile':'yes'})
+ filename = os.path.join(files_wc_dir, 'foo.jpg')
+ check_proplist(filename, {'auto':'oui', 'jpgfile':'ja'})
+ filename = os.path.join(files_wc_dir, 'fubar.tar')
+ check_proplist(filename, {'auto':'oui', 'tarfile':'si'})
+ filename = os.path.join(files_wc_dir, 'foobar.lha')
+ check_proplist(filename, {'auto':'oui', 'lhafile':'da', 'lzhfile':'niet'})
+ filename = os.path.join(files_wc_dir, 'spacetest')
+ check_proplist(filename, {'auto':'oui', 'abc':'def', 'ghi':''})
+ filename = os.path.join(files_wc_dir, 'escapetest')
+ check_proplist(filename, {'auto':'oui', 'myval':';;val', 'myprop':'p'})
+ filename = os.path.join(files_wc_dir, 'quotetest')
+ check_proplist(filename, {'auto':'oui',
+ 'svn:keywords': 'Author Date Id Rev URL'})
+ else:
+ for filename in filenames:
+ check_proplist(os.path.join(files_wc_dir, filename), {})
+
+
+#----------------------------------------------------------------------
+
+def autoprops_add_no_none(sbox):
+ "add: config=no, commandline=none"
+
+ autoprops_test(sbox, 'add', 0, 0, '')
+
+#----------------------------------------------------------------------
+
+def autoprops_add_yes_none(sbox):
+ "add: config=yes, commandline=none"
+
+ autoprops_test(sbox, 'add', 1, 0, '')
+
+#----------------------------------------------------------------------
+
+def autoprops_add_no_yes(sbox):
+ "add: config=no, commandline=yes"
+
+ autoprops_test(sbox, 'add', 0, 1, '')
+
+#----------------------------------------------------------------------
+
+def autoprops_add_yes_yes(sbox):
+ "add: config=yes, commandline=yes"
+
+ autoprops_test(sbox, 'add', 1, 1, '')
+
+#----------------------------------------------------------------------
+
+def autoprops_add_no_no(sbox):
+ "add: config=no, commandline=no"
+
+ autoprops_test(sbox, 'add', 0, -1, '')
+
+#----------------------------------------------------------------------
+
+def autoprops_add_yes_no(sbox):
+ "add: config=yes, commandline=no"
+
+ autoprops_test(sbox, 'add', 1, -1, '')
+
+#----------------------------------------------------------------------
+
+def autoprops_imp_no_none(sbox):
+ "import: config=no, commandline=none"
+
+ autoprops_test(sbox, 'import', 0, 0, '')
+
+#----------------------------------------------------------------------
+
+def autoprops_imp_yes_none(sbox):
+ "import: config=yes, commandline=none"
+
+ autoprops_test(sbox, 'import', 1, 0, '')
+
+#----------------------------------------------------------------------
+
+def autoprops_imp_no_yes(sbox):
+ "import: config=no, commandline=yes"
+
+ autoprops_test(sbox, 'import', 0, 1, '')
+
+#----------------------------------------------------------------------
+
+def autoprops_imp_yes_yes(sbox):
+ "import: config=yes, commandline=yes"
+
+ autoprops_test(sbox, 'import', 1, 1, '')
+
+#----------------------------------------------------------------------
+
+def autoprops_imp_no_no(sbox):
+ "import: config=no, commandline=no"
+
+ autoprops_test(sbox, 'import', 0, -1, '')
+
+#----------------------------------------------------------------------
+
+def autoprops_imp_yes_no(sbox):
+ "import: config=yes, commandline=no"
+
+ autoprops_test(sbox, 'import', 1, -1, '')
+
+#----------------------------------------------------------------------
+
+def autoprops_add_dir(sbox):
+ "add directory"
+
+ autoprops_test(sbox, 'add', 1, 0, 'autodir')
+
+#----------------------------------------------------------------------
+
+def autoprops_imp_dir(sbox):
+ "import directory"
+
+ autoprops_test(sbox, 'import', 1, 0, 'autodir')
+
+#----------------------------------------------------------------------
+
+# Issue #2713: adding a file with an svn:eol-style property, svn should abort
+# if the file has mixed EOL style. Previously, svn aborted but had added the
+# file anyway.
+@Issue(2713)
+def fail_add_mixed_eol_style(sbox):
+ "fail to add a file with mixed EOL style"
+
+ from svntest.actions import run_and_verify_svn, run_and_verify_unquiet_status
+
+ # Bootstrap
+ sbox.build()
+
+ filename = 'mixed-eol.txt'
+ filepath = os.path.join(sbox.wc_dir, filename)
+ parameters = ['--auto-props',
+ '--config-option=config:auto-props:' + filename
+ + '=svn:eol-style=native']
+
+ svntest.main.file_write(filepath, 'foo\nbar\r\nbaz\r')
+
+ expected_stderr = "svn: E200009: File '.*" + filename + \
+ "' has inconsistent newlines" + \
+ "|" + "svn: E135000: Inconsistent line ending style\n"
+ run_and_verify_svn([], expected_stderr,
+ 'add', filepath, *parameters)
+
+ expected_status = svntest.wc.State(sbox.wc_dir,
+ {filename : Item(status='? ')})
+ run_and_verify_unquiet_status(filepath, expected_status)
+
+#----------------------------------------------------------------------
+
+def create_inherited_autoprops_config(sbox, enable_flag):
+ "create config stuffs for inherited autoprops tests"
+
+ # contents of the file 'config'
+ config_contents = '''\
+[auth]
+password-stores =
+
+[miscellany]
+enable-auto-props = %s
+
+[auto-props]
+*.c = svn:keywords=Author Date Id Rev URL;svn:eol-style=native;
+''' % (enable_flag and 'yes' or 'no')
+
+ return sbox.create_config_dir(config_contents)
+
+#----------------------------------------------------------------------
+def check_inheritable_autoprops(sbox, auto_props_cfg_enabled,
+ inheritable_auto_props_enabled):
+ """Check that the autoprops added or imported by inheritable_autoprops_test
+ are as expected based on whether auto props are active or
+ not, as indicated by AUTO_PROPS_CFG_ENABLED and
+ INHERITABLE_AUTO_PROPS_ENABLED."""
+
+ foo_path = sbox.ospath('foo.c')
+ bar_path = sbox.ospath('B/bar.c')
+ baf_path = sbox.ospath('C/baf.c')
+ qux_path = sbox.ospath('D/qux.c')
+ rip_path = sbox.ospath('D/rip.bat')
+ snk_path = sbox.ospath('D/H/snk.py')
+ sir_path = sbox.ospath('D/H/sir.c')
+
+ if auto_props_cfg_enabled:
+ check_proplist(foo_path, {'svn:eol-style':'CRLF',
+ 'svn:keywords':'Author Date Id Rev URL'})
+ check_proplist(bar_path, {'svn:eol-style':'CR',
+ 'svn:keywords':'Date'})
+ check_proplist(baf_path, {'svn:eol-style':'LF',
+ 'svn:keywords':'Rev'})
+ check_proplist(qux_path, {'svn:eol-style':'CRLF',
+ 'svn:keywords':'Author Date Id Rev URL'})
+ check_proplist(rip_path, {'svn:executable':'*'})
+ check_proplist(snk_path, {'svn:mime-type':'text/x-python'})
+ check_proplist(sir_path, {'svn:eol-style':'CRLF',
+ 'svn:keywords':'Author Date Id Rev URL'})
+ elif inheritable_auto_props_enabled: # Config auto-props disabled,
+ # but not svn:auto-props.
+ check_proplist(foo_path, {'svn:eol-style':'CRLF'})
+ check_proplist(bar_path, {'svn:eol-style':'CR',
+ 'svn:keywords':'Date'})
+ check_proplist(baf_path, {'svn:eol-style':'LF',
+ 'svn:keywords':'Rev'})
+ check_proplist(qux_path, {'svn:eol-style':'CRLF'})
+ check_proplist(rip_path, {'svn:executable':'*'})
+ check_proplist(snk_path, {'svn:mime-type':'text/x-python'})
+ check_proplist(sir_path, {'svn:eol-style':'CRLF'})
+ else: # No autoprops of any kind.
+ check_proplist(foo_path, {})
+ check_proplist(bar_path, {})
+ check_proplist(baf_path, {})
+ check_proplist(qux_path, {})
+ check_proplist(rip_path, {})
+ check_proplist(snk_path, {})
+ check_proplist(sir_path, {})
+
+#----------------------------------------------------------------------
+def inheritable_autoprops_test(sbox, cmd, cfgenable, clienable, subdir,
+ do_import_or_add=True):
+ """configurable autoprops and svn:auto-props test.
+
+ CMD is the subcommand to test: 'import' or 'add'
+ if CFGENABLE is true, enable autoprops in the config file, else disable
+ if CLIENABLE == 1: --auto-props is added to the command line
+ 0: nothing is added
+ -1: --no-auto-props is added to command line
+ if string SUBDIR is not empty files are created in that subdir and the
+ directory is added/imported
+ if DO_IMPORT_OR_ADD is false, setup the test, but don't perform
+ the actual import or add.
+
+ Return the directory where the config dir (if any) is located."""
+
+ # Bootstrap
+ sbox.build()
+
+ # some directories
+ wc_dir = sbox.wc_dir
+ tmp_dir = os.path.abspath(sbox.add_wc_path('iautoprops'))
+ os.makedirs(tmp_dir)
+ repos_url = sbox.repo_url
+
+ config_dir = create_inherited_autoprops_config(sbox, cfgenable)
+
+ # initialize parameters
+ if cmd == 'import':
+ parameters = ['import', '-m', 'importing']
+ files_dir = tmp_dir
+ else:
+ parameters = ['add']
+ files_dir = wc_dir
+
+ parameters = parameters + ['--config-dir', config_dir]
+
+  # add commandline flags
+ inheritable_auto_props_enabled = 1
+ if clienable == 1:
+ parameters = parameters + ['--auto-props']
+ auto_props_cfg_enabled = 1
+ elif clienable == -1:
+ parameters = parameters + ['--no-auto-props']
+ auto_props_cfg_enabled = 0
+ inheritable_auto_props_enabled = 0
+ else:
+ auto_props_cfg_enabled = cfgenable
+
+ # setup subdirectory if needed
+ if len(subdir) > 0:
+ files_dir = os.path.join(files_dir, subdir)
+ files_wc_dir = os.path.join(wc_dir, subdir)
+ os.makedirs(files_dir)
+ else:
+ files_wc_dir = wc_dir
+
+ # Set differing svn:auto-props properties on various
+ # directories.
+ sbox.simple_propset(SVN_PROP_INHERITABLE_AUTOPROPS,
+ '*.c = svn:eol-style=CRLF\n'
+ '*.bat = svn:executable',
+ '.')
+ sbox.simple_propset(SVN_PROP_INHERITABLE_AUTOPROPS,
+ '*.c = svn:eol-style=CR;svn:keywords=Date',
+ 'A/B')
+ sbox.simple_propset(SVN_PROP_INHERITABLE_AUTOPROPS,
+ '*.c = svn:eol-style=LF;svn:keywords=Rev',
+ 'A/C')
+ sbox.simple_propset(SVN_PROP_INHERITABLE_AUTOPROPS,
+ '*.py = svn:mime-type=text/x-python',
+ 'A/D')
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Add some ' + SVN_PROP_INHERITABLE_AUTOPROPS +
+ ' properties', wc_dir)
+
+ # Switch the root of the WC to ^/A.
+ svntest.main.run_svn(None, 'switch', '--ignore-ancestry',
+ sbox.repo_url + '/A', wc_dir)
+
+ # Array of file names to add or import, their WC locations (relative to the
+ # WC root) if being added, and their repository locations if being imported.
+ filenames = [['foo.c', 'foo.c', 'A/foo.c'],
+ ['bar.c', os.path.join('B', 'bar.c'), 'A/B/bar.c'],
+ ['baf.c', os.path.join('C', 'baf.c'), 'A/C/baf.c'],
+ ['qux.c', os.path.join('D', 'qux.c'), 'A/D/qux.c'],
+ ['rip.bat', os.path.join('D', 'rip.bat'), 'A/D/rip.bat'],
+ ['snk.py', os.path.join('D', 'H', 'snk.py'), 'A/D/H/snk.py'],
+ ['ric.c', os.path.join('D', 'H', 'sir.c'), 'A/D/H/sir.c']]
+
+ for filename in filenames:
+ if cmd == 'import':
+ svntest.main.file_write(os.path.join(files_dir, filename[0]),
+ 'foo\nbar\nbaz\n')
+ else:
+ svntest.main.file_write(os.path.join(files_dir, filename[1]),
+ 'foo\nbar\nbaz\n')
+
+ if do_import_or_add:
+ if len(subdir) == 0:
+ # add/import the files
+ for filename in filenames:
+ if cmd == 'import':
+ path = os.path.join(files_dir, filename[0])
+ tmp_params = parameters + [path, repos_url + '/' + filename[2]]
+ else:
+ path = os.path.join(files_dir, filename[1])
+ tmp_params = parameters + [path]
+ svntest.main.run_svn(None, *tmp_params)
+ else:
+ # add/import subdirectory
+ if cmd == 'import':
+ parameters = parameters + [files_dir, repos_url]
+ else:
+ parameters = parameters + [files_wc_dir]
+ svntest.main.run_svn(None, *parameters)
+
+ # do an svn co if needed
+ if cmd == 'import':
+ svntest.main.run_svn(None, 'checkout', repos_url + '/A', files_wc_dir,
+ '--config-dir', config_dir)
+
+ check_inheritable_autoprops(sbox, auto_props_cfg_enabled,
+ inheritable_auto_props_enabled)
+
+ return config_dir
+
+#----------------------------------------------------------------------
+
+def svn_prop_inheritable_autoprops_add_no_none(sbox):
+ "inherit add: config=no, commandline=none"
+ inheritable_autoprops_test(sbox, 'add', False, 0, '')
+
+#----------------------------------------------------------------------
+
+def svn_prop_inheritable_autoprops_add_yes_none(sbox):
+ "inherit add: config=yes, commandline=none"
+ inheritable_autoprops_test(sbox, 'add', True, 0, '')
+
+#----------------------------------------------------------------------
+
+def svn_prop_inheritable_autoprops_add_no_yes(sbox):
+ "inherit add: config=no, commandline=yes"
+
+ inheritable_autoprops_test(sbox, 'add', 0, 1, '')
+
+#----------------------------------------------------------------------
+
+def svn_prop_inheritable_autoprops_add_yes_yes(sbox):
+ "inherit add: config=yes, commandline=yes"
+
+ inheritable_autoprops_test(sbox, 'add', 1, 1, '')
+
+#----------------------------------------------------------------------
+
+def svn_prop_inheritable_autoprops_add_no_no(sbox):
+ "inherit add: config=no, commandline=no"
+
+ inheritable_autoprops_test(sbox, 'add', 0, -1, '')
+
+#----------------------------------------------------------------------
+
+def svn_prop_inheritable_autoprops_add_yes_no(sbox):
+ "inherit add: config=yes, commandline=no"
+
+ inheritable_autoprops_test(sbox, 'add', 1, -1, '')
+
+#----------------------------------------------------------------------
+
+def svn_prop_inheritable_autoprops_import_no_none(sbox):
+ "inherit import: config=no, commandline=none"
+
+ inheritable_autoprops_test(sbox, 'import', False, 0, '')
+
+#----------------------------------------------------------------------
+
+def svn_prop_inheritable_autoprops_imp_yes_none(sbox):
+ "inherit import: config=yes, commandline=none"
+
+ inheritable_autoprops_test(sbox, 'import', 1, 0, '')
+
+#----------------------------------------------------------------------
+
+def svn_prop_inheritable_autoprops_imp_no_yes(sbox):
+ "inherit import: config=no, commandline=yes"
+
+ inheritable_autoprops_test(sbox, 'import', 0, 1, '')
+
+#----------------------------------------------------------------------
+
+def svn_prop_inheritable_autoprops_imp_yes_yes(sbox):
+ "inherit import: config=yes, commandline=yes"
+
+ inheritable_autoprops_test(sbox, 'import', 1, 1, '')
+
+#----------------------------------------------------------------------
+
+def svn_prop_inheritable_autoprops_imp_no_no(sbox):
+ "inherit import: config=no, commandline=no"
+
+ inheritable_autoprops_test(sbox, 'import', 0, -1, '')
+
+#----------------------------------------------------------------------
+
+def svn_prop_inheritable_autoprops_imp_yes_no(sbox):
+ "inherit import: config=yes, commandline=no"
+
+ inheritable_autoprops_test(sbox, 'import', 1, -1, '')
+
+#----------------------------------------------------------------------
+# Test svn:auto-props when 'svn add' targets an already versioned
+# target.
+def svn_prop_inheritable_autoprops_add_versioned_target(sbox):
+ "svn:auto-props and versioned target"
+
+ config_dir = inheritable_autoprops_test(sbox, 'add', 1, 0, '', False)
+
+ # Perform the add with the --force flag, and check the status.
+ ### Note: You have to be inside the working copy or else Subversion
+ ### will think you're trying to add the working copy to its parent
+ ### directory, and will (possibly, if the parent directory isn't
+ ### versioned) fail -- see also schedule_tests.py 11 "'svn add'
+ ### should traverse already-versioned dirs"
+ saved_wd = os.getcwd()
+ os.chdir(sbox.wc_dir)
+ svntest.main.run_svn(None, 'add', '.', '--force', '--config-dir',
+ config_dir)
+ os.chdir(saved_wd)
+ check_inheritable_autoprops(sbox, True, True)
+
+ # Revert additions and try with --no-auto-props
+ svntest.main.run_svn(None, 'revert', '-R', sbox.wc_dir)
+
+ # When the add above sets svn:executable on D/rip.bat, subversion
+ # also sets the execute bits on the file (on systems that support
+ # that). The revert above does not return the file to its original
+  # permissions, so we do so manually now.  Otherwise the following
+ # addition will notice the executable bits and set svn:executable
+ # again, which is not what we are here to test.
+ if os.name == 'posix':
+ os.chmod(os.path.join(sbox.wc_dir, 'D', 'rip.bat'),
+ svntest.main.S_ALL_READ | stat.S_IWUSR | stat.S_IWGRP)
+
+ os.chdir(sbox.wc_dir)
+ svntest.main.run_svn(None, 'add', '.', '--force', '--no-auto-props',
+ '--config-dir', config_dir)
+ os.chdir(saved_wd)
+ check_inheritable_autoprops(sbox, False, False)
+
+ # Create a new config with auto-props disabled.
+ #
+ # Then revert the previous additions and add again, only the
+ # svn:auto-props should be applied.
+ config_dir = create_inherited_autoprops_config(sbox, False)
+
+ svntest.main.run_svn(None, 'revert', '-R', sbox.wc_dir)
+ os.chdir(sbox.wc_dir)
+ svntest.main.run_svn(None, 'add', '.', '--force',
+ '--config-dir', config_dir)
+ os.chdir(saved_wd)
+ check_inheritable_autoprops(sbox, False, True)
+
+ # Revert a final time and add again with the --auto-props switch.
+ # Both the config defined and svn:auto-props should be applied.
+ svntest.main.run_svn(None, 'revert', '-R', sbox.wc_dir)
+ os.chdir(sbox.wc_dir)
+ svntest.main.run_svn(None, 'add', '.', '--force', '--auto-props',
+ '--config-dir', config_dir)
+ os.chdir(saved_wd)
+ check_inheritable_autoprops(sbox, True, True)
+
+#----------------------------------------------------------------------
+# Can't set svn:auto-props on files.
+def svn_prop_inheritable_autoprops_propset_file_target(sbox):
+ "svn:auto-props can't be set on files"
+
+ sbox.build()
+ svntest.actions.run_and_verify_svn(
+ None,
+ ".*Cannot set '" + SVN_PROP_INHERITABLE_AUTOPROPS + "' on a file.*",
+ 'ps', SVN_PROP_INHERITABLE_AUTOPROPS, '*.c=svn:eol-style=native',
+ sbox.ospath('iota'))
+
+#----------------------------------------------------------------------
+# Multiple unversioned subtrees under a versioned target shouldn't segfault.
+def svn_prop_inheritable_autoprops_unversioned_subtrees_versioned_target(sbox):
+ "versioned target and unversioned subtrees"
+
+ sbox.build()
+ Z_path = sbox.ospath('A/D/Z')
+ Y_path = sbox.ospath('A/B/Y')
+ foo_path = sbox.ospath('A/D/Z/foo.c')
+ bar_path = sbox.ospath('A/B/Y/bar.c')
+
+ # Set svn:auto-props properties on two directories.
+ svntest.main.run_svn(None, 'ps', SVN_PROP_INHERITABLE_AUTOPROPS,
+ '*.c=svn:eol-style=CR', sbox.ospath('A/B'))
+ svntest.main.run_svn(None, 'ps', SVN_PROP_INHERITABLE_AUTOPROPS,
+ '*.c=svn:eol-style=native', sbox.ospath('A/D'))
+ sbox.simple_commit(message='Add inheritable autoprops')
+
+ # Create two subtrees, each with one new file.
+ os.mkdir(Z_path)
+ os.mkdir(Y_path)
+ svntest.main.file_write(foo_path,
+ '/* Someday there will be code here. */\n')
+ svntest.main.file_write(bar_path,
+ '/* Someday there will be code here. */\n')
+
+ # Perform the add with the --force flag, targeting the root of the WC.
+ ### Note: You have to be inside the working copy or else Subversion
+ ### will think you're trying to add the working copy to its parent
+ ### directory, and will (possibly, if the parent directory isn't
+ ### versioned) fail -- see also schedule_tests.py 11 "'svn add'
+ ### should traverse already-versioned dirs"
+ saved_wd = os.getcwd()
+ os.chdir(sbox.wc_dir)
+ # This was causing a segfault at one point.
+ svntest.main.run_svn(None, 'add', '.', '--force')
+ os.chdir(saved_wd)
+
+ # Check the resulting autoprops.
+ svntest.actions.run_and_verify_svn('native\n', [],
+ 'pg', 'svn:eol-style', foo_path)
+ svntest.actions.run_and_verify_svn('CR\n', [],
+ 'pg', 'svn:eol-style', bar_path)
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ autoprops_add_no_none,
+ autoprops_add_yes_none,
+ autoprops_add_no_yes,
+ autoprops_add_yes_yes,
+ autoprops_add_no_no,
+ autoprops_add_yes_no,
+ autoprops_imp_no_none,
+ autoprops_imp_yes_none,
+ autoprops_imp_no_yes,
+ autoprops_imp_yes_yes,
+ autoprops_imp_no_no,
+ autoprops_imp_yes_no,
+ autoprops_add_dir,
+ autoprops_imp_dir,
+ fail_add_mixed_eol_style,
+ svn_prop_inheritable_autoprops_add_no_none,
+ svn_prop_inheritable_autoprops_add_yes_none,
+ svn_prop_inheritable_autoprops_add_no_yes,
+ svn_prop_inheritable_autoprops_add_yes_yes,
+ svn_prop_inheritable_autoprops_add_no_no,
+ svn_prop_inheritable_autoprops_add_yes_no,
+ svn_prop_inheritable_autoprops_import_no_none,
+ svn_prop_inheritable_autoprops_imp_yes_none,
+ svn_prop_inheritable_autoprops_imp_no_yes,
+ svn_prop_inheritable_autoprops_imp_yes_yes,
+ svn_prop_inheritable_autoprops_imp_no_no,
+ svn_prop_inheritable_autoprops_imp_yes_no,
+ svn_prop_inheritable_autoprops_add_versioned_target,
+ svn_prop_inheritable_autoprops_propset_file_target,
+ svn_prop_inheritable_autoprops_unversioned_subtrees_versioned_target,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/basic_tests.py b/subversion/tests/cmdline/basic_tests.py
new file mode 100755
index 0000000..4b0b8d5
--- /dev/null
+++ b/subversion/tests/cmdline/basic_tests.py
@@ -0,0 +1,3281 @@
+#!/usr/bin/env python
+#
+# basic_tests.py: testing working-copy interactions with ra_local
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import shutil, stat, re, os, logging
+
+logger = logging.getLogger()
+
+# Our testing module
+import svntest
+from svntest import wc
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = wc.StateItem
+
+# Generic UUID-matching regular expression
+uuid_regex = re.compile(r"[a-fA-F0-9]{8}(-[a-fA-F0-9]{4}){3}-[a-fA-F0-9]{12}")
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+#----------------------------------------------------------------------
+
+def basic_checkout(sbox):
+ "basic checkout of a wc"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ # Checkout of a different URL into a working copy fails
+ A_url = sbox.repo_url + '/A'
+ svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
+ # "Obstructed update",
+ 'co', A_url,
+ wc_dir)
+
+ # Make some changes to the working copy
+ mu_path = sbox.ospath('A/mu')
+ svntest.main.file_append(mu_path, 'appended mu text')
+ lambda_path = sbox.ospath('A/B/lambda')
+ os.remove(lambda_path)
+ G_path = sbox.ospath('A/D/G')
+
+ svntest.actions.run_and_verify_svn(None, [], 'rm', G_path)
+
+ expected_output = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_output.tweak('A/mu', status='M ')
+ expected_output.tweak('A/B/lambda', status='! ')
+ expected_output.tweak('A/D/G',
+ 'A/D/G/pi',
+ 'A/D/G/rho',
+ 'A/D/G/tau', status='D ')
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_output)
+
+ # Repeat checkout of original URL into working copy with modifications
+ url = sbox.repo_url
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'co', url,
+ wc_dir)
+
+ # lambda is restored, modifications remain, deletes remain scheduled
+ # for deletion although files are restored to the filesystem
+ expected_output.tweak('A/B/lambda', status=' ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_output)
+
+#----------------------------------------------------------------------
+
+def basic_status(sbox):
+ "basic status command"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ # Created expected output tree for 'svn status'
+ output = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ svntest.actions.run_and_verify_status(wc_dir, output)
+
+ os.chdir(sbox.ospath('A'))
+ output = svntest.actions.get_virginal_state("..", 1)
+ svntest.actions.run_and_verify_status("..", output)
+
+#----------------------------------------------------------------------
+
+def basic_commit(sbox):
+ "basic commit command"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make a couple of local mods to files
+ mu_path = sbox.ospath('A/mu')
+ rho_path = sbox.ospath('A/D/G/rho')
+ svntest.main.file_append(mu_path, 'appended mu text')
+ svntest.main.file_append(rho_path, 'new appended text for rho')
+
+ # Created expected output tree for 'svn ci'
+ expected_output = wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ 'A/D/G/rho' : Item(verb='Sending'),
+ })
+
+ # Create expected status tree; all local revisions should be at 1,
+ # but mu and rho should be at revision 2.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', 'A/D/G/rho', wc_rev=2)
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+
+#----------------------------------------------------------------------
+
+def basic_update(sbox):
+ "basic update command"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make a backup copy of the working copy
+ wc_backup = sbox.add_wc_path('backup')
+ svntest.actions.duplicate_dir(wc_dir, wc_backup)
+
+ # Make a couple of local mods to files
+ mu_path = sbox.ospath('A/mu')
+ rho_path = sbox.ospath('A/D/G/rho')
+ svntest.main.file_append(mu_path, 'appended mu text')
+ svntest.main.file_append(rho_path, 'new appended text for rho')
+
+ # Created expected output tree for 'svn ci'
+ expected_output = wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ 'A/D/G/rho' : Item(verb='Sending'),
+ })
+
+ # Create expected status tree; all local revisions should be at 1,
+ # but mu and rho should be at revision 2.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', 'A/D/G/rho', wc_rev=2)
+
+ # Commit.
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Create expected output tree for an update of the wc_backup.
+ expected_output = wc.State(wc_backup, {
+ 'A/mu' : Item(status='U '),
+ 'A/D/G/rho' : Item(status='U '),
+ })
+
+ # Create expected disk tree for the update.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu',
+ contents=expected_disk.desc['A/mu'].contents
+ + 'appended mu text')
+ expected_disk.tweak('A/D/G/rho',
+ contents=expected_disk.desc['A/D/G/rho'].contents
+ + 'new appended text for rho')
+
+ # Create expected status tree for the update.
+ expected_status = svntest.actions.get_virginal_state(wc_backup, 2)
+
+ # Do the update and check the results in three ways.
+ svntest.actions.run_and_verify_update(wc_backup,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+ # Unversioned paths, those that are not immediate children of a versioned
+ # path, are skipped and do raise an error if they are the only targets
+ xx_path = sbox.ospath('xx/xx')
+ expected_err = "svn: E155007: "
+ svntest.actions.run_and_verify_svn(
+ ["Skipped '"+xx_path+"'\n", ],
+ expected_err,
+ 'update', xx_path)
+ svntest.actions.run_and_verify_svn(
+ [], expected_err,
+ 'update', '--quiet', xx_path)
+
+ # Unversioned paths, that are not the only targets of the command are
+ # skipped without an error
+ svntest.actions.run_and_verify_svn(
+ ["Updating '"+mu_path+"':\n",
+ "At revision 2.\n",
+ "Skipped '"+xx_path+"'\n",
+ "Summary of updates:\n",
+ " Updated '"+mu_path+"' to r2.\n"
+ ] + svntest.main.summary_of_conflicts(skipped_paths=1),
+ [], 'update', mu_path, xx_path)
+ svntest.actions.run_and_verify_svn(
+ [], [], 'update', '--quiet', mu_path, xx_path)
+
+#----------------------------------------------------------------------
+def basic_mkdir_url(sbox):
+ "basic mkdir URL"
+
+ sbox.build()
+
+ Y_url = sbox.repo_url + '/Y'
+ Y_Z_url = sbox.repo_url + '/Y/Z'
+
+ svntest.actions.run_and_verify_svn(["Committing transaction...\n",
+ "Committed revision 2.\n"], [],
+ 'mkdir', '-m', 'log_msg', Y_url, Y_Z_url)
+
+ expected_output = wc.State(sbox.wc_dir, {
+ 'Y' : Item(status='A '),
+ 'Y/Z' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'Y' : Item(),
+ 'Y/Z' : Item()
+ })
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 2)
+ expected_status.add({
+ 'Y' : Item(status=' ', wc_rev=2),
+ 'Y/Z' : Item(status=' ', wc_rev=2)
+ })
+
+ svntest.actions.run_and_verify_update(sbox.wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+
+#----------------------------------------------------------------------
+def basic_mkdir_url_with_parents(sbox):
+ "basic mkdir URL, including parent directories"
+
+ sbox.build()
+
+ X_url = sbox.repo_url + '/X'
+ X_Y_Z_url = sbox.repo_url + '/X/Y/Z'
+ X_Y_Z2_url = sbox.repo_url + '/X/Y/Z2'
+ X_T_C_url = sbox.repo_url + '/X/T/C'
+ U_url = sbox.repo_url + '/U'
+ U_V_url = sbox.repo_url + '/U/V'
+ U_V_W_url = sbox.repo_url + '/U/V/W'
+ svntest.actions.run_and_verify_svn(None,
+ ".*Try 'svn mkdir --parents' instead.*",
+ 'mkdir', '-m', 'log_msg',
+ X_Y_Z_url, X_Y_Z2_url, X_T_C_url, U_V_W_url)
+
+ svntest.actions.run_and_verify_svn(["Committing transaction...\n",
+ "Committed revision 2.\n"], [],
+ 'mkdir', '-m', 'log_msg',
+ X_url, U_url)
+
+ svntest.actions.run_and_verify_svn(["Committing transaction...\n",
+ "Committed revision 3.\n"], [],
+ 'mkdir', '-m', 'log_msg', '--parents',
+ X_Y_Z_url, X_Y_Z2_url, X_T_C_url, U_V_W_url)
+
+ expected_output = wc.State(sbox.wc_dir, {
+ 'X' : Item(status='A '),
+ 'X/Y' : Item(status='A '),
+ 'X/Y/Z' : Item(status='A '),
+ 'X/Y/Z2' : Item(status='A '),
+ 'X/T' : Item(status='A '),
+ 'X/T/C' : Item(status='A '),
+ 'U' : Item(status='A '),
+ 'U/V' : Item(status='A '),
+ 'U/V/W' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'X' : Item(),
+ 'X/Y' : Item(),
+ 'X/Y/Z' : Item(),
+ 'X/Y/Z2' : Item(),
+ 'X/T' : Item(),
+ 'X/T/C' : Item(),
+ 'U' : Item(),
+ 'U/V' : Item(),
+ 'U/V/W' : Item(),
+ })
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 3)
+ expected_status.add({
+ 'X' : Item(status=' ', wc_rev=3),
+ 'X/Y' : Item(status=' ', wc_rev=3),
+ 'X/Y/Z' : Item(status=' ', wc_rev=3),
+ 'X/Y/Z2' : Item(status=' ', wc_rev=3),
+ 'X/T' : Item(status=' ', wc_rev=3),
+ 'X/T/C' : Item(status=' ', wc_rev=3),
+ 'U' : Item(status=' ', wc_rev=3),
+ 'U/V' : Item(status=' ', wc_rev=3),
+ 'U/V/W' : Item(status=' ', wc_rev=3),
+ })
+ svntest.actions.run_and_verify_update(sbox.wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+
+#----------------------------------------------------------------------
+def basic_mkdir_wc_with_parents(sbox):
+ "basic mkdir, including parent directories"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ Y_Z_path = sbox.ospath('Y/Z')
+
+ svntest.actions.run_and_verify_svn([],
+ ".*Try 'svn mkdir --parents' instead.*",
+ 'mkdir', Y_Z_path)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mkdir', '--parents', Y_Z_path)
+
+ # Verify the WC status, because there was a regression in which parts of
+ # the WC were left locked.
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ expected_status.add({
+ 'Y' : Item(status='A ', wc_rev=0),
+ 'Y/Z' : Item(status='A ', wc_rev=0),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+#----------------------------------------------------------------------
+def basic_commit_corruption(sbox):
+ "basic corruption detection on commit"
+
+ ## I always wanted a test named "basic_corruption". :-)
+ ## Here's how it works:
+ ##
+ ## 1. Make a working copy at rev 1, duplicate it. Now we have
+ ## two working copies at rev 1. Call them first and second.
+ ## 2. Make a local mod to `first/A/mu'.
+ ## 3. Intentionally corrupt `first/A/.svn/text-base/mu.svn-base'.
+ ## 4. Try to commit, expect a failure.
+ ## 5. Repair the text-base, commit again, expect success.
+ ##
+ ## Here we go...
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make a local mod to mu
+ mu_path = sbox.ospath('A/mu')
+ svntest.main.file_append(mu_path, 'appended mu text')
+
+ # Created expected output tree for 'svn ci'
+ expected_output = wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+
+ # Create expected status tree; all local revisions should be at 1,
+ # but mu should be at revision 2.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=2)
+
+ # Modify mu's text-base, so we get a checksum failure the first time
+ # we try to commit.
+ mu_tb_path = svntest.wc.text_base_path(mu_path)
+ tb_dir_path = os.path.dirname(mu_tb_path)
+ mu_saved_tb_path = mu_tb_path + "-saved"
+ tb_dir_saved_mode = os.stat(tb_dir_path)[stat.ST_MODE]
+ mu_tb_saved_mode = os.stat(mu_tb_path)[stat.ST_MODE]
+ ### What's a more portable way to do this?
+ os.chmod(tb_dir_path, svntest.main.S_ALL_RWX)
+ os.chmod(mu_tb_path, svntest.main.S_ALL_RW)
+ shutil.copyfile(mu_tb_path, mu_saved_tb_path)
+ svntest.main.file_append(mu_tb_path, 'Aaagggkkk, corruption!')
+ os.chmod(tb_dir_path, tb_dir_saved_mode)
+ os.chmod(mu_tb_path, mu_tb_saved_mode)
+
+ # This commit should fail due to text base corruption.
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ None, # expected_status,
+ "svn: E200014: Checksum")
+
+ # Restore the uncorrupted text base.
+ os.chmod(tb_dir_path, svntest.main.S_ALL_RWX)
+ os.chmod(mu_tb_path, svntest.main.S_ALL_RW)
+ os.remove(mu_tb_path)
+ os.rename(mu_saved_tb_path, mu_tb_path)
+ os.chmod(tb_dir_path, tb_dir_saved_mode)
+ os.chmod(mu_tb_path, mu_tb_saved_mode)
+
+ # This commit should succeed.
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+def basic_update_corruption(sbox):
+ "basic corruption detection on update"
+
+ ## I always wanted a test named "basic_corruption". :-)
+ ## Here's how it works:
+ ##
+ ## 1. Make a working copy at rev 1, duplicate it. Now we have
+ ## two working copies at rev 1. Call them first and second.
+ ## 2. Make a local mod to `first/A/mu'.
+ ## 3. Repair the text-base, commit again, expect success.
+ ## 4. Intentionally corrupt `second/A/.svn/text-base/mu.svn-base'.
+ ## 5. Try to update `second', expect failure.
+ ## 6. Repair the text-base, update again, expect success.
+ ##
+ ## Here we go...
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make the "other" working copy
+ other_wc = sbox.add_wc_path('other')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'co', sbox.repo_url, other_wc)
+
+ # Make a local mod to mu
+ mu_path = sbox.ospath('A/mu')
+ svntest.main.file_append(mu_path, 'appended mu text')
+
+ # Created expected output tree for 'svn ci'
+ expected_output = wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+
+ # Create expected status tree; all local revisions should be at 1,
+ # but mu should be at revision 2.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=2)
+
+ # This commit should succeed.
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Create expected output tree for an update of the other_wc.
+ expected_output = wc.State(other_wc, {
+ 'A/mu' : Item(status='U '),
+ })
+
+ # Create expected disk tree for the update.
+ expected_disk = svntest.main.greek_state.copy()
+ # Create expected status tree for the update.
+ expected_status = svntest.actions.get_virginal_state(other_wc, 2)
+
+ # Modify mu's text-base, so we get a checksum failure the first time
+ # we try to update.
+ other_mu_path = os.path.join(other_wc, 'A', 'mu')
+ mu_tb_path = svntest.wc.text_base_path(other_mu_path)
+ tb_dir_path = os.path.dirname(mu_tb_path)
+ mu_saved_tb_path = mu_tb_path + "-saved"
+ tb_dir_saved_mode = os.stat(tb_dir_path)[stat.ST_MODE]
+ mu_tb_saved_mode = os.stat(mu_tb_path)[stat.ST_MODE]
+ os.chmod(tb_dir_path, svntest.main.S_ALL_RWX)
+ os.chmod(mu_tb_path, svntest.main.S_ALL_RW)
+ shutil.copyfile(mu_tb_path, mu_saved_tb_path)
+ svntest.main.file_append(mu_tb_path, 'Aiyeeeee, corruption!\nHelp!\n')
+ os.chmod(tb_dir_path, tb_dir_saved_mode)
+ os.chmod(mu_tb_path, mu_tb_saved_mode)
+
+ # Do the update and check the results in four ways.
+ fail_output = wc.State(other_wc, {
+ })
+ fail_status = svntest.actions.get_virginal_state(other_wc, 1)
+ fail_status.tweak('A', '', status='! ', wc_rev=2)
+ svntest.actions.run_and_verify_update(other_wc,
+ fail_output,
+ expected_disk,
+ fail_status,
+ "svn: E155017: Checksum")
+
+ # Restore the uncorrupted text base.
+ os.chmod(tb_dir_path, svntest.main.S_ALL_RWX)
+ os.chmod(mu_tb_path, svntest.main.S_ALL_RW)
+ os.remove(mu_tb_path)
+ os.rename(mu_saved_tb_path, mu_tb_path)
+ os.chmod(tb_dir_path, tb_dir_saved_mode)
+ os.chmod(mu_tb_path, mu_tb_saved_mode)
+
+ # Create expected status tree for the update.
+ expected_status = svntest.actions.get_virginal_state(other_wc, 2)
+
+ # This update should succeed. (Actually, I'm kind of astonished
+ # that this works without even an intervening "svn cleanup".)
+ expected_disk.tweak('A/mu',
+ contents=expected_disk.desc['A/mu'].contents
+ + 'appended mu text')
+
+ svntest.actions.run_and_verify_update(other_wc,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+#----------------------------------------------------------------------
def basic_merging_update(sbox):
  "receiving text merges as part of an update"

  sbox.build()
  wc_dir = sbox.wc_dir

  # First change the greek tree to make two files 10 lines long
  mu_path = sbox.ospath('A/mu')
  rho_path = sbox.ospath('A/D/G/rho')
  mu_text = ""
  rho_text = ""
  for x in range(2,11):
    mu_text = mu_text + '\nThis is line ' + repr(x) + ' in mu'
    rho_text = rho_text + '\nThis is line ' + repr(x) + ' in rho'
  svntest.main.file_append(mu_path, mu_text)
  svntest.main.file_append(rho_path, rho_text)

  # Create expected output tree for initial commit
  expected_output = wc.State(wc_dir, {
    'A/mu' : Item(verb='Sending'),
    'A/D/G/rho' : Item(verb='Sending'),
    })

  # Create expected status tree; all local revisions should be at 1,
  # but mu and rho should be at revision 2.
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/mu', 'A/D/G/rho', wc_rev=2)

  # Initial commit (creates r2).
  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output,
                                        expected_status,
                                        [],
                                        wc_dir)

  # Make a backup copy of the working copy
  wc_backup = sbox.add_wc_path('backup')
  svntest.actions.duplicate_dir(wc_dir, wc_backup)

  # Make a couple of local mods to files
  svntest.main.file_append(mu_path, ' Appended to line 10 of mu')
  svntest.main.file_append(rho_path, ' Appended to line 10 of rho')

  # Created expected output tree for 'svn ci'
  expected_output = wc.State(wc_dir, {
    'A/mu' : Item(verb='Sending'),
    'A/D/G/rho' : Item(verb='Sending'),
    })

  # Create expected status tree; all local revisions should be at 1,
  # but mu and rho should be at revision 3.
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/mu', 'A/D/G/rho', wc_rev=3)

  # Commit (creates r3; the backup wc is still at r2's text).
  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output,
                                        expected_status,
                                        [],
                                        wc_dir)

  # Make local mods to wc_backup by recreating mu and rho
  mu_path_backup = os.path.join(wc_backup, 'A', 'mu')
  rho_path_backup = os.path.join(wc_backup, 'A', 'D', 'G', 'rho')

  # open in 'truncate to zero then write' mode
  backup_mu_text = 'This is the new line 1 in the backup copy of mu'
  for x in range(2,11):
    backup_mu_text = backup_mu_text + '\nThis is line ' + repr(x) + ' in mu'
  svntest.main.file_write(mu_path_backup, backup_mu_text, 'w+')

  backup_rho_text = 'This is the new line 1 in the backup copy of rho'
  for x in range(2,11):
    backup_rho_text = backup_rho_text + '\nThis is line ' + repr(x) + ' in rho'
  svntest.main.file_write(rho_path_backup, backup_rho_text, 'w+')

  # Create expected output tree for an update of the wc_backup.
  # 'G ' = incoming text change merged with the local edits.
  expected_output = wc.State(wc_backup, {
    'A/mu' : Item(status='G '),
    'A/D/G/rho' : Item(status='G '),
    })

  # Create expected disk tree for the update.
  expected_disk = svntest.main.greek_state.copy()
  expected_disk.tweak('A/mu',
                      contents=backup_mu_text + ' Appended to line 10 of mu')
  expected_disk.tweak('A/D/G/rho',
                      contents=backup_rho_text + ' Appended to line 10 of rho')

  # Create expected status tree for the update; files stay locally
  # modified ('M ') after the merge.
  expected_status = svntest.actions.get_virginal_state(wc_backup, 3)
  expected_status.tweak('A/mu', 'A/D/G/rho', status='M ')

  # Do the update and check the results in three ways.
  svntest.actions.run_and_verify_update(wc_backup,
                                        expected_output,
                                        expected_disk,
                                        expected_status)
+
+#----------------------------------------------------------------------
+
+
def basic_conflict(sbox):
  "basic conflict creation and resolution"

  sbox.build()
  wc_dir = sbox.wc_dir

  # Make a backup copy of the working copy
  wc_backup = sbox.add_wc_path('backup')
  svntest.actions.duplicate_dir(wc_dir, wc_backup)

  # Make a couple of local mods to files which will be committed
  mu_path = sbox.ospath('A/mu')
  rho_path = sbox.ospath('A/D/G/rho')
  svntest.main.file_append(mu_path, 'Original appended text for mu\n')
  svntest.main.file_append(rho_path, 'Original appended text for rho\n')

  # Make a couple of local mods to files which will be conflicted
  mu_path_backup = os.path.join(wc_backup, 'A', 'mu')
  rho_path_backup = os.path.join(wc_backup, 'A', 'D', 'G', 'rho')
  svntest.main.file_append(mu_path_backup,
                           'Conflicting appended text for mu\n')
  svntest.main.file_append(rho_path_backup,
                           'Conflicting appended text for rho\n')

  # Created expected output tree for 'svn ci'
  expected_output = wc.State(wc_dir, {
    'A/mu' : Item(verb='Sending'),
    'A/D/G/rho' : Item(verb='Sending'),
    })

  # Create expected status tree; all local revisions should be at 1,
  # but mu and rho should be at revision 2.
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/mu', 'A/D/G/rho', wc_rev=2)

  # Commit (creates r2; the backup wc's overlapping edits now conflict).
  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status)

  # Create expected output tree for an update of the wc_backup.
  # 'C ' = text conflict.
  expected_output = wc.State(wc_backup, {
    'A/mu' : Item(status='C '),
    'A/D/G/rho' : Item(status='C '),
    })

  # Create expected disk tree for the update: both files carry
  # three-way conflict markers.
  expected_disk = svntest.main.greek_state.copy()
  expected_disk.tweak('A/mu',
                      contents="\n".join(["This is the file 'mu'.",
                                          "<<<<<<< .mine",
                                          "Conflicting appended text for mu",
                                          "||||||| .r1",
                                          "=======",
                                          "Original appended text for mu",
                                          ">>>>>>> .r2",
                                          ""]))
  expected_disk.tweak('A/D/G/rho',
                      contents="\n".join(["This is the file 'rho'.",
                                          "<<<<<<< .mine",
                                          "Conflicting appended text for rho",
                                          "||||||| .r1",
                                          "=======",
                                          "Original appended text for rho",
                                          ">>>>>>> .r2",
                                          ""]))

  # Create expected status tree for the update.  Use an int revision
  # for consistency with the other calls in this file.
  expected_status = svntest.actions.get_virginal_state(wc_backup, 2)
  expected_status.tweak('A/mu', 'A/D/G/rho', status='C ')

  # "Extra" files that we expect to result from the conflicts.
  # These are expressed as a list of regexps.  Raw strings keep the
  # '\.' escapes valid under modern Python (a non-raw '\.' is a
  # deprecated/invalid escape sequence).
  extra_files = [r'mu.*\.r1', r'mu.*\.r2', r'mu.*\.mine',
                 r'rho.*\.r1', r'rho.*\.r2', r'rho.*\.mine',]

  # Do the update and check the results in three ways.
  # All "extra" files are passed to detect_conflict_files().
  svntest.actions.run_and_verify_update(wc_backup,
                                        expected_output,
                                        expected_disk,
                                        expected_status,
                                        extra_files=extra_files)

  # verify that the extra_files list is now empty.
  if len(extra_files) != 0:
    # Because we want to be a well-behaved test, we silently raise if
    # the test fails.  However, these two print statements would
    # probably reveal the cause for the failure, if they were
    # uncommented:
    #
    # logger.warn("Not all extra reject files have been accounted for:")
    # logger.warn(extra_files)
    ### we should raise a less generic error here. which?
    raise svntest.Failure

  # So now mu and rho are both in a "conflicted" state.  Run 'svn
  # resolved' on them.
  svntest.actions.run_and_verify_resolved([mu_path_backup, rho_path_backup])

  # See if they've changed back to plain old 'M' state.
  expected_status.tweak('A/mu', 'A/D/G/rho', status='M ')

  # There should be *no* extra backup files lying around the working
  # copy after resolving the conflict; thus we're not passing a custom
  # singleton handler.
  svntest.actions.run_and_verify_status(wc_backup, expected_status)
+
+
+#----------------------------------------------------------------------
+
def basic_cleanup(sbox):
  "basic cleanup command"

  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  # Take administrative locks on three directories.
  B_path = sbox.ospath('A/B')
  G_path = sbox.ospath('A/D/G')
  C_path = sbox.ospath('A/C')
  for locked_dir in (B_path, G_path, C_path):
    svntest.actions.lock_admin_dir(locked_dir)

  # Status should now report all three as locked ('L').
  expected_output = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_output.tweak('A/B', 'A/D/G', 'A/C', locked='L')
  svntest.actions.run_and_verify_status(wc_dir, expected_output)

  # A corrupted/non-existing temporary directory should be restored as
  # long as we are not at single-db (where this tmp dir is gone).
  tmp_path = os.path.join(B_path, svntest.main.get_admin_name(), 'tmp')
  if os.path.exists(tmp_path):
    svntest.main.safe_rmtree(tmp_path)

  # Run cleanup (### todo: cleanup doesn't currently print anything)
  svntest.actions.run_and_verify_svn(None, [], 'cleanup', wc_dir)

  # All the locks should be gone again.
  expected_output = svntest.actions.get_virginal_state(wc_dir, 1)
  svntest.actions.run_and_verify_status(wc_dir, expected_output)
+
+
+#----------------------------------------------------------------------
+
def basic_revert(sbox):
  "basic revert command"

  sbox.build()
  wc_dir = sbox.wc_dir

  # Modify some files and props.
  beta_path = sbox.ospath('A/B/E/beta')
  gamma_path = sbox.ospath('A/D/gamma')
  iota_path = sbox.ospath('iota')
  rho_path = sbox.ospath('A/D/G/rho')
  zeta_path = sbox.ospath('A/D/H/zeta')
  svntest.main.file_append(beta_path, "Added some text to 'beta'.\n")
  svntest.main.file_append(iota_path, "Added some text to 'iota'.\n")
  svntest.main.file_append(rho_path, "Added some text to 'rho'.\n")
  svntest.main.file_append(zeta_path, "Added some text to 'zeta'.\n")

  # zeta is brand new: schedule it for addition.
  svntest.actions.run_and_verify_svn(None, [],
                                     'add', zeta_path)
  svntest.actions.run_and_verify_svn(None, [],
                                     'ps', 'random-prop', 'propvalue',
                                     gamma_path)
  svntest.actions.run_and_verify_svn(None, [],
                                     'ps', 'random-prop', 'propvalue',
                                     iota_path)

  # Verify modified status: 'M ' text mod, ' M' prop mod, 'MM' both.
  expected_output = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_output.tweak('A/B/E/beta', 'A/D/G/rho', status='M ')
  expected_output.tweak('iota', status='MM')
  expected_output.tweak('A/D/gamma', status=' M')
  expected_output.add({
    'A/D/H/zeta' : Item(status='A ', wc_rev=0),
  })

  svntest.actions.run_and_verify_status(wc_dir, expected_output)

  # Run revert (### todo: revert doesn't currently print anything)
  svntest.actions.run_and_verify_svn(None, [],
                                     'revert', beta_path)

  svntest.actions.run_and_verify_svn(None, [],
                                     'revert', gamma_path)

  svntest.actions.run_and_verify_svn(None, [],
                                     'revert', iota_path)

  svntest.actions.run_and_verify_svn(None, [],
                                     'revert', rho_path)

  svntest.actions.run_and_verify_svn(None, [],
                                     'revert', zeta_path)

  # Verify unmodified status.
  expected_output = svntest.actions.get_virginal_state(wc_dir, 1)

  svntest.actions.run_and_verify_status(wc_dir, expected_output)

  # Now, really make sure the contents are back to their original state.
  fp = open(beta_path, 'r')
  lines = fp.readlines()
  if not ((len (lines) == 1) and (lines[0] == "This is the file 'beta'.\n")):
    logger.warn("Revert failed to restore original text.")
    raise svntest.Failure
  fp = open(iota_path, 'r')
  lines = fp.readlines()
  if not ((len (lines) == 1) and (lines[0] == "This is the file 'iota'.\n")):
    logger.warn("Revert failed to restore original text.")
    raise svntest.Failure
  fp = open(rho_path, 'r')
  lines = fp.readlines()
  if not ((len (lines) == 1) and (lines[0] == "This is the file 'rho'.\n")):
    logger.warn("Revert failed to restore original text.")
    raise svntest.Failure
  # zeta was only scheduled for addition, so reverting must leave its
  # on-disk text untouched.
  fp = open(zeta_path, 'r')
  lines = fp.readlines()
  if not ((len (lines) == 1) and (lines[0] == "Added some text to 'zeta'.\n")):
    ### we should raise a less generic error here. which?
    raise svntest.Failure

  # Finally, check that reverted file is not readonly
  os.remove(beta_path)
  svntest.actions.run_and_verify_svn(None, [], 'revert', beta_path)
  if not (open(beta_path, 'r+')):
    raise svntest.Failure

  # Check that a directory scheduled to be added, but physically
  # removed, can be reverted.
  X_path = sbox.ospath('X')

  svntest.actions.run_and_verify_svn(None, [], 'mkdir', X_path)

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.add({
    'X' : Item(status='A ', wc_rev=0),
  })
  svntest.actions.run_and_verify_status(wc_dir, expected_status)
  svntest.main.safe_rmtree(X_path)

  svntest.actions.run_and_verify_svn(None, [], 'revert', X_path)

  expected_status.remove('X')
  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Check that a directory scheduled for deletion, but physically
  # removed, can be reverted.
  E_path = sbox.ospath('A/B/E')
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)

  ### Most of the rest of this test is ineffective, due to the
  ### problems described in issue #1611.
  svntest.actions.run_and_verify_svn(None, [], 'rm', E_path)
  svntest.main.safe_rmtree(E_path)
  expected_status.tweak('A/B/E', status='D ')
  expected_status.tweak('A/B/E', wc_rev='?')
  ### FIXME: A weakness in the test framework, described in detail
  ### in issue #1611, prevents us from checking via status. Grr.
  #
  # svntest.actions.run_and_verify_status(wc_dir, expected_status,
  #                                       None, None, None, None)
  #
  #
  ### If you were to uncomment the above, you'd get an error like so:
  #
  # =============================================================
  # Expected E and actual E are different!
  # =============================================================
  # EXPECTED NODE TO BE:
  # =============================================================
  #  * Node name: E
  #     Path: working_copies/basic_tests-10/A/B/E
  #     Contents: None
  #     Properties: {}
  #     Attributes: {'status': 'D ', 'wc_rev': '?'}
  #     Children: 2
  # =============================================================
  # ACTUAL NODE FOUND:
  # =============================================================
  #  * Node name: E
  #     Path: working_copies/basic_tests-10/A/B/E
  #     Contents: None
  #     Properties: {}
  #     Attributes: {'status': 'D ', 'wc_rev': '?'}
  #     Children: is a file.
  # Unequal Types: one Node is a file, the other is a directory

  # This will actually print
  #
  #    "Failed to revert 'working_copies/basic_tests-10/A/B/E' -- \
  #     try updating instead."
  #
  # ...but due to test suite lossage, it'll still look like success.
  svntest.actions.run_and_verify_svn(None, [], 'revert', E_path)

  ### FIXME: Again, the problem described in issue #1611 bites us here.
  #
  # expected_status.tweak('A/B/E', status='  ')
  # svntest.actions.run_and_verify_status(wc_dir, expected_status,
  #                                       None, None, None, None)
+
+
+#----------------------------------------------------------------------
+
def basic_switch(sbox):
  "basic switch command"

  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  ### Switch the file `iota' to `A/D/gamma'.

  # Construct some paths for convenience
  iota_path = sbox.ospath('iota')
  gamma_url = sbox.repo_url + '/A/D/gamma'

  # Create expected output tree
  expected_output = wc.State(wc_dir, {
    'iota' : Item(status='U '),
    })

  # Create expected disk tree (iota will have gamma's contents)
  expected_disk = svntest.main.greek_state.copy()
  expected_disk.tweak('iota',
                      contents=expected_disk.desc['A/D/gamma'].contents)

  # Create expected status tree
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('iota', switched='S')

  # First, try the switch without the --ignore-ancestry flag,
  # expecting failure.
  expected_error = "svn: E195012: .*no common ancestry.*"
  svntest.actions.run_and_verify_svn(None, expected_error,
                                     'switch', gamma_url, iota_path)

  # Now ignore ancestry so we can get through this switch.
  svntest.actions.run_and_verify_switch(wc_dir, iota_path, gamma_url,
                                        expected_output,
                                        expected_disk,
                                        expected_status,
                                        [],
                                        False, '--ignore-ancestry')

  ### Switch the directory `A/D/H' to `A/D/G'.

  # Construct some paths for convenience
  ADH_path = sbox.ospath('A/D/H')
  chi_path = os.path.join(ADH_path, 'chi')
  omega_path = os.path.join(ADH_path, 'omega')
  psi_path = os.path.join(ADH_path, 'psi')
  pi_path = os.path.join(ADH_path, 'pi')
  tau_path = os.path.join(ADH_path, 'tau')
  rho_path = os.path.join(ADH_path, 'rho')
  ADG_url = sbox.repo_url + '/A/D/G'

  # Create expected output tree: H's children go away, G's appear.
  expected_output = wc.State(wc_dir, {
    'A/D/H/chi' : Item(status='D '),
    'A/D/H/omega' : Item(status='D '),
    'A/D/H/psi' : Item(status='D '),
    'A/D/H/pi' : Item(status='A '),
    'A/D/H/rho' : Item(status='A '),
    'A/D/H/tau' : Item(status='A '),
    })

  # Create expected disk tree (iota will have gamma's contents,
  # A/D/H/* will look like A/D/G/*)
  expected_disk = svntest.main.greek_state.copy()
  expected_disk.tweak('iota',
                      contents=expected_disk.desc['A/D/gamma'].contents)
  expected_disk.remove('A/D/H/chi', 'A/D/H/omega', 'A/D/H/psi')
  expected_disk.add({
    'A/D/H/pi' : Item("This is the file 'pi'.\n"),
    'A/D/H/rho' : Item("This is the file 'rho'.\n"),
    'A/D/H/tau' : Item("This is the file 'tau'.\n"),
    })

  # Create expected status
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.remove('A/D/H/chi',
                         'A/D/H/omega',
                         'A/D/H/psi')
  expected_status.add({
    'A/D/H/pi' : Item(status='  ', wc_rev=1),
    'A/D/H/rho' : Item(status='  ', wc_rev=1),
    'A/D/H/tau' : Item(status='  ', wc_rev=1),
    })
  expected_status.tweak('iota', 'A/D/H', switched='S')

  # First, try the switch without the --ignore-ancestry flag,
  # expecting failure.
  expected_error = "svn: E195012: .*no common ancestry.*"
  svntest.actions.run_and_verify_svn(None, expected_error,
                                     'switch', ADG_url, ADH_path)

  # Do the switch and check the results in three ways.
  svntest.actions.run_and_verify_switch(wc_dir, ADH_path, ADG_url,
                                        expected_output,
                                        expected_disk,
                                        expected_status,
                                        [],
                                        False, '--ignore-ancestry')
+
+#----------------------------------------------------------------------
+
def verify_file_deleted(message, path):
  """Succeed quietly if PATH cannot be opened (i.e. the file is gone).
  Otherwise log MESSAGE (if not None) and raise svntest.Failure."""
  still_readable = True
  try:
    open(path, 'r')
  except IOError:
    still_readable = False
  if not still_readable:
    return
  if message is not None:
    logger.warn(message)
  ### TODO: we should raise a less generic error here. which?
  raise svntest.Failure
+
def verify_dir_deleted(path):
  """Return 1 if PATH is (still) a directory, else 0.  Despite the
  name, a nonzero return means the deletion did NOT happen; callers
  treat a truthy result as 'failed to remove'."""
  return 1 if os.path.isdir(path) else 0
+
@Issue(687,4074)
def basic_delete(sbox):
  "basic delete command"

  sbox.build()
  wc_dir = sbox.wc_dir

  # Copies of unmodified
  sbox.simple_copy('iota', 'iota-copied')
  sbox.simple_copy('A/B/F', 'F-copied')

  # modify text of chi
  chi_parent_path = sbox.ospath('A/D/H')
  chi_path = os.path.join(chi_parent_path, 'chi')
  svntest.main.file_append(chi_path, 'added to chi')

  # modify props of rho (file)
  rho_parent_path = sbox.ospath('A/D/G')
  rho_path = os.path.join(rho_parent_path, 'rho')
  svntest.main.run_svn(None, 'ps', 'abc', 'def', rho_path)

  # modify props of F (dir)
  F_parent_path = sbox.ospath('A/B')
  F_path = os.path.join(F_parent_path, 'F')
  svntest.main.run_svn(None, 'ps', 'abc', 'def', F_path)

  # unversioned file
  sigma_parent_path = sbox.ospath('A/C')
  sigma_path = os.path.join(sigma_parent_path, 'sigma')
  svntest.main.file_append(sigma_path, 'unversioned sigma')

  # unversioned directory
  Q_parent_path = sigma_parent_path
  Q_path = os.path.join(Q_parent_path, 'Q')
  os.mkdir(Q_path)

  # added directory hierarchies
  X_parent_path = sbox.ospath('A/B')
  X_path = os.path.join(X_parent_path, 'X')
  svntest.main.run_svn(None, 'mkdir', X_path)
  X_child_path = os.path.join(X_path, 'xi')
  svntest.main.file_append(X_child_path, 'added xi')
  svntest.main.run_svn(None, 'add', X_child_path)
  Y_parent_path = sbox.ospath('A/D')
  Y_path = os.path.join(Y_parent_path, 'Y')
  svntest.main.run_svn(None, 'mkdir', Y_path)

  # check status
  expected_output = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_output.tweak('A/D/H/chi', status='M ')
  expected_output.tweak('A/D/G/rho', 'A/B/F', status=' M')
  # expected_output.tweak('A/C/sigma', status='? ')
  expected_output.add({
    'A/B/X' : Item(status='A ', wc_rev='-'),
    'A/B/X/xi' : Item(status='A ', wc_rev='-'),
    'A/D/Y' : Item(status='A ', wc_rev='-'),
    'F-copied' : Item(status='A ', copied='+', wc_rev='-'),
    'iota-copied' : Item(status='A ', copied='+', wc_rev='-'),
    })

  svntest.actions.run_and_verify_status(wc_dir, expected_output)

  # 'svn rm' without --force should refuse to delete anything that has
  # local modifications, unversioned items, or added children.
  svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
                                     'rm', chi_path)

  svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
                                     'rm', chi_parent_path)

  svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
                                     'rm', rho_path)

  svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
                                     'rm', rho_parent_path)

  svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
                                     'rm', F_path)

  svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
                                     'rm', F_parent_path)

  svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
                                     'rm', sigma_path)

  svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
                                     'rm', sigma_parent_path)

  svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
                                     'rm', X_path)

  # check status has not changed
  svntest.actions.run_and_verify_status(wc_dir, expected_output)

  # 'svn rm' that should work
  E_path = sbox.ospath('A/B/E')
  svntest.actions.run_and_verify_svn(None, [], 'rm', E_path)

  # 'svn rm --force' that should work
  svntest.actions.run_and_verify_svn(None, [], 'rm', '--force',
                                     chi_parent_path)

  svntest.actions.run_and_verify_svn(None, [],
                                     'rm', '--force', rho_parent_path)

  svntest.actions.run_and_verify_svn(None, [],
                                     'rm', '--force', F_path)

  svntest.actions.run_and_verify_svn(None, [],
                                     'rm', '--force', sigma_parent_path)

  svntest.actions.run_and_verify_svn(None, [],
                                     'rm', '--force', X_path)

  # Deleting an unchanged copy shouldn't error.
  sbox.simple_mkdir('Z-added')
  svntest.main.run_svn(None, 'rm', sbox.ospath('iota-copied'),
                       sbox.ospath('F-copied'),
                       sbox.ospath('Z-added'))

  # Deleting already removed from wc versioned item with --force
  iota_path = sbox.ospath('iota')
  os.remove(iota_path)
  svntest.actions.run_and_verify_svn(None, [],
                                     'rm', '--force', iota_path)

  # and without --force
  gamma_path = sbox.ospath('A/D/gamma')
  os.remove(gamma_path)
  svntest.actions.run_and_verify_svn(None, [], 'rm', gamma_path)

  # Deleting already scheduled for deletion doesn't require --force
  svntest.actions.run_and_verify_svn(None, [], 'rm', gamma_path)

  svntest.actions.run_and_verify_svn(None, [], 'rm', E_path)

  # check status
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/D/H',
                        'A/D/H/chi',
                        'A/D/H/omega',
                        'A/D/H/psi',
                        'A/D/G',
                        'A/D/G/rho',
                        'A/D/G/pi',
                        'A/D/G/tau',
                        'A/B/E',
                        'A/B/E/alpha',
                        'A/B/E/beta',
                        'A/B/F',
                        'A/C',
                        'iota',
                        'A/D/gamma', status='D ')
  expected_status.add({
    'A/D/Y' : Item(status='A ', wc_rev=0),
    })

  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # issue 687 delete directory with uncommitted directory child
  svntest.actions.run_and_verify_svn(None, [],
                                     'rm', '--force', Y_parent_path)

  expected_status.tweak('A/D', status='D ')
  expected_status.remove('A/D/Y')
  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # check files have been removed
  verify_file_deleted("Failed to remove text modified file", rho_path)
  verify_file_deleted("Failed to remove prop modified file", chi_path)
  verify_file_deleted("Failed to remove unversioned file", sigma_path)
  verify_file_deleted("Failed to remove unmodified file",
                      os.path.join(E_path, 'alpha'))

  # check versioned dir is not removed
  if not verify_dir_deleted(F_path):
    # If we are not running in single-db, this is an error
    if os.path.isdir(os.path.join(F_path, '../' + svntest.main.get_admin_name())):
      raise svntest.Failure("Removed administrative area")

  # check unversioned and added dirs have been removed
  if verify_dir_deleted(Q_path):
    logger.warn("Failed to remove unversioned dir")
    ### we should raise a less generic error here. which?
    raise svntest.Failure
  if verify_dir_deleted(X_path):
    logger.warn("Failed to remove added dir")
    ### we should raise a less generic error here. which?
    raise svntest.Failure

  # Deleting unversioned file explicitly
  foo_path = sbox.ospath('foo')
  svntest.main.file_append(foo_path, 'unversioned foo')
  svntest.actions.run_and_verify_svn(None, [],
                                     'rm', '--force', foo_path)
  verify_file_deleted("Failed to remove unversioned file foo", foo_path)

  # At one stage deleting a URL dumped core
  iota_URL = sbox.repo_url + '/iota'

  svntest.actions.run_and_verify_svn(["Committing transaction...\n",
                                      "Committed revision 2.\n"], [],
                                     'rm', '-m', 'delete iota URL',
                                     iota_URL)

  # Issue 4074, deleting a root url SEGV.
  expected_error = 'svn: E170000: .*not within a repository'
  svntest.actions.run_and_verify_svn([], expected_error,
                                     'rm', sbox.repo_url,
                                     '--message', 'delete root')
+
+#----------------------------------------------------------------------
+
def basic_checkout_deleted(sbox):
  "checkout a path no longer in HEAD"

  sbox.build()
  wc_dir = sbox.wc_dir

  # Remove A/D from HEAD via a committed delete.
  D_path = sbox.ospath('A/D')
  svntest.actions.run_and_verify_svn(None, [], 'rm', '--force', D_path)

  expected_output = wc.State(wc_dir, {
    'A/D' : Item(verb='Deleting'),
    })

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.remove('A/D', 'A/D/G', 'A/D/G/rho', 'A/D/G/pi',
                         'A/D/G/tau', 'A/D/H', 'A/D/H/chi', 'A/D/H/psi',
                         'A/D/H/omega', 'A/D/gamma')

  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output, expected_status)

  # A checkout of revision 1 must still be able to reach the
  # now-deleted path via an explicit peg revision.
  url = sbox.repo_url + '/A/D'
  wc2 = sbox.ospath('new_D')
  svntest.actions.run_and_verify_svn(None, [], 'co', '-r', '1',
                                     url + "@1", wc2)
+
+#----------------------------------------------------------------------
+
+# Issue 846, changing a deleted file to an added directory was not
+# supported before WC-NG. But we can handle it.
@Issue(846)
def basic_node_kind_change(sbox):
  "attempt to change node kind"

  sbox.build()
  wc_dir = sbox.wc_dir

  # Schedule a file for deletion
  gamma_path = sbox.ospath('A/D/gamma')
  svntest.main.run_svn(None, 'rm', gamma_path)

  # Status shows deleted file
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/D/gamma', status='D ')
  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Create a directory in place of the scheduled-for-deletion file.
  # No error expected: this succeeds and produces a replacement.
  svntest.actions.run_and_verify_svn(None, [], 'mkdir', gamma_path)

  # Status is replaced
  expected_status.tweak('A/D/gamma', status='R ')
  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Commit the replacement (creates r2).
  expected_output = wc.State(wc_dir, {
    'A/D/gamma' : Item(verb='Replacing'),
    })
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/D/gamma', status='  ', wc_rev='2')
  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output, expected_status)

  # Try and fail to create a directory (file deleted); AnyOutput on
  # stderr means an error is expected here.
  svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
                                     'mkdir', gamma_path)

  # Status is unchanged
  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Update to finally get rid of file
  svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)

  # mkdir should succeed
  svntest.actions.run_and_verify_svn(None, [], 'rm', gamma_path)
  svntest.actions.run_and_verify_svn(None, [], 'mkdir', gamma_path)

  expected_status.tweak(wc_rev=2)
  expected_status.add({
    'A/D/gamma' : Item(status='R ', wc_rev=2),
    })
  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+
def basic_import(sbox):
  "basic import of single new file"

  sbox.build()
  wc_dir = sbox.wc_dir

  # create a new directory with files of various permissions
  new_path = sbox.ospath('new_file')

  svntest.main.file_append(new_path, "some text")

  # import new files into repository; intermediate dirA/dirB are
  # created by the import itself.
  url = sbox.repo_url + "/dirA/dirB/new_file"
  exit_code, output, errput = svntest.actions.run_and_verify_svn(
    None, [], 'import',
    '-m', 'Log message for new import', new_path, url)

  # The final output line must announce the committed revision.
  # (Note: pop() consumes the last line of `output'.)
  lastline = output.pop().strip()
  cm = re.compile("(Committed|Imported) revision [0-9]+.")
  match = cm.search(lastline)
  if not match:
    ### we should raise a less generic error here. which?
    raise svntest.Failure

  # remove (uncontrolled) local file
  os.remove(new_path)

  # Create expected disk tree for the update (disregarding props)
  expected_disk = svntest.main.greek_state.copy()
  expected_disk.add({
    'dirA/dirB/new_file' : Item('some text'),
    })

  # Create expected status tree for the update (disregarding props).
  # Newly imported file should be at revision 2.
  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
  expected_status.add({
    'dirA' : Item(status='  ', wc_rev=2),
    'dirA/dirB' : Item(status='  ', wc_rev=2),
    'dirA/dirB/new_file' : Item(status='  ', wc_rev=2),
    })

  # Create expected output tree for the update.
  expected_output = svntest.wc.State(wc_dir, {
    'dirA' : Item(status='A '),
    'dirA/dirB' : Item(status='A '),
    'dirA/dirB/new_file' : Item(status='A '),
    })

  # do update and check three ways
  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        expected_status,
                                        [], True)
+
+#----------------------------------------------------------------------
+
def basic_cat(sbox):
  "basic cat of files"

  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  mu_path = sbox.ospath('A/mu')

  # Modify the working file, then check that 'svn cat' still yields
  # the pristine repository text rather than the local modification.
  svntest.main.file_append(mu_path, "some text")
  pristine_text = ["This is the file 'mu'.\n"]
  svntest.actions.run_and_verify_svn(pristine_text, [], 'cat', mu_path)
+
+
+#----------------------------------------------------------------------
+
def basic_ls(sbox):
  'basic ls'

  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  # Even on Windows, 'svn ls' output uses forward slashes, so that's
  # what the expected listings below use.

  # ls with an implicit '.' target, run from inside the working copy.
  cwd = os.getcwd()
  os.chdir(wc_dir)
  svntest.actions.run_and_verify_svn(["A/\n", "iota\n"],
                                     [], 'ls')
  os.chdir(cwd)

  # The same listing via an explicit target.
  svntest.actions.run_and_verify_svn(['A/\n', 'iota\n'],
                                     [], 'ls', wc_dir)

  # Listing a subdirectory, both at WORKING and at BASE.
  A_listing = ['B/\n', 'C/\n', 'D/\n', 'mu\n']
  svntest.actions.run_and_verify_svn(A_listing, [], 'ls',
                                     sbox.ospath('A'))
  svntest.actions.run_and_verify_svn(A_listing, [], 'ls', '-r', 'BASE',
                                     sbox.ospath('A'))

  # Listing a single file.
  svntest.actions.run_and_verify_svn(['mu\n'], [], 'ls',
                                     sbox.ospath('A/mu'))

  # Recursive listing.
  B_recursive = ['E/\n', 'E/alpha\n', 'E/beta\n', 'F/\n', 'lambda\n']
  svntest.actions.run_and_verify_svn(B_recursive, [], 'ls', '-R',
                                     sbox.ospath('A/B'))
+
+
+#----------------------------------------------------------------------
def nonexistent_repository(sbox):
  "'svn log file:///nonexistent_path' should fail"

  # Historically, 'svn log file:///nonexistent_path' went into an
  # infinite loop instead of failing immediately:
  # svn_ra_local__split_URL() used svn_path_split() to lop off
  # components looking for a repository, relying on svn_path_is_empty()
  # to terminate.  After the path functions changed (around r3113),
  # splitting "/" kept yielding "/" forever.  Fixed in r3150 by
  # checking for "/" explicitly in svn_ra_local__split_URL() — check
  # the logs in case that fix moved later.
  #
  # This test always operates on a file:/// path.  If someone actually
  # has "/nonexistent_path" in their root directory, the test could
  # fail, and that's just too bad :-).
  exit_code, output, errput = svntest.actions.run_and_verify_svn(
    None, svntest.verify.AnyOutput,
    'log', 'file:///nonexistent_path')

  expected_re = ".*Unable to connect to a repository at URL.*"
  for line in errput:
    if re.match(expected_re, line):
      return

  # The expected error never appeared, so the test failed.
  raise svntest.main.SVNUnmatchedError
+
+
+#----------------------------------------------------------------------
+# Issue 1064. This test is only useful if running over a non-local RA
+# with authentication enabled, otherwise it will pass trivially.
@Issue(1064)
def basic_auth_cache(sbox):
  "basic auth caching"

  sbox.build(create_wc = False, read_only = True)
  wc_dir = sbox.wc_dir

  repo_dir = sbox.repo_dir
  repo_url = sbox.repo_url

  # Check out afresh so the working copy holds no cached auth tokens.
  svntest.main.safe_rmtree(wc_dir)
  svntest.actions.run_and_verify_svn(None, [], 'checkout',
                                     repo_url, wc_dir)

  # Used to fail with a "not locked" error on a missing directory.
  svntest.main.safe_rmtree(sbox.ospath('A/B/E'))
  svntest.actions.run_and_verify_svn(None, [], 'status', '-u',
                                     sbox.ospath('A/B'))

  # Used to fail with an "already locked" error on a new directory.
  svntest.actions.run_and_verify_svn(None, [], 'copy',
                                     repo_url + '/A/B/E',
                                     sbox.ospath('A/D/G'))
+
+
+#----------------------------------------------------------------------
def basic_add_ignores(sbox):
  'ignored files in added dirs should not be added'

  # The bug was that
  #
  #   $ svn add dir
  #
  # where dir contains some items that match the ignore list and some
  # do not would add all items, ignored or not.

  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  dir_path = sbox.ospath('dir')
  foo_c_path = os.path.join(dir_path, 'foo.c')
  foo_o_path = os.path.join(dir_path, 'foo.o')

  os.mkdir(dir_path, svntest.main.S_ALL_RX | stat.S_IWUSR)
  # Create the files empty; close the handles immediately to avoid
  # ResourceWarnings and open-handle problems on Windows.
  open(foo_c_path, 'w').close()
  open(foo_o_path, 'w').close()

  exit_code, output, err = svntest.actions.run_and_verify_svn(
    svntest.verify.AnyOutput, [],
    'add', dir_path)

  for line in output:
    # If we see foo.o in the add output, fail the test.
    # (The dot is escaped so that e.g. 'foo_o' cannot match by accident.)
    if re.match(r'^A\s+.*foo\.o$', line):
      raise svntest.verify.SVNUnexpectedOutput

  # Else never matched the unwanted output, so the test passed.
+
+
+#----------------------------------------------------------------------
@Issue(2243)
def basic_add_local_ignores(sbox):
  'ignore files matching local ignores in added dirs'

  # Issue #2243: 'svn add' was not keying off the svn:ignore value.
  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  dir_path = sbox.ospath('dir')
  file_path = os.path.join(dir_path, 'app.lock')

  svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
                                     'mkdir', dir_path)
  svntest.main.run_svn(None, 'propset', 'svn:ignore', '*.lock', dir_path)
  # Create the ignored file empty; close the handle right away.
  open(file_path, 'w').close()
  # Expect no output: app.lock matches the directory's svn:ignore pattern.
  svntest.actions.run_and_verify_svn([], [],
                                     'add', '--force', dir_path)
+
+#----------------------------------------------------------------------
def basic_add_no_ignores(sbox):
  'add ignored files in added dirs'

  # add ignored files using the '--no-ignore' option
  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  dir_path = sbox.ospath('dir')
  foo_c_path = os.path.join(dir_path, 'foo.c')
  # add a few files that match the default ignore patterns
  foo_o_path = os.path.join(dir_path, 'foo.o')
  foo_lo_path = os.path.join(dir_path, 'foo.lo')
  foo_rej_path = os.path.join(dir_path, 'foo.rej')

  os.mkdir(dir_path, svntest.main.S_ALL_RX | stat.S_IWUSR)
  # Create the files empty; close each handle right away to avoid
  # ResourceWarnings and open-handle problems on Windows.
  for path in (foo_c_path, foo_o_path, foo_lo_path, foo_rej_path):
    open(path, 'w').close()

  exit_code, output, err = svntest.actions.run_and_verify_svn(
    svntest.verify.AnyOutput, [],
    'add', '--no-ignore', dir_path)

  for line in output:
    # If we don't see ignores in the add output, fail the test.
    # (Dots in the filenames are escaped so the match is exact.)
    if not re.match(r'^A\s+.*(foo\.(o|rej|lo|c)|dir)$', line):
      raise svntest.verify.SVNUnexpectedOutput
+
+#----------------------------------------------------------------------
def basic_add_parents(sbox):
  'test add --parents'

  sbox.build()
  wc_dir = sbox.wc_dir

  X_path = sbox.ospath('X')
  Y_path = os.path.join(X_path, 'Y')
  Z_path = os.path.join(Y_path, 'Z')
  zeta_path = os.path.join(Z_path, 'zeta')
  omicron_path = os.path.join(Y_path, 'omicron')

  # Create some unversioned directories
  os.mkdir(X_path, svntest.main.S_ALL_RX | stat.S_IWUSR)
  os.mkdir(Y_path, svntest.main.S_ALL_RX | stat.S_IWUSR)
  os.mkdir(Z_path, svntest.main.S_ALL_RX | stat.S_IWUSR)

  # Create new files
  z = open(zeta_path, 'w')
  z.write("This is the file 'zeta'.\n")
  z.close()
  o = open(omicron_path, 'w')
  o.write("This is the file 'omicron'.\n")
  o.close()

  # Add the file, with its parents
  svntest.actions.run_and_verify_svn(None, [], 'add', '--parents',
                                     zeta_path)

  # Build expected state: X, X/Y and X/Y/Z must have been added
  # implicitly by --parents; omicron stays unversioned.
  expected_output = wc.State(wc_dir, {
    'X' : Item(verb='Adding'),
    'X/Y' : Item(verb='Adding'),
    'X/Y/Z' : Item(verb='Adding'),
    'X/Y/Z/zeta' : Item(verb='Adding'),
    })

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.add({
    'X' : Item(status=' ', wc_rev=2),
    'X/Y' : Item(status=' ', wc_rev=2),
    'X/Y/Z' : Item(status=' ', wc_rev=2),
    'X/Y/Z/zeta' : Item(status=' ', wc_rev=2),
    })

  # Commit and verify
  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output,
                                        expected_status)

  # Deleting with --keep-local and re-adding with --parents must also work
  # when the tree already exists on disk but is unversioned again.
  svntest.actions.run_and_verify_svn(None, [],
                                     'rm', X_path, '--keep-local')

  svntest.actions.run_and_verify_svn(None, [],
                                     'add', '--parents', zeta_path)
+
+#----------------------------------------------------------------------
def uri_syntax(sbox):
  'make sure URI syntaxes are parsed correctly'

  sbox.build(create_wc = False, read_only = True)
  local_dir = sbox.wc_dir

  # Revision 6638 made 'svn co http://host' seg fault; this tests the fix.
  # Reuse the sandbox URL's scheme but point at a host that cannot exist.
  repo_url = sbox.repo_url
  scheme = repo_url[:repo_url.find(":")]
  bogus_url = scheme + "://some_nonexistent_host_with_no_trailing_slash"
  svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
                                     'co', bogus_url, local_dir)

  # Different RA layers give different errors for failed checkouts;
  # for us, it's only important to know that it _did_ error (as
  # opposed to segfaulting), so we don't examine the error text.
+
+#----------------------------------------------------------------------
def basic_checkout_file(sbox):
  "trying to check out a file should fail"

  sbox.build(read_only = True)

  iota_url = sbox.repo_url + '/iota'

  # A nonzero exit code is expected; we only care that the right error
  # text shows up somewhere on stderr.
  exit_code, output, errput = svntest.main.run_svn(1, 'co', iota_url)

  if not any(line.find("refers to a file") != -1 for line in errput):
    raise svntest.Failure
+
+#----------------------------------------------------------------------
def basic_info(sbox):
  "basic info command"

  def check_paths(lines, expected_paths):
    "check that paths found on input lines beginning 'Path: ' are as expected"
    paths = []
    for line in lines:
      if line.startswith('Path: '):
        # Strip the 'Path: ' prefix and any trailing newline/whitespace.
        paths.append(line[6:].rstrip())
    if paths != expected_paths:
      logger.warn("Reported paths: %s" % paths)
      logger.warn("Expected paths: %s" % expected_paths)
      raise svntest.Failure

  sbox.build(read_only = True)

  # Run from inside the working copy so plain relative paths are reported.
  # NOTE(review): this chdir is not undone here; presumably the harness
  # restores the CWD between tests -- confirm.
  os.chdir(sbox.wc_dir)

  # Check that "info" works with 0, 1 and more than 1 explicit targets.
  exit_code, output, errput = svntest.main.run_svn(None, 'info')
  check_paths(output, ['.'])
  exit_code, output, errput = svntest.main.run_svn(None, 'info', 'iota')
  check_paths(output, ['iota'])
  exit_code, output, errput = svntest.main.run_svn(None, 'info', 'iota', '.')
  check_paths(output, ['iota', '.'])
+
def repos_root(sbox):
  "check that repos root gets set on checkout"

  def check_repos_root(lines):
    # 'svn info' prints the root on a line of exactly this form.
    expected = "Repository Root: " + sbox.repo_url + "\n"
    for line in lines:
      if line == expected:
        break
    else:
      logger.warn("Bad or missing repository root")
      raise svntest.Failure

  sbox.build(read_only = True)

  # The same repository root must be reported for the WC root, for a
  # directory inside it, and for a file deep inside it.
  targets = [sbox.wc_dir,
             os.path.join(sbox.wc_dir, "A"),
             os.path.join(sbox.wc_dir, "A", "B", "lambda")]
  for target in targets:
    exit_code, output, errput = svntest.main.run_svn(None, "info", target)
    check_repos_root(output)
+
def basic_peg_revision(sbox):
  "checks peg revision on filename with @ sign"

  sbox.build()
  wc_dir = sbox.wc_dir
  repos_dir = sbox.repo_url
  filename = 'abc@abc'
  wc_file = os.path.join(wc_dir, filename)
  url = repos_dir + '/' + filename

  svntest.main.file_append(wc_file, 'xyz\n')
  # We need to escape the @ in the middle of abc@abc by appending another @
  svntest.main.run_svn(None, 'add', wc_file + '@')
  svntest.main.run_svn(None,
                       'ci', '-m', 'secret log msg', wc_file + '@')

  # Without the trailing "@", expect failure: 'abc' after the @ is
  # parsed as a (bogus) peg revision.
  exit_code, output, errlines = svntest.actions.run_and_verify_svn(
    None, ".*Syntax error parsing peg revision 'abc'", 'cat', wc_file)
  exit_code, output, errlines = svntest.actions.run_and_verify_svn(
    None, ".*Syntax error parsing peg revision 'abc'", 'cat', url)

  # With the trailing "@", expect success.
  exit_code, output, errlines = svntest.actions.run_and_verify_svn(
    ["xyz\n"], [], 'cat', wc_file + '@')
  exit_code, output, errlines = svntest.actions.run_and_verify_svn(
    ["xyz\n"], [], 'cat', url + '@')

  # Test with leading @ character in filename.
  filename = '@abc'
  wc_file = os.path.join(wc_dir, filename)
  url = repos_dir + '/' + filename

  svntest.main.file_append(wc_file, 'xyz\n')
  exit_code, output, errlines = svntest.actions.run_and_verify_svn(
    None, [], 'add', wc_file + '@')
  exit_code, output, errlines = svntest.actions.run_and_verify_svn(
    None, [], 'ci', '-m', 'secret log msg', wc_file + '@')

  # With a leading "@" which isn't escaped, expect failure.
  # Note that we just test with filename starting with '@', because
  # wc_file + '@' + filename is a different situation where svn
  # will try to parse filename as a peg revision.
  exit_code, output, errlines = svntest.actions.run_and_verify_svn(
    None, ".*'%s' is just a peg revision.*" % filename,
    'cat', filename)

  # With a leading "@" which is escaped, expect success.
  exit_code, output, errlines = svntest.actions.run_and_verify_svn(
    ["xyz\n"], [], 'cat', wc_file + '@')
  exit_code, output, errlines = svntest.actions.run_and_verify_svn(
    ["xyz\n"], [], 'cat', repos_dir + '/' + filename + '@')
+
def info_nonhead(sbox):
  "info on file not existing in HEAD"
  sbox.build()

  wc_dir = sbox.wc_dir
  repo_url = sbox.repo_url
  fname = sbox.ospath('iota')
  furl = repo_url + "/iota"

  # Remove iota and commit.
  svntest.actions.run_and_verify_svn(None, [],
                                     "delete", fname)
  expected_output = svntest.wc.State(wc_dir, {
    'iota' : Item(verb='Deleting'),
    })
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.remove("iota")
  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output,
                                        expected_status)
  # Get info for old iota at r1.  The peg ('@1') and operative ('-r1')
  # revisions must resolve even though iota is gone from HEAD; we only
  # require that *some* URL is reported.
  expected_infos = [
    { 'URL' : '.*' },
  ]
  svntest.actions.run_and_verify_info(expected_infos, furl + '@1', '-r1')
+
+
+#----------------------------------------------------------------------
+# Issue #2442.
@Issue(2442)
def ls_nonhead(sbox):
  "ls a path no longer in HEAD"

  sbox.build()
  wc_dir = sbox.wc_dir

  # Delete the whole directory A/D/G and commit.
  G_path = sbox.ospath('A/D/G')
  svntest.actions.run_and_verify_svn(None, [], 'rm', G_path)

  expected_output = wc.State(wc_dir, {
    'A/D/G' : Item(verb='Deleting'),
    })

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.remove('A/D/G', 'A/D/G/rho', 'A/D/G/pi', 'A/D/G/tau',)

  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output, expected_status)

  # Check that we can list a file in A/D/G at revision 1, even though
  # the directory no longer exists in HEAD (issue #2442).
  rho_url = sbox.repo_url + "/A/D/G/rho"
  svntest.actions.run_and_verify_svn('.* rho\n', [],
                                     'ls', '--verbose', rho_url + '@1')
+
+
+#----------------------------------------------------------------------
+# Issue #2315.
@Issue(2315)
def cat_added_PREV(sbox):
  "cat added file using -rPREV"

  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir
  f_path = sbox.ospath('f')

  # Schedule a brand-new file for addition; it has no committed revision.
  svntest.main.file_append(f_path, 'new text')
  svntest.actions.run_and_verify_svn(None, [], 'add', f_path)

  # PREV of an added-but-uncommitted file does not exist, so cat must fail.
  expected_err = ".*has no committed revision.*"
  svntest.actions.run_and_verify_svn(None, expected_err,
                                     'cat', '-rPREV', f_path)
+
+# Issue #2612.
@Issue(2612)
def ls_space_in_repo_name(sbox):
  'basic ls of repos with space in name'

  # The repository name itself contains spaces (issue #2612).
  sbox.build(name = "repo with spaces")
  wc_dir = sbox.wc_dir

  expected = ['A/\n', 'iota\n']
  svntest.actions.run_and_verify_svn(expected, [], 'ls', sbox.repo_url)
+
+
def delete_keep_local(sbox):
  'delete file and directory with --keep-local'

  sbox.build()
  wc_dir = sbox.wc_dir
  iota_path = sbox.ospath('iota')
  C_path = sbox.ospath('A/C')

  # Remove file iota
  svntest.actions.run_and_verify_svn(None, [], 'rm', '--keep-local',
                                     iota_path)

  # Remove directory 'A/C'
  svntest.actions.run_and_verify_svn(None, [], 'rm', '--keep-local',
                                     C_path)

  # Commit changes
  expected_output = wc.State(wc_dir, {
    'iota' : Item(verb='Deleting'),
    'A/C' : Item(verb='Deleting'),
    })

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.remove('iota')
  expected_status.remove('A/C')

  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output,
                                        expected_status)

  # Update and check that the on-disk tree is still the full greek tree:
  # --keep-local must have left iota and A/C on disk even though they
  # are now deleted in the repository.
  expected_disk = svntest.main.greek_state.copy()
  expected_output = svntest.wc.State(wc_dir, {})
  expected_status.tweak(wc_rev = 2)

  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        expected_status)
+
def delete_keep_local_twice(sbox):
  'delete file and directory with --keep-local twice'

  sbox.build()
  wc_dir = sbox.wc_dir
  dir = sbox.ospath('dir')

  svntest.actions.run_and_verify_svn(None, [], 'mkdir', dir)

  # Deleting with --keep-local twice in a row must succeed both times
  # and leave the on-disk directory alone.
  for _ in range(2):
    svntest.actions.run_and_verify_svn(None, [], 'rm', '--keep-local', dir)

  if not os.path.isdir(dir):
    logger.warn('Directory was really deleted')
    raise svntest.Failure
+
@XFail(svntest.main.is_mod_dav_url_quoting_broken)
def special_paths_in_repos(sbox):
  "use folders with names like 'c:hi'"

  sbox.build(create_wc = False)
  # Any small existing file will do as upload content; the repository's
  # own 'format' file is convenient.
  test_file_source = os.path.join(sbox.repo_dir, 'format')
  repo_url = sbox.repo_url

  # Directory names containing characters that need URI-escaping or are
  # special on some OS / web-server combinations.
  test_urls = [ sbox.repo_url + '/c:hi',
                sbox.repo_url + '/C:',
                sbox.repo_url + '/C&',
                sbox.repo_url + '/C<',
                sbox.repo_url + '/C# hi',
                sbox.repo_url + '/C?',
                sbox.repo_url + '/C+',
                sbox.repo_url + '/C%']

  # On Windows Apache HTTPD breaks '\' for us :(
  if not (svntest.main.is_os_windows() and
          svntest.main.is_ra_type_dav()):
    test_urls += [ sbox.repo_url + '/C\\ri' ]

  for test_url in test_urls:
    # A file inside the directory, reusing the same problematic basename.
    test_file_url = test_url + '/' + test_url[test_url.rindex('/')+1:]

    # do some manipulations on a folder with problematic names
    svntest.actions.run_and_verify_svn(None, [],
                                       'mkdir', '-m', 'log_msg',
                                       test_url)

    svntest.actions.run_and_verify_svnmucc(None, [],
                                           '-m', 'log_msg',
                                           'put', test_file_source,
                                           test_file_url)

    svntest.actions.run_and_verify_svnmucc(None, [],
                                           'propset', '-m', 'log_msg',
                                           'propname', 'propvalue', test_url)

    svntest.actions.run_and_verify_svn('propvalue', [],
                                       'propget', 'propname', test_url)

    svntest.actions.run_and_verify_svnmucc(None, [],
                                           'propset', '-m', 'log_msg',
                                           'propname', 'propvalue', test_file_url)

    svntest.actions.run_and_verify_svn('propvalue', [],
                                       'propget', 'propname', test_file_url)

    svntest.actions.run_and_verify_svn(None, [],
                                       'rm', '-m', 'log_msg',
                                       test_file_url)

    svntest.actions.run_and_verify_svn(None, [],
                                       'rm', '-m', 'log_msg',
                                       test_url)
+
+
def basic_rm_urls_one_repo(sbox):
  "remotely remove directories from one repository"

  sbox.build()
  repo_url = sbox.repo_url
  wc_dir = sbox.wc_dir

  # Test 1: remotely delete one directory (a direct URL commit)
  E_url = repo_url + '/A/B/E'

  svntest.actions.run_and_verify_svn(None, [],
                                     'rm', '-m', 'log_msg',
                                     E_url)

  # Create expected trees and update
  expected_output = svntest.wc.State(wc_dir, {
    'A/B/E' : Item(status='D '),
    })
  expected_disk = svntest.main.greek_state.copy()
  expected_disk.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta')

  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
  expected_status.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta')

  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        expected_status)

  # Test 2: remotely delete two directories in the same repository
  # (both deletions must land in a single new revision, r3)
  F_url = repo_url + '/A/B/F'
  C_url = repo_url + '/A/C'

  svntest.actions.run_and_verify_svn(None, [],
                                     'rm', '-m', 'log_msg',
                                     F_url, C_url)

  # Create expected output tree for an update of wc_backup.
  expected_output = svntest.wc.State(wc_dir, {
    'A/B/F' : Item(status='D '),
    'A/C' : Item(status='D '),
    })

  # Create expected disk tree for the update
  expected_disk.remove('A/B/F', 'A/C')

  # Create expected status tree for the update.
  expected_status.tweak(wc_rev = 3)
  expected_status.remove('A/B/F', 'A/C')

  # Do the update and check the results in three ways.
  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        expected_status)
+
+# Test for issue #1199
@Issue(1199)
def basic_rm_urls_multi_repos(sbox):
  "remotely remove directories from two repositories"

  sbox.build()
  repo_url = sbox.repo_url
  repo_dir = sbox.repo_dir
  wc_dir = sbox.wc_dir

  # create a second repository and working copy
  other_repo_dir, other_repo_url = sbox.add_repo_path("other")
  svntest.main.copy_repos(repo_dir, other_repo_dir, 1, 1)
  other_wc_dir = sbox.add_wc_path("other")
  svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], "co",
                                     other_repo_url,
                                     other_wc_dir)

  # Remotely delete two x two directories in the two repositories
  # with a single 'svn rm' invocation (issue #1199).
  F_url = repo_url + '/A/B/F'
  C_url = repo_url + '/A/C'
  F2_url = other_repo_url + '/A/B/F'
  C2_url = other_repo_url + '/A/C'

  svntest.actions.run_and_verify_svn(None, [], 'rm', '-m', 'log_msg',
                                     F_url, C_url, F2_url, C2_url)

  # Check that the two rm's to each of the repositories were handled in one
  # revision (per repo)
  expected_output = svntest.wc.State(wc_dir, {
    'A/B/F' : Item(status='D '),
    'A/C' : Item(status='D '),
    })
  expected_disk = svntest.main.greek_state.copy()
  expected_disk.remove('A/B/F', 'A/C')
  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
  expected_status.remove('A/B/F', 'A/C')

  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        expected_status)

  # Same check against the second repository's working copy.
  expected_status = svntest.actions.get_virginal_state(other_wc_dir, 2)
  expected_status.remove('A/B/F', 'A/C')
  expected_output = svntest.wc.State(other_wc_dir, {
    'A/B/F' : Item(status='D '),
    'A/C' : Item(status='D '),
    })

  svntest.actions.run_and_verify_update(other_wc_dir,
                                        expected_output,
                                        expected_disk,
                                        expected_status)
+
+#-----------------------------------------------------------------------
def automatic_conflict_resolution(sbox):
  "automatic conflict resolution"

  sbox.build()
  wc_dir = sbox.wc_dir

  # Make a backup copy of the working copy
  wc_backup = sbox.add_wc_path('backup')
  svntest.actions.duplicate_dir(wc_dir, wc_backup)

  # Make a couple of local mods to files which will be committed
  mu_path = sbox.ospath('A/mu')
  lambda_path = sbox.ospath('A/B/lambda')
  rho_path = sbox.ospath('A/D/G/rho')
  tau_path = sbox.ospath('A/D/G/tau')
  omega_path = sbox.ospath('A/D/H/omega')
  svntest.main.file_append(mu_path, 'Original appended text for mu\n')
  svntest.main.file_append(lambda_path, 'Original appended text for lambda\n')
  svntest.main.file_append(rho_path, 'Original appended text for rho\n')
  svntest.main.file_append(tau_path, 'Original appended text for tau\n')
  svntest.main.file_append(omega_path, 'Original appended text for omega\n')

  # Make a couple of local mods to files which will be conflicted
  mu_path_backup = os.path.join(wc_backup, 'A', 'mu')
  lambda_path_backup = os.path.join(wc_backup, 'A', 'B', 'lambda')
  rho_path_backup = os.path.join(wc_backup, 'A', 'D', 'G', 'rho')
  tau_path_backup = os.path.join(wc_backup, 'A', 'D', 'G', 'tau')
  omega_path_backup = os.path.join(wc_backup, 'A', 'D', 'H', 'omega')
  svntest.main.file_append(mu_path_backup,
                           'Conflicting appended text for mu\n')
  svntest.main.file_append(lambda_path_backup,
                           'Conflicting appended text for lambda\n')
  svntest.main.file_append(rho_path_backup,
                           'Conflicting appended text for rho\n')
  svntest.main.file_append(tau_path_backup,
                           'Conflicting appended text for tau\n')
  svntest.main.file_append(omega_path_backup,
                           'Conflicting appended text for omega\n')

  # Created expected output tree for 'svn ci'
  expected_output = wc.State(wc_dir, {
    'A/mu' : Item(verb='Sending'),
    'A/B/lambda' : Item(verb='Sending'),
    'A/D/G/rho' : Item(verb='Sending'),
    'A/D/G/tau' : Item(verb='Sending'),
    'A/D/H/omega' : Item(verb='Sending'),
    })

  # Create expected status tree; all local revisions should be at 1,
  # but the modified files should be at revision 2.
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/mu', 'A/B/lambda', 'A/D/G/rho', 'A/D/G/tau',
                        'A/D/H/omega', wc_rev=2)

  # Commit.
  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status)

  # Create expected output tree for an update of the wc_backup.
  expected_output = wc.State(wc_backup, {
    'A/mu' : Item(status='C '),
    'A/B/lambda' : Item(status='C '),
    'A/D/G/rho' : Item(status='C '),
    'A/D/G/tau' : Item(status='C '),
    'A/D/H/omega' : Item(status='C '),
    })

  # Create expected disk tree for the update: every file carries
  # three-way conflict markers.
  expected_disk = svntest.main.greek_state.copy()
  expected_disk.tweak('A/B/lambda',
                      contents="\n".join(["This is the file 'lambda'.",
                                          "<<<<<<< .mine",
                                          "Conflicting appended text for lambda",
                                          "||||||| .r1",
                                          "=======",
                                          "Original appended text for lambda",
                                          ">>>>>>> .r2",
                                          ""]))
  expected_disk.tweak('A/mu',
                      contents="\n".join(["This is the file 'mu'.",
                                          "<<<<<<< .mine",
                                          "Conflicting appended text for mu",
                                          "||||||| .r1",
                                          "=======",
                                          "Original appended text for mu",
                                          ">>>>>>> .r2",
                                          ""]))
  expected_disk.tweak('A/D/G/rho',
                      contents="\n".join(["This is the file 'rho'.",
                                          "<<<<<<< .mine",
                                          "Conflicting appended text for rho",
                                          "||||||| .r1",
                                          "=======",
                                          "Original appended text for rho",
                                          ">>>>>>> .r2",
                                          ""]))
  expected_disk.tweak('A/D/G/tau',
                      contents="\n".join(["This is the file 'tau'.",
                                          "<<<<<<< .mine",
                                          "Conflicting appended text for tau",
                                          "||||||| .r1",
                                          "=======",
                                          "Original appended text for tau",
                                          ">>>>>>> .r2",
                                          ""]))
  expected_disk.tweak('A/D/H/omega',
                      contents="\n".join(["This is the file 'omega'.",
                                          "<<<<<<< .mine",
                                          "Conflicting appended text for omega",
                                          "||||||| .r1",
                                          "=======",
                                          "Original appended text for omega",
                                          ">>>>>>> .r2",
                                          ""]))

  # Create expected status tree for the update.
  expected_status = svntest.actions.get_virginal_state(wc_backup, 2)
  expected_status.tweak('A/mu', 'A/B/lambda', 'A/D/G/rho', 'A/D/G/tau',
                        'A/D/H/omega', status='C ')

  # "Extra" files that we expect to result from the conflicts.
  # These are expressed as a list of regexps.  (Raw strings, so the
  # escaped dots do not trigger invalid-escape warnings.)
  extra_files = [r'mu.*\.r1', r'mu.*\.r2', r'mu.*\.mine',
                 r'lambda.*\.r1', r'lambda.*\.r2', r'lambda.*\.mine',
                 r'omega.*\.r1', r'omega.*\.r2', r'omega.*\.mine',
                 r'rho.*\.r1', r'rho.*\.r2', r'rho.*\.mine',
                 r'tau.*\.r1', r'tau.*\.r2', r'tau.*\.mine',
                 ]

  # Do the update and check the results in three ways.
  # All "extra" files are passed to detect_conflict_files().
  svntest.actions.run_and_verify_update(wc_backup,
                                        expected_output,
                                        expected_disk,
                                        expected_status,
                                        extra_files=extra_files)

  # So now all five files are in a "conflicted" state.  Run 'svn
  # resolve' with the respective "--accept[mine|orig|repo]" flag.

  # But first, check --accept actions that 'resolve' must reject.
  svntest.actions.run_and_verify_svn(# stdout, stderr
                                     None,
                                     ".*invalid 'accept' ARG",
                                     'resolve', '--accept=postpone')
  svntest.actions.run_and_verify_svn(# stdout, stderr
                                     None,
                                     ".*invalid 'accept' ARG",
                                     'resolve', '--accept=edit',
                                     '--force-interactive')
  svntest.actions.run_and_verify_svn(# stdout, stderr
                                     None,
                                     ".*invalid 'accept' ARG",
                                     'resolve', '--accept=launch',
                                     '--force-interactive')
  # Run 'svn resolve --accept=NOPE'.  Using omega for the test.
  svntest.actions.run_and_verify_svn(None,
                                     ".*NOPE' is not a valid --accept value",
                                     'resolve',
                                     '--accept=NOPE',
                                     omega_path_backup)

  # Resolve lambda, mu, rho and tau with different --accept options;
  # omega is deliberately left conflicted.
  svntest.actions.run_and_verify_svn(None, [],
                                     'resolve', '--accept=base',
                                     lambda_path_backup)
  svntest.actions.run_and_verify_svn(None, [],
                                     'resolve',
                                     '--accept=mine-full',
                                     mu_path_backup)
  svntest.actions.run_and_verify_svn(None, [],
                                     'resolve',
                                     '--accept=theirs-full',
                                     rho_path_backup)
  # For --accept=working, first replace tau's content by hand.
  fp = open(tau_path_backup, 'w')
  fp.write("Resolution text for 'tau'.\n")
  fp.close()
  svntest.actions.run_and_verify_svn(None, [],
                                     'resolve',
                                     '--accept=working',
                                     tau_path_backup)

  # Set the expected disk contents for the test
  expected_disk = svntest.main.greek_state.copy()

  expected_disk.tweak('A/B/lambda', contents="This is the file 'lambda'.\n")
  expected_disk.tweak('A/mu', contents="This is the file 'mu'.\n"
                      "Conflicting appended text for mu\n")
  expected_disk.tweak('A/D/G/rho', contents="This is the file 'rho'.\n"
                      "Original appended text for rho\n")
  expected_disk.tweak('A/D/G/tau', contents="Resolution text for 'tau'.\n")
  expected_disk.tweak('A/D/H/omega',
                      contents="\n".join(["This is the file 'omega'.",
                                          "<<<<<<< .mine",
                                          "Conflicting appended text for omega",
                                          "||||||| .r1",
                                          "=======",
                                          "Original appended text for omega",
                                          ">>>>>>> .r2",
                                          ""]))

  # Set the expected extra files for the test (only omega is still
  # conflicted, so only its conflict files remain).
  extra_files = [r'omega.*\.r1', r'omega.*\.r2', r'omega.*\.mine',]

  # Set the expected status for the test
  expected_status = svntest.actions.get_virginal_state(wc_backup, 2)
  expected_status.tweak('A/mu', 'A/B/lambda', 'A/D/G/rho', 'A/D/G/tau',
                        'A/D/H/omega', wc_rev=2)

  expected_status.tweak('A/mu', status='M ')
  expected_status.tweak('A/B/lambda', status='M ')
  expected_status.tweak('A/D/G/rho', status=' ')
  expected_status.tweak('A/D/G/tau', status='M ')
  expected_status.tweak('A/D/H/omega', status='C ')

  # Set the expected output for the test
  expected_output = wc.State(wc_backup, {})

  # Do the update and check the results in three ways.
  svntest.actions.run_and_verify_update(wc_backup,
                                        expected_output,
                                        expected_disk,
                                        expected_status,
                                        extra_files=extra_files)
+
def info_nonexisting_file(sbox):
  "get info on a file not in the repo"

  sbox.build(create_wc = False, read_only = True)
  idonotexist_url = sbox.repo_url + '/IdoNotExist'
  exit_code, output, errput = svntest.main.run_svn(1, 'info', idonotexist_url)

  # Check for the correct error message
  pattern = ".*" + idonotexist_url + ".*non-existent in revision 1.*"
  if not any(re.match(pattern, line) for line in errput):
    # Never matched the expected error output, so the test failed.
    raise svntest.main.SVNUnmatchedError
+
+
+#----------------------------------------------------------------------
+# Relative urls
+#
+# Use blame to test three specific cases for relative url support.
def basic_relative_url_using_current_dir(sbox):
  "basic relative url target using current dir"

  # We'll use blame to test relative url parsing
  sbox.build()

  # First, make a new revision of iota.
  iota = sbox.ospath('iota')
  svntest.main.file_append(iota, "New contents for iota\n")
  svntest.main.run_svn(None, 'ci',
                       '-m', '', iota)

  expected_output = [
    " 1 jrandom This is the file 'iota'.\n",
    " 2 jrandom New contents for iota\n",
    ]

  # A '^/' URL is resolved against the repository root of the working
  # copy the current directory belongs to, so chdir into the WC first.
  orig_dir = os.getcwd()
  os.chdir(sbox.wc_dir)

  exit_code, output, error = svntest.actions.run_and_verify_svn(expected_output, [],
                                                                'blame', '^/iota')

  os.chdir(orig_dir)
+
def basic_relative_url_using_other_targets(sbox):
  "basic relative url target using other targets"

  sbox.build()

  # First, make a new revision of iota.
  iota = sbox.ospath('iota')
  svntest.main.file_append(iota, "New contents for iota\n")
  svntest.main.run_svn(None, 'ci',
                       '-m', '', iota)

  # Now, make a new revision of A/mu .
  mu = sbox.ospath('A/mu')
  mu_url = sbox.repo_url + '/A/mu'

  svntest.main.file_append(mu, "New contents for mu\n")
  svntest.main.run_svn(None, 'ci',
                       '-m', '', mu)


  expected_output = [
    " 1 jrandom This is the file 'iota'.\n",
    " 2 jrandom New contents for iota\n",
    " 1 jrandom This is the file 'mu'.\n",
    " 3 jrandom New contents for mu\n",
    ]

  # Here '^/iota' is resolved against the repository of the *other*
  # explicit URL target (mu_url) on the same command line.
  exit_code, output, error = svntest.actions.run_and_verify_svn(expected_output, [], 'blame',
                                                                '^/iota', mu_url)
+
def basic_relative_url_multi_repo(sbox):
  "basic relative url target with multiple repos"

  sbox.build()
  repo_url1 = sbox.repo_url
  repo_dir1 = sbox.repo_dir
  wc_dir1 = sbox.wc_dir

  repo_dir2, repo_url2 = sbox.add_repo_path("other")
  svntest.main.copy_repos(repo_dir1, repo_dir2, 1, 1)
  wc_dir2 = sbox.add_wc_path("other")
  svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], "co",
                                     repo_url2,
                                     wc_dir2)

  # Don't bother with making new revisions, the command should not work:
  # a '^/' target is ambiguous when the other targets span two different
  # repositories, so svn must error out rather than guess.
  iota_url_repo1 = repo_url1 + '/iota'
  iota_url_repo2 = repo_url2 + '/iota'

  exit_code, output, error = svntest.actions.run_and_verify_svn([],
                                                                svntest.verify.AnyOutput, 'blame',
                                                                '^/A/mu', iota_url_repo1, iota_url_repo2)
+
def basic_relative_url_non_canonical(sbox):
  "basic relative url non-canonical targets"

  sbox.build()

  iota_url = sbox.repo_url + '/iota'

  # 'ls' output for '^/A/' plus the explicit iota target.
  expected_output = [
    "B/\n",
    "C/\n",
    "D/\n",
    "mu\n",
    "iota\n"
    ]

  # Canonical form.
  exit_code, output, error = svntest.actions.run_and_verify_svn(expected_output, [], 'ls',
                                                                '^/A/', iota_url)

  # Non-canonical form ('^//' with a doubled slash) must work the same.
  exit_code, output, error = svntest.actions.run_and_verify_svn(expected_output, [], 'ls',
                                                                '^//A/', iota_url)
+
def basic_relative_url_with_peg_revisions(sbox):
  "basic relative url targets with peg revisions"

  sbox.build()

  # First, make a new revision of iota.
  iota = sbox.ospath('iota')
  svntest.main.file_append(iota, "New contents for iota\n")
  svntest.main.run_svn(None, 'ci',
                       '-m', '', iota)

  iota_url = sbox.repo_url + '/iota'

  # Now, make a new revision of A/mu .
  mu = sbox.ospath('A/mu')
  mu_url = sbox.repo_url + '/A/mu'

  svntest.main.file_append(mu, "New contents for mu\n")
  svntest.main.run_svn(None, 'ci', '-m', '', mu)

  # Delete the file from the current revision; the peg revision below
  # must still find it at r3.
  svntest.main.run_svn(None, 'rm', '-m', '', mu_url)

  expected_output = [
    "B/\n",
    "C/\n",
    "D/\n",
    "mu\n",
    "iota\n"
    ]

  # Canonical version with peg revision
  exit_code, output, error = svntest.actions.run_and_verify_svn(expected_output, [], 'ls', '-r3',
                                                                '^/A/@3', iota_url)

  # Non-canonical version with peg revision
  exit_code, output, error = svntest.actions.run_and_verify_svn(expected_output, [], 'ls', '-r3',
                                                                '^//A/@3', iota_url)
+
+
def basic_auth_test_xfail_predicate():
  """Predicate for XFail for basic_auth_test:
  The test will fail if plaintext password storage is disabled,
  and the RA method requires authentication."""
  if svntest.main.is_os_windows():
    return False
  return (svntest.main.is_ra_type_dav()
          and svntest.main.is_plaintext_password_storage_disabled())
+
+# Issue 2242, auth cache picking up password from wrong username entry
@Issue(2242)
@XFail(basic_auth_test_xfail_predicate)
def basic_auth_test(sbox):
  "basic auth test"

  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  # Set up a custom config directory so the auth cache starts empty.
  config_dir = sbox.create_config_dir()

  # Checkout with jrandom
  exit_code, output, errput = svntest.main.run_command(
    svntest.main.svn_binary, None, True, 'co', sbox.repo_url, wc_dir,
    '--username', 'jrandom', '--password', 'rayjandom',
    '--config-dir', config_dir)

  # Non-interactive checkout should now succeed from the cached password.
  exit_code, output, errput = svntest.main.run_command(
    svntest.main.svn_binary, None, True, 'co', sbox.repo_url, wc_dir,
    '--username', 'jrandom', '--non-interactive', '--config-dir', config_dir)

  # Checkout with jconstant
  exit_code, output, errput = svntest.main.run_command(
    svntest.main.svn_binary, None, True, 'co', sbox.repo_url, wc_dir,
    '--username', 'jconstant', '--password', 'rayjandom',
    '--config-dir', config_dir)

  exit_code, output, errput = svntest.main.run_command(
    svntest.main.svn_binary, None, True, 'co', sbox.repo_url, wc_dir,
    '--username', 'jconstant', '--non-interactive',
    '--config-dir', config_dir)

  # Checkout with jrandom which should fail since we do not provide
  # a password and the above cached password belongs to jconstant
  # (issue #2242: the cache must not hand out the wrong user's password).
  expected_err = ["authorization failed: Could not authenticate to server:"]
  exit_code, output, errput = svntest.main.run_command(
    svntest.main.svn_binary, expected_err, True, 'co', sbox.repo_url, wc_dir,
    '--username', 'jrandom', '--non-interactive', '--config-dir', config_dir)
+
def basic_add_svn_format_file(sbox):
  'test add --parents .svn/format'

  sbox.build()
  wc_dir = sbox.wc_dir

  # Path to the 'format' file inside the administrative (.svn) area.
  format_path = os.path.join(wc_dir, svntest.main.get_admin_name(), 'format')

  # Nothing in the working copy may end up scheduled for addition.
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)

  # The .svn directory and the format file should not be added as this
  # breaks the administrative area handling, so we expect some error here
  svntest.actions.run_and_verify_svn(None,
                                     ".*reserved name.*",
                                     'add', '--parents', format_path)

  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
# Issue 2586, Unhelpful error message: Unrecognized URL scheme for ''
# See also input_validation_tests.py:invalid_mkdir_targets(), which tests
# the same thing the other way around.
@Issue(2586)
def basic_mkdir_mix_targets(sbox):
  "mkdir mix url and local path should error"

  sbox.build()
  url_target = sbox.repo_url + '/Y'
  wc_target = 'subdir'
  expected_error = "svn: E200009: Cannot mix repository and working copy targets"

  # One URL target plus one local-path target in the same mkdir must fail.
  svntest.actions.run_and_verify_svn(None, expected_error,
                                     'mkdir', '-m', 'log_msg',
                                     url_target, wc_target)
+
def delete_from_url_with_spaces(sbox):
  "delete a directory with ' ' using its url"

  sbox.build()
  # Create directories whose names (and a parent) contain spaces.
  for new_dir in ('Dir With Spaces', 'Dir With', 'Dir With/Spaces'):
    sbox.simple_mkdir(new_dir)

  svntest.actions.run_and_verify_svn(None, [],
                                     'ci', sbox.wc_dir, '-m', 'Added dir')

  # This fails on 1.6.11 with an escaping error.
  # Delete each by its URI-encoded URL, one commit per deletion.
  for encoded_tail in ('/Dir%20With%20Spaces', '/Dir%20With/Spaces'):
    svntest.actions.run_and_verify_svn(None, [],
                                       'rm', sbox.repo_url + encoded_tail,
                                       '-m', 'Deleted')
+
@SkipUnless(svntest.main.is_ra_type_dav)
def meta_correct_library_being_used(sbox):
  "verify that neon/serf are compiled if tested"
  # Meta-test: 'svn --version' must list the HTTP RA library that this
  # test run claims to exercise (per the --http-library option).
  expected_re = (r'^\* ra_%s :' % svntest.main.options.http_library)
  expected_output = svntest.verify.RegexOutput(expected_re, match_all=False)
  svntest.actions.run_and_verify_svn(expected_output, [], '--version')
+
def delete_and_add_same_file(sbox):
  "commit deletes a file and adds one with same text"
  sbox.build()

  wc_dir = sbox.wc_dir

  iota = sbox.ospath('iota')
  iota2 = sbox.ospath('iota2')

  # iota2 gets byte-identical content, so both nodes share pristine text.
  shutil.copyfile(iota, iota2)

  svntest.main.run_svn(None, 'rm', iota)
  svntest.main.run_svn(None, 'add', iota2)

  expected_output = wc.State(wc_dir, {
    'iota' : Item(verb='Deleting'),
    'iota2' : Item(verb='Adding'),
  })

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.remove('iota')
  expected_status.add({ 'iota2': Item(status=' ', wc_rev='2')})

  # At one time the commit post-processing used to fail with "Pristine text
  # not found".
  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output,
                                        expected_status)
+
def delete_child_parent_update(sbox):
  "rm child, commit, rm parent"

  sbox.build()
  wc_dir = sbox.wc_dir

  # NOTE(review): run_svn()'s first argument is the error-expected flag;
  # passing wc_dir here looks accidental (it merely disables error
  # checking) — TODO confirm before changing.
  svntest.main.run_svn(wc_dir, 'rm', sbox.ospath('A/B/E/alpha'))

  expected_output = wc.State(wc_dir, {
    'A/B/E/alpha' : Item(verb='Deleting'),
  })

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.remove('A/B/E/alpha')

  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output,
                                        expected_status)

  # Now delete the parent directory; E and its remaining child show 'D '.
  svntest.main.run_svn(wc_dir, 'rm', sbox.ospath('A/B/E'))
  expected_status.tweak('A/B/E', 'A/B/E/beta', status='D ')

  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  expected_disk = svntest.main.greek_state.copy()
  expected_disk.remove('A/B/E/alpha', 'A/B/E/beta', 'A/B/E')

  # This produced a tree-conflict until we fixed issue #3533
  expected_status.tweak(wc_rev=2)
  svntest.actions.run_and_verify_update(wc_dir,
                                        [],
                                        expected_disk,
                                        expected_status)
+
+
+
+#----------------------------------------------------------------------
+
def basic_relocate(sbox):
  "basic relocate of a wc"
  sbox.build(read_only = True)

  wc_dir = sbox.wc_dir
  repo_dir = sbox.repo_dir
  repo_url = sbox.repo_url
  # A second repository (a copy of r1) to relocate to and back from.
  other_repo_dir, other_repo_url = sbox.add_repo_path('other')
  svntest.main.copy_repos(repo_dir, other_repo_dir, 1, 0)

  def _verify_url(wc_path, url):
    # Check via 'svn info' that WC_PATH now points at URL.
    # (uuid_regex is presumably a module-level pattern defined earlier
    # in this file.)
    expected = {'Path' : re.escape(wc_path),
                'URL' : url,
                'Repository Root' : '.*',
                'Revision' : '.*',
                'Node Kind' : 'directory',
                'Repository UUID' : uuid_regex,
               }
    svntest.actions.run_and_verify_info([expected], wc_path)

  # No-op relocation of just the scheme.
  scheme = repo_url[:repo_url.index('://')+3]
  svntest.actions.run_and_verify_svn(None, [], 'switch', '--relocate',
                                     scheme, scheme, wc_dir)
  _verify_url(wc_dir, repo_url)

  # No-op relocation of a bit more of the URL.
  substring = repo_url[:repo_url.index('://')+7]
  svntest.actions.run_and_verify_svn(None, [], 'switch', '--relocate',
                                     substring, substring, wc_dir)
  _verify_url(wc_dir, repo_url)

  # Real relocation to OTHER_REPO_URL.
  svntest.actions.run_and_verify_svn(None, [], 'switch', '--relocate',
                                     repo_url, other_repo_url, wc_dir)
  _verify_url(wc_dir, other_repo_url)

  # ... and back again, using the newer 'svn relocate' subcommand.
  svntest.actions.run_and_verify_svn(None, [], 'relocate',
                                     other_repo_url, repo_url, wc_dir)
  _verify_url(wc_dir, repo_url)

  # To OTHER_REPO_URL again, this time with the single-URL form of
  # 'svn relocate'.
  svntest.actions.run_and_verify_svn(None, [], 'relocate',
                                     other_repo_url, wc_dir)
  _verify_url(wc_dir, other_repo_url)

  ### TODO: When testing ra_dav or ra_svn, do relocations between
  ### those and ra_local URLs.
+
+#----------------------------------------------------------------------
+
def delete_urls_with_spaces(sbox):
  "delete multiple targets with spaces"
  sbox.build(create_wc = False)

  # Three URLs whose final component contains a space.
  spaced = [sbox.repo_url + '/%s spaced' % letter for letter in 'ABC']

  # Create three directories with a space in their name
  svntest.actions.run_and_verify_svn(None, [], 'mkdir',
                                     spaced[0], spaced[1], spaced[2],
                                     '-m', 'Created dirs')

  # Try to delete the first
  svntest.actions.run_and_verify_svn(None, [], 'rm',
                                     spaced[0],
                                     '-m', 'Deleted A')

  # And then two at once
  svntest.actions.run_and_verify_svn(None, [], 'rm',
                                     spaced[1], spaced[2],
                                     '-m', 'Deleted B and C')
+
def ls_url_special_characters(sbox):
  """special characters in svn ls URL"""
  sbox.build(create_wc = False)

  # '%2E' is an escaped '.' and '%2F' an escaped '/'; both URLs must
  # resolve to /A and list its children.
  expected = ['B/\n', 'C/\n', 'D/\n', 'mu\n']
  for url in (sbox.repo_url + '/A' + '/%2E',
              sbox.repo_url + '%2F' + 'A'):
    svntest.actions.run_and_verify_svn(expected, [], 'ls', url)
+
def ls_multiple_and_non_existent_targets(sbox):
  "ls multiple and non-existent targets"

  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  # Four sub-scenarios follow as nested helpers; they are invoked in
  # order at the bottom of this function.

  def non_existent_wc_target():
    "non-existent wc target"
    non_existent_path = sbox.ospath('non-existent')

    expected_err = ".*W155010.*"
    svntest.actions.run_and_verify_svn2(None, expected_err,
                                        1, 'ls', non_existent_path)

  def non_existent_url_target():
    "non-existent url target"
    non_existent_url = sbox.repo_url + '/non-existent'
    expected_err = ".*W160013.*"

    svntest.actions.run_and_verify_svn2(None, expected_err,
                                        1, 'ls', non_existent_url)

  def multiple_wc_targets():
    "multiple wc targets"

    alpha = sbox.ospath('A/B/E/alpha')
    beta = sbox.ospath('A/B/E/beta')
    non_existent_path = sbox.ospath('non-existent')

    # All targets are existing
    svntest.actions.run_and_verify_svn2(None, [],
                                        0, 'ls', alpha, beta)

    # One non-existing target: expect the warning plus summary error.
    expected_err = ".*W155010.*\n.*E200009.*"
    expected_err_re = re.compile(expected_err, re.DOTALL)

    exit_code, output, error = svntest.main.run_svn(1, 'ls', alpha,
                                                    non_existent_path, beta)

    # Verify error
    if not expected_err_re.match("".join(error)):
      raise svntest.Failure('ls failed: expected error "%s", but received '
                            '"%s"' % (expected_err, "".join(error)))

  def multiple_url_targets():
    "multiple url targets"

    alpha = sbox.repo_url + '/A/B/E/alpha'
    beta = sbox.repo_url + '/A/B/E/beta'
    non_existent_url = sbox.repo_url + '/non-existent'

    # All targets are existing
    svntest.actions.run_and_verify_svn2(None, [],
                                        0, 'ls', alpha, beta)

    # One non-existing target
    expected_err = ".*W160013.*\n.*E200009.*"
    expected_err_re = re.compile(expected_err, re.DOTALL)

    exit_code, output, error = svntest.main.run_svn(1, 'ls', alpha,
                                                    non_existent_url, beta)

    # Verify error
    if not expected_err_re.match("".join(error)):
      raise svntest.Failure('ls failed: expected error "%s", but received '
                            '"%s"' % (expected_err, "".join(error)))
  # Test one by one
  non_existent_wc_target()
  non_existent_url_target()
  multiple_wc_targets()
  multiple_url_targets()
+
def add_multiple_targets(sbox):
  "add multiple targets"

  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  file1 = sbox.ospath('file1')
  file2 = sbox.ospath('file2')
  non_existent_path = sbox.ospath('non-existent')

  svntest.main.file_write(file1, "file1 contents", 'w+')
  svntest.main.file_write(file2, "file2 contents", 'w+')

  # One non-existing target: 'add' must report the missing path (W155010)
  # followed by the summarizing E200009 error.
  expected_err = ".*W155010.*\n.*E200009.*"
  expected_err_re = re.compile(expected_err, re.DOTALL)

  exit_code, output, error = svntest.main.run_svn(1, 'add', file1,
                                                  non_existent_path, file2)

  # Verify error
  if not expected_err_re.match("".join(error)):
    raise svntest.Failure('add failed: expected error "%s", but received '
                          '"%s"' % (expected_err, "".join(error)))

  # Verify status: despite the error, both existing targets must have
  # been scheduled for addition.
  expected_status = svntest.verify.UnorderedOutput(
    ['A ' + file1 + '\n',
     'A ' + file2 + '\n'])
  svntest.actions.run_and_verify_svn(expected_status, [],
                                     'status', wc_dir)
+
+
def quiet_commits(sbox):
  "commits with --quiet"

  sbox.build()

  svntest.main.file_append(sbox.ospath('A/mu'), 'xxx')

  # Six commit-producing commands (r2 through r7), each with --quiet;
  # each must produce no stdout at all.
  svntest.actions.run_and_verify_svn([], [],
                                     'commit', sbox.wc_dir,
                                     '--message', 'commit', '--quiet')

  svntest.actions.run_and_verify_svn([], [],
                                     'mkdir', sbox.repo_url + '/X',
                                     '--message', 'mkdir URL', '--quiet')

  svntest.actions.run_and_verify_svn([], [],
                                     'import', sbox.ospath('A/mu'),
                                     sbox.repo_url + '/f',
                                     '--message', 'import', '--quiet')

  svntest.actions.run_and_verify_svn([], [],
                                     'rm', sbox.repo_url + '/f',
                                     '--message', 'rm URL', '--quiet')

  svntest.actions.run_and_verify_svn([], [],
                                     'copy', sbox.repo_url + '/X',
                                     sbox.repo_url + '/Y',
                                     '--message', 'cp URL URL', '--quiet')

  svntest.actions.run_and_verify_svn([], [],
                                     'move', sbox.repo_url + '/Y',
                                     sbox.repo_url + '/Z',
                                     '--message', 'mv URL URL', '--quiet')

  # Not fully testing each command, just that they all commit and
  # produce no output.
  expected_output = wc.State(sbox.wc_dir, {
    'X' : Item(status='A '),
    'Z' : Item(status='A '),
  })
  expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 7)
  expected_status.add({
    'X' : Item(status=' ', wc_rev=7),
    'Z' : Item(status=' ', wc_rev=7),
  })
  expected_disk = svntest.main.greek_state.copy()
  expected_disk.tweak('A/mu',
                      contents=expected_disk.desc['A/mu'].contents
                      + 'xxx')
  expected_disk.add({
    'X' : Item(),
    'Z' : Item()
  })
  svntest.actions.run_and_verify_update(sbox.wc_dir,
                                        expected_output,
                                        expected_disk,
                                        expected_status)
+
# Regression test for issue #4023: on Windows, 'svn rm' incorrectly deletes
# on-disk file if it is case-clashing with intended (non-on-disk) target.
@Issue(4023)
def rm_missing_with_case_clashing_ondisk_item(sbox):
  """rm missing item with case-clashing ondisk item"""

  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  iota_path = sbox.ospath('iota')
  IOTA_path = sbox.ospath('IOTA')

  # Out-of-svn move, to make iota missing, while IOTA appears as unversioned.
  os.rename(iota_path, IOTA_path)

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.add({
    'iota' : Item(status='! ', wc_rev='1'),
    'IOTA' : Item(status='? '),
  })
  svntest.actions.run_and_verify_unquiet_status(wc_dir, expected_status)

  # Verify that the casing is not updated, because the path is on-disk.
  expected_output = [ 'D %s\n' % iota_path ]
  # 'svn rm' iota, should leave IOTA alone.
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'rm', iota_path)

  # Test status: the unversioned IOTA should still be there.
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.add({
    'iota' : Item(status='D ', wc_rev='1'),
    'IOTA' : Item(status='? '),
  })
  svntest.actions.run_and_verify_unquiet_status(wc_dir, expected_status)
+
+
def delete_conflicts_one_of_many(sbox):
  """delete multiple targets one conflict"""

  sbox.build()
  wc_dir = sbox.wc_dir

  # Create a text conflict on rho: commit a change (r2), modify again,
  # then update back to r1 with conflicts postponed.
  svntest.main.file_append(sbox.ospath('A/D/G/rho'), 'new rho')
  sbox.simple_commit()
  svntest.main.file_append(sbox.ospath('A/D/G/rho'), 'conflict rho')
  svntest.actions.run_and_verify_svn(None, [],
                                     'update', '-r1', '--accept', 'postpone',
                                     wc_dir)

  if not os.path.exists(sbox.ospath('A/D/G/rho.mine')):
    raise svntest.Failure("conflict file rho.mine missing")

  # Remove the conflicted rho together with an unrelated target; --force
  # is needed because rho is conflicted.
  svntest.actions.run_and_verify_svn(None, [],
                                     'rm', '--force',
                                     sbox.ospath('A/D/G/rho'),
                                     sbox.ospath('A/D/G/tau'))

  # Removing the node must also clean up its conflict artifact.
  verify_file_deleted("failed to remove conflict file",
                      sbox.ospath('A/D/G/rho.mine'))
+
@Issue(3231)
@XFail()
def peg_rev_on_non_existent_wc_path(sbox):
  """peg rev resolution on non-existent wc paths"""

  sbox.build()
  wc_dir = sbox.wc_dir

  # setup some history: chains of renames of the A tree and the mu file
  # across revisions r2-r4.
  sbox.simple_move('A', 'A2')
  sbox.simple_move('A2/mu', 'A2/mu2')
  open(sbox.ospath('A2/mu2'), 'w').write('r2\n')
  sbox.simple_commit(message='r2')
  #
  sbox.simple_move('A2/mu2', 'A2/mu3')
  sbox.simple_move('A2', 'A3')
  open(sbox.ospath('A3/mu3'), 'w').write('r3\n')
  sbox.simple_commit(message='r3')
  #
  sbox.simple_move('A3/mu3', 'A3/mu4')
  open(sbox.ospath('A3/mu4'), 'w').write('r4\n')
  sbox.simple_move('A3', 'A4')
  sbox.simple_commit(message='r4')

  # test something.
  sbox.simple_update()
  # This currently fails with ENOENT on A/mu3.
  svntest.actions.run_and_verify_svn(['r2\n'], [],
                                     'cat', '-r2', sbox.ospath('A3/mu3') + '@3')
  # NOTE(review): os.chdir is not restored afterwards; later tests in the
  # same process presumably rely on the harness resetting the cwd.
  os.chdir(sbox.ospath('A4'))
  svntest.actions.run_and_verify_svn(['r2\n'], [],
                                     'cat', '-r2', sbox.ospath('mu3') + '@3')
+
+
# With 'svn mkdir --parents' the target directory may already exist on disk.
# In that case it was wrongly performing a recursive 'add' on its contents.
def mkdir_parents_target_exists_on_disk(sbox):
  "mkdir parents target exists on disk"

  sbox.build()
  wc_dir = sbox.wc_dir

  target_dir = sbox.ospath('Y')
  nested_dir = sbox.ospath('Y/Z')

  # Pre-create the target and an unversioned child on disk.
  os.makedirs(nested_dir)
  svntest.actions.run_and_verify_svn(None, [],
                                     'mkdir', '--parents', target_dir)

  # Y should be added, and Z should not. There was a regression in which Z
  # was also added.
  expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
  expected_status.add({
    'Y' : Item(status='A ', wc_rev=0),
  })
  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
@Skip(svntest.main.is_ra_type_file)
def plaintext_password_storage_disabled(sbox):
  "test store-plaintext-passwords = no"

  sbox.build()

  wc_dir = sbox.wc_dir
  sbox.simple_append("iota", "New content for iota.")

  config_dir_path = sbox.get_tempname(prefix="config-dir")
  os.mkdir(config_dir_path)

  # disable all encrypted password stores
  with open(os.path.join(config_dir_path, "config"), "w") as config_file:
    config_file.write("[auth]\npassword-stores =\n")

  # disable plaintext password storage
  with open(os.path.join(config_dir_path, "servers"), "w") as servers_file:
    servers_file.write("[global]\nstore-plaintext-passwords=no\n")

  svntest.main.run_command(svntest.main.svn_binary, False, False,
    "commit", "--config-dir", config_dir_path,
    "-m", "committing with plaintext password storage disabled",
    "--username", svntest.main.wc_author,
    "--password", svntest.main.wc_passwd,
    "--trust-server-cert-failures", "unknown-ca",
    "--non-interactive", wc_dir)

  # Verify that the password was not stored in plaintext anywhere in the
  # auth cache area.  'with' guarantees the handle is closed even when
  # the failure is raised.
  for root, dirs, files in os.walk(os.path.join(config_dir_path, "auth")):
    for file_name in files:
      path = os.path.join(root, file_name)
      with open(path, "r") as f:
        for line in f:
          if svntest.main.wc_passwd in line:
            raise svntest.Failure("password was found in '%s'" % path)
+
+
@Skip(svntest.main.is_os_windows)
def filtered_ls(sbox):
  "filtered 'svn ls'"

  sbox.build(read_only=True)
  path = sbox.repo_url + "/A/D"

  # check plain info
  expected = [ "H/omega\n",
               "gamma\n" ]

  # --search matches against entry basenames, glob-style.
  exit_code, output, error = svntest.actions.run_and_verify_svn(
    expected, [], 'ls', path, '--depth=infinity', '--search=*a')

  # check case-insensitivity
  exit_code, output, error = svntest.actions.run_and_verify_svn(
    expected, [], 'ls', path, '--depth=infinity', '--search=*A')

  expected = [ "H/\n" ]
  exit_code, output, error = svntest.actions.run_and_verify_svn(
    expected, [], 'ls', path, '--depth=infinity', '--search=h')

  # we don't match full paths
  exit_code, output, error = svntest.actions.run_and_verify_svn(
    [], [], 'ls', path, '--depth=infinity', '--search=*/*')
+
@Issue(4700)
@XFail(svntest.main.is_fs_type_fsx)
def null_update_last_changed_revision(sbox):
  "null 'update' updates last changed rev"

  sbox.build()
  wc_dir = sbox.wc_dir

  # r2: Random text change.  ('with' closes the handle; the bare
  # open().read() relied on GC for that.)
  with open(sbox.path("iota")) as f:
    old_contents = f.read()
  sbox.simple_append("iota", "Line 2.\n")
  sbox.simple_commit(message='r2')
  sbox.simple_update()

  # r3: Revert r2.
  sbox.simple_append("iota", old_contents, truncate=True)
  sbox.simple_commit(message='r3')
  sbox.simple_update()

  # Perform a null update.
  #
  # This used to say '3'; probably because iota@3 and iota@1 were textually
  # identical. It seems this problem was introduced in r1760570.
  sbox.simple_update(revision='1')
  svntest.actions.run_and_verify_svn(["1\n"], [],
                                     'info', sbox.path('iota'),
                                     '--show-item', 'last-changed-revision')
+
@Issue(4700)
@XFail(svntest.main.is_fs_type_bdb)
@XFail(svntest.main.is_fs_type_fsx)
def null_prop_update_last_changed_revision(sbox):
  "null 'property update' updates last changed rev"

  sbox.build()
  wc_dir = sbox.wc_dir

  # r2: set the property to its original value.
  sbox.simple_propset("prop", "value", "iota")
  sbox.simple_commit(message='r2')
  sbox.simple_update()

  # r3: change the property
  sbox.simple_propset("prop", "changed", "iota")
  sbox.simple_commit(message='r3')
  sbox.simple_update()

  # r4: Revert r3.
  sbox.simple_propset("prop", "value", "iota")
  sbox.simple_commit(message='r4')
  sbox.simple_update()

  # Perform a null update: iota@2 and iota@4 have identical props, so
  # the reported last-changed revision must be 2, not 4.
  sbox.simple_update(revision='2')
  svntest.actions.run_and_verify_svn(["2\n"], [],
                                     'info', sbox.path('iota'),
                                     '--show-item', 'last-changed-revision')
+
+
########################################################################
# Run the tests

# list all tests here, starting with None:
# (the harness numbers tests by position in this list, so append new
# tests at the end rather than reordering existing entries)
test_list = [ None,
              basic_checkout,
              basic_status,
              basic_commit,
              basic_update,
              basic_mkdir_url,
              basic_mkdir_url_with_parents,
              basic_mkdir_wc_with_parents,
              basic_commit_corruption,
              basic_update_corruption,
              basic_merging_update,
              basic_conflict,
              basic_cleanup,
              basic_revert,
              basic_switch,
              basic_delete,
              basic_checkout_deleted,
              basic_node_kind_change,
              basic_import,
              basic_cat,
              basic_ls,
              nonexistent_repository,
              basic_auth_cache,
              basic_add_ignores,
              basic_add_parents,
              uri_syntax,
              basic_checkout_file,
              basic_info,
              basic_add_local_ignores,
              basic_add_no_ignores,
              repos_root,
              basic_peg_revision,
              info_nonhead,
              ls_nonhead,
              cat_added_PREV,
              ls_space_in_repo_name,
              delete_keep_local,
              delete_keep_local_twice,
              special_paths_in_repos,
              basic_rm_urls_one_repo,
              basic_rm_urls_multi_repos,
              automatic_conflict_resolution,
              info_nonexisting_file,
              basic_relative_url_using_current_dir,
              basic_relative_url_using_other_targets,
              basic_relative_url_multi_repo,
              basic_relative_url_non_canonical,
              basic_relative_url_with_peg_revisions,
              basic_auth_test,
              basic_add_svn_format_file,
              basic_mkdir_mix_targets,
              delete_from_url_with_spaces,
              meta_correct_library_being_used,
              delete_and_add_same_file,
              delete_child_parent_update,
              basic_relocate,
              delete_urls_with_spaces,
              ls_url_special_characters,
              ls_multiple_and_non_existent_targets,
              add_multiple_targets,
              quiet_commits,
              rm_missing_with_case_clashing_ondisk_item,
              delete_conflicts_one_of_many,
              peg_rev_on_non_existent_wc_path,
              mkdir_parents_target_exists_on_disk,
              plaintext_password_storage_disabled,
              filtered_ls,
              null_update_last_changed_revision,
              null_prop_update_last_changed_revision,
             ]

if __name__ == '__main__':
  svntest.main.run_tests(test_list)
  # NOTREACHED


### End of file.
diff --git a/subversion/tests/cmdline/blame_tests.py b/subversion/tests/cmdline/blame_tests.py
new file mode 100755
index 0000000..f824d25
--- /dev/null
+++ b/subversion/tests/cmdline/blame_tests.py
@@ -0,0 +1,1094 @@
+#!/usr/bin/env python
+#
+# blame_tests.py: testing line-by-line annotation.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os, sys, re
+
+# Our testing module
+import svntest
+from svntest.main import server_has_mergeinfo
+
+from prop_tests import binary_mime_type_on_text_file_warning
+
+# For some basic merge setup used by blame -g tests.
+from svntest.mergetrees import set_up_branch
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
# Helper function to validate the output of a particular run of blame.
def parse_and_verify_blame(output, expected_blame, with_merged=0):
  "tokenize and validate the output of blame"

  # Fields each parsed line is compared on; 'merged' only for blame -g.
  fields = ['revision', 'author', 'text']
  if with_merged:
    fields.append('merged')

  parsed = []
  for raw_line in output:
    entry = {}

    # With -g, the line starts with a two-column merge marker ("G ").
    if with_merged:
      entry['merged'] = (raw_line[0] == 'G')
      raw_line = raw_line[2:]

    # At most three tokens: revision, author, rest-of-line text.
    tokens = raw_line.split(None, 2)

    entry['revision'] = None if tokens[0] == '-' else int(tokens[0])
    entry['author'] = None if tokens[1] == '-' else tokens[1]
    entry['text'] = tokens[2]

    parsed.append(entry)

  # Verify the results
  if len(parsed) != len(expected_blame):
    raise svntest.Failure("expected and actual results not the same length")

  for num, (item, expected_item) in enumerate(zip(parsed, expected_blame)):
    for key in fields:
      if item[key] != expected_item[key]:
        raise svntest.Failure('on line %d, expecting %s "%s", found "%s"' % \
          (num+1, key, str(expected_item[key]), str(item[key])))
+
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+
+#----------------------------------------------------------------------
+
def blame_space_in_name(sbox):
  "annotate a file whose name contains a space"
  sbox.build()

  # Smoke test: add, commit and blame a path containing a space; only
  # checks that none of the commands blow up.
  spaced_path = os.path.join(sbox.wc_dir, 'space in name')
  svntest.main.file_append(spaced_path, "Hello\n")
  for argv in (('add', spaced_path),
               ('ci', '-m', '', spaced_path),
               ('blame', spaced_path)):
    svntest.main.run_svn(None, *argv)
+
+
def blame_binary(sbox):
  "annotate a binary file"
  sbox.build()
  wc_dir = sbox.wc_dir

  # First, make a new revision of iota.
  iota = os.path.join(wc_dir, 'iota')
  svntest.main.file_append(iota, "New contents for iota\n")
  svntest.main.run_svn(None, 'ci',
                       '-m', '', iota)

  # Then do it again, but this time we set the mimetype to binary.
  iota = os.path.join(wc_dir, 'iota')
  svntest.main.file_append(iota, "More new contents for iota\n")
  svntest.main.run_svn(binary_mime_type_on_text_file_warning,
                       'propset', 'svn:mime-type', 'image/jpeg', iota)

  # Blame fails when mime-type is locally modified to binary
  # (truthy first arg to run_svn means an error is expected)
  exit_code, output, errput = svntest.main.run_svn(2, 'blame', iota)
  if (len(errput) != 1) or (errput[0].find('Skipping') == -1):
    raise svntest.Failure

  svntest.main.run_svn(None, 'ci',
                       '-m', '', iota)

  # Blame fails when mime-type is binary
  exit_code, output, errput = svntest.main.run_svn(2, 'blame', iota)
  if (len(errput) != 1) or (errput[0].find('Skipping') == -1):
    raise svntest.Failure

  # Once more, but now let's remove that mimetype.
  iota = os.path.join(wc_dir, 'iota')
  svntest.main.file_append(iota, "Still more new contents for iota\n")
  svntest.main.run_svn(None, 'propdel', 'svn:mime-type', iota)
  svntest.main.run_svn(None, 'ci',
                       '-m', '', iota)

  # Blame fails when asking about an old revision where the mime-type is binary
  exit_code, output, errput = svntest.main.run_svn(2, 'blame', iota + '@3')
  if (len(errput) != 1) or (errput[0].find('Skipping') == -1):
    raise svntest.Failure

  # But with --force, it should work.
  exit_code, output, errput = svntest.main.run_svn(2, 'blame', '--force',
                                                   iota + '@3')
  if (len(errput) != 0 or len(output) != 3):
    raise svntest.Failure
+
+
+
+
# Issue #2154 - annotating a directory should fail
# (change needed if the desired behavior is to
# run blame recursively on all the files in it)
#
@Issue(2154)
def blame_directory(sbox):
  "annotating a directory not allowed"

  # Issue 2154 - blame on directory fails without error message

  # Setup.  ('re' comes from the module-level import at the top of this
  # file; the old function-local 'import re' was redundant.)
  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir
  dir = os.path.join(wc_dir, 'A')

  # Run blame against directory 'A'. The repository error will
  # probably include a leading slash on the path, but we'll tolerate
  # it either way, since either way it would still be a clean error.
  expected_error = ".*'[/]{0,1}A' is not a file"
  exit_code, outlines, errlines = svntest.main.run_svn(1, 'blame', dir)

  # Verify expected error message is output
  for line in errlines:
    if re.match(expected_error, line):
      break
  else:
    raise svntest.Failure('Failed to find %s in %s' %
                          (expected_error, str(errlines)))
+
+
+
# Basic test for svn blame --xml.
#
def blame_in_xml(sbox):
  "blame output in XML format"

  sbox.build()
  wc_dir = sbox.wc_dir

  file_name = "iota"
  file_path = os.path.join(wc_dir, file_name)
  svntest.main.file_append(file_path, "Testing svn blame --xml\n")
  expected_output = svntest.wc.State(wc_dir, {
    'iota' : Item(verb='Sending'),
  })
  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        None)

  # Retrieve last changed date from svn info
  exit_code, output, error = svntest.actions.run_and_verify_svn(
    None, [],
    'log', file_path, '--xml', '-r1:2')

  # Pick the <date> lines of r1 and r2 out of the XML log; fail if
  # fewer than two are found.
  date1 = None
  date2 = None
  for line in output:
    if line.find("<date>") >= 0:
      if date1 is None:
        date1 = line
        continue
      elif date2 is None:
        date2 = line
        break
  else:
    raise svntest.Failure

  template = ['<?xml version="1.0" encoding="UTF-8"?>\n',
              '<blame>\n',
              '<target\n',
              ' path="' + file_path + '">\n',
              '<entry\n',
              ' line-number="1">\n',
              '<commit\n',
              ' revision="1">\n',
              '<author>jrandom</author>\n',
              '%s' % date1,
              '</commit>\n',
              '</entry>\n',
              '<entry\n',
              ' line-number="2">\n',
              '<commit\n',
              ' revision="2">\n',
              '<author>jrandom</author>\n',
              '%s' % date2,
              '</commit>\n',
              '</entry>\n',
              '</target>\n',
              '</blame>\n']

  exit_code, output, error = svntest.actions.run_and_verify_svn(
    None, [],
    'blame', file_path, '--xml')

  # Compare the whole output at once.  The old element-by-element loop
  # over range(len(output)) silently passed when output was shorter than
  # the template and crashed with IndexError when it was longer.
  if output != template:
    raise svntest.Failure
+
+
# For a line changed before the requested start revision, blame should not
# print a revision number (as fixed in r848109) or crash (as it did with
# "--verbose" before being fixed in r849964).
#
def blame_on_unknown_revision(sbox):
  "blame lines from unknown revisions"

  sbox.build()
  wc_dir = sbox.wc_dir

  file_name = "iota"
  file_path = os.path.join(wc_dir, file_name)

  # Commit two more revisions of iota (r2, r3).
  for i in range(1,3):
    svntest.main.file_append(file_path, "\nExtra line %d" % (i))
    expected_output = svntest.wc.State(wc_dir, {
      'iota' : Item(verb='Sending'),
    })
    svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                          None)

  # With -rHEAD:HEAD, line 1 predates the range, so its revision column
  # must be printed as '-'.
  exit_code, output, error = svntest.actions.run_and_verify_svn(
    None, [],
    'blame', file_path, '-rHEAD:HEAD')

  if output[0].find(" - This is the file 'iota'.") == -1:
    raise svntest.Failure

  exit_code, output, error = svntest.actions.run_and_verify_svn(
    None, [],
    'blame', file_path, '--verbose', '-rHEAD:HEAD')

  if output[0].find(" - This is the file 'iota'.") == -1:
    raise svntest.Failure
+
+
+
# The default blame revision range should be 1:N, where N is the
# peg-revision of the target, or BASE or HEAD if no peg-revision is
# specified.
#
def blame_peg_rev(sbox):
  "blame targets with peg-revisions"

  sbox.build()

  expected_output_r1 = [
    "     1    jrandom This is the file 'iota'.\n" ]

  # NOTE(review): cwd is changed and not restored; relies on the harness
  # resetting it between tests.
  os.chdir(sbox.wc_dir)

  # Modify iota and commit it (r2).
  svntest.main.file_write('iota', "This is no longer the file 'iota'.\n")
  expected_output = svntest.wc.State('.', {
    'iota' : Item(verb='Sending'),
  })
  svntest.actions.run_and_verify_commit('.', expected_output, None)

  # Check that we get a blame of r1 when we specify a peg revision of r1
  # and no explicit revision.
  svntest.actions.run_and_verify_svn(expected_output_r1, [],
                                     'blame', 'iota@1')

  # Check that an explicit revision overrides the default provided by
  # the peg revision.
  svntest.actions.run_and_verify_svn(expected_output_r1, [],
                                     'blame', 'iota@2', '-r1')
+
def blame_eol_styles(sbox):
  "blame with different eol styles"

  sbox.build()
  wc_dir = sbox.wc_dir

  # CR
  file_name = "iota"
  file_path = os.path.join(wc_dir, file_name)

  expected_output = svntest.wc.State(wc_dir, {
    'iota' : Item(verb='Sending'),
  })

  # do the test for each eol-style
  for eol in ['CR', 'LF', 'CRLF', 'native']:
    # Reset the prop and the content, then commit a change per appended
    # line plus one for the eol-style property itself.
    svntest.main.run_svn(None, 'propdel', 'svn:eol-style', file_path)
    svntest.main.file_write(file_path, "This is no longer the file 'iota'.\n")

    for i in range(1,3):
      svntest.main.file_append(file_path, "Extra line %d" % (i) + "\n")
      svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                            None)

    svntest.main.run_svn(None, 'propset', 'svn:eol-style', eol,
                         file_path)

    svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                          None)

    exit_code, output, error = svntest.actions.run_and_verify_svn(
      None, [],
      'blame', file_path, '-r1:HEAD')

    # output is a list of lines, there should be 3 lines
    if len(output) != 3:
      raise svntest.Failure('Expected 3 lines in blame output but got %d: \n' %
                            len(output) + str(output))
+
+def blame_ignore_whitespace(sbox):
+  "ignore whitespace when blaming"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  file_name = "iota"
+  file_path = os.path.join(wc_dir, file_name)
+
+  # r2: establish the file's content.
+  svntest.main.file_write(file_path,
+                          "Aa\n"
+                          "Bb\n"
+                          "Cc\n")
+  expected_output = svntest.wc.State(wc_dir, {
+    'iota' : Item(verb='Sending'),
+    })
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        None)
+
+  # commit only whitespace changes
+  svntest.main.file_write(file_path,
+                          " A  a \n"
+                          "   B b \n"
+                          " C    c \n")
+  expected_output = svntest.wc.State(wc_dir, {
+    'iota' : Item(verb='Sending'),
+    })
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        None)
+
+  # match the blame output, as defined in the blame code:
+  # "%6ld %10s %s %s%s", rev, author ? author : "          -",
+  #                      time_stdout , line, APR_EOL_STR
+  # With '-x -w' (ignore all whitespace) the r3 whitespace-only edit
+  # must not steal the blame: every line stays attributed to r2.
+  expected_output = [
+    "     2    jrandom  A  a \n",
+    "     2    jrandom    B b \n",
+    "     2    jrandom  C    c \n",
+    ]
+
+  exit_code, output, error = svntest.actions.run_and_verify_svn(
+    expected_output, [],
+    'blame', '-x', '-w', file_path)
+
+  # commit some changes
+  svntest.main.file_write(file_path,
+                          " A  a \n"
+                          "Xxxx X\n"
+                          "   Bb b \n"
+                          " C    c \n")
+  expected_output = svntest.wc.State(wc_dir, {
+    'iota' : Item(verb='Sending'),
+    })
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        None)
+
+  # Only the genuinely changed lines (the insertion and the 'Bb' edit)
+  # move to r4; whitespace-identical lines keep their r2 attribution.
+  expected_output = [
+    "     2    jrandom  A  a \n",
+    "     4    jrandom Xxxx X\n",
+    "     4    jrandom    Bb b \n",
+    "     2    jrandom  C    c \n",
+    ]
+
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'blame', '-x', '-w', file_path)
+
+def blame_ignore_eolstyle(sbox):
+  "ignore eol styles when blaming"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  file_name = "iota"
+  file_path = os.path.join(wc_dir, file_name)
+
+  # r2: LF-terminated content.
+  svntest.main.file_write(file_path,
+                          "Aa\n"
+                          "Bb\n"
+                          "Cc\n")
+  expected_output = svntest.wc.State(wc_dir, {
+    'iota' : Item(verb='Sending'),
+    })
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        None)
+
+  # commit only eol changes
+  svntest.main.file_write(file_path,
+                          "Aa\r"
+                          "Bb\r"
+                          "Cc")
+  expected_output = svntest.wc.State(wc_dir, {
+    'iota' : Item(verb='Sending'),
+    })
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        None)
+
+  # With '--ignore-eol-style' the CR-vs-LF switch in r3 is invisible, so
+  # 'Aa' and 'Bb' stay at r2.  'Cc' moves to r3 because r3 also dropped
+  # its trailing end-of-line, which is a real content change.
+  expected_output = [
+    "     2    jrandom Aa\n",
+    "     2    jrandom Bb\n",
+    "     3    jrandom Cc\n",
+    ]
+
+  exit_code, output, error = svntest.actions.run_and_verify_svn(
+    expected_output, [],
+    'blame', '-x', '--ignore-eol-style', file_path)
+
+
+@SkipUnless(server_has_mergeinfo)
+def blame_merge_info(sbox):
+  "test 'svn blame -g'"
+
+  # Build the standard merge-history repository shared with log_tests;
+  # revision numbers below refer to that fixture's history.
+  from log_tests import merge_history_repos
+  merge_history_repos(sbox)
+
+  wc_dir = sbox.wc_dir
+  iota_path = os.path.join(wc_dir, 'trunk', 'iota')
+  mu_path = os.path.join(wc_dir, 'trunk', 'A', 'mu')
+
+  exit_code, output, error = svntest.actions.run_and_verify_svn(
+    None, [], 'blame', '-g', iota_path)
+
+  # Each dict describes one expected output line; 'merged' flags lines
+  # that arrived via a merge ('G' marker in -g output).
+  # parse_and_verify_blame is presumably defined earlier in this file --
+  # it matches these dicts against the raw blame output.
+  expected_blame = [
+      { 'revision' : 2,
+        'author' : 'jrandom',
+        'text' : "This is the file 'iota'.\n",
+        'merged' : 0,
+      },
+      { 'revision' : 11,
+        'author' : 'jrandom',
+        'text' : "'A' has changed a bit, with 'upsilon', and 'xi'.\n",
+        'merged' : 1,
+      },
+    ]
+  parse_and_verify_blame(output, expected_blame, 1)
+
+  # Restricting the range excludes the originating revisions, so both
+  # lines come back unattributed (revision/author None).
+  exit_code, output, error = svntest.actions.run_and_verify_svn(
+    None, [], 'blame', '-g', '-r10:11', iota_path)
+
+  expected_blame = [
+      { 'revision' : None,
+        'author' : None,
+        'text' : "This is the file 'iota'.\n",
+        'merged' : 0,
+      },
+      { 'revision' : None,
+        'author' : None,
+        'text' : "'A' has changed a bit.\n",
+        'merged' : 0,
+      },
+    ]
+  parse_and_verify_blame(output, expected_blame, 1)
+
+  exit_code, output, error = svntest.actions.run_and_verify_svn(
+    None, [], 'blame', '-g', '-r16:17', mu_path)
+
+  expected_blame = [
+      { 'revision' : None,
+        'author' : None,
+        'text' : "This is the file 'mu'.\n",
+        'merged' : 0,
+      },
+      { 'revision' : 16,
+        'author' : 'jrandom',
+        'text' : "Don't forget to look at 'upsilon', as well.\n",
+        'merged' : 1,
+      },
+      { 'revision' : 16,
+        'author' : 'jrandom',
+        'text' : "This is yet more content in 'mu'.\n",
+        'merged' : 1,
+      },
+    ]
+  parse_and_verify_blame(output, expected_blame, 1)
+
+
+@SkipUnless(server_has_mergeinfo)
+def blame_merge_out_of_range(sbox):
+  "don't look for merged files out of range"
+
+  # Reuse the shared merge-history fixture from log_tests.
+  from log_tests import merge_history_repos
+  merge_history_repos(sbox)
+
+  wc_dir = sbox.wc_dir
+  upsilon_path = os.path.join(wc_dir, 'trunk', 'A', 'upsilon')
+
+  exit_code, output, error = svntest.actions.run_and_verify_svn(
+    None, [],
+    'blame', '-g', upsilon_path)
+
+  # One original line (r4) and one merged-in line (r11, 'merged' : 1).
+  expected_blame = [
+      { 'revision' : 4,
+        'author' : 'jrandom',
+        'text' : "This is the file 'upsilon'.\n",
+        'merged' : 0,
+      },
+      { 'revision' : 11,
+        'author': 'jrandom',
+        'text' : "There is also the file 'xi'.\n",
+        'merged' : 1,
+      },
+    ]
+  parse_and_verify_blame(output, expected_blame, 1)
+
+# test for issue #2888: 'svn blame' aborts over ra_serf
+@Issue(2888)
+def blame_peg_rev_file_not_in_head(sbox):
+  "blame target not in HEAD with peg-revisions"
+
+  sbox.build()
+
+  expected_output_r1 = [
+    "     1    jrandom This is the file 'iota'.\n" ]
+
+  os.chdir(sbox.wc_dir)
+
+  # Modify iota and commit it (r2).
+  svntest.main.file_write('iota', "This is no longer the file 'iota'.\n")
+  expected_output = svntest.wc.State('.', {
+    'iota' : Item(verb='Sending'),
+    })
+  svntest.actions.run_and_verify_commit('.', expected_output, None)
+
+  # Delete iota so that it doesn't exist in HEAD
+  # (r3; repository-side delete -- the working copy still has iota).
+  svntest.main.run_svn(None, 'rm', sbox.repo_url + '/iota',
+                       '-m', 'log message')
+
+  # Check that we get a blame of r1 when we specify a peg revision of r1
+  # and no explicit revision.
+  svntest.actions.run_and_verify_svn(expected_output_r1, [],
+                                     'blame', 'iota@1')
+
+  # Check that an explicit revision overrides the default provided by
+  # the peg revision.
+  svntest.actions.run_and_verify_svn(expected_output_r1, [],
+                                     'blame', 'iota@2', '-r1')
+
+def blame_file_not_in_head(sbox):
+  "blame target not in HEAD"
+
+  # Repository only; no working copy is needed for a URL target.
+  sbox.build(create_wc = False, read_only = True)
+  notexisting_url = sbox.repo_url + '/notexisting'
+
+  # Check that a correct error message is printed when blaming a target that
+  # doesn't exist (in HEAD).  The alternation covers the different RA
+  # layers' wordings; no stdout is expected.
+  expected_err = ".*notexisting' (is not a file.*|path not found|does not exist)"
+  svntest.actions.run_and_verify_svn([], expected_err,
+                                     'blame', notexisting_url)
+
+@SkipUnless(server_has_mergeinfo)
+def blame_output_after_merge(sbox):
+  "blame -g output with inserted lines"
+
+  # Scenario: trunk is created (r2), mu edited on trunk (r3), a branch
+  # is made (r4) and edited twice (r5, r6), then merged back (r7).
+  # Blame output is then checked with/without -g and with -rN:M ranges.
+  sbox.build()
+
+  wc_dir = sbox.wc_dir
+  trunk_url = sbox.repo_url + '/trunk'
+  trunk_A_url = trunk_url + '/A'
+  A_url = sbox.repo_url + '/A'
+
+  # r2: mv greek tree in trunk.
+  svntest.actions.run_and_verify_svn(["Committing transaction...\n",
+                                      "Committed revision 2.\n"], [],
+                                     'mv', "--parents", A_url, trunk_A_url,
+                                     "-m", "move greek tree to trunk")
+
+  svntest.actions.run_and_verify_update(wc_dir, None, None, None)
+
+  # r3: modify trunk/A/mu, modify and add some lines.
+  mu_path = os.path.join(wc_dir, "trunk", "A", "mu")
+  new_content = "New version of file 'mu'.\n" \
+                "2nd line in file 'mu'.\n" \
+                "3rd line in file 'mu'.\n" \
+                "4th line in file 'mu'.\n" \
+                "5th line in file 'mu'.\n" \
+                "6th line in file 'mu'.\n"
+  svntest.main.file_write(mu_path, new_content)
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'trunk/A/mu' : Item(verb='Sending'),
+    })
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        None)
+
+  # r4: create branches/br from trunk
+  branches_br_url = sbox.repo_url + "/branches/br"
+  svntest.actions.run_and_verify_svn(["Committing transaction...\n",
+                                      "Committed revision 4.\n"], [],
+                                     'cp', '--parents',
+                                     trunk_url, branches_br_url,
+                                     "-m", "create branch")
+
+  svntest.actions.run_and_verify_update(wc_dir, None, None, None)
+
+  # r5: modify single line in branches/br/A/mu
+  branch_mu_path = os.path.join(wc_dir, "branches", "br", "A", "mu")
+  svntest.main.file_write(branch_mu_path,
+                          "New version of file 'mu'.\n" \
+                          "2nd line in file 'mu'.\n" \
+                          "new 3rd line in file 'mu'.\n" \
+                          "4th line in file 'mu'.\n" \
+                          "5th line in file 'mu'.\n" \
+                          "6th line in file 'mu'.\n")
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'branches/br/A/mu' : Item(verb='Sending'),
+    })
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        None)
+
+  # r6: Insert a single line in branches/A/mu
+  svntest.main.file_write(branch_mu_path,
+                          "New version of file 'mu'.\n" \
+                          "2nd line in file 'mu'.\n" \
+                          "new 3rd line in file 'mu'.\n" \
+                          "add 3.5 line in file 'mu'.\n" \
+                          "4th line in file 'mu'.\n" \
+                          "5th line in file 'mu'.\n" \
+                          "6th line in file 'mu'.\n")
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'branches/br/A/mu' : Item(verb='Sending'),
+    })
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        None)
+
+  # r7: merge branches/br back to trunk
+  trunk_path = os.path.join(wc_dir, "trunk")
+  svntest.actions.run_and_verify_svn(None, [], 'merge',
+                                     '-r', '4:HEAD',
+                                     branches_br_url, trunk_path)
+  expected_output = svntest.wc.State(wc_dir, {
+    'trunk' : Item(verb='Sending'),
+    'trunk/A/mu' : Item(verb='Sending'),
+    })
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        None)
+
+  # Now test blame, first without the -g option
+  # Without -g, the merged lines are attributed to the merge commit r7.
+  expected_output = [ "     3    jrandom New version of file 'mu'.\n",
+                      "     3    jrandom 2nd line in file 'mu'.\n",
+                      "     7    jrandom new 3rd line in file 'mu'.\n",
+                      "     7    jrandom add 3.5 line in file 'mu'.\n",
+                      "     3    jrandom 4th line in file 'mu'.\n",
+                      "     3    jrandom 5th line in file 'mu'.\n",
+                      "     3    jrandom 6th line in file 'mu'.\n"]
+
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'blame', mu_path)
+
+  # Next test with the -g option
+  # the branch modifications at revision 5 & 6 should show in the output
+  # ('G' marks lines whose attribution came through a merge).
+  expected_output = [ "       3    jrandom New version of file 'mu'.\n",
+                      "       3    jrandom 2nd line in file 'mu'.\n",
+                      "G      5    jrandom new 3rd line in file 'mu'.\n",
+                      "G      6    jrandom add 3.5 line in file 'mu'.\n",
+                      "       3    jrandom 4th line in file 'mu'.\n",
+                      "       3    jrandom 5th line in file 'mu'.\n",
+                      "       3    jrandom 6th line in file 'mu'.\n"]
+
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'blame', '-g', mu_path)
+
+  # Now test with -rN:M
+  # Lines older than the range start are shown unattributed ('-').
+  expected_output = [ "     -          - New version of file 'mu'.\n",
+                      "     -          - 2nd line in file 'mu'.\n",
+                      "     7    jrandom new 3rd line in file 'mu'.\n",
+                      "     7    jrandom add 3.5 line in file 'mu'.\n",
+                      "     -          - 4th line in file 'mu'.\n",
+                      "     -          - 5th line in file 'mu'.\n",
+                      "     -          - 6th line in file 'mu'.\n"]
+
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'blame', '-r', '4:head', mu_path)
+
+  # Next test with the -g option with -rN:M
+  expected_output = [ "       -          - New version of file 'mu'.\n",
+                      "       -          - 2nd line in file 'mu'.\n",
+                      "G      5    jrandom new 3rd line in file 'mu'.\n",
+                      "G      6    jrandom add 3.5 line in file 'mu'.\n",
+                      "       -          - 4th line in file 'mu'.\n",
+                      "       -          - 5th line in file 'mu'.\n",
+                      "       -          - 6th line in file 'mu'.\n"]
+
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'blame', '-g', '-r', '6:head', mu_path)
+
+#----------------------------------------------------------------------
+
+@SkipUnless(server_has_mergeinfo)
+@XFail()
+@Issue(3862)
+def merge_sensitive_blame_and_empty_mergeinfo(sbox):
+  "blame -g handles changes from empty mergeinfo"
+
+  # XFail: issue #3862 -- see the expected-vs-actual output below.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  wc_disk, wc_status = set_up_branch(sbox, True)
+
+  A_COPY_path = os.path.join(wc_dir, 'A_COPY')
+  psi_path = os.path.join(wc_dir, 'A', 'D', 'H', 'psi')
+  psi_COPY_path = os.path.join(wc_dir, 'A_COPY', 'D', 'H', 'psi')
+
+  # Make an edit to A/D/H/psi in r3.
+  svntest.main.file_append(psi_path, "trunk edit in revision three.\n")
+  sbox.simple_commit(message='trunk edit')
+
+  # Merge r3 from A to A_COPY, reverse merge r3 from A/D/H/psi
+  # to A_COPY/D/H/psi, and commit as r4. This results in empty
+  # mergeinfo on A_COPY/D/H/psi.
+  svntest.main.run_svn(None, 'up', wc_dir)
+  svntest.main.run_svn(None, 'merge', '-c3',
+                       sbox.repo_url + '/A', A_COPY_path)
+  svntest.main.run_svn(None, 'merge', '-c-3',
+                       sbox.repo_url + '/A/D/H/psi', psi_COPY_path)
+  sbox.simple_commit(message='Sync merge A to A_COPY excepting A_COPY/D/H/psi')
+
+  # Make an edit to A/D/H/psi in r5.
+  svntest.main.file_append(psi_path, "trunk edit in revision five.\n")
+  sbox.simple_commit(message='trunk edit')
+
+  # Sync merge A/D/H/psi to A_COPY/D/H/psi and commit as r6. This replaces
+  # the empty mergeinfo on A_COPY/D/H/psi with '/A/D/H/psi:2-5'.
+  svntest.main.run_svn(None, 'up', wc_dir)
+  svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A/D/H/psi',
+                       psi_COPY_path)
+  sbox.simple_commit(message='Sync merge A/D/H/psi to A_COPY/D/H/psi')
+
+  # Check the blame -g output:
+  # Currently this test fails because the trunk edit done in r3 is
+  # reported as having been done in r5.
+  #
+  # >svn blame -g A_COPY\D\H\psi
+  #        1    jrandom This is the file 'psi'.
+  # G      5    jrandom trunk edit in revision three.
+  # G      5    jrandom trunk edit in revision five.
+  expected_output = [
+    "     1    jrandom This is the file 'psi'.\n",
+    "G      3    jrandom trunk edit in revision three.\n",
+    "G      5    jrandom trunk edit in revision five.\n"]
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'blame', '-g', psi_COPY_path)
+
+def blame_multiple_targets(sbox):
+  "blame multiple target"
+
+  # Blaming several targets at once: a missing target should produce a
+  # warning (not a hard error) while the valid target is still blamed.
+  sbox.build()
+
+  # First, make a new revision of iota.
+  sbox.simple_append('iota', "New contents for iota\n")
+  sbox.simple_commit()
+  iota = sbox.ospath('iota')
+
+  expected_output = [
+    "     1    jrandom This is the file 'iota'.\n",
+    "     2    jrandom New contents for iota\n",
+  ]
+
+  # We use --force to avoid an early bail from the current blame code,
+  # that performs a property check before the actual blame.
+
+  # Working-copy case: W155010 for the node that is not in the WC.
+  non_existent = os.path.join(sbox.wc_dir, 'non-existent')
+  svntest.actions.run_and_verify_svn(None,
+                                     ".*W155010: The node.*non-existent'.*",
+                                     'blame', non_existent, iota,
+                                     '--force')
+
+  iota_url = sbox.repo_url + '/iota'
+  non_existent_url = sbox.repo_url + '/non-existent'
+
+  # SVN_ERR_FS_NOT_FILE | SVN_ERR_FS_NOT_FOUND
+  svntest.actions.run_and_verify_svn(None,
+                                     ".*W1600(13|17): '.*non-existent' .*not",
+                                     'blame', non_existent_url, iota_url,
+                                     '--force')
+
+
+@Issue(4034)
+def blame_eol_handling(sbox):
+  "blame it on the eol handling"
+
+  sbox.build()
+
+  # 'native' resolves to the platform's line ending.
+  if os.name == 'nt':
+    native_eol = '\r\n'
+  else:
+    native_eol = '\n'
+
+  # For each eol flavor two files are created: f1 without svn:eol-style
+  # and f2 with it set.  Each loop iteration makes two commits, hence
+  # the per-flavor base revision 2, 4, 6, 8.
+  for eol, prop, rev in [ ('\r', 'CR', 2),
+                          ('\n', 'LF', 4),
+                          ('\r\n', 'CRLF', 6),
+                          (native_eol, 'native', 8) ]:
+
+    f1 = sbox.ospath('blame-%s' % prop)
+    f2 = sbox.ospath('blame-%s-prop' % prop)
+
+    file_data = 'line 1 ' + eol + \
+                'line 2 ' + eol + \
+                'line 3 ' + eol + \
+                'line 4 ' + eol + \
+                'line 5 ' + eol
+
+    svntest.main.file_write(f1, file_data, mode='wb')
+    svntest.main.file_write(f2, file_data, mode='wb')
+
+    sbox.simple_add('blame-%s' % prop,
+                    'blame-%s-prop' % prop)
+    sbox.simple_propset('svn:eol-style', prop, 'blame-%s-prop' % prop)
+    sbox.simple_commit()
+
+    # Second commit: insert lines 2a and 4a into both files.
+    file_data = 'line 1 ' + eol + \
+                'line 2 ' + eol + \
+                'line 2a' + eol + \
+                'line 3 ' + eol + \
+                'line 4 ' + eol + \
+                'line 4a' + eol + \
+                'line 5 ' + eol
+
+    svntest.main.file_write(f1, file_data, mode='wb')
+    svntest.main.file_write(f2, file_data, mode='wb')
+
+    sbox.simple_commit()
+
+    # The inserted lines belong to the second commit (rev + 1); all
+    # others keep the first commit's revision.
+    expected_output = [
+      '     %d    jrandom line 1 \n' % rev,
+      '     %d    jrandom line 2 \n' % rev,
+      '     %d    jrandom line 2a\n' % (rev + 1),
+      '     %d    jrandom line 3 \n' % rev,
+      '     %d    jrandom line 4 \n' % rev,
+      '     %d    jrandom line 4a\n' % (rev + 1),
+      '     %d    jrandom line 5 \n' % rev,
+    ]
+
+    svntest.actions.run_and_verify_svn(expected_output, [],
+                                       'blame', f1)
+
+    svntest.actions.run_and_verify_svn(expected_output, [],
+                                       'blame', f2)
+
+    # Third write is deliberately NOT committed: 'line 3b' is a local
+    # modification and must show up unattributed ('-').
+    file_data = 'line 1 ' + eol + \
+                'line 2 ' + eol + \
+                'line 2a' + eol + \
+                'line 3 ' + eol + \
+                'line 3b' + eol + \
+                'line 4 ' + eol + \
+                'line 4a' + eol + \
+                'line 5 ' + eol
+
+    svntest.main.file_write(f1, file_data, mode='wb')
+    svntest.main.file_write(f2, file_data, mode='wb')
+
+    expected_output = [
+      '     %d    jrandom line 1 \n' % rev,
+      '     %d    jrandom line 2 \n' % rev,
+      '     %d    jrandom line 2a\n' % (rev + 1),
+      '     %d    jrandom line 3 \n' % rev,
+      '     -          - line 3b\n',
+      '     %d    jrandom line 4 \n' % rev,
+      '     %d    jrandom line 4a\n' % (rev + 1),
+      '     %d    jrandom line 5 \n' % rev,
+    ]
+
+    svntest.actions.run_and_verify_svn(expected_output, [],
+                                       'blame', f1)
+
+    svntest.actions.run_and_verify_svn(expected_output, [],
+                                       'blame', f2)
+
+
+@SkipUnless(svntest.main.server_has_reverse_get_file_revs)
+def blame_youngest_to_oldest(sbox):
+  "blame_youngest_to_oldest"
+
+  # Reverse blame (-rN:M with N > M) attributes a line to the revision
+  # that REMOVED it, and requires server-side reverse get-file-revs.
+  sbox.build()
+
+  # First, make a new revision of iota.
+  # NOTE(review): the file handles opened here and below are never
+  # explicitly closed; harmless in test code but worth tidying upstream.
+  iota = sbox.ospath('iota')
+  orig_line = open(iota).read()
+  line = "New contents for iota\n"
+  svntest.main.file_append(iota, line)
+  sbox.simple_commit() #r2
+
+  # Move the file, to check that the operation will peg correctly.
+  iota_moved = sbox.ospath('iota_moved')
+  sbox.simple_move('iota', 'iota_moved')
+  sbox.simple_commit() #r3
+
+  # Delete a line.
+  open(iota_moved, 'w').write(line)
+  sbox.simple_commit() #r4
+
+  # Reverse range: the original r1 line was deleted in r4, so r4 owns it.
+  expected_output = [
+    '     %d    jrandom %s\n' % (4, orig_line[:-1]),
+  ]
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'blame', '-r4:1', iota_moved)
+
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'blame', '-rHEAD:1', iota_moved)
+
+  # Forward range for comparison: the surviving line was added in r2.
+  expected_output = [
+    '     %d    jrandom %s\n' % (2, line[:-1]),
+  ]
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'blame', '-r1:HEAD', iota_moved)
+
+@Issue(4467)
+def blame_reverse_no_change(sbox):
+  "blame reverse towards a revision with no change"
+
+  # Regression test for issue #4467: reverse blame ranges whose start
+  # revision did not touch the file.
+  sbox.build()
+
+  # Introduce a revision where iota doesn't change!
+  sbox.simple_propset('a', 'b', 'A')
+  sbox.simple_commit('') #r2
+
+  sbox.simple_append('iota', 'new line\n')
+  sbox.simple_commit('') #r3
+
+  sbox.simple_append('iota', 'another new line\n')
+  sbox.simple_commit('') #r4
+
+  # Forward blame from the no-change r2: the pre-existing line is
+  # unattributed, the appended lines carry their revisions.
+  expected_output = [
+    '     -          - This is the file \'iota\'.\n',
+    '     3    jrandom new line\n',
+    '     4    jrandom another new line\n',
+  ]
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'blame', '-r2:HEAD', sbox.ospath('iota'))
+
+  expected_output = [
+    '     -          - This is the file \'iota\'.\n',
+  ]
+  # This used to trigger an assertion on 1.9.x before 1.9.0
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'blame', '-rHEAD:2', sbox.ospath('iota'))
+
+  # Drop the middle line
+  sbox.simple_append('iota', 'This is the file \'iota\'.\n'
+                             'another new line\n', truncate=True)
+  sbox.simple_commit('') #r5
+
+  # Back to start
+  sbox.simple_append('iota', 'This is the file \'iota\'.\n', truncate=True)
+  sbox.simple_commit('') #r6
+
+  # Reverse blame attributes each line to the revision that deleted it;
+  # lines still present at the range start show as '-'.
+  expected_output = [
+    '     -          - This is the file \'iota\'.\n',
+  ]
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'blame', '-rHEAD:2', sbox.ospath('iota'))
+
+  expected_output = [
+    '     -          - This is the file \'iota\'.\n',
+    '     5    jrandom new line\n',
+  ]
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'blame', '-rHEAD:3', sbox.ospath('iota'))
+
+  expected_output = [
+    '     -          - This is the file \'iota\'.\n',
+    '     5    jrandom new line\n',
+    '     6    jrandom another new line\n',
+  ]
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'blame', '-rHEAD:4', sbox.ospath('iota'))
+
+  expected_output = [
+    '     -          - This is the file \'iota\'.\n',
+    '     6    jrandom another new line\n',
+  ]
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'blame', '-rHEAD:5', sbox.ospath('iota'))
+
+  expected_output = [
+    '     -          - This is the file \'iota\'.\n',
+  ]
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'blame', '-rHEAD:6', sbox.ospath('iota'))
+
+
+  expected_output = [
+    '     -          - This is the file \'iota\'.\n',
+    '     5    jrandom new line\n',
+  ]
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'blame', '-r5:3', sbox.ospath('iota'))
+
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+              blame_space_in_name,
+              blame_binary,
+              blame_directory,
+              blame_in_xml,
+              blame_on_unknown_revision,
+              blame_peg_rev,
+              blame_eol_styles,
+              blame_ignore_whitespace,
+              blame_ignore_eolstyle,
+              blame_merge_info,
+              blame_merge_out_of_range,
+              blame_peg_rev_file_not_in_head,
+              blame_file_not_in_head,
+              blame_output_after_merge,
+              merge_sensitive_blame_and_empty_mergeinfo,
+              blame_multiple_targets,
+              blame_eol_handling,
+              blame_youngest_to_oldest,
+              blame_reverse_no_change,
+             ]
+
+# Standalone entry point: hand the list to the svntest harness, which
+# selects tests by 1-based index (hence the leading None).
+if __name__ == '__main__':
+  svntest.main.run_tests(test_list)
+  # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/cat_tests.py b/subversion/tests/cmdline/cat_tests.py
new file mode 100755
index 0000000..8246bc2
--- /dev/null
+++ b/subversion/tests/cmdline/cat_tests.py
@@ -0,0 +1,276 @@
+#!/usr/bin/env python
+#
+# cat_tests.py: testing cat cases.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os, re
+
+# Our testing module
+import svntest
+
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+
+#----------------------------------------------------------------------
+
+def cat_local_directory(sbox):
+  "cat a local directory"
+  sbox.build(read_only = True)
+
+  A_path = os.path.join(sbox.wc_dir, 'A')
+
+  # 'svn cat' on a directory is a warning (W195007), not a hard error;
+  # exit code 1 is still expected.
+  expected_err = "svn: warning: W195007: '" + \
+                 re.escape(os.path.abspath(A_path)) + \
+                 "' refers to a directory"
+
+  svntest.actions.run_and_verify_svn2(None, expected_err,
+                                      1, 'cat', A_path)
+
+def cat_remote_directory(sbox):
+  "cat a remote directory"
+  sbox.build(create_wc = False, read_only = True)
+
+  # Same W195007 warning as the local case, but phrased for a URL target.
+  A_url = sbox.repo_url + '/A'
+  expected_err = "svn: warning: W195007: URL '" + A_url + \
+                 "' refers to a directory\n.*"
+
+  svntest.actions.run_and_verify_svn2(None, expected_err,
+                                      1, 'cat', A_url)
+
+def cat_base(sbox):
+  "cat a file at revision BASE"
+  sbox.build(read_only = True)
+
+  wc_dir = sbox.wc_dir
+
+  # Locally modify mu; plain 'svn cat' must still show the pristine
+  # (BASE) text, not the appended local change.
+  mu_path = os.path.join(wc_dir, 'A', 'mu')
+  svntest.main.file_append(mu_path, 'Appended text')
+
+  exit_code, outlines, errlines = svntest.main.run_svn(0, 'cat', mu_path)
+
+  # Verify the expected output
+  expected_output = svntest.main.greek_state.desc['A/mu'].contents
+  if len(outlines) != 1 or outlines[0] != expected_output:
+    raise svntest.Failure('Cat failed: expected "%s", but received "%s"' % \
+      (expected_output, outlines))
+
+def cat_nonexistent_file(sbox):
+  "cat a nonexistent file"
+  sbox.build(read_only = True)
+
+  wc_dir = sbox.wc_dir
+
+  # An unversioned/absent working-copy path yields warning W200005.
+  bogus_path = os.path.join(wc_dir, 'A', 'bogus')
+  expected_err = "svn: warning: W200005: '" + \
+                 re.escape(os.path.abspath(bogus_path)) + \
+                 "' is not under version control"
+
+  svntest.actions.run_and_verify_svn2(None, expected_err, 1,
+                                      'cat', bogus_path)
+
+def cat_skip_uncattable(sbox):
+  "cat should skip uncattable resources"
+  sbox.build(read_only = True)
+
+  wc_dir = sbox.wc_dir
+  dir_path = os.path.join(wc_dir, 'A', 'D')
+  # Drop an unversioned file into A/D (the handle is not kept; the empty
+  # file on disk is all that matters).
+  new_file_path = os.path.join(dir_path, 'new')
+  open(new_file_path, 'w')
+  item_list = os.listdir(dir_path)
+
+  # First we test running 'svn cat' on individual objects, expecting
+  # warnings for unversioned files and for directories.  Then we try
+  # running 'svn cat' on multiple targets at once, and make sure we
+  # get the warnings we expect.
+
+  # item_list has all the files and directories under 'dir_path'
+  for file in item_list:
+    # Skip the .svn administrative directory itself.
+    if file == svntest.main.get_admin_name():
+      continue
+    item_to_cat = os.path.join(dir_path, file)
+    if item_to_cat == new_file_path:
+      expected_err = "svn: warning: W200005: '" + \
+                     re.escape(os.path.abspath(item_to_cat)) + \
+                     "' is not under version control"
+      svntest.actions.run_and_verify_svn2(None, expected_err, 1,
+                                          'cat', item_to_cat)
+
+    elif os.path.isdir(item_to_cat):
+      expected_err = "svn: warning: W195007: '" + \
+                     re.escape(os.path.abspath(item_to_cat)) + \
+                     "' refers to a directory"
+      svntest.actions.run_and_verify_svn2(None, expected_err, 1,
+                                          'cat', item_to_cat)
+    else:
+      svntest.actions.run_and_verify_svn(["This is the file '"+file+"'.\n"],
+                                         [], 'cat', item_to_cat)
+
+  # Multi-target runs: good targets are catted, bad ones only warn.
+  G_path = os.path.join(dir_path, 'G')
+  rho_path = os.path.join(G_path, 'rho')
+
+  expected_out = "This is the file 'rho'.\n"
+  expected_err1 = "svn: warning: W195007: '" + \
+                  re.escape(os.path.abspath(G_path)) + \
+                  "' refers to a directory\n"
+  svntest.actions.run_and_verify_svn2(expected_out, expected_err1, 1,
+                                      'cat', rho_path, G_path)
+
+  expected_err2 = "svn: warning: W200005: '" + \
+                  re.escape(os.path.abspath(new_file_path)) + \
+                  "' is not under version control\n"
+  svntest.actions.run_and_verify_svn2(expected_out, expected_err2, 1,
+                                      'cat', rho_path, new_file_path)
+
+  # With both bad targets at once, a summarizing E200009 error follows
+  # the two warnings.
+  expected_err3 = expected_err1 + expected_err2 + \
+                  ".*svn: E200009: Could not cat all targets because some targets"
+  expected_err_re = re.compile(expected_err3, re.DOTALL)
+
+  exit_code, output, error = svntest.main.run_svn(1, 'cat', rho_path, G_path, new_file_path)
+  # Filter out maintainer-mode stack-trace lines before matching stderr.
+  error = [line for line in error
+           if not re.compile(svntest.main.stack_trace_regexp).match(line)]
+
+  # Verify output
+  if output[0] != expected_out:
+    raise svntest.Failure('Cat failed: expected "%s", but received "%s"' % \
+      (expected_out, "".join(output)))
+
+  # Verify error
+  if not expected_err_re.match("".join(error)):
+    raise svntest.Failure('Cat failed: expected error "%s", but received "%s"' % \
+      (expected_err3, "".join(error)))
+
+# Test for issue #3560 'svn_wc_status3() returns incorrect status for
+# unversioned files'.
+@Issue(3560)
+def cat_unversioned_file(sbox):
+  "cat an unversioned file parent dir thinks exists"
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  iota_path = os.path.join(wc_dir, 'iota')
+
+  # Delete a file and commit the deletion.
+  svntest.actions.run_and_verify_svn2(None, [], 0,
+                                      'delete', iota_path)
+  svntest.actions.run_and_verify_svn2(None, [], 0,
+                                      'commit', '-m', 'delete a file',
+                                      iota_path)
+
+  # Now try to cat the deleted file, it should be reported as unversioned.
+  expected_error = "svn: warning: W200005: '" + \
+                   re.escape(os.path.abspath(iota_path)) + \
+                   "' is not under version control"
+  svntest.actions.run_and_verify_svn2([], expected_error, 1,
+                                      'cat', iota_path)
+
+  # Put an unversioned file at 'iota' and try to cat it again, the result
+  # should still be the same.
+  svntest.main.file_write(iota_path, "This the unversioned file 'iota'.\n")
+  svntest.actions.run_and_verify_svn2([], expected_error, 1,
+                                      'cat', iota_path)
+
+def cat_keywords(sbox):
+  "cat a file with the svn:keywords property"
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  iota_path = os.path.join(wc_dir, 'iota')
+
+  svntest.actions.run_and_verify_svn(["This is the file 'iota'.\n"],
+                                     [], 'cat', iota_path)
+
+  # Add a $Revision$ keyword anchor, enable keyword expansion, and
+  # commit (r2); 'svn cat' must then show the expanded keyword.
+  svntest.main.file_append(iota_path, "$Revision$\n")
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'propset', 'svn:keywords', 'Revision',
+                                     iota_path)
+
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'ci', '-m', 'r2', wc_dir)
+
+  svntest.actions.run_and_verify_svn(["This is the file 'iota'.\n", "$Revision: 2 $\n"],
+                                     [], 'cat', iota_path)
+
+def cat_url_special_characters(sbox):
+  """special characters in svn cat URL"""
+  sbox.build(create_wc = False)
+  wc_dir = sbox.wc_dir
+
+  # Percent-escaped URLs ('%2E' = '.', '%2F' = '/') must canonicalize to
+  # the same directory, and so produce the same W195007 warning.
+  special_urls = [sbox.repo_url + '/A' + '/%2E',
+                  sbox.repo_url + '%2F' + 'A']
+
+  expected_err = "svn: warning: W195007: URL '" + sbox.repo_url + '/A' + \
+                 "' refers to a directory\n.*"
+
+  for url in special_urls:
+    svntest.actions.run_and_verify_svn2(None, expected_err, 1,
+                                        'cat', url)
+
+def cat_non_existing_remote_file(sbox):
+  """cat non-existing remote file"""
+  sbox.build(create_wc = False)
+  non_existing_path = sbox.repo_url + '/non-existing'
+
+  # NOTE(review): split('/')[1] on a URL like 'file:///...' or
+  # 'http://...' is the empty string between the scheme's slashes, so
+  # the appended path-component check is vacuous; '[-1]' (the basename,
+  # 'non-existing') was presumably intended -- verify upstream.
+  expected_err = "svn: warning: W160013: .*not found.*" + \
+                 non_existing_path.split('/')[1]
+
+  # cat operation on non-existing remote path should return 1
+  svntest.actions.run_and_verify_svn2(None, expected_err, 1,
+                                      'cat', non_existing_path)
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+              cat_local_directory,
+              cat_remote_directory,
+              cat_base,
+              cat_nonexistent_file,
+              cat_skip_uncattable,
+              cat_unversioned_file,
+              cat_keywords,
+              cat_url_special_characters,
+              cat_non_existing_remote_file,
+             ]
+
+# Standalone entry point: hand the list to the svntest harness, which
+# selects tests by 1-based index (hence the leading None).
+if __name__ == '__main__':
+  svntest.main.run_tests(test_list)
+  # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/changelist_tests.py b/subversion/tests/cmdline/changelist_tests.py
new file mode 100755
index 0000000..1bbb4a6
--- /dev/null
+++ b/subversion/tests/cmdline/changelist_tests.py
@@ -0,0 +1,1213 @@
+#!/usr/bin/env python
+#
+# changelist_tests.py: testing changelist uses.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import string, sys, os, re
+
+# Our testing module
+import svntest
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+
+######################################################################
+# Utilities
+
+
+def mod_all_files(wc_dir, new_text):
+ """Walk over working copy WC_DIR, appending NEW_TEXT to all the
+ files in that tree (but not inside the .svn areas of that tree)."""
+
+ dot_svn = svntest.main.get_admin_name()
+ for dirpath, dirs, files in os.walk(wc_dir):
+ if dot_svn in dirs:
+ dirs.remove(dot_svn)
+ for name in files:
+ svntest.main.file_append(os.path.join(dirpath, name), new_text)
+
+def changelist_all_files(wc_dir, name_func):
+ """Walk over working copy WC_DIR, adding versioned files to
+ changelists named by invoking NAME_FUNC(full-path-of-file) and
+ noting its string return value (or None, if we wish to remove the
+ file from a changelist)."""
+
+ dot_svn = svntest.main.get_admin_name()
+ for dirpath, dirs, files in os.walk(wc_dir):
+ if dot_svn in dirs:
+ dirs.remove(dot_svn)
+ for name in files:
+ full_path = os.path.join(dirpath, name)
+ clname = name_func(full_path)
+ if not clname:
+ svntest.main.run_svn(None, "changelist", "--remove", full_path)
+ else:
+ svntest.main.run_svn(None, "changelist", clname, full_path)
+
+def clname_from_lastchar_cb(full_path):
+ """Callback for changelist_all_files() that returns a changelist
+ name matching the last character in the file's name. For example,
+ after running this on a greek tree where every file has some text
+ modification, 'svn status' shows:
+
+ --- Changelist 'a':
+ M A/B/lambda
+ M A/B/E/alpha
+ M A/B/E/beta
+ M A/D/gamma
+ M A/D/H/omega
+ M iota
+
+ --- Changelist 'u':
+ M A/mu
+ M A/D/G/tau
+
+ --- Changelist 'i':
+ M A/D/G/pi
+ M A/D/H/chi
+ M A/D/H/psi
+
+ --- Changelist 'o':
+ M A/D/G/rho
+ """
+ return full_path[-1]
+
+
+# Regular expressions for 'svn changelist' output.
+_re_cl_rem_pattern = "^D \[(.*)\] (.*)"
+_re_cl_skip = re.compile("Skipped '(.*)'")
+_re_cl_add = re.compile("^A \[(.*)\] (.*)")
+_re_cl_rem = re.compile(_re_cl_rem_pattern)
+
+def verify_changelist_output(output, expected_adds=None,
+ expected_removals=None,
+ expected_skips=None):
+  """Compare lines of OUTPUT from 'svn changelist' against
+  EXPECTED_ADDS (a dictionary mapping paths to changelist names),
+  EXPECTED_REMOVALS (a dictionary whose keys are paths; values unused), and
+  EXPECTED_SKIPS (a dictionary whose keys are paths; values unused).
+
+  If EXPECTED_SKIPS is None or empty, 'Skipped' lines are filtered out."""
+
+ num_expected = 0
+ if expected_adds:
+ num_expected += len(expected_adds)
+ if expected_removals:
+ num_expected += len(expected_removals)
+ if expected_skips:
+ num_expected += len(expected_skips)
+
+ if not expected_skips:
+ output = [line for line in output if (not _re_cl_skip.match(line))]
+
+ for line in output:
+ line = line.rstrip()
+ match = _re_cl_rem.match(line)
+ if match \
+ and expected_removals \
+ and match.group(2) in expected_removals:
+ continue
+ elif match:
+ raise svntest.Failure("Unexpected changelist removal line: " + line)
+ match = _re_cl_add.match(line)
+ if match \
+ and expected_adds \
+ and expected_adds.get(match.group(2)) == match.group(1):
+ continue
+ elif match:
+ raise svntest.Failure("Unexpected changelist add line: " + line)
+ match = _re_cl_skip.match(line)
+ if match \
+ and expected_skips \
+ and match.group(2) in expected_skips:
+ continue
+ elif match:
+ raise svntest.Failure("Unexpected changelist skip line: " + line)
+ raise svntest.Failure("Unexpected line: " + line)
+
+ if len(output) != num_expected:
+ raise svntest.Failure("Unexpected number of 'svn changelist' output " +
+ "lines (%d vs %d)" % (len(output), num_expected))
+
+def verify_pget_output(output, expected_props):
+ """Compare lines of OUTPUT from 'svn propget' against EXPECTED_PROPS
+ (a dictionary mapping paths to property values)."""
+
+ _re_pget = re.compile('^(.*) - (.*)$')
+ actual_props = {}
+ for line in output:
+ try:
+ path, prop = line.rstrip().split(' - ')
+ except:
+ raise svntest.Failure("Unexpected output line: " + line)
+ actual_props[path] = prop
+ if expected_props != actual_props:
+ raise svntest.Failure("Got unexpected property results\n"
+ "\tExpected: %s\n"
+ "\tActual: %s" % (str(expected_props),
+ str(actual_props)))
+
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+
+#----------------------------------------------------------------------
+
+def add_remove_changelists(sbox):
+ "add and remove files from changelists"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ ### 'Skip' notifications
+
+ def expected_skips_under(*greek_path):
+ "return a dict mapping Greek-tree directories below GREEK_PATH to None"
+
+ expected_skips = {}
+ for path in expected_skips_all:
+ if path.startswith(os.path.join(wc_dir, *greek_path)):
+ expected_skips[path] = None
+
+ return expected_skips
+
+ def all_parents(expected_adds):
+ """return a dict mapping Greek-tree directories above directories in
+ EXPECTED_ADDS to None"""
+
+ expected_skips = {}
+ for path in expected_adds.keys():
+ if not os.path.isdir(path):
+ path = os.path.dirname(path)
+
+ while path != wc_dir:
+ expected_skips[path] = None
+ path = os.path.dirname(path)
+
+ expected_skips[wc_dir] = None
+ return expected_skips
+
+ # all dirs in the Greek tree
+ expected_skips_all = dict([(x, None) for x in [
+ sbox.ospath(''),
+ sbox.ospath('A'),
+ sbox.ospath('A/B'),
+ sbox.ospath('A/B/E'),
+ sbox.ospath('A/B/F'),
+ sbox.ospath('A/C'),
+ sbox.ospath('A/D'),
+ sbox.ospath('A/D/G'),
+ sbox.ospath('A/D/H'),
+ ]])
+
+ expected_skips_wc_dir = { wc_dir : None }
+
+ ### First, we play with just adding to changelists ###
+
+ # svn changelist foo WC_DIR
+ exit_code, output, errput = svntest.main.run_svn(None, "changelist", "foo",
+ wc_dir)
+ verify_changelist_output(output) # nothing expected
+
+ # svn changelist foo WC_DIR --depth files
+ exit_code, output, errput = svntest.main.run_svn(None, "changelist", "foo",
+ "--depth", "files",
+ wc_dir)
+ expected_adds = {
+ os.path.join(wc_dir, 'iota') : 'foo',
+ }
+ verify_changelist_output(output, expected_adds)
+
+ # svn changelist foo WC_DIR --depth infinity
+ exit_code, output, errput = svntest.main.run_svn(None, "changelist", "foo",
+ "--depth", "infinity",
+ wc_dir)
+ expected_adds = {
+ sbox.ospath('A/B/E/alpha') : 'foo',
+ sbox.ospath('A/B/E/beta') : 'foo',
+ sbox.ospath('A/B/lambda') : 'foo',
+ sbox.ospath('A/D/G/pi') : 'foo',
+ sbox.ospath('A/D/G/rho') : 'foo',
+ sbox.ospath('A/D/G/tau') : 'foo',
+ sbox.ospath('A/D/H/chi') : 'foo',
+ sbox.ospath('A/D/H/omega') : 'foo',
+ sbox.ospath('A/D/H/psi') : 'foo',
+ sbox.ospath('A/D/gamma') : 'foo',
+ sbox.ospath('A/mu') : 'foo',
+ }
+ verify_changelist_output(output, expected_adds)
+
+ ### Now, change some changelists ###
+
+ # svn changelist bar WC_DIR/A/D --depth infinity
+ exit_code, output, errput = svntest.main.run_svn(".*", "changelist", "bar",
+ "--depth", "infinity",
+ sbox.ospath('A/D'))
+ expected_adds = {
+ sbox.ospath('A/D/G/pi') : 'bar',
+ sbox.ospath('A/D/G/rho') : 'bar',
+ sbox.ospath('A/D/G/tau') : 'bar',
+ sbox.ospath('A/D/H/chi') : 'bar',
+ sbox.ospath('A/D/H/omega') : 'bar',
+ sbox.ospath('A/D/H/psi') : 'bar',
+ sbox.ospath('A/D/gamma') : 'bar',
+ }
+ expected_removals = expected_adds
+ verify_changelist_output(output, expected_adds, expected_removals)
+
+ # svn changelist baz WC_DIR/A/D/H --depth infinity
+ exit_code, output, errput = svntest.main.run_svn(".*", "changelist", "baz",
+ "--depth", "infinity",
+ sbox.ospath('A/D/H'))
+ expected_adds = {
+ sbox.ospath('A/D/H/chi') : 'baz',
+ sbox.ospath('A/D/H/omega') : 'baz',
+ sbox.ospath('A/D/H/psi') : 'baz',
+ }
+ expected_removals = expected_adds
+ verify_changelist_output(output, expected_adds, expected_removals)
+
+ ### Now, let's selectively rename some changelists ###
+
+ # svn changelist foo-rename WC_DIR --depth infinity --changelist foo
+ exit_code, output, errput = svntest.main.run_svn(".*", "changelist",
+ "foo-rename",
+ "--depth", "infinity",
+ "--changelist", "foo",
+ wc_dir)
+ expected_adds = {
+ sbox.ospath('A/B/E/alpha') : 'foo-rename',
+ sbox.ospath('A/B/E/beta') : 'foo-rename',
+ sbox.ospath('A/B/lambda') : 'foo-rename',
+ sbox.ospath('A/mu') : 'foo-rename',
+ sbox.ospath('iota') : 'foo-rename',
+ }
+ expected_removals = expected_adds
+ verify_changelist_output(output, expected_adds, expected_removals)
+
+ # svn changelist bar WC_DIR --depth infinity
+ # --changelist foo-rename --changelist baz
+ exit_code, output, errput = svntest.main.run_svn(
+ ".*", "changelist", "bar", "--depth", "infinity",
+ "--changelist", "foo-rename", "--changelist", "baz", wc_dir)
+
+ expected_adds = {
+ sbox.ospath('A/B/E/alpha') : 'bar',
+ sbox.ospath('A/B/E/beta') : 'bar',
+ sbox.ospath('A/B/lambda') : 'bar',
+ sbox.ospath('A/D/H/chi') : 'bar',
+ sbox.ospath('A/D/H/omega') : 'bar',
+ sbox.ospath('A/D/H/psi') : 'bar',
+ sbox.ospath('A/mu') : 'bar',
+ sbox.ospath('iota') : 'bar',
+ }
+ expected_removals = expected_adds
+ verify_changelist_output(output, expected_adds, expected_removals)
+
+ ### Okay. Time to remove some stuff from changelists now. ###
+
+ # svn changelist --remove WC_DIR
+ exit_code, output, errput = svntest.main.run_svn(None, "changelist",
+ "--remove", wc_dir)
+ verify_changelist_output(output) # nothing expected
+
+ # svn changelist --remove WC_DIR --depth files
+ exit_code, output, errput = svntest.main.run_svn(None, "changelist",
+ "--remove",
+ "--depth", "files",
+ wc_dir)
+ expected_removals = {
+ os.path.join(wc_dir, 'iota') : None,
+ }
+ verify_changelist_output(output, None, expected_removals)
+
+ # svn changelist --remove WC_DIR --depth infinity
+ exit_code, output, errput = svntest.main.run_svn(None, "changelist",
+ "--remove",
+ "--depth", "infinity",
+ wc_dir)
+ expected_removals = {
+ sbox.ospath('A/B/E/alpha') : None,
+ sbox.ospath('A/B/E/beta') : None,
+ sbox.ospath('A/B/lambda') : None,
+ sbox.ospath('A/D/G/pi') : None,
+ sbox.ospath('A/D/G/rho') : None,
+ sbox.ospath('A/D/G/tau') : None,
+ sbox.ospath('A/D/H/chi') : None,
+ sbox.ospath('A/D/H/omega') : None,
+ sbox.ospath('A/D/H/psi') : None,
+ sbox.ospath('A/D/gamma') : None,
+ sbox.ospath('A/mu') : None,
+ }
+ verify_changelist_output(output, None, expected_removals)
+
+ ### Add files to changelists based on the last character in their names ###
+
+ changelist_all_files(wc_dir, clname_from_lastchar_cb)
+
+ ### Now, do selective changelist removal ###
+
+ # svn changelist --remove WC_DIR --depth infinity --changelist a
+ exit_code, output, errput = svntest.main.run_svn(None, "changelist",
+ "--remove",
+ "--depth", "infinity",
+ "--changelist", "a",
+ wc_dir)
+ expected_removals = {
+ sbox.ospath('A/B/E/alpha') : None,
+ sbox.ospath('A/B/E/beta') : None,
+ sbox.ospath('A/B/lambda') : None,
+ sbox.ospath('A/D/H/omega') : None,
+ sbox.ospath('A/D/gamma') : None,
+ sbox.ospath('iota') : None,
+ }
+ verify_changelist_output(output, None, expected_removals)
+
+ # svn changelist --remove WC_DIR --depth infinity
+ # --changelist i --changelist o
+ exit_code, output, errput = svntest.main.run_svn(None, "changelist",
+ "--remove",
+ "--depth", "infinity",
+ "--changelist", "i",
+ "--changelist", "o",
+ wc_dir)
+ expected_removals = {
+ sbox.ospath('A/D/G/pi') : None,
+ sbox.ospath('A/D/G/rho') : None,
+ sbox.ospath('A/D/H/chi') : None,
+ sbox.ospath('A/D/H/psi') : None,
+ }
+ verify_changelist_output(output, None, expected_removals)
+
+#----------------------------------------------------------------------
+
+def commit_one_changelist(sbox):
+ "commit with single --changelist"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add a line of text to all the versioned files in the tree.
+ mod_all_files(wc_dir, "New text.\n")
+
+ # Add files to changelists based on the last character in their names.
+ changelist_all_files(wc_dir, clname_from_lastchar_cb)
+
+ # Now, test a commit that uses a single changelist filter (--changelist a).
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/lambda' : Item(verb='Sending'),
+ 'A/B/E/alpha' : Item(verb='Sending'),
+ 'A/B/E/beta' : Item(verb='Sending'),
+ 'A/D/gamma' : Item(verb='Sending'),
+ 'A/D/H/omega' : Item(verb='Sending'),
+ 'iota' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', 'A/D/G/tau', 'A/D/G/pi', 'A/D/H/chi',
+ 'A/D/H/psi', 'A/D/G/rho', wc_rev=1, status='M ')
+ expected_status.tweak('iota', 'A/B/lambda', 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/D/gamma', 'A/D/H/omega', wc_rev=2, status=' ')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status,
+ [],
+ wc_dir,
+ "--changelist",
+ "a")
+
+#----------------------------------------------------------------------
+
+def commit_multiple_changelists(sbox):
+ "commit with multiple --changelist's"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add a line of text to all the versioned files in the tree.
+ mod_all_files(wc_dir, "New text.\n")
+
+ # Add files to changelists based on the last character in their names.
+ changelist_all_files(wc_dir, clname_from_lastchar_cb)
+
+ # Now, test a commit that uses multiple changelist filters
+ # (--changelist=a --changelist=i).
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/lambda' : Item(verb='Sending'),
+ 'A/B/E/alpha' : Item(verb='Sending'),
+ 'A/B/E/beta' : Item(verb='Sending'),
+ 'A/D/gamma' : Item(verb='Sending'),
+ 'A/D/H/omega' : Item(verb='Sending'),
+ 'iota' : Item(verb='Sending'),
+ 'A/D/G/pi' : Item(verb='Sending'),
+ 'A/D/H/chi' : Item(verb='Sending'),
+ 'A/D/H/psi' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', 'A/D/G/tau', 'A/D/G/rho',
+ wc_rev=1, status='M ')
+ expected_status.tweak('iota', 'A/B/lambda', 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/D/gamma', 'A/D/H/omega', 'A/D/G/pi', 'A/D/H/chi',
+ 'A/D/H/psi', wc_rev=2, status=' ')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status,
+ [],
+ wc_dir,
+ "--changelist", "a",
+ "--changelist", "i")
+
+#----------------------------------------------------------------------
+
+def info_with_changelists(sbox):
+ "info --changelist"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add files to changelists based on the last character in their names.
+ changelist_all_files(wc_dir, clname_from_lastchar_cb)
+
+ # Now, test various combinations of changelist specification and depths.
+ for clname in [['a'], ['i'], ['a', 'i']]:
+ for depth in [None, 'files', 'infinity']:
+
+ # Figure out what we expect to see in our info output.
+ expected_paths = []
+ if 'a' in clname:
+ if depth == 'infinity':
+ expected_paths.append('A/B/lambda')
+ expected_paths.append('A/B/E/alpha')
+ expected_paths.append('A/B/E/beta')
+ expected_paths.append('A/D/gamma')
+ expected_paths.append('A/D/H/omega')
+ if depth == 'files' or depth == 'infinity':
+ expected_paths.append('iota')
+ if 'i' in clname:
+ if depth == 'infinity':
+ expected_paths.append('A/D/G/pi')
+ expected_paths.append('A/D/H/chi')
+ expected_paths.append('A/D/H/psi')
+ expected_paths = sorted([os.path.join(wc_dir, x.replace('/', os.sep)) for x in expected_paths])
+
+ # Build the command line.
+ args = ['info', wc_dir]
+ for cl in clname:
+ args.append('--changelist')
+ args.append(cl)
+ if depth:
+ args.append('--depth')
+ args.append(depth)
+
+ # Run 'svn info ...'
+ exit_code, output, errput = svntest.main.run_svn(None, *args)
+
+ # Filter the output for lines that begin with 'Path:', and
+ # reduce even those lines to just the actual path.
+ paths = sorted([x[6:].rstrip() for x in output if x[:6] == 'Path: '])
+
+ # And, compare!
+ if (paths != expected_paths):
+ raise svntest.Failure("Expected paths (%s) and actual paths (%s) "
+ "don't gel" % (str(expected_paths), str(paths)))
+
+#----------------------------------------------------------------------
+
+def diff_with_changelists(sbox):
+ "diff --changelist (wc-wc and repos-wc)"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add a line of text to all the versioned files in the tree.
+ mod_all_files(wc_dir, "New text.\n")
+
+ # Add files to changelists based on the last character in their names.
+ changelist_all_files(wc_dir, clname_from_lastchar_cb)
+
+ # Now, test various combinations of changelist specification and depths.
+ for is_repos_wc in [0, 1]:
+ for clname in [['a'], ['i'], ['a', 'i']]:
+ for depth in ['files', 'infinity']:
+
+ # Figure out what we expect to see in our diff output.
+ expected_paths = []
+ if 'a' in clname:
+ if depth == 'infinity':
+ expected_paths.append('A/B/lambda')
+ expected_paths.append('A/B/E/alpha')
+ expected_paths.append('A/B/E/beta')
+ expected_paths.append('A/D/gamma')
+ expected_paths.append('A/D/H/omega')
+ if depth == 'files' or depth == 'infinity':
+ expected_paths.append('iota')
+ if 'i' in clname:
+ if depth == 'infinity':
+ expected_paths.append('A/D/G/pi')
+ expected_paths.append('A/D/H/chi')
+ expected_paths.append('A/D/H/psi')
+ expected_paths = sorted([os.path.join(wc_dir, x.replace('/', os.sep)) for x in expected_paths])
+
+ # Build the command line.
+ args = ['diff']
+ for cl in clname:
+ args.append('--changelist')
+ args.append(cl)
+ if depth:
+ args.append('--depth')
+ args.append(depth)
+ if is_repos_wc:
+ args.append('--old')
+ args.append(sbox.repo_url)
+ args.append('--new')
+ args.append(sbox.wc_dir)
+ else:
+ args.append(wc_dir)
+
+ # Run 'svn diff ...'
+ exit_code, output, errput = svntest.main.run_svn(None, *args)
+
+ # Filter the output for lines that begin with 'Index:', and
+ # reduce even those lines to just the actual path.
+ paths = sorted([x[7:].rstrip() for x in output if x[:7] == 'Index: '])
+
+ # Diff output on Win32 uses '/' path separators.
+ if sys.platform == 'win32':
+ paths = [x.replace('/', os.sep) for x in paths]
+
+ # And, compare!
+ if (paths != expected_paths):
+ raise svntest.Failure("Expected paths (%s) and actual paths (%s) "
+ "don't gel"
+ % (str(expected_paths), str(paths)))
+
+#----------------------------------------------------------------------
+
+def propmods_with_changelists(sbox):
+ "propset/del/get/list --changelist"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add files to changelists based on the last character in their names.
+ changelist_all_files(wc_dir, clname_from_lastchar_cb)
+
+ # Set property 'name'='value' on all working copy items.
+ svntest.main.run_svn(None, "pset", "--depth", "infinity",
+ "name", "value", wc_dir)
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({'' : Item(props={ 'name' : 'value' })})
+ expected_disk.tweak('A', 'A/B', 'A/B/E', 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/B/F', 'A/B/lambda', 'A/C', 'A/D', 'A/D/G',
+ 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau', 'A/D/H',
+ 'A/D/H/chi', 'A/D/H/omega', 'A/D/H/psi', 'A/D/gamma',
+ 'A/mu', 'iota', props={ 'name' : 'value' })
+
+ svntest.actions.verify_disk(wc_dir, expected_disk, True)
+
+ # Proplist the 'i' changelist
+ exit_code, output, errput = svntest.main.run_svn(None, "proplist", "--depth",
+ "infinity", "--changelist",
+ "i", wc_dir)
+ ### Really simple sanity check on the output of 'proplist'. If we've got
+ ### a proper proplist content checker anywhere, we should probably use it
+ ### instead.
+ if len(output) != 6:
+ raise svntest.Failure
+
+ # Remove the 'name' property from files in the 'o' and 'i' changelists.
+ svntest.main.run_svn(None, "pdel", "--depth", "infinity",
+ "name", "--changelist", "o", "--changelist", "i",
+ wc_dir)
+ expected_disk.tweak('A/D/G/pi', 'A/D/G/rho', 'A/D/H/chi', 'A/D/H/psi',
+ props={})
+ svntest.actions.verify_disk(wc_dir, expected_disk, True)
+
+ # Add 'foo'='bar' property on all files under A/B to depth files and
+ # in changelist 'a'.
+ svntest.main.run_svn(None, "pset", "--depth", "files",
+ "foo", "bar", "--changelist", "a",
+ os.path.join(wc_dir, 'A', 'B'))
+ expected_disk.tweak('A/B/lambda', props={ 'name' : 'value',
+ 'foo' : 'bar' })
+ svntest.actions.verify_disk(wc_dir, expected_disk, True)
+
+ # Add 'bloo'='blarg' property to all files in changelist 'a'.
+ svntest.main.run_svn(None, "pset", "--depth", "infinity",
+ "bloo", "blarg", "--changelist", "a",
+ wc_dir)
+ expected_disk.tweak('A/B/lambda', props={ 'name' : 'value',
+ 'foo' : 'bar',
+ 'bloo' : 'blarg' })
+ expected_disk.tweak('A/B/E/alpha', 'A/B/E/beta', 'A/D/H/omega', 'A/D/gamma',
+ 'iota', props={ 'name' : 'value',
+ 'bloo' : 'blarg' })
+ svntest.actions.verify_disk(wc_dir, expected_disk, True)
+
+ # Propget 'name' in files in changelists 'a' and 'i' to depth files.
+ exit_code, output, errput = svntest.main.run_svn(None, "pget",
+ "--depth", "files", "name",
+ "--changelist", "a",
+ "--changelist", "i",
+ wc_dir)
+ verify_pget_output(output, {
+ os.path.join(wc_dir, 'iota') : 'value',
+ })
+
+ # Propget 'name' in files in changelists 'a' and 'i' to depth infinity.
+ exit_code, output, errput = svntest.main.run_svn(None, "pget",
+ "--depth", "infinity",
+ "name",
+ "--changelist", "a",
+ "--changelist", "i",
+ wc_dir)
+ verify_pget_output(output, {
+ os.path.join(wc_dir, 'A', 'D', 'gamma') : 'value',
+ os.path.join(wc_dir, 'A', 'B', 'E', 'alpha') : 'value',
+ os.path.join(wc_dir, 'iota') : 'value',
+ os.path.join(wc_dir, 'A', 'B', 'E', 'beta') : 'value',
+ os.path.join(wc_dir, 'A', 'B', 'lambda') : 'value',
+ os.path.join(wc_dir, 'A', 'D', 'H', 'omega') : 'value',
+ })
+
+
+#----------------------------------------------------------------------
+
+def revert_with_changelists(sbox):
+ "revert --changelist"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add files to changelists based on the last character in their names.
+ changelist_all_files(wc_dir, clname_from_lastchar_cb)
+
+ # Add a line of text to all the versioned files in the tree.
+ mod_all_files(wc_dir, "Please, oh please, revert me!\n")
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/lambda', 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/D/gamma', 'A/D/H/omega', 'iota', 'A/mu',
+ 'A/D/G/tau', 'A/D/G/pi', 'A/D/H/chi',
+ 'A/D/H/psi', 'A/D/G/rho', status='M ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # 'svn revert --changelist a WC_DIR' (without depth, no change expected)
+ svntest.main.run_svn(None, "revert", "--changelist", "a", wc_dir)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # 'svn revert --changelist o --depth files WC_DIR WC_DIR/A/B' (no change)
+ svntest.main.run_svn(None, "revert", "--depth", "files",
+ "--changelist", "o",
+ wc_dir, os.path.join(wc_dir, 'A', 'B'))
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # 'svn revert --changelist a --depth files WC_DIR WC_DIR/A/B'
+ # (iota, lambda reverted)
+ svntest.main.run_svn(None, "revert", "--depth", "files",
+ "--changelist", "a",
+ wc_dir, os.path.join(wc_dir, 'A', 'B'))
+ expected_status.tweak('iota', 'A/B/lambda', status=' ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # 'svn revert --changelist a --changelist i --depth infinity WC_DIR'
+ # (alpha, beta, gamma, omega, pi, chi, psi reverted)
+ svntest.main.run_svn(None, "revert", "--depth", "infinity",
+ "--changelist", "a", "--changelist", "i",
+ wc_dir)
+ expected_status.tweak('A/B/E/alpha', 'A/B/E/beta', 'A/D/gamma',
+ 'A/D/H/omega', 'A/D/G/pi', 'A/D/H/chi',
+ 'A/D/H/psi', status=' ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # 'svn revert --depth infinity WC_DIR' (back to pristine-ness)
+ svntest.main.run_svn(None, "revert", "--depth", "infinity",
+ wc_dir)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+
+def update_with_changelists(sbox):
+ "update --changelist"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add a line of text to all the versioned files in the tree, commit, update.
+ mod_all_files(wc_dir, "Added line.\n")
+ svntest.main.run_svn(None, "commit", "-m", "logmsg", wc_dir)
+ svntest.main.run_svn(None, "update", wc_dir)
+
+ # Add files to changelists based on the last character in their names.
+ changelist_all_files(wc_dir, clname_from_lastchar_cb)
+
+ ### Backdate only the files in the 'a' and 'i' changelists at depth
+ ### files under WC_DIR and WC_DIR/A/B.
+
+ # We expect update to only touch lambda and iota.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/lambda' : Item(status='U '),
+ 'iota' : Item(status='U '),
+ })
+
+ # Disk state should have all the files except iota and lambda
+ # carrying new text.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/B/E/alpha',
+ contents="This is the file 'alpha'.\nAdded line.\n")
+ expected_disk.tweak('A/B/E/beta',
+ contents="This is the file 'beta'.\nAdded line.\n")
+ expected_disk.tweak('A/D/gamma',
+ contents="This is the file 'gamma'.\nAdded line.\n")
+ expected_disk.tweak('A/D/H/omega',
+ contents="This is the file 'omega'.\nAdded line.\n")
+ expected_disk.tweak('A/mu',
+ contents="This is the file 'mu'.\nAdded line.\n")
+ expected_disk.tweak('A/D/G/tau',
+ contents="This is the file 'tau'.\nAdded line.\n")
+ expected_disk.tweak('A/D/G/pi',
+ contents="This is the file 'pi'.\nAdded line.\n")
+ expected_disk.tweak('A/D/H/chi',
+ contents="This is the file 'chi'.\nAdded line.\n")
+ expected_disk.tweak('A/D/H/psi',
+ contents="This is the file 'psi'.\nAdded line.\n")
+ expected_disk.tweak('A/D/G/rho',
+ contents="This is the file 'rho'.\nAdded line.\n")
+
+ # Status is clean, but with iota and lambda at r1 and all else at r2.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('iota', 'A/B/lambda', wc_rev=1)
+
+ # Update.
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True,
+ "-r", "1",
+ "--changelist", "a",
+ "--changelist", "i",
+ "--depth", "files",
+ wc_dir,
+ os.path.join(wc_dir, 'A', 'B'))
+
+ ### Backdate to depth infinity all changelists "a", "i", and "o" now.
+
+ # We expect update to only touch all the files ending in 'a', 'i',
+ # and 'o' (except lambda and iota which were previously updated).
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/pi' : Item(status='U '),
+ 'A/D/H/chi' : Item(status='U '),
+ 'A/D/H/psi' : Item(status='U '),
+ 'A/D/G/rho' : Item(status='U '),
+ 'A/B/E/alpha' : Item(status='U '),
+ 'A/B/E/beta' : Item(status='U '),
+ 'A/D/gamma' : Item(status='U '),
+ 'A/D/H/omega' : Item(status='U '),
+ })
+
+ # Disk state should have only tau and mu carrying new text.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu',
+ contents="This is the file 'mu'.\nAdded line.\n")
+ expected_disk.tweak('A/D/G/tau',
+ contents="This is the file 'tau'.\nAdded line.\n")
+
+  # Status is clean, but with the 'a'/'i'/'o' changelist files at r1, rest at r2.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('iota', 'A/B/lambda', 'A/D/G/pi', 'A/D/H/chi',
+ 'A/D/H/psi', 'A/D/G/rho', 'A/B/E/alpha',
+ 'A/B/E/beta', 'A/D/gamma', 'A/D/H/omega', wc_rev=1)
+
+ # Update.
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True,
+ "-r", "1",
+ "--changelist", "a",
+ "--changelist", "i",
+ "--changelist", "o",
+ "--depth", "infinity",
+ wc_dir)
+
+def tree_conflicts_and_changelists_on_commit1(sbox):
+ "tree conflicts, changelists and commit"
+ svntest.actions.build_greek_tree_conflicts(sbox)
+ wc_dir = sbox.wc_dir
+
+ iota = os.path.join(wc_dir, "iota")
+ rho = os.path.join(wc_dir, "A", "D", "G", "rho")
+
+ # This file will ultimately be committed
+ svntest.main.file_append(iota, "More stuff in iota")
+
+ # Verify that the commit is blocked when we include a tree-conflicted
+ # item.
+ svntest.main.run_svn(None, "changelist", "list", iota, rho)
+
+ expected_error = ("svn: E155015: Aborting commit: '.*" + re.escape(rho)
+ + "' remains in .*conflict")
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ None, None,
+ expected_error,
+ wc_dir,
+ "--changelist",
+ "list")
+
+ # Now, test if we can commit iota without those tree-conflicts
+ # getting in the way.
+ svntest.main.run_svn(None, "changelist", "--remove", rho)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('A/D/G/pi', status='D ', treeconflict='C')
+ expected_status.tweak('A/D/G/tau', status='! ', treeconflict='C',
+ wc_rev=None)
+ expected_status.tweak('A/D/G/rho', status='A ', copied='+',
+ treeconflict='C', wc_rev='-')
+ expected_status.tweak('iota', wc_rev=3, status=' ')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status,
+ [],
+ wc_dir,
+ "--changelist",
+ "list")
+
+
+def tree_conflicts_and_changelists_on_commit2(sbox):
+ "more tree conflicts, changelists and commit"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ iota = os.path.join(wc_dir, "iota")
+ A = os.path.join(wc_dir, "A",)
+ C = os.path.join(A, "C")
+
+ # Make a tree-conflict on A/C:
+ # Remove it, warp back, add a prop, update.
+ svntest.main.run_svn(None, 'delete', C)
+
+ expected_output = svntest.verify.RegexOutput(
+ "Deleting.*" + re.escape(C),
+ False)
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'commit', '-m', 'delete A/C', C)
+
+ expected_output = svntest.verify.RegexOutput(
+ "A.*" + re.escape(C), False)
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'update', C, "-r1")
+
+ expected_output = svntest.verify.RegexOutput(
+ ".*'propname' set on '"
+ + re.escape(C) + "'", False)
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'propset', 'propname', 'propval', C)
+
+ expected_output = svntest.verify.RegexOutput(
+ " C " + re.escape(C), False)
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'update', wc_dir)
+
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('A/C', status='A ', copied='+',
+ treeconflict='C', wc_rev='-')
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # So far so good. We have a tree-conflict on an absent dir A/C.
+
+ # Verify that the current situation does not commit.
+ expected_error = "svn: E155015: Aborting commit:.* remains in .*conflict";
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ None, None,
+ expected_error,
+ wc_dir)
+
+ # Now try to commit with a changelist, not letting the
+ # tree-conflict get in the way.
+ svntest.main.file_append(iota, "More stuff in iota")
+ svntest.main.run_svn(None, "changelist", "list", iota)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+
+ expected_status.tweak('iota', wc_rev=3, status=' ')
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status,
+ [],
+ wc_dir,
+ "--changelist",
+ "list")
+
+
+#----------------------------------------------------------------------
+
+def move_keeps_changelist(sbox):
+ "'svn mv' of existing keeps the changelist"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+ iota_path = os.path.join(wc_dir, 'iota')
+ iota2_path = iota_path + '2'
+
+ # 'svn mv' of existing file should *copy* the changelist to the new place
+ svntest.main.run_svn(None, "changelist", 'foo', iota_path)
+ svntest.main.run_svn(None, "rename", iota_path, iota2_path)
+ expected_infos = [
+ {
+ 'Name' : 'iota',
+ 'Schedule' : 'delete',
+ 'Changelist' : 'foo',
+ },
+ {
+ 'Name' : 'iota2',
+ 'Schedule' : 'add',
+ 'Changelist' : 'foo', # this line fails the test
+ },
+ ]
+ svntest.actions.run_and_verify_info(expected_infos, iota_path, iota2_path)
+
+def move_added_keeps_changelist(sbox):
+ "'svn mv' of added keeps the changelist"
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ kappa_path = os.path.join(wc_dir, 'kappa')
+ kappa2_path = kappa_path + '2'
+
+ # add 'kappa' (do not commit!)
+ svntest.main.file_write(kappa_path, "This is the file 'kappa'.\n")
+ svntest.main.run_svn(None, 'add', kappa_path)
+
+ # 'svn mv' of added file should *move* the changelist to the new place
+ svntest.main.run_svn(None, "changelist", 'foo', kappa_path)
+ svntest.main.run_svn(None, "rename", kappa_path, kappa2_path)
+
+ # kappa not under version control
+ svntest.actions.run_and_verify_svnversion(kappa_path, repo_url,
+ [], ".*doesn't exist.*")
+ # kappa2 in a changelist
+ expected_infos = [
+ {
+ 'Name' : 'kappa2',
+ 'Schedule' : 'add',
+ 'Changelist' : 'foo', # this line fails the test
+ },
+ ]
+ svntest.actions.run_and_verify_info(expected_infos, kappa2_path)
+
+@Issue(3820)
+def change_to_dir(sbox):
+ "change file in changelist to dir"
+
+ sbox.build()
+
+ # No changelist initially
+ expected_infos = [{'Name' : 'mu', 'Changelist' : None}]
+ svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu'))
+
+ # A/mu visible in changelist
+ svntest.actions.run_and_verify_svn(None, [],
+ 'changelist', 'qq', sbox.ospath('A/mu'))
+ expected_infos = [{'Name' : 'mu', 'Changelist' : 'qq'}]
+ svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu'))
+
+ # A/mu still visible after delete
+ svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.ospath('A/mu'))
+ svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu'))
+
+ # A/mu removed from changelist after replace with directory
+ svntest.actions.run_and_verify_svn('^A|' + _re_cl_rem_pattern, [],
+ 'mkdir', sbox.ospath('A/mu'))
+ expected_infos = [{'Changelist' : None}] # No Name for directories?
+ svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu'))
+
+ svntest.main.run_svn(None, "commit", "-m", "r2: replace A/mu: file->dir",
+ sbox.ospath('A'))
+ svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu'))
+
+ svntest.main.run_svn(None, "update", "-r", "1", sbox.ospath('A'))
+ expected_infos = [{'Name' : 'mu', 'Changelist' : None}]
+ svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu'))
+
+ # A/mu visible in changelist
+ svntest.actions.run_and_verify_svn(None, [],
+ 'changelist', 'qq', sbox.ospath('A/mu'))
+ expected_infos = [{'Name' : 'mu', 'Changelist' : 'qq'}]
+ svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu'))
+
+ # A/mu removed from changelist after replace with dir via merge
+ svntest.main.run_svn(None, "merge", "-c", "2", sbox.ospath('A'),
+ sbox.ospath('A'))
+ expected_infos = [{'Changelist' : None}] # No Name for directories?
+ svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu'))
+
+
+@Issue(3822)
+def revert_deleted_in_changelist(sbox):
+ "revert a deleted file in a changelist"
+
+ sbox.build(read_only = True)
+
+ # No changelist initially
+ expected_infos = [{'Name' : 'mu', 'Changelist' : None}]
+ svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu'))
+
+ # A/mu visible in changelist
+ svntest.actions.run_and_verify_svn(None, [],
+ 'changelist', 'qq', sbox.ospath('A/mu'))
+ expected_infos = [{'Name' : 'mu', 'Changelist' : 'qq'}]
+ svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu'))
+
+ # A/mu still visible after delete
+ svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.ospath('A/mu'))
+ svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu'))
+
+ # A/mu still visible after revert
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', sbox.ospath('A/mu'))
+ svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu'))
+
+ # A/mu still visible after parent delete
+ svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.ospath('A'))
+ svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu'))
+
+ # A/mu still visible after revert
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '-R', sbox.ospath('A'))
+ svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu'))
+
+def add_remove_non_existent_target(sbox):
+ "add and remove non-existent target to changelist"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+ bogus_path = os.path.join(wc_dir, 'A', 'bogus')
+
+ expected_err = "svn: warning: W155010: The node '" + \
+ re.escape(os.path.abspath(bogus_path)) + \
+ "' was not found"
+
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'changelist', 'testlist',
+ bogus_path)
+
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'changelist', bogus_path,
+ '--remove')
+
+def add_remove_unversioned_target(sbox):
+ "add and remove unversioned target to changelist"
+
+ sbox.build(read_only = True)
+ unversioned = sbox.ospath('unversioned')
+ svntest.main.file_write(unversioned, "dummy contents", 'w+')
+
+ expected_err = "svn: warning: W155010: The node '" + \
+ re.escape(os.path.abspath(unversioned)) + \
+ "' was not found"
+
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'changelist', 'testlist',
+ unversioned)
+
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'changelist', unversioned,
+ '--remove')
+
+@Issue(3985)
+def readd_after_revert(sbox):
+ "add new file to changelist, revert and readd"
+ sbox.build(read_only = True)
+
+ dummy = sbox.ospath('dummy')
+ svntest.main.file_write(dummy, "dummy contents")
+
+ sbox.simple_add('dummy')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'changelist', 'testlist',
+ dummy)
+
+ sbox.simple_revert('dummy')
+
+ svntest.main.file_write(dummy, "dummy contents")
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'add', dummy)
+
+
+########################################################################
+# Run the tests
+
+# list all tests here, starting with None:
+test_list = [ None,
+ add_remove_changelists,
+ commit_one_changelist,
+ commit_multiple_changelists,
+ info_with_changelists,
+ diff_with_changelists,
+ propmods_with_changelists,
+ revert_with_changelists,
+ update_with_changelists,
+ tree_conflicts_and_changelists_on_commit1,
+ tree_conflicts_and_changelists_on_commit2,
+ move_keeps_changelist,
+ move_added_keeps_changelist,
+ change_to_dir,
+ revert_deleted_in_changelist,
+ add_remove_non_existent_target,
+ add_remove_unversioned_target,
+ readd_after_revert,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/checkout_tests.py b/subversion/tests/cmdline/checkout_tests.py
new file mode 100755
index 0000000..49165e7
--- /dev/null
+++ b/subversion/tests/cmdline/checkout_tests.py
@@ -0,0 +1,1213 @@
+#!/usr/bin/env python
+#
+# checkout_tests.py: Testing checkout --force behavior when local
+#                    tree already exists.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import sys, re, os, time, subprocess
+import datetime
+
+# Our testing module
+import svntest
+from svntest import wc, actions
+import logging
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = wc.StateItem
+
+logger = logging.getLogger()
+
+#----------------------------------------------------------------------
+# Helper function for testing stderr from co.
+# If none of the strings in STDERR list matches the regular expression
+# RE_STRING raise an error.
+def test_stderr(re_string, stderr):
+ exp_err_re = re.compile(re_string)
+ for line in stderr:
+ if exp_err_re.search(line):
+ return
+ for x in stderr:
+ logger.debug(x[:-1])
+ logger.info("Expected stderr reg-ex: '" + re_string + "'")
+ raise svntest.Failure("Checkout failed but not in the expected way")
+
+#----------------------------------------------------------------------
+# Helper function to set up an existing local tree that has paths which
+# obstruct with the incoming WC.
+#
+# Build a sandbox SBOX without a WC.  Create the following paths
+# rooted at SBOX.WC_DIR:
+#
+# iota
+# A/
+# A/mu
+#
+# If MOD_FILES is FALSE, 'iota' and 'A/mu' have the same contents as the
+# standard greek tree.  If TRUE the contents of each are set as follows:
+#
+# iota : contents == "This is the local version of the file 'iota'.\n"
+# A/mu : contents == "This is the local version of the file 'mu'.\n"
+#
+# If ADD_UNVERSIONED is TRUE, add the following files and directories,
+# rooted in SBOX.WC_DIR, that don't exist in the standard greek tree:
+#
+# 'sigma'
+# 'A/upsilon'
+# 'A/Z/'
+#
+# Return the expected output for svn co --force SBOX.REPO_URL SBOX.WC_DIR
+#
+def make_local_tree(sbox, mod_files=False, add_unversioned=False):
+ """Make a local unversioned tree to checkout into."""
+
+ sbox.build(create_wc = False)
+
+ if os.path.exists(sbox.wc_dir):
+ svntest.main.safe_rmtree(sbox.wc_dir)
+
+ export_target = sbox.wc_dir
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.wc_dir = sbox.wc_dir
+ expected_output.desc[""] = Item()
+ expected_output.tweak(contents=None, status="A ")
+
+ # Export an unversioned tree to sbox.wc_dir.
+ svntest.actions.run_and_verify_export(sbox.repo_url,
+ export_target,
+ expected_output,
+ svntest.main.greek_state.copy())
+
+ # Remove everything remaining except for 'iota', 'A/', and 'A/mu'.
+ svntest.main.safe_rmtree(os.path.join(sbox.wc_dir, "A", "B"))
+ svntest.main.safe_rmtree(os.path.join(sbox.wc_dir, "A", "C"))
+ svntest.main.safe_rmtree(os.path.join(sbox.wc_dir, "A", "D"))
+
+ # Should obstructions differ from the standard greek tree?
+ if mod_files:
+ iota_path = os.path.join(sbox.wc_dir, "iota")
+ mu_path = os.path.join(sbox.wc_dir, "A", "mu")
+ svntest.main.file_write(iota_path,
+ "This is the local version of the file 'iota'.\n")
+ svntest.main.file_write(mu_path,
+ "This is the local version of the file 'mu'.\n")
+
+ # Add some files that won't obstruct anything in standard greek tree?
+ if add_unversioned:
+ sigma_path = os.path.join(sbox.wc_dir, "sigma")
+ svntest.main.file_append(sigma_path, "unversioned sigma")
+ upsilon_path = os.path.join(sbox.wc_dir, "A", "upsilon")
+ svntest.main.file_append(upsilon_path, "unversioned upsilon")
+ Z_path = os.path.join(sbox.wc_dir, "A", "Z")
+ os.mkdir(Z_path)
+
+ return wc.State(sbox.wc_dir, {
+ "A" : Item(status='E '), # Obstruction
+ "A/B" : Item(status='A '),
+ "A/B/lambda" : Item(status='A '),
+ "A/B/E" : Item(status='A '),
+ "A/B/E/alpha" : Item(status='A '),
+ "A/B/E/beta" : Item(status='A '),
+ "A/B/F" : Item(status='A '),
+ "A/mu" : Item(status='E '), # Obstruction
+ "A/C" : Item(status='A '),
+ "A/D" : Item(status='A '),
+ "A/D/gamma" : Item(status='A '),
+ "A/D/G" : Item(status='A '),
+ "A/D/G/pi" : Item(status='A '),
+ "A/D/G/rho" : Item(status='A '),
+ "A/D/G/tau" : Item(status='A '),
+ "A/D/H" : Item(status='A '),
+ "A/D/H/chi" : Item(status='A '),
+ "A/D/H/omega" : Item(status='A '),
+ "A/D/H/psi" : Item(status='A '),
+ "iota" : Item(status='E '), # Obstruction
+ })
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+#----------------------------------------------------------------------
+
+def checkout_with_obstructions(sbox):
+ """co with obstructions conflicts without --force"""
+
+ make_local_tree(sbox, False, False)
+
+ #svntest.factory.make(sbox,
+ # """# Checkout with unversioned obstructions lying around.
+ # svn co url wc_dir
+ # svn status""")
+ #svntest.factory.make(sbox,
+ # """# Now see to it that we can recover from the obstructions.
+ # rm -rf A iota
+ # svn up""")
+ #exit(0)
+
+ wc_dir = sbox.wc_dir
+ url = sbox.repo_url
+
+ # Checkout with unversioned obstructions causes tree conflicts.
+ # svn co url wc_dir
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(status=' ', treeconflict='C'),
+ 'A' : Item(status=' ', treeconflict='C'),
+ # And the updates below the tree conflict
+ 'A/D' : Item(status=' ', treeconflict='A'),
+ 'A/D/gamma' : Item(status=' ', treeconflict='A'),
+ 'A/D/G' : Item(status=' ', treeconflict='A'),
+ 'A/D/G/rho' : Item(status=' ', treeconflict='A'),
+ 'A/D/G/pi' : Item(status=' ', treeconflict='A'),
+ 'A/D/G/tau' : Item(status=' ', treeconflict='A'),
+ 'A/D/H' : Item(status=' ', treeconflict='A'),
+ 'A/D/H/chi' : Item(status=' ', treeconflict='A'),
+ 'A/D/H/omega' : Item(status=' ', treeconflict='A'),
+ 'A/D/H/psi' : Item(status=' ', treeconflict='A'),
+ 'A/B' : Item(status=' ', treeconflict='A'),
+ 'A/B/E' : Item(status=' ', treeconflict='A'),
+ 'A/B/E/beta' : Item(status=' ', treeconflict='A'),
+ 'A/B/E/alpha' : Item(status=' ', treeconflict='A'),
+ 'A/B/F' : Item(status=' ', treeconflict='A'),
+ 'A/B/lambda' : Item(status=' ', treeconflict='A'),
+ 'A/C' : Item(status=' ', treeconflict='A'),
+ 'A/mu' : Item(status=' ', treeconflict='A'),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B', 'A/B/E', 'A/B/E/beta', 'A/B/E/alpha', 'A/B/F',
+ 'A/B/lambda', 'A/D', 'A/D/G', 'A/D/G/rho', 'A/D/G/pi', 'A/D/G/tau',
+ 'A/D/H', 'A/D/H/psi', 'A/D/H/omega', 'A/D/H/chi', 'A/D/gamma', 'A/C')
+
+ actions.run_and_verify_checkout(url, wc_dir, expected_output,
+ expected_disk)
+
+ # svn status
+ expected_status = actions.get_virginal_state(wc_dir, 1)
+ # A and iota are tree conflicted and obstructed
+ expected_status.tweak('A', 'iota', status='D ', wc_rev=1,
+ treeconflict='C')
+
+ expected_status.tweak('A/D', 'A/D/G', 'A/D/G/rho', 'A/D/G/pi', 'A/D/G/tau',
+ 'A/D/H', 'A/D/H/chi', 'A/D/H/omega', 'A/D/H/psi', 'A/D/gamma', 'A/B',
+ 'A/B/E', 'A/B/E/beta', 'A/B/E/alpha', 'A/B/F', 'A/B/lambda', 'A/C',
+ status='D ')
+ # A/mu exists on disk, but is deleted
+ expected_status.tweak('A/mu', status='D ')
+
+ actions.run_and_verify_unquiet_status(wc_dir, expected_status)
+
+
+ # Now see to it that we can recover from the obstructions.
+ # rm -rf A iota
+ svntest.main.safe_rmtree( os.path.join(wc_dir, 'A') )
+ os.remove( os.path.join(wc_dir, 'iota') )
+
+
+ svntest.main.run_svn(None, 'revert', '-R', os.path.join(wc_dir, 'A'),
+ os.path.join(wc_dir, 'iota'))
+
+ # svn up
+ expected_output = svntest.wc.State(wc_dir, {
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+
+ expected_status = actions.get_virginal_state(wc_dir, 1)
+
+ actions.run_and_verify_update(wc_dir, expected_output, expected_disk,
+ expected_status,)
+
+
+
+#----------------------------------------------------------------------
+
+def forced_checkout_of_file_with_dir_obstructions(sbox):
+ """forced co flags conflict if a dir obstructs a file"""
+ # svntest.factory.make(sbox,
+ # """mkdir $WC_DIR.other/iota
+ # svn co --force url $WC_DIR.other """)
+ sbox.build()
+ url = sbox.repo_url
+ wc_dir_other = sbox.add_wc_path('other')
+
+ other_iota = os.path.join(wc_dir_other, 'iota')
+
+ # mkdir $WC_DIR.other/iota
+ os.makedirs(other_iota)
+
+ # svn co --force url $WC_DIR.other
+ expected_output = svntest.wc.State(wc_dir_other, {
+ 'A' : Item(status='A '),
+ 'A/B' : Item(status='A '),
+ 'A/B/E' : Item(status='A '),
+ 'A/B/E/alpha' : Item(status='A '),
+ 'A/B/E/beta' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ 'A/B/lambda' : Item(status='A '),
+ 'A/D' : Item(status='A '),
+ 'A/D/H' : Item(status='A '),
+ 'A/D/H/chi' : Item(status='A '),
+ 'A/D/H/omega' : Item(status='A '),
+ 'A/D/H/psi' : Item(status='A '),
+ 'A/D/G' : Item(status='A '),
+ 'A/D/G/pi' : Item(status='A '),
+ 'A/D/G/rho' : Item(status='A '),
+ 'A/D/G/tau' : Item(status='A '),
+ 'A/D/gamma' : Item(status='A '),
+ 'A/C' : Item(status='A '),
+ 'A/mu' : Item(status='A '),
+ 'iota' : Item(status=' ', treeconflict='C'),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota', contents=None)
+
+ actions.run_and_verify_checkout(url, wc_dir_other, expected_output,
+ expected_disk, [], '--force')
+
+
+#----------------------------------------------------------------------
+
+def forced_checkout_of_dir_with_file_obstructions(sbox):
+ """forced co flags conflict if a file obstructs a dir"""
+
+ make_local_tree(sbox, False, False)
+
+ #svntest.factory.make(sbox,"""
+ # mkdir wc_dir_other
+ # echo "The file A" > wc_dir_other/A
+ # svn co --force url wc_dir_other
+ # """)
+ #svntest.factory.make(sbox,"""
+ # # Now see to it that we can recover from the obstructions.
+ # rm wc_dir_other/A
+ # svn up wc_dir_other""")
+ #exit(0)
+
+ url = sbox.repo_url
+ wc_dir_other = sbox.add_wc_path('other')
+
+ other_A = os.path.join(wc_dir_other, 'A')
+
+ # mkdir wc_dir_other
+ os.makedirs(wc_dir_other)
+
+ # echo "The file A" > wc_dir_other/A
+ svntest.main.file_write(other_A, 'The file A\n')
+
+ # svn co --force url wc_dir_other
+ expected_output = svntest.wc.State(wc_dir_other, {
+ 'iota' : Item(status='A '),
+ 'A' : Item(status=' ', treeconflict='C'),
+ # And what happens below A
+ 'A/mu' : Item(status=' ', treeconflict='A'),
+ 'A/D' : Item(status=' ', treeconflict='A'),
+ 'A/D/G' : Item(status=' ', treeconflict='A'),
+ 'A/D/G/tau' : Item(status=' ', treeconflict='A'),
+ 'A/D/G/pi' : Item(status=' ', treeconflict='A'),
+ 'A/D/G/rho' : Item(status=' ', treeconflict='A'),
+ 'A/D/H' : Item(status=' ', treeconflict='A'),
+ 'A/D/H/psi' : Item(status=' ', treeconflict='A'),
+ 'A/D/H/omega' : Item(status=' ', treeconflict='A'),
+ 'A/D/H/chi' : Item(status=' ', treeconflict='A'),
+ 'A/D/gamma' : Item(status=' ', treeconflict='A'),
+ 'A/C' : Item(status=' ', treeconflict='A'),
+ 'A/B' : Item(status=' ', treeconflict='A'),
+ 'A/B/E' : Item(status=' ', treeconflict='A'),
+ 'A/B/E/beta' : Item(status=' ', treeconflict='A'),
+ 'A/B/E/alpha' : Item(status=' ', treeconflict='A'),
+ 'A/B/F' : Item(status=' ', treeconflict='A'),
+ 'A/B/lambda' : Item(status=' ', treeconflict='A'),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B', 'A/B/E', 'A/B/E/beta', 'A/B/E/alpha', 'A/B/F',
+ 'A/B/lambda', 'A/D', 'A/D/G', 'A/D/G/rho', 'A/D/G/pi', 'A/D/G/tau',
+ 'A/D/H', 'A/D/H/psi', 'A/D/H/omega', 'A/D/H/chi', 'A/D/gamma', 'A/mu',
+ 'A/C')
+ expected_disk.tweak('A', contents='The file A\n')
+
+ actions.run_and_verify_checkout(url, wc_dir_other, expected_output,
+ expected_disk, [], '--force')
+
+
+ # Now see to it that we can recover from the obstructions.
+ # rm wc_dir_other/A
+ os.remove(other_A)
+
+ # svn up wc_dir_other
+ expected_output = svntest.wc.State(wc_dir_other, {
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+
+ expected_status = actions.get_virginal_state(wc_dir_other, 1)
+
+ svntest.main.run_svn(None, 'revert', '-R', os.path.join(wc_dir_other, 'A'))
+
+ actions.run_and_verify_update(wc_dir_other, expected_output, expected_disk,
+ expected_status)
+
+
+#----------------------------------------------------------------------
+
+def forced_checkout_with_faux_obstructions(sbox):
+ """co with faux obstructions ok with --force"""
+
+ # Make a local tree that partially obstructs the paths coming from the
+ # repos but has no true differences.
+ expected_output = make_local_tree(sbox, False, False)
+
+ expected_wc = svntest.main.greek_state.copy()
+
+ svntest.actions.run_and_verify_checkout(sbox.repo_url,
+ sbox.wc_dir, expected_output,
+ expected_wc, [], '--force')
+
+#----------------------------------------------------------------------
+
+def forced_checkout_with_real_obstructions(sbox):
+ """co with real obstructions ok with --force"""
+
+ # Make a local tree that partially obstructs the paths coming from the
+ # repos and make the obstructing files different from the standard greek
+ # tree.
+ expected_output = make_local_tree(sbox, True, False)
+
+ expected_wc = svntest.main.greek_state.copy()
+ expected_wc.tweak('A/mu',
+ contents="This is the local version of the file 'mu'.\n")
+ expected_wc.tweak('iota',
+ contents="This is the local version of the file 'iota'.\n")
+
+ svntest.actions.run_and_verify_checkout(sbox.repo_url,
+ sbox.wc_dir, expected_output,
+ expected_wc, [], '--force')
+
+#----------------------------------------------------------------------
+
+def forced_checkout_with_real_obstructions_and_unversioned_files(sbox):
+ """co with real obstructions and unversioned files"""
+
+ # Make a local tree that partially obstructs the paths coming from the
+ # repos, make the obstructing files different from the standard greek
+  # tree, and finally add some files that don't exist in the standard tree.
+ expected_output = make_local_tree(sbox, True, True)
+
+ expected_wc = svntest.main.greek_state.copy()
+ expected_wc.tweak('A/mu',
+ contents="This is the local version of the file 'mu'.\n")
+ expected_wc.tweak('iota',
+ contents="This is the local version of the file 'iota'.\n")
+ expected_wc.add({'sigma' : Item("unversioned sigma"),
+ 'A/upsilon' : Item("unversioned upsilon"),
+ 'A/Z' : Item(),
+ })
+
+ svntest.actions.run_and_verify_checkout(sbox.repo_url,
+ sbox.wc_dir, expected_output,
+ expected_wc, [], '--force')
+
+#----------------------------------------------------------------------
+
+def forced_checkout_with_versioned_obstruction(sbox):
+ """forced co with versioned obstruction"""
+
+ # Make a greek tree working copy
+ sbox.build(read_only = True)
+
+ # Create a second repository with the same greek tree
+ repo_dir = sbox.repo_dir
+ repo_url = sbox.repo_url
+ other_repo_dir, other_repo_url = sbox.add_repo_path("other")
+ svntest.main.copy_repos(repo_dir, other_repo_dir, 1, 1)
+
+ fresh_wc_dir = sbox.add_wc_path('fresh')
+ fresh_wc_dir_A = os.path.join(fresh_wc_dir, 'A')
+ os.mkdir(fresh_wc_dir)
+
+ other_wc_dir = sbox.add_wc_path("other")
+ other_wc_dir_A = os.path.join(other_wc_dir, "A")
+ os.mkdir(other_wc_dir)
+
+ # Checkout "A" from the first repos to a fresh dir.
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ "co", repo_url + "/A",
+ fresh_wc_dir_A)
+
+ # Checkout "A" from the second repos to the other dir.
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ "co", other_repo_url + "/A",
+ other_wc_dir_A)
+
+ # Checkout the entire first repos into the fresh dir. This should
+ # fail because A is already checked out. (Ideally, we'd silently
+ # incorporate A's working copy into its parent working copy.)
+ expected_output = svntest.wc.State(fresh_wc_dir, {
+ 'iota' : Item(status='A '),
+ 'A' : Item(verb='Skipped'),
+ })
+ expected_wc = svntest.main.greek_state.copy()
+ svntest.actions.run_and_verify_checkout(repo_url, fresh_wc_dir,
+ expected_output, expected_wc,
+ [], '--force')
+
+ # Checkout the entire first repos into the other dir. This should
+ # fail because it's a different repository.
+ expected_output = svntest.wc.State(other_wc_dir, {
+ 'iota' : Item(status='A '),
+ 'A' : Item(verb='Skipped'),
+ })
+ expected_wc = svntest.main.greek_state.copy()
+ svntest.actions.run_and_verify_checkout(repo_url, other_wc_dir,
+ expected_output, expected_wc,
+ [], '--force')
+
+ #ensure that other_wc_dir_A is not affected by this forced checkout.
+ svntest.actions.run_and_verify_svn(None,
+ [], "st", other_wc_dir_A)
+ exit_code, sout, serr = svntest.actions.run_and_verify_svn(
+ None, [], "info",
+ other_wc_dir_A)
+
+ #TODO rename test_stderr to test_regex or something.
+ test_stderr("URL: " + other_repo_url + '/A$', sout)
+
+ #ensure that other_wc_dir is in a consistent state though it may be
+ #missing few items.
+ exit_code, sout, serr = svntest.actions.run_and_verify_svn(
+ None, [], "info",
+ other_wc_dir)
+ #TODO rename test_stderr to test_regex or something.
+ test_stderr("URL: " + sbox.repo_url + '$', sout)
+
+
+
+#----------------------------------------------------------------------
+# Ensure that an import followed by a checkout in place works correctly.
+def import_and_checkout(sbox):
+ """import and checkout"""
+
+ sbox.build(read_only = True)
+
+ other_repo_dir, other_repo_url = sbox.add_repo_path("other")
+ import_from_dir = sbox.add_wc_path("other")
+
+ # Export greek tree to import_from_dir
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.wc_dir = import_from_dir
+ expected_output.desc[''] = Item()
+ expected_output.tweak(contents=None, status='A ')
+ svntest.actions.run_and_verify_export(sbox.repo_url,
+ import_from_dir,
+ expected_output,
+ svntest.main.greek_state.copy())
+
+ # Create the 'other' repos
+ svntest.main.create_repos(other_repo_dir)
+
+ # Import import_from_dir to the other repos
+ expected_output = svntest.wc.State(sbox.wc_dir, {})
+
+ svntest.actions.run_and_verify_svn(None, [], 'import',
+ '-m', 'import', import_from_dir,
+ other_repo_url)
+
+ expected_output = wc.State(import_from_dir, {
+ "A" : Item(status='E '),
+ "A/B" : Item(status='E '),
+ "A/B/lambda" : Item(status='E '),
+ "A/B/E" : Item(status='E '),
+ "A/B/E/alpha" : Item(status='E '),
+ "A/B/E/beta" : Item(status='E '),
+ "A/B/F" : Item(status='E '),
+ "A/mu" : Item(status='E '),
+ "A/C" : Item(status='E '),
+ "A/D" : Item(status='E '),
+ "A/D/gamma" : Item(status='E '),
+ "A/D/G" : Item(status='E '),
+ "A/D/G/pi" : Item(status='E '),
+ "A/D/G/rho" : Item(status='E '),
+ "A/D/G/tau" : Item(status='E '),
+ "A/D/H" : Item(status='E '),
+ "A/D/H/chi" : Item(status='E '),
+ "A/D/H/omega" : Item(status='E '),
+ "A/D/H/psi" : Item(status='E '),
+ "iota" : Item(status='E ')
+ })
+
+ expected_wc = svntest.main.greek_state.copy()
+
+ svntest.actions.run_and_verify_checkout(other_repo_url, import_from_dir,
+ expected_output, expected_wc,
+ [], '--force')
+
+#----------------------------------------------------------------------
+# Issue #2529.
+@Issue(2529)
+def checkout_broken_eol(sbox):
+ "checkout file with broken eol style"
+
+ svntest.actions.load_repo(sbox, os.path.join(os.path.dirname(sys.argv[0]),
+ 'update_tests_data',
+ 'checkout_broken_eol.dump'),
+ create_wc=False)
+
+ URL = sbox.repo_url
+
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ 'file': Item(status='A '),
+ })
+
+ expected_wc = svntest.wc.State('', {
+ 'file': Item(contents='line\nline2\n'),
+ })
+ svntest.actions.run_and_verify_checkout(URL,
+ sbox.wc_dir,
+ expected_output,
+ expected_wc)
+
+def checkout_creates_intermediate_folders(sbox):
+ "checkout and create some intermediate folders"
+
+ sbox.build(create_wc = False, read_only = True)
+
+ checkout_target = os.path.join(sbox.wc_dir, 'a', 'b', 'c')
+
+ # checkout a working copy in a/b/c, should create these intermediate
+ # folders
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.wc_dir = checkout_target
+ expected_output.tweak(status='A ', contents=None)
+
+ expected_wc = svntest.main.greek_state
+
+ svntest.actions.run_and_verify_checkout(sbox.repo_url,
+ checkout_target,
+ expected_output,
+ expected_wc)
+
+# Test that, if a peg revision is provided without an explicit revision,
+# svn will checkout the directory as it was at rPEG, rather than at HEAD.
+def checkout_peg_rev(sbox):
+ "checkout with peg revision"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ # create a new revision
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+ svntest.main.file_append(mu_path, 'appended mu text')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'changed file mu', wc_dir)
+
+ # now checkout the repo@1 in another folder, this should create our initial
+ # wc without the change in mu.
+ checkout_target = sbox.add_wc_path('checkout')
+ os.mkdir(checkout_target)
+
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.wc_dir = checkout_target
+ expected_output.tweak(status='A ', contents=None)
+
+ expected_wc = svntest.main.greek_state.copy()
+
+ svntest.actions.run_and_verify_checkout(sbox.repo_url + '@1',
+ checkout_target,
+ expected_output,
+ expected_wc)
+
+#----------------------------------------------------------------------
+# Issue 2602: Test that peg revision dates are correctly supported.
+@Issue(2602)
+def checkout_peg_rev_date(sbox):
+ "checkout with peg revision date"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ ## Get svn:date.
+ exit_code, output, errput = svntest.main.run_svn(None, 'propget', 'svn:date',
+ '--revprop', '-r1',
+ '--no-newline',
+ sbox.repo_url)
+ if exit_code or errput != [] or len(output) != 1:
+ raise svntest.Failure("svn:date propget failed")
+ r1_string = output[0]
+
+ ## Increment the svn:date date by one microsecond.
+ # TODO: pass tzinfo=UTC to datetime.datetime()
+ date_pattern = re.compile(r'(\d+)-(\d+)-(\d+)T(\d\d):(\d\d):(\d\d)\.(\d+)Z$')
+ r1_time = datetime.datetime(*map(int, date_pattern.match(r1_string).groups()))
+ peg_time = r1_time + datetime.timedelta(microseconds=1)
+ assert r1_time != peg_time
+ # peg_string is, by all likelihood, younger than r1's svn:date and older than
+ # r2's svn:date. It is also not equal to either of them, so we test the
+ # binary search of svn:date values.
+ peg_string = '%04d-%02d-%02dT%02d:%02d:%02d.%06dZ' % \
+ tuple(getattr(peg_time, x)
+ for x in ["year", "month", "day", "hour", "minute",
+ "second", "microsecond"])
+
+ # create a new revision
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+ svntest.main.file_append(mu_path, 'appended mu text')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'changed file mu', wc_dir)
+
+ # now checkout the repo@peg_string in another folder, this should create our
+ # initial wc without the change in mu.
+ checkout_target = sbox.add_wc_path('checkout')
+ os.mkdir(checkout_target)
+
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.wc_dir = checkout_target
+ expected_output.tweak(status='A ', contents=None)
+
+ expected_wc = svntest.main.greek_state.copy()
+
+ # use an old date to checkout, that way we're sure we get the first revision
+ svntest.actions.run_and_verify_checkout(sbox.repo_url +
+ '@{' + peg_string + '}',
+ checkout_target,
+ expected_output,
+ expected_wc)
+
+ # now try another checkout with repo@r1_string
+ checkout_target = sbox.add_wc_path('checkout2')
+ os.mkdir(checkout_target)
+
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.wc_dir = checkout_target
+ expected_output.tweak(status='A ', contents=None)
+
+ expected_wc = svntest.main.greek_state.copy()
+
+ # use an old date to checkout, that way we're sure we get the first revision
+ svntest.actions.run_and_verify_checkout(sbox.repo_url +
+ '@{' + r1_string + '}',
+ checkout_target,
+ expected_output,
+ expected_wc)
+
+#----------------------------------------------------------------------
+def co_with_obstructing_local_adds(sbox):
+  "co handles obstructing paths scheduled for add"
+
+  # Phase 1: commit new files/dirs from the primary WC, then check out
+  # over a backup WC that has same-named nodes scheduled for plain
+  # addition.  With --force, identical files merge cleanly ('E'),
+  # differing files become text conflicts ('C').
+  # Phase 2: verify that obstructions scheduled for addition *with*
+  # history (copies) instead produce tree conflicts.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Make a backup copy of the working copy
+  wc_backup = sbox.add_wc_path('backup')
+  svntest.actions.duplicate_dir(wc_dir, wc_backup)
+
+  # Add files and dirs to the repos via the first WC. Each of these
+  # will be added to the backup WC via a checkout:
+  #
+  # A/B/upsilon: Identical to the file scheduled for addition in
+  # the backup WC.
+  #
+  # A/C/nu: A "normal" add, won't exist in the backup WC.
+  #
+  # A/D/kappa: Conflicts with the file scheduled for addition in
+  # the backup WC.
+  #
+  # A/D/H/I: New dirs that will also be scheduled for addition
+  # A/D/H/I/J: in the backup WC.
+  # A/D/H/I/K:
+  #
+  # A/D/H/I/L: A "normal" dir add, won't exist in the backup WC.
+  #
+  # A/D/H/I/K/xi: Identical to the file scheduled for addition in
+  # the backup WC.
+  #
+  # A/D/H/I/K/eta: Conflicts with the file scheduled for addition in
+  # the backup WC.
+  upsilon_path = os.path.join(wc_dir, 'A', 'B', 'upsilon')
+  svntest.main.file_append(upsilon_path, "This is the file 'upsilon'\n")
+  nu_path = os.path.join(wc_dir, 'A', 'C', 'nu')
+  svntest.main.file_append(nu_path, "This is the file 'nu'\n")
+  kappa_path = os.path.join(wc_dir, 'A', 'D', 'kappa')
+  svntest.main.file_append(kappa_path, "This is REPOS file 'kappa'\n")
+  I_path = os.path.join(wc_dir, 'A', 'D', 'H', 'I')
+  os.mkdir(I_path)
+  J_path = os.path.join(I_path, 'J')
+  os.mkdir(J_path)
+  K_path = os.path.join(I_path, 'K')
+  os.mkdir(K_path)
+  L_path = os.path.join(I_path, 'L')
+  os.mkdir(L_path)
+  xi_path = os.path.join(K_path, 'xi')
+  svntest.main.file_append(xi_path, "This is file 'xi'\n")
+  eta_path = os.path.join(K_path, 'eta')
+  svntest.main.file_append(eta_path, "This is REPOS file 'eta'\n")
+  svntest.main.run_svn(None, 'add', upsilon_path, nu_path,
+                       kappa_path, I_path)
+
+  # Created expected output tree for 'svn ci'
+  expected_output = wc.State(wc_dir, {
+    'A/B/upsilon' : Item(verb='Adding'),
+    'A/C/nu' : Item(verb='Adding'),
+    'A/D/kappa' : Item(verb='Adding'),
+    'A/D/H/I' : Item(verb='Adding'),
+    'A/D/H/I/J' : Item(verb='Adding'),
+    'A/D/H/I/K' : Item(verb='Adding'),
+    'A/D/H/I/K/xi' : Item(verb='Adding'),
+    'A/D/H/I/K/eta' : Item(verb='Adding'),
+    'A/D/H/I/L' : Item(verb='Adding'),
+    })
+
+  # Create expected status tree.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'A/B/upsilon' : Item(status='  ', wc_rev=2),
+    'A/C/nu' : Item(status='  ', wc_rev=2),
+    'A/D/kappa' : Item(status='  ', wc_rev=2),
+    'A/D/H/I' : Item(status='  ', wc_rev=2),
+    'A/D/H/I/J' : Item(status='  ', wc_rev=2),
+    'A/D/H/I/K' : Item(status='  ', wc_rev=2),
+    'A/D/H/I/K/xi' : Item(status='  ', wc_rev=2),
+    'A/D/H/I/K/eta' : Item(status='  ', wc_rev=2),
+    'A/D/H/I/L' : Item(status='  ', wc_rev=2),
+    })
+
+  # Commit.
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  # Create various paths scheduled for addition which will obstruct
+  # the adds coming from the repos.
+  upsilon_backup_path = os.path.join(wc_backup, 'A', 'B', 'upsilon')
+  svntest.main.file_append(upsilon_backup_path,
+                           "This is the file 'upsilon'\n")
+  kappa_backup_path = os.path.join(wc_backup, 'A', 'D', 'kappa')
+  svntest.main.file_append(kappa_backup_path,
+                           "This is WC file 'kappa'\n")
+  I_backup_path = os.path.join(wc_backup, 'A', 'D', 'H', 'I')
+  os.mkdir(I_backup_path)
+  J_backup_path = os.path.join(I_backup_path, 'J')
+  os.mkdir(J_backup_path)
+  K_backup_path = os.path.join(I_backup_path, 'K')
+  os.mkdir(K_backup_path)
+  xi_backup_path = os.path.join(K_backup_path, 'xi')
+  svntest.main.file_append(xi_backup_path, "This is file 'xi'\n")
+  eta_backup_path = os.path.join(K_backup_path, 'eta')
+  svntest.main.file_append(eta_backup_path, "This is WC file 'eta'\n")
+  svntest.main.run_svn(None, 'add',
+                       upsilon_backup_path,
+                       kappa_backup_path,
+                       I_backup_path)
+
+  # Create expected output tree for a checkout of the wc_backup.
+  # 'E' = obstruction merged cleanly, 'A' = plain add, 'C' = conflict.
+  expected_output = wc.State(wc_backup, {
+    'A/B/upsilon' : Item(status='E '),
+    'A/C/nu' : Item(status='A '),
+    'A/D/H/I' : Item(status='E '),
+    'A/D/H/I/J' : Item(status='E '),
+    'A/D/H/I/K' : Item(status='E '),
+    'A/D/H/I/K/xi' : Item(status='E '),
+    'A/D/H/I/K/eta' : Item(status='C '),
+    'A/D/H/I/L' : Item(status='A '),
+    'A/D/kappa' : Item(status='C '),
+    })
+
+  # Create expected disk for checkout of wc_backup.
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({
+    'A/B/upsilon' : Item("This is the file 'upsilon'\n"),
+    'A/C/nu' : Item("This is the file 'nu'\n"),
+    'A/D/H/I' : Item(),
+    'A/D/H/I/J' : Item(),
+    'A/D/H/I/K' : Item(),
+    'A/D/H/I/K/xi' : Item("This is file 'xi'\n"),
+    'A/D/H/I/K/eta' : Item("\n".join(["<<<<<<< .mine",
+                                      "This is WC file 'eta'",
+                                      "||||||| .r0",
+                                      "=======",
+                                      "This is REPOS file 'eta'",
+                                      ">>>>>>> .r2",
+                                      ""])),
+    'A/D/H/I/L' : Item(),
+    'A/D/kappa' : Item("\n".join(["<<<<<<< .mine",
+                                  "This is WC file 'kappa'",
+                                  "||||||| .r0",
+                                  "=======",
+                                  "This is REPOS file 'kappa'",
+                                  ">>>>>>> .r2",
+                                  ""])),
+    })
+
+  # Create expected status tree for the checkout. Since the obstructing
+  # kappa and upsilon differ from the repos, they should show as modified.
+  expected_status = svntest.actions.get_virginal_state(wc_backup, 2)
+  expected_status.add({
+    'A/B/upsilon' : Item(status='  ', wc_rev=2),
+    'A/C/nu' : Item(status='  ', wc_rev=2),
+    'A/D/H/I' : Item(status='  ', wc_rev=2),
+    'A/D/H/I/J' : Item(status='  ', wc_rev=2),
+    'A/D/H/I/K' : Item(status='  ', wc_rev=2),
+    'A/D/H/I/K/xi' : Item(status='  ', wc_rev=2),
+    'A/D/H/I/K/eta' : Item(status='C ', wc_rev=2),
+    'A/D/H/I/L' : Item(status='  ', wc_rev=2),
+    'A/D/kappa' : Item(status='C ', wc_rev=2),
+    })
+
+  # "Extra" files that we expect to result from the conflicts.
+  # NOTE(review): these are regex patterns consumed by the harness;
+  # prefer raw strings (r'eta\.r0') so the '\.' escapes don't trigger
+  # invalid-escape-sequence warnings on newer Pythons — confirm with
+  # the harness maintainers before changing.
+  extra_files = ['eta\.r0', 'eta\.r2', 'eta\.mine',
+                 'kappa\.r0', 'kappa\.r2', 'kappa\.mine']
+
+  # Perform the checkout and check the results in three ways.
+  # We use --force here because run_and_verify_checkout() will delete
+  # wc_backup before performing the checkout otherwise.
+  svntest.actions.run_and_verify_checkout(sbox.repo_url, wc_backup,
+                                          expected_output, expected_disk,
+                                          [], '--force',
+                                          extra_files=extra_files)
+
+  svntest.actions.run_and_verify_status(wc_backup, expected_status)
+
+  # Some obstructions are still not permitted:
+  #
+  # Test that file and dir obstructions scheduled for addition *with*
+  # history fail when checkout tries to add the same path.
+
+  # URL to URL copy of A/D/G to A/D/M.
+  G_URL = sbox.repo_url + '/A/D/G'
+  M_URL = sbox.repo_url + '/A/D/M'
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'cp', G_URL, M_URL, '-m', '')
+
+  # WC to WC copy of A/D/H to A/D/M. (M is now scheduled for addition
+  # with history in WC and pending addition from the repos).
+  D_path = os.path.join(wc_dir, 'A', 'D')
+  H_path = os.path.join(wc_dir, 'A', 'D', 'H')
+  M_path = os.path.join(wc_dir, 'A', 'D', 'M')
+
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'cp', H_path, M_path)
+
+  # URL to URL copy of A/B/E/alpha to A/B/F/omicron.
+  omega_URL = sbox.repo_url + '/A/B/E/alpha'
+  omicron_URL = sbox.repo_url + '/A/B/F/omicron'
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'cp', omega_URL, omicron_URL,
+                                     '-m', '')
+
+  # WC to WC copy of A/D/H/chi to /A/B/F/omicron. (omicron is now
+  # scheduled for addition with history in WC and pending addition
+  # from the repos).
+  F_path = os.path.join(wc_dir, 'A', 'B', 'F')
+  omicron_path = os.path.join(wc_dir, 'A', 'B', 'F', 'omicron')
+  chi_path = os.path.join(wc_dir, 'A', 'D', 'H', 'chi')
+
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'cp', chi_path,
+                                     omicron_path)
+
+  # Try to co M's Parent.
+  # Pre-checkout status: the copies show as 'A +' (added with history,
+  # wc_rev '-').
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'A/B/F/omicron' : Item(status='A ', copied='+', wc_rev='-'),
+    'A/B/upsilon' : Item(status='  ', wc_rev=2),
+    'A/C/nu' : Item(status='  ', wc_rev=2),
+    'A/D/kappa' : Item(status='  ', wc_rev=2),
+    'A/D/H/I' : Item(status='  ', wc_rev=2),
+    'A/D/H/I/J' : Item(status='  ', wc_rev=2),
+    'A/D/H/I/K' : Item(status='  ', wc_rev=2),
+    'A/D/H/I/K/xi' : Item(status='  ', wc_rev=2),
+    'A/D/H/I/K/eta' : Item(status='  ', wc_rev=2),
+    'A/D/H/I/L' : Item(status='  ', wc_rev=2),
+    'A/D/M' : Item(status='A ', copied='+', wc_rev='-'),
+    'A/D/M/psi' : Item(status='  ', copied='+', wc_rev='-'),
+    'A/D/M/chi' : Item(status='  ', copied='+', wc_rev='-'),
+    'A/D/M/omega' : Item(status='  ', copied='+', wc_rev='-'),
+    'A/D/M/I' : Item(status='A ', copied='+', wc_rev='-',
+                     entry_status='  '), # A/D/MI is a new op_root
+    'A/D/M/I/J' : Item(status='  ', copied='+', wc_rev='-'),
+    'A/D/M/I/K' : Item(status='  ', copied='+', wc_rev='-'),
+    'A/D/M/I/K/xi' : Item(status='  ', copied='+', wc_rev='-'),
+    'A/D/M/I/K/eta' : Item(status='  ', copied='+', wc_rev='-'),
+    'A/D/M/I/L' : Item(status='  ', copied='+', wc_rev='-'),
+    })
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # The checkout over A/D sees the add-with-history at A/D/M and flags
+  # a tree conflict ('C') there instead of merging.
+  expected_output = wc.State(wc_dir, {
+    'A/D/M' : Item(status='  ', treeconflict='C'),
+    'A/D/M/rho' : Item(status='  ', treeconflict='A'),
+    'A/D/M/pi' : Item(status='  ', treeconflict='A'),
+    'A/D/M/tau' : Item(status='  ', treeconflict='A'),
+    })
+  expected_disk = wc.State('', {
+    'gamma' : Item("This is the file 'gamma'.\n"),
+    'G/pi' : Item("This is the file 'pi'.\n"),
+    'G/rho' : Item("This is the file 'rho'.\n"),
+    'G/tau' : Item("This is the file 'tau'.\n"),
+    'H/I' : Item(),
+    'H/I/J' : Item(),
+    'H/I/K' : Item(),
+    'H/I/K/xi' : Item("This is file 'xi'\n"),
+    'H/I/K/eta' : Item("This is REPOS file 'eta'\n"),
+    'H/I/L' : Item(),
+    'H/chi' : Item("This is the file 'chi'.\n"),
+    'H/psi' : Item("This is the file 'psi'.\n"),
+    'H/omega' : Item("This is the file 'omega'.\n"),
+    'M/I' : Item(),
+    'M/I/J' : Item(),
+    'M/I/K' : Item(),
+    'M/I/K/xi' : Item("This is file 'xi'\n"),
+    'M/I/K/eta' : Item("This is REPOS file 'eta'\n"),
+    'M/I/L' : Item(),
+    'M/chi' : Item("This is the file 'chi'.\n"),
+    'M/psi' : Item("This is the file 'psi'.\n"),
+    'M/omega' : Item("This is the file 'omega'.\n"),
+    'kappa' : Item("This is REPOS file 'kappa'\n"),
+    })
+  svntest.actions.run_and_verify_checkout(sbox.repo_url + '/A/D',
+                                          D_path,
+                                          expected_output,
+                                          expected_disk,
+                                          [], '--force')
+
+  expected_status.tweak('A/D/M', treeconflict='C', status='R ')
+  expected_status.tweak(
+    'A/D',
+    'A/D/G',
+    'A/D/G/pi',
+    'A/D/G/rho',
+    'A/D/G/tau',
+    'A/D/gamma',
+    'A/D/kappa',
+    'A/D/H',
+    'A/D/H/I',
+    'A/D/H/I/J',
+    'A/D/H/I/K',
+    'A/D/H/I/K/xi',
+    'A/D/H/I/K/eta',
+    'A/D/H/I/L', wc_rev=4)
+  expected_status.add({
+    'A/D/H/chi' : Item(status='  ', wc_rev=4),
+    'A/D/H/psi' : Item(status='  ', wc_rev=4),
+    'A/D/H/omega' : Item(status='  ', wc_rev=4),
+    'A/D/M/pi' : Item(status='D ', wc_rev=4),
+    'A/D/M/rho' : Item(status='D ', wc_rev=4),
+    'A/D/M/tau' : Item(status='D ', wc_rev=4),
+    })
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Try to co omicron's parent.
+  expected_output = wc.State(wc_dir, {
+    'A/B/F/omicron' : Item(status='  ', treeconflict='C'),
+    })
+  expected_disk = wc.State('', {
+    'omicron' : Item("This is the file 'chi'.\n"),
+    })
+  svntest.actions.run_and_verify_checkout(sbox.repo_url + '/A/B/F',
+                                          F_path,
+                                          expected_output,
+                                          expected_disk,
+                                          [], '--force')
+
+  expected_status.tweak('A/B/F/omicron', treeconflict='C', status='R ')
+  expected_status.add({
+    'A/B/F' : Item(status='  ', wc_rev=4),
+    })
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+# Test if checking out from a Windows driveroot is supported.
+@SkipUnless(svntest.main.is_os_windows)
+def checkout_wc_from_drive(sbox):
+  "checkout from the root of a Windows drive"
+
+  # Maps the repository folder to a fresh drive letter with 'subst',
+  # then checks out from file:///<drive>:/ at three depths (root, /A,
+  # /A/D).  The substituted drive is always removed in the finally
+  # block below.
+  def find_the_next_available_drive_letter():
+    "find the first available drive"
+
+    # get the list of used drive letters, use some Windows specific function.
+    try:
+      import win32api
+
+      drives=win32api.GetLogicalDriveStrings()
+      drives=drives.split('\000')
+
+      for d in range(ord('G'), ord('Z')+1):
+        drive = chr(d)
+        if not drive + ':\\' in drives:
+          return drive
+    except ImportError:
+      # In ActiveState python x64 win32api is not available
+      for d in range(ord('G'), ord('Z')+1):
+        drive = chr(d)
+        if not os.path.isdir(drive + ':\\'):
+          return drive
+
+    # no free letter in G..Z
+    return None
+
+  # just create an empty folder, we'll checkout later.
+  sbox.build(create_wc = False)
+  svntest.main.safe_rmtree(sbox.wc_dir)
+  os.mkdir(sbox.wc_dir)
+
+  # create a virtual drive to the repository folder
+  drive = find_the_next_available_drive_letter()
+  if drive is None:
+    raise svntest.Skip('No drive letter available')
+
+  subprocess.call(['subst', drive +':', sbox.repo_dir])
+  repo_url = 'file:///' + drive + ':/'
+  wc_dir = sbox.wc_dir
+  was_cwd = os.getcwd()
+
+  try:
+    # Checkout 1: whole repository root.
+    expected_wc = svntest.main.greek_state.copy()
+    expected_output = wc.State(wc_dir, {
+      'A' : Item(status='A '),
+      'A/D' : Item(status='A '),
+      'A/D/H' : Item(status='A '),
+      'A/D/H/psi' : Item(status='A '),
+      'A/D/H/chi' : Item(status='A '),
+      'A/D/H/omega' : Item(status='A '),
+      'A/D/G' : Item(status='A '),
+      'A/D/G/tau' : Item(status='A '),
+      'A/D/G/pi' : Item(status='A '),
+      'A/D/G/rho' : Item(status='A '),
+      'A/D/gamma' : Item(status='A '),
+      'A/C' : Item(status='A '),
+      'A/mu' : Item(status='A '),
+      'A/B' : Item(status='A '),
+      'A/B/E' : Item(status='A '),
+      'A/B/E/alpha' : Item(status='A '),
+      'A/B/E/beta' : Item(status='A '),
+      'A/B/F' : Item(status='A '),
+      'A/B/lambda' : Item(status='A '),
+      'iota' : Item(status='A '),
+      })
+    svntest.actions.run_and_verify_checkout(repo_url, wc_dir,
+                                            expected_output, expected_wc)
+
+    # Checkout 2: subtree /A.
+    wc2_dir = sbox.add_wc_path('2')
+    expected_output = wc.State(wc2_dir, {
+      'D' : Item(status='A '),
+      'D/H' : Item(status='A '),
+      'D/H/psi' : Item(status='A '),
+      'D/H/chi' : Item(status='A '),
+      'D/H/omega' : Item(status='A '),
+      'D/G' : Item(status='A '),
+      'D/G/tau' : Item(status='A '),
+      'D/G/pi' : Item(status='A '),
+      'D/G/rho' : Item(status='A '),
+      'D/gamma' : Item(status='A '),
+      'C' : Item(status='A '),
+      'mu' : Item(status='A '),
+      'B' : Item(status='A '),
+      'B/E' : Item(status='A '),
+      'B/E/alpha' : Item(status='A '),
+      'B/E/beta' : Item(status='A '),
+      'B/F' : Item(status='A '),
+      'B/lambda' : Item(status='A '),
+      })
+
+    expected_wc = wc.State('', {
+      'C' : Item(),
+      'B/E/beta' : Item(contents="This is the file 'beta'.\n"),
+      'B/E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+      'B/lambda' : Item(contents="This is the file 'lambda'.\n"),
+      'B/F' : Item(),
+      'D/H/omega' : Item(contents="This is the file 'omega'.\n"),
+      'D/H/psi' : Item(contents="This is the file 'psi'.\n"),
+      'D/H/chi' : Item(contents="This is the file 'chi'.\n"),
+      'D/G/rho' : Item(contents="This is the file 'rho'.\n"),
+      'D/G/tau' : Item(contents="This is the file 'tau'.\n"),
+      'D/G/pi' : Item(contents="This is the file 'pi'.\n"),
+      'D/gamma' : Item(contents="This is the file 'gamma'.\n"),
+      'mu' : Item(contents="This is the file 'mu'.\n"),
+      })
+
+    svntest.actions.run_and_verify_checkout(repo_url + '/A', wc2_dir,
+                                            expected_output, expected_wc)
+
+    # Checkout 3: deeper subtree /A/D.
+    wc3_dir = sbox.add_wc_path('3')
+    expected_output = wc.State(wc3_dir, {
+      'H' : Item(status='A '),
+      'H/psi' : Item(status='A '),
+      'H/chi' : Item(status='A '),
+      'H/omega' : Item(status='A '),
+      'G' : Item(status='A '),
+      'G/tau' : Item(status='A '),
+      'G/pi' : Item(status='A '),
+      'G/rho' : Item(status='A '),
+      'gamma' : Item(status='A '),
+      })
+
+    expected_wc = wc.State('', {
+      'H/chi' : Item(contents="This is the file 'chi'.\n"),
+      'H/psi' : Item(contents="This is the file 'psi'.\n"),
+      'H/omega' : Item(contents="This is the file 'omega'.\n"),
+      'G/pi' : Item(contents="This is the file 'pi'.\n"),
+      'G/tau' : Item(contents="This is the file 'tau'.\n"),
+      'G/rho' : Item(contents="This is the file 'rho'.\n"),
+      'gamma' : Item(contents="This is the file 'gamma'.\n"),
+      })
+
+    svntest.actions.run_and_verify_checkout(repo_url + '/A/D', wc3_dir,
+                                            expected_output, expected_wc)
+
+  finally:
+    os.chdir(was_cwd)
+    # cleanup the virtual drive
+    subprocess.call(['subst', '/D', drive +':'])
+
+#----------------------------------------------------------------------
+
+# list all tests here, starting with None:
+# (the leading None reserves index 0 so tests can be selected by their
+# 1-based number on the command line — svntest harness convention)
+test_list = [ None,
+              checkout_with_obstructions,
+              forced_checkout_of_file_with_dir_obstructions,
+              forced_checkout_of_dir_with_file_obstructions,
+              forced_checkout_with_faux_obstructions,
+              forced_checkout_with_real_obstructions,
+              forced_checkout_with_real_obstructions_and_unversioned_files,
+              forced_checkout_with_versioned_obstruction,
+              import_and_checkout,
+              checkout_broken_eol,
+              checkout_creates_intermediate_folders,
+              checkout_peg_rev,
+              checkout_peg_rev_date,
+              co_with_obstructing_local_adds,
+              checkout_wc_from_drive
+             ]
+
+if __name__ == "__main__":
+  # run_tests() exits the process itself.
+  svntest.main.run_tests(test_list)
+  # NOTREACHED
+
+### End of file.
diff --git a/subversion/tests/cmdline/commit_tests.py b/subversion/tests/cmdline/commit_tests.py
new file mode 100755
index 0000000..4f4a6f9
--- /dev/null
+++ b/subversion/tests/cmdline/commit_tests.py
@@ -0,0 +1,3234 @@
+#!/usr/bin/env python
+#
+# commit_tests.py: testing fancy commit cases.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import sys, os, re
+
+# Our testing module
+import svntest
+from svntest import wc
+
+from prop_tests import binary_mime_type_on_text_file_warning
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+from svntest.main import server_has_revprop_commit, \
+ server_gets_client_capabilities
+from svntest.actions import inject_conflict_into_wc
+
+######################################################################
+# Utilities
+#
+
+def is_non_posix_os_or_cygwin_platform():
+  # True on non-POSIX systems (e.g. Windows), and also on Cygwin even
+  # though it otherwise reports as POSIX.
+  return (not svntest.main.is_posix_os()) or sys.platform == 'cygwin'
+
+def get_standard_state(wc_dir):
+  """Return a status list reflecting the local mods made by
+  make_standard_slew_of_changes()."""
+
+  state = svntest.actions.get_virginal_state(wc_dir, 1)
+
+  # Status code legend: first column = text status, second = prop status.
+  # ' M' prop mod, 'M ' text mod, 'R ' replaced, 'D ' deleted,
+  # 'MM' text+prop mod, 'A ' added.
+  state.tweak('', 'A/D', 'A/D/G/pi', status=' M')
+  state.tweak('A/B/lambda', status='M ')
+  state.tweak('A/B/E', 'A/D/H/chi', status='R ')
+  state.tweak('A/B/E/alpha', 'A/B/E/beta', 'A/C', 'A/D/gamma',
+              'A/D/G/rho', status='D ')
+  state.tweak('A/D/H/omega', status='MM')
+
+  # New things
+  # (wc_rev=0 because these nodes have never been committed)
+  state.add({
+    'Q' : Item(status='A ', wc_rev=0),
+    'Q/floo' : Item(status='A ', wc_rev=0),
+    'A/D/H/gloo' : Item(status='A ', wc_rev=0),
+    'A/B/E/bloo' : Item(status='A ', wc_rev=0),
+    })
+
+  return state
+
+
+def make_standard_slew_of_changes(wc_dir):
+  """Make a specific set of local mods to WC_DIR. These will be used
+  by every commit-test. Verify the 'svn status' output, and return the
+  (pre-commit) status tree."""
+
+  # Cache current working directory, move into wc_dir
+  # (all paths below are relative; the original CWD is restored at the end)
+  was_cwd = os.getcwd()
+  os.chdir(wc_dir)
+
+  # Add a directory
+  os.mkdir('Q')
+  svntest.main.run_svn(None, 'add', 'Q')
+
+  # Remove two directories
+  A_B_E = os.path.join('A', 'B', 'E')
+  svntest.main.run_svn(None, 'rm', A_B_E)
+  svntest.main.run_svn(None, 'rm', os.path.join('A', 'C'))
+
+  # Replace one of the removed directories
+  # But first recreate if it doesn't exist (single-db)
+  if not os.path.exists(A_B_E):
+    os.mkdir(A_B_E)
+  svntest.main.run_svn(None, 'add', A_B_E)
+
+  # Make property mods to two directories
+  svntest.main.run_svn(None, 'propset', 'foo', 'bar', os.curdir)
+  svntest.main.run_svn(None, 'propset', 'foo2', 'bar2', os.path.join('A', 'D'))
+
+  # Add three files
+  svntest.main.file_append(os.path.join('A', 'B', 'E', 'bloo'), "hi")
+  svntest.main.file_append(os.path.join('A', 'D', 'H', 'gloo'), "hello")
+  svntest.main.file_append(os.path.join('Q', 'floo'), "yo")
+  svntest.main.run_svn(None, 'add', os.path.join('A', 'B', 'E', 'bloo'))
+  svntest.main.run_svn(None, 'add', os.path.join('A', 'D', 'H', 'gloo'))
+  svntest.main.run_svn(None, 'add', os.path.join('Q', 'floo'))
+
+  # Remove three files
+  svntest.main.run_svn(None, 'rm', os.path.join('A', 'D', 'G', 'rho'))
+  svntest.main.run_svn(None, 'rm', os.path.join('A', 'D', 'H', 'chi'))
+  svntest.main.run_svn(None, 'rm', os.path.join('A', 'D', 'gamma'))
+
+  # Replace one of the removed files
+  svntest.main.file_append(os.path.join('A', 'D', 'H', 'chi'), "chi")
+  svntest.main.run_svn(None, 'add', os.path.join('A', 'D', 'H', 'chi'))
+
+  # Make textual mods to two files
+  svntest.main.file_append(os.path.join('A', 'B', 'lambda'), "new ltext")
+  svntest.main.file_append(os.path.join('A', 'D', 'H', 'omega'), "new otext")
+
+  # Make property mods to three files
+  svntest.main.run_svn(None, 'propset', 'blue', 'azul',
+                       os.path.join('A', 'D', 'H', 'omega'))
+  svntest.main.run_svn(None, 'propset', 'green', 'verde',
+                       os.path.join('Q', 'floo'))
+  svntest.main.run_svn(None, 'propset', 'red', 'rojo',
+                       os.path.join('A', 'D', 'G', 'pi'))
+
+  # Restore the CWD.
+  os.chdir(was_cwd)
+
+  # Build an expected status tree.
+  expected_status = get_standard_state(wc_dir)
+
+  # Verify status -- all local mods should be present.
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  return expected_status
+
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+
+#----------------------------------------------------------------------
+
+def commit_one_file(sbox):
+  "commit one file"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  expected_status = make_standard_slew_of_changes(wc_dir)
+
+  omega_path = sbox.ospath('A/D/H/omega')
+
+  # Create expected state.
+  # Only omega is named on the commit line, so only it goes to rev 2;
+  # every other local mod from the slew must remain untouched.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/D/H/omega' : Item(verb='Sending'),
+    })
+  expected_status.tweak('A/D/H/omega', wc_rev=2, status='  ')
+
+  # Commit the one file.
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status,
+                                        [],
+                                        omega_path)
+
+
+#----------------------------------------------------------------------
+
+def commit_one_new_file(sbox):
+  "commit one newly added file"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  expected_status = make_standard_slew_of_changes(wc_dir)
+
+  gloo_path = sbox.ospath('A/D/H/gloo')
+
+  # Create expected state.
+  # gloo is scheduled 'A' by the slew; committing just it should
+  # produce a single 'Adding' and leave the other mods pending.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/D/H/gloo' : Item(verb='Adding'),
+    })
+  expected_status.tweak('A/D/H/gloo', wc_rev=2, status='  ')
+
+  # Commit the one file.
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status,
+                                        [],
+                                        gloo_path)
+
+
+#----------------------------------------------------------------------
+
+def commit_one_new_binary_file(sbox):
+  "commit one newly added binary file"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  expected_status = make_standard_slew_of_changes(wc_dir)
+
+  gloo_path = sbox.ospath('A/D/H/gloo')
+  # Mark the text file binary; this propset is expected to emit the
+  # "binary mime-type on text file" warning, which run_svn tolerates.
+  svntest.main.run_svn(binary_mime_type_on_text_file_warning,
+                       'propset', 'svn:mime-type',
+                       'application/octet-stream', gloo_path)
+
+  # Create expected state.
+  # Binary adds are reported as 'Adding (bin)' rather than 'Adding'.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/D/H/gloo' : Item(verb='Adding (bin)'),
+    })
+  expected_status.tweak('A/D/H/gloo', wc_rev=2, status='  ')
+
+  # Commit the one file.
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status,
+                                        [],
+                                        gloo_path)
+
+
+#----------------------------------------------------------------------
+
+def commit_multiple_targets(sbox):
+  "commit multiple targets"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # This test will commit three targets: psi, B, and pi. In that order.
+
+  # Make local mods to many files.
+  AB_path = sbox.ospath('A/B')
+  lambda_path = sbox.ospath('A/B/lambda')
+  rho_path = sbox.ospath('A/D/G/rho')
+  pi_path = sbox.ospath('A/D/G/pi')
+  omega_path = sbox.ospath('A/D/H/omega')
+  psi_path = sbox.ospath('A/D/H/psi')
+  svntest.main.file_append(lambda_path, 'new appended text for lambda')
+  svntest.main.file_append(rho_path, 'new appended text for rho')
+  svntest.main.file_append(pi_path, 'new appended text for pi')
+  svntest.main.file_append(omega_path, 'new appended text for omega')
+  svntest.main.file_append(psi_path, 'new appended text for psi')
+
+  # Just for kicks, add a property to A/D/G as well. We'll make sure
+  # that it *doesn't* get committed.
+  ADG_path = sbox.ospath('A/D/G')
+  svntest.main.run_svn(None, 'propset', 'foo', 'bar', ADG_path)
+
+  # Create expected output tree for 'svn ci'. We should see changes
+  # only on these three targets, no others.
+  # (lambda is included because target AB_path is its parent dir.)
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/D/H/psi' : Item(verb='Sending'),
+    'A/B/lambda' : Item(verb='Sending'),
+    'A/D/G/pi' : Item(verb='Sending'),
+    })
+
+  # Create expected status tree; all local revisions should be at 1,
+  # but our three targets should be at 2.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/D/H/psi', 'A/B/lambda', 'A/D/G/pi', wc_rev=2)
+
+  # rho and omega should still display as locally modified:
+  expected_status.tweak('A/D/G/rho', 'A/D/H/omega', status='M ')
+
+  # A/D/G should still have a local property set, too.
+  expected_status.tweak('A/D/G', status=' M')
+
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status,
+                                        [],
+                                        psi_path, AB_path, pi_path)
+
+#----------------------------------------------------------------------
+
+
+def commit_multiple_targets_2(sbox):
+  "commit multiple targets, 2nd variation"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # This test will commit four targets: psi, B, omega and pi. In that order.
+  # (Same as commit_multiple_targets, plus omega as an explicit target.)
+
+  # Make local mods to many files.
+  AB_path = sbox.ospath('A/B')
+  lambda_path = sbox.ospath('A/B/lambda')
+  rho_path = sbox.ospath('A/D/G/rho')
+  pi_path = sbox.ospath('A/D/G/pi')
+  omega_path = sbox.ospath('A/D/H/omega')
+  psi_path = sbox.ospath('A/D/H/psi')
+  svntest.main.file_append(lambda_path, 'new appended text for lambda')
+  svntest.main.file_append(rho_path, 'new appended text for rho')
+  svntest.main.file_append(pi_path, 'new appended text for pi')
+  svntest.main.file_append(omega_path, 'new appended text for omega')
+  svntest.main.file_append(psi_path, 'new appended text for psi')
+
+  # Just for kicks, add a property to A/D/G as well. We'll make sure
+  # that it *doesn't* get committed.
+  ADG_path = sbox.ospath('A/D/G')
+  svntest.main.run_svn(None, 'propset', 'foo', 'bar', ADG_path)
+
+  # Created expected output tree for 'svn ci'. We should see changes
+  # only on these three targets, no others.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/D/H/psi' : Item(verb='Sending'),
+    'A/B/lambda' : Item(verb='Sending'),
+    'A/D/H/omega' : Item(verb='Sending'),
+    'A/D/G/pi' : Item(verb='Sending'),
+    })
+
+  # Create expected status tree; all local revisions should be at 1,
+  # but our four targets should be at 2.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/D/H/psi', 'A/B/lambda', 'A/D/G/pi', 'A/D/H/omega',
+                        wc_rev=2)
+
+  # rho should still display as locally modified:
+  expected_status.tweak('A/D/G/rho', status='M ')
+
+  # A/D/G should still have a local property set, too.
+  expected_status.tweak('A/D/G', status=' M')
+
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status,
+                                        [],
+                                        psi_path, AB_path,
+                                        omega_path, pi_path)
+
+#----------------------------------------------------------------------
+
+def commit_inclusive_dir(sbox):
+  "commit wc_dir/A/D -- includes D. (anchor=A, tgt=D)"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  expected_status = make_standard_slew_of_changes(wc_dir)
+
+  D_path = sbox.ospath('A/D')
+
+  # Create expected state.
+  # Committing the directory target picks up every pending change at or
+  # below A/D; mods outside that subtree (Q, A/B/...) stay uncommitted.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/D' : Item(verb='Sending'),
+    'A/D/G/pi' : Item(verb='Sending'),
+    'A/D/G/rho' : Item(verb='Deleting'),
+    'A/D/H/gloo' : Item(verb='Adding'),
+    'A/D/H/chi' : Item(verb='Replacing'),
+    'A/D/H/omega' : Item(verb='Sending'),
+    'A/D/gamma' : Item(verb='Deleting'),
+    })
+
+  expected_status.remove('A/D/G/rho', 'A/D/gamma')
+  expected_status.tweak('A/D', 'A/D/G/pi', 'A/D/H/omega',
+                        wc_rev=2, status='  ')
+  expected_status.tweak('A/D/H/chi', 'A/D/H/gloo', wc_rev=2, status='  ')
+
+  # Commit the one file.
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status,
+                                        [],
+                                        D_path)
+
+#----------------------------------------------------------------------
+
+def commit_top_dir(sbox):
+  "commit wc_dir -- (anchor=wc_dir, tgt={})"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  expected_status = make_standard_slew_of_changes(wc_dir)
+
+  # Create expected state.
+  # Committing the WC root sends every pending change from the slew.
+  expected_output = svntest.wc.State(wc_dir, {
+    '' : Item(verb='Sending'),
+    'Q' : Item(verb='Adding'),
+    'Q/floo' : Item(verb='Adding'),
+    'A/B/E' : Item(verb='Replacing'),
+    'A/B/E/bloo' : Item(verb='Adding'),
+    'A/B/lambda' : Item(verb='Sending'),
+    'A/C' : Item(verb='Deleting'),
+    'A/D' : Item(verb='Sending'),
+    'A/D/G/pi' : Item(verb='Sending'),
+    'A/D/G/rho' : Item(verb='Deleting'),
+    'A/D/H/gloo' : Item(verb='Adding'),
+    'A/D/H/chi' : Item(verb='Replacing'),
+    'A/D/H/omega' : Item(verb='Sending'),
+    'A/D/gamma' : Item(verb='Deleting'),
+    })
+
+  # alpha/beta vanish because their parent A/B/E was replaced.
+  expected_status.remove('A/D/G/rho', 'A/D/gamma', 'A/C',
+                         'A/B/E/alpha', 'A/B/E/beta')
+  expected_status.tweak('A/D', 'A/D/G/pi', 'A/D/H/omega', 'Q/floo', '',
+                        wc_rev=2, status='  ')
+  expected_status.tweak('A/D/H/chi', 'Q', 'A/B/E', 'A/B/E/bloo', 'A/B/lambda',
+                        'A/D/H/gloo', wc_rev=2, status='  ')
+
+  # Commit the one file.
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status,
+                                        [],
+                                        wc_dir)
+
+#----------------------------------------------------------------------
+
+# Regression test for bug reported by Jon Trowbridge:
+#
+# From: Jon Trowbridge <trow@ximian.com>
+# Subject: svn segfaults if you commit a file that hasn't been added
+# To: dev@subversion.tigris.org
+# Date: 17 Jul 2001 03:20:55 -0500
+# Message-Id: <995358055.16975.5.camel@morimoto>
+#
+# The problem is that report_single_mod in libsvn_wc/adm_crawler.c is
+# called with its entry parameter as NULL, but the code doesn't
+# check that entry is non-NULL before trying to dereference it.
+#
+# This bug never had an issue number.
+#
+def commit_unversioned_thing(sbox):
+  "committing unversioned object produces error"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Create an unversioned file in the wc.
+  svntest.main.file_append(sbox.ospath('blorg'), "nothing to see")
+
+  # Commit a non-existent file and *expect* failure:
+  # (None/None output and status trees — nothing should be committed;
+  # the regex matches the expected stderr)
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        None,
+                                        None,
+                                        ".*is not under version control.*",
+                                        os.path.join(wc_dir,'blorg'))
+
+#----------------------------------------------------------------------
+
+# regression test for bug #391
+
+def nested_dir_replacements(sbox):
+ "replace two nested dirs, verify empty contents"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A_D = sbox.ospath('A/D')
+
+ # Delete and re-add A/D (a replacement), and A/D/H (another replace).
+ svntest.main.run_svn(None, 'rm', A_D)
+
+ # Recreate directories for single-db
+ if not os.path.exists(A_D):
+ os.mkdir(A_D)
+ os.mkdir(os.path.join(A_D, 'H'))
+ svntest.main.run_svn(None, 'add', '--depth=empty', A_D)
+ svntest.main.run_svn(None, 'add', '--depth=empty', os.path.join(A_D, 'H'))
+
+ # For kicks, add new file A/D/bloo.
+ svntest.main.file_append(os.path.join(A_D, 'bloo'), "hi")
+ svntest.main.run_svn(None, 'add', os.path.join(A_D, 'bloo'))
+
+ # Verify pre-commit status:
+ #
+ # - A/D should be scheduled as a replacement, "R" at rev 1
+ # (rev 1 because it existed before at rev 1)
+ #
+ # - A/D/H should be a local addition "A"
+ # (and exists as shadowed node in BASE)
+ #
+ # - A/D/bloo scheduled as "A" at rev 0
+ # (rev 0 because it did not exist before)
+ #
+ # - ALL other children of A/D scheduled as "D" at rev 1
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D', status='R ', wc_rev=1)
+ # In the entries world we couldn't represent H properly, so it shows
+ # A/D/H as a replacement against BASE
+ expected_status.tweak('A/D/H', status='A ', wc_rev='-',
+ entry_status='R ', entry_rev='1')
+ expected_status.add({
+ 'A/D/bloo' : Item(status='A ', wc_rev=0),
+ })
+ expected_status.tweak('A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau',
+ 'A/D/H/chi', 'A/D/H/omega', 'A/D/H/psi', 'A/D/gamma',
+ status='D ')
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Build expected post-commit trees:
+
+ # Create expected output tree.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D' : Item(verb='Replacing'),
+ 'A/D/H' : Item(verb='Adding'),
+ 'A/D/bloo' : Item(verb='Adding'),
+ })
+
+ # Create expected status tree.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D', 'A/D/H', wc_rev=2)
+ expected_status.add({
+ 'A/D/bloo' : Item(status=' ', wc_rev=2),
+ })
+ expected_status.remove('A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau',
+ 'A/D/H/chi', 'A/D/H/omega', 'A/D/H/psi', 'A/D/gamma')
+
+ # Commit from the top of the working copy and verify output & status.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+
+# Testing part 1 of the "Greg Hudson" problem -- specifically, that
+# our use of the "existence=deleted" flag is working properly in cases
+# where the parent directory's revision lags behind a deleted child's
+# revision.
+
+def hudson_part_1(sbox):
+ "hudson prob 1.0: delete file, commit, update"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Remove gamma from the working copy.
+ gamma_path = sbox.ospath('A/D/gamma')
+ svntest.main.run_svn(None, 'rm', gamma_path)
+
+ # Create expected commit output.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/gamma' : Item(verb='Deleting'),
+ })
+
+ # After committing, status should show no sign of gamma.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/D/gamma')
+
+ # Commit the deletion of gamma and verify.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Now gamma should be marked as `deleted' under the hood. When we
+ # update, we should see no output, and a perfect, virginal status list
+ # at revision 2. (The `deleted' entry should be removed.)
+
+ # Expected output of update: nothing.
+ expected_output = svntest.wc.State(wc_dir, {})
+
+ # Expected disk tree: everything but gamma
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/D/gamma')
+
+ # Expected status after update: totally clean revision 2, minus gamma.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.remove('A/D/gamma')
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+
+#----------------------------------------------------------------------
+
+# Testing part 1 of the "Greg Hudson" problem -- variation on previous
+# test, removing a directory instead of a file this time.
+
+def hudson_part_1_variation_1(sbox):
+ "hudson prob 1.1: delete dir, commit, update"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Remove H from the working copy.
+ H_path = sbox.ospath('A/D/H')
+ svntest.main.run_svn(None, 'rm', H_path)
+
+ # Create expected commit output.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/H' : Item(verb='Deleting'),
+ })
+
+ # After committing, status should show no sign of H or its contents
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/D/H', 'A/D/H/chi', 'A/D/H/omega', 'A/D/H/psi')
+
+ # Commit the deletion of H and verify.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Now H should be marked as `deleted' under the hood. When we
+ # update, we should see no output, and a perfect, virginal status
+ # list at revision 2. (The `deleted' entry should be removed.)
+
+ # Expected output of update: nothing.
+ expected_output = svntest.wc.State(wc_dir, {})
+
+ # Expected disk tree: everything except H and the files in H
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/D/H', 'A/D/H/chi', 'A/D/H/omega', 'A/D/H/psi')
+
+ # Expected status after update: totally clean revision 2, minus H.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.remove('A/D/H', 'A/D/H/chi', 'A/D/H/omega', 'A/D/H/psi')
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+#----------------------------------------------------------------------
+
+# Testing part 1 of the "Greg Hudson" problem -- variation 2. In this
+# test, we make sure that a file that is BOTH `deleted' and scheduled
+# for addition can be correctly committed & merged.
+
+def hudson_part_1_variation_2(sbox):
+ "hudson prob 1.2: delete, commit, re-add, commit"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Remove gamma from the working copy.
+ gamma_path = sbox.ospath('A/D/gamma')
+ svntest.main.run_svn(None, 'rm', gamma_path)
+
+ # Create expected commit output.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/gamma' : Item(verb='Deleting'),
+ })
+
+ # After committing, status should show no sign of gamma.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/D/gamma')
+
+ # Commit the deletion of gamma and verify.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Now gamma should be marked as `deleted' under the hood.
+ # Go ahead and re-add gamma, so that is *also* scheduled for addition.
+ svntest.main.file_append(gamma_path, "added gamma")
+ svntest.main.run_svn(None, 'add', gamma_path)
+
+ # For sanity, examine status: everything should still be at revision 1,
+ # with gamma scheduled for addition (at rev 0, as a plain add).
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/gamma', wc_rev=0, status='A ')
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Create expected commit output.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/gamma' : Item(verb='Adding'),
+ })
+
+ # After committing, status should show only gamma at revision 3.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/gamma', wc_rev=3)
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+
+#----------------------------------------------------------------------
+
+# Testing part 2 of the "Greg Hudson" problem.
+#
+# In this test, we make sure that we're UNABLE to commit a propchange
+# on an out-of-date directory.
+
+def hudson_part_2(sbox):
+ "hudson prob 2.0: prop commit on old dir fails"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Remove gamma from the working copy.
+ D_path = sbox.ospath('A/D')
+ gamma_path = sbox.ospath('A/D/gamma')
+ svntest.main.run_svn(None, 'rm', gamma_path)
+
+ # Create expected commit output.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/gamma' : Item(verb='Deleting'),
+ })
+
+ # After committing, status should show no sign of gamma.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/D/gamma')
+
+ # Commit the deletion of gamma and verify.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Now gamma should be marked as `deleted' under the hood, at
+ # revision 2. Meanwhile, A/D is still lagging at revision 1.
+
+ # Make a propchange on A/D (the out-of-date directory).
+ svntest.main.run_svn(None, 'ps', 'foo', 'bar', D_path)
+
+ # Commit and *expect* a repository Merge failure:
+ svntest.actions.run_and_verify_commit(wc_dir,
+ None,
+ None,
+ ".*[Oo]ut of date.*")
+
+#----------------------------------------------------------------------
+
+# Test a possible regression in our 'deleted' post-commit handling.
+#
+# This test moves files from one subdir to another, commits, then
+# updates the empty directory. Nothing should be printed, assuming
+# all the moved files are properly marked as 'deleted' and reported to
+# the server.
+
+def hudson_part_2_1(sbox):
+ "hudson prob 2.1: move files, update empty dir"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Move all the files in H to G
+ H_path = sbox.ospath('A/D/H')
+ G_path = sbox.ospath('A/D/G')
+ chi_path = os.path.join(H_path, 'chi')
+ psi_path = os.path.join(H_path, 'psi')
+ omega_path = os.path.join(H_path, 'omega')
+
+ svntest.main.run_svn(None, 'mv', chi_path, G_path)
+ svntest.main.run_svn(None, 'mv', psi_path, G_path)
+ svntest.main.run_svn(None, 'mv', omega_path, G_path)
+
+ # Create expected commit output.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/H/chi' : Item(verb='Deleting'),
+ 'A/D/H/omega' : Item(verb='Deleting'),
+ 'A/D/H/psi' : Item(verb='Deleting'),
+ 'A/D/G/chi' : Item(verb='Adding'),
+ 'A/D/G/omega' : Item(verb='Adding'),
+ 'A/D/G/psi' : Item(verb='Adding'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/D/H/chi')
+ expected_status.remove('A/D/H/omega')
+ expected_status.remove('A/D/H/psi')
+ expected_status.add({ 'A/D/G/chi' :
+ Item(wc_rev=2, status=' ') })
+ expected_status.add({ 'A/D/G/omega' :
+ Item(wc_rev=2, status=' ') })
+ expected_status.add({ 'A/D/G/psi' :
+ Item(wc_rev=2, status=' ') })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Now, assuming all three files in H are marked as 'deleted', an
+ # update (of the whole working copy) should print absolutely nothing.
+ expected_output = svntest.wc.State(wc_dir, { })
+
+ # Reuse expected_status, bumping everything to revision 2.
+ expected_status.tweak(wc_rev=2)
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/D/H/chi', 'A/D/H/omega', 'A/D/H/psi')
+ expected_disk.add({
+ 'A/D/G/chi' : Item("This is the file 'chi'.\n"),
+ })
+ expected_disk.add({
+ 'A/D/G/omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_disk.add({
+ 'A/D/G/psi' : Item("This is the file 'psi'.\n"),
+ })
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+#----------------------------------------------------------------------
+
+def hook_test(sbox):
+ "hook testing"
+
+ sbox.build()
+
+ # Get paths to the working copy and repository
+ wc_dir = sbox.wc_dir
+ repo_dir = sbox.repo_dir
+
+ # Create a hook that appends its name to a log file.
+ hook_format = """import sys
+fp = open(sys.argv[1] + '/hooks.log', 'a')
+fp.write("%s\\n")
+fp.close()"""
+
+ # Setup the hook configs to log data to a file
+ start_commit_hook = svntest.main.get_start_commit_hook_path(repo_dir)
+ svntest.main.create_python_hook_script(start_commit_hook,
+ hook_format % "start_commit_hook")
+
+ pre_commit_hook = svntest.main.get_pre_commit_hook_path(repo_dir)
+ svntest.main.create_python_hook_script(pre_commit_hook,
+ hook_format % "pre_commit_hook")
+
+ post_commit_hook = svntest.main.get_post_commit_hook_path(repo_dir)
+ svntest.main.create_python_hook_script(post_commit_hook,
+ hook_format % "post_commit_hook")
+
+ # Modify iota just so there is something to commit.
+ iota_path = sbox.ospath('iota')
+ svntest.main.file_append(iota_path, "More stuff in iota")
+
+ # Commit, no output expected.
+ svntest.actions.run_and_verify_svn([], [],
+ 'ci', '--quiet',
+ '-m', 'log msg', wc_dir)
+
+ # Now check the logfile
+ expected_data = [ 'start_commit_hook\n', 'pre_commit_hook\n', 'post_commit_hook\n' ]
+
+ logfilename = os.path.join(repo_dir, "hooks.log")
+ if os.path.exists(logfilename):
+ fp = open(logfilename)
+ else:
+ raise svntest.verify.SVNUnexpectedOutput("hook logfile %s not found")\
+ % logfilename
+
+ actual_data = fp.readlines()
+ fp.close()
+ os.unlink(logfilename)
+ svntest.verify.compare_and_display_lines('wrong hook logfile content',
+ 'STDERR',
+ expected_data, actual_data)
+
+#----------------------------------------------------------------------
+
+# Regression test for bug #469, whereby merge() was once reporting
+# erroneous conflicts due to Ancestor < Target < Source, in terms of
+# node-rev-id parentage.
+
+def merge_mixed_revisions(sbox):
+ "commit mixed-rev wc (no erroneous merge error)"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make some convenient paths.
+ iota_path = sbox.ospath('iota')
+ H_path = sbox.ospath('A/D/H')
+ chi_path = sbox.ospath('A/D/H/chi')
+ omega_path = sbox.ospath('A/D/H/omega')
+
+ # Here's the reproduction formula, in 5 parts.
+ # Hoo, what a buildup of state!
+
+ # 1. echo "moo" >> iota; echo "moo" >> A/D/H/chi; svn ci
+ svntest.main.file_append(iota_path, "moo")
+ svntest.main.file_append(chi_path, "moo")
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ 'A/D/H/chi' : Item(verb='Sending'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', 'A/D/H/chi', wc_rev=2)
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+
+ # 2. svn up A/D/H
+ expected_status = svntest.wc.State(wc_dir, {
+ 'A/D/H' : Item(status=' ', wc_rev=2),
+ 'A/D/H/chi' : Item(status=' ', wc_rev=2),
+ 'A/D/H/omega' : Item(status=' ', wc_rev=2),
+ 'A/D/H/psi' : Item(status=' ', wc_rev=2),
+ })
+ expected_disk = svntest.wc.State('', {
+ 'omega' : Item("This is the file 'omega'.\n"),
+ 'chi' : Item("This is the file 'chi'.\nmoo"),
+ 'psi' : Item("This is the file 'psi'.\n"),
+ })
+ expected_output = svntest.wc.State(wc_dir, { })
+ svntest.actions.run_and_verify_update(H_path,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+
+ # 3. echo "moo2" >> iota; svn ci iota
+ svntest.main.file_append(iota_path, "moo2")
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/H', 'A/D/H/omega', 'A/D/H/chi', 'A/D/H/psi',
+ wc_rev=2)
+ expected_status.tweak('iota', wc_rev=3)
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+
+ # 4. echo "moo3" >> A/D/H/chi; svn ci A/D/H/chi
+ svntest.main.file_append(chi_path, "moo3")
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/H/chi' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/H/chi', wc_rev=4)
+ expected_status.tweak('A/D/H', 'A/D/H/omega', 'A/D/H/psi', wc_rev=2)
+ expected_status.tweak('iota', wc_rev=3)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # 5. echo "moomoo" >> iota; svn ci iota
+ svntest.main.file_append(iota_path, "moomoo")
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/H', 'A/D/H/omega', 'A/D/H/psi', wc_rev=2)
+ expected_status.tweak('A/D/H/chi', wc_rev=4)
+ expected_status.tweak('iota', wc_rev=5)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # At this point, here is what our tree should look like:
+ # _ 1 ( 5) working_copies/commit_tests-10
+ # _ 1 ( 5) working_copies/commit_tests-10/A
+ # _ 1 ( 5) working_copies/commit_tests-10/A/B
+ # _ 1 ( 5) working_copies/commit_tests-10/A/B/E
+ # _ 1 ( 5) working_copies/commit_tests-10/A/B/E/alpha
+ # _ 1 ( 5) working_copies/commit_tests-10/A/B/E/beta
+ # _ 1 ( 5) working_copies/commit_tests-10/A/B/F
+ # _ 1 ( 5) working_copies/commit_tests-10/A/B/lambda
+ # _ 1 ( 5) working_copies/commit_tests-10/A/C
+ # _ 1 ( 5) working_copies/commit_tests-10/A/D
+ # _ 1 ( 5) working_copies/commit_tests-10/A/D/G
+ # _ 1 ( 5) working_copies/commit_tests-10/A/D/G/pi
+ # _ 1 ( 5) working_copies/commit_tests-10/A/D/G/rho
+ # _ 1 ( 5) working_copies/commit_tests-10/A/D/G/tau
+ # _ 2 ( 5) working_copies/commit_tests-10/A/D/H
+ # _ 4 ( 5) working_copies/commit_tests-10/A/D/H/chi
+ # _ 2 ( 5) working_copies/commit_tests-10/A/D/H/omega
+ # _ 2 ( 5) working_copies/commit_tests-10/A/D/H/psi
+ # _ 1 ( 5) working_copies/commit_tests-10/A/D/gamma
+ # _ 1 ( 5) working_copies/commit_tests-10/A/mu
+ # _ 5 ( 5) working_copies/commit_tests-10/iota
+
+ # At this point, we're ready to modify omega and iota, and commit
+ # from the top. We should *not* get a conflict!
+
+ svntest.main.file_append(iota_path, "finalmoo")
+ svntest.main.file_append(omega_path, "finalmoo")
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ 'A/D/H/omega' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', 'A/D/H/omega', wc_rev=6)
+ expected_status.tweak('A/D/H', 'A/D/H/psi', wc_rev=2)
+ expected_status.tweak('A/D/H/chi', wc_rev=4)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+
+def commit_uri_unsafe(sbox):
+ "commit files and dirs with URI-unsafe characters"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Note: on Windows, files can't have angle brackets in them, so we
+ # don't test that case.
+ if svntest.main.windows or sys.platform == 'cygwin':
+ angle_name = '_angle_'
+ nasty_name = '#![]{}()__%'
+ else:
+ angle_name = '<angle>'
+ nasty_name = '#![]{}()<>%'
+
+ # Make some convenient paths.
+ hash_dir = sbox.ospath('#hash#')
+ nasty_dir = os.path.join(wc_dir, nasty_name)
+ space_path = sbox.ospath('A/D/space path')
+ bang_path = sbox.ospath('A/D/H/bang!')
+ bracket_path = sbox.ospath('A/D/H/bra[ket')
+ brace_path = sbox.ospath('A/D/H/bra{e')
+ angle_path = os.path.join(wc_dir, 'A', 'D', 'H', angle_name)
+ paren_path = sbox.ospath('A/D/pare)(theses')
+ percent_path = sbox.ospath('#hash#/percen%')
+ nasty_path = os.path.join(wc_dir, 'A', nasty_name)
+
+ os.mkdir(hash_dir)
+ os.mkdir(nasty_dir)
+ svntest.main.file_append(space_path, "This path has a space in it.")
+ svntest.main.file_append(bang_path, "This path has a bang in it.")
+ svntest.main.file_append(bracket_path, "This path has a bracket in it.")
+ svntest.main.file_append(brace_path, "This path has a brace in it.")
+ svntest.main.file_append(angle_path, "This path has angle brackets in it.")
+ svntest.main.file_append(paren_path, "This path has parentheses in it.")
+ svntest.main.file_append(percent_path, "This path has a percent in it.")
+ svntest.main.file_append(nasty_path, "This path has all sorts of ick in it.")
+
+ add_list = [hash_dir,
+ nasty_dir, # not xml-safe
+ space_path,
+ bang_path,
+ bracket_path,
+ brace_path,
+ angle_path, # not xml-safe
+ paren_path,
+ percent_path,
+ nasty_path, # not xml-safe
+ ]
+ for item in add_list:
+ svntest.main.run_svn(None, 'add', '--depth=empty', item)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ '#hash#' : Item(verb='Adding'),
+ nasty_name : Item(verb='Adding'),
+ 'A/D/space path' : Item(verb='Adding'),
+ 'A/D/H/bang!' : Item(verb='Adding'),
+ 'A/D/H/bra[ket' : Item(verb='Adding'),
+ 'A/D/H/bra{e' : Item(verb='Adding'),
+ 'A/D/H/' + angle_name : Item(verb='Adding'),
+ 'A/D/pare)(theses' : Item(verb='Adding'),
+ '#hash#/percen%' : Item(verb='Adding'),
+ 'A/' + nasty_name : Item(verb='Adding'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ # Items in our add list will be at rev 2
+ for item in expected_output.desc.keys():
+ expected_status.add({ item : Item(wc_rev=2, status=' ') })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+
+#----------------------------------------------------------------------
+
+def commit_deleted_edited(sbox):
+ "commit deleted yet edited files"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make some convenient paths.
+ iota_path = sbox.ospath('iota')
+ mu_path = sbox.ospath('A/mu')
+
+ # Edit the files.
+ svntest.main.file_append(iota_path, "This file has been edited.")
+ svntest.main.file_append(mu_path, "This file has been edited.")
+
+ # Schedule the files for removal. ('--force' because they have
+ # local modifications.)
+ svntest.main.run_svn(None, 'remove', '--force', iota_path)
+ svntest.main.run_svn(None, 'remove', '--force', mu_path)
+
+ # Make our output list
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Deleting'),
+ 'A/mu' : Item(verb='Deleting'),
+ })
+
+ # Items in the status list are all at rev 1, except the two things
+ # we changed...but then, they don't exist at all.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('iota', 'A/mu')
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+
+def commit_in_dir_scheduled_for_addition(sbox):
+ "commit a file inside dir scheduled for addition"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A_path = sbox.ospath('A')
+ Z_path = sbox.ospath('Z')
+ Z_abspath = os.path.abspath(Z_path)
+ mu_path = sbox.ospath('Z/mu')
+
+ svntest.main.run_svn(None, 'move', A_path, Z_path)
+
+ # Make sure mu is a committable
+ svntest.main.file_write(mu_path, "xxxx")
+
+ # Commit a copied thing inside an added-with-history directory,
+ # expecting a specific error to occur!
+ svntest.actions.run_and_verify_commit(wc_dir,
+ None,
+ None,
+ "svn: E200009: '" +
+ re.escape(Z_abspath) +
+ "' is not known to exist in the",
+ mu_path)
+
+ Q_path = sbox.ospath('Q')
+ Q_abspath = os.path.abspath(Q_path)
+ bloo_path = os.path.join(Q_path, 'bloo')
+
+ os.mkdir(Q_path)
+ svntest.main.file_append(bloo_path, "New contents.")
+ svntest.main.run_svn(None, 'add', Q_path)
+
+ # Commit a regular added thing inside an added directory,
+ # expecting a specific error to occur!
+ svntest.actions.run_and_verify_commit(wc_dir,
+ None,
+ None,
+ "svn: E200009: '" +
+ re.escape(Q_abspath) +
+ "' is not known to exist in the",
+ bloo_path)
+
+ R_path = sbox.ospath('Z/B/R')
+ sbox.simple_mkdir('Z/B/R')
+
+ # Commit an added thing inside an added directory,
+ # expecting a specific error to occur!
+ svntest.actions.run_and_verify_commit(wc_dir,
+ None,
+ None,
+ "svn: E200009: '" +
+ re.escape(Z_abspath) +
+ "' is not known to exist in the.*",
+ R_path)
+
+#----------------------------------------------------------------------
+
+# Does this make sense now that deleted files are always removed from the wc?
+def commit_rmd_and_deleted_file(sbox):
+ "commit deleted (and missing) file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ mu_path = sbox.ospath('A/mu')
+
+ # 'svn remove' mu
+ svntest.main.run_svn(None, 'rm', mu_path)
+
+ # Commit the deleted path directly, hoping to see no errors
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'commit', '-m', 'logmsg', mu_path)
+
+#----------------------------------------------------------------------
+
+# Issue #644 which failed over ra_neon.
+@Issue(644)
+def commit_add_file_twice(sbox):
+ "issue 644 attempt to add a file twice"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Create a file
+ gloo_path = sbox.ospath('A/D/H/gloo')
+ svntest.main.file_append(gloo_path, "hello")
+ svntest.main.run_svn(None, 'add', gloo_path)
+
+ # Create expected output tree.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/H/gloo' : Item(verb='Adding'),
+ })
+
+ # Create expected status tree.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/D/H/gloo' : Item(status=' ', wc_rev=2),
+ })
+
+ # Commit should succeed
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Update to state before commit, so the wc no longer knows about gloo
+ svntest.main.run_svn(None, 'up', '-r', '1', wc_dir)
+
+ # Create the file again
+ svntest.main.file_append(gloo_path, "hello")
+ svntest.main.run_svn(None, 'add', gloo_path)
+
+ # Commit and *expect* a failure:
+ svntest.actions.run_and_verify_commit(wc_dir,
+ None,
+ None,
+ ".*E160020: File.*already exists.*")
+
+#----------------------------------------------------------------------
+
+# There was a problem that committing from a directory that had a
+# longer name than the working copy directory caused the commit notify
+# messages to display truncated/random filenames.
+
+def commit_from_long_dir(sbox):
+ "commit from a dir with a longer name than the wc"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ was_dir = os.getcwd()
+ abs_wc_dir = os.path.realpath(os.path.join(was_dir, wc_dir))
+
+ # something to commit
+ svntest.main.file_append(sbox.ospath('iota'), "modified iota")
+
+ # Create expected output tree.
+ expected_output = svntest.wc.State('', {
+ 'iota' : Item(verb='Sending'),
+ })
+
+ # Any length name was enough to provoke the original bug, but
+ # keeping its length less than that of the filename 'iota' avoided
+ # random behaviour, but still caused the test to fail
+ extra_name = 'xx'
+
+ os.chdir(wc_dir)
+ os.mkdir(extra_name)
+ os.chdir(extra_name)
+
+ # NOTE(review): the cwd is not restored to was_dir before returning;
+ # presumably the test harness resets it between tests — confirm.
+ svntest.actions.run_and_verify_commit(abs_wc_dir,
+ expected_output,
+ None)
+
+#----------------------------------------------------------------------
+
+def commit_with_lock(sbox):
+ "try to commit when directory is locked"
+
+ sbox.build()
+ # modify gamma and lock its directory
+ wc_dir = sbox.wc_dir
+
+ D_path = sbox.ospath('A/D')
+ gamma_path = os.path.join(D_path, 'gamma')
+ svntest.main.file_append(gamma_path, "modified gamma")
+ svntest.actions.lock_admin_dir(D_path)
+
+ # this commit should fail because A/D's administrative area is locked
+ svntest.actions.run_and_verify_commit(wc_dir,
+ None,
+ None,
+ 'svn: E155004: '
+ 'Working copy \'.*\' locked')
+
+ # unlock directory via 'svn cleanup'
+ svntest.actions.run_and_verify_svn([], [],
+ 'cleanup', D_path)
+
+ # this commit should succeed
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/gamma' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/gamma', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+
+# Explicitly commit the current directory. This did at one point fail
+# in post-commit processing due to a path canonicalization problem.
+
+def commit_current_dir(sbox):
+ "commit the current directory"
+
+ sbox.build()
+
+ wc_dir = sbox.wc_dir
+ svntest.main.run_svn(None, 'propset', 'pname', 'pval', wc_dir)
+
+ was_cwd = os.getcwd()
+
+ os.chdir(wc_dir)
+
+ # Commit '.' explicitly (this once failed in post-commit processing
+ # due to a path canonicalization problem).
+ expected_output = svntest.wc.State('.', {
+ '.' : Item(verb='Sending'),
+ })
+ svntest.actions.run_and_verify_commit('.',
+ expected_output,
+ None)
+ os.chdir(was_cwd)
+
+ # I can't get the status check to work as part of run_and_verify_commit.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('', wc_rev=2, status=' ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+
+# Check that the pending txn gets removed from the repository after
+# a failed commit.
+
+def failed_commit(sbox):
+ "commit with conflicts and check txn in repo"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make the other working copy
+ other_wc_dir = sbox.add_wc_path('other')
+ svntest.actions.duplicate_dir(wc_dir, other_wc_dir)
+
+ # Make different changes to iota in the two working copies
+ iota_path = sbox.ospath('iota')
+ svntest.main.file_append(iota_path, "More stuff in iota")
+
+ other_iota_path = os.path.join(other_wc_dir, "iota")
+ svntest.main.file_append(other_iota_path, "More different stuff in iota")
+
+ # Commit both working copies. The second commit should fail because
+ # its iota is now out of date.
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'commit', '-m', 'log', wc_dir)
+
+ svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
+ 'commit', '-m', 'log', other_wc_dir)
+
+ # Now list the txns in the repo. The list should be empty.
+ svntest.actions.run_and_verify_svnadmin([], [],
+ 'lstxns', sbox.repo_dir)
+
+#----------------------------------------------------------------------
+
+# Commit from multiple working copies is being worked on as issue #2381.
+# Also related to issue #959, this test here doesn't use svn:externals
+# but the behaviour needs to be considered.
+# In this test two WCs are nested, one WC is child of the other.
+@Issue(2381)
+def commit_multiple_wc_nested(sbox):
+ "commit from two nested working copies"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Checkout a second working copy, nested inside the first (under A/)
+ wc2_dir = sbox.ospath('A/wc2')
+ url = sbox.repo_url
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'checkout',
+ url, wc2_dir)
+
+ # Modify both working copies
+ mu_path = sbox.ospath('A/mu')
+ svntest.main.file_append(mu_path, 'appended mu text')
+ lambda2_path = os.path.join(wc2_dir, 'A', 'B', 'lambda')
+ svntest.main.file_append(lambda2_path, 'appended lambda2 text')
+
+ # Verify modified status
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+ expected_status2 = svntest.actions.get_virginal_state(wc2_dir, 1)
+ expected_status2.tweak('A/B/lambda', status='M ')
+ svntest.actions.run_and_verify_status(wc2_dir, expected_status2)
+
+ # Commit should succeed, even though one target is a "child" of the other.
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'commit', '-m', 'log',
+ wc_dir, wc2_dir)
+
+ # Verify status changed
+ expected_status.tweak('A/mu', status=' ', wc_rev=2)
+ expected_status2.tweak('A/B/lambda', status=' ', wc_rev=2)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+ svntest.actions.run_and_verify_status(wc2_dir, expected_status2)
+
+# Same as commit_multiple_wc_nested except that the two WCs are not nested.
+@Issue(2381)
+def commit_multiple_wc(sbox):
+ "commit from two working copies"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Cleanup (remove) the original wc; only wc1/wc2 below are used
+ svntest.sandbox._cleanup_test_path(wc_dir)
+
+ # Checkout two side-by-side wcs of the same repository
+ wc1_dir = sbox.ospath('wc1')
+ wc2_dir = sbox.ospath('wc2')
+ url = sbox.repo_url
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'checkout',
+ url, wc1_dir)
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'checkout',
+ url, wc2_dir)
+
+ # Modify both working copies
+ mu1_path = os.path.join(wc1_dir, 'A', 'mu')
+ svntest.main.file_append(mu1_path, 'appended mu1 text')
+ lambda2_path = os.path.join(wc2_dir, 'A', 'B', 'lambda')
+ svntest.main.file_append(lambda2_path, 'appended lambda2 text')
+
+ # Verify modified status
+ expected_status1 = svntest.actions.get_virginal_state(wc1_dir, 1)
+ expected_status1.tweak('A/mu', status='M ')
+ svntest.actions.run_and_verify_status(wc1_dir, expected_status1)
+ expected_status2 = svntest.actions.get_virginal_state(wc2_dir, 1)
+ expected_status2.tweak('A/B/lambda', status='M ')
+ svntest.actions.run_and_verify_status(wc2_dir, expected_status2)
+
+ # Commit should succeed.
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'commit', '-m', 'log',
+ wc1_dir, wc2_dir)
+
+ # Verify status changed
+ expected_status1.tweak('A/mu', status=' ', wc_rev=2)
+ expected_status2.tweak('A/B/lambda', status=' ', wc_rev=2)
+ svntest.actions.run_and_verify_status(wc1_dir, expected_status1)
+ svntest.actions.run_and_verify_status(wc2_dir, expected_status2)
+
+# Same as commit_multiple_wc except that the two WCs come
+# from different repositories. Commits to multiple repositories
+# are outside the scope of issue #2381.
+@Issue(2381)
+def commit_multiple_wc_multiple_repos(sbox):
+ "committing two WCs from different repos fails"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Create another repository
+ repo2, url2 = sbox.add_repo_path("repo2")
+ svntest.main.copy_repos(sbox.repo_dir, repo2, 1, 1)
+
+ # Cleanup original wc
+ svntest.sandbox._cleanup_test_path(wc_dir)
+
+ # Checkout two wcs
+ wc1_dir = sbox.ospath('wc1')
+ wc2_dir = sbox.ospath('wc2')
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'checkout',
+ sbox.repo_url, wc1_dir)
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'checkout',
+ url2, wc2_dir)
+
+ # Modify both working copies
+ mu1_path = os.path.join(wc1_dir, 'A', 'mu')
+ svntest.main.file_append(mu1_path, 'appended mu1 text')
+ lambda2_path = os.path.join(wc2_dir, 'A', 'B', 'lambda')
+ svntest.main.file_append(lambda2_path, 'appended lambda2 text')
+
+ # Verify modified status
+ expected_status1 = svntest.actions.get_virginal_state(wc1_dir, 1)
+ expected_status1.tweak('A/mu', status='M ')
+ svntest.actions.run_and_verify_status(wc1_dir, expected_status1)
+ expected_status2 = svntest.actions.get_virginal_state(wc2_dir, 1)
+ expected_status2.tweak('A/B/lambda', status='M ')
+ svntest.actions.run_and_verify_status(wc2_dir, expected_status2)
+
+ # Commit should fail, since WCs come from different repositories.
+ # The exact error message depends on whether or not the tests are
+ # run below an existing working copy
+ error_re = ( ".*(is not a working copy" +
+ "|Are all targets part of the same working copy" +
+ "|was not found).*" )
+ svntest.actions.run_and_verify_svn([], error_re,
+ 'commit', '-m', 'log',
+ wc1_dir, wc2_dir)
+
+ # Verify status unchanged
+ svntest.actions.run_and_verify_status(wc1_dir, expected_status1)
+ svntest.actions.run_and_verify_status(wc2_dir, expected_status2)
+
+#----------------------------------------------------------------------
+@Issues(1195,1239)
+def commit_nonrecursive(sbox):
+ "commit named targets with -N"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ ### Note: the original recipes used 'add -N'. These days, we use
+ ### --depth={empty,files}, and both the code and the comments below
+ ### have been adjusted to reflect this.
+
+ #####################################################
+ ### Issue #1195:
+ ###
+ ### 1. Create these directories and files:
+ ###
+ ### file1
+ ### dir1
+ ### dir1/file2
+ ### dir1/file3
+ ### dir1/dir2
+ ### dir1/dir2/file4
+ ###
+ ### 2. run 'svn add --depth=empty <all of the above>'
+ ###
+ ### 3. run 'svn ci -N <all of the above>'
+ ###
+ ### (The bug was that only 4 entities would get committed, when it
+ ### should be 6: dir2/ and file4 were left out.)
+
+ # These paths are relative to the top of the test's working copy.
+ file1_path = 'file1'
+ dir1_path = 'dir1'
+ file2_path = 'dir1/file2'
+ file3_path = 'dir1/file3'
+ dir2_path = 'dir1/dir2'
+ file4_path = 'dir1/dir2/file4'
+
+ # Create the new files and directories.
+ svntest.main.file_append(sbox.ospath(file1_path), 'this is file1')
+ os.mkdir(sbox.ospath(dir1_path))
+ svntest.main.file_append(sbox.ospath(file2_path), 'this is file2')
+ svntest.main.file_append(sbox.ospath(file3_path), 'this is file3')
+ os.mkdir(sbox.ospath(dir2_path))
+ svntest.main.file_append(sbox.ospath(file4_path), 'this is file4')
+
+ # Add them to version control.
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'add', '--depth=empty',
+ sbox.ospath(file1_path),
+ sbox.ospath(dir1_path),
+ sbox.ospath(file2_path),
+ sbox.ospath(file3_path),
+ sbox.ospath(dir2_path),
+ sbox.ospath(file4_path))
+
+ # Commit. We should see all 6 items (2 dirs, 4 files) get sent.
+ expected_output = svntest.wc.State(
+ wc_dir,
+ { file1_path : Item(verb='Adding'),
+ dir1_path : Item(verb='Adding'),
+ file2_path : Item(verb='Adding'),
+ file3_path : Item(verb='Adding'),
+ dir2_path : Item(verb='Adding'),
+ file4_path : Item(verb='Adding'),
+ }
+ )
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ file1_path : Item(status=' ', wc_rev=2),
+ dir1_path : Item(status=' ', wc_rev=2),
+ file2_path : Item(status=' ', wc_rev=2),
+ file3_path : Item(status=' ', wc_rev=2),
+ dir2_path : Item(status=' ', wc_rev=2),
+ file4_path : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status,
+ [],
+ '-N',
+ sbox.ospath(file1_path),
+ sbox.ospath(dir1_path),
+ sbox.ospath(file2_path),
+ sbox.ospath(file3_path),
+ sbox.ospath(dir2_path),
+ sbox.ospath(file4_path))
+
+ #######################################################################
+ ###
+ ### There's some complex history here; please bear with me.
+ ###
+ ### First there was issue #1239, which had the following recipe:
+ ###
+ ### 1. Create these directories and files:
+ ###
+ ### dirA
+ ### dirA/fileA
+ ### dirA/fileB
+ ### dirA/dirB
+ ### dirA/dirB/fileC
+ ### dirA/dirB/nocommit
+ ###
+ ### 2. run 'svn add --depth=empty <all of the above>'
+ ###
+ ### 3. run 'svn ci -N <all but nocommit>'
+ ###
+ ### (In this recipe, 'add -N' has been changed to 'add --depth...',
+ ### but 'ci -N' has been left as-is, for reasons explained below.)
+ ###
+ ### Issue #1239 claimed a two-part bug: that step 3 would try to
+ ### commit the file `nocommit' when it shouldn't, and that it would
+ ### get an error anyway:
+ ###
+ ### Adding wc/dirA
+ ### Adding wc/dirA/fileA
+ ### Adding wc/dirA/fileB
+ ### Adding wc/dirA/dirB
+ ### Adding wc/dirA/dirB/nocommit
+ ### Adding wc/dirA/dirB/fileC
+ ### Transmitting file data ....svn: A problem occurred; \
+ ### see later errors for details
+ ### svn: Commit succeeded, but other errors follow:
+ ### svn: Problem running log
+ ### svn: Error bumping revisions post-commit (details follow):
+ ### svn: in directory
+ ### 'F:/Programmation/Projets/subversion/svnant/test/wc/dirA'
+ ### svn: start_handler: error processing command 'committed' in
+ ### 'F:/Programmation/Projets/subversion/svnant/test/wc/dirA'
+ ### svn: Working copy not locked
+ ### svn: directory not locked
+ ### (F:/Programmation/Projets/subversion/svnant/test/wc)
+ ###
+ ### However, this was all in the days before --depth, and depended
+ ### on an idiosyncratic interpretation of -N, one which required
+ ### commit to behave differently from other commands taking -N.
+ ###
+ ### These days, -N should be equivalent to --depth=files in almost
+ ### all cases. There are some exceptions (e.g., status), and commit
+ ### is one of them: 'commit -N' means 'commit --depth=empty'.
+ ###
+ ### The original implementation, as well as this test, mistakenly
+ ### mapped 'commit -N' to 'commit --depth=files'; that was a bug that
+ ### made 'svn ci -N' incompatible with 1.4 and earlier versions.
+ ###
+ ### See also 'commit_propmods_with_depth_empty' in depth_tests.py .
+
+ # Now add these directories and files, except the last:
+ dirA_path = 'dirA'
+ fileA_path = 'dirA/fileA'
+ fileB_path = 'dirA/fileB'
+ dirB_path = 'dirA/dirB'
+ nope_1_path = 'dirA/dirB/nope_1'
+ nope_2_path = 'dirA/dirB/nope_2'
+
+ # Create the new files and directories.
+ os.mkdir(sbox.ospath(dirA_path))
+ svntest.main.file_append(sbox.ospath(fileA_path), 'fileA')
+ svntest.main.file_append(sbox.ospath(fileB_path), 'fileB')
+ os.mkdir(sbox.ospath(dirB_path))
+ svntest.main.file_append(sbox.ospath(nope_1_path), 'nope_1')
+ svntest.main.file_append(sbox.ospath(nope_2_path), 'nope_2')
+
+ # Add them to version control.
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'add', '-N',
+ sbox.ospath(dirA_path),
+ sbox.ospath(fileA_path),
+ # don't add fileB
+ sbox.ospath(dirB_path),
+ sbox.ospath(nope_1_path),
+ # don't add nope_2
+ )
+
+ expected_output = svntest.wc.State(
+ wc_dir,
+ { dirA_path : Item(verb='Adding'),
+ # no children!
+ }
+ )
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ # Expect the leftovers from the first part of the test.
+ expected_status.add({
+ file1_path : Item(status=' ', wc_rev=2),
+ dir1_path : Item(status=' ', wc_rev=2),
+ file2_path : Item(status=' ', wc_rev=2),
+ file3_path : Item(status=' ', wc_rev=2),
+ dir2_path : Item(status=' ', wc_rev=2),
+ file4_path : Item(status=' ', wc_rev=2),
+ })
+
+ # Expect some commits and some non-commits from this part of the test.
+ expected_status.add({
+ dirA_path : Item(status=' ', wc_rev=3),
+ fileA_path : Item(status='A ', wc_rev=0),
+ # no fileB
+ dirB_path : Item(status='A ', wc_rev=0),
+ nope_1_path : Item(status='A ', wc_rev=0),
+ # no nope_2
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status,
+ [],
+ '-N', sbox.ospath(dirA_path))
+
+#----------------------------------------------------------------------
+# Regression for #1017: ra_neon was allowing the deletion of out-of-date
+# files or dirs, which majorly violates Subversion's semantics.
+# An out-of-date error should be raised if the object to be committed has
+# already been deleted or modified in the repo.
+
+def commit_out_of_date_deletions(sbox):
+ "commit deletion of out-of-date file or dir"
+
+ # Path WC 1 WC backup
+ # =========== ==== =========
+ # A/C pset del
+ # A/I del pset
+ # A/B/F del del
+ # A/D/H/omega text del
+ # A/B/E/alpha pset del
+ # A/D/H/chi del text
+ # A/B/E/beta del pset
+ # A/D/H/psi del del
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Need another empty dir
+ I_path = sbox.ospath('A/I')
+ os.mkdir(I_path)
+ svntest.main.run_svn(None, 'add', I_path)
+ sbox.simple_commit(message='prep')
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+ # Make a backup copy of the working copy
+ wc_backup = sbox.add_wc_path('backup')
+ svntest.actions.duplicate_dir(wc_dir, wc_backup)
+
+ # Edits in wc 1
+ C_path = sbox.ospath('A/C')
+ omega_path = sbox.ospath('A/D/H/omega')
+ alpha_path = sbox.ospath('A/B/E/alpha')
+ svntest.main.run_svn(None, 'propset', 'fooprop', 'foopropval', C_path)
+ svntest.main.file_append(omega_path, 'appended omega text')
+ svntest.main.run_svn(None, 'propset', 'fooprop', 'foopropval', alpha_path)
+
+ # Deletions in wc 1
+ I_path = sbox.ospath('A/I')
+ F_path = sbox.ospath('A/B/F')
+ chi_path = sbox.ospath('A/D/H/chi')
+ beta_path = sbox.ospath('A/B/E/beta')
+ psi_path = sbox.ospath('A/D/H/psi')
+ svntest.main.run_svn(None, 'rm', I_path, F_path, chi_path, beta_path,
+ psi_path)
+
+ # Commit in wc 1
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/C' : Item(verb='Sending'),
+ 'A/I' : Item(verb='Deleting'),
+ 'A/B/F' : Item(verb='Deleting'),
+ 'A/D/H/omega' : Item(verb='Sending'),
+ 'A/B/E/alpha' : Item(verb='Sending'),
+ 'A/D/H/chi' : Item(verb='Deleting'),
+ 'A/B/E/beta' : Item(verb='Deleting'),
+ 'A/D/H/psi' : Item(verb='Deleting'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('A/C', 'A/D/H/omega', 'A/B/E/alpha', wc_rev=3,
+ status=' ')
+ expected_status.remove('A/B/F', 'A/D/H/chi', 'A/B/E/beta', 'A/D/H/psi')
+ commit = svntest.actions.run_and_verify_commit
+ commit(wc_dir, expected_output, expected_status, [], wc_dir)
+
+ # Edits in wc backup
+ I_path = os.path.join(wc_backup, 'A', 'I')
+ chi_path = os.path.join(wc_backup, 'A', 'D', 'H', 'chi')
+ beta_path = os.path.join(wc_backup, 'A', 'B', 'E','beta')
+ svntest.main.run_svn(None, 'propset', 'fooprop', 'foopropval', I_path)
+ svntest.main.file_append(chi_path, 'appended chi text')
+ svntest.main.run_svn(None, 'propset', 'fooprop', 'foopropval', beta_path)
+
+ # Deletions in wc backup
+ C_path = os.path.join(wc_backup, 'A', 'C')
+ F_path = os.path.join(wc_backup, 'A', 'B', 'F')
+ omega_path = os.path.join(wc_backup, 'A', 'D', 'H', 'omega')
+ alpha_path = os.path.join(wc_backup, 'A', 'B', 'E', 'alpha')
+ psi_path = os.path.join(wc_backup, 'A', 'D', 'H', 'psi')
+ svntest.main.run_svn(None, 'rm', C_path, F_path, omega_path, alpha_path,
+ psi_path)
+
+ # A commit of any one of these files or dirs should fail, preferably
+ # with an out-of-date error message.
+ error_re = ".*(out of date|not found).*"
+ commit(wc_backup, None, None, error_re, C_path)
+ commit(wc_backup, None, None, error_re, I_path)
+ commit(wc_backup, None, None, error_re, F_path)
+ commit(wc_backup, None, None, error_re, omega_path)
+ commit(wc_backup, None, None, error_re, alpha_path)
+ commit(wc_backup, None, None, error_re, chi_path)
+ commit(wc_backup, None, None, error_re, beta_path)
+ commit(wc_backup, None, None, error_re, psi_path)
+
+def commit_with_bad_log_message(sbox):
+ "commit with a log message containing bad data"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ iota_path = sbox.ospath('iota')
+ log_msg_path = sbox.ospath('log-message')
+
+ # Make a random change, so there's something to commit.
+ svntest.main.file_append(iota_path, 'fish')
+
+ # Create a log message containing a zero-byte.
+ svntest.main.file_append(log_msg_path, '\x00')
+
+ # Commit and expect an error.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ None, None,
+ ".*contains a zero byte.*",
+ '-F', log_msg_path,
+ iota_path)
+
+def commit_with_mixed_line_endings(sbox):
+ "commit with log message with mixed EOL"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ expected_status = make_standard_slew_of_changes(wc_dir)
+
+ iota_path = sbox.ospath('iota')
+ log_msg_path = sbox.ospath('log-message')
+
+ # Make a random change, so there's something to commit.
+ svntest.main.file_append(iota_path, 'kebab')
+
+  # Create a log message containing mixed line endings (LF, CR, CRLF).
+ svntest.main.file_append(log_msg_path, "test\nthis\n\rcase\r\n--This line, and those below, will be ignored--\n")
+
+ # Commit and expect an error.
+ expected_stderr = ".*E135000: Error normalizing log message to internal format.*"
+ svntest.actions.run_and_verify_commit(wc_dir,
+ None, None,
+ expected_stderr,
+ '-F', log_msg_path,
+ iota_path)
+
+def commit_with_mixed_line_endings_in_ignored_part(sbox):
+ "commit with log message with mixed EOL in tail"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ expected_status = make_standard_slew_of_changes(wc_dir)
+
+ iota_path = sbox.ospath('iota')
+ log_msg_path = sbox.ospath('log-message')
+
+ # Make a random change, so there's something to commit.
+ svntest.main.file_append(iota_path, 'cheeseburger')
+
+  # Create a log message whose mixed EOLs occur only in the ignored tail.
+ svntest.main.file_append(log_msg_path, "test\n--This line, and those below, will be ignored--\nfoo\r\nbar\nbaz\n\r")
+
+ # Create expected state.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+ expected_status.tweak('iota', wc_rev=2, status=' ')
+
+ # Commit the one file.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status,
+ [],
+ iota_path)
+
+def from_wc_top_with_bad_editor(sbox):
+ "commit with invalid external editor cmd"
+
+ # Shortly after revision 5407, Vladimir Prus posted this bug recipe:
+ #
+ # #!/bin/bash
+ # cd /tmp
+ # rm -rf repo wc
+ # svnadmin create repo
+ # svn mkdir file:///tmp/repo/foo -m ""
+ # svn co file:///tmp/repo/foo wc
+ # cd wc
+ # svn ps svn:externals "lib http://something.org/lib" .
+ # svn ci
+ #
+ # The final 'svn ci' would seg fault because of a problem in
+ # calculating the paths to insert in the initial log message that
+ # gets passed to the editor.
+ #
+ # So this regression test is primarily about making sure the seg
+ # fault is gone, and only secondarily about testing that we get the
+ # expected error from passing a bad editor cmd to Subversion.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'pset', 'fish', 'food', wc_dir)
+ os.chdir(wc_dir)
+ exit_code, out, err = svntest.actions.run_and_verify_svn(
+ None, svntest.verify.AnyOutput,
+ '--force-interactive',
+ 'ci', '--editor-cmd', 'no_such-editor')
+
+ err = " ".join([x.strip() for x in err])
+ if not (re.match(".*no_such-editor.*", err)
+ and re.match(".*Commit failed.*", err)):
+ print("Commit failed, but not in the way expected.")
+ raise svntest.Failure
+
+
+def mods_in_schedule_delete(sbox):
+ "commit with mods in schedule delete"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Schedule a delete, then put in local mods
+ C_path = sbox.ospath('A/C')
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'rm', C_path)
+
+ if not os.path.exists(C_path):
+ os.mkdir(C_path)
+ foo_path = os.path.join(C_path, 'foo')
+ foo_contents = 'zig\nzag\n'
+ svntest.main.file_append(foo_path, foo_contents)
+
+ # Commit should succeed
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/C')
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/C' : Item(verb='Deleting'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+
+ # Unversioned file still exists
+ actual_contents = open(foo_path).read()
+ if actual_contents != foo_contents:
+ raise svntest.Failure
+
+
+#----------------------------------------------------------------------
+@Skip(is_non_posix_os_or_cygwin_platform)
+@Issue(1954)
+def tab_test(sbox):
+ "tabs in paths"
+ # For issue #1954.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ tab_file = sbox.ospath('A/tab\tfile')
+ tab_dir = sbox.ospath('A/tab\tdir')
+ source_url = sbox.repo_url + "/source_dir"
+ tab_url = sbox.repo_url + "/tab%09dir"
+
+ svntest.main.file_append(tab_file, "This file has a tab in it.")
+ os.mkdir(tab_dir)
+
+ def match_bad_tab_path(path, errlines):
+ match_re = ".*: Invalid control character '0x09' in path .*"
+ for line in errlines:
+ if re.match (match_re, line):
+ break
+ else:
+ raise svntest.Failure("Failed to find match_re in " + str(errlines))
+
+ # add file to wc
+ exit_code, outlines, errlines = svntest.main.run_svn(1, 'add', tab_file)
+ match_bad_tab_path(tab_file, errlines)
+
+ # add dir to wc
+ exit_code, outlines, errlines = svntest.main.run_svn(1, 'add', tab_dir)
+ match_bad_tab_path(tab_dir, errlines)
+
+ # mkdir URL
+ exit_code, outlines, errlines = svntest.main.run_svn(1, 'mkdir',
+ '-m', 'msg', tab_url)
+ match_bad_tab_path(tab_dir, errlines)
+
+ # copy URL
+ svntest.main.run_svn(1,
+ 'mkdir', '-m', 'msg', source_url)
+ exit_code, outlines, errlines = svntest.main.run_svn(1, 'copy',
+ '-m', 'msg',
+ source_url, tab_url)
+ match_bad_tab_path(tab_dir, errlines)
+
+ # mv URL
+ exit_code, outlines, errlines = svntest.main.run_svn(1, 'mv', '-m', 'msg',
+ source_url, tab_url)
+ match_bad_tab_path(tab_dir, errlines)
+
+#----------------------------------------------------------------------
+@Issue(2285)
+def local_mods_are_not_commits(sbox):
+ "local ops should not be treated like commits"
+
+ # For issue #2285.
+ #
+ # Some commands can run on either a URL or a local path. These
+ # commands take a log message, intended for the URL case.
+ # Therefore, they should make sure that getting a log message for
+ # a local operation errors (because not committing).
+ #
+ # This is in commit_tests.py because the unifying theme is that
+ # commits are *not* happening. And because there was no better
+ # place to put it :-).
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ expected_error = '.*Local, non-commit operations do not take a log message.*'
+
+ # copy wc->wc
+ svntest.actions.run_and_verify_svn(None, expected_error,
+ 'cp', '-m', 'log msg',
+ sbox.ospath('iota'),
+ sbox.ospath('iota2'))
+
+ # copy repos->wc
+ svntest.actions.run_and_verify_svn(None, expected_error,
+ 'cp', '-m', 'log msg',
+ sbox.repo_url + "/iota",
+ sbox.ospath('iota2'))
+
+ # delete
+ svntest.actions.run_and_verify_svn(None, expected_error,
+ 'rm', '-m', 'log msg',
+ sbox.ospath('A/D/gamma'))
+
+ # mkdir
+ svntest.actions.run_and_verify_svn(None, expected_error,
+ 'mkdir', '-m', 'log msg',
+ sbox.ospath('newdir'))
+
+ # rename
+ svntest.actions.run_and_verify_svn(None, expected_error,
+ 'cp', '-m', 'log msg',
+ sbox.ospath('A/mu'),
+ sbox.ospath('A/yu'))
+
+
+#----------------------------------------------------------------------
+# Test that the post-commit hook's error message is returned to the svn
+# client and displayed as a warning.
+@Issue(3553)
+def post_commit_hook_test(sbox):
+ "post commit hook failure case testing"
+
+ sbox.build()
+
+ # Get paths to the working copy and repository
+ wc_dir = sbox.wc_dir
+ repo_dir = sbox.repo_dir
+
+ # Create a hook that outputs a message to stderr and returns exit code 1
+ # Include a non-XML-safe message to regression-test issue #3553.
+ error_msg = "Text with <angle brackets> & ampersand"
+ svntest.actions.create_failing_hook(repo_dir, "post-commit", error_msg)
+
+ # Modify iota just so there is something to commit.
+ iota_path = sbox.ospath('iota')
+ svntest.main.file_append(iota_path, "lakalakalakalaka")
+
+  # Now, commit and examine the output (we happen to know that the
+  # filesystem will report an absolute path because that's the way the
+  # filesystem is created by this test suite).
+ expected_output = [ "Sending "+ iota_path + "\n",
+ "Transmitting file data .done\n",
+ "Committing transaction...\n",
+ "Committed revision 2.\n",
+ "\n",
+ "Warning: " +
+ svntest.actions.hook_failure_message('post-commit'),
+ error_msg + "\n",
+ ]
+
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'ci', '-m', 'log msg', iota_path)
+
+#----------------------------------------------------------------------
+# Commit two targets non-recursively, but both targets should be the
+# same folder (in multiple variations). Test that svn handles this correctly.
+def commit_same_folder_in_targets(sbox):
+ "commit two targets, both the same folder"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ iota_path = sbox.ospath('iota')
+
+ svntest.main.file_append(iota_path, "added extra line to file iota")
+
+ # Create expected output tree.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+
+  # Create expected status tree.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', wc_rev=2)
+
+ # Commit the wc_dir and iota.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status,
+ [],
+ '-N',
+ wc_dir,
+ iota_path)
+
+#----------------------------------------------------------------------
+# test for issue 2459: verify that commit fails when a file with mixed
+# eol-styles is included, and show an error message which includes the
+# filename.
+@Issue(2459)
+def commit_inconsistent_eol(sbox):
+ "commit files with inconsistent eol should fail"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ iota_path = sbox.ospath('iota')
+ mu_path = sbox.ospath('A/mu')
+
+ svntest.main.run_svn(None, 'propset', 'svn:eol-style', 'native', iota_path)
+ svntest.main.file_append_binary(iota_path,
+ "added extra line to file iota\012"
+ "added extra line to file iota\015")
+ svntest.main.file_append(mu_path, "added extra line to file mu\n"
+ "added extra line to file mu\n")
+
+ expected_err = ".*iota.*"
+
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'commit', '-m', 'log message',
+ wc_dir)
+
+
+@SkipUnless(server_has_revprop_commit)
+def mkdir_with_revprop(sbox):
+ "set revision props during remote mkdir"
+
+ sbox.build()
+ remote_dir = sbox.repo_url + "/dir"
+
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'msg',
+ '--with-revprop', 'bug=42', remote_dir)
+
+ expected = svntest.verify.UnorderedOutput(
+ ['Unversioned properties on revision 2:\n',
+ ' svn:author\n',' svn:date\n', ' svn:log\n',
+ ' bug\n'])
+ svntest.actions.run_and_verify_svn(expected, [], 'proplist',
+ '--revprop', '-r', 2, sbox.repo_url)
+ svntest.actions.run_and_verify_svn('42', [], 'propget', 'bug',
+ '--revprop', '-r', 2, sbox.repo_url)
+
+
+@SkipUnless(server_has_revprop_commit)
+def delete_with_revprop(sbox):
+ "set revision props during remote delete"
+
+ sbox.build()
+ remote_dir = sbox.repo_url + "/dir"
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'msg',
+ remote_dir)
+
+ svntest.actions.run_and_verify_svn(None, [], 'delete', '-m', 'msg',
+ '--with-revprop', 'bug=52', remote_dir)
+
+ expected = svntest.verify.UnorderedOutput(
+ ['Unversioned properties on revision 3:\n',
+ ' svn:author\n',' svn:date\n', ' svn:log\n',
+ ' bug\n'])
+ svntest.actions.run_and_verify_svn(expected, [], 'proplist',
+ '--revprop', '-r', 3, sbox.repo_url)
+ svntest.actions.run_and_verify_svn('52', [], 'propget', 'bug',
+ '--revprop', '-r', 3, sbox.repo_url)
+
+
+@SkipUnless(server_has_revprop_commit)
+def commit_with_revprop(sbox):
+ "set revision props during commit"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ expected_status = make_standard_slew_of_changes(wc_dir)
+
+ omega_path = sbox.ospath('A/D/H/omega')
+ gloo_path = sbox.ospath('A/D/H/gloo')
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/H/omega' : Item(verb='Sending'),
+ 'A/D/H/gloo' : Item(verb='Adding'),
+ })
+
+ expected_status.tweak('A/D/H/omega', wc_rev=2, status=' ')
+ expected_status.tweak('A/D/H/gloo', wc_rev=2, status=' ')
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status,
+ [],
+ '-m', 'msg',
+ '--with-revprop', 'bug=62',
+ omega_path, gloo_path)
+
+ expected = svntest.verify.UnorderedOutput(
+ ['Unversioned properties on revision 2:\n',
+ ' svn:author\n',' svn:date\n', ' svn:log\n',
+ ' bug\n'])
+ svntest.actions.run_and_verify_svn(expected, [], 'proplist',
+ '--revprop', '-r', 2, sbox.repo_url)
+ svntest.actions.run_and_verify_svn('62', [], 'propget', 'bug',
+ '--revprop', '-r', 2, sbox.repo_url)
+
+
+@SkipUnless(server_has_revprop_commit)
+def import_with_revprop(sbox):
+ "set revision props during import"
+
+ sbox.build()
+ local_dir = sbox.ospath('folder')
+ local_file = sbox.ospath('folder/file')
+ os.mkdir(local_dir)
+ svntest.main.file_write(local_file, "xxxx")
+
+ svntest.actions.run_and_verify_svn(None, [], 'import', '-m', 'msg',
+ '--with-revprop', 'bug=72', local_dir,
+ sbox.repo_url)
+
+ expected = svntest.verify.UnorderedOutput(
+ ['Unversioned properties on revision 2:\n',
+ ' svn:author\n',' svn:date\n', ' svn:log\n',
+ ' bug\n'])
+ svntest.actions.run_and_verify_svn(expected, [], 'proplist',
+ '--revprop', '-r', 2, sbox.repo_url)
+ svntest.actions.run_and_verify_svn('72', [], 'propget', 'bug',
+ '--revprop', '-r', 2, sbox.repo_url)
+
+
+@SkipUnless(server_has_revprop_commit)
+def copy_R2R_with_revprop(sbox):
+ "set revision props during repos-to-repos copy"
+
+ sbox.build()
+ remote_dir1 = sbox.repo_url + "/dir1"
+ remote_dir2 = sbox.repo_url + "/dir2"
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'msg',
+ remote_dir1)
+
+ svntest.actions.run_and_verify_svn(None, [], 'copy', '-m', 'msg',
+ '--with-revprop', 'bug=82', remote_dir1,
+ remote_dir2)
+
+ expected = svntest.verify.UnorderedOutput(
+ ['Unversioned properties on revision 3:\n',
+ ' svn:author\n',' svn:date\n', ' svn:log\n',
+ ' bug\n'])
+ svntest.actions.run_and_verify_svn(expected, [], 'proplist',
+ '--revprop', '-r', 3, sbox.repo_url)
+ svntest.actions.run_and_verify_svn('82', [], 'propget', 'bug',
+ '--revprop', '-r', 3, sbox.repo_url)
+
+
+@SkipUnless(server_has_revprop_commit)
+def copy_WC2R_with_revprop(sbox):
+ "set revision props during wc-to-repos copy"
+
+ sbox.build()
+ remote_dir = sbox.repo_url + "/dir"
+ local_dir = sbox.ospath('folder')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mkdir', local_dir)
+
+ svntest.actions.run_and_verify_svn(None, [], 'copy', '-m', 'msg',
+ '--with-revprop', 'bug=92', local_dir,
+ remote_dir)
+
+ expected = svntest.verify.UnorderedOutput(
+ ['Unversioned properties on revision 2:\n',
+ ' svn:author\n',' svn:date\n', ' svn:log\n',
+ ' bug\n'])
+ svntest.actions.run_and_verify_svn(expected, [], 'proplist',
+ '--revprop', '-r', 2, sbox.repo_url)
+ svntest.actions.run_and_verify_svn('92', [], 'propget', 'bug',
+ '--revprop', '-r', 2, sbox.repo_url)
+
+
+@SkipUnless(server_has_revprop_commit)
+def move_R2R_with_revprop(sbox):
+ "set revision props during repos-to-repos move"
+
+ sbox.build()
+ remote_dir1 = sbox.repo_url + "/dir1"
+ remote_dir2 = sbox.repo_url + "/dir2"
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'msg',
+ remote_dir1)
+
+ svntest.actions.run_and_verify_svn(None, [], 'move', '-m', 'msg',
+ '--with-revprop', 'bug=102', remote_dir1,
+ remote_dir2)
+
+ expected = svntest.verify.UnorderedOutput(
+ ['Unversioned properties on revision 3:\n',
+ ' svn:author\n',' svn:date\n', ' svn:log\n',
+ ' bug\n'])
+ svntest.actions.run_and_verify_svn(expected, [], 'proplist',
+ '--revprop', '-r', 3, sbox.repo_url)
+ svntest.actions.run_and_verify_svn('102', [], 'propget', 'bug',
+ '--revprop', '-r', 3, sbox.repo_url)
+
+
+@SkipUnless(server_has_revprop_commit)
+def propedit_with_revprop(sbox):
+ "set revision props during remote property edit"
+
+ sbox.build()
+ svntest.main.use_editor('append_foo')
+
+ svntest.actions.run_and_verify_svn(None, [], 'propedit', '-m', 'msg',
+ '--with-revprop', 'bug=112', 'prop',
+ sbox.repo_url)
+
+ expected = svntest.verify.UnorderedOutput(
+ ['Unversioned properties on revision 2:\n',
+ ' svn:author\n',' svn:date\n', ' svn:log\n',
+ ' bug\n'])
+ svntest.actions.run_and_verify_svn(expected, [], 'proplist',
+ '--revprop', '-r', 2, sbox.repo_url)
+ svntest.actions.run_and_verify_svn('112', [], 'propget', 'bug',
+ '--revprop', '-r', 2, sbox.repo_url)
+
+
+@SkipUnless(server_has_revprop_commit)
+def set_multiple_props_with_revprop(sbox):
+ "set multiple revision props during remote mkdir"
+
+ sbox.build()
+ remote_dir = sbox.repo_url + "/dir"
+
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'msg',
+ '--with-revprop', 'bug=32',
+ '--with-revprop', 'ref=22', remote_dir)
+
+ expected = svntest.verify.UnorderedOutput(
+ ['Unversioned properties on revision 2:\n',
+ ' svn:author\n',' svn:date\n', ' svn:log\n',
+ ' bug\n', ' ref\n'])
+ svntest.actions.run_and_verify_svn(expected, [], 'proplist',
+ '--revprop', '-r', 2, sbox.repo_url)
+ svntest.actions.run_and_verify_svn('32', [], 'propget', 'bug',
+ '--revprop', '-r', 2, sbox.repo_url)
+ svntest.actions.run_and_verify_svn('22', [], 'propget', 'ref',
+ '--revprop', '-r', 2, sbox.repo_url)
+
+
+@SkipUnless(server_has_revprop_commit)
+def use_empty_value_in_revprop_pair(sbox):
+ "set revprop without value ('') during remote mkdir"
+
+ sbox.build()
+ remote_dir = sbox.repo_url + "/dir"
+
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'msg',
+ '--with-revprop', 'bug=',
+ '--with-revprop', 'ref=', remote_dir)
+
+ expected = svntest.verify.UnorderedOutput(
+ ['Unversioned properties on revision 2:\n',
+ ' svn:author\n',' svn:date\n', ' svn:log\n',
+ ' bug\n', ' ref\n'])
+ svntest.actions.run_and_verify_svn(expected, [], 'proplist',
+ '--revprop', '-r', 2, sbox.repo_url)
+ svntest.actions.run_and_verify_svn('', [], 'propget', 'bug',
+ '--revprop', '-r', 2, sbox.repo_url)
+ svntest.actions.run_and_verify_svn('', [], 'propget', 'ref',
+ '--revprop', '-r', 2, sbox.repo_url)
+
+
+@SkipUnless(server_has_revprop_commit)
+def no_equals_in_revprop_pair(sbox):
+ "set revprop without '=' during remote mkdir"
+
+ sbox.build()
+ remote_dir = sbox.repo_url + "/dir"
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'msg',
+ '--with-revprop', 'bug',
+ '--with-revprop', 'ref', remote_dir)
+
+ expected = svntest.verify.UnorderedOutput(
+ ['Unversioned properties on revision 2:\n',
+ ' svn:author\n',' svn:date\n', ' svn:log\n',
+ ' bug\n', ' ref\n'])
+ svntest.actions.run_and_verify_svn(expected, [], 'proplist',
+ '--revprop', '-r', 2, sbox.repo_url)
+ svntest.actions.run_and_verify_svn('', [], 'propget', 'bug',
+ '--revprop', '-r', 2, sbox.repo_url)
+ svntest.actions.run_and_verify_svn('', [], 'propget', 'ref',
+ '--revprop', '-r', 2, sbox.repo_url)
+
+
+@SkipUnless(server_has_revprop_commit)
+def set_invalid_revprops(sbox):
+ "set invalid revision props during remote mkdir"
+
+ sbox.build()
+ remote_dir = sbox.repo_url + "/dir"
+ # Try to set svn: revprops.
+ expected = '.*Standard properties can\'t.*'
+ svntest.actions.run_and_verify_svn([], expected, 'mkdir', '-m', 'msg',
+ '--with-revprop', 'svn:author=42', remote_dir)
+ svntest.actions.run_and_verify_svn([], expected, 'mkdir', '-m', 'msg',
+ '--with-revprop', 'svn:log=42', remote_dir)
+ svntest.actions.run_and_verify_svn([], expected, 'mkdir', '-m', 'msg',
+ '--with-revprop', 'svn:date=42', remote_dir)
+ svntest.actions.run_and_verify_svn([], expected, 'mkdir', '-m', 'msg',
+ '--with-revprop', 'svn:foo=bar', remote_dir)
+
+ # Empty revprop pair.
+ svntest.actions.run_and_verify_svn([],
+ 'svn: E205000: '
+ 'Revision property pair is empty',
+ 'mkdir', '-m', 'msg',
+ '--with-revprop', '',
+ remote_dir)
+
+#----------------------------------------------------------------------
+@Issue(3553)
+def start_commit_hook_test(sbox):
+ "start-commit hook failure case testing"
+
+ sbox.build()
+
+ # Get paths to the working copy and repository
+ wc_dir = sbox.wc_dir
+ repo_dir = sbox.repo_dir
+
+ # Create a hook that outputs a message to stderr and returns exit code 1
+ # Include a non-XML-safe message to regression-test issue #3553.
+ error_msg = "Text with <angle brackets> & ampersand"
+ svntest.actions.create_failing_hook(repo_dir, "start-commit", error_msg)
+
+ # Modify iota just so there is something to commit.
+ iota_path = sbox.ospath('iota')
+ svntest.main.file_append(iota_path, "More stuff in iota")
+
+ # Commit, expect error code 1
+ exit_code, actual_stdout, actual_stderr = svntest.main.run_svn(
+ 1, 'ci', '--quiet', '-m', 'log msg', wc_dir)
+
+ # No stdout expected
+ svntest.verify.compare_and_display_lines('Start-commit hook test',
+ 'STDOUT', [], actual_stdout)
+
+ # Compare only the last two lines of stderr since the preceding ones
+ # contain source code file and line numbers.
+ if len(actual_stderr) > 2:
+ actual_stderr = actual_stderr[-2:]
+ expected_stderr = [ "svn: E165001: " +
+ svntest.actions.hook_failure_message('start-commit'),
+ error_msg + "\n",
+ ]
+ svntest.verify.compare_and_display_lines('Start-commit hook test',
+ 'STDERR',
+ expected_stderr, actual_stderr)
+
+ # Now list the txns in the repo. The list should be empty.
+ svntest.actions.run_and_verify_svnadmin([], [],
+ 'lstxns', sbox.repo_dir)
+
+#----------------------------------------------------------------------
+@Issue(3553)
+def pre_commit_hook_test(sbox):
+ "pre-commit hook failure case testing"
+
+ sbox.build()
+
+ # Get paths to the working copy and repository
+ wc_dir = sbox.wc_dir
+ repo_dir = sbox.repo_dir
+
+ # Create a hook that outputs a message to stderr and returns exit code 1
+ # Include a non-XML-safe message to regression-test issue #3553.
+ error_msg = "Text with <angle brackets> & ampersand"
+ svntest.actions.create_failing_hook(repo_dir, "pre-commit", error_msg)
+
+ # Modify iota just so there is something to commit.
+ iota_path = sbox.ospath('iota')
+ svntest.main.file_append(iota_path, "More stuff in iota")
+
+ # Commit, expect error code 1
+ exit_code, actual_stdout, actual_stderr = svntest.main.run_svn(
+ 1, 'ci', '--quiet', '-m', 'log msg', wc_dir)
+
+ # No stdout expected
+ svntest.verify.compare_and_display_lines('Pre-commit hook test',
+ 'STDOUT', [], actual_stdout)
+
+ # Compare only the last two lines of stderr since the preceding ones
+ # contain source code file and line numbers.
+ if len(actual_stderr) > 2:
+ actual_stderr = actual_stderr[-2:]
+ expected_stderr = [ "svn: E165001: " +
+ svntest.actions.hook_failure_message('pre-commit'),
+ error_msg + "\n",
+ ]
+ svntest.verify.compare_and_display_lines('Pre-commit hook test',
+ 'STDERR',
+ expected_stderr, actual_stderr)
+
+#----------------------------------------------------------------------
+
+def versioned_log_message(sbox):
+ "'svn commit -F foo' when foo is a versioned file"
+
+ sbox.build()
+
+ os.chdir(sbox.wc_dir)
+
+ iota_path = os.path.join('iota')
+ mu_path = os.path.join('A', 'mu')
+ log_path = os.path.join('A', 'D', 'H', 'omega')
+
+ svntest.main.file_append(iota_path, "2")
+
+ # try to check in a change using a versioned file as your log entry.
+ svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
+ 'ci', '-F', log_path)
+
+ # force it. should not produce any errors.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-F', log_path, '--force-log')
+
+ svntest.main.file_append(mu_path, "2")
+
+ # try the same thing, but specifying the file to commit explicitly.
+ svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
+ 'ci', '-F', log_path, mu_path)
+
+ # force it... should succeed.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci',
+ '-F', log_path,
+ '--force-log', mu_path)
+
+#----------------------------------------------------------------------
+
+def changelist_near_conflict(sbox):
+ "'svn commit --changelist=foo' above a conflict"
+
+ sbox.build()
+
+ wc_dir = sbox.wc_dir
+ iota_path = sbox.ospath('iota')
+ mu_path = sbox.ospath('A/mu')
+ gloo_path = sbox.ospath('A/D/H/gloo')
+
+ expected_status = make_standard_slew_of_changes(wc_dir)
+
+ # Create a changelist.
+ changelist_name = "logical-changeset"
+ svntest.actions.run_and_verify_svn(None, [],
+ "changelist", changelist_name,
+ mu_path, gloo_path)
+
+ # Create a conflict (making r2 in the process).
+ inject_conflict_into_wc(sbox, 'iota', iota_path,
+ None, expected_status, 2)
+
+ # Commit the changelist.
+ expected_output = svntest.wc.State(wc_dir, {
+ "A/D/H/gloo" : Item(verb='Adding'),
+ })
+ expected_status.tweak("A/D/H/gloo", wc_rev=3, status=" ")
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status,
+ [],
+ "--changelist=" + changelist_name,
+ "-m", "msg", wc_dir)
+
+
+#----------------------------------------------------------------------
+
+def commit_out_of_date_file(sbox):
+ "try to commit a file that is out-of-date"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make a backup copy of the working copy
+ wc_backup = sbox.add_wc_path('backup')
+ svntest.actions.duplicate_dir(wc_dir, wc_backup)
+
+ pi_path = sbox.ospath('A/D/G/pi')
+ backup_pi_path = os.path.join(wc_backup, 'A', 'D', 'G', 'pi')
+
+ svntest.main.file_append(pi_path, "new line\n")
+ expected_output = svntest.wc.State(wc_dir, {
+ "A/D/G/pi" : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak("A/D/G/pi", wc_rev=2, status=" ")
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ svntest.main.file_append(backup_pi_path, "hello")
+ expected_err = ".*(pi.*out of date|Out of date.*pi).*"
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'commit', '-m', 'log message',
+ wc_backup)
+
+@SkipUnless(server_gets_client_capabilities)
+@Issue(2991)
+def start_commit_detect_capabilities(sbox):
+ "start-commit hook sees client capabilities" # Issue #2991
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repos_dir = sbox.repo_dir
+
+ # Create a start-commit hook that detects the "mergeinfo" capability.
+ hook_text = "import sys\n" + \
+ "fp = open(sys.argv[1] + '/hooks.log', 'w')\n" + \
+ "caps = sys.argv[3].split(':')\n" + \
+ "if 'mergeinfo' in caps:\n" + \
+ " fp.write('yes')\n" + \
+ "else:\n" + \
+ " fp.write('no')\n" + \
+ "fp.close()\n"
+
+ start_commit_hook = svntest.main.get_start_commit_hook_path(repos_dir)
+ svntest.main.create_python_hook_script(start_commit_hook, hook_text)
+
+ # Commit something.
+ iota_path = sbox.ospath('iota')
+ svntest.main.file_append(iota_path, "More stuff in iota")
+ svntest.actions.run_and_verify_svn([], [], 'ci', '--quiet',
+ '-m', 'log msg', wc_dir)
+
+ # Check that "mergeinfo" was detected.
+ log_path = os.path.join(repos_dir, "hooks.log")
+ if os.path.exists(log_path):
+ data = open(log_path).read()
+ os.unlink(log_path)
+ else:
+ raise svntest.verify.SVNUnexpectedOutput("'%s' not found") % log_path
+ if data != 'yes':
+ raise svntest.Failure
+
+# Test for issue #3198
+@Issue(3198)
+def commit_added_missing(sbox):
+ "commit a missing to-be-added file should fail"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ mu_path = sbox.ospath('A/mu')
+ a_path = sbox.ospath('A/a.txt')
+ b_path = sbox.ospath('A/b.txt')
+
+ # Make two copies of mu: a and b
+ svntest.main.run_svn(None, 'cp', mu_path, a_path)
+ svntest.main.run_svn(None, 'cp', mu_path, b_path)
+
+ # remove b, make it missing
+ os.remove(b_path)
+
+ # Commit, hoping to see an error
+ svntest.actions.run_and_verify_svn([], ".* is scheduled for addition, but is missing",
+ 'commit', '-m', 'logmsg', wc_dir)
+
+#----------------------------------------------------------------------
+
+# Helper for commit-failure tests
+def commit_fails_at_path(path, wc_dir, error_re):
+ svntest.actions.run_and_verify_commit(wc_dir,
+ None,
+ None,
+ error_re,
+ path)
+
+def tree_conflicts_block_commit(sbox):
+ "tree conflicts block commit"
+
+ # Commit is not allowed in a directory containing tree conflicts.
+ # This test corresponds to use cases 1-3 (with file victims) in
+ # notes/tree-conflicts/use-cases.txt.
+
+ svntest.actions.build_greek_tree_conflicts(sbox)
+ wc_dir = sbox.wc_dir
+ A = sbox.ospath('A')
+ D = sbox.ospath('A/D')
+ G = sbox.ospath('A/D/G')
+
+ error_re = ".*remains in conflict.*"
+ commit_fails_at_path(wc_dir, wc_dir, error_re)
+ commit_fails_at_path(A, A, error_re)
+ commit_fails_at_path(D, D, error_re)
+ commit_fails_at_path(G, G, error_re)
+ commit_fails_at_path(os.path.join(G, 'pi'), G, error_re)
+
+
+def tree_conflicts_resolved(sbox):
+ "tree conflicts resolved"
+
+ # Commit is allowed after tree conflicts are resolved.
+ # This test corresponds to use cases 1-3 in
+ # notes/tree-conflicts/use-cases.txt.
+
+ svntest.actions.build_greek_tree_conflicts(sbox)
+ wc_dir = sbox.wc_dir
+
+ # Duplicate wc for tests
+ wc_dir_2 = sbox.add_wc_path('2')
+ svntest.actions.duplicate_dir(wc_dir, wc_dir_2)
+
+ # Mark the tree conflict victims as resolved
+ G = sbox.ospath('A/D/G')
+ victims = [ os.path.join(G, v) for v in ['pi', 'rho', 'tau'] ]
+ svntest.actions.run_and_verify_resolved(victims)
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('A/D/G/pi', status='D ')
+ expected_status.tweak('A/D/G/rho', status='A ', copied='+', wc_rev='-')
+ expected_status.remove('A/D/G/tau')
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Recursively resolved in parent directory -- expect same result
+ G2 = os.path.join(wc_dir_2, 'A', 'D', 'G')
+ victims = [ os.path.join(G2, v) for v in ['pi', 'rho', 'tau'] ]
+ svntest.actions.run_and_verify_resolved(victims, G2, '-R')
+
+ expected_status.wc_dir = wc_dir_2
+ svntest.actions.run_and_verify_status(wc_dir_2, expected_status)
+
+#----------------------------------------------------------------------
+def commit_multiple_nested_deletes(sbox):
+ "committing multiple nested deletes"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A = sbox.ospath('A')
+ A_B = os.path.join(A, 'B')
+
+ sbox.simple_rm('A')
+
+ svntest.main.run_svn(None, 'ci', A, A_B, '-m', 'Q')
+
+@Issue(4042)
+def commit_incomplete(sbox):
+ "commit an incomplete dir"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_propset('pname', 'pval', 'A/B')
+ svntest.actions.set_incomplete(sbox.ospath('A/B'), 1)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B', status='! ', wc_rev=2)
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+# Reported here:
+# Message-ID: <4EBF0FC9.300@gmail.com>
+# Date: Sun, 13 Nov 2011 13:31:05 +1300
+# From: Fergus Slorach <sugref@gmail.com>
+# Subject: svn commit --targets behaviour change in 1.7?
+@Issue(4059)
+def commit_add_subadd(sbox):
+ "committing add with explicit subadd targets"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ targets_file = sbox.ospath('targets') # ### better tempdir?
+ targets_file = os.path.abspath(targets_file)
+
+ # prepare targets file
+ targets = "A/D A/D/H A/D/H/chi A/D/H/omega A/D/H/psi".split()
+ open(targets_file, 'w').write("\n".join(targets))
+
+ # r2: rm A/D
+ sbox.simple_rm('A/D')
+ sbox.simple_commit(message='rm')
+
+ # r3: revert r2, with specific invocation
+ os.chdir(wc_dir)
+ svntest.main.run_svn(None, 'up')
+ svntest.main.run_svn(None, 'merge', '-c', '-2', './')
+ svntest.main.run_svn(None, 'commit', '--targets', targets_file, '-mm')
+
+def commit_danglers(sbox):
+ "verify committing some dangling children fails"
+
+ sbox.build(read_only=True)
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_copy('A','A_copied')
+
+ A_copied = sbox.ospath('A_copied')
+ mu_copied = sbox.ospath('A_copied/mu')
+
+ svntest.main.file_write(mu_copied, "xxxx")
+
+ # We already test for this problem for some time
+ expected_error = "svn: E200009: '.*A_copied' .*exist.*yet.* '.*mu'.*part"
+ svntest.actions.run_and_verify_commit(mu_copied,
+ None,
+ None,
+ expected_error)
+
+ # But now do the same thing via changelist filtering
+ svntest.main.run_svn(None, 'changelist', 'L', mu_copied, sbox.ospath('A/mu'))
+
+ # And try to commit A_copied itself with changelist filtering
+ svntest.actions.run_and_verify_commit(A_copied,
+ None,
+ None,
+ expected_error,
+ A_copied, '--cl', 'L')
+
+ # And on the wcroot
+ svntest.actions.run_and_verify_commit(wc_dir,
+ None,
+ None,
+ expected_error,
+ wc_dir, '--cl', 'L')
+
+#----------------------------------------------------------------------
+# Test for issue 4203: Commit of moved dir with modified file in
+# dir/subdir should bump LastChangedRev of subdir in originating WC
+@XFail()
+@Issue(4203)
+def last_changed_of_copied_subdir(sbox):
+ "last changed of copied subdir"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_move('A/B','A/B_copied')
+
+ B_copied = sbox.ospath('A/B_copied')
+ E_copied = sbox.ospath('A/B_copied/E')
+ alpha_copied = sbox.ospath('A/B_copied/E/alpha')
+
+ svntest.main.file_write(alpha_copied, "xxxx")
+
+ svntest.main.run_svn(None, 'commit', wc_dir, '-mm')
+
+ expected = {'Revision' : '2',
+ 'Last Changed Rev' : '2',
+ }
+ svntest.actions.run_and_verify_info([expected], E_copied)
+
+def commit_unversioned(sbox):
+ "verify behavior on unversioned targets"
+
+ sbox.build(read_only=True)
+ wc_dir = sbox.wc_dir
+
+ expected_err = '.*E200009: .*existing.*\' is not under version control.*'
+
+ # Unversioned, but existing file
+ svntest.main.file_write(sbox.ospath('existing'), "xxxx")
+ svntest.actions.run_and_verify_commit(wc_dir, None, None, expected_err,
+ sbox.ospath('existing'))
+
+ # Unversioned, not existing
+ svntest.actions.run_and_verify_commit(wc_dir, None, None, expected_err,
+ sbox.ospath('not-existing'))
+
+@Issue(4400)
+def commit_cp_with_deep_delete(sbox):
+ "verify behavior of a copy with a deep (>=3) delete"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Prep by adding a tree deep enough to exercise the issue.
+ sbox.simple_mkdir('A/B/E/I')
+ sbox.simple_commit(message='prep')
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+ # copy the deep tree and then delete a dir 3 deep.
+ sbox.simple_copy('A','A2')
+ sbox.simple_rm('A2/B/E/I')
+
+ # come up with the expected output and status
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A2' : Item(verb='Adding'),
+ 'A2/B/E/I' : Item(verb='Deleting'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'A/B/E/I' : Item(status=' ', wc_rev='2'),
+ 'A2' : Item(status=' ', wc_rev='3'),
+ 'A2/B' : Item(status=' ', wc_rev='3'),
+ 'A2/B/lambda' : Item(status=' ', wc_rev='3'),
+ 'A2/B/F' : Item(status=' ', wc_rev='3'),
+ 'A2/B/E' : Item(status=' ', wc_rev='3'),
+ 'A2/B/E/alpha' : Item(status=' ', wc_rev='3'),
+ 'A2/B/E/beta' : Item(status=' ', wc_rev='3'),
+ 'A2/D' : Item(status=' ', wc_rev='3'),
+ 'A2/D/gamma' : Item(status=' ', wc_rev='3'),
+ 'A2/D/H' : Item(status=' ', wc_rev='3'),
+ 'A2/D/H/psi' : Item(status=' ', wc_rev='3'),
+ 'A2/D/H/omega' : Item(status=' ', wc_rev='3'),
+ 'A2/D/H/chi' : Item(status=' ', wc_rev='3'),
+ 'A2/D/G' : Item(status=' ', wc_rev='3'),
+ 'A2/D/G/tau' : Item(status=' ', wc_rev='3'),
+ 'A2/D/G/rho' : Item(status=' ', wc_rev='3'),
+ 'A2/D/G/pi' : Item(status=' ', wc_rev='3'),
+ 'A2/C' : Item(status=' ', wc_rev='3'),
+ 'A2/mu' : Item(status=' ', wc_rev='3'),
+ })
+
+ # Commit the copy without the one dir.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+def commit_deep_deleted(sbox):
+ "try to commit a deep descendant of a deleted node"
+
+ sbox.build()
+
+ sbox.simple_move('A', 'AA')
+
+ sbox.simple_propset('k', 'v', 'AA/D/G')
+
+ # Committing some added descendant returns a proper error
+ expected_err = ('.*svn: E200009: \'%s\' is not known to exist in the ' +
+ 'repository and is not part of the commit, yet its ' +
+ 'child \'%s\' is part of the commit.*') % (
+ re.escape(os.path.abspath(sbox.ospath('AA'))),
+ re.escape(os.path.abspath(sbox.ospath('AA/D/G'))))
+
+ svntest.actions.run_and_verify_commit(sbox.wc_dir,
+ None,
+ None,
+ expected_err,
+ sbox.ospath('AA/D/G'))
+
+ sbox.simple_propdel('k', 'AA/D/G')
+ sbox.simple_rm('AA/D/G')
+
+ # But a delete fails..
+ # This used to trigger an assertion in Subversion 1.8.0-1.8.8, because
+ # the status walker couldn't find the repository path for AA/D/G
+ svntest.actions.run_and_verify_commit(sbox.wc_dir,
+ None,
+ None,
+ expected_err,
+ sbox.ospath('AA/D/G'))
+
+ # And now commit like how a GUI client would do it, but forgetting the move
+ expected_err = ('svn: E200009: Cannot commit \'%s\' because it was moved ' +
+ 'from \'%s\' which is not part of the commit; both sides ' +
+ 'of the move must be committed together') % (
+ re.escape(os.path.abspath(sbox.ospath('AA'))),
+ re.escape(os.path.abspath(sbox.ospath('A'))))
+ svntest.actions.run_and_verify_commit(sbox.wc_dir,
+ None,
+ None,
+ expected_err,
+ '--depth', 'empty',
+ sbox.ospath('AA/D/G'),
+ sbox.ospath('AA'))
+
+
+ # And now how it works
+ svntest.actions.run_and_verify_commit(sbox.wc_dir,
+ None,
+ None,
+ [],
+ '--depth', 'empty',
+ sbox.ospath('AA/D/G'),
+ sbox.ospath('AA'),
+ sbox.ospath('A'))
+
+@Issue(4480)
+def commit_mergeinfo_ood(sbox):
+ "commit of mergeinfo that should cause out of date"
+
+ sbox.build()
+ sbox.simple_rm('A', 'iota')
+ sbox.simple_commit() # r2
+
+ sbox.simple_mkdir('trunk', 'branch')
+ sbox.simple_commit() # r3
+
+ sbox.simple_append('trunk/a', 'This is a\n')
+ sbox.simple_add('trunk/a')
+ sbox.simple_commit() # r4
+
+ sbox.simple_append('trunk/b', 'This is b\n')
+ sbox.simple_add('trunk/b')
+ sbox.simple_commit() # r5
+
+ sbox.simple_update() # To r5
+
+ expected_output = [
+ '--- Merging r4 into \'%s\':\n' % sbox.ospath('branch'),
+ 'A %s\n' % sbox.ospath('branch/a'),
+ '--- Recording mergeinfo for merge of r4' \
+ ' into \'%s\':\n' % sbox.ospath('branch'),
+ ' U %s\n' % sbox.ospath('branch'),
+ ]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'merge', '-c4', '^/trunk',
+ sbox.ospath('branch'))
+
+ sbox.simple_commit()
+
+ sbox.simple_update(revision='5')
+
+ expected_output = [
+ '--- Merging r5 into \'%s\':\n' % sbox.ospath('branch'),
+ 'A %s\n' % sbox.ospath('branch/b'),
+ '--- Recording mergeinfo for merge of r5 into \'%s\':\n' % sbox.ospath('branch'),
+ ' U %s\n' % sbox.ospath('branch'),
+ ]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'merge', '-c5', '^/trunk',
+ sbox.ospath('branch'))
+
+ # Currently this commit succeeds with dav over HTTPv2, while it should really fail
+ expected_err = '.*out of date.*'
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'commit', sbox.ospath(''), '-m', 'M')
+
+@Issue(2295)
+def mkdir_conflict_proper_error(sbox):
+ "mkdir conflict should produce a proper error"
+
+ sbox.build(create_wc=False)
+ repo_url = sbox.repo_url
+
+ expected_error = "svn: E160020: .* already exists.*'/A'"
+ svntest.actions.run_and_verify_svn(None, expected_error,
+ 'mkdir', repo_url + '/A',
+ '-m', '')
+
+def commit_xml(sbox):
+ "commit an xml file"
+
+ sbox.build()
+
+ sbox.simple_add_text('index.xml', 'index.xml')
+ sbox.simple_add_text('index.html', 'index.html')
+ sbox.simple_propset('svn:mime-type', 'text/xml', 'index.xml')
+ sbox.simple_propset('svn:mime-type', 'text/html', 'index.html')
+
+ # This currently (2015-04-09) makes mod_dav return a 'HTTP/1.1 201 Created'
+ # result with content type text/xml (copied from file), which used to
+ # invoke the error parsing.
+ #
+ # Depending on the Apache version and config, this may cause an xml error.
+ sbox.simple_commit()
+
+ # This currently (2015-04-09) makes mod_dav return a 'HTTP/1.1 204 Updated'
+ # result with content type text/xml (copied from file), which used to
+ # invoke the error parsing.
+ #
+ # Depending on the Apache version and config, this may cause an xml error.
+ sbox.simple_append('index.xml', '<Q></R>', True)
+ sbox.simple_append('index.html', '<Q></R>', True)
+ sbox.simple_commit()
+
+@Issue(4722)
+def commit_issue4722_checksum(sbox):
+ "commit that triggered checksum failure"
+
+ sbox.build()
+
+ # This bug only ever affected FSFS in 1.9.7. The test could be
+ # considered a bit "fragile" as any change to the on-disk
+ # representation may well make it pass trivially. On the other hand
+ # it should still pass irrespective of that representation, and for
+ # all other repository types.
+
+ # Enough data to allow the bug to occur
+ with open(sbox.ospath('f'), 'w') as fp:
+ for i in range(0, 2001):
+ fp.write('abcdefghijklmnopqrstuvwxyz')
+ sbox.simple_add('f')
+ sbox.simple_commit()
+
+ # Just the right data to trigger the bug
+ with open(sbox.ospath('f'), 'w') as fp:
+ for i in range(0, 8713):
+ fp.write(str(i))
+ fp.write("11111")
+ sbox.simple_commit()
+
+ # Trigger deduplication which is when the bug occurred
+ with open(sbox.ospath('f'), 'w') as fp:
+ for i in range(0, 2001):
+ fp.write('abcdefghijklmnopqrstuvwxyz')
+ sbox.simple_commit()
+
+
+########################################################################
+# Run the tests
+
+# list all tests here, starting with None:
+test_list = [ None,
+ commit_one_file,
+ commit_one_new_file,
+ commit_one_new_binary_file,
+ commit_multiple_targets,
+ commit_multiple_targets_2,
+ commit_inclusive_dir,
+ commit_top_dir,
+ commit_unversioned_thing,
+ nested_dir_replacements,
+ hudson_part_1,
+ hudson_part_1_variation_1,
+ hudson_part_1_variation_2,
+ hudson_part_2,
+ hudson_part_2_1,
+ hook_test,
+ merge_mixed_revisions,
+ commit_uri_unsafe,
+ commit_deleted_edited,
+ commit_in_dir_scheduled_for_addition,
+ commit_rmd_and_deleted_file,
+ commit_add_file_twice,
+ commit_from_long_dir,
+ commit_with_lock,
+ commit_current_dir,
+ commit_multiple_wc_nested,
+ commit_multiple_wc,
+ commit_multiple_wc_multiple_repos,
+ commit_nonrecursive,
+ failed_commit,
+ commit_out_of_date_deletions,
+ commit_with_bad_log_message,
+ commit_with_mixed_line_endings,
+ commit_with_mixed_line_endings_in_ignored_part,
+ from_wc_top_with_bad_editor,
+ mods_in_schedule_delete,
+ tab_test,
+ local_mods_are_not_commits,
+ post_commit_hook_test,
+ commit_same_folder_in_targets,
+ commit_inconsistent_eol,
+ mkdir_with_revprop,
+ delete_with_revprop,
+ commit_with_revprop,
+ import_with_revprop,
+ copy_R2R_with_revprop,
+ copy_WC2R_with_revprop,
+ move_R2R_with_revprop,
+ propedit_with_revprop,
+ set_multiple_props_with_revprop,
+ use_empty_value_in_revprop_pair,
+ no_equals_in_revprop_pair,
+ set_invalid_revprops,
+ start_commit_hook_test,
+ pre_commit_hook_test,
+ versioned_log_message,
+ changelist_near_conflict,
+ commit_out_of_date_file,
+ start_commit_detect_capabilities,
+ commit_added_missing,
+ tree_conflicts_block_commit,
+ tree_conflicts_resolved,
+ commit_multiple_nested_deletes,
+ commit_incomplete,
+ commit_add_subadd,
+ commit_danglers,
+ last_changed_of_copied_subdir,
+ commit_unversioned,
+ commit_cp_with_deep_delete,
+ commit_deep_deleted,
+ commit_mergeinfo_ood,
+ mkdir_conflict_proper_error,
+ commit_xml,
+ commit_issue4722_checksum,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/copy_tests.py b/subversion/tests/cmdline/copy_tests.py
new file mode 100755
index 0000000..3bb8599
--- /dev/null
+++ b/subversion/tests/cmdline/copy_tests.py
@@ -0,0 +1,5965 @@
+#!/usr/bin/env python
+#
+# copy_tests.py: testing the many uses of 'svn cp' and 'svn mv'
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import stat, os, re, shutil, logging
+
+logger = logging.getLogger()
+
+# Our testing module
+import svntest
+from svntest import main
+from svntest.main import (
+ SVN_PROP_MERGEINFO,
+ file_append,
+ file_write,
+ make_log_msg,
+ run_svn,
+)
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+exp_noop_up_out = svntest.actions.expected_noop_update_output
+
+#
+#----------------------------------------------------------------------
+# Helper for wc_copy_replacement and repos_to_wc_copy_replacement
+def copy_replace(sbox, wc_copy):
+ """Tests for 'R'eplace functionanity for files.
+
+Depending on the value of wc_copy either a working copy (when true)
+or a url (when false) copy source is used."""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # File scheduled for deletion
+ rho_path = sbox.ospath('A/D/G/rho')
+ svntest.actions.run_and_verify_svn(None, [], 'rm', rho_path)
+
+ # Status before attempting copies
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/G/rho', status='D ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # The copy shouldn't fail
+ if wc_copy:
+ pi_src = sbox.ospath('A/D/G/pi')
+ else:
+ pi_src = sbox.repo_url + '/A/D/G/pi'
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', pi_src, rho_path)
+
+ # Now commit
+ expected_status.tweak('A/D/G/rho', status='R ', copied='+', wc_rev='-')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_status.tweak('A/D/G/rho', status=' ', copied=None,
+ wc_rev='2')
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/rho': Item(verb='Replacing'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+# Helper for wc_copy_replace_with_props and
+# repos_to_wc_copy_replace_with_props
+def copy_replace_with_props(sbox, wc_copy):
+ """Tests for 'R'eplace functionanity for files with props.
+
+ Depending on the value of wc_copy either a working copy (when true) or
+ a url (when false) copy source is used."""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Use a temp file to set properties with wildcards in their values
+ # otherwise Win32/VS2005 will expand them
+ prop_path = sbox.ospath('proptmp')
+ svntest.main.file_append(prop_path, '*')
+
+ # Set props on file which is copy-source later on
+ pi_path = sbox.ospath('A/D/G/pi')
+ rho_path = sbox.ospath('A/D/G/rho')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'phony-prop', '-F',
+ prop_path, pi_path)
+ os.remove(prop_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'svn:eol-style', 'LF', rho_path)
+
+ # Verify props having been set
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/D/G/pi',
+ props={ 'phony-prop': '*' })
+ expected_disk.tweak('A/D/G/rho',
+ props={ 'svn:eol-style': 'LF' })
+
+ svntest.actions.verify_disk(wc_dir, expected_disk, True)
+
+ # Commit props
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/pi': Item(verb='Sending'),
+ 'A/D/G/rho': Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/G/pi', wc_rev='2')
+ expected_status.tweak('A/D/G/rho', wc_rev='2')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Bring wc into sync
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # File scheduled for deletion
+ svntest.actions.run_and_verify_svn(None, [], 'rm', rho_path)
+
+ # Status before attempting copies
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('A/D/G/rho', status='D ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # The copy shouldn't fail
+ if wc_copy:
+ pi_src = sbox.ospath('A/D/G/pi')
+ else:
+ pi_src = sbox.repo_url + '/A/D/G/pi'
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', pi_src, rho_path)
+
+ # Verify both content and props have been copied
+ if wc_copy:
+ props = { 'phony-prop' : '*'}
+ else:
+ props = { 'phony-prop' : '*'}
+
+ expected_disk.tweak('A/D/G/rho',
+ contents="This is the file 'pi'.\n",
+ props=props)
+ svntest.actions.verify_disk(wc_dir, expected_disk.old_tree(), True)
+
+ # Now commit and verify
+ expected_status.tweak('A/D/G/rho', status='R ', copied='+', wc_rev='-')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_status.tweak('A/D/G/rho', status=' ', copied=None,
+ wc_rev='3')
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/rho': Item(verb='Replacing'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+# (Taken from notes/copy-planz.txt:)
+#
+# We have four use cases for 'svn cp' now.
+#
+# A. svn cp wc_path1 wc_path2
+#
+# This duplicates a path in the working copy, and schedules it
+# for addition with history.
+#
+# B. svn cp URL [-r rev] wc_path
+#
+# This "checks out" URL (in REV) into the working copy at
+# wc_path, integrates it, and schedules it for addition with
+# history.
+#
+# C. svn cp wc_path URL
+#
+# This immediately commits wc_path to URL on the server; the
+# commit will be an addition with history. The commit will not
+# change the working copy at all.
+#
+# D. svn cp URL1 [-r rev] URL2
+#
+# This causes a server-side copy to happen immediately; no
+# working copy is required.
+
+
+
+# TESTS THAT NEED TO BE WRITTEN
+#
+# Use Cases A & C
+#
+# -- single files, with/without local mods, as both 'cp' and 'mv'.
+# (need to verify commit worked by updating a 2nd working copy
+# to see the local mods)
+#
+# -- dir copy, has mixed revisions
+#
+# -- dir copy, has local mods (an edit, an add, a delete, and a replace)
+#
+# -- dir copy, has mixed revisions AND local mods
+#
+# -- dir copy, has mixed revisions AND another previously-made copy!
+# (perhaps done as two nested 'mv' commands!)
+#
+# Use Case D
+#
+
+# By the time the copy setup algorithm is complete, the copy
+# operation will have four parts: SRC-DIR, SRC-BASENAME, DST-DIR,
+# DST-BASENAME. In all cases, SRC-DIR/SRC-BASENAME and DST-DIR must
+# already exist before the operation, but DST-DIR/DST-BASENAME must
+# NOT exist.
+#
+# Besides testing things that don't meet the above criteria, we want to
+# also test valid cases:
+#
+# - where SRC-DIR/SRC-BASENAME is a file or a dir.
+# - where SRC-DIR (or SRC-DIR/SRC-BASENAME) is a parent/grandparent
+# directory of DST-DIR
+# - where SRC-DIR (or SRC-DIR/SRC-BASENAME) is a child/grandchild
+# directory of DST-DIR
+# - where SRC-DIR (or SRC-DIR/SRC-BASENAME) is not in the lineage
+# of DST-DIR at all
+
+
+
+#----------------------------------------------------------------------
+@Issue(1091)
+def basic_copy_and_move_files(sbox):
+ "basic copy and move commands -- on files only"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ mu_path = sbox.ospath('A/mu')
+ iota_path = sbox.ospath('iota')
+ rho_path = sbox.ospath('A/D/G/rho')
+ D_path = sbox.ospath('A/D')
+ alpha_path = sbox.ospath('A/B/E/alpha')
+ H_path = sbox.ospath('A/D/H')
+ F_path = sbox.ospath('A/B/F')
+ alpha2_path = sbox.ospath('A/C/alpha2')
+
+ # Make local mods to mu and rho
+ svntest.main.file_append(mu_path, 'appended mu text')
+ svntest.main.file_append(rho_path, 'new appended text for rho')
+
+ # Copy rho to D -- local mods
+ svntest.actions.run_and_verify_svn(None, [], 'cp', rho_path, D_path)
+
+ # Copy alpha to C -- no local mods, and rename it to 'alpha2' also
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ alpha_path, alpha2_path)
+
+ # Move mu to H -- local mods
+ svntest.actions.run_and_verify_svn(None, [], 'mv',
+ mu_path, H_path)
+
+ # Move iota to F -- no local mods
+ svntest.actions.run_and_verify_svn(None, [], 'mv', iota_path, F_path)
+
+ # Created expected output tree for 'svn ci':
+ # We should see four adds, two deletes, and one change in total.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/rho' : Item(verb='Sending'),
+ 'A/D/rho' : Item(verb='Adding'),
+ 'A/C/alpha2' : Item(verb='Adding'),
+ 'A/D/H/mu' : Item(verb='Adding'),
+ 'A/B/F/iota' : Item(verb='Adding'),
+ 'A/mu' : Item(verb='Deleting'),
+ 'iota' : Item(verb='Deleting'),
+ })
+
+ # Create expected status tree; all local revisions should be at 1,
+ # but several files should be at revision 2. Also, two files should
+ # be missing.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/G/rho', 'A/mu', wc_rev=2)
+
+ expected_status.add({
+ 'A/D/rho' : Item(status=' ', wc_rev=2),
+ 'A/C/alpha2' : Item(status=' ', wc_rev=2),
+ 'A/D/H/mu' : Item(status=' ', wc_rev=2),
+ 'A/B/F/iota' : Item(status=' ', wc_rev=2),
+ })
+
+ expected_status.remove('A/mu', 'iota')
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Issue 1091, alpha2 would now have the wrong checksum and so a
+ # subsequent commit would fail
+ svntest.main.file_append(alpha2_path, 'appended alpha2 text')
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/C/alpha2' : Item(verb='Sending'),
+ })
+ expected_status.tweak('A/C/alpha2', wc_rev=3)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Assure that attempts at local copy and move fail when a log
+ # message is provided.
+ expected_stderr = \
+ ".*Local, non-commit operations do not take a log message"
+ svntest.actions.run_and_verify_svn(None, expected_stderr,
+ 'cp', '-m', 'op fails', rho_path, D_path)
+ svntest.actions.run_and_verify_svn(None, expected_stderr,
+ 'mv', '-m', 'op fails', rho_path, D_path)
+
+
+#----------------------------------------------------------------------
+
+# This test passes over ra_local certainly; we're adding it because at
+# one time it failed over ra_neon. Specifically, it failed when
+# mod_dav_svn first started sending vsn-rsc-urls as "CR/path", and was
+# sending bogus CR/paths for items within copied subtrees.
+
+def receive_copy_in_update(sbox):
+ "receive a copied directory during update"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make a backup copy of the working copy.
+ wc_backup = sbox.add_wc_path('backup')
+ svntest.actions.duplicate_dir(wc_dir, wc_backup)
+
+ # Define a zillion paths in both working copies.
+ G_path = sbox.ospath('A/D/G')
+ newG_path = sbox.ospath('A/B/newG')
+
+ # Copy directory A/D to A/B/newG
+ svntest.actions.run_and_verify_svn(None, [], 'cp', G_path, newG_path)
+
+ # Created expected output tree for 'svn ci':
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/newG' : Item(verb='Adding'),
+ })
+
+ # Create expected status tree.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B/newG' : Item(status=' ', wc_rev=2),
+ 'A/B/newG/pi' : Item(status=' ', wc_rev=2),
+ 'A/B/newG/rho' : Item(status=' ', wc_rev=2),
+ 'A/B/newG/tau' : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Now update the other working copy; it should receive a full add of
+ # the newG directory and its contents.
+
+ # Expected output of update
+ expected_output = svntest.wc.State(wc_backup, {
+ 'A/B/newG' : Item(status='A '),
+ 'A/B/newG/pi' : Item(status='A '),
+ 'A/B/newG/rho' : Item(status='A '),
+ 'A/B/newG/tau' : Item(status='A '),
+ })
+
+ # Create expected disk tree for the update.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/B/newG' : Item(),
+ 'A/B/newG/pi' : Item("This is the file 'pi'.\n"),
+ 'A/B/newG/rho' : Item("This is the file 'rho'.\n"),
+ 'A/B/newG/tau' : Item("This is the file 'tau'.\n"),
+ })
+
+ # Create expected status tree for the update.
+ expected_status = svntest.actions.get_virginal_state(wc_backup, 2)
+ expected_status.add({
+ 'A/B/newG' : Item(status=' ', wc_rev=2),
+ 'A/B/newG/pi' : Item(status=' ', wc_rev=2),
+ 'A/B/newG/rho' : Item(status=' ', wc_rev=2),
+ 'A/B/newG/tau' : Item(status=' ', wc_rev=2),
+ })
+
+ # Do the update and check the results in three ways.
+ svntest.actions.run_and_verify_update(wc_backup,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+
+#----------------------------------------------------------------------
+
+# Regression test for issue #683. In particular, this bug prevented
+# us from running 'svn cp -r N src_URL dst_URL' as a means of
+# resurrecting a deleted directory. Also, the final 'update' at the
+# end of this test was uncovering a ghudson 'deleted' edge-case bug.
+# (In particular, re-adding G to D, when D already had a 'deleted'
+# entry for G. The entry-merge wasn't overwriting the 'deleted'
+# attribute, and thus the newly-added G was ending up disconnected
+# from D.)
+@Issue(683)
+def resurrect_deleted_dir(sbox):
+ "resurrect a deleted directory"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ G_path = sbox.ospath('A/D/G')
+
+ # Delete directory A/D/G, commit that as r2.
+ svntest.actions.run_and_verify_svn(None, [], 'rm', '--force',
+ G_path)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G' : Item(verb='Deleting'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/D/G')
+ expected_status.remove('A/D/G/pi')
+ expected_status.remove('A/D/G/rho')
+ expected_status.remove('A/D/G/tau')
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Use 'svn cp URL@1 URL' to resurrect the deleted directory, where
+ # the two URLs are identical. This used to trigger a failure.
+ url = sbox.repo_url + '/A/D/G'
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ url + '@1', url,
+ '-m', 'logmsg')
+
+ # For completeness' sake, update to HEAD, and verify we have a full
+ # greek tree again, all at revision 3.
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G' : Item(status='A '),
+ 'A/D/G/pi' : Item(status='A '),
+ 'A/D/G/rho' : Item(status='A '),
+ 'A/D/G/tau' : Item(status='A '),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+def copy_deleted_dir_into_prefix(sbox):
+ "copy a deleted dir to a prefix of its old path"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ D_path = sbox.ospath('A/D')
+
+ # Delete directory A/D, commit that as r2.
+ svntest.actions.run_and_verify_svn(None, [], 'rm', '--force',
+ D_path)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D' : Item(verb='Deleting'),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ None)
+
+ # Ok, copy from a deleted URL into a prefix of that URL, this used to
+ # result in an assert failing.
+ url1 = sbox.repo_url + '/A/D/G'
+ url2 = sbox.repo_url + '/A/D'
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ url1 + '@1', url2,
+ '-m', 'logmsg')
+
+#----------------------------------------------------------------------
+
+# Test that we're enforcing proper 'svn cp' overwrite behavior. Note
+# that svn_fs_copy() will always overwrite its destination if an entry
+# by the same name already exists. However, libsvn_client should be
+# doing existence checks to prevent directories from being
+# overwritten, and files can't be overwritten because the RA layers
+# are doing out-of-dateness checks during the commit.
+
+
+def no_copy_overwrites(sbox):
+ "svn cp URL URL cannot overwrite destination"
+
+ sbox.build()
+
+ wc_dir = sbox.wc_dir
+
+ fileURL1 = sbox.repo_url + "/A/B/E/alpha"
+ fileURL2 = sbox.repo_url + "/A/B/E/beta"
+ dirURL1 = sbox.repo_url + "/A/D/G"
+ dirURL2 = sbox.repo_url + "/A/D/H"
+
+ # Expect out-of-date failure if 'svn cp URL URL' tries to overwrite a file
+ svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
+ 'cp', fileURL1, fileURL2,
+ '-m', 'fooogle')
+
+ # Create A/D/H/G by running 'svn cp ...A/D/G .../A/D/H'
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', dirURL1, dirURL2,
+ '-m', 'fooogle')
+
+ # Repeat the last command. It should *fail* because A/D/H/G already exists.
+ svntest.actions.run_and_verify_svn(
+ None, ".*E160020: Path.*/A/D/H/G' already exists.*",
+ 'cp', dirURL1, dirURL2,
+ '-m', 'fooogle')
+
+#----------------------------------------------------------------------
+
+# Issue 845. WC -> WC copy should not overwrite base text-base
+@Issue(845)
+def no_wc_copy_overwrites(sbox):
+ "svn cp PATH PATH cannot overwrite destination"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ # File simply missing
+ tau_path = sbox.ospath('A/D/G/tau')
+ os.remove(tau_path)
+
+ # Status before attempting copies
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/G/tau', status='! ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # These copies should fail
+ pi_path = sbox.ospath('A/D/G/pi')
+ rho_path = sbox.ospath('A/D/G/rho')
+ svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
+ 'cp', pi_path, rho_path)
+ svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
+ 'cp', pi_path, tau_path)
+
+ # Status after failed copies should not have changed
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+
+# Takes out working-copy locks for A/B2 and child A/B2/E. At one stage
+# during issue 749 the second lock caused an already-locked error.
+@Issue(749)
+def copy_modify_commit(sbox):
+ "copy a tree and modify before commit"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ B_path = sbox.ospath('A/B')
+ B2_path = sbox.ospath('A/B2')
+
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ B_path, B2_path)
+
+ alpha_path = sbox.ospath('A/B2/E/alpha')
+ svntest.main.file_append(alpha_path, "modified alpha")
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B2' : Item(verb='Adding'),
+ 'A/B2/E/alpha' : Item(verb='Sending'),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ None)
+
+#----------------------------------------------------------------------
+
+# Issue 591, at one point copying a file from URL to WC didn't copy
+# properties.
+@Issue(591)
+def copy_files_with_properties(sbox):
+ "copy files with properties"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Set a property on a file
+ rho_path = sbox.ospath('A/D/G/rho')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'pname', 'pval', rho_path)
+
+ # and commit it
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/rho' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/G/rho', status=' ', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+
+ # Set another property, but don't commit it yet
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'pname2', 'pval2', rho_path)
+
+ # WC to WC copy of file with committed and uncommitted properties
+ rho_wc_path = sbox.ospath('A/D/G/rho_wc')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', rho_path, rho_wc_path)
+
+ # REPOS to WC copy of file with properties
+ rho_url_path = sbox.ospath('A/D/G/rho_url')
+ rho_url = sbox.repo_url + '/A/D/G/rho'
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', rho_url, rho_url_path)
+
+ # Properties are not visible in WC status 'A'
+ expected_status.add({
+ 'A/D/G/rho' : Item(status=' M', wc_rev='2'),
+ 'A/D/G/rho_wc' : Item(status='A ', wc_rev='-', copied='+'),
+ 'A/D/G/rho_url' : Item(status='A ', wc_rev='-', copied='+'),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Check properties explicitly
+ svntest.actions.run_and_verify_svn(['pval\n'], [],
+ 'propget', 'pname', rho_wc_path)
+ svntest.actions.run_and_verify_svn(['pval2\n'], [],
+ 'propget', 'pname2', rho_wc_path)
+ svntest.actions.run_and_verify_svn(['pval\n'], [],
+ 'propget', 'pname', rho_url_path)
+
+ # Commit and properties are visible in status
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/rho' : Item(verb='Sending'),
+ 'A/D/G/rho_wc' : Item(verb='Adding'),
+ 'A/D/G/rho_url' : Item(verb='Adding'),
+ })
+ expected_status.tweak('A/D/G/rho', status=' ', wc_rev=3)
+ expected_status.remove('A/D/G/rho_wc', 'A/D/G/rho_url')
+ expected_status.add({
+ 'A/D/G/rho_wc' : Item(status=' ', wc_rev=3),
+ 'A/D/G/rho_url' : Item(status=' ', wc_rev=3),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+
+#----------------------------------------------------------------------
+
+# Issue 918
+@Issue(918)
+def copy_delete_commit(sbox):
+ "copy a tree and delete part of it before commit"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ B_path = sbox.ospath('A/B')
+ B2_path = sbox.ospath('A/B2')
+
+ # copy a tree
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ B_path, B2_path)
+
+ # delete two files
+ lambda_path = sbox.ospath('A/B2/lambda')
+ alpha_path = sbox.ospath('A/B2/E/alpha')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', alpha_path, lambda_path)
+
+ # commit copied tree containing a deleted file
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B2' : Item(verb='Adding'),
+ 'A/B2/lambda' : Item(verb='Deleting'),
+ 'A/B2/E/alpha' : Item(verb='Deleting'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ None)
+
+ # copy a tree
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ sbox.ospath('A/B'),
+ sbox.ospath('A/B3'))
+
+ # delete a directory
+ E_path = sbox.ospath('A/B3/E')
+ svntest.actions.run_and_verify_svn(None, [], 'rm', E_path)
+
+ # commit copied tree containing a deleted directory
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B3' : Item(verb='Adding'),
+ 'A/B3/E' : Item(verb='Deleting'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ None)
+
+
+#----------------------------------------------------------------------
+@Issues(931,932)
+def mv_and_revert_directory(sbox):
+ "move and revert a directory"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+ E_path = sbox.ospath('A/B/E')
+ F_path = sbox.ospath('A/B/F')
+ new_E_path = os.path.join(F_path, 'E')
+
+ # Issue 931: move failed to lock the directory being deleted
+ svntest.actions.run_and_verify_svn(None, [], 'move',
+ E_path, F_path)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/E/alpha', 'A/B/E/beta', status='D ')
+ expected_status.tweak('A/B/E', status='D ', moved_to='A/B/F/E')
+ expected_status.add({
+ 'A/B/F/E' : Item(status='A ', wc_rev='-', copied='+',
+ moved_from='A/B/E'),
+ 'A/B/F/E/alpha' : Item(status=' ', wc_rev='-', copied='+'),
+ 'A/B/F/E/beta' : Item(status=' ', wc_rev='-', copied='+'),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Issue 932: revert failed to lock the parent directory
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive',
+ new_E_path)
+ expected_status.remove('A/B/F/E', 'A/B/F/E/alpha', 'A/B/F/E/beta')
+ expected_status.tweak('A/B/E', moved_to=None)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+#----------------------------------------------------------------------
+# Issue 982. When copying a file with the executable bit set, the copied
+# file should also have its executable bit set.
+@Issue(982)
+@SkipUnless(svntest.main.is_posix_os)
+def copy_preserve_executable_bit(sbox):
+ "executable bit should be preserved when copying"
+
+ # Bootstrap
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Create two paths
+ newpath1 = sbox.ospath('newfile1')
+ newpath2 = sbox.ospath('newfile2')
+
+ # Create the first file.
+ svntest.main.file_append(newpath1, "a new file")
+ svntest.actions.run_and_verify_svn(None, [], 'add', newpath1)
+
+ mode1 = os.stat(newpath1)[stat.ST_MODE]
+
+ # Doing this to get the executable bit set on systems that support
+ # that -- the property itself is not the point.
+ svntest.actions.run_and_verify_svn(None, [], 'propset',
+ 'svn:executable', 'on', newpath1)
+
+ mode2 = os.stat(newpath1)[stat.ST_MODE]
+
+ if mode1 == mode2:
+ logger.warn("setting svn:executable did not change file's permissions")
+ raise svntest.Failure
+
+ # Commit the file
+ svntest.actions.run_and_verify_svn(None, [], 'ci',
+ '-m', 'create file and set svn:executable',
+ wc_dir)
+
+ # Copy the file
+ svntest.actions.run_and_verify_svn(None, [], 'cp', newpath1, newpath2)
+
+ mode3 = os.stat(newpath2)[stat.ST_MODE]
+
+ # The mode on the original and copied file should be identical
+ if mode2 != mode3:
+ logger.warn("permissions on the copied file are not identical to original file")
+ raise svntest.Failure
+
+#----------------------------------------------------------------------
+# Issue 1029, copy failed with a "working copy not locked" error
+@Issue(1029)
+def wc_to_repos(sbox):
+ "working-copy to repository copy"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ beta_path = sbox.ospath('A/B/E/beta')
+ beta2_url = sbox.repo_url + "/A/B/E/beta2"
+ H_path = sbox.ospath('A/D/H')
+ H2_url = sbox.repo_url + "/A/D/H2"
+
+ # modify some items to be copied
+ svntest.main.file_append(sbox.ospath('A/D/H/omega'),
+ "new otext\n")
+ svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'bar',
+ beta_path)
+
+ # copy a file
+ svntest.actions.run_and_verify_svn(None, [], '-m', 'fumble file',
+ 'copy', beta_path, beta2_url)
+ # and a directory
+ svntest.actions.run_and_verify_svn(None, [], '-m', 'fumble dir',
+ 'copy', H_path, H2_url)
+ # copy a file to a directory
+ svntest.actions.run_and_verify_svn(None, [], '-m', 'fumble file',
+ 'copy', beta_path, H2_url)
+
+ # update the working copy. post-update mereinfo elision will remove
+ # A/D/H2/beta's mergeinfo, leaving a local mod.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E/beta2' : Item(status='A '),
+ 'A/D/H2' : Item(status='A '),
+ 'A/D/H2/chi' : Item(status='A '),
+ 'A/D/H2/omega' : Item(status='A '),
+ 'A/D/H2/psi' : Item(status='A '),
+ 'A/D/H2/beta' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/D/H/omega',
+ contents="This is the file 'omega'.\nnew otext\n")
+ expected_disk.add({
+ 'A/B/E/beta2' : Item("This is the file 'beta'.\n"),
+ 'A/D/H2/chi' : Item("This is the file 'chi'.\n"),
+ 'A/D/H2/omega' : Item("This is the file 'omega'.\nnew otext\n"),
+ 'A/D/H2/psi' : Item("This is the file 'psi'.\n"),
+ 'A/D/H2/beta' : Item("This is the file 'beta'.\n"),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 4)
+ expected_status.add({
+ 'A/B/E/beta' : Item(status=' M', wc_rev=4),
+ 'A/D/H/omega' : Item(status='M ', wc_rev=4),
+ 'A/B/E/beta2' : Item(status=' ', wc_rev=4),
+ 'A/D/H2' : Item(status=' ', wc_rev=4),
+ 'A/D/H2/chi' : Item(status=' ', wc_rev=4),
+ 'A/D/H2/omega' : Item(status=' ', wc_rev=4),
+ 'A/D/H2/psi' : Item(status=' ', wc_rev=4),
+ 'A/D/H2/beta' : Item(status=' ', wc_rev=4),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+ # check local property was copied
+ svntest.actions.run_and_verify_svn(['bar\n'], [],
+ 'propget', 'foo',
+ beta_path + "2")
+
+#----------------------------------------------------------------------
+# Issue 1090: various use-cases of 'svn cp URL wc' where the
+# repositories might be different, or be the same repository.
+@Issues(1090, 1444, 3590)
+def repos_to_wc(sbox):
+ "repository to working-copy copy"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # We have a standard repository and working copy. Now we create a
+ # second repository with the same greek tree, but different UUID.
+ repo_dir = sbox.repo_dir
+ other_repo_dir, other_repo_url = sbox.add_repo_path('other')
+ svntest.main.copy_repos(repo_dir, other_repo_dir, 1, 1)
+
+ # URL->wc copy:
+ # copy a file and a directory from the same repository.
+ # we should get some scheduled additions *with history*.
+ E_url = sbox.repo_url + "/A/B/E"
+ pi_url = sbox.repo_url + "/A/D/G/pi"
+ pi_path = sbox.ospath('pi')
+
+ svntest.actions.run_and_verify_svn(None, [], 'copy', E_url, wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'copy', pi_url, wc_dir)
+
+ # Extra test: modify file ASAP to check there was a timestamp sleep
+ svntest.main.file_append(pi_path, 'zig\n')
+
+ expected_output = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_output.add({
+ 'pi' : Item(status='A ', copied='+', wc_rev='-'),
+ 'E' : Item(status='A ', copied='+', wc_rev='-'),
+ 'E/alpha' : Item(status=' ', copied='+', wc_rev='-'),
+ 'E/beta' : Item(status=' ', copied='+', wc_rev='-'),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_output)
+
+ # Modification will only show up if timestamps differ
+ exit_code, out, err = svntest.main.run_svn(None, 'diff', pi_path)
+ if err or not out:
+ logger.warn("diff failed")
+ raise svntest.Failure
+ for line in out:
+ if line == '+zig\n': # Crude check for diff-like output
+ break
+ else:
+ logger.warn("diff output incorrect %s" % out)
+ raise svntest.Failure
+
+ # Revert everything and verify.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+
+ svntest.main.safe_rmtree(sbox.ospath('E'))
+
+ expected_output = svntest.actions.get_virginal_state(wc_dir, 1)
+ svntest.actions.run_and_verify_status(wc_dir, expected_output)
+
+ # URL->wc copy:
+ # Copy an empty directory from the same repository, see issue #1444.
+ C_url = sbox.repo_url + "/A/C"
+
+ svntest.actions.run_and_verify_svn(None, [], 'copy', C_url, wc_dir)
+
+ expected_output = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_output.add({
+ 'C' : Item(status='A ', copied='+', wc_rev='-'),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_output)
+
+ # Revert everything and verify.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+
+ svntest.main.safe_rmtree(sbox.ospath('C'))
+
+ expected_output = svntest.actions.get_virginal_state(wc_dir, 1)
+ svntest.actions.run_and_verify_status(wc_dir, expected_output)
+
+ # URL->wc copy:
+ # copy a file and a directory from a foreign repository.
+ # we should get some scheduled additions *without history*.
+ E_url = other_repo_url + "/A/B/E"
+ pi_url = other_repo_url + "/A/D/G/pi"
+
+ # Finally, for 1.8 we allow this copy to succeed.
+ expected_output = svntest.verify.UnorderedOutput([
+ '--- Copying from foreign repository URL \'%s\':\n' % E_url,
+ 'A %s\n' % sbox.ospath('E'),
+ 'A %s\n' % sbox.ospath('E/beta'),
+ 'A %s\n' % sbox.ospath('E/alpha'),
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'copy', E_url, wc_dir)
+
+ expected_output = [
+ '--- Copying from foreign repository URL \'%s\':\n' % pi_url,
+ 'A %s\n' % sbox.ospath('pi'),
+ ]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'copy', pi_url, wc_dir)
+
+ expected_output = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_output.add({
+ 'pi' : Item(status='A ', wc_rev='0'),
+ 'E' : Item(status='A ', wc_rev='0'),
+ 'E/beta' : Item(status='A ', wc_rev='0'),
+ 'E/alpha' : Item(status='A ', wc_rev='0'),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_output)
+
+ # Revert everything and verify.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+ expected_output = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ # URL->wc copy:
+ # Copy a directory to a pre-existing WC directory.
+ # The source directory should be copied *under* the target directory.
+ B_url = sbox.repo_url + "/A/B"
+ D_dir = sbox.ospath('A/D')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', B_url, D_dir)
+
+ expected_output = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_output.add({
+ 'A/D/B' : Item(status='A ', copied='+', wc_rev='-'),
+ 'A/D/B/lambda' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D/B/E' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D/B/E/beta' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D/B/E/alpha' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D/B/F' : Item(status=' ', copied='+', wc_rev='-'),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_output)
+
+ # Validate the mergeinfo of the copy destination (we expect none)
+ svntest.actions.run_and_verify_svn([], '.*W200017: Property.*not found',
+ 'propget', SVN_PROP_MERGEINFO,
+ os.path.join(D_dir, 'B'))
+
+#----------------------------------------------------------------------
+# Issue 1084: ra_svn move/copy bug
+@Issue(1084)
+def copy_to_root(sbox):
+ 'copy item to root of repository'
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ root = sbox.repo_url
+ mu = root + '/A/mu'
+
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ '-m', '',
+ mu, root)
+
+ # Update to HEAD, and check to see if the files really were copied in the
+ # repo
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'mu': Item(status='A '),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'mu': Item(contents="This is the file 'mu'.\n")
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'mu': Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+#----------------------------------------------------------------------
+@Issue(1367)
+def url_copy_parent_into_child(sbox):
+ "copy URL URL/subdir"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ B_url = sbox.repo_url + "/A/B"
+ F_url = sbox.repo_url + "/A/B/F"
+
+ # Issue 1367 parent/child URL-to-URL was rejected.
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 2.\n'], [],
+ 'cp',
+ '-m', 'a can of worms',
+ B_url, F_url)
+
+ # Do an update to verify the copy worked
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/F/B' : Item(status='A '),
+ 'A/B/F/B/E' : Item(status='A '),
+ 'A/B/F/B/E/alpha' : Item(status='A '),
+ 'A/B/F/B/E/beta' : Item(status='A '),
+ 'A/B/F/B/F' : Item(status='A '),
+ 'A/B/F/B/lambda' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/B/F/B' : Item(),
+ 'A/B/F/B/E' : Item(),
+ 'A/B/F/B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'A/B/F/B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'A/B/F/B/F' : Item(),
+ 'A/B/F/B/lambda' : Item("This is the file 'lambda'.\n"),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'A/B/F/B' : Item(status=' ', wc_rev=2),
+ 'A/B/F/B/E' : Item(status=' ', wc_rev=2),
+ 'A/B/F/B/E/alpha' : Item(status=' ', wc_rev=2),
+ 'A/B/F/B/E/beta' : Item(status=' ', wc_rev=2),
+ 'A/B/F/B/F' : Item(status=' ', wc_rev=2),
+ 'A/B/F/B/lambda' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+#----------------------------------------------------------------------
+@Issue(1367)
+def wc_copy_parent_into_child(sbox):
+ "copy WC URL/subdir"
+
+ sbox.build(create_wc = False)
+ wc_dir = sbox.wc_dir
+
+ B_url = sbox.repo_url + "/A/B"
+ F_B_url = sbox.repo_url + "/A/B/F/B"
+
+ # Want a smaller WC
+ svntest.main.safe_rmtree(wc_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ B_url, wc_dir)
+
+ # Issue 1367: A) copying '.' to URL failed with a parent/child
+ # error, and also B) copying root of a working copy attempted to
+ # lock the non-working copy parent directory.
+ was_cwd = os.getcwd()
+ os.chdir(wc_dir)
+
+ svntest.actions.run_and_verify_svn(['Adding copy of .\n',
+ 'Committing transaction...\n',
+ 'Committed revision 2.\n'], [],
+ 'cp',
+ '-m', 'a larger can',
+ '.', F_B_url)
+
+ os.chdir(was_cwd)
+
+ # Do an update to verify the copy worked
+ expected_output = svntest.wc.State(wc_dir, {
+ 'F/B' : Item(status='A '),
+ 'F/B/E' : Item(status='A '),
+ 'F/B/E/alpha' : Item(status='A '),
+ 'F/B/E/beta' : Item(status='A '),
+ 'F/B/F' : Item(status='A '),
+ 'F/B/lambda' : Item(status='A '),
+ })
+ expected_disk = svntest.wc.State('', {
+ 'E' : Item(),
+ 'E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'E/beta' : Item("This is the file 'beta'.\n"),
+ 'F' : Item(),
+ 'lambda' : Item("This is the file 'lambda'.\n"),
+ 'F/B' : Item(),
+ 'F/B/E' : Item(),
+ 'F/B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'F/B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'F/B/F' : Item(),
+ 'F/B/lambda' : Item("This is the file 'lambda'.\n"),
+ })
+ expected_status = svntest.wc.State(wc_dir, {
+ '' : Item(status=' ', wc_rev=2),
+ 'E' : Item(status=' ', wc_rev=2),
+ 'E/alpha' : Item(status=' ', wc_rev=2),
+ 'E/beta' : Item(status=' ', wc_rev=2),
+ 'F' : Item(status=' ', wc_rev=2),
+ 'lambda' : Item(status=' ', wc_rev=2),
+ 'F/B' : Item(status=' ', wc_rev=2),
+ 'F/B/E' : Item(status=' ', wc_rev=2),
+ 'F/B/E/alpha' : Item(status=' ', wc_rev=2),
+ 'F/B/E/beta' : Item(status=' ', wc_rev=2),
+ 'F/B/F' : Item(status=' ', wc_rev=2),
+ 'F/B/lambda' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+#----------------------------------------------------------------------
+# Issue 1419: at one point ra_neon->get_uuid() was failing on a
+# non-existent public URL, which prevented us from resurrecting files
+# (svn cp -rOLD URL wc).
+@Issue(1419)
+def resurrect_deleted_file(sbox):
+ "resurrect a deleted file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Delete a file in the repository via immediate commit
+ rho_url = sbox.repo_url + '/A/D/G/rho'
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', rho_url, '-m', 'rev 2')
+
+ # Update the wc to HEAD (r2)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/rho' : Item(status='D '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/D/G/rho')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.remove('A/D/G/rho')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+ # repos->wc copy, to resurrect deleted file.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', rho_url + '@1', wc_dir)
+
+ # status should now show the file scheduled for addition-with-history
+ expected_status.add({
+ 'rho' : Item(status='A ', copied='+', wc_rev='-'),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#-------------------------------------------------------------
+# Regression tests for Issue #1297:
+# svn diff failed after a repository to WC copy of a single file
+# This test checks just that.
+@Issue(1297)
+def diff_repos_to_wc_copy(sbox):
+ "copy file from repos to working copy and run diff"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ iota_repos_path = sbox.repo_url + '/iota'
+ target_wc_path = sbox.ospath('new_file')
+
+ # Copy a file from the repository to the working copy.
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ iota_repos_path, target_wc_path)
+
+ # Run diff.
+ svntest.actions.run_and_verify_svn(None, [], 'diff', wc_dir)
+
+
+#-------------------------------------------------------------
+@Issue(1473)
+def repos_to_wc_copy_eol_keywords(sbox):
+ "repos->WC copy with keyword or eol property set"
+
+ # See issue #1473: repos->wc copy would seg fault if svn:keywords or
+ # svn:eol were set on the copied file, because we'd be querying an
+ # entry for keyword values when the entry was still null (because
+ # not yet been fully installed in the wc).
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ iota_repos_path = sbox.repo_url + '/iota'
+ iota_wc_path = sbox.ospath('iota')
+ target_wc_path = sbox.ospath('new_file')
+
+ # Modify iota to make it checkworthy.
+ svntest.main.file_write(iota_wc_path,
+ "Hello\nSubversion\n$LastChangedRevision$\n",
+ "ab")
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'svn:eol-style',
+ 'CRLF', iota_wc_path)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'svn:keywords',
+ 'Rev', iota_wc_path)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'commit', '-m', 'log msg',
+ wc_dir)
+
+ # Copy a file from the repository to the working copy.
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ iota_repos_path, target_wc_path)
+
+ # The original bug was that the copy would seg fault. So we test
+ # that the copy target exists now; if it doesn't, it's probably
+ # because of the segfault. Note that the crash would be independent
+ # of whether there are actually any line breaks or keywords in the
+ # file's contents -- the mere existence of the property would
+ # trigger the bug.
+ if not os.path.exists(target_wc_path):
+ raise svntest.Failure
+
+ # Okay, if we got this far, we might as well make sure that the
+ # translations/substitutions were done correctly:
+ f = open(target_wc_path, "rb")
+ raw_contents = f.read()
+ f.seek(0, 0)
+ line_contents = f.readlines()
+ f.close()
+
+ if re.match(b'[^\\r]\\n', raw_contents):
+ raise svntest.Failure
+
+ if not re.match(b'.*\$LastChangedRevision:\s*\d+\s*\$', line_contents[3]):
+ raise svntest.Failure
+
+#-------------------------------------------------------------
+# Regression test for revision 7331, with commented-out parts for a further
+# similar bug.
+
+def revision_kinds_local_source(sbox):
+ "revision-kind keywords with non-URL source"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ mu_path = sbox.ospath('A/mu')
+
+ # Make a file with different content in each revision and WC; BASE != HEAD.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'), })
+ svntest.main.file_append(mu_path, "New r2 text.\n")
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, None)
+ svntest.main.file_append(mu_path, "New r3 text.\n")
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, None)
+ svntest.actions.run_and_verify_svn(None, [], 'up', '-r2', mu_path)
+ svntest.main.file_append(mu_path, "Working copy.\n")
+
+ r1 = "This is the file 'mu'.\n"
+ r2 = r1 + "New r2 text.\n"
+ r3 = r2 + "New r3 text.\n"
+ rWC = r2 + "Working copy.\n"
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu', contents=rWC)
+
+ # Test the various revision-kind keywords, and none.
+ sub_tests = [ ('file0', 2, rWC, None),
+ ('file1', 3, r3, 'HEAD'),
+ ('file2', 2, r2, 'BASE'),
+ # ('file3', 2, r2, 'COMMITTED'),
+ # ('file4', 1, r1, 'PREV'),
+ ]
+
+ for dst, from_rev, text, peg_rev in sub_tests:
+ dst_path = os.path.join(wc_dir, dst)
+ if peg_rev is None:
+ svntest.actions.run_and_verify_svn(None, [], "copy",
+ mu_path, dst_path)
+ else:
+ svntest.actions.run_and_verify_svn(None, [], "copy",
+ mu_path + "@" + peg_rev, dst_path)
+ expected_disk.add({ dst: Item(contents=text) })
+
+ # Check that the copied-from revision == from_rev.
+ exit_code, output, errput = svntest.main.run_svn(None, "info", dst_path)
+ for line in output:
+ if line.rstrip() == "Copied From Rev: " + str(from_rev):
+ break
+ else:
+ logger.warn("%s should have been copied from revision %s" % (dst, from_rev))
+ raise svntest.Failure
+
+ # Check that the new files have the right contents
+ svntest.actions.verify_disk(wc_dir, expected_disk.old_tree(), True)
+
+
+#-------------------------------------------------------------
+# Regression test for issue 1581.
+@Issue(1581)
+def copy_over_missing_file(sbox):
+ "copy over a missing file"
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ mu_path = sbox.ospath('A/mu')
+ iota_path = sbox.ospath('iota')
+ iota_url = sbox.repo_url + "/iota"
+
+ # Make the target missing.
+ os.remove(mu_path)
+
+ # Try both wc->wc copy and repos->wc copy, expect failures:
+ svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
+ 'cp', iota_path, mu_path)
+
+ svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
+ 'cp', iota_url, mu_path)
+
+ # Make sure that the working copy is not corrupted:
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_output = svntest.wc.State(wc_dir, {'A/mu' : Item(verb='Restored')})
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+
+
+#----------------------------------------------------------------------
+# Regression test for issue 1634
+@Issue(1634)
+def repos_to_wc_1634(sbox):
+ "copy a deleted directory back from the repos"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # First delete a subdirectory and commit.
+ E_path = sbox.ospath('A/B/E')
+ svntest.actions.run_and_verify_svn(None, [], 'delete', E_path)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(verb='Deleting'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Now copy the directory back.
+ E_url = sbox.repo_url + "/A/B/E@1"
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', E_url, E_path)
+ expected_status.add({
+ 'A/B/E' : Item(status='A ', copied='+', wc_rev='-'),
+ 'A/B/E/alpha' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B/E/beta' : Item(status=' ', copied='+', wc_rev='-'),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'A/B/E' : Item(status='A ', copied='+', wc_rev='-'),
+ 'A/B/E/alpha' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B/E/beta' : Item(status=' ', copied='+', wc_rev='-'),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+# Regression test for issue 1814
+@Issue(1814)
+def double_uri_escaping_1814(sbox):
+ "check for double URI escaping in svn ls -R"
+
+ sbox.build(create_wc = False)
+
+ base_url = sbox.repo_url + '/base'
+
+ # rev. 2
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mkdir', '-m', 'mybase',
+ base_url)
+
+ orig_url = base_url + '/foo%20bar'
+
+ # rev. 3
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mkdir', '-m', 'r1',
+ orig_url)
+ orig_rev = 3
+
+ # rev. 4
+ new_url = base_url + '/foo_bar'
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mv', '-m', 'r2',
+ orig_url, new_url)
+
+ # This had failed with ra_neon because "foo bar" would be double-encoded
+ # "foo bar" ==> "foo%20bar" ==> "foo%2520bar"
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ls', ('-r'+str(orig_rev)),
+ '-R', base_url)
+
+
+#----------------------------------------------------------------------
+# Regression test for issue 2404
+@Issue(2404)
+def wc_to_wc_copy_between_different_repos(sbox):
+ "wc to wc copy attempts between different repos"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ sbox2 = sbox.clone_dependent()
+ sbox2.build()
+ wc2_dir = sbox2.wc_dir
+
+ # Attempt a copy between different repositories.
+ exit_code, out, err = svntest.main.run_svn(1, 'cp',
+ os.path.join(wc2_dir, 'A'),
+ sbox.ospath('A/B'))
+ for line in err:
+ if line.find("it is not from repository") != -1:
+ break
+ else:
+ raise svntest.Failure
+
+#----------------------------------------------------------------------
+# Regression test for issues 2101, 2020 and 3776
+@Issues(2101,2020,3776)
+def wc_to_wc_copy_deleted(sbox):
+ "wc to wc copy with presence=not-present items"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ B_path = sbox.ospath('A/B')
+ B2_path = sbox.ospath('A/B2')
+
+ # Schedule for delete
+ svntest.actions.run_and_verify_svn(None, [], 'rm',
+ os.path.join(B_path, 'E', 'alpha'),
+ os.path.join(B_path, 'lambda'),
+ os.path.join(B_path, 'F'))
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/E/alpha', 'A/B/lambda', 'A/B/F', status='D ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Commit to get state not-present
+ expected_status.remove('A/B/E/alpha', 'A/B/lambda', 'A/B/F')
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E/alpha' : Item(verb='Deleting'),
+ 'A/B/lambda' : Item(verb='Deleting'),
+ 'A/B/F' : Item(verb='Deleting'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Copy including stuff in state not-present
+ svntest.actions.run_and_verify_svn(None, [], 'copy', B_path, B2_path)
+ expected_status.add({
+ 'A/B2' : Item(status='A ', wc_rev='-', copied='+'),
+ 'A/B2/E' : Item(status=' ', wc_rev='-', copied='+'),
+ 'A/B2/E/beta' : Item(status=' ', wc_rev='-', copied='+'),
+ 'A/B2/E/alpha' : Item(status='D ', wc_rev='-', copied='+'),
+ 'A/B2/lambda' : Item(status='D ', wc_rev='-', copied='+'),
+ 'A/B2/F' : Item(status='D ', wc_rev='-', copied='+'),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Reverting the copied not-present is a no-op.
+ svntest.main.run_svn(1, 'revert', os.path.join(B2_path, 'F'))
+ svntest.main.run_svn(1, 'revert', os.path.join(B2_path, 'lambda'))
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Revert the entire copy including the schedule not-present bits
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive',
+ B2_path)
+ expected_status.remove('A/B2',
+ 'A/B2/E',
+ 'A/B2/E/beta',
+ 'A/B2/E/alpha',
+ 'A/B2/lambda',
+ 'A/B2/F')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+ svntest.main.safe_rmtree(B2_path)
+
+ # Copy again and commit
+ svntest.actions.run_and_verify_svn(None, [], 'copy', B_path, B2_path)
+
+ expected_status.add({
+ 'A/B2' : Item(status='A ', copied='+', wc_rev='-'),
+ 'A/B2/lambda' : Item(status='D ', copied='+', wc_rev='-'),
+ 'A/B2/F' : Item(status='D ', copied='+', wc_rev='-'),
+ 'A/B2/E' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B2/E/alpha' : Item(status='D ', copied='+', wc_rev='-'),
+ 'A/B2/E/beta' : Item(status=' ', copied='+', wc_rev='-')
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_status.remove('A/B2/lambda', 'A/B2/F', 'A/B2/E/alpha')
+ expected_status.tweak('A/B2', 'A/B2/E', 'A/B2/E/beta', status=' ',
+ copied=None, wc_rev=3)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B2' : Item(verb='Adding'),
+ 'A/B2/E/alpha' : Item(verb='Deleting'),
+ 'A/B2/lambda' : Item(verb='Deleting'),
+ 'A/B2/F' : Item(verb='Deleting'),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+# Test for copy into a non-existent URL path
+def url_to_non_existent_url_path(sbox):
+ "svn cp src-URL non-existent-URL-path"
+
+ sbox.build(create_wc = False)
+
+ dirURL1 = sbox.repo_url + "/A/B/E"
+ dirURL2 = sbox.repo_url + "/G/C/E/I"
+
+ # Look for both possible versions of the error message, as the DAV
+ # error is worded differently from that of other RA layers.
+ msg = ".*: (" + \
+ "|".join(["Path 'G(/C/E)?' not present",
+ ".*G(/C/E)?' path not found",
+ "File not found.*'/G/C/E/I'",
+ ]) + ")"
+
+ # Expect failure on 'svn cp SRC DST' where one or more ancestor
+ # directories of DST do not exist
+ exit_code, out, err = svntest.main.run_svn(1, 'cp', dirURL1, dirURL2,
+ '-m', 'fooogle')
+ for err_line in err:
+ if re.match (msg, err_line):
+ break
+ else:
+ logger.warn("message \"%s\" not found in error output: %s" % (msg, err))
+ raise svntest.Failure
+
+
+#----------------------------------------------------------------------
+# Test for a copying (URL to URL) an old rev of a deleted file in a
+# deleted directory.
+def non_existent_url_to_url(sbox):
+ "svn cp oldrev-of-deleted-URL URL"
+
+ sbox.build(create_wc = False)
+
+ adg_url = sbox.repo_url + '/A/D/G'
+ pi_url = sbox.repo_url + '/A/D/G/pi'
+ new_url = sbox.repo_url + '/newfile'
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'delete',
+ adg_url, '-m', '')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy',
+ pi_url + '@1', new_url,
+ '-m', '')
+
+#----------------------------------------------------------------------
+def old_dir_url_to_url(sbox):
+ "test URL to URL copying edge case"
+
+ sbox.build(create_wc = False)
+
+ adg_url = sbox.repo_url + '/A/D/G'
+ pi_url = sbox.repo_url + '/A/D/G/pi'
+ iota_url = sbox.repo_url + '/iota'
+ new_url = sbox.repo_url + '/newfile'
+
+ # Delete a directory
+ svntest.actions.run_and_verify_svn(None, [],
+ 'delete',
+ adg_url, '-m', '')
+
+ # Copy a file to where the directory used to be
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy',
+ iota_url, adg_url,
+ '-m', '')
+
+ # Try copying a file that was in the deleted directory that is now a
+ # file
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy',
+ pi_url + '@1', new_url,
+ '-m', '')
+
+
+
+#----------------------------------------------------------------------
+# Test fix for issue 2224 - copying wc dir to itself causes endless
+# recursion
+@Issue(2224)
+def wc_copy_dir_to_itself(sbox):
+ "copy wc dir to itself"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+ dnames = ['A','A/B']
+
+ for dirname in dnames:
+ dir_path = os.path.join(wc_dir, dirname)
+
+ # try to copy dir to itself
+ svntest.actions.run_and_verify_svn([],
+ '.*Cannot copy .* into its own child.*',
+ 'copy', dir_path, dir_path)
+
+
+#----------------------------------------------------------------------
+@Issue(2153)
+def mixed_wc_to_url(sbox):
+ "copy a complex mixed-rev wc"
+
+ # For issue 2153.
+ #
+ # Copy a mixed-revision wc (that also has some uncommitted local
+ # mods, and an entry marked as 'deleted') to a URL. Make sure the
+ # copy gets the uncommitted mods, and does not contain the deleted
+ # file.
+
+ sbox.build()
+
+ wc_dir = sbox.wc_dir
+ Z_url = sbox.repo_url + '/A/D/Z'
+ Z2_url = sbox.repo_url + '/A/D/Z2'
+ G_path = sbox.ospath('A/D/G')
+ B_path = sbox.ospath('A/B')
+ X_path = sbox.ospath('A/D/G/X')
+ Y_path = sbox.ospath('A/D/G/Y')
+ E_path = sbox.ospath('A/D/G/X/E')
+ alpha_path = sbox.ospath('A/D/G/X/E/alpha')
+ pi_path = sbox.ospath('A/D/G/pi')
+ rho_path = sbox.ospath('A/D/G/rho')
+
+ # Remove A/D/G/pi, then commit that removal.
+ svntest.actions.run_and_verify_svn(None, [], 'rm', pi_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', "Delete pi.", wc_dir)
+
+ # Make a modification to A/D/G/rho, then commit that modification.
+ svntest.main.file_append(rho_path, "\nFirst modification to rho.\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', "Modify rho.", wc_dir)
+
+ # Make another modification to A/D/G/rho, but don't commit it.
+ svntest.main.file_append(rho_path, "Second modification to rho.\n")
+
+ # Copy into the source, delete part of the copy, add a non-copied directory
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', B_path, X_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', alpha_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mkdir', Y_path)
+
+  # Now copy local A/D/G to create new directory A/D/Z in the repository.
+
+ expected_status = svntest.wc.State(G_path, {
+ '' : Item(status=' ', wc_rev='1'),
+ 'X' : Item(status='A ', copied='+', wc_rev='-'),
+ 'X/F' : Item(status=' ', copied='+', wc_rev='-'),
+ 'X/E' : Item(status=' ', copied='+', wc_rev='-'),
+ 'X/E/alpha' : Item(status='D ', copied='+', wc_rev='-'),
+ 'X/E/beta' : Item(status=' ', copied='+', wc_rev='-'),
+ 'X/lambda' : Item(status=' ', copied='+', wc_rev='-'),
+ 'Y' : Item(status='A ', wc_rev='-'),
+ 'rho' : Item(status='M ', wc_rev='3'),
+ 'tau' : Item(status=' ', wc_rev='1'),
+ })
+
+ svntest.actions.run_and_verify_status(G_path, expected_status)
+
+ expected_output = svntest.verify.UnorderedOutput([
+ 'Adding copy of %s\n' % sbox.ospath('A/D/G'),
+ 'Adding copy of %s\n' % sbox.ospath('A/D/G/X'),
+ 'Deleting copy of %s\n' % sbox.ospath('A/D/G/X/E/alpha'),
+ 'Adding copy of %s\n' % sbox.ospath('A/D/G/Y'),
+ 'Deleting copy of %s\n' % sbox.ospath('A/D/G/pi'),
+ 'Replacing copy of %s\n' % sbox.ospath('A/D/G/rho'),
+ 'Transmitting file data .done\n',
+ 'Committing transaction...\n',
+ 'Committed revision 4.\n',
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'cp', '-m', "Make a copy.",
+ G_path, Z_url)
+ expected_output = svntest.verify.UnorderedOutput([
+ 'A + A/D/Z/\n',
+ ' (from A/D/G/:r1)\n',
+ 'A + A/D/Z/X/\n',
+ ' (from A/B/:r1)\n',
+ 'D A/D/Z/X/E/alpha\n',
+ 'A A/D/Z/Y/\n',
+ 'D A/D/Z/pi\n',
+ 'D A/D/Z/rho\n',
+ 'A + A/D/Z/rho\n',
+ ' (from A/D/G/rho:r3)\n',
+ ])
+ svntest.actions.run_and_verify_svnlook(expected_output, [],
+ 'changed', sbox.repo_dir,
+ '--copy-info')
+
+ # Copy from copied source
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', '-m', "Make a copy.",
+ E_path, Z2_url)
+ expected_output = svntest.verify.UnorderedOutput([
+ 'A + A/D/Z2/\n',
+ ' (from A/B/E/:r1)\n',
+ 'D A/D/Z2/alpha\n',
+ ])
+ svntest.actions.run_and_verify_svnlook(expected_output, [],
+ 'changed', sbox.repo_dir,
+ '--copy-info')
+
+ # Check out A/D/Z. If it has pi, that's a bug; or if its rho does
+ # not have the second local mod, that's also a bug.
+ svntest.main.safe_rmtree(wc_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'co', Z_url, wc_dir)
+
+ if os.path.exists(sbox.ospath('pi')):
+ raise svntest.Failure("Path 'pi' exists but should be gone.")
+
+ fp = open(sbox.ospath('rho'), 'r')
+ found_it = 0
+ for line in fp.readlines():
+ if re.match("^Second modification to rho.", line):
+ found_it = 1
+ if not found_it:
+ raise svntest.Failure("The second modification to rho didn't make it.")
+
+
+#----------------------------------------------------------------------
+
+# Issue 845 and 1516: WC replacement of files requires
+# a second text-base and prop-base
+@Issues(845,1516)
+def wc_copy_replacement(sbox):
+ "svn cp PATH PATH replace file"
+
+ copy_replace(sbox, 1)
+
+def wc_copy_replace_with_props(sbox):
+ "svn cp PATH PATH replace file with props"
+
+ copy_replace_with_props(sbox, 1)
+
+
+def repos_to_wc_copy_replacement(sbox):
+ "svn cp URL PATH replace file"
+
+ copy_replace(sbox, 0)
+
+def repos_to_wc_copy_replace_with_props(sbox):
+ "svn cp URL PATH replace file with props"
+
+ copy_replace_with_props(sbox, 0)
+
+# See also delete_replace_delete() which does the same for a directory.
+def delete_replaced_file(sbox):
+ "delete a file scheduled for replacement"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ # File scheduled for deletion.
+ rho_path = sbox.ospath('A/D/G/rho')
+ svntest.actions.run_and_verify_svn(None, [], 'rm', rho_path)
+
+ # Status before attempting copies
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/G/rho', status='D ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Copy 'pi' over 'rho' with history.
+ pi_src = sbox.ospath('A/D/G/pi')
+ svntest.actions.run_and_verify_svn(None, [], 'cp', pi_src, rho_path)
+
+ # Check that file copied.
+ expected_status.tweak('A/D/G/rho', status='R ', copied='+', wc_rev='-')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Now delete replaced file.
+ svntest.actions.run_and_verify_svn(None, [], 'rm',
+ '--force', rho_path)
+
+ # Verify status after deletion.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/G/rho', status='D ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+@Issue(2436)
+def mv_unversioned_file(sbox):
+ "move an unversioned file"
+ # Issue #2436: Attempting to move an unversioned file would seg fault.
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ unver_path_1 = sbox.ospath('unversioned1')
+ dest_path_1 = sbox.ospath('dest')
+ svntest.main.file_append(unver_path_1, "an unversioned file")
+
+ unver_path_2 = sbox.ospath('A/unversioned2')
+ dest_path_2 = sbox.ospath('A/dest_forced')
+ svntest.main.file_append(unver_path_2, "another unversioned file")
+
+ # Try to move an unversioned file.
+ svntest.actions.run_and_verify_svn(None,
+ ".*unversioned1' " +
+ "(does not exist|is not under version control)",
+ 'mv', unver_path_1, dest_path_1)
+
+ # Try to forcibly move an unversioned file.
+ svntest.actions.run_and_verify_svn(None,
+ ".*unversioned2.* " +
+ "(does not exist|is not under version control)",
+ 'mv',
+ unver_path_2, dest_path_2)
+
+@Issue(2435)
+def force_move(sbox):
+ "'move' should not lose local mods"
+ # Issue #2435: 'svn move' / 'svn mv' can lose local modifications.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ file_name = "iota"
+ file_path = os.path.join(wc_dir, file_name)
+
+ # modify the content
+ file_handle = open(file_path, "a")
+ file_handle.write("Added contents\n")
+ file_handle.close()
+ expected_file_content = [ "This is the file 'iota'.\n",
+ "Added contents\n",
+ ]
+
+ # check for the new content
+ file_handle = open(file_path, "r")
+ modified_file_content = file_handle.readlines()
+ file_handle.close()
+ if modified_file_content != expected_file_content:
+ raise svntest.Failure("Test setup failed. Incorrect file contents.")
+
+ # force move the file
+ move_output = [ "A dest\n",
+ "D iota\n",
+ ]
+ was_cwd = os.getcwd()
+ os.chdir(wc_dir)
+
+ svntest.actions.run_and_verify_svn(move_output,
+ [],
+ 'move',
+ file_name, "dest")
+ os.chdir(was_cwd)
+
+ # check for the new content
+ file_handle = open(sbox.ospath('dest'), "r")
+ modified_file_content = file_handle.readlines()
+ file_handle.close()
+  # Error if we don't find the modified contents...
+ if modified_file_content != expected_file_content:
+ raise svntest.Failure("File modifications were lost on 'move'")
+
+ # Commit the move and make sure the new content actually reaches
+ # the repository.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota': Item(verb='Deleting'),
+ 'dest': Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove("iota")
+ expected_status.add({
+ 'dest': Item(status=' ', wc_rev='2'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+ svntest.actions.run_and_verify_svn(expected_file_content, [],
+ 'cat',
+ sbox.repo_url + '/dest')
+
+
+def copy_copied_file_and_dir(sbox):
+ "copy a copied file and dir"
+ # Improve support for copy and move
+ # Allow copy of copied items without a commit between
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ rho_path = sbox.ospath('A/D/G/rho')
+ rho_copy_path_1 = sbox.ospath('A/D/rho_copy_1')
+ rho_copy_path_2 = sbox.ospath('A/B/F/rho_copy_2')
+
+ # Copy A/D/G/rho to A/D/rho_copy_1
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ rho_path, rho_copy_path_1)
+
+ # Copy the copied file: A/D/rho_copy_1 to A/B/F/rho_copy_2
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ rho_copy_path_1, rho_copy_path_2)
+
+ E_path = sbox.ospath('A/B/E')
+ E_path_copy_1 = sbox.ospath('A/B/F/E_copy_1')
+ E_path_copy_2 = sbox.ospath('A/D/G/E_copy_2')
+
+ # Copy A/B/E to A/B/F/E_copy_1
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ E_path, E_path_copy_1)
+
+ # Copy the copied dir: A/B/F/E_copy_1 to A/D/G/E_copy_2
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ E_path_copy_1, E_path_copy_2)
+
+ # Created expected output tree for 'svn ci':
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/rho_copy_1' : Item(verb='Adding'),
+ 'A/B/F/rho_copy_2' : Item(verb='Adding'),
+ 'A/B/F/E_copy_1/' : Item(verb='Adding'),
+ 'A/D/G/E_copy_2/' : Item(verb='Adding'),
+ })
+
+ # Create expected status tree
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/D/rho_copy_1' : Item(status=' ', wc_rev=2),
+ 'A/B/F/rho_copy_2' : Item(status=' ', wc_rev=2),
+ 'A/B/F/E_copy_1' : Item(status=' ', wc_rev=2),
+ 'A/B/F/E_copy_1/alpha' : Item(status=' ', wc_rev=2),
+ 'A/B/F/E_copy_1/beta' : Item(status=' ', wc_rev=2),
+ 'A/D/G/E_copy_2' : Item(status=' ', wc_rev=2),
+ 'A/D/G/E_copy_2/alpha' : Item(status=' ', wc_rev=2),
+ 'A/D/G/E_copy_2/beta' : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+
+def move_copied_file_and_dir(sbox):
+ "move a copied file and dir"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ rho_path = sbox.ospath('A/D/G/rho')
+ rho_copy_path = sbox.ospath('A/D/rho_copy')
+ rho_copy_move_path = sbox.ospath('A/B/F/rho_copy_moved')
+
+ # Copy A/D/G/rho to A/D/rho_copy
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ rho_path, rho_copy_path)
+
+ # Move the copied file: A/D/rho_copy to A/B/F/rho_copy_moved
+ svntest.actions.run_and_verify_svn(None, [], 'mv',
+ rho_copy_path, rho_copy_move_path)
+
+ E_path = sbox.ospath('A/B/E')
+ E_path_copy = sbox.ospath('A/B/F/E_copy')
+ E_path_copy_move = sbox.ospath('A/D/G/E_copy_moved')
+
+ # Copy A/B/E to A/B/F/E_copy
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ E_path, E_path_copy)
+
+ # Move the copied file: A/B/F/E_copy to A/D/G/E_copy_moved
+ svntest.actions.run_and_verify_svn(None, [], 'mv',
+ E_path_copy, E_path_copy_move)
+
+ # Created expected output tree for 'svn ci':
+  # Since we are moving items that were only *scheduled* for addition,
+  # we expect to see only additions when checking in, rather than a
+  # deletion/addition pair.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/F/rho_copy_moved' : Item(verb='Adding'),
+ 'A/D/G/E_copy_moved/' : Item(verb='Adding'),
+ })
+
+ # Create expected status tree
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B/F/rho_copy_moved' : Item(status=' ', wc_rev=2),
+ 'A/D/G/E_copy_moved' : Item(status=' ', wc_rev=2),
+ 'A/D/G/E_copy_moved/alpha' : Item(status=' ', wc_rev=2),
+ 'A/D/G/E_copy_moved/beta' : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+
+def move_moved_file_and_dir(sbox):
+ "move a moved file and dir"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ rho_path = sbox.ospath('A/D/G/rho')
+ rho_move_path = sbox.ospath('A/D/rho_moved')
+ rho_move_moved_path = sbox.ospath('A/B/F/rho_move_moved')
+
+ # Move A/D/G/rho to A/D/rho_moved
+ svntest.actions.run_and_verify_svn(None, [], 'mv',
+ rho_path, rho_move_path)
+
+ # Move the moved file: A/D/rho_moved to A/B/F/rho_move_moved
+ svntest.actions.run_and_verify_svn(None, [], 'mv',
+ rho_move_path, rho_move_moved_path)
+
+ E_path = sbox.ospath('A/B/E')
+ E_path_moved = sbox.ospath('A/B/F/E_moved')
+ E_path_move_moved = sbox.ospath('A/D/G/E_move_moved')
+
+ # Copy A/B/E to A/B/F/E_moved
+ svntest.actions.run_and_verify_svn(None, [], 'mv',
+ E_path, E_path_moved)
+
+ # Move the moved file: A/B/F/E_moved to A/D/G/E_move_moved
+ svntest.actions.run_and_verify_svn(None, [], 'mv',
+ E_path_moved, E_path_move_moved)
+
+ # Created expected output tree for 'svn ci':
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(verb='Deleting'),
+ 'A/D/G/E_move_moved/' : Item(verb='Adding'),
+ 'A/D/G/rho' : Item(verb='Deleting'),
+ 'A/B/F/rho_move_moved' : Item(verb='Adding'),
+ })
+
+ # Create expected status tree
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/D/G/E_move_moved' : Item(status=' ', wc_rev=2),
+ 'A/D/G/E_move_moved/alpha' : Item(status=' ', wc_rev=2),
+ 'A/D/G/E_move_moved/beta' : Item(status=' ', wc_rev=2),
+ 'A/B/F/rho_move_moved' : Item(status=' ', wc_rev=2),
+ })
+
+ expected_status.remove('A/B/E',
+ 'A/B/E/alpha',
+ 'A/B/E/beta',
+ 'A/D/G/rho')
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+
+def move_file_within_moved_dir(sbox):
+ "move a file twice within a moved dir"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ D_path = sbox.ospath('A/D')
+ D_path_moved = sbox.ospath('A/B/F/D_moved')
+
+  # Move A/D to A/B/F/D_moved
+ svntest.actions.run_and_verify_svn(None, [], 'mv',
+ D_path, D_path_moved)
+
+ chi_path = sbox.ospath('A/B/F/D_moved/H/chi')
+ chi_moved_path = os.path.join(wc_dir, 'A', 'B', 'F', 'D_moved',
+ 'H', 'chi_moved')
+ chi_moved_again_path = os.path.join(wc_dir, 'A', 'B', 'F',
+ 'D_moved', 'H', 'chi_moved_again')
+
+ # Move A/B/F/D_moved/H/chi to A/B/F/D_moved/H/chi_moved
+ # then move that to A/B/F/D_moved/H/chi_moved_again
+ svntest.actions.run_and_verify_svn(None, [], 'mv',
+ chi_path, chi_moved_path)
+ svntest.actions.run_and_verify_svn(None, [], 'mv',
+ chi_moved_path,
+ chi_moved_again_path)
+
+ # Created expected output tree for 'svn ci':
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/F/D_moved/' : Item(verb='Adding'),
+ 'A/B/F/D_moved/H/chi' : Item(verb='Deleting'),
+ 'A/B/F/D_moved/H/chi_moved_again' : Item(verb='Adding'),
+ 'A/D' : Item(verb='Deleting'),
+ })
+
+ # Create expected status tree
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B/F/D_moved' : Item(status=' ', wc_rev=2),
+ 'A/B/F/D_moved/gamma' : Item(status=' ', wc_rev=2),
+ 'A/B/F/D_moved/G' : Item(status=' ', wc_rev=2),
+ 'A/B/F/D_moved/G/pi' : Item(status=' ', wc_rev=2),
+ 'A/B/F/D_moved/G/rho' : Item(status=' ', wc_rev=2),
+ 'A/B/F/D_moved/G/tau' : Item(status=' ', wc_rev=2),
+ 'A/B/F/D_moved/H' : Item(status=' ', wc_rev=2),
+ 'A/B/F/D_moved/H/omega' : Item(status=' ', wc_rev=2),
+ 'A/B/F/D_moved/H/psi' : Item(status=' ', wc_rev=2),
+ 'A/B/F/D_moved/H/chi_moved_again' : Item(status=' ', wc_rev=2),
+ })
+
+ expected_status.remove('A/D',
+ 'A/D/gamma',
+ 'A/D/G',
+ 'A/D/G/pi',
+ 'A/D/G/rho',
+ 'A/D/G/tau',
+ 'A/D/H',
+ 'A/D/H/chi',
+ 'A/D/H/omega',
+ 'A/D/H/psi',
+ )
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+
+def move_file_out_of_moved_dir(sbox):
+ "move a file out of a moved dir"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ D_path = sbox.ospath('A/D')
+ D_path_moved = sbox.ospath('A/B/F/D_moved')
+
+  # Move A/D to A/B/F/D_moved
+ svntest.actions.run_and_verify_svn(None, [], 'mv',
+ D_path, D_path_moved)
+
+ chi_path = sbox.ospath('A/B/F/D_moved/H/chi')
+ chi_moved_path = os.path.join(wc_dir, 'A', 'B', 'F', 'D_moved',
+ 'H', 'chi_moved')
+ chi_moved_again_path = sbox.ospath('A/C/chi_moved_again')
+
+ # Move A/B/F/D_moved/H/chi to A/B/F/D_moved/H/chi_moved
+ # then move that to A/C/chi_moved_again
+ svntest.actions.run_and_verify_svn(None, [], 'mv',
+ chi_path, chi_moved_path)
+ svntest.actions.run_and_verify_svn(None, [], 'mv',
+ chi_moved_path,
+ chi_moved_again_path)
+
+ # Created expected output tree for 'svn ci':
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/F/D_moved/' : Item(verb='Adding'),
+ 'A/B/F/D_moved/H/chi' : Item(verb='Deleting'),
+ 'A/C/chi_moved_again' : Item(verb='Adding'),
+ 'A/D' : Item(verb='Deleting'),
+ })
+
+ # Create expected status tree
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B/F/D_moved' : Item(status=' ', wc_rev=2),
+ 'A/B/F/D_moved/gamma' : Item(status=' ', wc_rev=2),
+ 'A/B/F/D_moved/G' : Item(status=' ', wc_rev=2),
+ 'A/B/F/D_moved/G/pi' : Item(status=' ', wc_rev=2),
+ 'A/B/F/D_moved/G/rho' : Item(status=' ', wc_rev=2),
+ 'A/B/F/D_moved/G/tau' : Item(status=' ', wc_rev=2),
+ 'A/B/F/D_moved/H' : Item(status=' ', wc_rev=2),
+ 'A/B/F/D_moved/H/omega' : Item(status=' ', wc_rev=2),
+ 'A/B/F/D_moved/H/psi' : Item(status=' ', wc_rev=2),
+ 'A/C/chi_moved_again' : Item(status=' ', wc_rev=2),
+ })
+
+ expected_status.remove('A/D',
+ 'A/D/gamma',
+ 'A/D/G',
+ 'A/D/G/pi',
+ 'A/D/G/rho',
+ 'A/D/G/tau',
+ 'A/D/H',
+ 'A/D/H/chi',
+ 'A/D/H/omega',
+ 'A/D/H/psi',
+ )
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+
+def move_dir_within_moved_dir(sbox):
+  "move a dir twice within a moved dir"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  D_path = sbox.ospath('A/D')
+  D_path_moved = sbox.ospath('A/B/F/D_moved')
+
+  # Move A/D to A/B/F/D_moved
+  svntest.actions.run_and_verify_svn(None, [], 'mv',
+                                     D_path, D_path_moved)
+
+  H_path = sbox.ospath('A/B/F/D_moved/H')
+  H_moved_path = sbox.ospath('A/B/F/D_moved/H_moved')
+  H_moved_again_path = os.path.join(wc_dir, 'A', 'B', 'F',
+                                    'D_moved', 'H_moved_again')
+
+  # Move A/B/F/D_moved/H to A/B/F/D_moved/H_moved
+  # then move that to A/B/F/D_moved/H_moved_again
+  svntest.actions.run_and_verify_svn(None, [], 'mv',
+                                     H_path, H_moved_path)
+  svntest.actions.run_and_verify_svn(None, [], 'mv',
+                                     H_moved_path,
+                                     H_moved_again_path)
+
+  # Create expected output tree for 'svn ci':
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/D' : Item(verb='Deleting'),
+    'A/B/F/D_moved' : Item(verb='Adding'),
+    'A/B/F/D_moved/H' : Item(verb='Deleting'),
+    'A/B/F/D_moved/H_moved_again' : Item(verb='Adding'),
+    })
+
+  # Create expected status tree
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'A/B/F/D_moved' : Item(status='  ', wc_rev=2),
+    'A/B/F/D_moved/gamma' : Item(status='  ', wc_rev=2),
+    'A/B/F/D_moved/G' : Item(status='  ', wc_rev=2),
+    'A/B/F/D_moved/G/pi' : Item(status='  ', wc_rev=2),
+    'A/B/F/D_moved/G/rho' : Item(status='  ', wc_rev=2),
+    'A/B/F/D_moved/G/tau' : Item(status='  ', wc_rev=2),
+    'A/B/F/D_moved/H_moved_again' : Item(status='  ', wc_rev=2),
+    'A/B/F/D_moved/H_moved_again/omega' : Item(status='  ', wc_rev=2),
+    'A/B/F/D_moved/H_moved_again/psi' : Item(status='  ', wc_rev=2),
+    'A/B/F/D_moved/H_moved_again/chi' : Item(status='  ', wc_rev=2),
+    })
+
+  # The original A/D tree is gone after the commit.
+  expected_status.remove('A/D',
+                         'A/D/gamma',
+                         'A/D/G',
+                         'A/D/G/pi',
+                         'A/D/G/rho',
+                         'A/D/G/tau',
+                         'A/D/H',
+                         'A/D/H/chi',
+                         'A/D/H/omega',
+                         'A/D/H/psi',
+                         )
+
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status)
+
+def move_dir_out_of_moved_dir(sbox):
+  "move a dir out of a moved dir"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  D_path = sbox.ospath('A/D')
+  D_path_moved = sbox.ospath('A/B/F/D_moved')
+
+  # Move A/D to A/B/F/D_moved
+  svntest.actions.run_and_verify_svn(None, [], 'mv',
+                                     D_path, D_path_moved)
+
+  H_path = sbox.ospath('A/B/F/D_moved/H')
+  H_moved_path = sbox.ospath('A/B/F/D_moved/H_moved')
+  H_moved_again_path = sbox.ospath('A/C/H_moved_again')
+
+  # Move A/B/F/D_moved/H to A/B/F/D_moved/H_moved
+  # then move that to A/C/H_moved_again
+  svntest.actions.run_and_verify_svn(None, [], 'mv',
+                                     H_path, H_moved_path)
+  svntest.actions.run_and_verify_svn(None, [], 'mv',
+                                     H_moved_path,
+                                     H_moved_again_path)
+
+  # Create expected output tree for 'svn ci':
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/D' : Item(verb='Deleting'),
+    'A/B/F/D_moved' : Item(verb='Adding'),
+    'A/B/F/D_moved/H' : Item(verb='Deleting'),
+    'A/C/H_moved_again' : Item(verb='Adding'),
+    })
+
+  # Create expected status tree
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'A/B/F/D_moved' : Item(status='  ', wc_rev=2),
+    'A/B/F/D_moved/gamma' : Item(status='  ', wc_rev=2),
+    'A/B/F/D_moved/G' : Item(status='  ', wc_rev=2),
+    'A/B/F/D_moved/G/pi' : Item(status='  ', wc_rev=2),
+    'A/B/F/D_moved/G/rho' : Item(status='  ', wc_rev=2),
+    'A/B/F/D_moved/G/tau' : Item(status='  ', wc_rev=2),
+    'A/C/H_moved_again' : Item(status='  ', wc_rev=2),
+    'A/C/H_moved_again/omega' : Item(status='  ', wc_rev=2),
+    'A/C/H_moved_again/psi' : Item(status='  ', wc_rev=2),
+    'A/C/H_moved_again/chi' : Item(status='  ', wc_rev=2),
+    })
+
+  # The original A/D tree is gone after the commit.
+  expected_status.remove('A/D',
+                         'A/D/gamma',
+                         'A/D/G',
+                         'A/D/G/pi',
+                         'A/D/G/rho',
+                         'A/D/G/tau',
+                         'A/D/H',
+                         'A/D/H/chi',
+                         'A/D/H/omega',
+                         'A/D/H/psi',
+                         )
+
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status)
+# Includes regression testing for issue #3429 ("svn mv A B; svn mv B A"
+# generates replace without history).
+@Issue(3429)
+def move_file_back_and_forth(sbox):
+  "move a moved file back to original location"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  rho_path = sbox.ospath('A/D/G/rho')
+  rho_move_path = sbox.ospath('A/D/rho_moved')
+
+  # Move A/D/G/rho away from and then back to its original path
+  svntest.actions.run_and_verify_svn(None, [], 'mv',
+                                     rho_path, rho_move_path)
+  svntest.actions.run_and_verify_svn(None, [], 'mv',
+                                     rho_move_path, rho_path)
+
+  # Check expected status before commit: the two moves cancel out,
+  # so the working copy should look unmodified.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Try to commit and find out that there is nothing to commit.
+  expected_output = []
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status)
+
+# Includes regression testing for issue #3429 ("svn mv A B; svn mv B A"
+# generates replace without history).
+@Issue(3429)
+def move_dir_back_and_forth(sbox):
+  "move a moved dir back to original location"
+
+  sbox.build(read_only = True)
+  wc_dir = sbox.wc_dir
+
+  D_path = sbox.ospath('A/D')
+  D_move_path = sbox.ospath('D_moved')
+
+  # Move A/D to D_moved
+  svntest.actions.run_and_verify_svn(None, [], 'mv',
+                                     D_path, D_move_path)
+
+  # Move the moved dir: D_moved back to its starting
+  # location at A/D.
+
+  svntest.actions.run_and_verify_svn(None, [], 'mv', D_move_path, D_path)
+
+  # Verify that the working copy is back in its unmodified state: the
+  # two moves cancel out (no replace without history, issue #3429).
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+def copy_move_added_paths(sbox):
+  "copy and move added paths without commits"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Create a new file and schedule it for addition
+  upsilon_path = sbox.ospath('A/D/upsilon')
+  svntest.main.file_write(upsilon_path, "This is the file 'upsilon'\n")
+  svntest.actions.run_and_verify_svn(None, [], 'add', upsilon_path)
+
+  # Create a dir with children and schedule it for addition
+  I_path = sbox.ospath('A/D/I')
+  J_path = os.path.join(I_path, 'J')
+  eta_path = os.path.join(I_path, 'eta')
+  theta_path = os.path.join(I_path, 'theta')
+  kappa_path = os.path.join(J_path, 'kappa')
+  os.mkdir(I_path)
+  os.mkdir(J_path)
+  svntest.main.file_write(eta_path, "This is the file 'eta'\n")
+  svntest.main.file_write(theta_path, "This is the file 'theta'\n")
+  svntest.main.file_write(kappa_path, "This is the file 'kappa'\n")
+  svntest.actions.run_and_verify_svn(None, [], 'add', I_path)
+
+  # Create another dir and schedule it for addition
+  K_path = sbox.ospath('K')
+  os.mkdir(K_path)
+  svntest.actions.run_and_verify_svn(None, [], 'add', K_path)
+
+  # Verify all the adds took place correctly.
+  expected_status_after_adds = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status_after_adds.add({
+    'A/D/I' : Item(status='A ', wc_rev='0'),
+    'A/D/I/eta' : Item(status='A ', wc_rev='0'),
+    'A/D/I/J' : Item(status='A ', wc_rev='0'),
+    'A/D/I/J/kappa' : Item(status='A ', wc_rev='0'),
+    'A/D/I/theta' : Item(status='A ', wc_rev='0'),
+    'A/D/upsilon' : Item(status='A ', wc_rev='0'),
+    'K' : Item(status='A ', wc_rev='0'),
+    })
+  svntest.actions.run_and_verify_status(wc_dir, expected_status_after_adds)
+
+  # Scatter some unversioned paths within the added dir I.
+  unversioned_path_1 = os.path.join(I_path, 'unversioned1')
+  unversioned_path_2 = os.path.join(J_path, 'unversioned2')
+  L_path = os.path.join(I_path, "L_UNVERSIONED")
+  unversioned_path_3 = os.path.join(L_path, 'unversioned3')
+  svntest.main.file_write(unversioned_path_1, "An unversioned file\n")
+  svntest.main.file_write(unversioned_path_2, "An unversioned file\n")
+  os.mkdir(L_path)
+  svntest.main.file_write(unversioned_path_3, "An unversioned file\n")
+
+  # Copy added dir A/D/I to added dir K/I
+  I_copy_path = os.path.join(K_path, 'I')
+  svntest.actions.run_and_verify_svn(None, [], 'cp',
+                                     I_path, I_copy_path)
+
+  # Copy added file A/D/upsilon into added dir K
+  upsilon_copy_path = os.path.join(K_path, 'upsilon')
+  svntest.actions.run_and_verify_svn(None, [], 'cp',
+                                     upsilon_path, upsilon_copy_path)
+
+  # Move added file A/D/upsilon to upsilon,
+  # then move it again to A/upsilon
+  upsilon_move_path = sbox.ospath('upsilon')
+  upsilon_move_path_2 = sbox.ospath('A/upsilon')
+  svntest.actions.run_and_verify_svn(None, [], 'mv',
+                                     upsilon_path, upsilon_move_path)
+  svntest.actions.run_and_verify_svn(None, [], 'mv',
+                                     upsilon_move_path, upsilon_move_path_2)
+
+  # Move added dir A/D/I to A/B/I,
+  # then move it again to A/D/H/I
+  I_move_path = sbox.ospath('A/B/I')
+  I_move_path_2 = sbox.ospath('A/D/H/I')
+  svntest.actions.run_and_verify_svn(None, [], 'mv',
+                                     I_path, I_move_path)
+  svntest.actions.run_and_verify_svn(None, [], 'mv',
+                                     I_move_path, I_move_path_2)
+
+  # Create expected output tree for 'svn ci'
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/D/H/I' : Item(verb='Adding'),
+    'A/D/H/I/J' : Item(verb='Adding'),
+    'A/D/H/I/J/kappa' : Item(verb='Adding'),
+    'A/D/H/I/eta' : Item(verb='Adding'),
+    'A/D/H/I/theta' : Item(verb='Adding'),
+    'A/upsilon' : Item(verb='Adding'),
+    'K' : Item(verb='Adding'),
+    'K/I' : Item(verb='Adding'),
+    'K/I/J' : Item(verb='Adding'),
+    'K/I/J/kappa' : Item(verb='Adding'),
+    'K/I/eta' : Item(verb='Adding'),
+    'K/I/theta' : Item(verb='Adding'),
+    'K/upsilon' : Item(verb='Adding'),
+    })
+
+  # Create expected status tree
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'A/D/H/I' : Item(status='  ', wc_rev=2),
+    'A/D/H/I/J' : Item(status='  ', wc_rev=2),
+    'A/D/H/I/J/kappa' : Item(status='  ', wc_rev=2),
+    'A/D/H/I/eta' : Item(status='  ', wc_rev=2),
+    'A/D/H/I/theta' : Item(status='  ', wc_rev=2),
+    'A/upsilon' : Item(status='  ', wc_rev=2),
+    'K' : Item(status='  ', wc_rev=2),
+    'K/I' : Item(status='  ', wc_rev=2),
+    'K/I/J' : Item(status='  ', wc_rev=2),
+    'K/I/J/kappa' : Item(status='  ', wc_rev=2),
+    'K/I/eta' : Item(status='  ', wc_rev=2),
+    'K/I/theta' : Item(status='  ', wc_rev=2),
+    'K/upsilon' : Item(status='  ', wc_rev=2),
+    })
+
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status)
+
+  # run_and_verify_commit() doesn't handle status of unversioned paths
+  # so manually confirm unversioned paths got copied and moved too.
+  unversioned_paths = [
+    sbox.ospath('A/D/H/I/unversioned1'),
+    sbox.ospath('A/D/H/I/L_UNVERSIONED'),
+    os.path.join(wc_dir, 'A', 'D', 'H', 'I', 'L_UNVERSIONED',
+                 'unversioned3'),
+    sbox.ospath('A/D/H/I/J/unversioned2'),
+    sbox.ospath('K/I/unversioned1'),
+    sbox.ospath('K/I/L_UNVERSIONED'),
+    sbox.ospath('K/I/L_UNVERSIONED/unversioned3'),
+    sbox.ospath('K/I/J/unversioned2')]
+  for path in unversioned_paths:
+    if not os.path.exists(path):
+      raise svntest.Failure("Unversioned path '%s' not found." % path)
+
+def copy_added_paths_with_props(sbox):
+  "copy added uncommitted paths with props"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Create a new file, schedule it for addition and set properties
+  upsilon_path = sbox.ospath('A/D/upsilon')
+  svntest.main.file_write(upsilon_path, "This is the file 'upsilon'\n")
+  svntest.actions.run_and_verify_svn(None, [], 'add', upsilon_path)
+  svntest.actions.run_and_verify_svn(None, [], 'propset',
+                                     'foo', 'bar', upsilon_path)
+
+  # Create a dir and schedule it for addition and set properties
+  I_path = sbox.ospath('A/D/I')
+  os.mkdir(I_path)
+  svntest.actions.run_and_verify_svn(None, [], 'add', I_path)
+  svntest.actions.run_and_verify_svn(None, [], 'propset',
+                                     'foo', 'bar', I_path)
+
+  # Verify all the adds took place correctly.
+  expected_status_after_adds = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status_after_adds.add({
+    'A/D/upsilon' : Item(status='A ', wc_rev='0'),
+    'A/D/I' : Item(status='A ', wc_rev='0'),
+    })
+  svntest.actions.run_and_verify_status(wc_dir, expected_status_after_adds)
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({
+    'A/D/upsilon' : Item(props={'foo' : 'bar'},
+                         contents="This is the file 'upsilon'\n"),
+    'A/D/I' : Item(props={'foo' : 'bar'}),
+    })
+
+  svntest.actions.verify_disk(wc_dir, expected_disk.old_tree(), True)
+
+  # Copy added dir I to dir A/C
+  I_copy_path = sbox.ospath('A/C/I')
+  svntest.actions.run_and_verify_svn(None, [], 'cp',
+                                     I_path, I_copy_path)
+
+  # Copy added file A/D/upsilon into dir A/C
+  upsilon_copy_path = sbox.ospath('A/C/upsilon')
+  svntest.actions.run_and_verify_svn(None, [], 'cp',
+                                     upsilon_path, upsilon_copy_path)
+
+  # Create expected output tree for 'svn ci'
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/D/upsilon' : Item(verb='Adding'),
+    'A/D/I' : Item(verb='Adding'),
+    'A/C/upsilon' : Item(verb='Adding'),
+    'A/C/I' : Item(verb='Adding'),
+    })
+
+  # Create expected status tree
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'A/D/upsilon' : Item(status='  ', wc_rev=2),
+    'A/D/I' : Item(status='  ', wc_rev=2),
+    'A/C/upsilon' : Item(status='  ', wc_rev=2),
+    'A/C/I' : Item(status='  ', wc_rev=2),
+    })
+
+  # Tweak expected disk tree: the copies must carry the 'foo' property too.
+  expected_disk.add({
+    'A/C/upsilon' : Item(props={ 'foo' : 'bar'},
+                         contents="This is the file 'upsilon'\n"),
+    'A/C/I' : Item(props={ 'foo' : 'bar'}),
+    })
+
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status)
+  svntest.actions.verify_disk(wc_dir, expected_disk.old_tree(), True)
+
+def copy_added_paths_to_URL(sbox):
+  "copy added path to URL"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Create a new file and schedule it for addition
+  upsilon_path = sbox.ospath('A/D/upsilon')
+  svntest.main.file_write(upsilon_path, "This is the file 'upsilon'\n")
+  svntest.actions.run_and_verify_svn(None, [], 'add', upsilon_path)
+
+  # Create a dir with children and schedule it for addition
+  I_path = sbox.ospath('A/D/I')
+  J_path = os.path.join(I_path, 'J')
+  eta_path = os.path.join(I_path, 'eta')
+  theta_path = os.path.join(I_path, 'theta')
+  kappa_path = os.path.join(J_path, 'kappa')
+  os.mkdir(I_path)
+  os.mkdir(J_path)
+  svntest.main.file_write(eta_path, "This is the file 'eta'\n")
+  svntest.main.file_write(theta_path, "This is the file 'theta'\n")
+  svntest.main.file_write(kappa_path, "This is the file 'kappa'\n")
+  svntest.actions.run_and_verify_svn(None, [], 'add', I_path)
+
+  # Verify all the adds took place correctly.
+  expected_status_after_adds = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status_after_adds.add({
+    'A/D/I' : Item(status='A ', wc_rev='0'),
+    'A/D/I/eta' : Item(status='A ', wc_rev='0'),
+    'A/D/I/J' : Item(status='A ', wc_rev='0'),
+    'A/D/I/J/kappa' : Item(status='A ', wc_rev='0'),
+    'A/D/I/theta' : Item(status='A ', wc_rev='0'),
+    'A/D/upsilon' : Item(status='A ', wc_rev='0'),
+    })
+  svntest.actions.run_and_verify_status(wc_dir, expected_status_after_adds)
+
+  # Scatter some unversioned paths within the added dir I.
+  # These don't get copied in a WC->URL copy obviously.
+  unversioned_path_1 = os.path.join(I_path, 'unversioned1')
+  unversioned_path_2 = os.path.join(J_path, 'unversioned2')
+  L_path = os.path.join(I_path, "L_UNVERSIONED")
+  unversioned_path_3 = os.path.join(L_path, 'unversioned3')
+  svntest.main.file_write(unversioned_path_1, "An unversioned file\n")
+  svntest.main.file_write(unversioned_path_2, "An unversioned file\n")
+  os.mkdir(L_path)
+  svntest.main.file_write(unversioned_path_3, "An unversioned file\n")
+
+  # Copy added file A/D/upsilon to URL://A/C/upsilon
+  upsilon_copy_URL = sbox.repo_url + '/A/C/upsilon'
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'cp', '-m', '',
+                                     upsilon_path, upsilon_copy_URL)
+
+  # Validate the mergeinfo of the copy destination (we expect none).
+  svntest.actions.run_and_verify_svn([], '.*W200017: Property.*not found',
+                                     'propget',
+                                     SVN_PROP_MERGEINFO, upsilon_copy_URL)
+
+  # Copy added dir A/D/I to URL://A/D/G/I
+  I_copy_URL = sbox.repo_url + '/A/D/G/I'
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'cp', '-m', '',
+                                     I_path, I_copy_URL)
+
+  # Create expected output tree for 'svn ci'
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/D/I' : Item(verb='Adding'),
+    'A/D/I/J' : Item(verb='Adding'),
+    'A/D/I/J/kappa' : Item(verb='Adding'),
+    'A/D/I/eta' : Item(verb='Adding'),
+    'A/D/I/theta' : Item(verb='Adding'),
+    'A/D/upsilon' : Item(verb='Adding'),
+    })
+
+  # Create expected status tree.
+  # The two WC->URL copies above committed r2 and r3, so this commit is r4.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'A/D/I' : Item(status='  ', wc_rev=4),
+    'A/D/I/J' : Item(status='  ', wc_rev=4),
+    'A/D/I/J/kappa' : Item(status='  ', wc_rev=4),
+    'A/D/I/eta' : Item(status='  ', wc_rev=4),
+    'A/D/I/theta' : Item(status='  ', wc_rev=4),
+    'A/D/upsilon' : Item(status='  ', wc_rev=4),
+    })
+
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status)
+
+  # Create expected output for update
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/D/G/I' : Item(status='A '),
+    'A/D/G/I/theta' : Item(status='A '),
+    'A/D/G/I/J' : Item(status='A '),
+    'A/D/G/I/J/kappa' : Item(status='A '),
+    'A/D/G/I/eta' : Item(status='A '),
+    'A/C/upsilon' : Item(status='A '),
+    })
+
+  # Create expected disk for update
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({
+    'A/D/G/I' : Item(),
+    'A/D/G/I/theta' : Item("This is the file 'theta'\n"),
+    'A/D/G/I/J' : Item(),
+    'A/D/G/I/J/kappa' : Item("This is the file 'kappa'\n"),
+    'A/D/G/I/eta' : Item("This is the file 'eta'\n"),
+    'A/C/upsilon' : Item("This is the file 'upsilon'\n"),
+    'A/D/I' : Item(),
+    'A/D/I/J' : Item(),
+    'A/D/I/J/kappa' : Item("This is the file 'kappa'\n"),
+    'A/D/I/eta' : Item("This is the file 'eta'\n"),
+    'A/D/I/theta' : Item("This is the file 'theta'\n"),
+    'A/D/upsilon' : Item("This is the file 'upsilon'\n"),
+    'A/D/I/L_UNVERSIONED/unversioned3' : Item("An unversioned file\n"),
+    'A/D/I/L_UNVERSIONED' : Item(),
+    'A/D/I/unversioned1' : Item("An unversioned file\n"),
+    'A/D/I/J/unversioned2' : Item("An unversioned file\n"),
+    })
+
+  # Some more changes to the expected_status to reflect post update WC
+  expected_status.tweak(wc_rev=4)
+  expected_status.add({
+    'A/C' : Item(status='  ', wc_rev=4),
+    'A/C/upsilon' : Item(status='  ', wc_rev=4),
+    'A/D/G' : Item(status='  ', wc_rev=4),
+    'A/D/G/I' : Item(status='  ', wc_rev=4),
+    'A/D/G/I/theta' : Item(status='  ', wc_rev=4),
+    'A/D/G/I/J' : Item(status='  ', wc_rev=4),
+    'A/D/G/I/J/kappa' : Item(status='  ', wc_rev=4),
+    'A/D/G/I/eta' : Item(status='  ', wc_rev=4),
+    })
+
+  # Update WC, the WC->URL copies above should be added
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status)
+
+
+# Issue #1869.
+@Issue(1869)
+def move_to_relative_paths(sbox):
+  "move file using relative dst path names"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  E_path = sbox.ospath('A/B/E')
+  rel_path = os.path.join('..', '..', '..')
+
+  # Run the move from inside A/B/E so that the relative destination
+  # '../../..' resolves to the working copy root.
+  current_dir = os.getcwd()
+  os.chdir(E_path)
+  svntest.main.run_svn(None, 'mv', 'beta', rel_path)
+  os.chdir(current_dir)
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'beta' : Item(status='A ', copied='+', wc_rev='-',
+                  moved_from='A/B/E/beta'),
+    'A/B/E/beta' : Item(status='D ', wc_rev='1', moved_to='beta')
+  })
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+#----------------------------------------------------------------------
+def move_from_relative_paths(sbox):
+  "move file using relative src path names"
+
+  sbox.build(read_only = True)
+  wc_dir = sbox.wc_dir
+  F_path = sbox.ospath('A/B/F')
+  beta_rel_path = os.path.join('..', 'E', 'beta')
+
+  # Run the move from inside A/B/F so that '../E/beta' names the source
+  # and '.' is the destination directory.
+  current_dir = os.getcwd()
+  os.chdir(F_path)
+  svntest.main.run_svn(None, 'mv', beta_rel_path, '.')
+  os.chdir(current_dir)
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'A/B/F/beta' : Item(status='A ', copied='+', wc_rev='-',
+                        moved_from='A/B/E/beta'),
+    'A/B/E/beta' : Item(status='D ', wc_rev='1', moved_to='A/B/F/beta')
+  })
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+#----------------------------------------------------------------------
+def copy_to_relative_paths(sbox):
+  "copy file using relative dst path names"
+
+  sbox.build(read_only = True)
+  wc_dir = sbox.wc_dir
+  E_path = sbox.ospath('A/B/E')
+  rel_path = os.path.join('..', '..', '..')
+
+  # Run the copy from inside A/B/E; '../../..' resolves to the WC root.
+  current_dir = os.getcwd()
+  os.chdir(E_path)
+  svntest.main.run_svn(None, 'cp', 'beta', rel_path)
+  os.chdir(current_dir)
+
+  # Unlike a move, a copy leaves the source untouched and records no
+  # moved_from/moved_to info -- just an add with history at the root.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'beta' : Item(status='A ', copied='+', wc_rev='-'),
+  })
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+#----------------------------------------------------------------------
+def copy_from_relative_paths(sbox):
+  "copy file using relative src path names"
+
+  sbox.build(read_only = True)
+  wc_dir = sbox.wc_dir
+  F_path = sbox.ospath('A/B/F')
+  beta_rel_path = os.path.join('..', 'E', 'beta')
+
+  # Run the copy from inside A/B/F so that '../E/beta' names the source
+  # and '.' is the destination directory.
+  current_dir = os.getcwd()
+  os.chdir(F_path)
+  svntest.main.run_svn(None, 'cp', beta_rel_path, '.')
+  os.chdir(current_dir)
+
+  # The copy shows up as an add with history; the source is unchanged.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'A/B/F/beta' : Item(status='A ', copied='+', wc_rev='-'),
+  })
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+#----------------------------------------------------------------------
+
+# Test moving multiple files within a wc.
+
+def move_multiple_wc(sbox):
+  "svn mv multiple files to a common directory"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  chi_path = sbox.ospath('A/D/H/chi')
+  psi_path = sbox.ospath('A/D/H/psi')
+  omega_path = sbox.ospath('A/D/H/omega')
+  E_path = sbox.ospath('A/B/E')
+  C_path = sbox.ospath('A/C')
+
+  # Move chi, psi, omega and E to A/C in a single 'svn mv' invocation
+  svntest.actions.run_and_verify_svn(None, [], 'mv', chi_path, psi_path,
+                                     omega_path, E_path, C_path)
+
+  # Create expected output
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/C/chi' : Item(verb='Adding'),
+    'A/C/psi' : Item(verb='Adding'),
+    'A/C/omega' : Item(verb='Adding'),
+    'A/C/E' : Item(verb='Adding'),
+    'A/D/H/chi' : Item(verb='Deleting'),
+    'A/D/H/psi' : Item(verb='Deleting'),
+    'A/D/H/omega' : Item(verb='Deleting'),
+    'A/B/E' : Item(verb='Deleting'),
+    })
+
+  # Create expected status tree
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+  # Add the moved files
+  expected_status.add({
+    'A/C/chi' : Item(status='  ', wc_rev=2),
+    'A/C/psi' : Item(status='  ', wc_rev=2),
+    'A/C/omega' : Item(status='  ', wc_rev=2),
+    'A/C/E' : Item(status='  ', wc_rev=2),
+    'A/C/E/alpha' : Item(status='  ', wc_rev=2),
+    'A/C/E/beta' : Item(status='  ', wc_rev=2),
+    })
+
+  # Remove the moved files from their original locations
+  expected_status.remove('A/D/H/chi', 'A/D/H/psi', 'A/D/H/omega', 'A/B/E/alpha',
+                         'A/B/E/beta', 'A/B/E')
+
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status)
+
+#----------------------------------------------------------------------
+
+# Test copying multiple files within a wc.
+
+def copy_multiple_wc(sbox):
+  "svn cp multiple files to a common directory"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  chi_path = sbox.ospath('A/D/H/chi')
+  psi_path = sbox.ospath('A/D/H/psi')
+  omega_path = sbox.ospath('A/D/H/omega')
+  E_path = sbox.ospath('A/B/E')
+  C_path = sbox.ospath('A/C')
+
+  # Copy chi, psi, omega and E to A/C in a single 'svn cp' invocation
+  svntest.actions.run_and_verify_svn(None, [], 'cp', chi_path, psi_path,
+                                     omega_path, E_path, C_path)
+
+  # Create expected output (only adds -- the copy sources stay in place)
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/C/chi' : Item(verb='Adding'),
+    'A/C/psi' : Item(verb='Adding'),
+    'A/C/omega' : Item(verb='Adding'),
+    'A/C/E' : Item(verb='Adding'),
+    })
+
+  # Create expected status tree
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+  # Add the copied files
+  expected_status.add({
+    'A/C/chi' : Item(status='  ', wc_rev=2),
+    'A/C/psi' : Item(status='  ', wc_rev=2),
+    'A/C/omega' : Item(status='  ', wc_rev=2),
+    'A/C/E' : Item(status='  ', wc_rev=2),
+    'A/C/E/alpha' : Item(status='  ', wc_rev=2),
+    'A/C/E/beta' : Item(status='  ', wc_rev=2),
+    })
+
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status)
+
+#----------------------------------------------------------------------
+
+# Test moving multiple files within a repo.
+
+def move_multiple_repo(sbox):
+  "move multiple files within a repo"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  chi_url = sbox.repo_url + '/A/D/H/chi'
+  psi_url = sbox.repo_url + '/A/D/H/psi'
+  omega_url = sbox.repo_url + '/A/D/H/omega'
+  E_url = sbox.repo_url + '/A/B/E'
+  C_url = sbox.repo_url + '/A/C'
+
+  # Move three files and a directory in the repo to a different location
+  # in the repo (a single URL->URL move commits r2 directly)
+  svntest.actions.run_and_verify_svn(None, [], 'mv',
+                                     chi_url, psi_url, omega_url, E_url, C_url,
+                                     '-m', 'logmsg')
+
+  # Update to HEAD, and check to see if the files really moved in the repo
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/C/chi' : Item(status='A '),
+    'A/C/psi' : Item(status='A '),
+    'A/C/omega' : Item(status='A '),
+    'A/C/E' : Item(status='A '),
+    'A/C/E/alpha' : Item(status='A '),
+    'A/C/E/beta' : Item(status='A '),
+    'A/D/H/chi' : Item(status='D '),
+    'A/D/H/psi' : Item(status='D '),
+    'A/D/H/omega' : Item(status='D '),
+    'A/B/E' : Item(status='D '),
+    })
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.remove('A/D/H/chi', 'A/D/H/psi', 'A/D/H/omega', 'A/B/E/alpha',
+                       'A/B/E/beta', 'A/B/E')
+  expected_disk.add({
+    'A/C/chi' : Item(contents="This is the file 'chi'.\n"),
+    'A/C/psi' : Item(contents="This is the file 'psi'.\n"),
+    'A/C/omega' : Item(contents="This is the file 'omega'.\n"),
+    'A/C/E' : Item(),
+    'A/C/E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+    'A/C/E/beta' : Item(contents="This is the file 'beta'.\n"),
+    })
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+  expected_status.remove('A/D/H/chi', 'A/D/H/psi', 'A/D/H/omega', 'A/B/E/alpha',
+                         'A/B/E/beta', 'A/B/E')
+  expected_status.add({
+    'A/C/chi' : Item(status='  ', wc_rev=2),
+    'A/C/psi' : Item(status='  ', wc_rev=2),
+    'A/C/omega' : Item(status='  ', wc_rev=2),
+    'A/C/E' : Item(status='  ', wc_rev=2),
+    'A/C/E/alpha' : Item(status='  ', wc_rev=2),
+    'A/C/E/beta' : Item(status='  ', wc_rev=2),
+    })
+
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status)
+
+#----------------------------------------------------------------------
+
+# Test copying multiple files within a repo.
+
+def copy_multiple_repo(sbox):
+  "copy multiple files within a repo"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  chi_url = sbox.repo_url + '/A/D/H/chi'
+  psi_url = sbox.repo_url + '/A/D/H/psi'
+  omega_url = sbox.repo_url + '/A/D/H/omega'
+  E_url = sbox.repo_url + '/A/B/E'
+  C_url = sbox.repo_url + '/A/C'
+
+  # Copy three files and a directory in the repo to a different location
+  # in the repo (a single URL->URL copy commits r2 directly)
+  svntest.actions.run_and_verify_svn(None, [], 'cp',
+                                     chi_url, psi_url, omega_url, E_url, C_url,
+                                     '-m', 'logmsg')
+
+  # Update to HEAD, and check to see if the files really got copied in the repo
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/C/chi' : Item(status='A '),
+    'A/C/psi' : Item(status='A '),
+    'A/C/omega' : Item(status='A '),
+    'A/C/E' : Item(status='A '),
+    'A/C/E/alpha' : Item(status='A '),
+    'A/C/E/beta' : Item(status='A '),
+    })
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({
+    'A/C/chi' : Item(contents="This is the file 'chi'.\n"),
+    'A/C/psi' : Item(contents="This is the file 'psi'.\n"),
+    'A/C/omega' : Item(contents="This is the file 'omega'.\n"),
+    'A/C/E' : Item(),
+    'A/C/E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+    'A/C/E/beta' : Item(contents="This is the file 'beta'.\n"),
+    })
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+  expected_status.add({
+    'A/C/chi' : Item(status='  ', wc_rev=2),
+    'A/C/psi' : Item(status='  ', wc_rev=2),
+    'A/C/omega' : Item(status='  ', wc_rev=2),
+    'A/C/E' : Item(status='  ', wc_rev=2),
+    'A/C/E/alpha' : Item(status='  ', wc_rev=2),
+    'A/C/E/beta' : Item(status='  ', wc_rev=2),
+    })
+
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status)
+
+#----------------------------------------------------------------------
+
+# Test copying multiple files from a repo to a wc
+@Issue(2955)
+def copy_multiple_repo_wc(sbox):
+  "copy multiple files from a repo to a wc"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  chi_url = sbox.repo_url + '/A/D/H/chi'
+  psi_url = sbox.repo_url + '/A/D/H/psi'
+  omega_with_space_url = sbox.repo_url + '/A/D/H/omega 2'
+  E_url = sbox.repo_url + '/A/B/E'
+  C_path = sbox.ospath('A/C')
+
+  # We need this in order to check that we don't end up with URI-encoded
+  # paths in the WC (issue #2955)
+  svntest.actions.run_and_verify_svn(None, [], 'mv', '-m', 'log_msg',
+                                     sbox.repo_url + '/A/D/H/omega',
+                                     omega_with_space_url)
+
+  # Perform the copy and check the output
+  svntest.actions.run_and_verify_svn(None, [], 'cp',
+                                     chi_url, psi_url, omega_with_space_url,
+                                     E_url, C_path)
+
+  # Commit the changes, and verify the content actually got copied
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/C/chi' : Item(verb='Adding'),
+    'A/C/psi' : Item(verb='Adding'),
+    'A/C/omega 2' : Item(verb='Adding'),
+    'A/C/E' : Item(verb='Adding'),
+    })
+
+  # The rename above committed r2, so this commit creates r3.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'A/C/chi' : Item(status='  ', wc_rev=3),
+    'A/C/psi' : Item(status='  ', wc_rev=3),
+    'A/C/omega 2' : Item(status='  ', wc_rev=3),
+    'A/C/E' : Item(status='  ', wc_rev=3),
+    'A/C/E/alpha' : Item(status='  ', wc_rev=3),
+    'A/C/E/beta' : Item(status='  ', wc_rev=3),
+    })
+
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status)
+
+#----------------------------------------------------------------------
+
+# Test copying multiple files from a wc to a repo
+
+def copy_multiple_wc_repo(sbox):
+  "copy multiple files from a wc to a repo"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  chi_path = sbox.ospath('A/D/H/chi')
+  psi_path = sbox.ospath('A/D/H/psi')
+  omega_path = sbox.ospath('A/D/H/omega')
+  E_path = sbox.ospath('A/B/E')
+  C_url = sbox.repo_url + '/A/C'
+
+  # Perform the copy and check the output (a WC->URL copy commits r2
+  # directly, hence the '-m' log message)
+  svntest.actions.run_and_verify_svn(None, [], 'cp',
+                                     chi_path, psi_path, omega_path, E_path,
+                                     C_url, '-m', 'logmsg')
+
+  # Update to HEAD, and check to see if the files really got copied in the repo
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/C/chi' : Item(status='A '),
+    'A/C/psi' : Item(status='A '),
+    'A/C/omega' : Item(status='A '),
+    'A/C/E' : Item(status='A '),
+    'A/C/E/alpha' : Item(status='A '),
+    'A/C/E/beta' : Item(status='A '),
+    })
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({
+    'A/C/chi': Item(contents="This is the file 'chi'.\n"),
+    'A/C/psi': Item(contents="This is the file 'psi'.\n"),
+    'A/C/omega': Item(contents="This is the file 'omega'.\n"),
+    'A/C/E' : Item(),
+    'A/C/E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+    'A/C/E/beta' : Item(contents="This is the file 'beta'.\n"),
+    })
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+  expected_status.add({
+    'A/C/chi' : Item(status='  ', wc_rev=2),
+    'A/C/psi' : Item(status='  ', wc_rev=2),
+    'A/C/omega' : Item(status='  ', wc_rev=2),
+    'A/C/E' : Item(status='  ', wc_rev=2),
+    'A/C/E/alpha' : Item(status='  ', wc_rev=2),
+    'A/C/E/beta' : Item(status='  ', wc_rev=2),
+    })
+
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status)
+
+#----------------------------------------------------------------------
+
+# Test copying local files using peg revision syntax
+# (Issue 2546)
+@Issue(2546)
+def copy_peg_rev_local_files(sbox):
+ "copy local files using peg rev syntax"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ psi_path = sbox.ospath('A/D/H/psi')
+ new_iota_path = sbox.ospath('new_iota')
+ iota_path = sbox.ospath('iota')
+ sigma_path = sbox.ospath('sigma')
+
+ psi_text = "This is the file 'psi'.\n"
+ iota_text = "This is the file 'iota'.\n"
+
+ # Play a shell game with some WC files, then commit the changes back
+ # to the repository (making r2).  Afterwards iota holds psi's text and
+ # psi holds iota's text.
+ svntest.actions.run_and_verify_svn(None, [], 'mv',
+ psi_path, new_iota_path)
+ svntest.actions.run_and_verify_svn(None, [], 'mv',
+ iota_path, psi_path)
+ svntest.actions.run_and_verify_svn(None, [], 'mv',
+ new_iota_path, iota_path)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci',
+ '-m', 'rev 2',
+ wc_dir)
+
+ # Copy using a peg rev (remember, the object at iota_path at HEAD
+ # was at psi_path back at r1).
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp',
+ iota_path + '@HEAD', '-r', '1',
+ sigma_path)
+
+ # Commit and verify disk contents
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', wc_dir,
+ '-m', 'rev 3')
+
+ # sigma must carry psi's original text, traced through the peg revision.
+ # NOTE(review): 'A/D/H/psi' is tweaked and then also re-added below --
+ # the add entry looks redundant; confirm State.add/tweak semantics.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/D/H/psi', contents=iota_text)
+ expected_disk.add({
+ 'iota' : Item(contents=psi_text),
+ 'A/D/H/psi' : Item(contents=iota_text),
+ 'sigma' : Item(contents=psi_text, props={}),
+ })
+
+ svntest.actions.verify_disk(wc_dir, expected_disk.old_tree(), True)
+
+
+#----------------------------------------------------------------------
+
+# Test copying local directories using peg revision syntax
+# (Issue 2546)
+@Issue(2546)
+def copy_peg_rev_local_dirs(sbox):
+ "copy local dirs using peg rev syntax"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ E_path = sbox.ospath('A/B/E')
+ G_path = sbox.ospath('A/D/G')
+ I_path = sbox.ospath('A/D/I')
+ J_path = sbox.ospath('A/J')
+ alpha_path = os.path.join(E_path, 'alpha')
+
+ # Make some changes to the repository: delete E/alpha (r2), then shuffle
+ # the directories E -> I (r3), G -> E (r4), I -> G (r5).
+ svntest.actions.run_and_verify_svn(None, [], 'rm',
+ alpha_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci',
+ '-m', 'rev 2',
+ wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mv',
+ E_path, I_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci',
+ '-m', 'rev 3',
+ wc_dir)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mv',
+ G_path, E_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci',
+ '-m', 'rev 4',
+ wc_dir)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mv',
+ I_path, G_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci',
+ '-m', 'rev 5',
+ wc_dir)
+
+ # Copy using a peg rev.  The node at G@HEAD was at E back in r1, so J
+ # receives r1's E (alpha and beta).
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp',
+ G_path + '@HEAD', '-r', '1',
+ J_path)
+
+ # Commit and verify disk contents
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', wc_dir,
+ '-m', 'rev 6')
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/E/beta')
+ expected_disk.remove('A/B/E/alpha')
+ expected_disk.remove('A/D/G/pi')
+ expected_disk.remove('A/D/G/rho')
+ expected_disk.remove('A/D/G/tau')
+ expected_disk.add({
+ 'A/B/E' : Item(),
+ 'A/B/E/pi' : Item(contents="This is the file 'pi'.\n"),
+ 'A/B/E/rho' : Item(contents="This is the file 'rho'.\n"),
+ 'A/B/E/tau' : Item(contents="This is the file 'tau'.\n"),
+ 'A/D/G' : Item(),
+ 'A/D/G/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'A/J' : Item(),
+ 'A/J/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'A/J/beta' : Item(contents="This is the file 'beta'.\n"),
+ })
+
+ svntest.actions.verify_disk(wc_dir, expected_disk.old_tree(), True)
+
+
+#----------------------------------------------------------------------
+
+# Test copying urls using peg revision syntax
+# (Issue 2546)
+@Issues(2546,3651)
+def copy_peg_rev_url(sbox):
+ "copy urls using peg rev syntax"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ psi_path = sbox.ospath('A/D/H/psi')
+ new_iota_path = sbox.ospath('new_iota')
+ iota_path = sbox.ospath('iota')
+ iota_url = sbox.repo_url + '/iota'
+ sigma_url = sbox.repo_url + '/sigma'
+
+ psi_text = "This is the file 'psi'.\n"
+ iota_text = "This is the file 'iota'.\n"
+
+ # Make some changes to the repository: swap iota's and psi's contents
+ # via a chain of moves, committed as r2.
+ svntest.actions.run_and_verify_svn(None, [], 'mv',
+ psi_path, new_iota_path)
+ svntest.actions.run_and_verify_svn(None, [], 'mv',
+ iota_path, psi_path)
+ svntest.actions.run_and_verify_svn(None, [], 'mv',
+ new_iota_path, iota_path)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci',
+ '-m', 'rev 2',
+ wc_dir)
+
+ # Copy using a peg rev
+ # Add an empty peg specifier ('@') to sigma_url when copying, to test for
+ # issue #3651 "svn copy does not eat peg revision within copy target path".
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp',
+ iota_url + '@HEAD', '-r', '1',
+ sigma_url + '@', '-m', 'rev 3')
+
+ # Validate the copy destination's mergeinfo (we expect none).
+ svntest.actions.run_and_verify_svn([], '.*W200017: Property.*not found',
+ 'propget', SVN_PROP_MERGEINFO, sigma_url)
+
+ # Update to HEAD and verify disk contents.  sigma was copied from the
+ # node at iota@HEAD as it stood in r1 (i.e. psi), so it has psi's text.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'sigma' : Item(status='A '),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota', contents=psi_text)
+ expected_disk.tweak('A/D/H/psi', contents=iota_text)
+ expected_disk.add({
+ 'sigma' : Item(contents=psi_text),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+ expected_status.add({
+ 'sigma' : Item(status=' ', wc_rev=3)
+ })
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+# Test copying an older revision of a wc directory in the wc.
+def old_dir_wc_to_wc(sbox):
+ "copy old revision of wc dir to new dir"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ E = sbox.ospath('A/B/E')
+ E2 = sbox.ospath('E2')
+ E_url = sbox.repo_url + '/A/B/E'
+ alpha_url = E_url + '/alpha'
+
+ # delete E/alpha in r2
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', '-m', '', alpha_url)
+
+ # delete E in r3
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', '-m', '', E_url)
+
+ # Copy an old revision of E into a new path in the WC.  E2 gets r1's E,
+ # including alpha, even though both were since deleted in the repo.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', '-r1', E, E2)
+
+ # Create expected output tree.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'E2' : Item(verb='Adding'),
+ })
+
+ # Create expected status tree.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'E2' : Item(status=' ', wc_rev=4),
+ 'E2/alpha' : Item(status=' ', wc_rev=4),
+ 'E2/beta' : Item(status=' ', wc_rev=4),
+ })
+ # Commit the new directory (r4).
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+
+#----------------------------------------------------------------------
+# Test copying and creating parents in the wc
+
+def copy_make_parents_wc_wc(sbox):
+ "svn cp --parents WC_PATH WC_PATH"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ iota_path = sbox.ospath('iota')
+ new_iota_path = sbox.ospath('X/Y/Z/iota')
+
+ # Copy iota; --parents must create the intermediate dirs X/Y/Z.
+ svntest.actions.run_and_verify_svn(None, [], 'cp', '--parents',
+ iota_path, new_iota_path)
+
+ # Create expected output
+ expected_output = svntest.wc.State(wc_dir, {
+ 'X' : Item(verb='Adding'),
+ 'X/Y' : Item(verb='Adding'),
+ 'X/Y/Z' : Item(verb='Adding'),
+ 'X/Y/Z/iota' : Item(verb='Adding'),
+ })
+
+ # Create expected status tree
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ # Add the copied paths
+ expected_status.add({
+ 'X' : Item(status=' ', wc_rev=2),
+ 'X/Y' : Item(status=' ', wc_rev=2),
+ 'X/Y/Z' : Item(status=' ', wc_rev=2),
+ 'X/Y/Z/iota' : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+# Test copying and creating parents from the repo to the wc
+
+def copy_make_parents_repo_wc(sbox):
+ "svn cp --parents URL WC_PATH"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ iota_url = sbox.repo_url + '/iota'
+ new_iota_path = sbox.ospath('X/Y/Z/iota')
+
+ # Copy iota; --parents must create the intermediate dirs X/Y/Z.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', '--parents',
+ iota_url, new_iota_path)
+
+ # Create expected output
+ expected_output = svntest.wc.State(wc_dir, {
+ 'X' : Item(verb='Adding'),
+ 'X/Y' : Item(verb='Adding'),
+ 'X/Y/Z' : Item(verb='Adding'),
+ 'X/Y/Z/iota' : Item(verb='Adding'),
+ })
+
+ # Create expected status tree
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ # Add the copied paths
+ expected_status.add({
+ 'X' : Item(status=' ', wc_rev=2),
+ 'X/Y' : Item(status=' ', wc_rev=2),
+ 'X/Y/Z' : Item(status=' ', wc_rev=2),
+ 'X/Y/Z/iota' : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+
+#----------------------------------------------------------------------
+# Test copying and creating parents from the wc to the repo
+
+def copy_make_parents_wc_repo(sbox):
+ "svn cp --parents WC_PATH URL"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ iota_path = sbox.ospath('iota')
+ new_iota_url = sbox.repo_url + '/X/Y/Z/iota'
+
+ # Copy iota.  The WC -> URL copy commits directly (r2); --parents must
+ # create X/Y/Z in the repository.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', '--parents',
+ '-m', 'log msg',
+ iota_path, new_iota_url)
+
+ # Update to HEAD and verify disk contents
+ expected_output = svntest.wc.State(wc_dir, {
+ 'X' : Item(status='A '),
+ 'X/Y' : Item(status='A '),
+ 'X/Y/Z' : Item(status='A '),
+ 'X/Y/Z/iota' : Item(status='A '),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'X' : Item(),
+ 'X/Y' : Item(),
+ 'X/Y/Z' : Item(),
+ 'X/Y/Z/iota' : Item(contents="This is the file 'iota'.\n"),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'X' : Item(status=' ', wc_rev=2),
+ 'X/Y' : Item(status=' ', wc_rev=2),
+ 'X/Y/Z' : Item(status=' ', wc_rev=2),
+ 'X/Y/Z/iota' : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+
+#----------------------------------------------------------------------
+# Test copying and creating parents from repo to repo
+
+def copy_make_parents_repo_repo(sbox):
+ "svn cp --parents URL URL"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ iota_url = sbox.repo_url + '/iota'
+ new_iota_url = sbox.repo_url + '/X/Y/Z/iota'
+
+ # Copy iota.  The URL -> URL copy commits directly (r2); --parents must
+ # create X/Y/Z in the repository.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', '--parents',
+ '-m', 'log msg',
+ iota_url, new_iota_url)
+
+ # Update to HEAD and verify disk contents
+ expected_output = svntest.wc.State(wc_dir, {
+ 'X' : Item(status='A '),
+ 'X/Y' : Item(status='A '),
+ 'X/Y/Z' : Item(status='A '),
+ 'X/Y/Z/iota' : Item(status='A '),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'X' : Item(),
+ 'X/Y' : Item(),
+ 'X/Y/Z' : Item(),
+ 'X/Y/Z/iota' : Item(contents="This is the file 'iota'.\n"),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'X' : Item(status=' ', wc_rev=2),
+ 'X/Y' : Item(status=' ', wc_rev=2),
+ 'X/Y/Z' : Item(status=' ', wc_rev=2),
+ 'X/Y/Z/iota' : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+# Test for issue #2894
+# Can't perform URL to WC copy if URL needs URI encoding.
+@Issue(2894)
+def URI_encoded_repos_to_wc(sbox):
+ "copy a URL that needs URI encoding to WC"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_disk = svntest.main.greek_state.copy()
+
+ # Copy repo_url/URL_rel_path into the WC as DEST_NAME and commit it as
+ # revision REV, accumulating into expected_status/expected_disk across
+ # calls.  The destination names below contain a space, which forces the
+ # URL to need URI encoding (issue #2894).
+ def copy_URL_to_WC(URL_rel_path, dest_name, rev):
+ lines = [
+ "A " + os.path.join(wc_dir, dest_name, "B") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "B", "lambda") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "B", "E") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "B", "E", "alpha") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "B", "E", "beta") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "B", "F") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "mu") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "C") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "D") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "D", "gamma") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "D", "G") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "D", "G", "pi") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "D", "G", "rho") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "D", "G", "tau") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "D", "H") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "D", "H", "chi") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "D", "H", "omega") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "D", "H", "psi") + "\n",
+ # The copy checks out at the pre-commit revision, hence rev - 1.
+ "Checked out revision " + str(rev - 1) + ".\n",
+ "A " + os.path.join(wc_dir, dest_name) + "\n"]
+ expected = svntest.verify.UnorderedOutput(lines)
+ expected_status.add({
+ dest_name + "/B" : Item(status=' ', wc_rev=rev),
+ dest_name + "/B/lambda" : Item(status=' ', wc_rev=rev),
+ dest_name + "/B/E" : Item(status=' ', wc_rev=rev),
+ dest_name + "/B/E/alpha" : Item(status=' ', wc_rev=rev),
+ dest_name + "/B/E/beta" : Item(status=' ', wc_rev=rev),
+ dest_name + "/B/F" : Item(status=' ', wc_rev=rev),
+ dest_name + "/mu" : Item(status=' ', wc_rev=rev),
+ dest_name + "/C" : Item(status=' ', wc_rev=rev),
+ dest_name + "/D" : Item(status=' ', wc_rev=rev),
+ dest_name + "/D/gamma" : Item(status=' ', wc_rev=rev),
+ dest_name + "/D/G" : Item(status=' ', wc_rev=rev),
+ dest_name + "/D/G/pi" : Item(status=' ', wc_rev=rev),
+ dest_name + "/D/G/rho" : Item(status=' ', wc_rev=rev),
+ dest_name + "/D/G/tau" : Item(status=' ', wc_rev=rev),
+ dest_name + "/D/H" : Item(status=' ', wc_rev=rev),
+ dest_name + "/D/H/chi" : Item(status=' ', wc_rev=rev),
+ dest_name + "/D/H/omega" : Item(status=' ', wc_rev=rev),
+ dest_name + "/D/H/psi" : Item(status=' ', wc_rev=rev),
+ dest_name : Item(status=' ', wc_rev=rev)})
+ expected_disk.add({
+ dest_name : Item(props={}),
+ dest_name + '/B' : Item(),
+ dest_name + '/B/lambda' : Item("This is the file 'lambda'.\n"),
+ dest_name + '/B/E' : Item(),
+ dest_name + '/B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ dest_name + '/B/E/beta' : Item("This is the file 'beta'.\n"),
+ dest_name + '/B/F' : Item(),
+ dest_name + '/mu' : Item("This is the file 'mu'.\n"),
+ dest_name + '/C' : Item(),
+ dest_name + '/D' : Item(),
+ dest_name + '/D/gamma' : Item("This is the file 'gamma'.\n"),
+ dest_name + '/D/G' : Item(),
+ dest_name + '/D/G/pi' : Item("This is the file 'pi'.\n"),
+ dest_name + '/D/G/rho' : Item("This is the file 'rho'.\n"),
+ dest_name + '/D/G/tau' : Item("This is the file 'tau'.\n"),
+ dest_name + '/D/H' : Item(),
+ dest_name + '/D/H/chi' : Item("This is the file 'chi'.\n"),
+ dest_name + '/D/H/omega' : Item("This is the file 'omega'.\n"),
+ dest_name + '/D/H/psi' : Item("This is the file 'psi'.\n"),
+ })
+
+ # Make a copy
+ svntest.actions.run_and_verify_svn(expected, [],
+ 'copy',
+ sbox.repo_url + '/' + URL_rel_path,
+ os.path.join(wc_dir,
+ dest_name))
+
+ expected_output = svntest.wc.State(wc_dir,
+ {dest_name : Item(verb='Adding')})
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # First copy targets a dest with a space; second copies a source whose
+ # name itself contains a space.
+ copy_URL_to_WC('A', 'A COPY', 2)
+ copy_URL_to_WC('A COPY', 'A_COPY_2', 3)
+
+#----------------------------------------------------------------------
+# Issue #3068: copy source parent may be unversioned
+@Issue(3068)
+def allow_unversioned_parent_for_copy_src(sbox):
+ "copy wc in unversioned parent to other wc"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ # Make the "other" working copy; its parent directory is unversioned.
+ wc2_dir = sbox.add_wc_path('other')
+ svntest.actions.duplicate_dir(wc_dir, wc2_dir)
+ copy_to_path = sbox.ospath('A/copy_of_wc2')
+
+ # Copy the wc-in-unversioned-parent working copy to our original wc.
+ # Only success (no error output) is checked here.
+ svntest.actions.run_and_verify_svn(None,
+ [],
+ 'cp',
+ wc2_dir,
+ copy_to_path)
+
+def unneeded_parents(sbox):
+ "svn cp --parents FILE_URL DIR_URL"
+
+ # In this message...
+ #
+ # http://subversion.tigris.org/servlets/ReadMsg?list=dev&msgNo=138738
+ # From: Alexander Kitaev <Alexander.Kitaev@svnkit.com>
+ # To: dev@subversion.tigris.org
+ # Subject: 1.5.x segmentation fault on Repos to Repos copy
+ # Message-ID: <4830332A.6060301@svnkit.com>
+ # Date: Sun, 18 May 2008 15:46:18 +0200
+ #
+ # ...Alexander Kitaev describes the bug:
+ #
+ # svn cp --parents SRC_FILE_URL DST_DIR_URL -m "message"
+ #
+ # SRC_FILE_URL - existing file
+ # DST_DIR_URL - existing directory
+ #
+ # Omitting "--parents" option makes above copy operation work as
+ # expected.
+ #
+ # Bug is in libsvn_client/copy.c:801, where "dir" should be
+ # checked for null before using it in svn_ra_check_path call.
+ #
+ # At first we couldn't reproduce it, but later he added this:
+ #
+ # Looks like there is one more condition to reproduce the problem -
+ # dst URL should has no more segments count than source one.
+ #
+ # In other words, if we had "/A/B" below instead of "/A" (adjusting
+ # expected_* accordingly, of course), the bug wouldn't reproduce.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ iota_url = sbox.repo_url + '/iota'
+ A_url = sbox.repo_url + '/A'
+
+ # The --parents is unnecessary, but should still work (not segfault).
+ # iota is copied into the existing directory A, becoming A/iota in r2.
+ svntest.actions.run_and_verify_svn(None, [], 'cp', '--parents',
+ '-m', 'log msg', iota_url, A_url)
+
+ # Verify that it worked.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/iota' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/iota' : Item(contents="This is the file 'iota'.\n"),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'A/iota' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_update(
+ wc_dir, expected_output, expected_disk, expected_status)
+
+
+def double_parents_with_url(sbox):
+ "svn cp --parents URL/src_dir URL/dst_dir"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ E_url = sbox.repo_url + '/A/B/E'
+ Z_url = sbox.repo_url + '/A/B/Z'
+
+ # --parents shouldn't result in a double commit of the same directory.
+ # A/B already exists, so --parents has nothing to create here.
+ svntest.actions.run_and_verify_svn(None, [], 'cp', '--parents',
+ '-m', 'log msg', E_url, Z_url)
+
+ # Verify that it worked.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/Z/alpha' : Item(status='A '),
+ 'A/B/Z/beta' : Item(status='A '),
+ 'A/B/Z' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/B/Z/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'A/B/Z/beta' : Item(contents="This is the file 'beta'.\n"),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'A/B/Z/alpha' : Item(status=' ', wc_rev=2),
+ 'A/B/Z/beta' : Item(status=' ', wc_rev=2),
+ 'A/B/Z' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_update(
+ wc_dir, expected_output, expected_disk, expected_status)
+
+
+# Used to cause corruption not fixable by 'svn cleanup'.
+def copy_into_missing_dir(sbox):
+ "copy file into missing dir"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ A_path = sbox.ospath('A')
+ iota_path = sbox.ospath('iota')
+
+ # Remove 'A' from disk only; it is still versioned in the WC metadata.
+ svntest.main.safe_rmtree(A_path)
+
+ # Copy into the now-missing dir. This used to give this error:
+ # svn: In directory '.'
+ # svn: Error processing command 'modify-entry' in '.'
+ # svn: Error modifying entry for 'A'
+ # svn: Entry 'A' is already under version control
+ svntest.actions.run_and_verify_svn(None, ".*: Path '.*' is not a directory",
+ 'cp', iota_path, A_path)
+
+ # 'cleanup' should not error.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cleanup', wc_dir)
+
+
+def find_copyfrom_information_upstairs(sbox):
+ "renaming inside a copied subtree shouldn't hang"
+
+ # The final command in this series would cause the client to hang...
+ #
+ # ${SVN} cp A A2
+ # cd A2/B
+ # ${SVN} mkdir blah
+ # ${SVN} mv lambda blah
+ #
+ # ...because it wouldn't walk up past "" to find copyfrom information
+ # (which would be in A2/.svn/entries, not on A2/B/.svn/entries).
+ # Instead, it would keep thinking the parent of "" is "", and so
+ # loop forever, gobbling a little bit more memory with each iteration.
+ #
+ # Two things fixed this:
+ #
+ # 1) The client walks upward beyond CWD now, so it finds the
+ # copyfrom information.
+ #
+ # 2) Even if we do top out at "" without finding copyfrom information
+ # (say, because someone has corrupted their working copy), we'll
+ # still detect it and error, thus breaking the loop.
+ #
+ # This only tests case (1). We could test that (2) gets the expected
+ # error ("no parent with copyfrom information found above 'lambda'"),
+ # but we'd need to chroot to the top of the working copy or manually
+ # corrupt the wc by removing the copyfrom lines from A2/.svn/entries.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A_path = sbox.ospath('A')
+ A2_path = sbox.ospath('A2')
+ B2_path = os.path.join(A2_path, 'B')
+
+ svntest.actions.run_and_verify_svn(None, [], 'cp', A_path, A2_path)
+ saved_cwd = os.getcwd()
+ # cd into the copied subtree and rename within it (the hang scenario).
+ try:
+ os.chdir(B2_path)
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', 'blah')
+ svntest.actions.run_and_verify_svn(None, [], 'mv', 'lambda', 'blah')
+ finally:
+ os.chdir(saved_cwd)
+
+#----------------------------------------------------------------------
+
+def change_case_of_hostname(input):
+ "Change the case of the hostname, try uppercase first"
+
+ # NOTE(review): if INPUT contains no "scheme://" part, the regex fails
+ # and this function implicitly returns None.  Callers in this file
+ # always pass repository URLs, but confirm before reusing elsewhere.
+ m = re.match(r"^(.*://)([^/]*)(.*)", input)
+ if m:
+ scheme = m.group(1)
+ host = m.group(2).upper()
+ # Host was already all-uppercase: flip to lowercase instead.
+ if host == m.group(2):
+ host = m.group(2).lower()
+
+ path = m.group(3)
+
+ return scheme + host + path
+
+# regression test for issue #2475 - move file and folder
+@Issue(2475)
+def path_move_and_copy_between_wcs_2475(sbox):
+ "issue #2475 - move and copy between working copies"
+ sbox.build()
+
+ # checkout a second working copy, use repository url with different case
+ wc2_dir = sbox.add_wc_path('2')
+ repo_url2 = change_case_of_hostname(sbox.repo_url)
+
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.wc_dir = wc2_dir
+ expected_output.tweak(status='A ', contents=None)
+
+ # NOTE(review): this aliases the shared greek_state object rather than
+ # copying it -- safe only as long as no tweaks are applied to it.
+ expected_wc = svntest.main.greek_state
+
+ # Do a checkout, and verify the resulting output and disk contents.
+ svntest.actions.run_and_verify_checkout(repo_url2,
+ wc2_dir,
+ expected_output,
+ expected_wc)
+
+ # Copy a file from wc to wc2
+ mu_path = sbox.ospath('A/mu')
+ E_path = os.path.join(wc2_dir, 'A', 'B', 'E')
+
+ svntest.main.run_svn(None, 'cp', mu_path, E_path)
+
+ # Copy a folder from wc to wc2
+ C_path = sbox.ospath('A/C')
+ B_path = os.path.join(wc2_dir, 'A', 'B')
+
+ svntest.main.run_svn(None, 'cp', C_path, B_path)
+
+ # Move a file from wc to wc2
+ mu_path = sbox.ospath('A/mu')
+ B_path = os.path.join(wc2_dir, 'A', 'B')
+
+ svntest.main.run_svn(None, 'mv', mu_path, B_path)
+
+ # Move a folder from wc to wc2
+ C_path = sbox.ospath('A/C')
+ D_path = os.path.join(wc2_dir, 'A', 'D')
+
+ svntest.main.run_svn(None, 'mv', C_path, D_path)
+
+ # Verify modified status: the moves leave deletions in wc, and all four
+ # operations leave copied additions in wc2.
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ expected_status.tweak('A/mu', 'A/C', status='D ')
+ svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status)
+ expected_status2 = svntest.actions.get_virginal_state(wc2_dir, 1)
+ expected_status2.add({ 'A/B/mu' :
+ Item(status='A ', copied='+', wc_rev='-') })
+ expected_status2.add({ 'A/B/C' :
+ Item(status='A ', copied='+', wc_rev='-') })
+ expected_status2.add({ 'A/B/E/mu' :
+ Item(status='A ', copied='+', wc_rev='-') })
+ expected_status2.add({ 'A/D/C' :
+ Item(status='A ', copied='+', wc_rev='-') })
+ svntest.actions.run_and_verify_status(wc2_dir, expected_status2)
+
+
+# regression test for issue #2475 - direct copy in the repository
+# this test handles the 'direct move' case too, that uses the same code.
+@Issue(2475)
+def path_copy_in_repo_2475(sbox):
+ "issue #2475 - direct copy in the repository"
+ sbox.build()
+
+ # repo_url2 differs from repo_url only in the case of the hostname.
+ repo_url2 = change_case_of_hostname(sbox.repo_url)
+
+ # Copy a file from repo to repo2
+ mu_url = sbox.repo_url + '/A/mu'
+ E_url = repo_url2 + '/A/B/E'
+
+ svntest.main.run_svn(None, 'cp', mu_url, E_url, '-m', 'copy mu to /A/B/E')
+
+ # For completeness' sake, update to HEAD, and verify we have a full
+ # greek tree again, all at revision 2.
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ 'A/B/E/mu' : Item(status='A '),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({'A/B/E/mu' : Item("This is the file 'mu'.\n") })
+
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 2)
+ expected_status.add({'A/B/E/mu' : Item(status=' ', wc_rev=2) })
+ svntest.actions.run_and_verify_update(sbox.wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+def commit_copy_depth_empty(sbox):
+ "copy a wcdir, then commit it with --depth empty"
+ sbox.build()
+
+ a = sbox.ospath('A')
+ new_a = sbox.ospath('new_A')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', a, new_a)
+
+ # With --depth empty only the copy root itself is committed; only
+ # success (no error output) is checked here.
+ svntest.actions.run_and_verify_svn(None, [], 'ci',
+ new_a, '--depth', 'empty',
+ '-m', 'Copied directory')
+
+def copy_below_copy(sbox):
+ "copy a dir below a copied dir"
+ sbox.build()
+
+ A = sbox.ospath('A')
+ new_A = sbox.ospath('new_A')
+ new_A_D = os.path.join(new_A, 'D')
+ new_A_new_D = os.path.join(new_A, 'new_D')
+ new_A_mu = os.path.join(new_A, 'mu')
+ new_A_new_mu = os.path.join(new_A, 'new_mu')
+
+ # Copy A, then copy a dir and a file from within the copy itself.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', A, new_A)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', new_A_D, new_A_new_D)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', new_A_mu, new_A_new_mu)
+
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ 'new_A' : Item(verb='Adding'),
+ 'new_A/new_D' : Item(verb='Adding'),
+ 'new_A/new_mu' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+
+ # Unlike move_below_move, the originals (new_A/D, new_A/mu) survive.
+ expected_status.add({
+ 'new_A' : Item(status=' ', wc_rev='2'),
+ 'new_A/new_D' : Item(status=' ', wc_rev='2'),
+ 'new_A/new_D/gamma' : Item(status=' ', wc_rev='2'),
+ 'new_A/new_D/G' : Item(status=' ', wc_rev='2'),
+ 'new_A/new_D/G/pi' : Item(status=' ', wc_rev='2'),
+ 'new_A/new_D/G/rho' : Item(status=' ', wc_rev='2'),
+ 'new_A/new_D/G/tau' : Item(status=' ', wc_rev='2'),
+ 'new_A/new_D/H' : Item(status=' ', wc_rev='2'),
+ 'new_A/new_D/H/chi' : Item(status=' ', wc_rev='2'),
+ 'new_A/new_D/H/omega': Item(status=' ', wc_rev='2'),
+ 'new_A/new_D/H/psi' : Item(status=' ', wc_rev='2'),
+ 'new_A/D' : Item(status=' ', wc_rev='2'),
+ 'new_A/D/H' : Item(status=' ', wc_rev='2'),
+ 'new_A/D/H/chi' : Item(status=' ', wc_rev='2'),
+ 'new_A/D/H/omega' : Item(status=' ', wc_rev='2'),
+ 'new_A/D/H/psi' : Item(status=' ', wc_rev='2'),
+ 'new_A/D/G' : Item(status=' ', wc_rev='2'),
+ 'new_A/D/G/rho' : Item(status=' ', wc_rev='2'),
+ 'new_A/D/G/pi' : Item(status=' ', wc_rev='2'),
+ 'new_A/D/G/tau' : Item(status=' ', wc_rev='2'),
+ 'new_A/D/gamma' : Item(status=' ', wc_rev='2'),
+ 'new_A/new_mu' : Item(status=' ', wc_rev='2'),
+ 'new_A/B' : Item(status=' ', wc_rev='2'),
+ 'new_A/B/E' : Item(status=' ', wc_rev='2'),
+ 'new_A/B/E/alpha' : Item(status=' ', wc_rev='2'),
+ 'new_A/B/E/beta' : Item(status=' ', wc_rev='2'),
+ 'new_A/B/F' : Item(status=' ', wc_rev='2'),
+ 'new_A/B/lambda' : Item(status=' ', wc_rev='2'),
+ 'new_A/C' : Item(status=' ', wc_rev='2'),
+ 'new_A/mu' : Item(status=' ', wc_rev='2'),
+ })
+
+ svntest.actions.run_and_verify_commit(sbox.wc_dir,
+ expected_output,
+ expected_status)
+
+def move_below_move(sbox):
+ "move a dir below a moved dir"
+ sbox.build()
+
+ A = sbox.ospath('A')
+ new_A = sbox.ospath('new_A')
+ new_A_D = os.path.join(new_A, 'D')
+ new_A_new_D = os.path.join(new_A, 'new_D')
+ new_A_mu = os.path.join(new_A, 'mu')
+ new_A_new_mu = os.path.join(new_A, 'new_mu')
+
+ # Move A, then move a dir and a file within the moved tree itself.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mv', A, new_A)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mv', new_A_D, new_A_new_D)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mv', new_A_mu, new_A_new_mu)
+
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ 'A' : Item(verb='Deleting'),
+ 'new_A/D' : Item(verb='Deleting'),
+ 'new_A/mu' : Item(verb='Deleting'),
+ 'new_A' : Item(verb='Adding'),
+ 'new_A/new_D' : Item(verb='Adding'),
+ 'new_A/new_mu' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+
+ expected_status.add({
+ 'new_A' : Item(status=' ', wc_rev='2'),
+ 'new_A/new_D' : Item(status=' ', wc_rev='2'),
+ 'new_A/new_D/gamma' : Item(status=' ', wc_rev='2'),
+ 'new_A/new_D/G' : Item(status=' ', wc_rev='2'),
+ 'new_A/new_D/G/pi' : Item(status=' ', wc_rev='2'),
+ 'new_A/new_D/G/rho' : Item(status=' ', wc_rev='2'),
+ 'new_A/new_D/G/tau' : Item(status=' ', wc_rev='2'),
+ 'new_A/new_D/H' : Item(status=' ', wc_rev='2'),
+ 'new_A/new_D/H/chi' : Item(status=' ', wc_rev='2'),
+ 'new_A/new_D/H/omega': Item(status=' ', wc_rev='2'),
+ 'new_A/new_D/H/psi' : Item(status=' ', wc_rev='2'),
+ 'new_A/new_mu' : Item(status=' ', wc_rev='2'),
+ 'new_A/B' : Item(status=' ', wc_rev='2'),
+ 'new_A/B/E' : Item(status=' ', wc_rev='2'),
+ 'new_A/B/E/alpha' : Item(status=' ', wc_rev='2'),
+ 'new_A/B/E/beta' : Item(status=' ', wc_rev='2'),
+ 'new_A/B/F' : Item(status=' ', wc_rev='2'),
+ 'new_A/B/lambda' : Item(status=' ', wc_rev='2'),
+ 'new_A/C' : Item(status=' ', wc_rev='2'),
+ })
+
+ # The entire original A tree is gone after the move.
+ expected_status.remove('A', 'A/D', 'A/D/gamma', 'A/D/G', 'A/D/G/pi',
+ 'A/D/G/rho', 'A/D/G/tau', 'A/D/H', 'A/D/H/chi',
+ 'A/D/H/omega', 'A/D/H/psi', 'A/B', 'A/B/E',
+ 'A/B/E/alpha', 'A/B/E/beta', 'A/B/F', 'A/B/lambda',
+ 'A/C', 'A/mu')
+
+ svntest.actions.run_and_verify_commit(sbox.wc_dir,
+ expected_output,
+ expected_status)
+
+
+def reverse_merge_move(sbox):
+ """reverse merge move"""
+
+ # Alias for svntest.actions.run_and_verify_svn
+ rav_svn = svntest.actions.run_and_verify_svn
+
+ wc_dir = sbox.wc_dir
+ a_dir = sbox.ospath('A')
+ a_repo_url = sbox.repo_url + '/A'
+ sbox.build()
+
+ # Create another working copy path and checkout.
+ wc2_dir = sbox.add_wc_path('2')
+ rav_svn(None, [], 'co', sbox.repo_url, wc2_dir)
+
+ # Update working directory and ensure that we are at revision 1.
+ rav_svn(exp_noop_up_out(1), [], 'up', wc_dir)
+
+ # Add new folder and file, later commit
+ new_path = os.path.join(a_dir, 'New')
+ os.mkdir(new_path)
+ first_path = os.path.join(new_path, 'first')
+ svntest.main.file_append(first_path, 'appended first text')
+ svntest.main.run_svn(None, "add", new_path)
+ rav_svn(None, [], 'ci', wc_dir, '-m', 'Add new folder %s' % new_path)
+ rav_svn(exp_noop_up_out(2), [], 'up', wc_dir)
+
+ # Reverse merge to revert previous changes and commit
+ rav_svn(None, [], 'merge', '-c', '-2', a_repo_url, a_dir)
+ rav_svn(None, [], 'ci', '-m', 'Reverting svn merge -c -2.', a_dir)
+ rav_svn(exp_noop_up_out(3), [], 'up', wc_dir)
+
+ # Reverse merge again to undo last revert.
+ rav_svn(None, [], 'merge', '-c', '-3', a_repo_url, a_dir)
+
+ # Move new added file to another one and commit.
+ second_path = os.path.join(new_path, 'second')
+ rav_svn(None, [], 'move', first_path, second_path)
+ # NOTE(review): the '||' in this pattern matches the empty string, so
+ # the expected-output check is effectively vacuous -- confirm whether
+ # 'Adding.*New|Adding.*first|Committed revision 4.' was intended.
+ rav_svn("Adding.*New|Adding.*first||Committed revision 4.", [],
+ 'ci', '-m',
+ 'Revert svn merge. svn mv %s %s.' % (first_path, second_path), a_dir)
+
+ # Update second working copy. There was a bug (at least on the 1.6.x
+ # branch) in which this update received both "first" and "second".
+ expected_output = svntest.wc.State(wc2_dir, {
+ 'A/New' : Item(status='A '),
+ 'A/New/second' : Item(status='A '),
+ })
+ svntest.actions.run_and_verify_update(wc2_dir,
+ expected_output,
+ None,
+ None)
+
@Issue(3699)
def nonrecursive_commit_of_copy(sbox):
  """commit only top of copy; check child behavior"""

  sbox.build()
  wc_dir = sbox.wc_dir

  # Copy A to A_new, then modify the copy: add a second copy of G inside
  # it and delete two of its subtrees.
  main.run_svn(None, 'cp', sbox.ospath('A'),
               sbox.ospath('A_new'))
  main.run_svn(None, 'cp', sbox.ospath('A/D/G'),
               sbox.ospath('A_new/G_new'))
  main.run_svn(None, 'rm', sbox.ospath('A_new/C'))
  main.run_svn(None, 'rm', sbox.ospath('A_new/B/E'))

  # Before the commit, everything under A_new is part of one pending
  # copy operation: copied='+' with no committed revision.
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.add({
    'A_new'             : Item(status='A ', copied='+', wc_rev='-'),
    'A_new/D'           : Item(status=' ', copied='+', wc_rev='-'),
    'A_new/D/G'         : Item(status=' ', copied='+', wc_rev='-'),
    'A_new/D/G/pi'      : Item(status=' ', copied='+', wc_rev='-'),
    'A_new/D/G/rho'     : Item(status=' ', copied='+', wc_rev='-'),
    'A_new/D/G/tau'     : Item(status=' ', copied='+', wc_rev='-'),
    'A_new/D/H'         : Item(status=' ', copied='+', wc_rev='-'),
    'A_new/D/H/psi'     : Item(status=' ', copied='+', wc_rev='-'),
    'A_new/D/H/chi'     : Item(status=' ', copied='+', wc_rev='-'),
    'A_new/D/H/omega'   : Item(status=' ', copied='+', wc_rev='-'),
    'A_new/D/gamma'     : Item(status=' ', copied='+', wc_rev='-'),
    'A_new/B'           : Item(status=' ', copied='+', wc_rev='-'),
    'A_new/B/lambda'    : Item(status=' ', copied='+', wc_rev='-'),
    'A_new/B/E'         : Item(status='D ', copied='+', wc_rev='-'),
    'A_new/B/E/alpha'   : Item(status='D ', copied='+', wc_rev='-'),
    'A_new/B/E/beta'    : Item(status='D ', copied='+', wc_rev='-'),
    'A_new/B/F'         : Item(status=' ', copied='+', wc_rev='-'),
    'A_new/mu'          : Item(status=' ', copied='+', wc_rev='-'),
    'A_new/C'           : Item(status='D ', copied='+', wc_rev='-'),
    'A_new/G_new'       : Item(status='A ', copied='+', wc_rev='-'),
    'A_new/G_new/pi'    : Item(status=' ', copied='+', wc_rev='-'),
    'A_new/G_new/rho'   : Item(status=' ', copied='+', wc_rev='-'),
    'A_new/G_new/tau'   : Item(status=' ', copied='+', wc_rev='-'),
    })


  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # A depth-immediates commit of A_new sends only the copy root.
  expected_output = svntest.wc.State(wc_dir, {
    'A_new': Item(verb='Adding'),
    })

  # These nodes are added by the commit
  expected_status.tweak('A_new', 'A_new/D', 'A_new/D/G', 'A_new/D/G/pi',
                        'A_new/D/G/rho', 'A_new/D/G/tau', 'A_new/D/H',
                        'A_new/D/H/psi', 'A_new/D/H/chi', 'A_new/D/H/omega',
                        'A_new/D/gamma', 'A_new/B', 'A_new/B/lambda',
                        'A_new/B/F', 'A_new/mu',
                        status=' ', copied=None, wc_rev='2')

  # And these are now normal deletes, because their parent was committed.
  expected_status.tweak('A_new/C', 'A_new/B/E', 'A_new/B/E/alpha',
                        'A_new/B/E/beta', copied=None, wc_rev='2')

  # 'A_new/G_new' and everything below should still be added
  # as their operation root was not committed
  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output,
                                        expected_status,
                                        [],
                                        wc_dir, '--depth', 'immediates')
+
# Regression test for issue #3474 - making a new subdir, moving files into it
# and then renaming the subdir, breaks history of the moved files.
@Issue(3474)
def copy_added_dir_with_copy(sbox):
  """copy/mv of new dir with copied file keeps history"""

  sbox.build(read_only=True)
  wc_dir = sbox.wc_dir

  new_dir = sbox.ospath('NewDir')
  new_dir2 = sbox.ospath('NewDir2')
  new_dir3 = sbox.ospath('NewDir3')

  # Alias for svntest.actions.run_and_verify_svn
  rav_svn = svntest.actions.run_and_verify_svn

  # New dir with a copied (history-carrying) file inside, then copy the
  # whole new dir.
  rav_svn(None, [], 'mkdir', new_dir)
  rav_svn(None, [], 'cp', sbox.ospath('A/mu'), new_dir)
  rav_svn(None, [], 'cp', new_dir, new_dir2)

  # The dirs are plain additions (wc_rev=0) but the files inside must
  # keep their copy history (copied='+').
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)

  expected_status.add(
    {
      'NewDir'            : Item(status='A ', wc_rev='0'),
      'NewDir/mu'         : Item(status='A ', copied='+', wc_rev='-'),
      'NewDir2'           : Item(status='A ', wc_rev='0'),
      'NewDir2/mu'        : Item(status='A ', copied='+', wc_rev='-'),
    })

  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # move of added dir also retains copy history of children
  rav_svn(None, [], 'mv', new_dir, new_dir3)
  expected_status.remove('NewDir', 'NewDir/mu')
  expected_status.add(
    {
      'NewDir3'           : Item(status='A ', wc_rev='0'),
      'NewDir3/mu'        : Item(status='A ', copied='+', wc_rev='-'),
    })
  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
@Issue(3303)
def copy_broken_symlink(sbox):
  """copy broken symlink"""

  ## See http://subversion.tigris.org/issues/show_bug.cgi?id=3303. ##

  sbox.build()
  wc_dir = sbox.wc_dir

  new_symlink = sbox.ospath('new_symlink')
  copied_symlink = sbox.ospath('copied_symlink')

  # Alias for svntest.actions.run_and_verify_svn
  rav_svn = svntest.actions.run_and_verify_svn

  # 'linktarget' does not exist, so this symlink is dangling; copying it
  # must still work.
  sbox.simple_add_symlink('linktarget', 'new_symlink')

  rav_svn(None, [], 'cp', new_symlink, copied_symlink)

  # Check whether both new_symlink and copied_symlink are added to the
  # working copy
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)

  expected_status.add(
    {
      'new_symlink'       : Item(status='A ', wc_rev='0'),
      'copied_symlink'    : Item(status='A ', wc_rev='0'),
    })

  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
def move_dir_containing_move(sbox):
  """move a directory containing moved node"""

  # Move a file and a dir inside A/B, then move A/B itself (twice), and
  # verify that the move bookkeeping (moved_to/moved_from) follows the
  # parent move at every step, including through the final commit.
  sbox.build()
  svntest.actions.run_and_verify_svn(None, [], 'mv',
                                     sbox.ospath('A/B/E/alpha'),
                                     sbox.ospath('A/B/E/alpha_moved'))

  svntest.actions.run_and_verify_svn(None, [], 'mv',
                                     sbox.ospath('A/B/F'),
                                     sbox.ospath('A/B/F_moved'))

  svntest.actions.run_and_verify_svn(None, [], 'mv',
                                     sbox.ospath('A/B'),
                                     sbox.ospath('A/B_tmp'))

  expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
  expected_status.tweak('A/B', status='D ', moved_to='A/B_tmp')
  expected_status.tweak('A/B/E',
                        'A/B/E/alpha',
                        'A/B/E/beta',
                        'A/B/F',
                        'A/B/lambda',
                        status='D ')
  # Inside the moved tree the inner moves are now expressed relative to
  # the new parent path A/B_tmp.
  expected_status.add({
    'A/B_tmp'               : Item(status='A ', copied='+', wc_rev='-',
                                   moved_from='A/B'),
    # alpha has a revision that isn't reported by status.
    'A/B_tmp/E'             : Item(status=' ', copied='+', wc_rev='-'),
    'A/B_tmp/E/alpha'       : Item(status='D ', copied='+', wc_rev='-',
                                   moved_to='A/B_tmp/E/alpha_moved'),
    'A/B_tmp/E/alpha_moved' : Item(status='A ', copied='+', wc_rev='-',
                                   moved_from='A/B_tmp/E/alpha'),
    'A/B_tmp/E/beta'        : Item(status=' ', copied='+', wc_rev='-'),
    'A/B_tmp/F'             : Item(status='D ', copied='+', wc_rev='-',
                                   moved_to='A/B_tmp/F_moved'),
    'A/B_tmp/F_moved'       : Item(status='A ', copied='+', wc_rev='-',
                                   moved_from='A/B_tmp/F'),
    'A/B_tmp/lambda'        : Item(status=' ', copied='+', wc_rev='-'),
  })

  svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status)

  # Move the already-moved dir again; all nested move targets must be
  # rewritten from A/B_tmp to A/B_moved.
  svntest.actions.run_and_verify_svn(None, [], 'mv',
                                     sbox.ospath('A/B_tmp'),
                                     sbox.ospath('A/B_moved'))
  expected_status.tweak('A/B', moved_to='A/B_moved')
  expected_status.remove('A/B_tmp',
                         'A/B_tmp/E',
                         'A/B_tmp/E/alpha',
                         'A/B_tmp/E/alpha_moved',
                         'A/B_tmp/E/beta',
                         'A/B_tmp/F',
                         'A/B_tmp/F_moved',
                         'A/B_tmp/lambda')
  expected_status.add({
    'A/B_moved'               : Item(status='A ', copied='+', wc_rev='-',
                                     moved_from='A/B'),
    'A/B_moved/E'             : Item(status=' ', copied='+', wc_rev='-'),
    'A/B_moved/E/alpha'       : Item(status='D ', copied='+', wc_rev='-',
                                     moved_to='A/B_moved/E/alpha_moved'),
    'A/B_moved/E/alpha_moved' : Item(status='A ', copied='+', wc_rev='-',
                                     moved_from='A/B_moved/E/alpha'),
    'A/B_moved/E/beta'        : Item(status=' ', copied='+', wc_rev='-'),
    'A/B_moved/F'             : Item(status='D ', copied='+', wc_rev='-',
                                     moved_to='A/B_moved/F_moved'),
    'A/B_moved/F_moved'       : Item(status='A ', copied='+', wc_rev='-',
                                     moved_from='A/B_moved/F'),
    'A/B_moved/lambda'        : Item(status=' ', copied='+', wc_rev='-'),
  })

  svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status)

  # Commit: deletes and adds are sent; after the commit the move
  # scheduling is gone and everything is a plain r2 node.
  expected_output = svntest.wc.State(sbox.wc_dir, {
    'A/B'                    : Item(verb='Deleting'),
    'A/B_moved'              : Item(verb='Adding'),
    'A/B_moved/E/alpha'      : Item(verb='Deleting'),
    'A/B_moved/E/alpha_moved': Item(verb='Adding'),
    'A/B_moved/F'            : Item(verb='Deleting'),
    'A/B_moved/F_moved'      : Item(verb='Adding'),
  })

  expected_status.tweak('A/B_moved',
                        'A/B_moved/E',
                        'A/B_moved/E/alpha_moved',
                        'A/B_moved/E/beta',
                        'A/B_moved/F_moved',
                        'A/B_moved/lambda',
                        status=' ', copied=None, wc_rev='2')
  expected_status.remove('A/B',
                         'A/B/E',
                         'A/B/E/alpha',
                         'A/B/E/beta',
                         'A/B/F',
                         'A/B/lambda',
                         'A/B_moved/E/alpha',
                         'A/B_moved/F')
  expected_status.tweak('A/B_moved', 'A/B_moved/E/alpha_moved',
                        'A/B_moved/F_moved', moved_from=None)
  svntest.actions.run_and_verify_commit(sbox.wc_dir,
                                        expected_output,
                                        expected_status)
+
def copy_dir_with_space(sbox):
  """copy a directory with whitespace to one without"""

  # Exercise copy/move where source and/or target paths contain spaces,
  # over three commits: copy into spaced paths, copy between spaced
  # paths, then move between spaced paths.
  sbox.build()
  wc_dir = sbox.wc_dir

  svntest.actions.run_and_verify_svn(None, [], 'cp',
                                     sbox.ospath('A/B/E'),
                                     sbox.ospath('E with spaces'))

  svntest.actions.run_and_verify_svn(None, [], 'cp',
                                     sbox.ospath('A/B/E/alpha'),
                                     sbox.ospath('E with spaces/al pha'))

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_output = svntest.wc.State(wc_dir, {
    'E with spaces'        : Item(verb='Adding'),
    'E with spaces/al pha' : Item(verb='Adding'),
    })
  expected_status.add({
    'E with spaces'        : Item(status=' ', wc_rev='2'),
    'E with spaces/alpha'  : Item(status=' ', wc_rev='2'),
    'E with spaces/beta'   : Item(status=' ', wc_rev='2'),
    'E with spaces/al pha' : Item(status=' ', wc_rev='2'),
    })
  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output,
                                        expected_status)

  # r3: copy a committed spaced dir and a spaced file inside it.
  svntest.actions.run_and_verify_svn(None, [], 'cp',
                                     sbox.ospath('E with spaces'),
                                     sbox.ospath('E also spaces')
                                     )

  svntest.actions.run_and_verify_svn(None, [], 'cp',
                                     sbox.ospath('E with spaces/al pha'),
                                     sbox.ospath('E also spaces/al b')
                                     )

  expected_output = svntest.wc.State(wc_dir, {
    'E also spaces'     : Item(verb='Adding'),
    'E also spaces/al b': Item(verb='Adding'),
    })
  expected_status.add({
    'E also spaces'     : Item(status=' ', wc_rev='3'),
    'E also spaces/beta': Item(status=' ', wc_rev='3'),
    'E also spaces/al b': Item(status=' ', wc_rev='3'),
    'E also spaces/alpha': Item(status=' ', wc_rev='3'),
    'E also spaces/al pha': Item(status=' ', wc_rev='3'),
    })
  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output,
                                        expected_status)

  # r4: move a spaced dir and move a spaced file out of it.
  svntest.actions.run_and_verify_svn(None, [], 'mv',
                                     sbox.ospath('E with spaces'),
                                     sbox.ospath('E new spaces')
                                     )

  svntest.actions.run_and_verify_svn(None, [], 'mv',
                                     sbox.ospath('E new spaces/al pha'),
                                     sbox.ospath('E also spaces/al c')
                                     )

  expected_output = svntest.wc.State(wc_dir, {
    'E with spaces'     : Item(verb='Deleting'),
    'E also spaces/al c': Item(verb='Adding'),
    'E new spaces'      : Item(verb='Adding'),
    'E new spaces/al pha': Item(verb='Deleting'),
    })
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.add({
    'E also spaces'     : Item(status=' ', wc_rev='3'),
    'E also spaces/beta': Item(status=' ', wc_rev='3'),
    'E also spaces/al b': Item(status=' ', wc_rev='3'),
    'E also spaces/al c': Item(status=' ', wc_rev='4'),
    'E also spaces/alpha': Item(status=' ', wc_rev='3'),
    'E also spaces/al pha': Item(status=' ', wc_rev='3'),
    'E new spaces'      : Item(status=' ', wc_rev='4'),
    'E new spaces/alpha': Item(status=' ', wc_rev='4'),
    'E new spaces/beta' : Item(status=' ', wc_rev='4'),
    })
  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output,
                                        expected_status)
+
# Regression test for issue #3676
@Issue(3676)
def changed_data_should_match_checkout(sbox):
  """changed data after commit should match checkout"""

  # Copy A/B/E to E_new, commit and update, then compare verbose status
  # of the updated working copy against a completely fresh checkout.
  sbox.build()
  wc_dir = sbox.wc_dir
  copy_source = sbox.ospath('A/B/E')
  copy_target = sbox.ospath('E_new')

  fresh_wc = sbox.add_wc_path('verify')

  svntest.actions.run_and_verify_svn(None, [], 'copy',
                                     copy_source, copy_target)

  sbox.simple_commit()

  svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)

  svntest.actions.run_and_verify_svn(None, [], 'co', sbox.repo_url, fresh_wc)

  # Collect verbose status from the fresh checkout ...
  saved_cwd = os.getcwd()
  os.chdir(fresh_wc)

  rv, checkout_status, err = main.run_svn(None, 'status', '-v')

  os.chdir(saved_cwd)

  # ... and require the updated working copy to produce the same lines
  # (order-insensitively).
  os.chdir(wc_dir)
  checkout_status = svntest.verify.UnorderedOutput(checkout_status)
  svntest.actions.run_and_verify_svn(checkout_status, [], 'status', '-v')
  os.chdir(saved_cwd)
+
# Regression test for issue #3676 for copies including directories
@Issue(3676)
def changed_dir_data_should_match_checkout(sbox):
  """changed dir after commit should match checkout"""

  # Copy the directory A/B to B_new, commit and update, then compare
  # verbose status of the updated working copy against a fresh checkout.
  sbox.build()
  wc_dir = sbox.wc_dir
  A_B = sbox.ospath('A/B')
  B_new = sbox.ospath('B_new')

  verify_dir = sbox.add_wc_path('verify')

  svntest.actions.run_and_verify_svn(None, [], 'copy', A_B, B_new)

  sbox.simple_commit()

  svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)

  svntest.actions.run_and_verify_svn(None, [], 'co', sbox.repo_url, verify_dir)

  # Verbose status from the fresh checkout.
  was_cwd = os.getcwd()
  os.chdir(verify_dir)

  rv, verify_out, err = main.run_svn(None, 'status', '-v')

  os.chdir(was_cwd)
  # Verbose status from the updated working copy.  (Fixed PEP 8 E211:
  # no whitespace between callable and its argument list.)
  os.chdir(wc_dir)
  rv, verify_out2, err = main.run_svn(None, 'status', '-v')
  os.chdir(was_cwd)

  # The order of the status output is not absolutely defined, but
  # otherwise should match
  svntest.verify.verify_outputs(None,
                                sorted(verify_out2), None,
                                sorted(verify_out), None)
+
def move_added_nodes(sbox):
  """move added nodes"""

  # Moving nodes that are merely scheduled for addition (never
  # committed) must keep them as plain additions (wc_rev=0, no copy
  # history), wherever they are moved.
  sbox.build(read_only=True)

  svntest.actions.run_and_verify_svn(None, [], 'mkdir',
                                     sbox.ospath('X'),
                                     sbox.ospath('X/Y'))

  expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
  expected_status.add({
    'X'   : Item(status='A ', wc_rev='0'),
    'X/Y' : Item(status='A ', wc_rev='0'),
    })
  svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status)

  # Rename within the added parent.
  svntest.actions.run_and_verify_svn(None, [], 'mv',
                                     sbox.ospath('X/Y'),
                                     sbox.ospath('X/Z'))
  expected_status.remove('X/Y')
  expected_status.add({'X/Z' : Item(status='A ', wc_rev='0')})
  svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status)

  # Move out of the added parent to the WC root.
  svntest.actions.run_and_verify_svn(None, [], 'mv',
                                     sbox.ospath('X/Z'),
                                     sbox.ospath('Z'))
  expected_status.remove('X/Z')
  expected_status.add({'Z' : Item(status='A ', wc_rev='0')})
  svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status)

  # And back into the added parent again.
  svntest.actions.run_and_verify_svn(None, [], 'mv',
                                     sbox.ospath('Z'),
                                     sbox.ospath('X/Z'))
  expected_status.remove('Z')
  expected_status.add({'X/Z' : Item(status='A ', wc_rev='0')})
  svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status)
+
def copy_over_deleted_dir(sbox):
  "copy a directory over a deleted directory"
  sbox.build(read_only = True)

  # Schedule A/B for deletion, then copy A/D on top of the deleted
  # path; only checks that the commands succeed.
  target = sbox.ospath('A/B')
  main.run_svn(None, 'rm', target)
  main.run_svn(None, 'cp', sbox.ospath('A/D'), target)
+
@Issue(3314)
def mixed_rev_copy_del(sbox):
  """copy mixed-rev and delete children"""

  sbox.build()
  wc_dir = sbox.wc_dir

  # Delete and commit A/B/E/alpha
  svntest.main.run_svn(None, 'rm', sbox.ospath('A/B/E/alpha'))
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/B/E/alpha', status='D ')
  svntest.actions.run_and_verify_status(wc_dir, expected_status)
  expected_output = svntest.wc.State(wc_dir, {
    'A/B/E/alpha': Item(verb='Deleting'),
    })
  expected_status.remove('A/B/E/alpha')
  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output,
                                        expected_status)

  # Update to r2, then update A/B/E/alpha and A/B/E/beta to r1
  # (alpha exists again at r1, so the targeted update brings it back).
  svntest.main.run_svn(None, 'up', wc_dir)
  expected_status.tweak(wc_rev=2)
  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  expected_output = svntest.wc.State(wc_dir, {
    'A/B/E/alpha' : Item(status='A '),
    })

  expected_status.add({
    'A/B/E/alpha' : Item(status=' ', wc_rev=1),
    })
  expected_status.tweak('A/B/E/beta', wc_rev=1)
  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output, None,
                                        expected_status, [], False,
                                        '-r1',
                                        sbox.ospath('A/B/E/alpha'),
                                        sbox.ospath('A/B/E/beta'))

  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Copy A/B/E to A/B/E_copy: a mixed-revision copy (E at r2, its
  # children at r1).
  svntest.actions.run_and_verify_svn(None, [], 'cp',
                                     sbox.ospath('A/B/E'),
                                     sbox.ospath('A/B/E_copy'))
  expected_status.add({
    'A/B/E_copy'       : Item(status='A ', copied='+', wc_rev='-'),
    # In the entries world mixed revision copies have only a single op_root
    'A/B/E_copy/alpha' : Item(status='A ', copied='+', wc_rev='-',
                              entry_status=' '),
    'A/B/E_copy/beta'  : Item(status='A ', copied='+', wc_rev='-',
                              entry_status=' '),
    })
  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Delete A/B/E_copy/alpha and A/B/E_copy/beta
  svntest.main.run_svn(None, 'rm', '--force',
                       sbox.ospath('A/B/E_copy/alpha'),
                       sbox.ospath('A/B/E_copy/beta'))
  expected_status.tweak('A/B/E_copy/alpha', 'A/B/E_copy/beta', status='D ',
                        entry_status=None)
  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Commit: only beta needs an explicit Delete (alpha does not exist in
  # the copy source at E's revision, r2).
  expected_output = svntest.wc.State(wc_dir, {
    'A/B/E_copy'      : Item(verb='Adding'),
    'A/B/E_copy/beta' : Item(verb='Deleting'),
    })
  expected_status.tweak('A/B/E_copy', wc_rev=3, copied=None, status=' ')
  expected_status.remove('A/B/E_copy/alpha', 'A/B/E_copy/beta')
  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output,
                                        expected_status)
+
def copy_delete_undo(sbox, use_revert):
  "copy, delete child, undo"

  # Shared driver for copy_delete_delete/copy_delete_revert: copy a dir,
  # delete one child of the copy, then undo the whole copy either via
  # revert+rmtree (use_revert=True) or via rm --force (use_revert=False),
  # and finally check the path can be reused by a fresh copy.
  sbox.build(read_only=True)
  wc_dir = sbox.wc_dir

  # Copy directory with children
  svntest.main.run_svn(wc_dir, 'copy',
                       sbox.ospath('A/B/E'), sbox.ospath('A/B/E-copied'))
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.add({
    'A/B/E-copied'       : Item(status='A ', copied='+', wc_rev='-'),
    'A/B/E-copied/alpha' : Item(status=' ', copied='+', wc_rev='-'),
    'A/B/E-copied/beta'  : Item(status=' ', copied='+', wc_rev='-'),
    })
  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Delete a child
  svntest.main.run_svn(wc_dir, 'rm', sbox.ospath('A/B/E-copied/alpha'))
  expected_status.tweak('A/B/E-copied/alpha', status='D ')
  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Undo the whole copy
  if (use_revert):
    svntest.main.run_svn(wc_dir, 'revert', '--recursive',
                         sbox.ospath('A/B/E-copied'))
    svntest.main.safe_rmtree(sbox.ospath('A/B/E-copied'))
  else:
    svntest.main.run_svn(wc_dir, 'rm', '--force', sbox.ospath('A/B/E-copied'))
  expected_status.remove('A/B/E-copied',
                         'A/B/E-copied/alpha',
                         'A/B/E-copied/beta')

  # Undo via revert FAILs here because a wq item remains
  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Copy a directory without children.
  svntest.main.run_svn(wc_dir, 'copy',
                       sbox.ospath('A/B/F'), sbox.ospath('A/B/E-copied'))
  expected_status.add({
    'A/B/E-copied'       : Item(status='A ', copied='+', wc_rev='-'),
    })

  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
# Thin test entry point: exercises copy_delete_undo() with deletion
# as the undo mechanism.
def copy_delete_delete(sbox):
  "copy, delete child, delete copy"
  copy_delete_undo(sbox, False)
+
# Thin test entry point: exercises copy_delete_undo() with revert
# as the undo mechanism.
@Issue(3784)
def copy_delete_revert(sbox):
  "copy, delete child, revert copy"
  copy_delete_undo(sbox, True)
+
# See also delete_replaced_file() which does the same for a file.
def delete_replace_delete(sbox):
  "delete a directory scheduled for replacement"

  sbox.build(read_only=True)
  wc_dir = sbox.wc_dir

  # Delete directory with children
  svntest.main.run_svn(wc_dir, 'rm', sbox.ospath('A/B/E'))
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/B/E', 'A/B/E/alpha', 'A/B/E/beta', status='D ')
  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Replace with directory with different children
  svntest.main.run_svn(wc_dir, 'copy',
                       sbox.ospath('A/D/G'), sbox.ospath('A/B/E'))
  expected_status.tweak('A/B/E', status='R ', copied='+', wc_rev='-')
  expected_status.add({
    'A/B/E/pi'  : Item(status=' ', copied='+', wc_rev='-'),
    'A/B/E/rho' : Item(status=' ', copied='+', wc_rev='-'),
    'A/B/E/tau' : Item(status=' ', copied='+', wc_rev='-'),
    })
  # A/B/E/alpha and A/B/E/beta show up as deleted, is that right?
  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Delete replacement: A/B/E drops back to a plain scheduled delete of
  # the BASE node.
  svntest.main.run_svn(wc_dir, 'rm', '--force', sbox.ospath('A/B/E'))
  expected_status.tweak('A/B/E', status='D ', copied=None, wc_rev='1')
  expected_status.remove('A/B/E/pi', 'A/B/E/rho', 'A/B/E/tau')
  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
# Children (direct and nested) of A/B and A/D in the standard greek
# tree, shared by the copy-over-deleted tests below.
A_B_children = ['A/B/lambda', 'A/B/F', 'A/B/E/alpha', 'A/B/E/beta', 'A/B/E']
A_D_children = ['A/D/gamma', 'A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau',
                'A/D/H', 'A/D/H/chi', 'A/D/H/psi', 'A/D/H/omega']
+
def copy_repos_over_deleted_same_kind(sbox):
  "copy repos node over deleted node, same kind"
  sbox.build(read_only = True)
  expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)

  # Schedule a file and a directory (with children) for deletion.
  sbox.simple_rm('iota', 'A/B')
  expected_status.tweak(*(['iota', 'A/B'] + A_B_children), status='D ')

  # Copy a repository file over the deleted file and a repository dir
  # over the deleted dir; each becomes a replacement with history.
  main.run_svn(None, 'cp', sbox.repo_url + '/A/mu', sbox.ospath('iota'))
  expected_status.tweak('iota', status='R ', wc_rev='-', copied='+')
  main.run_svn(None, 'cp', sbox.repo_url + '/A/D', sbox.ospath('A/B'))
  expected_status.tweak('A/B', status='R ', wc_rev='-', copied='+')
  expected_status.add(dict((child.replace('A/D', 'A/B'),
                            Item(status=' ', wc_rev='-', copied='+'))
                           for child in A_D_children))
  svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status)
+
def copy_repos_over_deleted_other_kind(sbox):
  "copy repos node over deleted node, other kind"
  sbox.build(read_only = True)
  expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)

  # Set up some deleted paths
  sbox.simple_rm('iota', 'A/B')
  for path in ['iota', 'A/B'] + A_B_children:
    expected_status.tweak(path, status='D ')

  # Test copying: a repos file over the deleted dir (the dir's deleted
  # children disappear from status), and a repos dir over the deleted
  # file (the dir's children appear).
  main.run_svn(None, 'cp', sbox.repo_url + '/iota', sbox.ospath('A/B'))
  expected_status.tweak('A/B', status='R ', wc_rev='-', copied='+')
  expected_status.remove(*A_B_children)
  main.run_svn(None, 'cp', sbox.repo_url + '/A/B', sbox.ospath('iota'))
  expected_status.tweak('iota', status='R ', wc_rev='-', copied='+')
  for child in A_B_children:
    expected_status.add({ child.replace('A/B', 'iota'):
                          Item(status=' ', wc_rev='-', copied='+')})
  svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status)
+
def copy_wc_over_deleted_same_kind(sbox):
  "copy WC node over a deleted node, same kind"
  sbox.build(read_only = True)
  expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)

  # Schedule a file and a directory (with children) for deletion.
  sbox.simple_rm('iota', 'A/B')
  expected_status.tweak(*(['iota', 'A/B'] + A_B_children), status='D ')

  # Copy a WC file over the deleted file and a WC dir over the deleted
  # dir; each becomes a replacement with history.
  main.run_svn(None, 'cp', sbox.ospath('A/mu'), sbox.ospath('iota'))
  expected_status.tweak('iota', status='R ', wc_rev='-', copied='+')
  main.run_svn(None, 'cp', sbox.ospath('A/D'), sbox.ospath('A/B'))
  expected_status.tweak('A/B', status='R ', wc_rev='-', copied='+')
  expected_status.add(dict((child.replace('A/D', 'A/B'),
                            Item(status=' ', wc_rev='-', copied='+'))
                           for child in A_D_children))
  svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status)
+
def copy_wc_over_deleted_other_kind(sbox):
  "copy WC node over deleted node, other kind"
  sbox.build(read_only = True)
  expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)

  # Set up some deleted paths
  sbox.simple_rm('iota', 'A/B')
  for path in ['iota', 'A/B'] + A_B_children:
    expected_status.tweak(path, status='D ')

  # Test copying: a WC file over the deleted dir (the dir's deleted
  # children disappear from status), and a WC dir over the deleted file
  # (the dir's children appear).
  main.run_svn(None, 'cp', sbox.ospath('A/mu'), sbox.ospath('A/B'))
  expected_status.tweak('A/B', status='R ', wc_rev='-', copied='+')
  expected_status.remove(*A_B_children)
  main.run_svn(None, 'cp', sbox.ospath('A/D'), sbox.ospath('iota'))
  expected_status.tweak('iota', status='R ', wc_rev='-', copied='+')
  for child in A_D_children:
    expected_status.add({ child.replace('A/D', 'iota'):
                          Item(status=' ', wc_rev='-', copied='+')})
  svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status)
+
def move_wc_and_repo_dir_to_itself(sbox):
  "move wc and repo dir to itself"
  sbox.build(read_only = True)
  wc_dir = sbox.ospath('A')
  repo_url = sbox.repo_url + '/A'

  # Moving a node onto itself must fail, both for a working-copy path
  # and for a repository URL; only the error message is checked.
  for target, expected_err in \
      ((wc_dir, '.*Cannot move path.* into itself.*'),
       (repo_url, '.*Cannot move URL.* into itself.*')):
    svntest.actions.run_and_verify_svn([],
                                       expected_err,
                                       'move', target, target)
+
@Issues(2763,3314)
def copy_wc_url_with_absent(sbox):
  "copy wc to url with several absent children"
  sbox.build()
  wc_dir = sbox.wc_dir

  # A/B a normal delete
  sbox.simple_rm('A/B')

  # A/no not-present but in HEAD
  sbox.simple_copy('A/mu', 'A/no')
  sbox.simple_commit('A/no')
  svntest.main.run_svn(None, 'up', '-r', '1', sbox.ospath('A/no'))

  # A/mu not-present and not in HEAD
  sbox.simple_rm('A/mu')
  sbox.simple_commit('A/mu')

  # A/D excluded
  svntest.main.run_svn(None, 'up', '--set-depth', 'exclude',
                       sbox.ospath('A/D'))

  # Test issue #3314 after copy
  sbox.simple_copy('A', 'A_copied')
  svntest.main.run_svn(None, 'ci', sbox.ospath('A_copied'),
                       '-m', 'Commit A_copied')

  # This tests issue #2763
  svntest.main.run_svn(None, 'cp', sbox.ospath('A'),
                       '^/A_tagged', '-m', 'Tag A')

  # And perform a normal commit
  svntest.main.run_svn(None, 'ci', sbox.ospath('A'),
                       '-m', 'Commit A')

  expected_output = svntest.wc.State(wc_dir, {
    'A_tagged'          : Item(status='A '),
    'A_tagged/D'        : Item(status='A '),
    'A_tagged/D/gamma'  : Item(status='A '),
    'A_tagged/D/H'      : Item(status='A '),
    'A_tagged/D/H/psi'  : Item(status='A '),
    'A_tagged/D/H/chi'  : Item(status='A '),
    'A_tagged/D/H/omega': Item(status='A '),
    'A_tagged/D/G'      : Item(status='A '),
    'A_tagged/D/G/pi'   : Item(status='A '),
    'A_tagged/D/G/rho'  : Item(status='A '),
    'A_tagged/D/G/tau'  : Item(status='A '),
    'A_tagged/C'        : Item(status='A '),

    'A/no'              : Item(status='A '),
    })

  # This should bring in A_tagged and A/no
  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        None,
                                        None)

  # And now bring in the excluded nodes from A and A_copied
  expected_output = svntest.wc.State(wc_dir, {
    'A/D'               : Item(status='A '),
    'A/D/G'             : Item(status='A '),
    'A/D/G/pi'          : Item(status='A '),
    'A/D/G/tau'         : Item(status='A '),
    'A/D/G/rho'         : Item(status='A '),
    'A/D/H'             : Item(status='A '),
    'A/D/H/psi'         : Item(status='A '),
    'A/D/H/chi'         : Item(status='A '),
    'A/D/H/omega'       : Item(status='A '),
    'A/D/gamma'         : Item(status='A '),

    'A_copied/D'        : Item(status='A '),
    'A_copied/D/H'      : Item(status='A '),
    'A_copied/D/H/omega': Item(status='A '),
    'A_copied/D/H/psi'  : Item(status='A '),
    'A_copied/D/H/chi'  : Item(status='A '),
    'A_copied/D/G'      : Item(status='A '),
    'A_copied/D/G/tau'  : Item(status='A '),
    'A_copied/D/G/rho'  : Item(status='A '),
    'A_copied/D/G/pi'   : Item(status='A '),
    'A_copied/D/gamma'  : Item(status='A '),
    })
  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        None,
                                        None,
                                        [], False,
                                        wc_dir, '--set-depth', 'infinity')

  # Except for A/no, the 3 directories should now have the same children

  items = {
    ''          : Item(status=' ', wc_rev='6'),
    'C'         : Item(status=' ', wc_rev='6'),
    'D'         : Item(status=' ', wc_rev='6'),
    'D/gamma'   : Item(status=' ', wc_rev='6'),
    'D/H'       : Item(status=' ', wc_rev='6'),
    'D/H/psi'   : Item(status=' ', wc_rev='6'),
    'D/H/chi'   : Item(status=' ', wc_rev='6'),
    'D/H/omega' : Item(status=' ', wc_rev='6'),
    'D/G'       : Item(status=' ', wc_rev='6'),
    'D/G/pi'    : Item(status=' ', wc_rev='6'),
    'D/G/tau'   : Item(status=' ', wc_rev='6'),
    'D/G/rho'   : Item(status=' ', wc_rev='6'),
  }

  expected_status = svntest.wc.State(sbox.ospath('A_copied'), items)
  svntest.actions.run_and_verify_status(sbox.ospath('A_copied'),
                                        expected_status)

  expected_status = svntest.wc.State(sbox.ospath('A_tagged'), items)
  svntest.actions.run_and_verify_status(sbox.ospath('A_tagged'),
                                        expected_status)

  # NOTE(review): this add() mutates the A_tagged state but the variable
  # is immediately rebound to a fresh State below, so 'no' never takes
  # part in the final check and 'A' is verified without its 'no' child.
  # Presumably the add was meant to follow the reassignment — confirm
  # against intended behavior before relying on this verification.
  expected_status.add({
    'no'        : Item(status=' ', wc_rev='6')
  })

  expected_status = svntest.wc.State(sbox.ospath('A'), items)
  svntest.actions.run_and_verify_status(sbox.ospath('A'),
                                        expected_status)
+
+
def copy_url_shortcut(sbox):
  "copy using URL shortcut source"

  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  # Can't use ^/A/D/G shortcut here because wc/X is unversioned.
  svntest.actions.run_and_verify_svn(None, [], 'copy',
                                     sbox.ospath('A/D/G'), sbox.ospath('X'))

  svntest.actions.run_and_verify_svn(None, [], 'rm',
                                     sbox.ospath('X/pi'))

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.add({
    'X'     : Item(status='A ', copied='+', wc_rev='-'),
    'X/pi'  : Item(status='D ', copied='+', wc_rev='-'),
    'X/rho' : Item(status=' ', copied='+', wc_rev='-'),
    'X/tau' : Item(status=' ', copied='+', wc_rev='-'),
    })
  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Can use ^/A/D/G even though X/pi is a delete within a copy.
  svntest.actions.run_and_verify_svn(None, [], 'copy',
                                     '^/A/D/G/pi', sbox.ospath('X/pi'))

  # The delete-within-copy becomes a replacement.
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.add({
    'X'     : Item(status='A ', copied='+', wc_rev='-'),
    'X/pi'  : Item(status='R ', copied='+', wc_rev='-', entry_status=' '),
    'X/rho' : Item(status=' ', copied='+', wc_rev='-'),
    'X/tau' : Item(status=' ', copied='+', wc_rev='-'),
    })

  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
# Regression test for issue #3865: 'svn' on Windows cannot address
# scheduled-for-delete file, if another file differing only in case is
# present on disk
@Issue(3865)
def deleted_file_with_case_clash(sbox):
  """address a deleted file hidden by case clash"""

  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  iota_path = sbox.ospath('iota')
  iota2_path = sbox.ospath('iota2')
  IOTA_path = sbox.ospath('IOTA')
  iota_url = sbox.repo_url + '/iota'

  # Perform a case-only rename in two steps.
  svntest.main.run_svn(None, 'move', iota_path, iota2_path)
  svntest.main.run_svn(None, 'move', iota2_path, IOTA_path)

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.add({
    'iota' : Item(status='D ', wc_rev=1, moved_to='IOTA'),
    'IOTA' : Item(status='A ', copied='+', wc_rev='-', moved_from='iota'),
    })

  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Perform 'info' calls on both the deleted and added paths, to see if
  # we get the correct information. The deleted path is not on disk and
  # hidden by the on-disk case-clashing file, but we should be able to
  # target it explicitly because it's in the wc-db.
  expected_info_iota = {'Path' : re.escape(iota_path),
                        'Schedule' : 'delete',
                        'Copied From URL': None,
                       }
  svntest.actions.run_and_verify_info([expected_info_iota], iota_path)

  expected_info_IOTA = {'Path' : re.escape(IOTA_path),
                        'Schedule' : 'add',
                        'Copied From URL': iota_url,
                       }
  svntest.actions.run_and_verify_info([expected_info_IOTA], IOTA_path)
+
def copy_base_of_deleted(sbox):
  """copy -rBASE deleted"""

  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  # Schedule A/mu for deletion and verify the status.
  svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.ospath('A/mu'))
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/mu', status='D ')
  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Copying the BASE of the deleted node must succeed and produce a
  # copied addition.
  svntest.actions.run_and_verify_svn(None, [], 'cp', '-rBASE',
                                     sbox.ospath('A/mu'), sbox.ospath('A/mu2'))
  expected_status.add({
    'A/mu2' : Item(status='A ', copied='+', wc_rev='-'),
    })
  # The expected status used to be built but never checked, leaving the
  # copy result unverified; actually verify it.
  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
# Regression test for issue #3702: Unable to perform case-only rename
# on windows.
@Issue(3702)
# APR's apr_filepath_merge() with APR_FILEPATH_TRUENAME is broken on OS X.
@XFail(svntest.main.is_os_darwin)
def case_only_rename(sbox):
  """case-only rename"""

  sbox.build()
  wc_dir = sbox.wc_dir

  # Rename a file and a directory to names differing only in case.
  svntest.main.run_svn(None, 'move', sbox.ospath('iota'), sbox.ospath('IoTa'))
  svntest.main.run_svn(None, 'move', sbox.ospath('A/B'), sbox.ospath('A/b'))

  # Each rename must appear as a delete of the old name plus a
  # copied/moved add of the new name, children included for the
  # directory case.
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.add({
    'iota'        : Item(status='D ', wc_rev=1, moved_to='IoTa'),
    'IoTa'        : Item(status='A ', copied='+', wc_rev='-', moved_from='iota'),
    'A/B'         : Item(status='D ', wc_rev='1', moved_to='A/b'),
    'A/B/lambda'  : Item(status='D ', wc_rev='1'),
    'A/B/E'       : Item(status='D ', wc_rev='1'),
    'A/B/E/alpha' : Item(status='D ', wc_rev='1'),
    'A/B/E/beta'  : Item(status='D ', wc_rev='1'),
    'A/B/F'       : Item(status='D ', wc_rev='1'),
    'A/b'         : Item(status='A ', copied='+', wc_rev='-', moved_from='A/B'),
    'A/b/E'       : Item(status='  ', copied='+', wc_rev='-'),
    'A/b/E/beta'  : Item(status='  ', copied='+', wc_rev='-'),
    'A/b/E/alpha' : Item(status='  ', copied='+', wc_rev='-'),
    'A/b/F'       : Item(status='  ', copied='+', wc_rev='-'),
    'A/b/lambda'  : Item(status='  ', copied='+', wc_rev='-'),
    })

  # Verify that the necessary deletes and adds show up in status.
  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
@XFail()
@Issue(3899)
def copy_and_move_conflicts(sbox):
  """copy and move conflicts"""

  # The destination of a copy or move operation should *not* be
  # conflicted, and should contain the "mine-full" contents.

  sbox.build()
  wc = sbox.ospath
  def url(relpath):
    # Repository URL for RELPATH.
    return '/'.join([sbox.repo_url, relpath])

  # Create an assortment of conflicts.
  # text                                 A/B/E/alpha
  # text (resolved by deleting markers)  A/B/E/beta
  # property (dir)                       A/D/H
  # property (file)                      A/D/H/chi
  # tree: local delete, incoming edit    A/D/gamma
  # tree: local edit, incoming delete    A/D/G
  # tree: local add, incoming add        A/Q
  # tree: local missing, incoming edit   A/B/E/sigma

  ### As we improve tree-conflict handling, this test may need some
  ### maintenance.

  # Create a branch for merging.
  run_svn(None, 'cp', url('A'), url('A2'), '-m', make_log_msg()) # r2
  sbox.simple_update()

  # This revision won't be included in the merge, producing a "local
  # missing" tree conflict.
  file_write(wc('A2/B/E/sigma'), "New for merge.\n")
  sbox.simple_add('A2/B/E/sigma')

  sbox.simple_commit('A2') # r3

  # Make "incoming" changes in A2 for the merge
  # incoming edits
  file_append(wc('A2/B/E/alpha'), "Edit for merge\n")
  file_append(wc('A2/B/E/beta'), "Edit for merge\n")
  file_append(wc('A2/B/E/sigma'), "Edit for merge\n")
  sbox.simple_propset('foo', '99', 'A2/D/H')
  sbox.simple_propset('foo', '99', 'A2/D/H/chi')
  # incoming add
  sbox.simple_mkdir('A2/Q')
  file_write(wc('A2/Q/zeta'), "New for merge\n")
  sbox.simple_add('A2/Q/zeta')

  sbox.simple_commit('A2') # r4

  # Make some "local" changes in A before the merge.
  # local edit
  file_append(wc('A/B/E/alpha'), "Local edit\n")
  file_append(wc('A/B/E/beta'), "Local edit\n")
  sbox.simple_propset('foo', '100', 'A/D/H')
  sbox.simple_propset('foo', '100', 'A/D/H/chi')
  # local add
  sbox.simple_mkdir('A/Q')
  file_write(wc('A/Q/sigma'), "New local file\n")
  sbox.simple_add('A/Q/sigma')

  # Make some "incoming" changes in A before the update.
  # incoming edit
  file_append(wc('A/D/gamma'), "Edit for merge\n")
  # incoming delete
  sbox.simple_rm('A/D/G')

  sbox.simple_commit('A') # r5

  # Roll back, make local, uncommitted changes.
  run_svn(None, 'up', '-r', 4, sbox.wc_dir)
  # local delete
  sbox.simple_rm('A/D/gamma')
  # local edit
  file_append(wc('A/D/G/rho'), "Local edit\n")

  # Update to reveal the "local {delete,edit}" tree conflicts,
  # which we can't yet catch when merging.
  sbox.simple_update()

  # Merge just one revision to reveal more conflicts.
  run_svn(None, 'merge', '-c', 4, url('A2'), wc('A'))

  # Resolve one text conflict via marker file deletion.
  os.remove(wc('A/B/E/beta.merge-left.r3'))
  os.remove(wc('A/B/E/beta.working'))
  os.remove(wc('A/B/E/beta'))
  os.rename(wc('A/B/E/beta.merge-right.r4'), wc('A/B/E/beta'))

  # Prepare for local copies and moves.
  sbox.simple_mkdir('copy-dest')
  sbox.simple_mkdir('move-dest')

  # Copy conflict victims.
  sbox.simple_copy('A/B/E/alpha', 'copy-dest')
  sbox.simple_copy('A/D/H', 'copy-dest')
  sbox.simple_copy('A/D/G', 'copy-dest')
  sbox.simple_copy('A/Q', 'copy-dest')

  # Copy directories with conflicted children.
  sbox.simple_copy('A/B', 'copy-dest')
  sbox.simple_copy('A/D', 'copy-dest')

  # Everything copied without conflicts.  The entry_status for D/G is
  # for 1.6 compatibility (see notes/api-errata/1.7/wc003.txt).
  expected_status = svntest.wc.State(wc('copy-dest'), {
    ''            : Item(status='A ', wc_rev=0),
    'B'           : Item(status='A ', copied='+', wc_rev='-'),
    'B/E'         : Item(status='  ', copied='+', wc_rev='-'),
    'B/E/alpha'   : Item(status='  ', copied='+', wc_rev='-'),
    'B/E/beta'    : Item(status='M ', copied='+', wc_rev='-'),
    'B/F'         : Item(status='  ', copied='+', wc_rev='-'),
    'B/lambda'    : Item(status='  ', copied='+', wc_rev='-'),
    'D'           : Item(status='A ', copied='+', wc_rev='-'),
    'D/G'         : Item(status='A ', copied='+', wc_rev='-',
                         entry_status='  '),
    'D/G/pi'      : Item(status='  ', copied='+', wc_rev='-'),
    'D/G/rho'     : Item(status='M ', copied='+', wc_rev='-'),
    'D/G/tau'     : Item(status='  ', copied='+', wc_rev='-'),
    'D/H'         : Item(status='  ', copied='+', wc_rev='-'),
    'D/H/chi'     : Item(status='  ', copied='+', wc_rev='-'),
    'D/H/omega'   : Item(status='  ', copied='+', wc_rev='-'),
    'D/H/psi'     : Item(status='  ', copied='+', wc_rev='-'),
    'D/gamma'     : Item(status='D ', copied='+', wc_rev='-'),
    'G'           : Item(status='A ', copied='+', wc_rev='-'),
    'G/pi'        : Item(status='  ', copied='+', wc_rev='-'),
    'G/rho'       : Item(status='M ', copied='+', wc_rev='-'),
    'G/tau'       : Item(status='  ', copied='+', wc_rev='-'),
    'H'           : Item(status='A ', copied='+', wc_rev='-'),
    'H/chi'       : Item(status='  ', copied='+', wc_rev='-'),
    'H/omega'     : Item(status='  ', copied='+', wc_rev='-'),
    'H/psi'       : Item(status='  ', copied='+', wc_rev='-'),
    'Q'           : Item(status='A ', copied='+', wc_rev='-'),
    'Q/sigma'     : Item(status='  ', copied='+', wc_rev='-'),
    'alpha'       : Item(status='A ', copied='+', wc_rev='-'),
    })
  svntest.actions.run_and_verify_status(wc('copy-dest'), expected_status)

  # Only the local changes appear at the copy destinations.  Note that
  # B/E/beta had been resolved via marker-file deletion before the copy.
  expected_disk = svntest.wc.State('', {
    'B/E/alpha'   : Item(contents="This is the file 'alpha'.\n"
                                  "Local edit\n"),
    'B/E/beta'    : Item(contents="This is the file 'beta'.\n"
                                  "Edit for merge\n"),
    'B/F'         : Item(),
    'B/lambda'    : Item(contents="This is the file 'lambda'.\n"),
    'D/G/pi'      : Item(contents="This is the file 'pi'.\n"),
    'D/G/rho'     : Item(contents="This is the file 'rho'.\n"
                                  "Local edit\n"),
    'D/G/tau'     : Item(contents="This is the file 'tau'.\n"),
    'D/H'         : Item(props={'foo':'100'}),
    'D/H/chi'     : Item(contents="This is the file 'chi'.\n",
                         props={'foo':'100'}),
    'D/H/omega'   : Item(contents="This is the file 'omega'.\n"),
    'D/H/psi'     : Item(contents="This is the file 'psi'.\n"),
    'G/pi'        : Item(contents="This is the file 'pi'.\n"),
    'G/rho'       : Item(contents="This is the file 'rho'.\n"
                                  "Local edit\n"),
    'G/tau'       : Item(contents="This is the file 'tau'.\n"),
    'H'           : Item(props={'foo':'100'}),
    'H/chi'       : Item(contents="This is the file 'chi'.\n",
                         props={'foo':'100'}),
    'H/omega'     : Item(contents="This is the file 'omega'.\n"),
    'H/psi'       : Item(contents="This is the file 'psi'.\n"),
    'Q/sigma'     : Item(contents="New local file\n"),
    'alpha'       : Item(contents="This is the file 'alpha'.\n"
                                  "Local edit\n"),
    })
  svntest.actions.verify_disk(wc('copy-dest'), expected_disk, True)

  # Move conflict victims.
  sbox.simple_move('A/B/E/alpha', 'move-dest')
  sbox.simple_move('A/D/H', 'move-dest')
  sbox.simple_move('A/D/G', 'move-dest')
  sbox.simple_move('A/Q', 'move-dest')

  # Move directories with conflicted children.
  sbox.simple_move('A/B', 'move-dest')
  sbox.simple_move('A/D', 'move-dest')

  # Expect same status and disk content as at the copy destination, except
  # that A/B/E/alpha, A/D/G, and A/D/H were moved away first.
  expected_status.wc_dir = wc('move-dest')
  expected_status.tweak('B/E/alpha',
                        'D/H',
                        'D/H/chi',
                        'D/H/omega',
                        'D/H/psi',
                        status='D ')
  # A/D/G had been re-added from r4 due to a "local edit, incoming delete"
  # tree conflict, so moving it away has a different effect.
  expected_status.remove('D/G',
                         'D/G/pi',
                         'D/G/rho',
                         'D/G/tau')
  expected_status.tweak('B', moved_from='../A/B')
  expected_status.tweak('D', moved_from='../A/D')
  expected_status.tweak('H', moved_from='D/H')
  expected_status.tweak('Q', moved_from='../A/Q')
  expected_status.tweak('D/H', moved_to='H')
  expected_status.tweak('alpha', moved_from='B/E/alpha')
  expected_status.tweak('B/E/alpha', moved_to='alpha')
  svntest.actions.run_and_verify_status(wc('move-dest'), expected_status)

  expected_disk = svntest.wc.State('', {
    'B/E/beta'    : Item(contents="This is the file 'beta'.\n"
                                  "Edit for merge\n"),
    'B/lambda'    : Item(contents="This is the file 'lambda'.\n"),
    'B/F'         : Item(),
    'H'           : Item(props={'foo':'100'}),
    'H/chi'       : Item(contents="This is the file 'chi'.\n",
                         props={'foo':'100'}),
    'H/psi'       : Item(contents="This is the file 'psi'.\n"),
    'H/omega'     : Item(contents="This is the file 'omega'.\n"),
    'D'           : Item(),
    'G/tau'       : Item(contents="This is the file 'tau'.\n"),
    'G/rho'       : Item(contents="This is the file 'rho'.\n"
                                  "Local edit\n"),
    'G/pi'        : Item(contents="This is the file 'pi'.\n"),
    'Q/sigma'     : Item(contents="New local file\n"),
    'alpha'       : Item(contents="This is the file 'alpha'.\n"
                                  "Local edit\n"),
    })
  svntest.actions.verify_disk(wc('move-dest'), expected_disk, True)
+
def copy_deleted_dir(sbox):
  "try to copy a deleted directory that exists"
  sbox.build(read_only = True)

  # Schedule a file and a directory tree for deletion.
  sbox.simple_rm('iota')
  sbox.simple_rm('A')

  # Any of the following errors is acceptable:
  # E145000 - SVN_ERR_NODE_UNKNOWN_KIND
  # E155035 - SVN_ERR_WC_PATH_UNEXPECTED_STATUS
  # E155010 - SVN_ERR_WC_PATH_NOT_FOUND

  # Copying a scheduled-for-delete node (nothing on disk) must fail
  # with a clean error, not crash.
  svntest.actions.run_and_verify_svn(None,
                                     'svn: (E145000|E155035|E155010): ' +
                                     '(Path \'.*iota\' does not exist)|' +
                                     '(Deleted node .*iota\' copied)',
                                     'cp', sbox.ospath('iota'),
                                     sbox.ospath('new_iota'))

  svntest.actions.run_and_verify_svn(None,
                                     'svn: (E145000|E155035|E155010): ' +
                                     '(Path \'.*D\' does not exist)|' +
                                     '(Deleted node .*D\' copied)',
                                     'cp', sbox.ospath('A/D'),
                                     sbox.ospath('new_D'))

  # Recreate unversioned obstructions at the deleted paths.
  svntest.main.file_write(sbox.ospath('iota'), 'Not iota!')
  os.mkdir(sbox.ospath('A'))
  os.mkdir(sbox.ospath('A/D'))

  # At one time these two invocations raised an assertion.
  svntest.actions.run_and_verify_svn(None,
                                     'svn: (E155035|E155010): ' +
                                     '(Path \'.*iota\' does not exist)|' +
                                     '(Deleted node.* .*iota\' can\'t be.*)',
                                     'cp', sbox.ospath('iota'),
                                     sbox.ospath('new_iota'))
  svntest.actions.run_and_verify_svn(None,
                                     'svn: (E155035|E155010): ' +
                                     '(Path \'.*D\' does not exist)|' +
                                     '(Deleted node.* .*D\' can\'t be.*)',
                                     'cp', sbox.ospath('A/D'),
                                     sbox.ospath('new_D'))
+
@Issue(3631)
def commit_copied_half_of_move(sbox):
  "attempt to commit the copied part of move"
  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  iota_path = sbox.ospath('iota')
  D_path = sbox.ospath('A/D')

  # Each case below attempts to commit only the copied (destination)
  # half of a move; all must fail with E200009.

  # iota -> A/D/iota; verify we cannot commit just A/D/iota
  svntest.actions.run_and_verify_svn(None, [], 'mv', iota_path, D_path)
  expected_error = "svn: E200009: Cannot commit '.*%s' because it was " \
                   "moved from '.*%s'" % (re.escape(sbox.ospath('A/D/iota')),
                                          re.escape(iota_path))
  svntest.actions.run_and_verify_svn(None, expected_error,
                                     'commit', '-m', 'foo',
                                     os.path.join(D_path, 'iota'))

  # verify we cannot commit just A/D
  expected_error = "svn: E200009: Cannot commit '.*%s' because it was " \
                   "moved from '.*%s'" % (re.escape(sbox.ospath('A/D/iota')),
                                          re.escape(iota_path))
  svntest.actions.run_and_verify_svn(None, expected_error,
                                     'commit', '-m', 'foo', D_path)

  # A/D -> A/C/D; verify we cannot commit just A/C
  C_path = sbox.ospath('A/C')

  svntest.actions.run_and_verify_svn(None, [], 'mv', D_path, C_path)
  expected_error = "svn: E200009: Cannot commit '.*%s' because it was moved " \
                   "from '.*%s'" % (re.escape(os.path.join(C_path, "D")),
                                    re.escape(D_path))
  svntest.actions.run_and_verify_svn(None, expected_error,
                                     'commit', '-m', 'foo', C_path)

  # A/C/D/iota -> A/iota; verify that iota's moved-from hasn't changed
  D_iota_path = sbox.ospath('A/C/D/iota')
  A_iota_path = sbox.ospath('A/iota')
  svntest.actions.run_and_verify_svn(None, [], 'mv', D_iota_path,
                                     A_iota_path)
  expected_error = "svn: E200009: Cannot commit '.*%s' because it was " \
                   "moved from '.*%s'" % (re.escape(A_iota_path),
                                          re.escape(iota_path))
  svntest.actions.run_and_verify_svn(None, expected_error,
                                     'commit', '-m', 'foo', A_iota_path)
+
+
@Issue(3631)
def commit_deleted_half_of_move(sbox):
  "attempt to commit the deleted part of move"
  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  iota_path = sbox.ospath('iota')
  A_path = sbox.ospath('A')
  D_path = sbox.ospath('A/D')

  # Each case below attempts to commit only the deleted (source) half
  # of a move; all must fail with E200009.

  # iota -> A/D/iota; verify we cannot commit just iota
  svntest.actions.run_and_verify_svn(None, [], 'mv', iota_path, D_path)

  expected_error = "svn: E200009: Cannot commit '.*%s' because it was moved " \
                   "to '.*%s'" % (re.escape(iota_path),
                                  re.escape(os.path.join(D_path, "iota")))
  svntest.actions.run_and_verify_svn(None, expected_error,
                                     'commit', '-m', 'foo', iota_path)

  # A/D -> C; verify we cannot commit just A
  C_path = sbox.ospath('C')

  svntest.actions.run_and_verify_svn(None, [], 'mv', D_path, C_path)
  expected_error = "svn: E200009: Cannot commit '.*%s' because it was moved " \
                   "to '.*%s'" % (re.escape(D_path), re.escape(C_path))
  svntest.actions.run_and_verify_svn(None, expected_error,
                                     'commit', '-m', 'foo', A_path)
+
@Issue(4026)
def wc_wc_copy_incomplete(sbox):
  "wc-to-wc copy of an incomplete directory"

  sbox.build()
  wc_dir = sbox.wc_dir

  # We don't know what order the copy will do children of A/B so
  # remove files so that only subdirs remain then all children can be
  # marked incomplete.
  sbox.simple_rm('A/B/lambda')
  sbox.simple_commit()
  sbox.simple_update()

  # We don't know whether copy will do E or F first, so make both
  # incomplete
  svntest.actions.set_incomplete(sbox.ospath('A/B/E'), 2)
  svntest.actions.set_incomplete(sbox.ospath('A/B/F'), 2)

  # Copy fails with no changes to wc
  svntest.actions.run_and_verify_svn(None,
                                     'svn: E155035: Cannot handle status',
                                     'copy',
                                     sbox.ospath('A/B/E'),
                                     sbox.ospath('A/B/E2'))
  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
  expected_status.remove('A/B/lambda')
  expected_status.tweak('A/B/E', 'A/B/F', status='! ')
  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Copy fails part way through: B itself is copied before its
  # incomplete children are reached.
  svntest.actions.run_and_verify_svn(None,
                                     'svn: E155035: Cannot handle status',
                                     'copy',
                                     sbox.ospath('A/B'),
                                     sbox.ospath('A/B2'))

  expected_status.add({
    'A/B2'   : Item(status='A ', copied='+', wc_rev='-'),
    'A/B2/E' : Item(status='! ', wc_rev='-'),
    'A/B2/F' : Item(status='! ', wc_rev='-'),
    })
  ### Can't get this to work as copied status of E and F in 1.6
  ### entries tree doesn't match 1.7 status tree
  #svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Commit preserves incomplete status
  expected_output = svntest.wc.State(wc_dir, {
    'A/B2': Item(verb='Adding'),
    })
  expected_status.tweak('A/B2',
                        status='  ', copied=None, wc_rev=3)
  expected_status.tweak('A/B2/E', 'A/B2/F',
                        status='! ', copied=None, wc_rev=3)
  ### E and F are status '!' but the test code ignores them?
  expected_status.remove('A/B2/E', 'A/B2/F')
  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output,
                                        expected_status)
  expected_status.add({
    'A/B2/E' : Item(status='! ', wc_rev=3),
    'A/B2/F' : Item(status='! ', wc_rev=3),
    })

  # Update makes things complete
  expected_output = svntest.wc.State(wc_dir, {
    'A/B2/E'       : Item(status='A '),
    'A/B2/E/alpha' : Item(status='A '),
    'A/B2/E/beta'  : Item(status='A '),
    'A/B2/F'       : Item(status='A '),
    })
  expected_status.tweak(wc_rev=3, status='  ')
  expected_status.add({
    'A/B2/E/alpha' : Item(status='  ', wc_rev=3),
    'A/B2/E/beta'  : Item(status='  ', wc_rev=3),
    })
  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        None,
                                        expected_status)
+
def three_nested_moves(sbox):
  "three nested moves"

  sbox.build()
  wc_dir = sbox.wc_dir

  # Perform a move, a move inside the moved tree, and a move inside
  # that: A/B -> A/B2, then its E -> E2, then E2's alpha -> alpha2.
  for src, dst in [('A/B', 'A/B2'),
                   ('A/B2/E', 'A/B2/E2'),
                   ('A/B2/E2/alpha', 'A/B2/E2/alpha2')]:
    svntest.actions.run_and_verify_svn(None, [], 'mv',
                                       sbox.ospath(src),
                                       sbox.ospath(dst))

  # After commit, everything lives at the new names, all at r2.
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.add({
    'A/B2'           : Item(status='  ', wc_rev=2),
    'A/B2/E2'        : Item(status='  ', wc_rev=2),
    'A/B2/E2/alpha2' : Item(status='  ', wc_rev=2),
    'A/B2/E2/beta'   : Item(status='  ', wc_rev=2),
    'A/B2/F'         : Item(status='  ', wc_rev=2),
    'A/B2/lambda'    : Item(status='  ', wc_rev=2),
    })
  expected_status.remove('A/B', 'A/B/E', 'A/B/E/alpha', 'A/B/E/beta',
                         'A/B/F', 'A/B/lambda')

  # The commit must send a delete for each old name and an add for
  # each new one.
  expected_output = svntest.wc.State(wc_dir, {
    'A/B'            : Item(verb='Deleting'),
    'A/B2'           : Item(verb='Adding'),
    'A/B2/E'         : Item(verb='Deleting'),
    'A/B2/E2'        : Item(verb='Adding'),
    'A/B2/E2/alpha'  : Item(verb='Deleting'),
    'A/B2/E2/alpha2' : Item(verb='Adding'),
    })

  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output,
                                        expected_status)
+
def copy_to_unversioned_parent(sbox):
  "copy to unversioned parent"

  sbox.build()

  # This succeeds
  #svntest.actions.run_and_verify_svn(None, [], 'cp', '--parents',
  #                                   sbox.ospath('A/B'),
  #                                   sbox.ospath('New/B2'))

  # And this currently fails with "The node '.*Unversioned' was not found",
  # while it should succeed or return some error that a GUI client can use.
  os.mkdir(sbox.ospath('Unversioned'))
  svntest.actions.run_and_verify_svn(None, [], 'cp', '--parents',
                                     sbox.ospath('A/B'),
                                     sbox.ospath('Unversioned/B2'))
+
def copy_text_conflict(sbox):
  "copy with a text conflict should not copy markers"

  sbox.build()
  wc_dir = sbox.wc_dir

  # Create a text conflict on A/mu: commit an edit as r2, repeat the
  # edit locally, then update back to r1.
  mu_path = sbox.ospath('A/mu')
  svntest.main.file_append(mu_path, 'appended mu text')

  sbox.simple_commit()
  svntest.main.file_append(mu_path, 'appended mu text')

  sbox.simple_update(revision='1')

  svntest.actions.run_and_verify_svn(None, [], 'cp',
                                     sbox.ospath('A'),
                                     sbox.ospath('A_copied'))

  # The copy destination keeps the conflicted file's local text as a
  # modification ('M'), but none of the conflict marker files.
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/mu', status='C ')
  expected_status.add({
    # The markers in A
    'A/mu.mine'         : Item(status='? '),
    'A/mu.r1'           : Item(status='? '),
    'A/mu.r2'           : Item(status='? '),
    # And what is copied (without markers)
    'A_copied'          : Item(status='A ', copied='+', wc_rev='-'),
    'A_copied/C'        : Item(status='  ', copied='+', wc_rev='-'),
    'A_copied/B'        : Item(status='  ', copied='+', wc_rev='-'),
    'A_copied/B/lambda' : Item(status='  ', copied='+', wc_rev='-'),
    'A_copied/B/E'      : Item(status='  ', copied='+', wc_rev='-'),
    'A_copied/B/E/alpha': Item(status='  ', copied='+', wc_rev='-'),
    'A_copied/B/E/beta' : Item(status='  ', copied='+', wc_rev='-'),
    'A_copied/B/F'      : Item(status='  ', copied='+', wc_rev='-'),
    'A_copied/D'        : Item(status='  ', copied='+', wc_rev='-'),
    'A_copied/D/G'      : Item(status='  ', copied='+', wc_rev='-'),
    'A_copied/D/G/tau'  : Item(status='  ', copied='+', wc_rev='-'),
    'A_copied/D/G/rho'  : Item(status='  ', copied='+', wc_rev='-'),
    'A_copied/D/G/pi'   : Item(status='  ', copied='+', wc_rev='-'),
    'A_copied/D/H'      : Item(status='  ', copied='+', wc_rev='-'),
    'A_copied/D/H/omega': Item(status='  ', copied='+', wc_rev='-'),
    'A_copied/D/H/psi'  : Item(status='  ', copied='+', wc_rev='-'),
    'A_copied/D/H/chi'  : Item(status='  ', copied='+', wc_rev='-'),
    'A_copied/D/gamma'  : Item(status='  ', copied='+', wc_rev='-'),
    'A_copied/mu'       : Item(status='M ', copied='+', wc_rev='-'),
    })
  svntest.actions.run_and_verify_unquiet_status(wc_dir, expected_status)
+
@Issue(2843)
def copy_over_excluded(sbox):
  "copy on top of excluded should give error"

  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  # Exclude A/D from the working copy.
  svntest.actions.run_and_verify_svn(None, [],
                                     'update', '--set-depth', 'exclude',
                                     sbox.ospath('A/D'))

  expected_error = "svn: E155000: Path '.*D' exists.*excluded.*"

  # Copying on top of the excluded path must fail the same way whether
  # the copy source is a repository URL or a working copy path.
  for source in [sbox.repo_url + '/A/C', sbox.ospath('A/C')]:
    svntest.actions.run_and_verify_svn(None, expected_error,
                                       'cp',
                                       source,
                                       sbox.ospath('A/D'))
+
def copy_relocate(sbox):
  "copy from a relocated location"

  sbox.build()
  wc_dir = sbox.wc_dir

  # Clone the repository and relocate the working copy to the clone.
  tmp_dir, url = sbox.add_repo_path('relocated')

  shutil.copytree(sbox.repo_dir, tmp_dir)

  svntest.actions.run_and_verify_svn(None, [],
                                     'relocate', url, wc_dir)

  # A copy from the relocated repository URL into the working copy
  # must succeed, and the result must be a node that 'info' can query.
  copiedpath = sbox.ospath('AA')
  svntest.actions.run_and_verify_svn(None, [],
                                     'cp', url + '/A', copiedpath)

  svntest.actions.run_and_verify_svn(None, [],
                                     'info', copiedpath)
+
def ext_wc_copy_deleted(sbox):
  "copy deleted tree from separate wc"

  sbox.build()

  wc_dir = sbox.wc_dir
  wc2_dir = sbox.add_wc_path('2')

  # Delete A/B (committed as r2), then exclude A/D, so the copy source
  # is a working copy with both a deleted and an excluded subtree.
  sbox.simple_rm('A/B')
  sbox.simple_commit()

  svntest.actions.run_and_verify_svn(None, [],
                                     'up', '--set-depth', 'exclude',
                                     sbox.ospath('A/D'))

  # Check out a second working copy at r1 and copy A into it.
  svntest.actions.run_and_verify_svn(None, [],
                                     'co', sbox.repo_url, wc2_dir, '-r', 1)

  svntest.actions.run_and_verify_svn(None, [],
                                     'cp', sbox.path('A'),
                                     os.path.join(wc2_dir, 'AA'))

  # The commit must add AA and delete AA/B (deleted in the source wc).
  # Fix: dropped the accidental duplicated assignment
  # 'expected_output = expected_output = ...' from the original.
  expected_output = svntest.wc.State(wc2_dir, {
    'AA'   : Item(verb='Adding'),
    'AA/B' : Item(verb='Deleting'),
    })

  svntest.actions.run_and_verify_commit(wc2_dir,
                                        expected_output, None)
+
def copy_subtree_deleted(sbox):
  "copy to-be-deleted subtree"

  sbox.build()
  wc_dir = sbox.wc_dir
  wc2_dir = sbox.add_wc_path('2')
  svntest.actions.duplicate_dir(wc_dir, wc2_dir)

  # Schedule A/B for deletion; subsequent copies of A must carry the
  # pending delete along as a delete inside the copy.
  sbox.simple_rm('A/B')

  # Commit copy within a working copy
  # Fix: dropped the accidental duplicated assignment
  # 'expected_output = expected_output = ...' from the original (twice).
  sbox.simple_copy('A', 'AA')
  expected_output = svntest.wc.State(wc_dir, {
    'AA'   : Item(verb='Adding'),
    'AA/B' : Item(verb='Deleting'),
    })
  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output, None, [],
                                        sbox.ospath('AA'))

  # Commit copy between working copies
  svntest.actions.run_and_verify_svn(None, [],
                                     'cp', sbox.path('A'),
                                     os.path.join(wc2_dir, 'AA2'))
  expected_output = svntest.wc.State(wc2_dir, {
    'AA2'   : Item(verb='Adding'),
    'AA2/B' : Item(verb='Deleting'),
    })
  svntest.actions.run_and_verify_commit(wc2_dir,
                                        expected_output, None)
+
def resurrect_at_root(sbox):
  "resurrect directory at root"

  sbox.build(create_wc=False)

  A_url = sbox.repo_url + '/A'

  # Delete /A directly in the repository...
  svntest.actions.run_and_verify_svn(None, [], 'rm', A_url,
                                     '-m', '')

  # ...then resurrect a directory at that root path by copying from a
  # peg revision that predates the delete.
  svntest.actions.run_and_verify_svn(None, [], 'cp',
                                     sbox.repo_url + '/A/D/H@1',
                                     A_url, '-m', '')
+
+########################################################################
+# Run the tests
+
+
# list all tests here, starting with None:
# NOTE: tests run in the order listed; keep new tests appended at the
# end so existing test numbers stay stable.
test_list = [ None,
              basic_copy_and_move_files,
              receive_copy_in_update,
              resurrect_deleted_dir,
              no_copy_overwrites,
              no_wc_copy_overwrites,
              copy_modify_commit,
              copy_files_with_properties,
              copy_delete_commit,
              mv_and_revert_directory,
              copy_preserve_executable_bit,
              wc_to_repos,
              repos_to_wc,
              copy_to_root,
              url_copy_parent_into_child,
              wc_copy_parent_into_child,
              resurrect_deleted_file,
              diff_repos_to_wc_copy,
              repos_to_wc_copy_eol_keywords,
              revision_kinds_local_source,
              copy_over_missing_file,
              repos_to_wc_1634,
              double_uri_escaping_1814,
              wc_to_wc_copy_between_different_repos,
              wc_to_wc_copy_deleted,
              url_to_non_existent_url_path,
              non_existent_url_to_url,
              old_dir_url_to_url,
              wc_copy_dir_to_itself,
              mixed_wc_to_url,
              wc_copy_replacement,
              wc_copy_replace_with_props,
              repos_to_wc_copy_replacement,
              repos_to_wc_copy_replace_with_props,
              delete_replaced_file,
              mv_unversioned_file,
              force_move,
              copy_deleted_dir_into_prefix,
              copy_copied_file_and_dir,
              move_copied_file_and_dir,
              move_moved_file_and_dir,
              move_file_within_moved_dir,
              move_file_out_of_moved_dir,
              move_dir_within_moved_dir,
              move_dir_out_of_moved_dir,
              move_file_back_and_forth,
              move_dir_back_and_forth,
              copy_move_added_paths,
              copy_added_paths_with_props,
              copy_added_paths_to_URL,
              move_to_relative_paths,
              move_from_relative_paths,
              copy_to_relative_paths,
              copy_from_relative_paths,
              move_multiple_wc,
              copy_multiple_wc,
              move_multiple_repo,
              copy_multiple_repo,
              copy_multiple_repo_wc,
              copy_multiple_wc_repo,
              copy_peg_rev_local_files,
              copy_peg_rev_local_dirs,
              copy_peg_rev_url,
              old_dir_wc_to_wc,
              copy_make_parents_wc_wc,
              copy_make_parents_repo_wc,
              copy_make_parents_wc_repo,
              copy_make_parents_repo_repo,
              URI_encoded_repos_to_wc,
              allow_unversioned_parent_for_copy_src,
              unneeded_parents,
              double_parents_with_url,
              copy_into_missing_dir,
              find_copyfrom_information_upstairs,
              path_move_and_copy_between_wcs_2475,
              path_copy_in_repo_2475,
              commit_copy_depth_empty,
              copy_below_copy,
              move_below_move,
              reverse_merge_move,
              nonrecursive_commit_of_copy,
              copy_added_dir_with_copy,
              copy_broken_symlink,
              move_dir_containing_move,
              copy_dir_with_space,
              changed_data_should_match_checkout,
              changed_dir_data_should_match_checkout,
              move_added_nodes,
              copy_over_deleted_dir,
              mixed_rev_copy_del,
              copy_delete_delete,
              copy_delete_revert,
              delete_replace_delete,
              copy_repos_over_deleted_same_kind,
              copy_repos_over_deleted_other_kind,
              copy_wc_over_deleted_same_kind,
              copy_wc_over_deleted_other_kind,
              move_wc_and_repo_dir_to_itself,
              copy_wc_url_with_absent,
              copy_url_shortcut,
              deleted_file_with_case_clash,
              copy_base_of_deleted,
              case_only_rename,
              copy_and_move_conflicts,
              copy_deleted_dir,
              commit_copied_half_of_move,
              commit_deleted_half_of_move,
              wc_wc_copy_incomplete,
              three_nested_moves,
              copy_to_unversioned_parent,
              copy_text_conflict,
              copy_over_excluded,
              copy_relocate,
              ext_wc_copy_deleted,
              copy_subtree_deleted,
              resurrect_at_root,
            ]

if __name__ == '__main__':
  svntest.main.run_tests(test_list)
  # NOTREACHED


### End of file.
diff --git a/subversion/tests/cmdline/dav-mirror-autocheck.sh b/subversion/tests/cmdline/dav-mirror-autocheck.sh
new file mode 100755
index 0000000..298a8ba
--- /dev/null
+++ b/subversion/tests/cmdline/dav-mirror-autocheck.sh
@@ -0,0 +1,500 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# Script to automate testing of an svnsync master/slave
+# configuration. Commits to the slave should write through
+# to the master, and the master's post-commit hook svnsync's
+# to the slave. The test should be able to throw all kinds
+# of svn operations at one or the other, and master/slave
+# verified as identical in the end.
+#
+# Master / slave setup is achieved in a single httpd process
+# using virtual hosts bound to different addresses on the
+# loopback network (127.0.0.1, 127.0.0.2) for slave and
+# master, respectively.
+#
+# The set of changes sent through the system is currently
+# just the test case for issue 2939, using svnmucc
+# http://subversion.tigris.org/issues/show_bug.cgi?id=2939
+# But of course, any svn traffic liable to break over
+# mirroring would be a good addition.
+#
+# Most of the httpd setup was lifted from davautocheck.sh.
+# The common boilerplate snippets to setup/start/stop httpd
+# between the two could be factored out and shared.
+#
+
+SCRIPTDIR=$(dirname $0)
+SCRIPT=$(basename $0)
+
+trap stop_httpd_and_die SIGHUP SIGTERM SIGINT
+
+# Ensure the server uses a known locale.
+LC_ALL=C
+export LC_ALL
+
+function stop_httpd_and_die() {
+ [ -e "$HTTPD_PID" ] && kill $(cat "$HTTPD_PID")
+ exit 1
+}
+
+function say() {
+ echo "$SCRIPT: $*"
+}
+
+function fail() {
+ say "FAIL: " $*
+ stop_httpd_and_die
+}
+
+function get_loadmodule_config() {
+ local SO="$($APXS -q LIBEXECDIR)/$1.so"
+
+ # shared object module?
+ if [ -r "$SO" ]; then
+ local NM=$(echo "$1" | sed 's|mod_\(.*\)|\1_module|')
+ echo "LoadModule $NM \"$SO\"" &&
+ return
+ fi
+
+ # maybe it's built-in?
+ "$HTTPD" -l | grep -q "$1\\.c" && return
+
+ return 1
+}
+
+
+# Check apxs's SBINDIR and BINDIR for given program names
+function get_prog_name() {
+ for prog in $*
+ do
+ for dir in $($APXS -q SBINDIR) $($APXS -q BINDIR)
+ do
+ if [ -e "$dir/$prog" ]; then
+ echo "$dir/$prog" && return
+ fi
+ done
+ done
+
+ return 1
+}
+
+# splat out httpd config
+function setup_config() {
+
+ say "setting up config: " $1
+cat > "$1" <<__EOF__
+$LOAD_MOD_LOG_CONFIG
+$LOAD_MOD_MIME
+$LOAD_MOD_UNIXD
+$LOAD_MOD_DAV
+LoadModule dav_svn_module "$MOD_DAV_SVN"
+$LOAD_MOD_AUTH
+$LOAD_MOD_AUTHN_CORE
+$LOAD_MOD_AUTHN_FILE
+$LOAD_MOD_PROXY
+$LOAD_MOD_PROXY_HTTP
+$LOAD_MOD_AUTHZ_CORE
+$LOAD_MOD_AUTHZ_USER
+$LOAD_MOD_AUTHZ_HOST
+
+LockFile lock
+User $(id -un)
+Group $(id -gn)
+Listen ${TEST_PORT}
+ServerName localhost
+PidFile "${HTTPD_ROOT}/pid"
+LogFormat "%h %l %u %t \"%r\" %>s %b" common
+CustomLog "${HTTPD_ROOT}/access_log" common
+ErrorLog "${HTTPD_ROOT}/error_log"
+LogLevel Debug
+ServerRoot "${HTTPD_ROOT}"
+DocumentRoot "${HTTPD_ROOT}"
+CoreDumpDirectory "${HTTPD_ROOT}"
+TypesConfig "${HTTPD_ROOT}/mime.types"
+StartServers 4
+MaxRequestsPerChild 0
+<IfModule worker.c>
+ ThreadsPerChild 8
+</IfModule>
+MaxClients 16
+HostNameLookups Off
+LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" format
+CustomLog "${HTTPD_ROOT}/req" format
+CustomLog "${HTTPD_ROOT}/ops" "%t %u %{SVN-REPOS-NAME}e %{SVN-ACTION}e" env=SVN-ACTION
+
+<Directory />
+ AllowOverride none
+</Directory>
+<Directory "${HTTPD_ROOT}">
+ AllowOverride none
+ #Require all granted
+</Directory>
+
+# slave
+<VirtualHost ${SLAVE_HOST}>
+ ServerName ${SLAVE_HOST}
+ CustomLog "${HTTPD_ROOT}/slave_access_log" common
+ ErrorLog "${HTTPD_ROOT}/slave_error_log"
+# slave 'normal' location
+ <Location "/${SLAVE_LOCATION}">
+ DAV svn
+ SVNPath "${SLAVE_REPOS}"
+ SVNMasterURI "${MASTER_URL}"
+ AuthType Basic
+ AuthName "Subversion Repository"
+ AuthUserFile ${HTTPD_ROOT}/users
+ Require valid-user
+ </Location>
+# slave 'sync' location
+ <Location "/${SYNC_LOCATION}">
+ DAV svn
+ SVNPath "${SLAVE_REPOS}"
+ AuthType Basic
+ AuthName "Slave Sync Repository"
+ AuthUserFile ${HTTPD_ROOT}/users
+ Require valid-user
+</Location>
+</VirtualHost>
+
+# master
+<VirtualHost ${MASTER_HOST}>
+  ServerName ${MASTER_HOST}
+ CustomLog "${HTTPD_ROOT}/master_access_log" common
+ ErrorLog "${HTTPD_ROOT}/master_error_log"
+ <Location "/${MASTER_LOCATION}">
+ DAV svn
+ SVNPath "${MASTER_REPOS}"
+ AuthType Basic
+ AuthName "Subversion Repository"
+ AuthUserFile ${HTTPD_ROOT}/users
+ Require valid-user
+ </Location>
+</VirtualHost>
+__EOF__
+}
+
+function usage() {
+ echo "usage: $SCRIPT <test-work-directory>" 1>&2
+ echo " e.g. \"$SCRIPT /tmp/test-work\"" 1>&2
+ echo
+ echo " " '<test-work-directory>' must not exist, \
+ I will not clobber it for you 1>&2
+ exit 1
+}
+### Start execution here ###
+
+SCRIPT=$(basename $0)
+
+if [ $# -ne 1 ] ; then
+ usage
+fi
+
+
+# httpd ServerRoot, all test and runtime artifacts below here
+# verify that this doesn't already exist - don't clobber
+HTTPD_ROOT=$1
+
+if [ -e "$HTTPD_ROOT" ] ; then
+ say "ERROR: test work directory $HTTPD_ROOT already exists, please remove" 1>&2
+ usage
+fi
+
+#set -e
+
+# Don't assume sbin is in the PATH.
+PATH="$PATH:/usr/sbin:/usr/local/sbin"
+
+# Pick up value from environment or PATH (also try apxs2 - for Debian)
+[ ${APXS:+set} ] \
+ || APXS=$(which apxs) \
+ || APXS=$(which apxs2) \
+ || fail "neither apxs or apxs2 found - required to run $SCRIPT"
+
+[ -x $APXS ] || fail "Can't execute apxs executable $APXS"
+
+say APXS: $APXS
+
+if [ -x subversion/svn/svn ]; then
+ ABS_BUILDDIR=$(pwd)
+elif [ -x $SCRIPTDIR/../../svn/svn ]; then
+ pushd $SCRIPTDIR/../../../ >/dev/null
+ ABS_BUILDDIR=$(pwd)
+ popd >/dev/null
+else
+ fail "Run this script from the root of Subversion's build tree!"
+fi
+
+# find all our needed executables, in WC or via apxs
+httpd="$($APXS -q PROGNAME)"
+HTTPD=$(get_prog_name $httpd) || fail "HTTPD not found"
+HTPASSWD=$(get_prog_name htpasswd htpasswd2) \
+ || fail "Could not find htpasswd or htpasswd2"
+SVN=$ABS_BUILDDIR/subversion/svn/svn
+SVNADMIN=$ABS_BUILDDIR/subversion/svnadmin/svnadmin
+SVNSYNC=$ABS_BUILDDIR/subversion/svnsync/svnsync
+SVNMUCC=${SVNMUCC:-$ABS_BUILDDIR/tools/client-side/svnmucc/svnmucc}
+SVNLOOK=$ABS_BUILDDIR/subversion/svnlook/svnlook
+
+[ -x $HTTPD ] || fail "HTTPD '$HTTPD' not executable"
+[ -x $HTPASSWD ] \
+ || fail "HTPASSWD '$HTPASSWD' not executable"
+[ -x $SVN ] || fail "SVN $SVN not built"
+[ -x $SVNADMIN ] || fail "SVNADMIN $SVNADMIN not built"
+[ -x $SVNSYNC ] || fail "SVNSYNC $SVNSYNC not built"
+[ -x $SVNLOOK ] || fail "SVNLOOK $SVNLOOK not built"
+[ -x $SVNMUCC ] \
+ || fail SVNMUCC $SVNMUCC executable not built, needed for test. \
+ \'cd $ABS_BUILDDIR\; make svnmucc\' to fix.
+
+say HTTPD: $HTTPD
+say SVN: $SVN
+say SVNADMIN: $SVNADMIN
+say SVNSYNC: $SVNSYNC
+say SVNLOOK: $SVNLOOK
+say SVNMUCC: $SVNMUCC
+
+LOAD_MOD_DAV=$(get_loadmodule_config mod_dav) \
+ || fail "DAV module not found"
+
+LOAD_MOD_LOG_CONFIG=$(get_loadmodule_config mod_log_config) \
+ || fail "log_config module not found"
+
+# proxy needed for svnsync mirroring
+LOAD_MOD_PROXY=$(get_loadmodule_config mod_proxy) \
+ || fail "proxy module not found"
+LOAD_MOD_PROXY_HTTP=$(get_loadmodule_config mod_proxy_http) \
+ || fail "proxy_http module not found"
+
+# needed for TypesConfig
+LOAD_MOD_MIME=$(get_loadmodule_config mod_mime) \
+ || fail "MIME module not found"
+
+# needed for Auth*, Require, etc. directives
+LOAD_MOD_AUTH=$(get_loadmodule_config mod_auth) \
+ || {
+say "Monolithic Auth module not found. Assuming we run against Apache 2.1+"
+LOAD_MOD_AUTH="$(get_loadmodule_config mod_auth_basic)" \
+ || fail "Auth_Basic module not found."
+LOAD_MOD_ACCESS_COMPAT="$(get_loadmodule_config mod_access_compat)" \
+ && {
+say "Found modules for Apache 2.3.0+"
+LOAD_MOD_AUTHN_CORE="$(get_loadmodule_config mod_authn_core)" \
+ || fail "Authn_Core module not found."
+LOAD_MOD_AUTHZ_CORE="$(get_loadmodule_config mod_authz_core)" \
+ || fail "Authz_Core module not found."
+LOAD_MOD_AUTHZ_HOST="$(get_loadmodule_config mod_authz_host)" \
+ || fail "Authz_Host module not found."
+LOAD_MOD_UNIXD=$(get_loadmodule_config mod_unixd) \
+ || fail "UnixD module not found"
+}
+LOAD_MOD_AUTHN_FILE="$(get_loadmodule_config mod_authn_file)" \
+ || fail "Authn_File module not found."
+LOAD_MOD_AUTHZ_USER="$(get_loadmodule_config mod_authz_user)" \
+ || fail "Authz_User module not found."
+}
+
+if [ ${MODULE_PATH:+set} ]; then
+ MOD_DAV_SVN="$MODULE_PATH/mod_dav_svn.so"
+ MOD_AUTHZ_SVN="$MODULE_PATH/mod_authz_svn.so"
+else
+ MOD_DAV_SVN="$ABS_BUILDDIR/subversion/mod_dav_svn/.libs/mod_dav_svn.so"
+ MOD_AUTHZ_SVN="$ABS_BUILDDIR/subversion/mod_authz_svn/.libs/mod_authz_svn.so"
+fi
+
+[ -r "$MOD_DAV_SVN" ] \
+ || fail "dav_svn_module not found, please use '--enable-shared --enable-dso --with-apxs' with your 'configure' script"
+[ -r "$MOD_AUTHZ_SVN" ] \
+ || fail "authz_svn_module not found, please use '--enable-shared --enable-dso --with-apxs' with your 'configure' script"
+
+export LD_LIBRARY_PATH="$ABS_BUILDDIR/subversion/libsvn_ra_neon/.libs:$ABS_BUILDDIR/subversion/libsvn_ra_local/.libs:$ABS_BUILDDIR/subversion/libsvn_ra_svn/.libs:$LD_LIBRARY_PATH"
+
+MASTER_REPOS="${MASTER_REPOS:-"$HTTPD_ROOT/master_repos"}"
+SLAVE_REPOS="${SLAVE_REPOS:-"$HTTPD_ROOT/slave_repos"}"
+
+MASTER_HOST=127.0.0.2
+SLAVE_HOST=127.0.0.1
+#TEST_PORT=11111
+TEST_PORT=$(($RANDOM+1024))
+
+# location directive elements for master,slave,sync
+# tests currently work if master==slave,fail if different
+# ** Should different locations for each work?
+#MASTER_LOCATION="master"
+#SLAVE_LOCATION="slave"
+MASTER_LOCATION="repo"
+SLAVE_LOCATION="repo"
+SYNC_LOCATION="sync"
+
+MASTER_URL="http://${MASTER_HOST}:${TEST_PORT}/${MASTER_LOCATION}"
+SLAVE_URL="http://${SLAVE_HOST}:${TEST_PORT}/${SLAVE_LOCATION}"
+SYNC_URL="http://${SLAVE_HOST}:${TEST_PORT}/${SYNC_LOCATION}"
+
+BASE_URL="$SLAVE_URL"
+
+# setup server and repositories
+say "setting up in ${HTTPD_ROOT}:"
+mkdir -p $HTTPD_ROOT || fail "cannot mkdir $HTTPD_ROOT"
+HTTPD_CONFIG=$HTTPD_ROOT/cfg
+setup_config $HTTPD_CONFIG
+touch $HTTPD_ROOT/mime.types
+HTTPD_USERS="$HTTPD_ROOT/users"
+$HTPASSWD -bc $HTTPD_USERS jrandom rayjandom
+$HTPASSWD -b $HTTPD_USERS jconstant rayjandom
+$HTPASSWD -b $HTTPD_USERS scm scm
+$HTPASSWD -b $HTTPD_USERS svnsync svnsync
+$SVNADMIN create "$MASTER_REPOS" || fail "create master repos failed"
+$SVNADMIN create "$SLAVE_REPOS" || fail "create slave repos failed"
+# dup them
+$SVNADMIN dump "$MASTER_REPOS" | $SVNADMIN load "$SLAVE_REPOS" \
+ || fail "duplicate repositories failed"
+# make sure uuid's match
+[ `cat "$SLAVE_REPOS/db/uuid"` = `cat "$MASTER_REPOS/db/uuid"` ] \
+ || fail "master/slave uuid mismatch"
+# setup hooks:
+# slave allows revprop changes
+# master syncs changes to slave
+echo "#!/bin/sh" > "$SLAVE_REPOS/hooks/pre-revprop-change"
+echo "#!/bin/sh" > "$MASTER_REPOS/hooks/post-revprop-change"
+echo "#!/bin/sh" > "$MASTER_REPOS/hooks/post-commit"
+echo "$SVNSYNC --non-interactive sync '$SYNC_URL' --username=svnsync --password=svnsync" \
+ >> "$MASTER_REPOS/hooks/post-revprop-change"
+echo "$SVNSYNC --non-interactive sync '$SYNC_URL' --username=svnsync --password=svnsync" \
+ >> "$MASTER_REPOS/hooks/post-commit"
+
+chmod 0755 "$SLAVE_REPOS/hooks/pre-revprop-change"
+chmod 0755 "$MASTER_REPOS/hooks/post-revprop-change"
+chmod 0755 "$MASTER_REPOS/hooks/post-commit"
+
+say "created master and slave repositories"
+
+# test config
+$HTTPD -f $HTTPD_CONFIG -t || fail "httpd config failure in $HTTPD_CONFIG"
+
+# start httpd
+echo -n "${SCRIPT}: starting httpd: "
+$HTTPD -f $HTTPD_CONFIG -k start || fail "httpd start failed"
+echo "."
+say initializing svnsync to $SYNC_URL
+HTTPD_PID=$HTTPD_ROOT/pid
+$SVNSYNC initialize --non-interactive "$SYNC_URL" "$MASTER_URL" \
+ --username=svnsync --password=svnsync \
+ || fail "svnsync initialize failed"
+
+# OK, let's start testing! Commit changes to slave, expect
+# them to proxy through to the master, and then
+# svnsync back to the slave
+#
+# reproducible test case from:
+# http://subversion.tigris.org/issues/show_bug.cgi?id=2939
+#
+BASE_URL="$SLAVE_URL"
+say running svnmucc test to $BASE_URL
+svnmucc="$SVNMUCC --non-interactive --username jrandom --password rayjandom -mm"
+
+$svnmucc mkdir "$BASE_URL/trunk" mkdir "$BASE_URL/trunk/dir1" mkdir "$BASE_URL/trunk/dir1/dir2"
+$svnmucc rm "$BASE_URL/trunk/dir1/dir2"
+$svnmucc cp 2 "$BASE_URL/trunk" "$BASE_URL/branch" put /dev/null "$BASE_URL/branch/dir1/dir2"
+$svnmucc rm "$BASE_URL/branch" cp 2 "$BASE_URL/trunk" "$BASE_URL/branch" put /dev/null "$BASE_URL/branch/dir1/dir2"
+
+say "svn log on $BASE_URL : "
+$SVN --username jrandom --password rayjandom log -vq "$BASE_URL"
+
+
+# verify result: should be at rev 4 in both repos
+# FIXME: do more rigorous verification here
+MASTER_HEAD=`$SVNLOOK youngest "$MASTER_REPOS"`
+SLAVE_HEAD=`$SVNLOOK youngest "$SLAVE_REPOS"`
+
+say checking consistency of master, slave repositories:
+
+if [ "$MASTER_HEAD" != "4" ] || [ "$SLAVE_HEAD" != "4" ] ;
+then
+ say FAIL: master, slave are at rev $MASTER_HEAD, $SLAVE_HEAD, not 4
+ say server may be started/stopped manually with:
+ say " $HTTPD -f $HTTPD_CONFIG -k start|stop"
+ fail charred remains in $HTTPD_ROOT for your perusal
+fi
+
+say "PASS: master, slave are both at r4, as expected"
+
+# The following test case is for the regression issue triggered by r917523.
+# The revision r917523 do some url encodings to the paths and uris which are
+# not url-encoded. But there is one additional url-encoding of an uri which is
+# already encoded. With this extra encoding, committing a path to slave which
+# has space in it fails. Please see this thread
+# http://svn.haxx.se/dev/archive-2011-03/0641.shtml for more info.
+
+say "Test case for regression issue triggered by r917523"
+
+$svnmucc cp 2 "$BASE_URL/trunk" "$BASE_URL/branch new"
+$svnmucc put /dev/null "$BASE_URL/branch new/file" \
+--config-option servers:global:http-library=neon
+RETVAL=$?
+
+if [ $RETVAL -eq 0 ] ; then
+ say "PASS: committing a path which has space in it passes"
+else
+ say "FAIL: committing a path which has space in it fails as there are extra
+ url-encodings happening in server side"
+fi
+
+# Test case for commit to out-dated(though target path is up to date) slave.
+# See issue #3860 for details.
+say "Test case for out-dated slave commit"
+
+svn="$SVN --non-interactive --username=jrandom --password=rayjandom"
+# Make a working copy of the slave.
+$svn checkout $SLAVE_URL $HTTPD_ROOT/wc
+cd $HTTPD_ROOT/wc
+# Add a new file named newfile and commit it.
+touch branch/newfile
+$svn add branch/newfile
+$svn commit -mm
+
+say "De-activating post-commit hook on $MASTER_REPOS to make $SLAVE_REPOS go out of sync"
+mv "$MASTER_REPOS/hooks/post-commit" "$MASTER_REPOS/hooks/post-commit_"
+
+echo "Change made to file in branch" > $HTTPD_ROOT/wc/branch/newfile
+$svn ci -m "Commit from slave"
+
+MASTER_HEAD=`$SVNLOOK youngest "$MASTER_REPOS"`
+SLAVE_HEAD=`$SVNLOOK youngest "$SLAVE_REPOS"`
+say "Now the slave is at r$SLAVE_HEAD and master is at r$MASTER_HEAD."
+
+# Now any other commit operation will fail with an out-of-date error
+
+$svn cp -m "Creating a branch" ^/trunk ^/branch/newbranch --config-option "servers:global:http-library=neon"
+RETVAL=$?
+
+if [ $RETVAL -eq 0 ]; then
+ say "PASS: Commits succeed even with an out-of-date slave"
+else
+ say "FAIL: Commits fail with an out-of-date slave"
+fi
+say "Some house-keeping..."
+say "Re-activating the post-commit hook on the master repo: $MASTER_REPOS."
+mv "$MASTER_REPOS/hooks/post-commit_" "$MASTER_REPOS/hooks/post-commit"
+say "Syncing slave with master."
+$SVNSYNC --non-interactive sync "$SYNC_URL" --username=svnsync --password=svnsync
+# shut it down
+echo -n "${SCRIPT}: stopping httpd: "
+$HTTPD -f $HTTPD_CONFIG -k stop
+echo "."
+exit 0
diff --git a/subversion/tests/cmdline/davautocheck.sh b/subversion/tests/cmdline/davautocheck.sh
new file mode 100755
index 0000000..064feb1
--- /dev/null
+++ b/subversion/tests/cmdline/davautocheck.sh
@@ -0,0 +1,791 @@
+#!/bin/sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+# -*- mode: shell-script; -*-
+# $Id$
+
+# This script simplifies preparation of environment for Subversion client
+# communicating with a server via DAV protocol. The prerequisites of such
+# testing are:
+# - Subversion built using --enable-shared --with-apxs options,
+# - Working Apache 2 HTTPD Server with the apxs program reachable through
+# PATH or specified via the APXS Makefile variable or environment variable,
+# - Modules dav_module and log_config_module compiled as DSO or built into
+# Apache HTTPD Server executable.
+# The basic intention of this script is to be able to perform "make check"
+# operation over DAV without any configuration efforts whatsoever, provided
+# that conditions above are met.
+#
+# The script will find Apache and all necessary modules including mod_dav_svn,
+# create a temporary directory in subversion/tests/cmdline, create
+# Apache 2 configuration file in this directory, start Apache 2 on a random
+# port number higher than 1024, and execute Subversion command-line client
+# test suites against this instance of HTTPD. Every vital configuration
+# parameter is checked before the tests start. The script will ask questions
+# about browsing Apache error log (default is "no") and about deleting
+# temporary directory (default "yes") and pause for 32 seconds before
+# proceeding with the default. HTTPD access log is also created in the
+# temporary directory.
+#
+# Run this script without parameters to execute the full battery of tests:
+# subversion/tests/cmdline/davautocheck.sh
+# Run this script with the name of a test suite to run this suite:
+# subversion/tests/cmdline/davautocheck.sh basic
+# Run this script with the test suite name and test number to execute just this
+# test:
+# subversion/tests/cmdline/davautocheck.sh basic 4
+# This script can also be invoked via "make davautocheck".
+#
+# If the temporary directory is not deleted, it can be reused for further
+# manual DAV protocol interoperation testing. HTTPD must be started by
+# specifying configuration file on the command line:
+# httpd -f subversion/tests/cmdline/<httpd-...>/cfg
+#
+# If you want to run this against an *installed* HTTPD (for example, to test
+# one version's client against another version's server) specify both APXS
+# *and* MODULE_PATH for the other server:
+#
+# APXS=/opt/svn/1.4.x/bin/apxs MODULE_PATH=/opt/svn/1.4.x/modules \
+# subversion/tests/cmdline/davautocheck.sh
+#
+# Other environment variables that are interpreted by this script:
+#
+# make davautocheck CACHE_REVPROPS=1 # sets SVNCacheRevProps on
+#
+# make davautocheck BLOCK_READ=1 # sets SVNBlockRead on
+#
+# make davautocheck USE_SSL=1 # run over https
+#
+# make davautocheck USE_HTTPV1=1 # sets SVNAdvertiseV2Protocol off
+#
+# make davautocheck APACHE_MPM=event # specifies the 2.4 MPM
+#
+# make davautocheck SVN_PATH_AUTHZ=short_circuit # SVNPathAuthz short_circuit
+#
+# Passing --no-tests as argv[1] will have the script start a server
+# but not run any tests. Passing --gdb or --lldb will do the same, and in
+# addition spawn gdb/lldb in the foreground attached to the running server.
+
+PYTHON=${PYTHON:-python}
+
+SCRIPTDIR=$(dirname $0)
+SCRIPT=$(basename $0)
+STOPSCRIPT=$SCRIPTDIR/.$SCRIPT.stop
+
+trap stop_httpd_and_die HUP TERM INT
+
+# Ensure the server uses a known locale.
+LC_ALL=C
+export LC_ALL
+
+stop_httpd_and_die() {
+ [ -e "$HTTPD_PID" ] && kill $(cat "$HTTPD_PID")
+ echo "HTTPD stopped."
+ exit 1
+}
+
+say() {
+ echo "$SCRIPT: $*"
+}
+
+fail() {
+ say $*
+ stop_httpd_and_die
+}
+
+query() {
+ printf "%s" "$SCRIPT: $1 (y/n)? [$2] "
+ if [ -n "$BASH_VERSION" ]; then
+ read -n 1 -t 32
+ else
+ #
+ prog="
+import select as s
+import sys
+import tty, termios
+tty.setcbreak(sys.stdin.fileno(), termios.TCSANOW)
+if s.select([sys.stdin.fileno()], [], [], 32)[0]:
+ sys.stdout.write(sys.stdin.read(1))
+"
+ stty_state=`stty -g`
+ REPLY=`$PYTHON -u -c "$prog" "$@"`
+ stty $stty_state
+ fi
+ echo
+ [ "${REPLY:-$2}" = 'y' ]
+}
+
+get_loadmodule_config() {
+ local SO="$($APXS -q LIBEXECDIR)/$1.so"
+
+ # shared object module?
+ if [ -r "$SO" ]; then
+ local NM=$(echo "$1" | sed 's|mod_\(.*\)|\1_module|')
+ echo "LoadModule $NM \"$SO\"" &&
+ return
+ fi
+
+ # maybe it's built-in?
+ "$HTTPD" -l | grep "$1\\.c" >/dev/null && return
+
+ return 1
+}
+
+# Check apxs's SBINDIR and BINDIR for given program names
+get_prog_name() {
+ for prog in $*
+ do
+ for dir in $($APXS -q SBINDIR) $($APXS -q BINDIR)
+ do
+ if [ -e "$dir/$prog" ]; then
+ echo "$dir/$prog" && return
+ fi
+ done
+ done
+
+ return 1
+}
+
+# Don't assume sbin is in the PATH.
+# This is used to locate apxs when the script is invoked manually; when
+# invoked by 'make davautocheck' the APXS environment variable is set.
+PATH="$PATH:/usr/sbin:/usr/local/sbin"
+
+# Find the source and build directories. The build dir can be found if it is
+# the current working dir or the source dir.
+ABS_SRCDIR=$(cd ${SCRIPTDIR}/../../../; pwd)
+if [ -x subversion/svn/svn ]; then
+ ABS_BUILDDIR=$(pwd)
+elif [ -x $ABS_SRCDIR/subversion/svn/svn ]; then
+ ABS_BUILDDIR=$ABS_SRCDIR
+else
+ fail "Run this script from the root of Subversion's build tree!"
+fi
+
+# Remove any proxy environmental variables that affect wget or curl.
+# We don't need a proxy to connect to localhost and having the proxy
+# environmental variables set breaks the Apache configuration file
+# test below, since wget or curl will ask the proxy to connect to
+# localhost.
+unset PROXY
+unset http_proxy
+unset HTTPS_PROXY
+
+# Pick up value from environment or PATH (also try apxs2 - for Debian)
+if [ ${APXS:+set} ]; then
+ :
+elif APXS=$(grep '^APXS' $ABS_BUILDDIR/Makefile | sed 's/^APXS *= *//') && \
+ [ -n "$APXS" ]; then
+ :
+elif APXS=$(which apxs); then
+ :
+elif APXS=$(which apxs2); then
+ :
+else
+ fail "neither apxs or apxs2 found - required to run davautocheck"
+fi
+
+[ -x $APXS ] || fail "Can't execute apxs executable $APXS"
+
+say "Using '$APXS'..."
+
+# Pick up $USE_HTTPV1
+ADVERTISE_V2_PROTOCOL=on
+if [ ${USE_HTTPV1:+set} ]; then
+ ADVERTISE_V2_PROTOCOL=off
+fi
+
+# Pick up $SVN_PATH_AUTHZ
+SVN_PATH_AUTHZ_LINE=""
+if [ ${SVN_PATH_AUTHZ:+set} ]; then
+ SVN_PATH_AUTHZ_LINE="SVNPathAuthz ${SVN_PATH_AUTHZ}"
+fi
+
+CACHE_REVPROPS_SETTING=off
+if [ ${CACHE_REVPROPS:+set} ]; then
+ CACHE_REVPROPS_SETTING=on
+fi
+
+BLOCK_READ_SETTING=off
+if [ ${BLOCK_READ:+set} ]; then
+ BLOCK_READ_SETTING=on
+fi
+
+if [ ${MODULE_PATH:+set} ]; then
+ MOD_DAV_SVN="$MODULE_PATH/mod_dav_svn.so"
+ MOD_AUTHZ_SVN="$MODULE_PATH/mod_authz_svn.so"
+ MOD_DONTDOTHAT="$MODULE_PATH/mod_dontdothat.so"
+else
+ MOD_DAV_SVN="$ABS_BUILDDIR/subversion/mod_dav_svn/.libs/mod_dav_svn.so"
+ MOD_AUTHZ_SVN="$ABS_BUILDDIR/subversion/mod_authz_svn/.libs/mod_authz_svn.so"
+ MOD_DONTDOTHAT="$ABS_BUILDDIR/tools/server-side/mod_dontdothat/.libs/mod_dontdothat.so"
+fi
+
+[ -r "$MOD_DAV_SVN" ] \
+ || fail "dav_svn_module not found, please use '--enable-shared --with-apxs' with your 'configure' script"
+[ -r "$MOD_AUTHZ_SVN" ] \
+ || fail "authz_svn_module not found, please use '--enable-shared --with-apxs' with your 'configure' script"
+[ -r "$MOD_DONTDOTHAT" ] \
+ || fail "dontdothat_module not found, please use '--enable-shared --with-apxs' with your 'configure' script"
+
+for d in "$ABS_BUILDDIR"/subversion/*/.libs; do
+ if [ -z "$BUILDDIR_LIBRARY_PATH" ]; then
+ BUILDDIR_LIBRARY_PATH="$d"
+ else
+ BUILDDIR_LIBRARY_PATH="$BUILDDIR_LIBRARY_PATH:$d"
+ fi
+done
+
+case "`uname`" in
+ Darwin*)
+ DYLD_LIBRARY_PATH="$BUILDDIR_LIBRARY_PATH:$DYLD_LIBRARY_PATH"
+ export DYLD_LIBRARY_PATH
+ ;;
+ *)
+ LD_LIBRARY_PATH="$BUILDDIR_LIBRARY_PATH:$LD_LIBRARY_PATH"
+ export LD_LIBRARY_PATH
+ ;;
+esac
+
+httpd="$($APXS -q PROGNAME)"
+HTTPD=$(get_prog_name $httpd) || fail "HTTPD '$HTTPD' not found"
+[ -x $HTTPD ] || fail "HTTPD '$HTTPD' not executable"
+
+"$HTTPD" -v 1>/dev/null 2>&1 \
+ || fail "HTTPD '$HTTPD' doesn't start properly"
+
+HTPASSWD=$(get_prog_name htpasswd htpasswd2) \
+ || fail "Could not find htpasswd or htpasswd2"
+[ -x $HTPASSWD ] \
+ || fail "HTPASSWD '$HTPASSWD' not executable"
+say "Using '$HTPASSWD'..."
+
+LOAD_MOD_DAV=$(get_loadmodule_config mod_dav) \
+ || fail "DAV module not found"
+
+LOAD_MOD_LOG_CONFIG=$(get_loadmodule_config mod_log_config) \
+ || fail "log_config module not found"
+
+# needed for TypesConfig
+LOAD_MOD_MIME=$(get_loadmodule_config mod_mime) \
+ || fail "MIME module not found"
+
+LOAD_MOD_ALIAS=$(get_loadmodule_config mod_alias) \
+ || fail "ALIAS module not found"
+
+# needed for Auth*, Require, etc. directives
+LOAD_MOD_AUTH=$(get_loadmodule_config mod_auth) \
+ || {
+say "Monolithic Auth module not found. Assuming we run against Apache 2.1+"
+LOAD_MOD_AUTH="$(get_loadmodule_config mod_auth_basic)" \
+ || fail "Auth_Basic module not found."
+LOAD_MOD_ACCESS_COMPAT="$(get_loadmodule_config mod_access_compat)" \
+ && {
+say "Found modules for Apache 2.3.0+"
+LOAD_MOD_AUTHN_CORE="$(get_loadmodule_config mod_authn_core)" \
+ || fail "Authn_Core module not found."
+LOAD_MOD_AUTHZ_CORE="$(get_loadmodule_config mod_authz_core)" \
+ || fail "Authz_Core module not found."
+LOAD_MOD_UNIXD=$(get_loadmodule_config mod_unixd) \
+ || fail "UnixD module not found"
+}
+LOAD_MOD_AUTHN_FILE="$(get_loadmodule_config mod_authn_file)" \
+ || fail "Authn_File module not found."
+LOAD_MOD_AUTHZ_USER="$(get_loadmodule_config mod_authz_user)" \
+ || fail "Authz_User module not found."
+LOAD_MOD_AUTHZ_GROUPFILE="$(get_loadmodule_config mod_authz_groupfile)" \
+ || fail "Authz_GroupFile module not found."
+LOAD_MOD_AUTHZ_HOST="$(get_loadmodule_config mod_authz_host)" \
+ || fail "Authz_Host module not found."
+}
+if [ ${APACHE_MPM:+set} ]; then
+ LOAD_MOD_MPM=$(get_loadmodule_config mod_mpm_$APACHE_MPM) \
+ || fail "MPM module not found"
+fi
+if [ x"$APACHE_MPM" = x"event" ] && [ x"$FS_TYPE" = x"bdb" ]; then
+ fail "FS_TYPE=bdb and APACHE_MPM=event are mutually exclusive (see SVN-4157)"
+fi
+if [ ${USE_SSL:+set} ]; then
+ LOAD_MOD_SSL=$(get_loadmodule_config mod_ssl) \
+ || fail "SSL module not found"
+fi
+
+# Stop any previous instances, so we can re-use the port.
+if [ -x $STOPSCRIPT ]; then $STOPSCRIPT ; sleep 1; fi
+
+ss > /dev/null 2>&1 || netstat > /dev/null 2>&1 || fail "unable to find ss or netstat required to find a free port"
+
+HTTPD_PORT=3691
+while \
+ (ss -ltn sport = :$HTTPD_PORT 2>&1 | grep :$HTTPD_PORT > /dev/null ) \
+ || \
+ (netstat -an 2>&1 | grep $HTTPD_PORT | grep 'LISTEN' > /dev/null ) \
+ do
+ HTTPD_PORT=$(( HTTPD_PORT + 1 ))
+ if [ $HTTPD_PORT -eq 65536 ]; then
+ # Most likely the loop condition is true regardless of $HTTPD_PORT
+ fail "ss/netstat claim you have no free ports for httpd to listen on."
+ fi
+done
+HTTPD_ROOT="$ABS_BUILDDIR/subversion/tests/cmdline/httpd-$(date '+%Y%m%d-%H%M%S')"
+HTTPD_CFG="$HTTPD_ROOT/cfg"
+HTTPD_PID="$HTTPD_ROOT/pid"
+HTTPD_ACCESS_LOG="$HTTPD_ROOT/access_log"
+HTTPD_ERROR_LOG="$HTTPD_ROOT/error_log"
+HTTPD_MIME_TYPES="$HTTPD_ROOT/mime.types"
+HTTPD_DONTDOTHAT="$HTTPD_ROOT/dontdothat"
+if [ -z "$BASE_URL" ]; then
+ BASE_URL="http://localhost:$HTTPD_PORT"
+else
+ # Specify the public name of the host when using a proxy on another host, the
+ # port number will be appended.
+ BASE_URL="$BASE_URL:$HTTPD_PORT"
+fi
+HTTPD_USERS="$HTTPD_ROOT/users"
+HTTPD_GROUPS="$HTTPD_ROOT/groups"
+
+mkdir "$HTTPD_ROOT" \
+ || fail "couldn't create temporary directory '$HTTPD_ROOT'"
+
+say "Using directory '$HTTPD_ROOT'..."
+
+if [ ${USE_SSL:+set} ]; then
+ say "Setting up SSL"
+ BASE_URL="https://localhost:$HTTPD_PORT"
+# A self-signed certificate for localhost that expires after 2039-12-30
+# generated via:
+# openssl req -new -x509 -nodes -days 10000 -out cert.pem -keyout cert-key.pem
+# This is embedded, rather than generated on-the-fly, to avoid consuming
+# system entropy.
+ SSL_CERTIFICATE_FILE="$HTTPD_ROOT/cert.pem"
+cat > "$SSL_CERTIFICATE_FILE" <<__EOF__
+-----BEGIN CERTIFICATE-----
+MIIC7zCCAligAwIBAgIJALP1pLDiJRtuMA0GCSqGSIb3DQEBBQUAMFkxCzAJBgNV
+BAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBX
+aWRnaXRzIFB0eSBMdGQxEjAQBgNVBAMTCWxvY2FsaG9zdDAeFw0xMjA4MTMxNDA5
+MDRaFw0zOTEyMzAxNDA5MDRaMFkxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpTb21l
+LVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQxEjAQBgNV
+BAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA9kBx6trU
+WQnFNDrW+dU159zEbSWGts3ScITIMTLE4EclMh50SP2BnJDnetkNO8JhPXOm4KZi
+XdJugWAk0NmpawhAk3xVxHh5N8wwyPk3IMx7+Yu+sgcsd0Dj9YK1fIazgTUp/Dsk
+VGJvqu+kgNYxPvzWi/OsBLW/ZNp+spTzoAcCAwEAAaOBvjCBuzAdBgNVHQ4EFgQU
+f7OIDackB7zzPm10aiQgq9WzRdQwgYsGA1UdIwSBgzCBgIAUf7OIDackB7zzPm10
+aiQgq9WzRdShXaRbMFkxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRl
+MSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQxEjAQBgNVBAMTCWxv
+Y2FsaG9zdIIJALP1pLDiJRtuMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+gYEAD2rdgeVYCSEeseEfFCTNte//rDsT3coO9SbGOpmlCJ5TfbmXjs2YaQZH7NST
+mla3hw2Bf9ppTUw1ZWvOVgD3mpxAbYNBA/4HaxmK4GlS2kZsKiMr0xgcVGjmEIW/
+HS9q+PHwStDKNSyYc1+m+bUmeRGUKLgC4kuBF7JDK8A2WYc=
+-----END CERTIFICATE-----
+__EOF__
+ SSL_CERTIFICATE_KEY_FILE="$HTTPD_ROOT/cert-key.pem"
+cat > "$SSL_CERTIFICATE_KEY_FILE" <<__EOF__
+-----BEGIN RSA PRIVATE KEY-----
+MIICXQIBAAKBgQD2QHHq2tRZCcU0Otb51TXn3MRtJYa2zdJwhMgxMsTgRyUyHnRI
+/YGckOd62Q07wmE9c6bgpmJd0m6BYCTQ2alrCECTfFXEeHk3zDDI+TcgzHv5i76y
+Byx3QOP1grV8hrOBNSn8OyRUYm+q76SA1jE+/NaL86wEtb9k2n6ylPOgBwIDAQAB
+AoGBAJBzhV+rNl10qcXVrj2noJN+oYsVNE0Pt55hhb22dl7J3TvlOXmHm/xn1CHw
+KR8hC0GtEfs+Hv3CbyhdabtJs2L7QxO5VjgLO+onBmAOw1iPF9DjbMcAlFJnoOWI
+HYwANOWGp2jRxL5cHUfrBVCgUISen3VUZEnQkr4n/Zty/QEBAkEA/XIZ3oh5MiFA
+o4IaFaFQpBc6K/e6fnM0217scaPvfZiYS1k9Fx/UQTAGsxJOnhnsi04WgHPMS5wB
+RP4/PiIGIQJBAPi7yIKKS4E8hWBZL+79TI8Zm2uehGCB8V6m9k7e3I82To9Tgcow
+qZHsAPtN50fg85I94L3REg2FSQlDlzbMkScCQQC2pweLv/EQNrS94eJomkRirban
+vzYxMVfzjRp737iWXGXNT7feNXsjq7f4UAZGnMpDrvg6hLnD999WWKE9ZwnhAkBl
+c9p9/EB9zxyrxtT5StGuUIiHJdnirz2vGLTASMB3nXP/m9UFjkGr5jIkTos2Uzel
+/50qbxtI7oNyxuHnlRrjAkASfQ51kaBcABYRiacesQi94W/kE3MkgHWkCXNb6//u
+gxk/ezALZ8neJzJudzRkX3auGwH1ne9vCM1ED5dkM54H
+-----END RSA PRIVATE KEY-----
+__EOF__
+ SSL_MAKE_VAR="SSL_CERT=$SSL_CERTIFICATE_FILE"
+ SSL_TEST_ARG="--ssl-cert $SSL_CERTIFICATE_FILE"
+fi
+
+say "Adding users for lock authentication"
+$HTPASSWD -bc $HTTPD_USERS jrandom rayjandom
+$HTPASSWD -b $HTTPD_USERS jconstant rayjandom
+$HTPASSWD -b $HTTPD_USERS __dumpster__ __loadster__
+$HTPASSWD -b $HTTPD_USERS JRANDOM rayjandom
+$HTPASSWD -b $HTTPD_USERS JCONSTANT rayjandom
+
+say "Adding groups for mod_authz_svn tests"
+cat > "$HTTPD_GROUPS" <<__EOF__
+random: jrandom
+constant: jconstant
+__EOF__
+
+touch $HTTPD_MIME_TYPES
+
+cat > "$HTTPD_DONTDOTHAT" <<__EOF__
+[recursive-actions]
+/ = deny
+
+__EOF__
+
+cat > "$HTTPD_CFG" <<__EOF__
+$LOAD_MOD_MPM
+$LOAD_MOD_SSL
+$LOAD_MOD_LOG_CONFIG
+$LOAD_MOD_MIME
+$LOAD_MOD_ALIAS
+$LOAD_MOD_UNIXD
+$LOAD_MOD_DAV
+LoadModule dav_svn_module "$MOD_DAV_SVN"
+$LOAD_MOD_AUTH
+$LOAD_MOD_AUTHN_CORE
+$LOAD_MOD_AUTHN_FILE
+$LOAD_MOD_AUTHZ_CORE
+$LOAD_MOD_AUTHZ_USER
+$LOAD_MOD_AUTHZ_GROUPFILE
+$LOAD_MOD_AUTHZ_HOST
+$LOAD_MOD_ACCESS_COMPAT
+LoadModule authz_svn_module "$MOD_AUTHZ_SVN"
+LoadModule dontdothat_module "$MOD_DONTDOTHAT"
+
+__EOF__
+
+if "$HTTPD" -v | grep '/2\.[012]' >/dev/null; then
+ cat >> "$HTTPD_CFG" <<__EOF__
+LockFile lock
+User $(id -un)
+Group $(id -gn)
+__EOF__
+else
+HTTPD_LOCK="$HTTPD_ROOT/lock"
+mkdir "$HTTPD_LOCK" \
+ || fail "couldn't create lock directory '$HTTPD_LOCK'"
+ cat >> "$HTTPD_CFG" <<__EOF__
+# worker and prefork MUST have a mpm-accept lockfile in 2.3.0+
+<IfModule worker.c>
+ Mutex "file:$HTTPD_LOCK" mpm-accept
+</IfModule>
+<IfModule prefork.c>
+ Mutex "file:$HTTPD_LOCK" mpm-accept
+</IfModule>
+__EOF__
+fi
+
+if [ ${USE_SSL:+set} ]; then
+cat >> "$HTTPD_CFG" <<__EOF__
+SSLEngine on
+SSLCertificateFile $SSL_CERTIFICATE_FILE
+SSLCertificateKeyFile $SSL_CERTIFICATE_KEY_FILE
+__EOF__
+fi
+
+cat >> "$HTTPD_CFG" <<__EOF__
+Listen $HTTPD_PORT
+ServerName localhost
+PidFile "$HTTPD_PID"
+LogFormat "%h %l %u %t \"%r\" %>s %b \"%f\"" common
+CustomLog "$HTTPD_ACCESS_LOG" common
+ErrorLog "$HTTPD_ERROR_LOG"
+LogLevel debug
+ServerRoot "$HTTPD_ROOT"
+DocumentRoot "$HTTPD_ROOT"
+ScoreBoardFile "$HTTPD_ROOT/run"
+CoreDumpDirectory "$HTTPD_ROOT"
+TypesConfig "$HTTPD_MIME_TYPES"
+StartServers 4
+MaxRequestsPerChild 0
+<IfModule worker.c>
+ ThreadsPerChild 8
+</IfModule>
+<IfModule event.c>
+ ThreadsPerChild 8
+</IfModule>
+MaxClients 32
+HostNameLookups Off
+LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" format
+CustomLog "$HTTPD_ROOT/req" format
+CustomLog "$HTTPD_ROOT/ops" "%t %u %{SVN-REPOS-NAME}e %{SVN-ACTION}e" env=SVN-ACTION
+
+<Directory />
+ AllowOverride none
+</Directory>
+<Directory "$HTTPD_ROOT">
+ AllowOverride none
+ #Require all granted
+</Directory>
+
+<Location /svn-test-work/repositories>
+__EOF__
+location_common() {
+cat >> "$HTTPD_CFG" <<__EOF__
+ DAV svn
+ AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz"
+ AuthType Basic
+ AuthName "Subversion Repository"
+ AuthUserFile $HTTPD_USERS
+ SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL}
+ SVNCacheRevProps ${CACHE_REVPROPS_SETTING}
+ SVNListParentPath On
+ SVNBlockRead ${BLOCK_READ_SETTING}
+__EOF__
+}
+location_common
+cat >> "$HTTPD_CFG" <<__EOF__
+ SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/repositories"
+ Require valid-user
+ ${SVN_PATH_AUTHZ_LINE}
+</Location>
+<Location /ddt-test-work/repositories>
+__EOF__
+location_common
+cat >> "$HTTPD_CFG" <<__EOF__
+ SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/repositories"
+ Require valid-user
+ ${SVN_PATH_AUTHZ_LINE}
+ DontDoThatConfigFile "$HTTPD_DONTDOTHAT"
+</Location>
+<Location /svn-test-work/local_tmp/repos>
+__EOF__
+location_common
+cat >> "$HTTPD_CFG" <<__EOF__
+ SVNPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp/repos"
+ Require valid-user
+ ${SVN_PATH_AUTHZ_LINE}
+</Location>
+<Location /authz-test-work/anon>
+ DAV svn
+ SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp"
+ AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz"
+ SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL}
+ SVNCacheRevProps ${CACHE_REVPROPS_SETTING}
+ SVNListParentPath On
+ # This may seem unnecessary but granting access to everyone here is necessary
+ # to exercise a bug with httpd 2.3.x+. The "Require all granted" syntax is
+ # new to 2.3.x+ which we can detect with the mod_authz_core.c module
+ # signature. Use the "Allow from all" syntax with older versions for symmetry.
+ <IfModule mod_authz_core.c>
+ Require all granted
+ </IfModule>
+ <IfModule !mod_authz_core.c>
+ Allow from all
+ </IfModule>
+ ${SVN_PATH_AUTHZ_LINE}
+</Location>
+<Location /authz-test-work/mixed>
+__EOF__
+location_common
+cat >> "$HTTPD_CFG" <<__EOF__
+ SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp"
+ Require valid-user
+ Satisfy Any
+ ${SVN_PATH_AUTHZ_LINE}
+</Location>
+<Location /authz-test-work/mixed-noauthwhenanon>
+__EOF__
+location_common
+cat >> "$HTTPD_CFG" <<__EOF__
+ SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp"
+ Require valid-user
+ AuthzSVNNoAuthWhenAnonymousAllowed On
+ SVNPathAuthz On
+</Location>
+<Location /authz-test-work/authn>
+__EOF__
+location_common
+cat >> "$HTTPD_CFG" <<__EOF__
+ SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp"
+ Require valid-user
+ ${SVN_PATH_AUTHZ_LINE}
+</Location>
+<Location /authz-test-work/authn-anonoff>
+__EOF__
+location_common
+cat >> "$HTTPD_CFG" <<__EOF__
+ SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp"
+ Require valid-user
+ AuthzSVNAnonymous Off
+ SVNPathAuthz On
+</Location>
+<Location /authz-test-work/authn-lcuser>
+__EOF__
+location_common
+cat >> "$HTTPD_CFG" <<__EOF__
+ SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp"
+ Require valid-user
+ AuthzForceUsernameCase Lower
+ ${SVN_PATH_AUTHZ_LINE}
+</Location>
+<Location /authz-test-work/authn-group>
+__EOF__
+location_common
+cat >> "$HTTPD_CFG" <<__EOF__
+ SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp"
+ AuthGroupFile $HTTPD_GROUPS
+ Require group random
+ AuthzSVNAuthoritative Off
+ SVNPathAuthz On
+</Location>
+<IfModule mod_authz_core.c>
+ <Location /authz-test-work/sallrany>
+__EOF__
+location_common
+cat >> "$HTTPD_CFG" <<__EOF__
+ SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp"
+ AuthzSendForbiddenOnFailure On
+ Satisfy All
+ <RequireAny>
+ Require valid-user
+ Require expr req('ALLOW') == '1'
+ </RequireAny>
+ ${SVN_PATH_AUTHZ_LINE}
+ </Location>
+ <Location /authz-test-work/sallrall>
+__EOF__
+location_common
+cat >> "$HTTPD_CFG" <<__EOF__
+ SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp"
+ AuthzSendForbiddenOnFailure On
+ Satisfy All
+ <RequireAll>
+ Require valid-user
+ Require expr req('ALLOW') == '1'
+ </RequireAll>
+ ${SVN_PATH_AUTHZ_LINE}
+ </Location>
+</IfModule>
+RedirectMatch permanent ^/svn-test-work/repositories/REDIRECT-PERM-(.*)\$ /svn-test-work/repositories/\$1
+RedirectMatch ^/svn-test-work/repositories/REDIRECT-TEMP-(.*)\$ /svn-test-work/repositories/\$1
+__EOF__
+
+
+# Our configure script extracts the HTTPD version from
+# headers. However, that may not be the same as the runtime version;
+# an example of this discrepancy occurs on OSX 1.9.5, where the
+# headers report 2.2.26 but the server reports 2.2.29. Since our tests
+# use the version to interpret test case results, use the actual
+# runtime version here to avoid spurious test failures.
+HTTPD_VERSION=$("$HTTPD" -V -f $HTTPD_CFG | grep '^Server version:' | sed 's|^.*/\([0-9]*\.[0-9]*\.[0-9]*\).*$|\1|')
+
+START="$HTTPD -f $HTTPD_CFG"
+printf \
+'#!/bin/sh
+if [ -d "%s" ]; then
+ printf "Stopping previous HTTPD instance..."
+ if %s -k stop; then
+ # httpd had no output; echo a newline.
+ echo ""
+ elif [ -s "%s" ]; then
+ # httpd would have printed an error terminated by a newline.
+ kill -9 "`cat %s`"
+ fi
+fi
+' >$STOPSCRIPT "$HTTPD_ROOT" "$START" "$HTTPD_PID" "$HTTPD_PID"
+chmod +x $STOPSCRIPT
+
+$START -t > /dev/null \
+ || fail "Configuration file didn't pass the check, most likely modules couldn't be loaded"
+
+# need to pause for some time to let HTTPD start
+$START &
+sleep 2
+
+say "HTTPD $HTTPD_VERSION started and listening on '$BASE_URL'..."
+#query "Ready" "y"
+
+# Perform a trivial validation of our httpd configuration by
+# downloading a file and comparing it to the original copy.
+### The file at the path "/cfg" can't be retrieved from Apache 2.3+.
+### We get a 500 ISE, with the following error in the log from httpd's
+### server/request.c:ap_process_request_internal():
+### [Wed Feb 22 13:06:55 2006] [crit] [client 127.0.0.1] configuration error: couldn't check user: /cfg
+HTTP_FETCH=wget
+HTTP_FETCH_OUTPUT="--no-check-certificate -q -O"
+type wget > /dev/null 2>&1
+if [ $? -ne 0 ]; then
+ type curl > /dev/null 2>&1
+ if [ $? -ne 0 ]; then
+ fail "Neither curl or wget found."
+ fi
+ HTTP_FETCH=curl
+ HTTP_FETCH_OUTPUT='-s -k -o'
+fi
+$HTTP_FETCH $HTTP_FETCH_OUTPUT "$HTTPD_CFG-copy" "$BASE_URL/cfg"
+diff "$HTTPD_CFG" "$HTTPD_CFG-copy" > /dev/null \
+ || fail "HTTPD doesn't operate according to the generated configuration"
+rm "$HTTPD_CFG-copy"
+
+say "HTTPD is good"
+
+if [ $# -eq 1 ] && [ "x$1" = 'x--no-tests' ]; then
+ echo "http://localhost:$HTTPD_PORT/svn-test-work/repositories"
+ exit
+fi
+
+if [ $# -eq 1 ] && [ "x$1" = 'x--lldb' ]; then
+ echo "http://localhost:$HTTPD_PORT/svn-test-work/repositories"
+ $STOPSCRIPT && lldb --one-line=run -- $START -X
+ exit
+fi
+
+if [ $# -eq 1 ] && [ "x$1" = 'x--gdb' ]; then
+ echo "http://localhost:$HTTPD_PORT/svn-test-work/repositories"
+ $STOPSCRIPT && gdb -silent -ex r -args $START -X
+ exit
+fi
+
+if type time > /dev/null ; then TIME_CMD() { time "$@"; } ; else TIME_CMD() { "$@"; } ; fi
+
+MAKE=${MAKE:-make}
+
+say "starting the tests..."
+
+CLIENT_CMD="$ABS_BUILDDIR/subversion/svn/svn"
+
+if [ "$HTTP_LIBRARY" = "" ]; then
+ say "Using default dav library"
+ "$CLIENT_CMD" --version | egrep '^[*] ra_(neon|serf)' >/dev/null \
+ || fail "Subversion client couldn't find and/or load ra_dav library"
+else
+ say "Requesting dav library '$HTTP_LIBRARY'"
+ "$CLIENT_CMD" --version | egrep "^[*] ra_$HTTP_LIBRARY" >/dev/null \
+ || fail "Subversion client couldn't find and/or load ra_dav library '$HTTP_LIBRARY'"
+fi
+
+if [ $# = 0 ]; then
+ TIME_CMD "$MAKE" check "BASE_URL=$BASE_URL" "HTTPD_VERSION=$HTTPD_VERSION" $SSL_MAKE_VAR
+ r=$?
+else
+ (cd "$ABS_BUILDDIR/subversion/tests/cmdline/"
+ TEST="$1"
+ shift
+ TIME_CMD "$ABS_SRCDIR/subversion/tests/cmdline/${TEST}_tests.py" "--url=$BASE_URL" "--httpd-version=$HTTPD_VERSION" $SSL_TEST_ARG "$@")
+ r=$?
+fi
+
+say "Finished testing..."
+
+kill $(cat "$HTTPD_PID")
+
+query 'Browse server access log' n \
+ && less "$HTTPD_ACCESS_LOG"
+
+query 'Browse server error log' n \
+ && less "$HTTPD_ERROR_LOG"
+
+query 'Delete HTTPD root directory' y \
+ && rm -fr "$HTTPD_ROOT/"
+
+say 'Done'
+
+exit $r
diff --git a/subversion/tests/cmdline/depth_tests.py b/subversion/tests/cmdline/depth_tests.py
new file mode 100755
index 0000000..1277594
--- /dev/null
+++ b/subversion/tests/cmdline/depth_tests.py
@@ -0,0 +1,3038 @@
+#!/usr/bin/env python
+#
+# depth_tests.py: Testing that operations work as expected at
+# various depths (depth-empty, depth-files,
+# depth-immediates, depth-infinity).
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os
+import re
+
+# Our testing module
+import svntest
+from svntest import wc
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = wc.StateItem
+
+# For errors setting up the depthy working copies.
+class DepthSetupError(Exception):
+ def __init__ (self, args=None):
+ self.args = args
+
+def set_up_depthy_working_copies(sbox, empty=False, files=False,
+ immediates=False, infinity=False):
+ """Set up up to four working copies, at various depths. At least
+ one of depths EMPTY, FILES, IMMEDIATES, or INFINITY must be passed
+ as True. The corresponding working copy paths are returned in a
+ four-element tuple in that order, with element value of None for
+ working copies that were not created. If all args are False, raise
+ DepthSetupError."""
+
+ if not (infinity or empty or files or immediates):
+ raise DepthSetupError("At least one working copy depth must be passed.")
+
+ wc = None
+ if infinity:
+ sbox.build()
+ wc = sbox.wc_dir
+ else:
+ sbox.build(create_wc = False)
+ sbox.add_test_path(sbox.wc_dir, True)
+
+ wc_empty = None
+ if empty:
+ wc_empty = sbox.wc_dir + '-depth-empty'
+ sbox.add_test_path(wc_empty, True)
+ svntest.actions.run_and_verify_svn(
+ svntest.verify.AnyOutput, [],
+ "co", "--depth", "empty", sbox.repo_url, wc_empty)
+
+ wc_files = None
+ if files:
+ wc_files = sbox.wc_dir + '-depth-files'
+ sbox.add_test_path(wc_files, True)
+ svntest.actions.run_and_verify_svn(
+ svntest.verify.AnyOutput, [],
+ "co", "--depth", "files", sbox.repo_url, wc_files)
+
+ wc_immediates = None
+ if immediates:
+ wc_immediates = sbox.wc_dir + '-depth-immediates'
+ sbox.add_test_path(wc_immediates, True)
+ svntest.actions.run_and_verify_svn(
+ svntest.verify.AnyOutput, [],
+ "co", "--depth", "immediates",
+ sbox.repo_url, wc_immediates)
+
+ return wc_empty, wc_files, wc_immediates, wc
+
+def verify_depth(msg, depth, path="."):
+ """Verifies that PATH has depth DEPTH. MSG is the failure message."""
+ if depth == "infinity":
+ # Check for absence of depth line.
+ exit_code, out, err = svntest.actions.run_and_verify_svn(None,
+ [], "info", path)
+ for line in out:
+ if line.startswith("Depth:"):
+ raise svntest.Failure(msg)
+ else:
+ expected_stdout = svntest.verify.ExpectedOutput("Depth: %s\n" % depth,
+ match_all=False)
+ svntest.actions.run_and_verify_svn(
+ expected_stdout, [], "info", path)
+
+#----------------------------------------------------------------------
+# Ensure that 'checkout --depth=empty' results in a depth-empty working copy.
+def depth_empty_checkout(sbox):
+ "depth-empty checkout"
+
+ wc_empty, ign_a, ign_b, ign_c = set_up_depthy_working_copies(sbox, empty=True)
+
+ if os.path.exists(os.path.join(wc_empty, "iota")):
+ raise svntest.Failure("depth-empty checkout created file 'iota'")
+
+ if os.path.exists(os.path.join(wc_empty, "A")):
+ raise svntest.Failure("depth-empty checkout created subdir 'A'")
+
+ verify_depth("Expected depth empty for top of WC, got some other depth",
+ "empty", wc_empty)
+
+
+# Helper for two test functions.
+def depth_files_same_as_nonrecursive(sbox, opt):
+ """Run a depth-files or non-recursive checkout, depending on whether
+ passed '-N' or '--depth=files' for OPT. The two should get the same
+ result, hence this helper containing the common code between the
+ two tests."""
+
+ # This duplicates some code from set_up_depthy_working_copies(), but
+ # that's because it's abstracting out a different axis.
+
+ sbox.build(create_wc = False, read_only = True)
+ if os.path.exists(sbox.wc_dir):
+ svntest.main.safe_rmtree(sbox.wc_dir)
+
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ "co", opt, sbox.repo_url, sbox.wc_dir)
+
+ # Should create a depth-files top directory, so both iota and A
+ # should exist, and A should be empty and depth-empty.
+
+ if not os.path.exists(sbox.ospath('iota')):
+ raise svntest.Failure("'checkout %s' failed to create file 'iota'" % opt)
+
+ if os.path.exists(sbox.ospath('A')):
+ raise svntest.Failure("'checkout %s' unexpectedly created subdir 'A'" % opt)
+
+ verify_depth("Expected depth files for top of WC, got some other depth",
+ "files", sbox.wc_dir)
+
+
+def depth_files_checkout(sbox):
+ "depth-files checkout"
+ depth_files_same_as_nonrecursive(sbox, "--depth=files")
+
+
+def nonrecursive_checkout(sbox):
+ "non-recursive checkout equals depth-files"
+ depth_files_same_as_nonrecursive(sbox, "-N")
+
+
+#----------------------------------------------------------------------
+def depth_empty_update_bypass_single_file(sbox):
+ "update depth-empty wc shouldn't receive file mod"
+
+ wc_empty, ign_a, ign_b, wc = set_up_depthy_working_copies(sbox, empty=True,
+ infinity=True)
+
+ iota_path = os.path.join(wc, 'iota')
+ svntest.main.file_append(iota_path, "new text\n")
+
+ # Commit in the "other" wc.
+ expected_output = svntest.wc.State(wc, { 'iota' : Item(verb='Sending'), })
+ expected_status = svntest.actions.get_virginal_state(wc, 1)
+ expected_status.tweak('iota', wc_rev=2, status=' ')
+ svntest.actions.run_and_verify_commit(wc,
+ expected_output,
+ expected_status)
+
+ # Update the depth-empty wc, expecting not to receive the change to iota.
+ expected_output = svntest.wc.State(wc_empty, { })
+ expected_disk = svntest.wc.State('', { })
+ expected_status = svntest.wc.State(wc_empty, { '' : svntest.wc.StateItem() })
+ expected_status.tweak(contents=None, status=' ', wc_rev=2)
+ svntest.actions.run_and_verify_update(wc_empty,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+ # And the wc should still be depth-empty.
+ verify_depth(None, "empty", wc_empty)
+
+ # Even if we explicitly ask for a depth-infinity update, we still shouldn't
+ # get the change to iota.
+ svntest.actions.run_and_verify_update(wc_empty,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ "--depth=infinity", wc_empty)
+
+ # And the wc should still be depth-empty.
+ verify_depth(None, "empty", wc_empty)
+
+
+#----------------------------------------------------------------------
+def depth_immediates_get_top_file_mod_only(sbox):
+ "update depth-immediates wc gets top file mod only"
+
+ ign_a, ign_b, wc_immediates, wc \
+ = set_up_depthy_working_copies(sbox, immediates=True, infinity=True)
+
+ iota_path = os.path.join(wc, 'iota')
+ svntest.main.file_append(iota_path, "new text in iota\n")
+ mu_path = os.path.join(wc, 'A', 'mu')
+ svntest.main.file_append(mu_path, "new text in mu\n")
+
+ # Commit in the "other" wc.
+ expected_output = svntest.wc.State(wc,
+ { 'iota' : Item(verb='Sending'),
+ 'A/mu' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc, 1)
+ expected_status.tweak('iota', wc_rev=2, status=' ')
+ expected_status.tweak('A/mu', wc_rev=2, status=' ')
+ svntest.actions.run_and_verify_commit(wc,
+ expected_output,
+ expected_status)
+
+ # Update the depth-immediates wc, expecting to receive only the
+ # change to iota.
+ expected_output = svntest.wc.State(wc_immediates,
+ { 'iota' : Item(status='U ') })
+ expected_disk = svntest.wc.State('', { })
+ expected_disk.add(\
+ {'iota' : Item(contents="This is the file 'iota'.\nnew text in iota\n"),
+ 'A' : Item(contents=None) } )
+ expected_status = svntest.wc.State(wc_immediates,
+ { '' : svntest.wc.StateItem() })
+ expected_status.tweak(contents=None, status=' ', wc_rev=2)
+ expected_status.add(\
+ {'iota' : Item(status=' ', wc_rev=2),
+ 'A' : Item(status=' ', wc_rev=2) } )
+ svntest.actions.run_and_verify_update(wc_immediates,
+ expected_output,
+ expected_disk,
+ expected_status)
+ verify_depth(None, "immediates", wc_immediates)
+
+
+#----------------------------------------------------------------------
+def depth_empty_commit(sbox):
+ "commit a file from a depth-empty working copy"
+ # Bring iota into a depth-empty working copy, then commit a change to it.
+ wc_empty, ign_a, ign_b, ign_c = set_up_depthy_working_copies(sbox,
+ empty=True)
+
+ # Form the working path of iota
+ wc_empty_iota = os.path.join(wc_empty, 'iota')
+
+ # Update 'iota' in the depth-empty working copy and modify it
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', wc_empty_iota)
+ svntest.main.file_write(wc_empty_iota, "iota modified")
+
+ # Commit the modified changes from a depth-empty working copy
+ expected_output = svntest.wc.State(wc_empty, {
+ 'iota' : Item(verb='Sending'),
+ })
+ expected_status = svntest.wc.State(wc_empty, { })
+ expected_status.add({
+ '' : Item(status=' ', wc_rev=1),
+ 'iota' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_commit(wc_empty,
+ expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+def depth_empty_with_file(sbox):
+ "act on a file in a depth-empty working copy"
+ # Run 'svn up iota' to bring iota permanently into the working copy.
+ wc_empty, ign_a, ign_b, wc = set_up_depthy_working_copies(sbox, empty=True,
+ infinity=True)
+
+ iota_path = os.path.join(wc_empty, 'iota')
+ if os.path.exists(iota_path):
+ raise svntest.Failure("'%s' exists when it shouldn't" % iota_path)
+
+ ### I'd love to do this using the recommended {expected_output,
+ ### expected_status, expected_disk} method here, but after twenty
+ ### minutes of trying to figure out how, I decided to compromise.
+
+ # Update iota by name, expecting to receive it.
+ svntest.actions.run_and_verify_svn(None, [], 'up', iota_path)
+
+ # Test that we did receive it.
+ if not os.path.exists(iota_path):
+ raise svntest.Failure("'%s' doesn't exist when it should" % iota_path)
+
+ # Commit a change to iota in the "other" wc.
+ other_iota_path = os.path.join(wc, 'iota')
+ svntest.main.file_append(other_iota_path, "new text\n")
+ expected_output = svntest.wc.State(wc, { 'iota' : Item(verb='Sending'), })
+ expected_status = svntest.actions.get_virginal_state(wc, 1)
+ expected_status.tweak('iota', wc_rev=2, status=' ')
+ svntest.actions.run_and_verify_commit(wc,
+ expected_output,
+ expected_status)
+
+ # Delete iota in the "other" wc.
+ other_iota_path = os.path.join(wc, 'iota')
+ svntest.actions.run_and_verify_svn(None, [], 'rm', other_iota_path)
+ expected_output = svntest.wc.State(wc, { 'iota' : Item(verb='Deleting'), })
+ expected_status = svntest.actions.get_virginal_state(wc, 1)
+ expected_status.remove('iota')
+ svntest.actions.run_and_verify_commit(wc,
+ expected_output,
+ expected_status)
+
+ # Update the depth-empty wc just a little, expecting to receive
+ # the change in iota.
+ expected_output = svntest.wc.State(\
+ wc_empty, { 'iota' : Item(status='U ') })
+ expected_disk = svntest.wc.State(\
+ '', { 'iota' : Item(contents="This is the file 'iota'.\nnew text\n") })
+ expected_status = svntest.wc.State(wc_empty,
+ { '' : Item(status=' ', wc_rev=2),
+ 'iota' : Item(status=' ', wc_rev=2),})
+ svntest.actions.run_and_verify_update(wc_empty,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '-r2', wc_empty)
+
+ # Update the depth-empty wc all the way, expecting to receive the deletion
+ # of iota.
+ expected_output = svntest.wc.State(\
+ wc_empty, { 'iota' : Item(status='D ') })
+ expected_disk = svntest.wc.State('', { })
+ expected_status = svntest.wc.State(\
+ wc_empty, { '' : Item(status=' ', wc_rev=3) })
+ svntest.actions.run_and_verify_update(wc_empty,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+
+#----------------------------------------------------------------------
+def depth_empty_with_dir(sbox):
+ "bring a dir into a depth-empty working copy"
+ # Run 'svn up A' to bring A permanently into the working copy.
+ wc_empty, ign_a, ign_b, wc = set_up_depthy_working_copies(sbox, empty=True,
+ infinity=True)
+
+ A_path = os.path.join(wc_empty, 'A')
+ other_mu_path = os.path.join(wc, 'A', 'mu')
+
+ # We expect A to be added at depth infinity, so a normal 'svn up A'
+ # should be sufficient to add all descendants.
+ expected_output = svntest.wc.State(wc_empty, {
+ 'A' : Item(status='A '),
+ 'A/mu' : Item(status='A '),
+ 'A/B' : Item(status='A '),
+ 'A/B/lambda' : Item(status='A '),
+ 'A/B/E' : Item(status='A '),
+ 'A/B/E/alpha' : Item(status='A '),
+ 'A/B/E/beta' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ 'A/C' : Item(status='A '),
+ 'A/D' : Item(status='A '),
+ 'A/D/gamma' : Item(status='A '),
+ 'A/D/G' : Item(status='A '),
+ 'A/D/G/pi' : Item(status='A '),
+ 'A/D/G/rho' : Item(status='A '),
+ 'A/D/G/tau' : Item(status='A '),
+ 'A/D/H' : Item(status='A '),
+ 'A/D/H/chi' : Item(status='A '),
+ 'A/D/H/psi' : Item(status='A '),
+ 'A/D/H/omega' : Item(status='A ')
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('iota')
+ expected_status = svntest.actions.get_virginal_state(wc_empty, 1)
+ expected_status.remove('iota')
+ svntest.actions.run_and_verify_update(wc_empty,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ A_path)
+
+ # Commit a change to A/mu in the "other" wc.
+ svntest.main.file_write(other_mu_path, "new text\n")
+ expected_output = svntest.wc.State(\
+ wc, { 'A/mu' : Item(verb='Sending'), })
+ expected_status = svntest.actions.get_virginal_state(wc, 1)
+ expected_status.tweak('A/mu', wc_rev=2, status=' ')
+ svntest.actions.run_and_verify_commit(wc,
+ expected_output,
+ expected_status)
+
+ # Update "A" by name in wc_empty, expect to receive the change to A/mu.
+ expected_output = svntest.wc.State(wc_empty, { 'A/mu' : Item(status='U ') })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('iota')
+ expected_disk.tweak('A/mu', contents='new text\n')
+ expected_status = svntest.actions.get_virginal_state(wc_empty, 2)
+ expected_status.remove('iota')
+ expected_status.tweak('', wc_rev=1)
+ svntest.actions.run_and_verify_update(wc_empty,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ A_path)
+
+ # Commit the deletion of A/mu from the "other" wc.
+ svntest.main.file_write(other_mu_path, "new text\n")
+ svntest.actions.run_and_verify_svn(None, [], 'rm', other_mu_path)
+ expected_output = svntest.wc.State(wc, { 'A/mu' : Item(verb='Deleting'), })
+ expected_status = svntest.actions.get_virginal_state(wc, 1)
+ expected_status.remove('A/mu')
+ svntest.actions.run_and_verify_commit(wc,
+ expected_output,
+ expected_status)
+
+
+ # Update "A" by name in wc_empty, expect to A/mu to disappear.
+ expected_output = svntest.wc.State(wc_empty, { 'A/mu' : Item(status='D ') })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('iota')
+ expected_disk.remove('A/mu')
+ expected_status = svntest.actions.get_virginal_state(wc_empty, 3)
+ expected_status.remove('iota')
+ expected_status.remove('A/mu')
+ expected_status.tweak('', wc_rev=1)
+ svntest.actions.run_and_verify_update(wc_empty,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ A_path)
+
+
+
+#----------------------------------------------------------------------
+def depth_immediates_bring_in_file(sbox):
+ "bring a file into a depth-immediates working copy"
+
+ # Create an immediates working copy and form the paths
+ ign_a, ign_b, wc_imm, wc = set_up_depthy_working_copies(sbox,
+ immediates=True)
+ A_mu_path = os.path.join(wc_imm, 'A', 'mu')
+ gamma_path = os.path.join(wc_imm, 'A', 'D', 'gamma')
+
+ # Run 'svn up A/mu' to bring A/mu permanently into the working copy.
+ expected_output = svntest.wc.State(wc_imm, {
+ 'A/mu' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/C', 'A/B/lambda', 'A/B/E', 'A/B/E/alpha',
+ 'A/B/E/beta', 'A/B/F', 'A/B', 'A/D/gamma', 'A/D/G',
+ 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau', 'A/D/H/chi',
+ 'A/D/H/psi', 'A/D/H/omega', 'A/D/H', 'A/D')
+ expected_status = svntest.actions.get_virginal_state(wc_imm, 1)
+ expected_status.remove('A/C', 'A/B/lambda', 'A/B/E', 'A/B/E/alpha',
+ 'A/B/E/beta', 'A/B/F', 'A/B', 'A/D/gamma', 'A/D/G',
+ 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau', 'A/D/H/chi',
+ 'A/D/H/psi', 'A/D/H/omega', 'A/D/H', 'A/D')
+ svntest.actions.run_and_verify_update(wc_imm,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ A_mu_path)
+
+ # Run 'svn up A/D/gamma' to test the edge case 'Skipped'.
+ svntest.actions.run_and_verify_svn(["Skipped '"+gamma_path+"'\n", ],
+ "svn: E155007: ", 'update', gamma_path)
+ svntest.actions.run_and_verify_status(wc_imm, expected_status)
+
+#----------------------------------------------------------------------
+def depth_immediates_fill_in_dir(sbox):
+ "bring a dir into a depth-immediates working copy"
+
+ # Run 'svn up A --set-depth=infinity' to fill in A as a
+ # depth-infinity subdir.
+ ign_a, ign_b, wc_immediates, wc \
+ = set_up_depthy_working_copies(sbox, immediates=True)
+ A_path = os.path.join(wc_immediates, 'A')
+ expected_output = svntest.wc.State(wc_immediates, {
+ 'A/mu' : Item(status='A '),
+ 'A/B' : Item(status='A '),
+ 'A/B/lambda' : Item(status='A '),
+ 'A/B/E' : Item(status='A '),
+ 'A/B/E/alpha' : Item(status='A '),
+ 'A/B/E/beta' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ 'A/C' : Item(status='A '),
+ 'A/D' : Item(status='A '),
+ 'A/D/gamma' : Item(status='A '),
+ 'A/D/G' : Item(status='A '),
+ 'A/D/G/pi' : Item(status='A '),
+ 'A/D/G/rho' : Item(status='A '),
+ 'A/D/G/tau' : Item(status='A '),
+ 'A/D/H' : Item(status='A '),
+ 'A/D/H/chi' : Item(status='A '),
+ 'A/D/H/psi' : Item(status='A '),
+ 'A/D/H/omega' : Item(status='A ')
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_immediates, 1)
+ svntest.actions.run_and_verify_update(wc_immediates,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'infinity',
+ A_path)
+
+#----------------------------------------------------------------------
+def depth_mixed_bring_in_dir(sbox):
+ "bring a dir into a mixed-depth working copy"
+
+ # Run 'svn up --set-depth=immediates A' in a depth-empty working copy.
+ wc_empty, ign_a, ign_b, wc = set_up_depthy_working_copies(sbox, empty=True)
+ A_path = os.path.join(wc_empty, 'A')
+ B_path = os.path.join(wc_empty, 'A', 'B')
+ C_path = os.path.join(wc_empty, 'A', 'C')
+
+ expected_output = svntest.wc.State(wc_empty, {
+ 'A' : Item(status='A '),
+ 'A/mu' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('iota', 'A/B', 'A/B/lambda', 'A/B/E', 'A/B/E/alpha',
+ 'A/B/E/beta', 'A/B/F', 'A/C', 'A/D', 'A/D/gamma',
+ 'A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau',
+ 'A/D/H', 'A/D/H/chi', 'A/D/H/psi', 'A/D/H/omega')
+ expected_status = svntest.actions.get_virginal_state(wc_empty, 1)
+ expected_status.remove('iota', 'A/B', 'A/B/lambda', 'A/B/E', 'A/B/E/alpha',
+ 'A/B/E/beta', 'A/B/F', 'A/C', 'A/D', 'A/D/gamma',
+ 'A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau',
+ 'A/D/H', 'A/D/H/chi', 'A/D/H/psi', 'A/D/H/omega')
+ svntest.actions.run_and_verify_update(wc_empty,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'files',
+ A_path)
+ # Check that A was added at depth=files.
+ verify_depth(None, "files", A_path)
+
+ # Now, bring in A/B at depth-immediates.
+ expected_output = svntest.wc.State(wc_empty, {
+ 'A/B' : Item(status='A '),
+ 'A/B/lambda' : Item(status='A '),
+ 'A/B/E' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('iota', 'A/B/E/alpha', 'A/B/E/beta', 'A/C',
+ 'A/D', 'A/D/gamma', 'A/D/G', 'A/D/G/pi', 'A/D/G/rho',
+ 'A/D/G/tau', 'A/D/H', 'A/D/H/chi', 'A/D/H/psi',
+ 'A/D/H/omega')
+ expected_status = svntest.actions.get_virginal_state(wc_empty, 1)
+ expected_status.remove('iota', 'A/B/E/alpha', 'A/B/E/beta', 'A/C',
+ 'A/D', 'A/D/gamma', 'A/D/G', 'A/D/G/pi', 'A/D/G/rho',
+ 'A/D/G/tau', 'A/D/H', 'A/D/H/chi', 'A/D/H/psi',
+ 'A/D/H/omega')
+ svntest.actions.run_and_verify_update(wc_empty,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'immediates',
+ B_path)
+ # Check that A/B was added at depth=immediates.
+ verify_depth(None, "immediates", B_path)
+
+ # Now, bring in A/C at depth-empty.
+ expected_output = svntest.wc.State(wc_empty, {
+ 'A/C' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('iota', 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/D', 'A/D/gamma', 'A/D/G', 'A/D/G/pi', 'A/D/G/rho',
+ 'A/D/G/tau', 'A/D/H', 'A/D/H/chi', 'A/D/H/psi',
+ 'A/D/H/omega')
+ expected_status = svntest.actions.get_virginal_state(wc_empty, 1)
+ expected_status.remove('iota', 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/D', 'A/D/gamma', 'A/D/G', 'A/D/G/pi', 'A/D/G/rho',
+ 'A/D/G/tau', 'A/D/H', 'A/D/H/chi', 'A/D/H/psi',
+ 'A/D/H/omega')
+ svntest.actions.run_and_verify_update(wc_empty,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'empty',
+ C_path)
+ # Check that A/C was added at depth=empty.
+ verify_depth(None, "empty", C_path)
+
+#----------------------------------------------------------------------
+def depth_empty_unreceive_delete(sbox):
+  "depth-empty working copy ignores a deletion"
+  # (The title string above doubles as the test's display name in the
+  # svntest harness; it must stay a single short line.)
+  # Check out a depth-empty greek tree to wc1. In wc2, delete iota and
+  # commit. Update wc1; should not receive the delete.
+  wc_empty, ign_a, ign_b, wc = set_up_depthy_working_copies(sbox, empty=True,
+                                                            infinity=True)
+
+  iota_path = os.path.join(wc, 'iota')
+
+  # Commit in the "other" wc.
+  svntest.actions.run_and_verify_svn(None, [], 'rm', iota_path)
+  expected_output = svntest.wc.State(wc, { 'iota' : Item(verb='Deleting'), })
+  expected_status = svntest.actions.get_virginal_state(wc, 1)
+  expected_status.remove('iota')
+  svntest.actions.run_and_verify_commit(wc,
+                                        expected_output,
+                                        expected_status)
+
+  # Update the depth-empty wc, expecting not to receive the deletion of iota.
+  # Empty expected output/disk trees: the update must pull in nothing at all.
+  expected_output = svntest.wc.State(wc_empty, { })
+  expected_disk = svntest.wc.State('', { })
+  expected_status = svntest.wc.State(wc_empty, { '' : svntest.wc.StateItem() })
+  expected_status.tweak(contents=None, status='  ', wc_rev=2)
+  svntest.actions.run_and_verify_update(wc_empty,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status)
+
+
+#----------------------------------------------------------------------
+def depth_immediates_unreceive_delete(sbox):
+  "depth-immediates working copy ignores a deletion"
+  # Check out a depth-immediates greek tree to wc1. In wc2, delete
+  # A/mu and commit. Update wc1; should not receive the delete.
+
+  ign_a, ign_b, wc_immed, wc = set_up_depthy_working_copies(sbox,
+                                                            immediates=True,
+                                                            infinity=True)
+
+  mu_path = os.path.join(wc, 'A', 'mu')
+
+  # Commit in the "other" wc.
+  svntest.actions.run_and_verify_svn(None, [], 'rm', mu_path)
+  expected_output = svntest.wc.State(wc, { 'A/mu' : Item(verb='Deleting'), })
+  expected_status = svntest.actions.get_virginal_state(wc, 1)
+  expected_status.remove('A/mu')
+  svntest.actions.run_and_verify_commit(wc,
+                                        expected_output,
+                                        expected_status)
+
+  # Update the depth-immediates wc, expecting not to receive the deletion
+  # of A/mu.
+  # A/mu lies below the immediates horizon of the wc root, so the expected
+  # output tree is empty.
+  expected_output = svntest.wc.State(wc_immed, { })
+  expected_disk = svntest.wc.State('', {
+    'iota' : Item(contents="This is the file 'iota'.\n"),
+    'A' : Item()
+    })
+  expected_status = svntest.wc.State(wc_immed, {
+    '' : Item(status='  ', wc_rev=2),
+    'iota' : Item(status='  ', wc_rev=2),
+    'A' : Item(status='  ', wc_rev=2)
+    })
+  svntest.actions.run_and_verify_update(wc_immed,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status)
+
+#----------------------------------------------------------------------
+def depth_immediates_receive_delete(sbox):
+  "depth-immediates working copy receives a deletion"
+  # Check out a depth-immediates greek tree to wc1. In wc2, delete A and
+  # commit. Update wc1 should receive the delete.
+
+  ign_a, ign_b, wc_immed, wc = set_up_depthy_working_copies(sbox,
+                                                            immediates=True,
+                                                            infinity=True)
+
+  A_path = os.path.join(wc, 'A')
+
+  # Commit in the "other" wc.
+  svntest.actions.run_and_verify_svn(None, [], 'rm', A_path)
+  expected_output = svntest.wc.State(wc, { 'A' : Item(verb='Deleting'), })
+  expected_status = svntest.wc.State(wc, {
+    '' : Item(status='  ', wc_rev=1),
+    'iota' : Item(status='  ', wc_rev=1),
+    })
+  svntest.actions.run_and_verify_commit(wc,
+                                        expected_output,
+                                        expected_status)
+
+  # Update the depth-immediates wc, expecting to receive the deletion of A.
+  # Unlike A/mu in the previous test, 'A' is an immediate child of the wc
+  # root, so the delete is inside the depth horizon and must be applied.
+  expected_output = svntest.wc.State(wc_immed, {
+    'A' : Item(status='D ')
+    })
+  expected_disk = svntest.wc.State('', {
+    'iota' : Item(contents="This is the file 'iota'.\n"),
+    })
+  expected_status = svntest.wc.State(wc_immed, {
+    '' : Item(status='  ', wc_rev=2),
+    'iota' : Item(status='  ', wc_rev=2)
+    })
+  svntest.actions.run_and_verify_update(wc_immed,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status)
+
+#----------------------------------------------------------------------
+def depth_immediates_subdir_propset_1(sbox):
+  "depth-immediates commit subdir propset, update"
+  # Commit a propset made on an immediate child of a depth-immediates wc,
+  # then update and verify the property survives (check_props=True below).
+  ign_a, ign_b, wc_immediates, ign_c \
+    = set_up_depthy_working_copies(sbox, immediates=True)
+
+  A_path = os.path.join(wc_immediates, 'A')
+
+  # Set a property on an immediate subdirectory of the working copy.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'pset', 'foo', 'bar',
+                                     A_path)
+
+  # Create expected output tree.
+  expected_output = svntest.wc.State(wc_immediates, {
+    'A' : Item(verb='Sending'),
+    })
+
+  # Create expected status tree.
+  expected_status = svntest.wc.State(wc_immediates, {
+    '' : Item(status='  ', wc_rev=1),
+    'iota' : Item(status='  ', wc_rev=1),
+    'A' : Item(status='  ', wc_rev=2)
+    })
+
+  # Commit wc_immediates/A.
+  svntest.actions.run_and_verify_commit(wc_immediates,
+                                        expected_output,
+                                        expected_status,
+                                        [],
+                                        A_path)
+
+  # Create expected output tree for the update.
+  expected_output = svntest.wc.State(wc_immediates, { })
+
+  # Create expected disk tree.
+  expected_disk = svntest.wc.State('', {
+    'iota' : Item(contents="This is the file 'iota'.\n"),
+    'A' : Item(contents=None, props={'foo' : 'bar'}),
+    })
+
+  expected_status.tweak(contents=None, status='  ', wc_rev=2)
+
+  # Update the depth-immediates wc.
+  svntest.actions.run_and_verify_update(wc_immediates,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        check_props=True)
+
+#----------------------------------------------------------------------
+def depth_immediates_subdir_propset_2(sbox):
+  "depth-immediates update receives subdir propset"
+  # Lighter-weight companion to depth_immediates_subdir_propset_1: only
+  # checks that a depth-immediates update of a wc whose subdir gained a
+  # property runs without error (no full tree verification here).
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Make the other working copy.
+  other_wc = sbox.add_wc_path('other')
+  svntest.actions.duplicate_dir(wc_dir, other_wc)
+
+  A_path = sbox.ospath('A')
+
+  # Set a property on an immediate subdirectory of the working copy.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'pset', 'foo', 'bar',
+                                     A_path)
+  # Commit.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'commit', '-m', 'logmsg', A_path)
+
+  # Update at depth=immediates in the other wc, expecting to see no errors.
+  svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+                                     'update', '--depth', 'immediates',
+                                     other_wc)
+
+#----------------------------------------------------------------------
+def depth_update_to_more_depth(sbox):
+  "gradually update an empty wc to depth=infinity"
+
+  wc_dir, ign_a, ign_b, ign_c = set_up_depthy_working_copies(sbox, empty=True)
+
+  os.chdir(wc_dir)
+  # NOTE(review): cwd stays changed for the remainder of this test; the
+  # relative targets '' and 'A' below resolve against the wc root.
+
+  # Run 'svn up --set-depth=files' in a depth-empty working copy.
+  expected_output = svntest.wc.State('', {
+    'iota' : Item(status='A '),
+    })
+  expected_status = svntest.wc.State('', {
+    '' : Item(status='  ', wc_rev=1),
+    'iota' : Item(status='  ', wc_rev=1),
+    })
+  expected_disk = svntest.wc.State('', {
+    'iota' : Item("This is the file 'iota'.\n"),
+    })
+  svntest.actions.run_and_verify_update('',
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        [], False,
+                                        '--set-depth', 'files')
+  verify_depth(None, "files")
+
+  # Run 'svn up --set-depth=immediates' in the now depth-files working copy.
+  expected_output = svntest.wc.State('', {
+    'A' : Item(status='A '),
+    })
+  expected_status = svntest.wc.State('', {
+    '' : Item(status='  ', wc_rev=1),
+    'iota' : Item(status='  ', wc_rev=1),
+    'A' : Item(status='  ', wc_rev=1),
+    })
+  expected_disk = svntest.wc.State('', {
+    'iota' : Item("This is the file 'iota'.\n"),
+    'A' : Item(),
+    })
+  svntest.actions.run_and_verify_update('',
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        [], False,
+                                        '--set-depth', 'immediates')
+  verify_depth(None, "immediates")
+  # The newly pulled-in child directory itself starts at depth-empty.
+  verify_depth(None, "empty", "A")
+
+  # Upgrade 'A' to depth-files.
+  expected_output = svntest.wc.State('', {
+    'A/mu' : Item(status='A '),
+    })
+  expected_status = svntest.wc.State('', {
+    '' : Item(status='  ', wc_rev=1),
+    'iota' : Item(status='  ', wc_rev=1),
+    'A' : Item(status='  ', wc_rev=1),
+    'A/mu' : Item(status='  ', wc_rev=1),
+    })
+  expected_disk = svntest.wc.State('', {
+    'iota' : Item("This is the file 'iota'.\n"),
+    'A' : Item(),
+    'A/mu' : Item("This is the file 'mu'.\n"),
+    })
+  svntest.actions.run_and_verify_update('',
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        [], False,
+                                        '--set-depth', 'files', 'A')
+  verify_depth(None, "immediates")
+  verify_depth(None, "files", "A")
+
+  # Run 'svn up --set-depth=infinity' in the working copy.
+  expected_output = svntest.wc.State('', {
+    'A/B'         : Item(status='A '),
+    'A/B/lambda'  : Item(status='A '),
+    'A/B/E'       : Item(status='A '),
+    'A/B/E/alpha' : Item(status='A '),
+    'A/B/E/beta'  : Item(status='A '),
+    'A/B/F'       : Item(status='A '),
+    'A/C'         : Item(status='A '),
+    'A/D'         : Item(status='A '),
+    'A/D/gamma'   : Item(status='A '),
+    'A/D/G'       : Item(status='A '),
+    'A/D/G/pi'    : Item(status='A '),
+    'A/D/G/rho'   : Item(status='A '),
+    'A/D/G/tau'   : Item(status='A '),
+    'A/D/H'       : Item(status='A '),
+    'A/D/H/chi'   : Item(status='A '),
+    'A/D/H/psi'   : Item(status='A '),
+    'A/D/H/omega' : Item(status='A ')
+    })
+  expected_disk = svntest.main.greek_state.copy()
+  expected_status = svntest.actions.get_virginal_state('', 1)
+  svntest.actions.run_and_verify_update('',
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        [], False,
+                                        '--set-depth', 'infinity')
+  verify_depth("Non-infinity depth detected after an upgrade to depth-infinity",
+               "infinity")
+  verify_depth("Non-infinity depth detected after an upgrade to depth-infinity",
+               "infinity", "A")
+
+def commit_propmods_with_depth_empty_helper(sbox, depth_arg):
+  """Helper for commit_propmods_with_depth_empty().
+  DEPTH_ARG should be either '--depth=empty' or '-N'."""
+  # Only the two explicit commit targets (wc_dir and A/D) are within a
+  # depth-empty commit's reach, so only their propsets may land in r2.
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  iota_path = sbox.ospath('iota')
+  A_path = sbox.ospath('A')
+  D_path = os.path.join(A_path, 'D')
+  gamma_path = os.path.join(D_path, 'gamma')
+  G_path = os.path.join(D_path, 'G')
+  pi_path = os.path.join(G_path, 'pi')
+  H_path = os.path.join(D_path, 'H')
+  chi_path = os.path.join(H_path, 'chi')
+
+  # Set some properties, modify some files.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'propset', 'foo', 'foo-val', wc_dir)
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'propset', 'bar', 'bar-val', D_path)
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'propset', 'baz', 'baz-val', G_path)
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'propset', 'qux', 'qux-val', H_path)
+  svntest.main.file_append(iota_path, "new iota\n")
+  svntest.main.file_append(gamma_path, "new gamma\n")
+  svntest.main.file_append(pi_path, "new pi\n")
+  svntest.main.file_append(chi_path, "new chi\n")
+
+  # The only things that should be committed are two of the propsets.
+  expected_output = svntest.wc.State(
+    wc_dir,
+    { ''    : Item(verb='Sending'),
+      'A/D' : Item(verb='Sending'), }
+    )
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  # Expect the two propsets to be committed:
+  expected_status.tweak('', status='  ', wc_rev=2)
+  expected_status.tweak('A/D', status='  ', wc_rev=2)
+  # Expect every other change to remain uncommitted:
+  expected_status.tweak('iota', status='M ', wc_rev=1)
+  expected_status.tweak('A/D/G', status=' M', wc_rev=1)
+  expected_status.tweak('A/D/H', status=' M', wc_rev=1)
+  expected_status.tweak('A/D/gamma', status='M ', wc_rev=1)
+  expected_status.tweak('A/D/G/pi', status='M ', wc_rev=1)
+  expected_status.tweak('A/D/H/chi', status='M ', wc_rev=1)
+
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status,
+                                        [],
+                                        depth_arg,
+                                        wc_dir, D_path)
+
+# See also commit_tests 26: commit_nonrecursive
+def commit_propmods_with_depth_empty(sbox):
+  "commit property mods only, using --depth=empty"
+
+  # Two independent sandboxes so each helper invocation starts from a
+  # pristine working copy.
+  sbox2 = sbox.clone_dependent()
+
+  # Run once with '-N' and once with '--depth=empty' to make sure they
+  # function identically.
+  commit_propmods_with_depth_empty_helper(sbox, '-N')
+  commit_propmods_with_depth_empty_helper(sbox2, '--depth=empty')
+
+# Test for issue #2845.
+@Issue(2845)
+def diff_in_depthy_wc(sbox):
+  "diff at various depths in non-infinity wc"
+  # Gradually deepen an initially depth-empty wc and check that
+  # 'svn diff -rHEAD' (and explicit --depth diffs) report exactly the
+  # changes visible at each depth.
+
+  wc_empty, ign_a, ign_b, wc = set_up_depthy_working_copies(sbox, empty=True,
+                                                            infinity=True)
+
+  iota_path = os.path.join(wc, 'iota')
+  A_path = os.path.join(wc, 'A')
+  mu_path = os.path.join(wc, 'A', 'mu')
+  gamma_path = os.path.join(wc, 'A', 'D', 'gamma')
+
+  # Make some changes in the depth-infinity wc, and commit them
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'propset', 'foo', 'foo-val', wc)
+  svntest.main.file_write(iota_path, "new text\n")
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'propset', 'bar', 'bar-val', A_path)
+  svntest.main.file_write(mu_path, "new text\n")
+  svntest.main.file_write(gamma_path, "new text\n")
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'commit', '-m', '', wc)
+
+  # Build the expected unified-diff fragments for each changed node.
+  from svntest.verify import make_diff_header, make_diff_prop_header, \
+                             make_diff_prop_deleted, make_diff_prop_added
+  diff_mu = make_diff_header('A/mu', 'revision 2', 'working copy') + [
+    "@@ -1 +1 @@\n",
+    "-new text\n",
+    "+This is the file 'mu'.\n"]
+  diff_A = make_diff_header('A', 'revision 2', 'working copy') + \
+           make_diff_prop_header('A') + \
+           make_diff_prop_deleted('bar', 'bar-val')
+  diff_iota = make_diff_header('iota', 'revision 2', 'working copy') + [
+    "@@ -1 +1 @@\n",
+    "-new text\n",
+    "+This is the file 'iota'.\n"]
+  diff_dot = make_diff_header('.', 'revision 2', 'working copy') + \
+             make_diff_prop_header('.') + \
+             make_diff_prop_deleted('foo', 'foo-val')
+
+  os.chdir(wc_empty)
+
+  expected_output = svntest.verify.UnorderedOutput(diff_dot)
+  # The diff should contain only the propchange on '.'
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'diff', '-rHEAD')
+
+  # Upgrade to depth-files.
+  svntest.actions.run_and_verify_svn(None, [], 'up',
+                                     '--set-depth', 'files', '-r1')
+  # The diff should contain only the propchange on '.' and the
+  # contents change on iota.
+  expected_output = svntest.verify.UnorderedOutput(diff_iota + diff_dot)
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'diff', '-rHEAD')
+  # Do a diff at --depth empty.
+  expected_output = svntest.verify.UnorderedOutput(diff_dot)
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'diff', '--depth', 'empty', '-rHEAD')
+
+  # Upgrade to depth-immediates.
+  svntest.actions.run_and_verify_svn(None, [], 'up',
+                                     '--set-depth', 'immediates', '-r1')
+  # The diff should contain the propchanges on '.' and 'A' and the
+  # contents change on iota.
+  expected_output = svntest.verify.UnorderedOutput(diff_A + diff_iota +
+                                                   diff_dot)
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'diff', '-rHEAD')
+  # Do a diff at --depth files.
+  expected_output = svntest.verify.UnorderedOutput(diff_iota + diff_dot)
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'diff', '--depth', 'files', '-rHEAD')
+
+  # Upgrade A to depth-files.
+  svntest.actions.run_and_verify_svn(None, [], 'up',
+                                     '--set-depth', 'files', '-r1', 'A')
+  # The diff should contain everything but the contents change on
+  # gamma (which does not exist in this working copy).
+  expected_output = svntest.verify.UnorderedOutput(diff_mu + diff_A +
+                                                   diff_iota + diff_dot)
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'diff', '-rHEAD')
+  # Do a diff at --depth immediates.
+  expected_output = svntest.verify.UnorderedOutput(diff_A + diff_iota + diff_dot)
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'diff', '--depth', 'immediates', '-rHEAD')
+
+@Issue(2882)
+def commit_depth_immediates(sbox):
+  "commit some files with --depth=immediates"
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Test the fix for some bugs Mike Pilato reported here:
+  #
+  #    http://subversion.tigris.org/servlets/ReadMsg?list=dev&msgNo=128509
+  #    From: "C. Michael Pilato" <cmpilato@collab.net>
+  #    To: Karl Fogel <kfogel@red-bean.com>
+  #    CC: dev@subversion.tigris.org
+  #    References: <87d4yzcrro.fsf@red-bean.com>
+  #    Subject: Re: [PATCH] Make 'svn commit --depth=foo' work.
+  #    Message-ID: <46968831.2070906@collab.net>
+  #    Date: Thu, 12 Jul 2007 15:59:45 -0400
+  #
+  # See also http://subversion.tigris.org/issues/show_bug.cgi?id=2882.
+  #
+  # Outline of the test:
+  # ====================
+  #
+  # Modify these three files:
+  #
+  #    M      A/mu
+  #    M      A/D/G/rho
+  #    M      iota
+  #
+  # Then commit some of them using --depth=immediates:
+  #
+  #    svn ci -m "log msg" --depth=immediates wc_dir wc_dir/A/D/G/rho
+  #
+  # Before the bugfix, that would result in an error:
+  #
+  #    subversion/libsvn_wc/lock.c:570: (apr_err=155004)
+  #    svn: Working copy '/blah/blah/blah/wc' locked
+  #    svn: run 'svn cleanup' to remove locks \
+  #         (type 'svn help cleanup' for details)
+  #
+  # After the bugfix, it correctly commits two of the three files:
+  #
+  #    Sending        A/D/G/rho
+  #    Sending        iota
+  #    Transmitting file data ..
+  #    Committing transaction...
+  #    Committed revision 2.
+
+  iota_path = sbox.ospath('iota')
+  mu_path = sbox.ospath('A/mu')
+  G_path = sbox.ospath('A/D/G')
+  rho_path = os.path.join(G_path, 'rho')
+
+  svntest.main.file_append(iota_path, "new text in iota\n")
+  svntest.main.file_append(mu_path, "new text in mu\n")
+  svntest.main.file_append(rho_path, "new text in rho\n")
+
+  # iota (immediate child of wc_dir) and rho (immediate child of G_path)
+  # are within reach of the commit targets; mu is deeper than immediates
+  # from wc_dir and must stay uncommitted (status 'M ' below).
+  expected_output = svntest.wc.State(wc_dir, {
+    'iota'       : Item(verb='Sending'),
+    'A/D/G/rho'  : Item(verb='Sending'),
+    })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('iota', status='  ', wc_rev=2)
+  expected_status.tweak('A/mu', status='M ', wc_rev=1)
+  expected_status.tweak('A/D/G/rho', status='  ', wc_rev=2)
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status,
+                                        [],
+                                        '--depth', 'immediates',
+                                        wc_dir, G_path)
+
+def depth_immediates_receive_new_dir(sbox):
+  "depth-immediates wc receives new directory"
+  # A new top-level directory committed elsewhere must appear in a
+  # depth-immediates wc, but only as an empty directory: the file inside
+  # it stays out (see expected_disk and the final verify_depth).
+
+  ign_a, ign_b, wc_immed, wc = set_up_depthy_working_copies(sbox,
+                                                            immediates=True,
+                                                            infinity=True)
+
+  I_path = os.path.join(wc, 'I')
+  zeta_path = os.path.join(wc, 'I', 'zeta')
+  other_I_path = os.path.join(wc_immed, 'I')
+
+  os.mkdir(I_path)
+  svntest.main.file_write(zeta_path, "This is the file 'zeta'.\n")
+
+  # Commit in the "other" wc.
+  svntest.actions.run_and_verify_svn(None, [], 'add', I_path)
+  expected_output = svntest.wc.State(wc, {
+    'I'      : Item(verb='Adding'),
+    'I/zeta' : Item(verb='Adding'),
+    })
+  expected_status = svntest.actions.get_virginal_state(wc, 1)
+  expected_status.add({
+    'I'      : Item(status='  ', wc_rev=2),
+    'I/zeta' : Item(status='  ', wc_rev=2),
+    })
+  svntest.actions.run_and_verify_commit(wc,
+                                        expected_output,
+                                        expected_status)
+
+  # Update the depth-immediates wc, expecting to receive just the
+  # new directory, without the file.
+  expected_output = svntest.wc.State(wc_immed, {
+    'I'    : Item(status='A '),
+    })
+  expected_disk = svntest.wc.State('', {
+    'iota' : Item(contents="This is the file 'iota'.\n"),
+    'A'    : Item(),
+    'I'    : Item(),
+    })
+  expected_status = svntest.wc.State(wc_immed, {
+    ''     : Item(status='  ', wc_rev=2),
+    'iota' : Item(status='  ', wc_rev=2),
+    'A'    : Item(status='  ', wc_rev=2),
+    'I'    : Item(status='  ', wc_rev=2),
+    })
+  svntest.actions.run_and_verify_update(wc_immed,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status)
+  # Check that the new directory was added at depth=empty.
+  verify_depth(None, "empty", other_I_path)
+
+@Issue(2931)
+def add_tree_with_depth(sbox):
+  "add multi-subdir tree with --depth options"  # For issue #2931
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  new1_path = sbox.ospath('new1')
+  new2_path = os.path.join(new1_path, 'new2')
+  new3_path = os.path.join(new2_path, 'new3')
+  new4_path = os.path.join(new3_path, 'new4')
+  os.mkdir(new1_path)
+  os.mkdir(new2_path)
+  os.mkdir(new3_path)
+  os.mkdir(new4_path)
+  # Simple case, add new1 only, set depth to files
+  svntest.actions.run_and_verify_svn(None, [],
+                                     "add", "--depth", "files", new1_path)
+  verify_depth(None, "infinity", new1_path)
+
+  # Force add new1 at new1 again, should include new2 at empty, the depth of
+  # new1 should not change
+  # NOTE(review): the verify_depth calls in this test all check "infinity",
+  # not the depths the prose comments mention ('svn add --depth' apparently
+  # no longer limits the recorded depth of added dirs) -- confirm against
+  # current 'svn add' semantics before relying on the comments above.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     "add", "--depth", "immediates",
+                                     "--force", new1_path)
+  verify_depth(None, "infinity", new1_path)
+  verify_depth(None, "infinity", new2_path)
+
+  # add new4 with intermediate path, the intermediate path is added at empty
+  svntest.actions.run_and_verify_svn(None, [],
+                                     "add", "--depth", "immediates",
+                                     "--parents", new4_path)
+  verify_depth(None, "infinity", new3_path)
+  verify_depth(None, "infinity", new4_path)
+
+def upgrade_from_above(sbox):
+  "upgrade a depth=empty wc from above"
+
+  # The bug was that 'svn up --set-depth=files' worked from within the
+  # working copy, but not from without with working copy top given
+  # as an argument.  Both ways would correctly cause 'iota' to
+  # appear, but only the former actually upgraded the depth of the
+  # working copy to 'files'.  See this thread for details:
+  #
+  #   http://subversion.tigris.org/servlets/ReadMsg?list=dev&msgNo=130157
+  #   From: Alexander Sinyushkin <Alexander.Sinyushkin@svnkit.com>
+  #   To: dev@subversion.tigris.org
+  #   Subject: Problem upgrading working copy depth
+  #   Date: Wed, 19 Sep 2007 23:15:24 +0700
+  #   Message-ID: <46F14B1C.8010406@svnkit.com>
+
+  sbox2 = sbox.clone_dependent()
+
+  wc, ign_a, ign_b, ign_c = set_up_depthy_working_copies(sbox, empty=True)
+
+  # First verify that upgrading from within works.
+  # Remember cwd so the second half of the test runs from outside the wc.
+  saved_cwd = os.getcwd()
+  try:
+    os.chdir(wc)
+    expected_output = svntest.wc.State('', {
+      'iota'    : Item(status='A '),
+      })
+    expected_disk = svntest.wc.State('', {
+      'iota' : Item(contents="This is the file 'iota'.\n"),
+      })
+    expected_status = svntest.wc.State('', {
+      ''     : Item(status='  ', wc_rev=1),
+      'iota' : Item(status='  ', wc_rev=1),
+      })
+    svntest.actions.run_and_verify_update('',
+                                          expected_output,
+                                          expected_disk,
+                                          expected_status,
+                                          [], False,
+                                          '--set-depth=files', '.')
+    verify_depth(None, "files")
+  finally:
+    os.chdir(saved_cwd)
+
+  # Do it again, this time from above the working copy.
+  wc, ign_a, ign_b, ign_c = set_up_depthy_working_copies(sbox2, empty=True)
+  expected_output = svntest.wc.State(wc, {
+    'iota'    : Item(status='A '),
+    })
+  expected_disk = svntest.wc.State('', {
+    'iota' : Item(contents="This is the file 'iota'.\n"),
+    })
+  expected_status = svntest.wc.State(wc, {
+    ''     : Item(status='  ', wc_rev=1),
+    'iota' : Item(status='  ', wc_rev=1),
+    })
+  svntest.actions.run_and_verify_update(wc,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        [], False,
+                                        '--set-depth=files', wc)
+  verify_depth(None, "files", wc)
+
+def status_in_depthy_wc(sbox):
+  "status -u at various depths in non-infinity wc"
+  # Same escalation pattern as diff_in_depthy_wc, but checks
+  # 'svn status -u' output instead of diffs at each depth.
+
+  wc_empty, ign_a, ign_b, wc = set_up_depthy_working_copies(sbox, empty=True,
+                                                            infinity=True)
+
+  iota_path = os.path.join(wc, 'iota')
+  A_path = os.path.join(wc, 'A')
+  mu_path = os.path.join(wc, 'A', 'mu')
+  gamma_path = os.path.join(wc, 'A', 'D', 'gamma')
+
+  # Make some changes in the depth-infinity wc, and commit them
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'propset', 'foo', 'foo-val', wc)
+  svntest.main.file_write(iota_path, "new text\n")
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'propset', 'bar', 'bar-val', A_path)
+  svntest.main.file_write(mu_path, "new text\n")
+  svntest.main.file_write(gamma_path, "new text\n")
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'commit', '-m', '', wc)
+
+  # status[:N] slices below select progressively deeper subsets of the
+  # expected 'status -u' output.
+  status = [
+    "Status against revision:      2\n",
+    "        *        1   .\n",
+    "        *        1   iota\n",
+    "        *        1   A\n",
+    "        *        1   " + os.path.join('A', 'mu') + "\n",
+  ]
+
+  os.chdir(wc_empty)
+
+  expected_output = svntest.verify.UnorderedOutput(status[:2])
+  # The output should contain only the change on '.'.
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'st', '-u')
+
+  # Upgrade to depth-files.
+  svntest.actions.run_and_verify_svn(None, [], 'up',
+                                     '--set-depth', 'files', '-r1')
+  # The output should contain only the changes on '.' and 'iota'.
+  expected_output = svntest.verify.UnorderedOutput(status[:3])
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'st', '-u')
+  # Do a status -u at --depth empty.
+  expected_output = svntest.verify.UnorderedOutput(status[:2])
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'st', '-u', '--depth', 'empty')
+
+  # Upgrade to depth-immediates.
+  svntest.actions.run_and_verify_svn(None, [], 'up',
+                                     '--set-depth', 'immediates', '-r1')
+  # The output should contain the changes on '.', 'A' and 'iota'.
+  expected_output = svntest.verify.UnorderedOutput(status[:4])
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'st', '-u')
+  # Do a status -u at --depth files.
+  expected_output = svntest.verify.UnorderedOutput(status[:3])
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'st', '-u', '--depth', 'files')
+
+  # Upgrade A to depth-files.
+  svntest.actions.run_and_verify_svn(None, [], 'up',
+                                     '--set-depth', 'files', '-r1', 'A')
+  # The output should contain everything but the change on
+  # gamma (which does not exist in this working copy).
+  expected_output = svntest.verify.UnorderedOutput(status)
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'st', '-u')
+  # Do a status -u at --depth immediates.
+  expected_output = svntest.verify.UnorderedOutput(status[:4])
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'st', '-u', '--depth', 'immediates')
+
+#----------------------------------------------------------------------
+
+# Issue #3039.
+@Issue(3039)
+def depthy_update_above_dir_to_be_deleted(sbox):
+  "'update -N' above a WC path deleted in repos HEAD"
+  # Table-driven: for each depth, update a separate sandbox and compare
+  # against the per-depth expected output/disk/status trees below.
+  sbox.build()
+
+  sbox_for_depth = {
+    "files" : sbox,
+    "immediates" : sbox.clone_dependent(copy_wc=True),
+    "empty" : sbox.clone_dependent(copy_wc=True),
+    }
+
+  # Delete A directly in the repository (URL-based commit creates r2).
+  exit_code, output, err = svntest.actions.run_and_verify_svn(
+    None, [],
+    "delete", "-m", "Delete A.", sbox.repo_url + "/A")
+
+  def empty_output(wc_dir):
+    return svntest.wc.State(wc_dir, { })
+
+  def output_with_A(wc_dir):
+    expected_output = empty_output(wc_dir)
+    expected_output.add({
+      "A" : Item(status="D "),
+      })
+    return expected_output
+
+  initial_disk = svntest.main.greek_state.copy()
+  disk_with_only_iota = svntest.wc.State("", {
+    "iota" : Item("This is the file 'iota'.\n"),
+    })
+
+  def status_with_dot(wc_dir):
+    expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+    expected_status.tweak("", wc_rev=2)
+    return expected_status
+
+  def status_with_iota(wc_dir):
+    expected_status = status_with_dot(wc_dir)
+    expected_status.tweak("iota", wc_rev=2)
+    return expected_status
+
+  def status_with_only_iota(wc_dir):
+    return svntest.wc.State(wc_dir, {
+      ""     : Item(status="  ", wc_rev=2),
+      "iota" : Item(status="  ", wc_rev=2),
+      })
+
+  expected_trees_for_depth = {
+    "files" : (empty_output, initial_disk, status_with_iota),
+    "immediates" : (output_with_A, disk_with_only_iota, status_with_only_iota),
+    "empty" : (empty_output, initial_disk, status_with_dot),
+    }
+
+  for depth in sbox_for_depth.keys():
+    wc_dir = sbox_for_depth[depth].wc_dir
+    (expected_output_func, expected_disk, expected_status_func) = \
+      expected_trees_for_depth[depth]
+    #print depth
+    svntest.actions.run_and_verify_update(wc_dir,
+                                          expected_output_func(wc_dir),
+                                          expected_disk,
+                                          expected_status_func(wc_dir),
+                                          [], False,
+                                          "--depth=%s" % depth, wc_dir)
+
+
+#----------------------------------------------------------------------
+
+# Tests for deselection interface (a.k.a folding subtrees).
+#----------------------------------------------------------------------
+def depth_folding_clean_trees_1(sbox):
+ "gradually fold wc from depth=infinity to empty"
+
+ # Covers the following situations:
+ #
+ # infinity->immediates (metadata only)
+ # immediates->files (metadata only)
+ # mixed(infinity+files)=>immediates
+ # infinity=>empty
+ # immediates=>empty
+ # mixed(infinity+empty)=>immediates
+ # mixed(infinity+empty/immediates)=>immediates
+ # immediates=>files
+ # files=>empty
+ # mixed(infinity+empty)=>files
+
+ ign_a, ign_b, ign_c, wc_dir = set_up_depthy_working_copies(sbox,
+ infinity=True)
+
+ A_path = sbox.ospath('A')
+ C_path = os.path.join(A_path, 'C')
+ B_path = os.path.join(A_path, 'B')
+ D_path = os.path.join(A_path, 'D')
+ E_path = os.path.join(B_path, 'E')
+ F_path = os.path.join(B_path, 'F')
+ G_path = os.path.join(D_path, 'G')
+ H_path = os.path.join(D_path, 'H')
+
+ # Run 'svn up --set-depth=immediates' to directory A/B/E.
+ # This is an infinity=>immediates folding, changes on metadata only
+ expected_output = svntest.wc.State(wc_dir, {})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_disk = svntest.main.greek_state.copy()
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'immediates', E_path)
+ verify_depth(None, "immediates", E_path)
+
+ # Run 'svn up --set-depth=files' to directory A/B/E.
+ # This is an immediates=>files folding, changes on metadata only
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'files', E_path)
+ verify_depth(None, "files", E_path)
+
+ # Run 'svn up --set-depth=immediates' to directory A/B.
+ # This is an mixed(infinity+files)=>immediates folding
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E/alpha' : Item(status='D '),
+ 'A/B/E/beta' : Item(status='D '),
+ })
+ expected_status.remove('A/B/E/alpha', 'A/B/E/beta')
+ expected_disk.remove('A/B/E/alpha', 'A/B/E/beta')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'immediates', B_path)
+ verify_depth(None, "immediates", B_path)
+ verify_depth(None, "empty", E_path)
+ verify_depth(None, "empty", F_path)
+
+ # Run 'svn up --set-depth=empty' to directory A/D/H
+ # This is an infinity=>empty folding.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/H/chi' : Item(status='D '),
+ 'A/D/H/psi' : Item(status='D '),
+ 'A/D/H/omega' : Item(status='D ')
+ })
+ expected_status.remove( 'A/D/H/chi', 'A/D/H/psi', 'A/D/H/omega')
+ expected_disk.remove( 'A/D/H/chi', 'A/D/H/psi', 'A/D/H/omega')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'empty', H_path)
+ verify_depth(None, "empty", H_path)
+
+ # Run 'svn up --set-depth=immediates' to directory A/D
+ # This is an mixed(infinity+empty)=>immediates folding.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/pi' : Item(status='D '),
+ 'A/D/G/rho' : Item(status='D '),
+ 'A/D/G/tau' : Item(status='D '),
+ })
+ expected_status.remove('A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau')
+ expected_disk.remove('A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'immediates', D_path)
+ verify_depth(None, "immediates", D_path)
+ verify_depth(None, "empty", G_path)
+
+ # Run 'svn up --set-depth=empty' to directory A/D
+ # This is an immediates=>empty folding.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G' : Item(status='D '),
+ 'A/D/H' : Item(status='D '),
+ 'A/D/gamma' : Item(status='D ')
+ })
+ expected_status.remove('A/D/gamma', 'A/D/G', 'A/D/H')
+ expected_disk.remove('A/D/gamma', 'A/D/G', 'A/D/H')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'empty', D_path)
+ verify_depth(None, "empty", D_path)
+
+ # Run 'svn up --set-depth=immediates' to directory A
+ # This is a mixed(infinity+empty/immediates)=>immediates folding.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(status='D '),
+ 'A/B/F' : Item(status='D '),
+ 'A/B/lambda' : Item(status='D ')
+ })
+ expected_status.remove('A/B/lambda', 'A/B/E', 'A/B/F')
+ expected_disk.remove('A/B/lambda', 'A/B/E', 'A/B/F')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'immediates', A_path)
+ verify_depth(None, "immediates", A_path)
+ verify_depth(None, "empty", C_path)
+ verify_depth(None, "empty", B_path)
+
+ # Run 'svn up --set-depth=files' to directory A
+ # This is an immediates=>files folding.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B' : Item(status='D '),
+ 'A/C' : Item(status='D '),
+ 'A/D' : Item(status='D ')
+ })
+ expected_status.remove('A/B', 'A/C', 'A/D')
+ expected_disk.remove('A/B', 'A/C', 'A/D')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'files', A_path)
+ verify_depth(None, "files", A_path)
+
+ # Run 'svn up --set-depth=empty' to directory A
+ # This is a files=>empty folding.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(status='D ')
+ })
+ expected_status.remove('A/mu')
+ expected_disk.remove('A/mu')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'empty', A_path)
+ verify_depth(None, "empty", A_path)
+
+ # Run 'svn up --set-depth=files' to wc
+ # This is a mixed(infinity+empty)=>files folding.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(status='D ')
+ })
+ expected_status.remove('A')
+ expected_disk.remove('A')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'files', wc_dir)
+ verify_depth(None, "files", wc_dir)
+
+
+#------------------------------------------------------------------------------
+def depth_folding_clean_trees_2(sbox):
+ "gradually fold wc, focusing on depth=immediates"
+
+ # Covers the following situations:
+ #
+ # infinity=>immediates
+ # mixed(immediates+immediates)=>immediates
+ # mixed(immediates+infinity)=>immediates
+ # mixed(immediates+files)=>immediates
+ # immediates=>empty(remove the target since the parent is at files/empty)
+
+ ign_a, wc_dir, ign_b, ign_c = set_up_depthy_working_copies(sbox, files=True)
+
+ A_path = os.path.join(wc_dir, 'A')
+ D_path = os.path.join(A_path, 'D')
+ H_path = os.path.join(D_path, 'H')
+ G_path = os.path.join(D_path, 'G')
+
+ # pull in directory A at immediates
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '--depth', 'immediates', A_path)
+ # check to see if it's really at immediates
+ verify_depth(None, "immediates", A_path)
+
+ # pull in directory D at infinity
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '--set-depth', 'infinity', D_path)
+
+ # Run 'svn up --set-depth=immediates' to directory A/D.
+ # This is an infinity=>immediates folding
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/pi' : Item(status='D '),
+ 'A/D/G/rho' : Item(status='D '),
+ 'A/D/G/tau' : Item(status='D '),
+ 'A/D/H/chi' : Item(status='D '),
+ 'A/D/H/psi' : Item(status='D '),
+ 'A/D/H/omega' : Item(status='D ')
+ })
+ expected_status = svntest.wc.State(wc_dir, {
+ '' : Item(status=' ', wc_rev=1),
+ 'iota' : Item(status=' ', wc_rev=1),
+ 'A' : Item(status=' ', wc_rev=1),
+ 'A/mu' : Item(status=' ', wc_rev=1),
+ 'A/B' : Item(status=' ', wc_rev=1),
+ 'A/C' : Item(status=' ', wc_rev=1),
+ 'A/D' : Item(status=' ', wc_rev=1),
+ 'A/D/gamma' : Item(status=' ', wc_rev=1),
+ 'A/D/G' : Item(status=' ', wc_rev=1),
+ 'A/D/H' : Item(status=' ', wc_rev=1)
+ })
+ expected_disk = svntest.wc.State('', {
+ 'iota' : Item(contents="This is the file 'iota'.\n"),
+ 'A' : Item(contents=None),
+ 'A/mu' : Item(contents="This is the file 'mu'.\n"),
+ 'A/B' : Item(contents=None),
+ 'A/C' : Item(contents=None),
+ 'A/D' : Item(contents=None),
+ 'A/D/gamma' : Item(contents="This is the file 'gamma'.\n"),
+ 'A/D/G' : Item(contents=None),
+ 'A/D/H' : Item(contents=None),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'immediates', D_path)
+ verify_depth(None, "immediates", D_path)
+ verify_depth(None, "empty", G_path)
+ verify_depth(None, "empty", H_path)
+
+ # Run 'svn up --set-depth=immediates' to directory A.
+ # This is a mixed(immediates+immediates)=>immediates folding
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G' : Item(status='D '),
+ 'A/D/H' : Item(status='D '),
+ 'A/D/gamma' : Item(status='D ')
+ })
+ expected_status.remove( 'A/D/G', 'A/D/H', 'A/D/gamma')
+ expected_disk.remove( 'A/D/G', 'A/D/H', 'A/D/gamma')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'immediates', A_path)
+ verify_depth(None, "immediates", A_path)
+ verify_depth(None, "empty", D_path)
+
+ # pull in directory D at infinity
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '--set-depth', 'infinity', D_path)
+
+ # Run 'svn up --set-depth=immediates' to directory A.
+ # This is a mixed(immediates+infinity)=>immediates folding
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/gamma' : Item(status='D '),
+ 'A/D/G' : Item(status='D '),
+ 'A/D/H' : Item(status='D '),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'immediates', A_path)
+ verify_depth(None, "immediates", A_path)
+ verify_depth(None, "empty", D_path)
+
+ # pull in directory D at files
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '--set-depth', 'files', D_path)
+
+ # Run 'svn up --set-depth=immediates' to directory A.
+ # This is a mixed(immediates+files)=>immediates folding
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/gamma' : Item(status='D ')
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'immediates', A_path)
+ verify_depth(None, "immediates", A_path)
+ verify_depth(None, "empty", D_path)
+
+# Comment the following out, since cropping out the root of a tree is now
+# handled by svn_depth_exclude and should have a separate test case for all
+# influenced commands.
+#
+# # Run 'svn up --set-depth=empty' to directory A.
+# # This is an immediates=>empty folding, the directory A should be deleted
+# # too since the parent directory is at files/empty
+# expected_output = svntest.wc.State(wc_dir, {
+# 'A' : Item(status='D '),
+# })
+# expected_status = svntest.wc.State(wc_dir, {
+# '' : Item(status=' ', wc_rev=1),
+# 'iota' : Item(status=' ', wc_rev=1)
+# })
+# expected_disk = svntest.wc.State('', {
+# 'iota' : Item(contents="This is the file 'iota'.\n")
+# })
+# svntest.actions.run_and_verify_update(wc_dir,
+# expected_output,
+# expected_disk,
+# expected_status,
+# [], False,
+# '--set-depth', 'empty', A_path)
+
+def depth_fold_expand_clean_trees(sbox):
+ "expand target while contracting subtree"
+ # --set-depth=immediates/files to an empty target with infinity
+ # sub-tree should both fold the subtree and expand the target
+
+ wc_dir, ign_a, ign_b, ign_c = set_up_depthy_working_copies(sbox, empty=True)
+
+ A_path = os.path.join(wc_dir, 'A')
+ B_path = os.path.join(A_path, 'B')
+ C_path = os.path.join(A_path, 'C')
+ D_path = os.path.join(A_path, 'D')
+
+ # pull in directory A at empty
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '--depth', 'empty', A_path)
+ verify_depth(None, "empty", A_path)
+
+ # pull in directory D at infinity
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', D_path)
+
+ # Make the other working copy.
+ other_wc = sbox.add_wc_path('other')
+ svntest.actions.duplicate_dir(wc_dir, other_wc)
+
+ # Run 'svn up --set-depth=immediates' to directory A. This both folds
+ # directory D to empty and expands directory A to immediates
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(status='A '),
+ 'A/B' : Item(status='A '),
+ 'A/C' : Item(status='A '),
+ 'A/D/gamma' : Item(status='D '),
+ 'A/D/G' : Item(status='D '),
+ 'A/D/H' : Item(status='D '),
+ })
+ expected_status = svntest.wc.State(wc_dir, {
+ '' : Item(status=' ', wc_rev=1),
+ 'A' : Item(status=' ', wc_rev=1),
+ 'A/mu' : Item(status=' ', wc_rev=1),
+ 'A/B' : Item(status=' ', wc_rev=1),
+ 'A/C' : Item(status=' ', wc_rev=1),
+ 'A/D' : Item(status=' ', wc_rev=1)
+ })
+ expected_disk = svntest.wc.State('', {
+ 'A' : Item(contents=None),
+ 'A/mu' : Item(contents="This is the file 'mu'.\n"),
+ 'A/B' : Item(contents=None),
+ 'A/C' : Item(contents=None),
+ 'A/D' : Item(contents=None)
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'immediates', A_path)
+ verify_depth(None, "immediates", A_path)
+ verify_depth(None, "empty", B_path)
+ verify_depth(None, "empty", C_path)
+ verify_depth(None, "empty", D_path)
+
+ # Run 'svn up --set-depth=files' to directory A in other_wc. This both
+ # removes directory D and expands directory A to files
+ expected_output = svntest.wc.State(other_wc, {
+ 'A/mu' : Item(status='A '),
+ 'A/D' : Item(status='D '),
+ })
+ expected_status = svntest.wc.State(other_wc, {
+ '' : Item(status=' ', wc_rev=1),
+ 'A' : Item(status=' ', wc_rev=1),
+ 'A/mu' : Item(status=' ', wc_rev=1),
+ })
+ expected_disk = svntest.wc.State('', {
+ 'A' : Item(contents=None),
+ 'A/mu' : Item(contents="This is the file 'mu'.\n")
+ })
+ Other_A_path = os.path.join(other_wc, 'A')
+ svntest.actions.run_and_verify_update(other_wc,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'files', Other_A_path)
+ verify_depth(None, "files", Other_A_path)
+
+
+def pull_in_tree_with_depth_option(sbox):
+ """checkout and verify subtree with depth immediates"""
+
+ wc_empty,ign_a, ign_b, ign_c = set_up_depthy_working_copies(sbox,
+ empty=True)
+ A_path = os.path.join(wc_empty, 'A')
+ expected_output = svntest.wc.State(wc_empty, {
+ 'A' : Item(status='A '),
+ 'A/mu' : Item(status='A '),
+ 'A/B' : Item(status='A '),
+ 'A/C' : Item(status='A '),
+ 'A/D' : Item(status='A ')
+ })
+ expected_disk = svntest.wc.State('', {
+ 'A' : Item(),
+ 'A/mu' : Item("This is the file 'mu'.\n"),
+ 'A/B' : Item(),
+ 'A/C' : Item(),
+ 'A/D' : Item(),
+ })
+ expected_status = svntest.wc.State(wc_empty, {
+ '' : Item(status=' ', wc_rev=1),
+ 'A' : Item(status=' ', wc_rev=1),
+ 'A/mu' : Item(status=' ', wc_rev=1),
+ 'A/B' : Item(status=' ', wc_rev=1),
+ 'A/C' : Item(status=' ', wc_rev=1),
+ 'A/D' : Item(status=' ', wc_rev=1),
+ })
+ svntest.actions.run_and_verify_update(wc_empty,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ "--depth=immediates", A_path)
+
+ # Check that the A directory was pulled in at depth=immediates.
+ verify_depth(None, "immediates", A_path)
+
+def fold_tree_with_unversioned_modified_items(sbox):
+ "unversioned & modified items left untouched"
+ ign_a, ign_b, ign_c, wc_dir = set_up_depthy_working_copies(sbox,
+ infinity=True)
+
+ A_path = sbox.ospath('A')
+ pi_path = os.path.join(A_path, 'D', 'G', 'pi')
+ mu_path = os.path.join(A_path, 'mu')
+ unv_path = os.path.join(A_path, 'B', 'unv')
+
+ # Modify file pi
+ svntest.main.file_write(pi_path, "pi modified\n")
+ # Modify file mu
+ svntest.main.file_write(mu_path, "mu modified\n")
+ # Create an unversioned file
+ svntest.main.file_write(unv_path, "new unversioned\n")
+
+ # Fold the A dir to empty, expect the modified & unversioned ones left
+ # unversioned rather than removed, along with paths to those items.
+
+ # Directories B and D won't be deleted, because that would remove their
+ # local modifications. Their unmodified descendants are deleted though.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(status='D '),
+ 'A/B/F' : Item(status='D '),
+ 'A/B/lambda' : Item(status='D '),
+ 'A/C' : Item(status='D '),
+ 'A/D/G/rho' : Item(status='D '),
+ 'A/D/G/tau' : Item(status='D '),
+ 'A/D/H' : Item(status='D '),
+ 'A/D/gamma' : Item(status='D '),
+ })
+ # unversioned items will be ignored in the status tree, since the
+ # run_and_verify_update() function uses a quiet version of svn status
+ expected_status = svntest.wc.State(wc_dir, {
+ '' : Item(status=' ', wc_rev=1),
+ 'iota' : Item(status=' ', wc_rev=1),
+ 'A' : Item(status=' ', wc_rev=1),
+ 'A/D' : Item(status=' ', wc_rev='1'),
+ 'A/D/G' : Item(status=' ', wc_rev='1'),
+ 'A/D/G/pi' : Item(status='M ', wc_rev='1'),
+ 'A/B' : Item(status=' ', wc_rev='1'),
+ 'A/mu' : Item(status='M ', wc_rev='1'),
+ })
+ expected_disk = svntest.wc.State('', {
+ 'iota' : Item(contents="This is the file 'iota'.\n"),
+ 'A' : Item(contents=None),
+ 'A/mu' : Item(contents="mu modified\n"),
+ 'A/B' : Item(contents=None),
+ 'A/B/unv' : Item(contents="new unversioned\n"),
+ 'A/D' : Item(contents=None),
+ 'A/D/G' : Item(contents=None),
+ 'A/D/G/pi' : Item(contents="pi modified\n")
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'empty', A_path)
+ verify_depth(None, "empty", A_path)
+
+def depth_empty_update_on_file(sbox):
+ "depth-empty update on a file doesn't break it"
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ iota_path = sbox.ospath('iota')
+
+ # Change iota and commit it in r2.
+ svntest.main.file_write(iota_path, 'Modified iota\n')
+ expected_output = svntest.wc.State(wc_dir, { 'iota' : Item(verb='Sending'), })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', wc_rev=2, status=' ')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Update iota with depth=empty.
+ expected_output = svntest.wc.State(wc_dir,
+ {'iota': Item(status='U ') })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--depth=empty', '-r1', iota_path)
+
+ # Check the revision and created rev.
+ expected_infos = {
+ 'Revision' : '^1$',
+ 'Last Changed Rev' : '^1$',
+ }
+ svntest.actions.run_and_verify_info([expected_infos], iota_path)
+
+
+@Issue(3544)
+def excluded_path_update_operation(sbox):
+ """make sure update handle svn_depth_exclude properly"""
+
+ ign_a, ign_b, ign_c, wc_dir = set_up_depthy_working_copies(sbox,
+ infinity=True)
+ A_path = sbox.ospath('A')
+ B_path = os.path.join(A_path, 'B')
+ L_path = os.path.join(A_path, 'L')
+ E_path = os.path.join(B_path, 'E')
+ iota_path = sbox.ospath('iota')
+
+ # Simply exclude a subtree
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(status='D '),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/B/E/alpha', 'A/B/E/beta', 'A/B/E')
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/E/alpha', 'A/B/E/beta', 'A/B/E')
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'exclude', E_path)
+ # verify_depth exclude? not implemented yet
+
+ # crop path B to immediates; this just pulls in A/B/E again
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(status='A '),
+ })
+ expected_status.add({
+ 'A/B/E' : Item(status=' ', wc_rev=1)
+ })
+ expected_disk.add({
+ 'A/B/E' : Item(contents=None),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'immediates', B_path)
+ verify_depth(None, "immediates", B_path)
+
+ # Exclude A/B/E again
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '--set-depth', 'exclude', E_path)
+
+ # Exclude path B entirely, which contains an excluded subtree.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B' : Item(status='D '),
+ })
+ expected_status.remove('A/B/F', 'A/B/E', 'A/B/lambda', 'A/B')
+ expected_disk.remove('A/B/F', 'A/B/E', 'A/B/lambda', 'A/B')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'exclude', B_path)
+
+ # Explicitly pull in excluded path B.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B' : Item(status='A '),
+ 'A/B/lambda' : Item(status='A '),
+ 'A/B/E' : Item(status='A '),
+ 'A/B/E/alpha' : Item(status='A '),
+ 'A/B/E/beta' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_disk = svntest.main.greek_state.copy()
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ B_path)
+
+ # Test issue #3544:
+ # Exclude a file then set depth of WC to infinity, the file should return.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(status='D '),
+ })
+ expected_status.remove('iota')
+ expected_disk.remove('iota')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'exclude', iota_path)
+
+ # Update the whole WC to depth=infinity.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(status='A '),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_disk = svntest.main.greek_state.copy()
+ # This update currently fails when iota is reported as added, but shows in
+ # status as unversioned. See issue #3544 'svn update does not restore
+ # excluded files'. This test is marked as XFail until that issue is fixed.
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'infinity', wc_dir)
+
+def excluded_path_misc_operation(sbox):
+ """make sure other subcommands handle exclude"""
+
+ ign_a, ign_b, ign_c, wc_dir = set_up_depthy_working_copies(sbox,
+ infinity=True)
+ A_path = sbox.ospath('A')
+ B_path = os.path.join(A_path, 'B')
+ L_path = os.path.join(A_path, 'L')
+ M_path = os.path.join(A_path, 'M')
+ E_path = os.path.join(B_path, 'E')
+ LE_path = os.path.join(L_path, 'E')
+
+ # Simply exclude a subtree
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(status='D '),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/B/E/alpha', 'A/B/E/beta', 'A/B/E')
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/E/alpha', 'A/B/E/beta', 'A/B/E')
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'exclude', E_path)
+
+ # copy A/B to A/L, excluded entry should be copied too
+ expected_output = ['A '+L_path+'\n']
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'cp', B_path, L_path)
+ # verify_depth exclude? not implemented yet
+ #verify_depth(None, "empty", LE_path)
+
+ # revert A/L, with an excluded item in the tree
+ revert_paths = [L_path] + [os.path.join(L_path, child)
+ for child in ['E', 'F', 'lambda']]
+ expected_output = svntest.verify.UnorderedOutput([
+ "Reverted '%s'\n" % path for path in revert_paths])
+
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'revert', '--depth=infinity', L_path)
+
+ # copy A/B to A/L and then cp A/L to A/M, excluded entry should be
+ # copied both times
+ expected_output = ['A '+L_path+'\n']
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'cp', B_path, L_path)
+ expected_output = ['A '+M_path+'\n']
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'cp', L_path, M_path)
+
+ # commit this copy, with an excluded item.
+ expected_output = svntest.wc.State(wc_dir, { 'A/L' : Item(verb='Adding'),
+ 'A/M' : Item(verb='Adding'), })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/B/E/alpha', 'A/B/E/beta', 'A/B/E')
+ expected_status.add({
+ 'A/L' : Item(status=' ', wc_rev=2),
+ 'A/L/lambda' : Item(status=' ', wc_rev=2),
+ 'A/L/F' : Item(status=' ', wc_rev=2),
+ 'A/M' : Item(status=' ', wc_rev=2),
+ 'A/M/lambda' : Item(status=' ', wc_rev=2),
+ 'A/M/F' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Relocate wc, with excluded items in it.
+ repo_dir = sbox.repo_dir
+ repo_url = sbox.repo_url
+ other_repo_dir, other_repo_url = sbox.add_repo_path('other')
+ svntest.main.copy_repos(repo_dir, other_repo_dir, 2, 0)
+ svntest.main.safe_rmtree(repo_dir, 1)
+ svntest.actions.run_and_verify_svn(None, [], 'switch', '--relocate',
+ repo_url, other_repo_url, wc_dir)
+
+ # remove the new directory A/L, with an excluded item.
+ # If successful, no error will be thrown
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', L_path)
+
+ # revert the delete
+ # If successful, no error will be thrown
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '--depth=infinity', L_path)
+
+
+def excluded_receive_remote_removal(sbox):
+ """exclude flag should be cleared upon remote removal"""
+ ign_a, ign_b, ign_c, wc \
+ = set_up_depthy_working_copies(sbox, infinity=True)
+
+ A_path = os.path.join(wc, 'A')
+ B_path = os.path.join(A_path, 'B')
+ C_path = os.path.join(A_path, 'C')
+
+ # Exclude path B from wc
+ expected_output = svntest.wc.State(wc, {
+ 'A/B' : Item(status='D '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/lambda', 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/B/E', 'A/B/F', 'A/B')
+ expected_status = svntest.actions.get_virginal_state(wc, 1)
+ expected_status.remove('A/B/lambda', 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/B/E', 'A/B/F', 'A/B')
+ svntest.actions.run_and_verify_update(wc,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ "--set-depth", "exclude", B_path)
+
+ # Remove path B in the repos.
+ svntest.actions.run_and_verify_svn(None, [], "delete", "-m",
+ "Delete B.", sbox.repo_url + "/A/B")
+
+ # Update wc, should receive the removal of excluded path B
+ # and handle it silently.
+ expected_status = svntest.actions.get_virginal_state(wc, 2)
+ expected_status.remove('A/B/lambda', 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/B/E', 'A/B/F', 'A/B')
+ svntest.actions.run_and_verify_update(wc,
+ None,
+ expected_disk,
+ expected_status)
+
+ # Introduce a new path with the same name B.
+ # This should succeed if the exclude entry is gone with the update,
+ # otherwise a name conflict will arise.
+ expected_output = ['A '+B_path+'\n']
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'cp', C_path, B_path)
+
+
+# Regression test for r876760.
+def exclude_keeps_hidden_entries(sbox):
+ "'up --set-depth exclude' doesn't lose entries"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A_path = sbox.ospath('A')
+ os.chdir(A_path)
+
+ # the second 'up' used to cause the entry of 'C' to be lost.
+ svntest.main.run_svn(None, 'up', '--set-depth', 'exclude', 'C')
+ svntest.main.run_svn(None, 'up', '--set-depth', 'exclude', 'D')
+ # we could grep the 'entries' file, but...
+ # or we could use 'info', but info_excluded() is XFail.
+ expected_stderr = ".*svn: E150002: '.*C' is already under version control.*"
+ svntest.actions.run_and_verify_svn(None, expected_stderr,
+ 'mkdir', 'C')
+
+
+@Issue(3792)
+def info_excluded(sbox):
+ "'info' should treat excluded item as versioned"
+
+ # The problem: 'svn info' on an excluded item would behave as if it
+ # was not versioned at all:
+ #
+ # % svn up --set-depth exclude A
+ # D A
+ # % svn info A
+ # A: (Not a versioned resource)
+ #
+ # ..\..\..\subversion\svn\info-cmd.c:562: (apr_err=200000)
+ # svn: A problem occurred; see other errors for details
+ #
+ # It should acknowledge the existence (in the repos) of ./A and print some
+ # info about it, like it does if '--set-depth empty' is used instead.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A_path = sbox.ospath('A')
+ svntest.main.run_svn(None, 'up', '--set-depth', 'exclude', A_path)
+
+ expected_info = {
+ 'Path' : re.escape(A_path),
+ 'Repository Root' : sbox.repo_url,
+ 'Repository UUID' : svntest.actions.get_wc_uuid(wc_dir),
+ 'Depth' : 'exclude',
+ }
+ svntest.actions.run_and_verify_info([expected_info], A_path)
+
+
+
+#----------------------------------------------------------------------
+# Check that "svn resolved" visits tree-conflicts *on unversioned items*
+# according to the --depth parameter.
+
+def make_depth_tree_conflicts(sbox):
+ "Helper for tree_conflicts_resolved_depth_*"
+
+ sbox.build()
+ wc = sbox.wc_dir
+
+ j = os.path.join
+ A = j(wc, 'A')
+ m = j(A, 'mu')
+ B = j(A, 'B')
+ D = j(A, 'D')
+ g = j(D, 'gamma')
+
+ # Store node modifications as rev 2
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'foo-val', B)
+ svntest.main.file_append(m, "Modified mu.\n")
+ svntest.main.file_append(g, "Modified gamma.\n")
+
+ expected_output = svntest.wc.State(wc, {
+ 'A/mu' : Item(verb='Sending'),
+ 'A/B' : Item(verb='Sending'),
+ 'A/D/gamma' : Item(verb='Sending'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc, 1)
+ expected_status.tweak('A/mu', 'A/B', 'A/D/gamma',
+ wc_rev = 2)
+
+ svntest.actions.run_and_verify_commit(wc,
+ expected_output,
+ expected_status,
+ [],
+ A)
+
+ # Go back to rev 1
+ expected_output = svntest.wc.State(wc, {
+ 'A/mu' : Item(status='U '),
+ 'A/B' : Item(status=' U'),
+ 'A/D/gamma' : Item(status='U '),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc, 1)
+ expected_disk = svntest.main.greek_state.copy()
+ svntest.actions.run_and_verify_update(wc,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '-r1', A)
+
+ # Perform node deletions so that items become unversioned and
+ # will have tree-conflicts upon update.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', m, B, g)
+
+ # Update so that conflicts appear
+ expected_output = svntest.wc.State(wc, {
+ 'A/mu' : Item(status=' ', treeconflict='C'),
+ 'A/B' : Item(status=' ', treeconflict='C'),
+ 'A/D/gamma' : Item(status=' ', treeconflict='C'),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/mu',
+ 'A/B', 'A/B/lambda', 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/D/gamma',
+ 'A/B/E', 'A/B/F')
+
+ # This test is set XFail because this (correct) status cannot be
+ # verified due to an "svn update" bug. The tree-conflict on A/B
+ # which is notified about during the update does not show in the
+ # status. When removing file 'mu' from above 'rm' command, 'B' is
+ # reported as tree-conflicted correctly. Also use these to verify:
+ # expected_output = None
+ # expected_disk = None
+ expected_status = svntest.actions.get_virginal_state(wc, 2)
+ expected_status.tweak('A/mu',
+ 'A/B', 'A/B/lambda',
+ 'A/B/E', 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/B/F',
+ 'A/D/gamma',
+ status='D ')
+ expected_status.tweak('A/mu', 'A/B', 'A/D/gamma',
+ treeconflict='C')
+
+ svntest.actions.run_and_verify_update(wc,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ wc)
+
+
+
+def tree_conflicts_resolved_depth_empty(sbox):
+ "tree conflicts resolved depth-empty"
+
+ make_depth_tree_conflicts(sbox)
+
+ wc = sbox.wc_dir
+ A = os.path.join(wc, 'A')
+
+ svntest.actions.run_and_verify_resolved([], '--depth=empty', A)
+
+
+def tree_conflicts_resolved_depth_files(sbox):
+ "tree conflicts resolved depth-files"
+
+ make_depth_tree_conflicts(sbox)
+
+ wc = sbox.wc_dir
+ j = os.path.join
+ A = j(wc, 'A')
+ m = j(A, 'mu')
+
+ svntest.actions.run_and_verify_resolved([m], '--depth=files', A)
+
+
+def tree_conflicts_resolved_depth_immediates(sbox):
+ "tree conflicts resolved depth-immediates"
+
+ make_depth_tree_conflicts(sbox)
+
+ wc = sbox.wc_dir
+ j = os.path.join
+ A = j(wc, 'A')
+ m = j(A, 'mu')
+ B = j(A, 'B')
+
+ svntest.actions.run_and_verify_resolved([m, B], '--depth=immediates', A)
+
+
+def tree_conflicts_resolved_depth_infinity(sbox):
+ "tree conflicts resolved depth-infinity"
+
+ make_depth_tree_conflicts(sbox)
+
+ wc = sbox.wc_dir
+ j = os.path.join
+ A = j(wc, 'A')
+ m = j(A, 'mu')
+ B = j(A, 'B')
+ g = j(A, 'D', 'gamma')
+
+ svntest.actions.run_and_verify_resolved([m, B, g], '--depth=infinity', A)
+
+def update_excluded_path_sticky_depths(sbox):
+ """set-depth from excluded to all other depths"""
+
+ ign_a, ign_b, ign_c, wc_dir = set_up_depthy_working_copies(sbox,
+ infinity=True)
+ A_path = sbox.ospath('A')
+ B_path = os.path.join(A_path, 'B')
+
+ # Exclude the subtree 'A/B'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B' : Item(status='D '),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/B/lambda', 'A/B/E/alpha', 'A/B/E/beta', 'A/B/E',
+ 'A/B/F', 'A/B')
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/lambda', 'A/B/E/alpha', 'A/B/E/beta', 'A/B/E',
+ 'A/B/F', 'A/B')
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'exclude', B_path)
+
+ # Update to depth 'empty' for the excluded path A/B
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B' : Item(status='A '),
+ })
+ expected_status.add({
+ 'A/B' : Item(status=' ', wc_rev=1)
+ })
+ expected_disk.add({
+ 'A/B' : Item(contents=None),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'empty', B_path)
+ verify_depth(None, "empty", B_path)
+ expected_info = {
+ 'Path' : re.escape(B_path),
+ 'Repository Root' : sbox.repo_url,
+ 'Repository UUID' : svntest.actions.get_wc_uuid(wc_dir),
+ 'Depth' : 'empty',
+ }
+ svntest.actions.run_and_verify_info([expected_info], B_path)
+
+ # Exclude A/B again
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '--set-depth', 'exclude', B_path)
+
+ # Update to depth 'files' for the excluded path A/B
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B' : Item(status='A '),
+ 'A/B/lambda' : Item(status='A '),
+ })
+ expected_status.add({
+ 'A/B' : Item(status=' ', wc_rev=1),
+ 'A/B/lambda' : Item(status=' ', wc_rev=1),
+ })
+ expected_disk.add({
+ 'A/B' : Item(contents=None),
+ 'A/B/lambda' : Item(contents="This is the file 'lambda'.\n"),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'files', B_path)
+ verify_depth(None, "files", B_path)
+ expected_info = {
+ 'Path' : re.escape(B_path),
+ 'Repository Root' : sbox.repo_url,
+ 'Repository UUID' : svntest.actions.get_wc_uuid(wc_dir),
+ 'Depth' : 'files',
+ }
+ svntest.actions.run_and_verify_info([expected_info], B_path)
+
+ # Exclude A/B again
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '--set-depth', 'exclude', B_path)
+
+ # Update to depth 'immediates' for the excluded path A/B
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B' : Item(status='A '),
+ 'A/B/lambda' : Item(status='A '),
+ 'A/B/E' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ })
+ expected_status.add({
+ 'A/B' : Item(status=' ', wc_rev=1),
+ 'A/B/lambda' : Item(status=' ', wc_rev=1),
+ 'A/B/E' : Item(status=' ', wc_rev=1),
+ 'A/B/F' : Item(status=' ', wc_rev=1),
+ })
+ expected_disk.add({
+ 'A/B' : Item(contents=None),
+ 'A/B/lambda' : Item(contents="This is the file 'lambda'.\n"),
+ 'A/B/E' : Item(contents=None),
+ 'A/B/F' : Item(contents=None),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'immediates', B_path)
+ verify_depth(None, "immediates", B_path)
+ expected_info = {
+ 'Path' : re.escape(B_path),
+ 'Repository Root' : sbox.repo_url,
+ 'Repository UUID' : svntest.actions.get_wc_uuid(wc_dir),
+ 'Depth' : 'immediates',
+ }
+ svntest.actions.run_and_verify_info([expected_info], B_path)
+
+ # Exclude A/B again
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '--set-depth', 'exclude', B_path)
+
+ # Update to depth 'infinity' for the excluded path A/B
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B' : Item(status='A '),
+ 'A/B/lambda' : Item(status='A '),
+ 'A/B/E' : Item(status='A '),
+ 'A/B/E/beta' : Item(status='A '),
+ 'A/B/E/alpha' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_disk = svntest.main.greek_state.copy()
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'infinity', B_path)
+ verify_depth(None, "infinity", B_path)
+ expected_info = {
+ 'Path' : re.escape(B_path),
+ 'Repository Root' : sbox.repo_url,
+ 'Repository UUID' : svntest.actions.get_wc_uuid(wc_dir),
+ # 'Depth' value is absent for 'infinity'
+ }
+ svntest.actions.run_and_verify_info([expected_info], B_path)
+
+
+def update_depth_empty_root_of_infinite_children(sbox):
+ """update depth=empty root of depth=infinite children"""
+
+ wc_dir, ign_a, ign_b, wc_other = set_up_depthy_working_copies(sbox,
+ empty=True,
+ infinity=True)
+ A_path = os.path.join(wc_dir, 'A')
+
+ # Update A to depth 'infinity'
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '--set-depth', 'infinity', A_path)
+
+ # Tweak some files in the full working copy and commit.
+ svntest.main.file_append(os.path.join(wc_other, 'A', 'B', 'E', 'alpha'),
+ "Modified alpha.\n")
+ svntest.main.file_append(os.path.join(wc_other, 'A', 'D', 'G', 'rho'),
+ "Modified rho.\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', '', wc_other)
+
+ # Now update the original working copy and make sure we get those changes.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E/alpha' : Item(status='U '),
+ 'A/D/G/rho' : Item(status='U '),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.remove('iota')
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('iota')
+ expected_disk.tweak('A/B/E/alpha', contents="This is the file 'alpha'.\nModified alpha.\n")
+ expected_disk.tweak('A/D/G/rho', contents="This is the file 'rho'.\nModified rho.\n")
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+def sparse_update_with_dash_dash_parents(sbox):
+ """update --parents"""
+
+ sbox.build(create_wc = False)
+ sbox.add_test_path(sbox.wc_dir, True)
+ alpha_path = sbox.ospath('A/B/E/alpha')
+ pi_path = sbox.ospath('A/D/G/pi')
+ omega_path = sbox.ospath('A/D/H/omega')
+
+ # Start with a depth=empty root checkout.
+ svntest.actions.run_and_verify_svn(
+ svntest.verify.AnyOutput, [],
+ "co", "--depth", "empty", sbox.repo_url, sbox.wc_dir)
+
+ # Now, let's use --parents to pull in some scattered file children.
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ 'A' : Item(status='A '),
+ 'A/B' : Item(status='A '),
+ 'A/B/E' : Item(status='A '),
+ 'A/B/E/alpha' : Item(status='A '),
+ })
+ expected_disk = svntest.wc.State('', {
+ 'A' : Item(contents=None),
+ 'A/B' : Item(contents=None),
+ 'A/B/E' : Item(contents=None),
+ 'A/B/E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ })
+ expected_status = svntest.wc.State(sbox.wc_dir, {
+ '' : Item(status=' ', wc_rev=1),
+ 'A' : Item(status=' ', wc_rev=1),
+ 'A/B' : Item(status=' ', wc_rev=1),
+ 'A/B/E' : Item(status=' ', wc_rev=1),
+ 'A/B/E/alpha' : Item(status=' ', wc_rev=1),
+ })
+ svntest.actions.run_and_verify_update(sbox.wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--parents', alpha_path)
+
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ 'A/D' : Item(status='A '),
+ 'A/D/G' : Item(status='A '),
+ 'A/D/G/pi' : Item(status='A '),
+ })
+ expected_disk.add({
+ 'A/D' : Item(contents=None),
+ 'A/D/G' : Item(contents=None),
+ 'A/D/G/pi' : Item(contents="This is the file 'pi'.\n"),
+ })
+ expected_status.add({
+ 'A/D' : Item(status=' ', wc_rev=1),
+ 'A/D/G' : Item(status=' ', wc_rev=1),
+ 'A/D/G/pi' : Item(status=' ', wc_rev=1),
+ })
+ svntest.actions.run_and_verify_update(sbox.wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--parents', pi_path)
+
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ 'A/D/H' : Item(status='A '),
+ 'A/D/H/omega' : Item(status='A '),
+ })
+ expected_disk.add({
+ 'A/D/H' : Item(contents=None),
+ 'A/D/H/omega' : Item(contents="This is the file 'omega'.\n"),
+ })
+ expected_status.add({
+ 'A/D/H' : Item(status=' ', wc_rev=1),
+ 'A/D/H/omega' : Item(status=' ', wc_rev=1),
+ })
+ svntest.actions.run_and_verify_update(sbox.wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--parents', omega_path)
+
+def update_below_depth_empty(sbox):
+ "update below depth empty shouldn't be applied"
+ sbox.build()
+
+ repo_url = sbox.repo_url
+ A = sbox.ospath('A')
+
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ 'A/C' : Item(status='D '),
+ 'A/B' : Item(status='D '),
+ 'A/mu' : Item(status='D '),
+ 'A/D' : Item(status='D '),
+ })
+ svntest.actions.run_and_verify_update(sbox.wc_dir, expected_output, None,
+ None,
+ [], False,
+ '--set-depth', 'empty', A)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', repo_url + '/iota',
+ repo_url + '/A/B',
+ '-m', 'remote copy')
+
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ })
+
+ # This update should just update the revision of the working copy
+ svntest.actions.run_and_verify_update(sbox.wc_dir, expected_output, None,
+ None)
+
+# Test for issue #4136.
+@Issue(4136)
+def commit_then_immediates_update(sbox):
+ "deep commit followed by update --depth immediates"
+ sbox.build()
+
+ repo_url = sbox.repo_url
+ wc_dir = sbox.wc_dir
+ mu_path = sbox.ospath('A/mu')
+
+ # Modify A/mu and commit the changes.
+ svntest.main.file_write(mu_path, "modified mu\n")
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=2, status=' ')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Now, update --depth immediates in the root of the working copy.
+ expected_output = svntest.wc.State(wc_dir, { })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu', contents="modified mu\n")
+ expected_status = svntest.wc.State(wc_dir, { '' : svntest.wc.StateItem() })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('', wc_rev=2, status=' ')
+ expected_status.tweak('A', wc_rev=2, status=' ')
+ expected_status.tweak('A/mu', wc_rev=2, status=' ')
+ expected_status.tweak('iota', wc_rev=2, status=' ')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ "--depth=immediates", wc_dir)
+
+def revert_depth_files(sbox):
+ "depth immediate+files should revert deleted files"
+
+ sbox.build(read_only = True)
+
+ expected_output = "Reverted '" + re.escape(sbox.ospath('A/mu')) + "'"
+
+ # Apply an unrelated delete one level to deep
+ sbox.simple_rm('A/D/gamma')
+
+ sbox.simple_rm('A/mu')
+ # Expect reversion of just 'mu'
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'revert', '--depth=immediates', sbox.ospath('A'))
+
+ # Apply an unrelated directory delete
+ sbox.simple_rm('A/D')
+
+ sbox.simple_rm('A/mu')
+ # Expect reversion of just 'mu'
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'revert', '--depth=files', sbox.ospath('A'))
+
+@Issue(4257)
+def spurious_nodes_row(sbox):
+ "update produces no spurious rows"
+
+ sbox.build(read_only = True)
+ return
+
+ val1 = svntest.wc.sqlite_stmt(sbox.wc_dir, "select count(*) from nodes")
+ expected_output = svntest.wc.State(sbox.wc_dir, { })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ svntest.actions.run_and_verify_update(sbox.wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ "--depth=empty", sbox.wc_dir)
+ val2 = svntest.wc.sqlite_stmt(sbox.wc_dir, "select count(*) from nodes")
+ if (val1 != val2):
+ # ra_neon added a spurious not-present row that does not show up in status
+ raise svntest.Failure("count changed from '%s' to '%s'" % (val1, val2))
+
+def commit_excluded(sbox):
+ "commit an excluded node"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G' : Item(status='D '),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau')
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ None,
+ expected_status,
+ [], False,
+ "--set-depth=exclude",
+ sbox.ospath('A/D/G'))
+
+ sbox.simple_copy('A/D', 'D')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'D' : Item(verb='Adding'),
+ })
+
+ expected_status.add({
+ 'D' : Item(status=' ', wc_rev='2'),
+ 'D/H' : Item(status=' ', wc_rev='2'),
+ 'D/H/chi' : Item(status=' ', wc_rev='2'),
+ 'D/H/psi' : Item(status=' ', wc_rev='2'),
+ 'D/H/omega' : Item(status=' ', wc_rev='2'),
+ 'D/gamma' : Item(status=' ', wc_rev='2')
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G' : Item(status='A '),
+ 'A/D/G/pi' : Item(status='A '),
+ 'A/D/G/tau' : Item(status='A '),
+ 'A/D/G/rho' : Item(status='A '),
+ 'D/G' : Item(status='A '),
+ 'D/G/pi' : Item(status='A '),
+ 'D/G/tau' : Item(status='A '),
+ 'D/G/rho' : Item(status='A ')
+ })
+
+ expected_status.tweak(wc_rev=2)
+
+ expected_status.add({
+ 'D' : Item(status=' ', wc_rev='2'),
+ 'D/G' : Item(status=' ', wc_rev='2'),
+ 'D/G/pi' : Item(status=' ', wc_rev='2'),
+ 'D/G/rho' : Item(status=' ', wc_rev='2'),
+ 'D/G/tau' : Item(status=' ', wc_rev='2'),
+ 'D/H' : Item(status=' ', wc_rev='2'),
+ 'D/H/chi' : Item(status=' ', wc_rev='2'),
+ 'D/H/psi' : Item(status=' ', wc_rev='2'),
+ 'D/H/omega' : Item(status=' ', wc_rev='2'),
+ 'D/gamma' : Item(status=' ', wc_rev='2'),
+ 'A/D/G' : Item(status=' ', wc_rev='2'),
+ 'A/D/G/rho' : Item(status=' ', wc_rev='2'),
+ 'A/D/G/tau' : Item(status=' ', wc_rev='2'),
+ 'A/D/G/pi' : Item(status=' ', wc_rev='2')
+ })
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ None,
+ expected_status,
+ [], False,
+ "--set-depth=infinity", wc_dir)
+
+@Issue(4636)
+@XFail()
+def fold_tree_with_deleted_moved_items(sbox):
+ "deleted & moved items left untouched"
+ ign_a, ign_b, ign_c, wc_dir = set_up_depthy_working_copies(sbox,
+ infinity=True)
+
+ A_path = sbox.ospath('A')
+
+ # Delete file lambda, move file pi and directory C
+ sbox.simple_rm('A/B/lambda')
+ sbox.simple_move('A/D/G/pi', 'A/D/G/pi_moved')
+ sbox.simple_move('A/C', 'A/C_moved')
+
+ # Fold the A dir to empty, expect the deleted & moved items ones left
+ # and visible in status, rather than gone without a trace.
+
+ # Directories B and D won't be deleted, because that would remove their
+ # local modifications. Their unmodified descendants are deleted though.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(status='D '),
+ 'A/B/F' : Item(status='D '),
+ 'A/D/G/rho' : Item(status='D '),
+ 'A/D/G/tau' : Item(status='D '),
+ 'A/D/H' : Item(status='D '),
+ 'A/D/gamma' : Item(status='D '),
+ 'A/mu' : Item(status='D '),
+ })
+ expected_status = svntest.wc.State(wc_dir, {
+ '' : Item(status=' ', wc_rev=1),
+ 'iota' : Item(status=' ', wc_rev=1),
+ 'A' : Item(status=' ', wc_rev=1),
+ 'A/B' : Item(status=' ', wc_rev=1),
+ 'A/B/lambda' : Item(status='D ', wc_rev=1),
+ 'A/C' : Item(status='D ', wc_rev=1, moved_to='A/C_moved'),
+ 'A/C_moved' : Item(status='A ', wc_rev='-', copied='+',
+ moved_from='A/C'),
+ 'A/D' : Item(status=' ', wc_rev=1),
+ 'A/D/G' : Item(status=' ', wc_rev=1),
+ 'A/D/G/pi' : Item(status='D ', wc_rev=1, moved_to='A/D/G/pi_moved'),
+ 'A/D/G/pi_moved' : Item(status='A ', wc_rev='-', copied='+',
+ moved_from='A/D/G/pi'),
+ })
+ expected_disk = svntest.wc.State('', {
+ 'iota' : Item(contents="This is the file 'iota'.\n"),
+ 'A' : Item(contents=None),
+ 'A/B' : Item(contents=None),
+ 'A/C_moved' : Item(contents=None),
+ 'A/D' : Item(contents=None),
+ 'A/D/G' : Item(contents=None),
+ 'A/D/G/pi_moved' : Item(contents="This is the file 'pi'.\n"),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--set-depth', 'empty', A_path)
+ verify_depth(None, "empty", A_path)
+
+@Issue(4642)
+@XFail()
+def fold_tree_with_unversioned_items(sbox):
+ "unversioned files in excluded directory"
+ ign_a, ign_b, ign_c, wc_dir = set_up_depthy_working_copies(sbox,
+ infinity=True)
+
+ # create an unversioned directory within a versioned one
+ A_path = sbox.ospath('A')
+ A_local_path = os.path.join(A_path, 'A_local')
+ os.mkdir(A_local_path)
+
+ # Set A to be excluded.
+ svntest.main.run_svn(None, 'update', '--set-depth=exclude', A_path)
+
+ # try a simple update afterwards
+ sbox.simple_update()
+
+#----------------------------------------------------------------------
+# list all tests here, starting with None:
+test_list = [ None,
+ depth_empty_checkout,
+ depth_files_checkout,
+ nonrecursive_checkout,
+ depth_empty_update_bypass_single_file,
+ depth_immediates_get_top_file_mod_only,
+ depth_empty_commit,
+ depth_empty_with_file,
+ depth_empty_with_dir,
+ depth_immediates_bring_in_file,
+ depth_immediates_fill_in_dir,
+ depth_mixed_bring_in_dir,
+ depth_empty_unreceive_delete,
+ depth_immediates_unreceive_delete,
+ depth_immediates_receive_delete,
+ depth_update_to_more_depth,
+ depth_immediates_subdir_propset_1,
+ depth_immediates_subdir_propset_2,
+ commit_propmods_with_depth_empty,
+ diff_in_depthy_wc,
+ commit_depth_immediates,
+ depth_immediates_receive_new_dir,
+ add_tree_with_depth,
+ upgrade_from_above,
+ status_in_depthy_wc,
+ depthy_update_above_dir_to_be_deleted,
+ depth_folding_clean_trees_1,
+ depth_folding_clean_trees_2,
+ depth_fold_expand_clean_trees,
+ pull_in_tree_with_depth_option,
+ fold_tree_with_unversioned_modified_items,
+ depth_empty_update_on_file,
+ excluded_path_update_operation,
+ excluded_path_misc_operation,
+ excluded_receive_remote_removal,
+ exclude_keeps_hidden_entries,
+ info_excluded,
+ tree_conflicts_resolved_depth_empty,
+ tree_conflicts_resolved_depth_files,
+ tree_conflicts_resolved_depth_immediates,
+ tree_conflicts_resolved_depth_infinity,
+ update_excluded_path_sticky_depths,
+ update_depth_empty_root_of_infinite_children,
+ sparse_update_with_dash_dash_parents,
+ update_below_depth_empty,
+ commit_then_immediates_update,
+ revert_depth_files,
+ spurious_nodes_row,
+ commit_excluded,
+ fold_tree_with_deleted_moved_items,
+ fold_tree_with_unversioned_items,
+ ]
+
+if __name__ == "__main__":
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/diff_tests.py b/subversion/tests/cmdline/diff_tests.py
new file mode 100755
index 0000000..905bce3
--- /dev/null
+++ b/subversion/tests/cmdline/diff_tests.py
@@ -0,0 +1,5267 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# diff_tests.py: some basic diff tests
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import sys, re, os, time, shutil, logging
+
+logger = logging.getLogger()
+
+# Our testing module
+import svntest
+from svntest import err, wc
+
+from prop_tests import binary_mime_type_on_text_file_warning
+from svntest.verify import make_diff_header, make_no_diff_deleted_header, \
+ make_diff_header, make_no_diff_deleted_header, \
+ make_git_diff_header, make_diff_prop_header, \
+ make_diff_prop_val, make_diff_prop_deleted, \
+ make_diff_prop_added, make_diff_prop_modified
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+
+######################################################################
+# Generate expected output
+
+
+######################################################################
+# Diff output checker
+#
+# Looks for the correct filenames and a suitable number of +/- lines
+# depending on whether this is an addition, modification or deletion.
+
+def check_diff_output(diff_output, name, diff_type):
+ "check diff output"
+
+# On Windows, diffs still display / rather than \ in paths
+ if svntest.main.windows == 1:
+ name = name.replace('\\', '/')
+ i_re = re.compile('^Index:')
+ d_re = re.compile('^Index: (\\./)?' + name)
+ p_re = re.compile('^--- (\\./)?' + name)
+ add_re = re.compile('^\\+')
+ sub_re = re.compile('^-')
+
+ i = 0
+ while i < len(diff_output) - 4:
+
+ # identify a possible diff
+ if (d_re.match(diff_output[i])
+ and p_re.match(diff_output[i+2])):
+
+ # count lines added and deleted
+ i += 4
+ add_lines = 0
+ sub_lines = 0
+ while i < len(diff_output) and not i_re.match(diff_output[i]):
+ if add_re.match(diff_output[i][0]):
+ add_lines += 1
+ if sub_re.match(diff_output[i][0]):
+ sub_lines += 1
+ i += 1
+
+ #print "add:", add_lines
+ #print "sub:", sub_lines
+ # check if this looks like the right sort of diff
+ if add_lines > 0 and sub_lines == 0 and diff_type == 'A':
+ return 0
+ if sub_lines > 0 and add_lines == 0 and diff_type == 'D':
+ return 0
+ if add_lines > 0 and sub_lines > 0 and diff_type == 'M':
+ return 0
+
+ else:
+ i += 1
+
+ # no suitable diff found
+ return 1
+
+def count_diff_output(diff_output):
+ "count the number of file diffs in the output"
+
+ i_re = re.compile('Index:')
+ diff_count = 0
+ i = 0
+ while i < len(diff_output) - 4:
+ if i_re.match(diff_output[i]):
+ i += 4
+ diff_count += 1
+ else:
+ i += 1
+
+ return diff_count
+
+def verify_expected_output(diff_output, expected):
+ "verify given line exists in diff output"
+ for line in diff_output:
+ if line.find(expected) != -1:
+ break
+ else:
+ raise svntest.Failure
+
+def verify_excluded_output(diff_output, excluded):
+ "verify given line does not exist in diff output as diff line"
+ for line in diff_output:
+ if re.match("^(\\+|-)%s" % re.escape(excluded), line):
+ logger.warn('Sought: %s' % excluded)
+ logger.warn('Found: %s' % line)
+ raise svntest.Failure
+
+def extract_diff_path(line):
+ l2 = line[(line.find("(")+1):]
+ l3 = l2[0:(l2.find(")"))]
+ return l3
+
+######################################################################
+# diff on a repository subset and check the output
+
+def diff_check_repo_subset(wc_dir, repo_subset, check_fn, do_diff_r):
+ "diff and check for part of the repository"
+
+ was_cwd = os.getcwd()
+ os.chdir(wc_dir)
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ repo_subset)
+ if check_fn(diff_output):
+ return 1
+
+ if do_diff_r:
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ '-r', 'HEAD',
+ repo_subset)
+ if check_fn(diff_output):
+ return 1
+
+ os.chdir(was_cwd)
+
+ return 0
+
+######################################################################
+# Changes makers and change checkers
+
+def update_a_file():
+ "update a file"
+ svntest.main.file_write(os.path.join('A', 'B', 'E', 'alpha'), "new atext")
+ # svntest.main.file_append(, "new atext")
+ return 0
+
+def check_update_a_file(diff_output):
+ "check diff for update a file"
+ return check_diff_output(diff_output,
+ os.path.join('A', 'B', 'E', 'alpha'),
+ 'M')
+
+def diff_check_update_a_file_repo_subset(wc_dir):
+ "diff and check update a file for a repository subset"
+
+ repo_subset = os.path.join('A', 'B')
+ if diff_check_repo_subset(wc_dir, repo_subset, check_update_a_file, 1):
+ return 1
+
+ repo_subset = os.path.join('A', 'B', 'E', 'alpha')
+ if diff_check_repo_subset(wc_dir, repo_subset, check_update_a_file, 1):
+ return 1
+
+ return 0
+
+
+#----------------------------------------------------------------------
+
+def add_a_file():
+ "add a file"
+ svntest.main.file_append(os.path.join('A', 'B', 'E', 'theta'), "theta")
+ svntest.main.run_svn(None, 'add', os.path.join('A', 'B', 'E', 'theta'))
+ return 0
+
+def check_add_a_file(diff_output):
+ "check diff for add a file"
+ return check_diff_output(diff_output,
+ os.path.join('A', 'B', 'E', 'theta'),
+ 'A')
+
+def check_add_a_file_reverse(diff_output):
+ "check diff for add a file"
+ return check_diff_output(diff_output,
+ os.path.join('A', 'B', 'E', 'theta'),
+ 'D')
+
+def diff_check_add_a_file_repo_subset(wc_dir):
+ "diff and check add a file for a repository subset"
+
+ repo_subset = os.path.join('A', 'B')
+ if diff_check_repo_subset(wc_dir, repo_subset, check_add_a_file, 1):
+ return 1
+
+ repo_subset = os.path.join('A', 'B', 'E', 'theta')
+ ### TODO: diff -r HEAD doesn't work for added file
+ if diff_check_repo_subset(wc_dir, repo_subset, check_add_a_file, 0):
+ return 1
+
+def update_added_file():
+ svntest.main.file_append(os.path.join('A', 'B', 'E', 'theta'), "net ttext")
+ "update added file"
+ return 0
+
+def check_update_added_file(diff_output):
+ "check diff for update of added file"
+ return check_diff_output(diff_output,
+ os.path.join('A', 'B', 'E', 'theta'),
+ 'M')
+
+#----------------------------------------------------------------------
+
+def add_a_file_in_a_subdir():
+ "add a file in a subdir"
+ os.mkdir(os.path.join('A', 'B', 'T'))
+ svntest.main.run_svn(None, 'add', os.path.join('A', 'B', 'T'))
+ svntest.main.file_append(os.path.join('A', 'B', 'T', 'phi'), "phi")
+ svntest.main.run_svn(None, 'add', os.path.join('A', 'B', 'T', 'phi'))
+ return 0
+
+def check_add_a_file_in_a_subdir(diff_output):
+ "check diff for add a file in a subdir"
+ return check_diff_output(diff_output,
+ os.path.join('A', 'B', 'T', 'phi'),
+ 'A')
+
+def check_add_a_file_in_a_subdir_reverse(diff_output):
+ "check diff for add a file in a subdir"
+ return check_diff_output(diff_output,
+ os.path.join('A', 'B', 'T', 'phi'),
+ 'D')
+
+def diff_check_add_a_file_in_a_subdir_repo_subset(wc_dir):
+ "diff and check add a file in a subdir for a repository subset"
+
+ repo_subset = os.path.join('A', 'B', 'T')
+ ### TODO: diff -r HEAD doesn't work for added subdir
+ if diff_check_repo_subset(wc_dir, repo_subset,
+ check_add_a_file_in_a_subdir, 0):
+ return 1
+
+ repo_subset = os.path.join('A', 'B', 'T', 'phi')
+ ### TODO: diff -r HEAD doesn't work for added file in subdir
+ if diff_check_repo_subset(wc_dir, repo_subset,
+ check_add_a_file_in_a_subdir, 0):
+ return 1
+
+#----------------------------------------------------------------------
+
+def replace_a_file():
+ "replace a file"
+ svntest.main.run_svn(None, 'rm', os.path.join('A', 'D', 'G', 'rho'))
+ svntest.main.file_append(os.path.join('A', 'D', 'G', 'rho'), "new rho")
+ svntest.main.run_svn(None, 'add', os.path.join('A', 'D', 'G', 'rho'))
+ return 0
+
+def check_replace_a_file(diff_output):
+ "check diff for replace a file"
+ return check_diff_output(diff_output,
+ os.path.join('A', 'D', 'G', 'rho'),
+ 'M')
+
+#----------------------------------------------------------------------
+
+def update_three_files():
+ "update three files"
+ svntest.main.file_write(os.path.join('A', 'D', 'gamma'), "new gamma")
+ svntest.main.file_write(os.path.join('A', 'D', 'G', 'tau'), "new tau")
+ svntest.main.file_write(os.path.join('A', 'D', 'H', 'psi'), "new psi")
+ return 0
+
+def check_update_three_files(diff_output):
+ "check update three files"
+ if check_diff_output(diff_output,
+ os.path.join('A', 'D', 'gamma'),
+ 'M'):
+ return 1
+ if check_diff_output(diff_output,
+ os.path.join('A', 'D', 'G', 'tau'),
+ 'M'):
+ return 1
+ if check_diff_output(diff_output,
+ os.path.join('A', 'D', 'H', 'psi'),
+ 'M'):
+ return 1
+ return 0
+
+
+######################################################################
+# make a change, check the diff, commit the change, check the diff
+
+def change_diff_commit_diff(wc_dir, revision, change_fn, check_fn):
+ "make a change, diff, commit, update and diff again"
+
+ was_cwd = os.getcwd()
+ os.chdir(wc_dir)
+
+ svntest.main.run_svn(None,
+ 'up', '-r', 'HEAD')
+
+ change_fn()
+
+ # diff without revision doesn't use an editor
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff')
+ if check_fn(diff_output):
+ raise svntest.Failure
+
+ # diff with revision runs an editor
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ '-r', 'HEAD')
+ if check_fn(diff_output):
+ raise svntest.Failure
+
+ svntest.main.run_svn(None,
+ 'ci', '-m', 'log msg')
+ svntest.main.run_svn(None,
+ 'up')
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ '-r', revision)
+ if check_fn(diff_output):
+ raise svntest.Failure
+
+ os.chdir(was_cwd)
+
+######################################################################
+# check the diff
+
+def just_diff(wc_dir, rev_check, check_fn):
+ "update and check that the given diff is seen"
+
+ was_cwd = os.getcwd()
+ os.chdir(wc_dir)
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ '-r', rev_check)
+ if check_fn(diff_output):
+ raise svntest.Failure
+ os.chdir(was_cwd)
+
+######################################################################
+# update, check the diff
+
+def update_diff(wc_dir, rev_up, rev_check, check_fn):
+ "update and check that the given diff is seen"
+
+ was_cwd = os.getcwd()
+ os.chdir(wc_dir)
+
+ svntest.main.run_svn(None,
+ 'up', '-r', rev_up)
+
+ os.chdir(was_cwd)
+
+ just_diff(wc_dir, rev_check, check_fn)
+
+######################################################################
+# check a pure repository rev1:rev2 diff
+
+def repo_diff(wc_dir, rev1, rev2, check_fn):
+ "check that the given pure repository diff is seen"
+
+ was_cwd = os.getcwd()
+ os.chdir(wc_dir)
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(None,
+ 'diff', '-r',
+ repr(rev2) + ':'
+ + repr(rev1))
+ if check_fn(diff_output):
+ raise svntest.Failure
+
+ os.chdir(was_cwd)
+
+######################################################################
+# Tests
+#
+
+# test 1
+def diff_update_a_file(sbox):
+ "update a file"
+
+ sbox.build()
+
+ change_diff_commit_diff(sbox.wc_dir, 1,
+ update_a_file,
+ check_update_a_file)
+
+# test 2
+def diff_add_a_file(sbox):
+ "add a file"
+
+ sbox.build()
+
+ change_diff_commit_diff(sbox.wc_dir, 1,
+ add_a_file,
+ check_add_a_file)
+
+#test 3
+def diff_add_a_file_in_a_subdir(sbox):
+ "add a file in an added directory"
+
+ sbox.build()
+
+ change_diff_commit_diff(sbox.wc_dir, 1,
+ add_a_file_in_a_subdir,
+ check_add_a_file_in_a_subdir)
+
+# test 4
+def diff_replace_a_file(sbox):
+ "replace a file with a file"
+
+ sbox.build()
+
+ change_diff_commit_diff(sbox.wc_dir, 1,
+ replace_a_file,
+ check_replace_a_file)
+
+# test 5
+def diff_multiple_reverse(sbox):
+ "multiple revisions diff'd forwards and backwards"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # rev 2
+ change_diff_commit_diff(wc_dir, 1,
+ add_a_file,
+ check_add_a_file)
+
+ #rev 3
+ change_diff_commit_diff(wc_dir, 2,
+ add_a_file_in_a_subdir,
+ check_add_a_file_in_a_subdir)
+
+ #rev 4
+ change_diff_commit_diff(wc_dir, 3,
+ update_a_file,
+ check_update_a_file)
+
+ # check diffs both ways
+ update_diff(wc_dir, 4, 1, check_update_a_file)
+ just_diff(wc_dir, 1, check_add_a_file_in_a_subdir)
+ just_diff(wc_dir, 1, check_add_a_file)
+ update_diff(wc_dir, 1, 4, check_update_a_file)
+ just_diff(wc_dir, 4, check_add_a_file_in_a_subdir_reverse)
+ just_diff(wc_dir, 4, check_add_a_file_reverse)
+
+ # check pure repository diffs
+ repo_diff(wc_dir, 4, 1, check_update_a_file)
+ repo_diff(wc_dir, 4, 1, check_add_a_file_in_a_subdir)
+ repo_diff(wc_dir, 4, 1, check_add_a_file)
+ repo_diff(wc_dir, 1, 4, check_update_a_file)
+ repo_diff(wc_dir, 1, 4, check_add_a_file_in_a_subdir_reverse)
+ repo_diff(wc_dir, 1, 4, check_add_a_file_reverse)
+
+# test 6
+def diff_non_recursive(sbox):
+ "non-recursive behaviour"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ change_diff_commit_diff(wc_dir, 1,
+ update_three_files,
+ check_update_three_files)
+
+ # The changes are in: ./A/D/gamma
+ # ./A/D/G/tau
+ # ./A/D/H/psi
+ # When checking D recursively there are three changes. When checking
+ # D non-recursively there is only one change. When checking G
+ # recursively, there is only one change even though D is the anchor
+
+ # full diff has three changes
+ exit_code, diff_output, err_output = svntest.main.run_svn(
+ None, 'diff', '-r', '1', sbox.ospath('A/D'))
+
+ if count_diff_output(diff_output) != 3:
+ raise svntest.Failure
+
+ # non-recursive has one change
+ exit_code, diff_output, err_output = svntest.main.run_svn(
+ None, 'diff', '-r', '1', '-N', sbox.ospath('A/D'))
+
+ if count_diff_output(diff_output) != 1:
+ raise svntest.Failure
+
+ # diffing a directory doesn't pick up other diffs in the anchor
+ exit_code, diff_output, err_output = svntest.main.run_svn(
+ None, 'diff', '-r', '1', sbox.ospath('A/D/G'))
+
+ if count_diff_output(diff_output) != 1:
+ raise svntest.Failure
+
+
+# test 7
+def diff_repo_subset(sbox):
+ "diff only part of the repository"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ was_cwd = os.getcwd()
+ os.chdir(wc_dir)
+
+ update_a_file()
+ add_a_file()
+ add_a_file_in_a_subdir()
+
+ os.chdir(was_cwd)
+
+ if diff_check_update_a_file_repo_subset(wc_dir):
+ raise svntest.Failure
+
+ if diff_check_add_a_file_repo_subset(wc_dir):
+ raise svntest.Failure
+
+ if diff_check_add_a_file_in_a_subdir_repo_subset(wc_dir):
+ raise svntest.Failure
+
+
+# test 8
+def diff_non_version_controlled_file(sbox):
+ "non version controlled files"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ svntest.main.file_append(sbox.ospath('A/D/foo'), "a new file")
+
+ svntest.actions.run_and_verify_svn(None,
+ 'svn: E155010: .*foo\' was not found.',
+ 'diff', sbox.ospath('A/D/foo'))
+
+# test 9
+def diff_pure_repository_update_a_file(sbox):
+ "pure repository diff update a file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ os.chdir(wc_dir)
+
+ # rev 2
+ update_a_file()
+ svntest.main.run_svn(None,
+ 'ci', '-m', 'log msg')
+
+ # rev 3
+ add_a_file_in_a_subdir()
+ svntest.main.run_svn(None,
+ 'ci', '-m', 'log msg')
+
+ # rev 4
+ add_a_file()
+ svntest.main.run_svn(None,
+ 'ci', '-m', 'log msg')
+
+ # rev 5
+ update_added_file()
+ svntest.main.run_svn(None,
+ 'ci', '-m', 'log msg')
+
+ svntest.main.run_svn(None,
+ 'up', '-r', '2')
+
+ url = sbox.repo_url
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ '-c', '2', url)
+ if check_update_a_file(diff_output): raise svntest.Failure
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ '-r', '1:2')
+ if check_update_a_file(diff_output): raise svntest.Failure
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ '-c', '3', url)
+ if check_add_a_file_in_a_subdir(diff_output): raise svntest.Failure
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ '-r', '2:3')
+ if check_add_a_file_in_a_subdir(diff_output): raise svntest.Failure
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ '-c', '5', url)
+ if check_update_added_file(diff_output): raise svntest.Failure
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ '-r', '4:5')
+ if check_update_added_file(diff_output): raise svntest.Failure
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ '-r', 'head')
+ if check_add_a_file_in_a_subdir_reverse(diff_output): raise svntest.Failure
+
+
+# test 10
+def diff_only_property_change(sbox):
+ "diff when property was changed but text was not"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ expected_output = \
+ make_diff_header("iota", "revision 1", "revision 2") + \
+ make_diff_prop_header("iota") + \
+ make_diff_prop_added("svn:eol-style", "native")
+
+ expected_reverse_output = \
+ make_diff_header("iota", "revision 2", "revision 1") + \
+ make_diff_prop_header("iota") + \
+ make_diff_prop_deleted("svn:eol-style", "native")
+
+ expected_rev1_output = \
+ make_diff_header("iota", "revision 1", "working copy") + \
+ make_diff_prop_header("iota") + \
+ make_diff_prop_added("svn:eol-style", "native")
+
+ os.chdir(sbox.wc_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset',
+ 'svn:eol-style', 'native', 'iota')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'empty-msg')
+
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'diff', '-r', '1:2')
+
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'diff', '-c', '2')
+
+ svntest.actions.run_and_verify_svn(expected_reverse_output, [],
+ 'diff', '-r', '2:1')
+
+ svntest.actions.run_and_verify_svn(expected_reverse_output, [],
+ 'diff', '-c', '-2')
+
+ svntest.actions.run_and_verify_svn(expected_rev1_output, [],
+ 'diff', '-r', '1')
+
+ svntest.actions.run_and_verify_svn(expected_rev1_output, [],
+ 'diff', '-r', 'PREV', 'iota')
+
+
+
+#----------------------------------------------------------------------
+# Regression test for issue #1019: make sure we don't try to display
+# diffs when the file is marked as a binary type. This tests all 3
+# uses of 'svn diff': wc-wc, wc-repos, repos-repos.
+@Issue(1019)
+def dont_diff_binary_file(sbox):
+ "don't diff file marked as binary type"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add a binary file to the project.
+ theta_contents = open(os.path.join(sys.path[0], "theta.bin"), 'rb').read()
+ # Write PNG file data into 'A/theta'.
+ theta_path = sbox.ospath('A/theta')
+ svntest.main.file_write(theta_path, theta_contents, 'wb')
+
+ svntest.main.run_svn(None, 'add', theta_path)
+
+ # Created expected output tree for 'svn ci'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/theta' : Item(verb='Adding (bin)'),
+ })
+
+ # Create expected status tree
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/theta' : Item(status=' ', wc_rev=2),
+ })
+
+ # Commit the new binary file, creating revision 2.
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Update the whole working copy to HEAD (rev 2)
+ expected_output = svntest.wc.State(wc_dir, {})
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/theta' : Item(theta_contents,
+ props={'svn:mime-type' : 'application/octet-stream'}),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'A/theta' : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ check_props=True)
+
+ # Make a local mod to the binary file.
+ svntest.main.file_append(theta_path, "some extra junk")
+
+ # First diff use-case: plain old 'svn diff wc' will display any
+ # local changes in the working copy. (diffing working
+ # vs. text-base)
+
+ re_nodisplay = re.compile('^Cannot display:')
+
+ exit_code, stdout, stderr = svntest.main.run_svn(None, 'diff', wc_dir)
+
+ for line in stdout:
+ if (re_nodisplay.match(line)):
+ break
+ else:
+ raise svntest.Failure
+
+ # Second diff use-case: 'svn diff -r1 wc' compares the wc against a
+ # the first revision in the repository.
+
+ exit_code, stdout, stderr = svntest.main.run_svn(None,
+ 'diff', '-r', '1', wc_dir)
+
+ for line in stdout:
+ if (re_nodisplay.match(line)):
+ break
+ else:
+ raise svntest.Failure
+
+ # Now commit the local mod, creating rev 3.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/theta' : Item(verb='Sending'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'A/theta' : Item(status=' ', wc_rev=3),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Third diff use-case: 'svn diff -r2:3 wc' will compare two
+ # repository trees.
+
+ exit_code, stdout, stderr = svntest.main.run_svn(None, 'diff',
+ '-r', '2:3', wc_dir)
+
+ for line in stdout:
+ if (re_nodisplay.match(line)):
+ break
+ else:
+ raise svntest.Failure
+
+
+def diff_nonextant_urls(sbox):
+ "svn diff errors against a non-existent URL"
+
+ sbox.build(create_wc = False)
+ non_extant_url = sbox.repo_url + '/A/does_not_exist'
+ extant_url = sbox.repo_url + '/A/mu'
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(
+ 1, 'diff', '--old', non_extant_url, '--new', extant_url)
+
+ for line in err_output:
+ if re.search('was not found in the repository at revision', line):
+ break
+ else:
+ raise svntest.Failure
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(
+ 1, 'diff', '--old', extant_url, '--new', non_extant_url)
+
+ for line in err_output:
+ if re.search('was not found in the repository at revision', line):
+ break
+ else:
+ raise svntest.Failure
+
+def diff_head_of_moved_file(sbox):
+ "diff against the head of a moved file"
+
+ sbox.build()
+ mu_path = sbox.ospath('A/mu')
+ new_mu_path = mu_path + '.new'
+
+ svntest.main.run_svn(None, 'mv', mu_path, new_mu_path)
+
+ # Modify the file to ensure that the diff is non-empty.
+ svntest.main.file_append(new_mu_path, "\nActually, it's a new mu.")
+
+ mu_new = sbox.ospath('A/mu.new').replace('\\','/')
+
+ expected_output = [
+ 'Index: %s\n' % mu_new,
+ '===================================================================\n',
+ '--- %s\t(.../mu)\t(revision 1)\n' % mu_new,
+ '+++ %s\t(.../mu.new)\t(working copy)\n' % mu_new,
+ '@@ -1 +1,3 @@\n',
+ ' This is the file \'mu\'.\n',
+ '+\n',
+ '+Actually, it\'s a new mu.\n',
+ '\ No newline at end of file\n',
+ ]
+
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'diff', '-r', 'HEAD', new_mu_path)
+
+
+
+#----------------------------------------------------------------------
+# Regression test for issue #977: make 'svn diff -r BASE:N' compare a
+# repository tree against the wc's text-bases, rather than the wc's
+# working files. This is a long test, which checks many variations.
+@Issue(977)
+def diff_base_to_repos(sbox):
+ "diff text-bases against repository"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ iota_path = sbox.ospath('iota')
+ newfile_path = sbox.ospath('A/D/newfile')
+ mu_path = sbox.ospath('A/mu')
+
+ # Make changes to iota, commit r2, update to HEAD (r2).
+ svntest.main.file_append(iota_path, "some rev2 iota text.\n")
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', wc_rev=2)
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ expected_output = svntest.wc.State(wc_dir, {})
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota',
+ contents=\
+ "This is the file 'iota'.\nsome rev2 iota text.\n")
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ svntest.actions.run_and_verify_update(wc_dir, expected_output,
+ expected_disk, expected_status)
+
+ # Now make another local mod to iota.
+ svntest.main.file_append(iota_path, "an iota local mod.\n")
+
+ # If we run 'svn diff -r 1', we should see diffs that include *both*
+ # the rev2 changes and local mods. That's because the working files
+ # are being compared to the repository.
+ exit_code, diff_output, err = svntest.actions.run_and_verify_svn(
+ None, [], 'diff', '-r', '1', wc_dir)
+
+ # Makes diff output look the same on all platforms.
+ def strip_eols(lines):
+ return [x.replace("\r", "").replace("\n", "") for x in lines]
+
+ expected_output_lines = make_diff_header(iota_path, "revision 1",
+ "working copy") + [
+ "@@ -1 +1,3 @@\n",
+ " This is the file 'iota'.\n",
+ "+some rev2 iota text.\n",
+ "+an iota local mod.\n"]
+
+ if strip_eols(diff_output) != strip_eols(expected_output_lines):
+ raise svntest.Failure
+
+ # If we run 'svn diff -r BASE:1', we should see diffs that only show
+ # the rev2 changes and NOT the local mods. That's because the
+ # text-bases are being compared to the repository.
+ exit_code, diff_output, err = svntest.actions.run_and_verify_svn(
+ None, [], 'diff', '-r', 'BASE:1', wc_dir)
+
+ expected_output_lines = make_diff_header(iota_path, "working copy",
+ "revision 1") + [
+ "@@ -1,2 +1 @@\n",
+ " This is the file 'iota'.\n",
+ "-some rev2 iota text.\n"]
+
+ if strip_eols(diff_output) != strip_eols(expected_output_lines):
+ raise svntest.Failure
+
+ # But that's not all folks... no, no, we're just getting started
+ # here! There are so many other tests to do.
+
+ # For example, we just ran 'svn diff -rBASE:1'. The output should
+ # look exactly the same as 'svn diff -r2:1'. (If you remove the
+ # header commentary)
+ exit_code, diff_output2, err = svntest.actions.run_and_verify_svn(
+ None, [], 'diff', '-r', '2:1', wc_dir)
+
+ diff_output[2:4] = []
+ diff_output2[2:4] = []
+
+ if (diff_output2 != diff_output):
+ raise svntest.Failure
+
+ # and similarly, does 'svn diff -r1:2' == 'svn diff -r1:BASE' ?
+ exit_code, diff_output, err = svntest.actions.run_and_verify_svn(
+ None, [], 'diff', '-r', '1:2', wc_dir)
+
+ exit_code, diff_output2, err = svntest.actions.run_and_verify_svn(
+ None, [], 'diff', '-r', '1:BASE', wc_dir)
+
+ diff_output[2:4] = []
+ diff_output2[2:4] = []
+
+ if (diff_output2 != diff_output):
+ raise svntest.Failure
+
+ # Now we schedule an addition and a deletion.
+ svntest.main.file_append(newfile_path, "Contents of newfile\n")
+ svntest.main.run_svn(None, 'add', newfile_path)
+ svntest.main.run_svn(None, 'rm', mu_path)
+
+ expected_output = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_output.add({
+ 'A/D/newfile' : Item(status='A ', wc_rev=0),
+ })
+ expected_output.tweak('A/mu', status='D ')
+ expected_output.tweak('iota', status='M ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_output)
+
+ # once again, verify that -r1:2 and -r1:BASE look the same, as do
+ # -r2:1 and -rBASE:1. None of these diffs should mention the
+ # scheduled addition or deletion.
+ exit_code, diff_output, err = svntest.actions.run_and_verify_svn(
+ None, [], 'diff', '-r', '1:2', wc_dir)
+
+ exit_code, diff_output2, err = svntest.actions.run_and_verify_svn(
+ None, [], 'diff', '-r', '1:BASE', wc_dir)
+
+ exit_code, diff_output3, err = svntest.actions.run_and_verify_svn(
+ None, [], 'diff', '-r', '2:1', wc_dir)
+
+ exit_code, diff_output4, err = svntest.actions.run_and_verify_svn(
+ None, [], 'diff', '-r', 'BASE:1', wc_dir)
+
+ diff_output[2:4] = []
+ diff_output2[2:4] = []
+ diff_output3[2:4] = []
+ diff_output4[2:4] = []
+
+ if (diff_output != diff_output2):
+ raise svntest.Failure
+
+ if (diff_output3 != diff_output4):
+ raise svntest.Failure
+
+ # Great! So far, so good. Now we commit our three changes (a local
+ # mod, an addition, a deletion) and update to HEAD (r3).
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ 'A/mu' : Item(verb='Deleting'),
+ 'A/D/newfile' : Item(verb='Adding')
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('iota', wc_rev=3)
+ expected_status.remove('A/mu')
+ expected_status.add({
+ 'A/D/newfile' : Item(status=' ', wc_rev=3),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ expected_output = svntest.wc.State(wc_dir, {})
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota',
+ contents="This is the file 'iota'.\n" + \
+ "some rev2 iota text.\nan iota local mod.\n")
+ expected_disk.add({'A/D/newfile' : Item("Contents of newfile\n")})
+ expected_disk.remove('A/mu')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+ expected_status.remove('A/mu')
+ expected_status.add({
+ 'A/D/newfile' : Item(status=' ', wc_rev=3),
+ })
+ svntest.actions.run_and_verify_update(wc_dir, expected_output,
+ expected_disk, expected_status)
+
+ # Now 'svn diff -r3:2' should == 'svn diff -rBASE:2', showing the
+ # removal of changes to iota, the adding of mu, and deletion of newfile.
+ exit_code, diff_output, err = svntest.actions.run_and_verify_svn(
+ None, [], 'diff', '-r', '3:2', wc_dir)
+
+ exit_code, diff_output2, err = svntest.actions.run_and_verify_svn(
+ None, [], 'diff', '-r', 'BASE:2', wc_dir)
+
+ # to do the comparison, remove all output lines starting with +++ or ---
+ re_infoline = re.compile('^(\+\+\+|---).*$')
+ list1 = []
+ list2 = []
+
+ for line in diff_output:
+ if not re_infoline.match(line):
+ list1.append(line)
+
+ for line in diff_output2:
+ if not re_infoline.match(line):
+ list2.append(line)
+
+ # Two files in diff may be in any order.
+ list1 = svntest.verify.UnorderedOutput(list1)
+
+ svntest.verify.compare_and_display_lines('', '', list1, list2)
+
+
+#----------------------------------------------------------------------
+# This is a simple regression test for issue #891, whereby ra_neon's
+# REPORT request would fail, because the object no longer exists in HEAD.
+@Issue(891)
+def diff_deleted_in_head(sbox):
+ "repos-repos diff on item deleted from HEAD"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A_path = sbox.ospath('A')
+ mu_path = sbox.ospath('A/mu')
+
+ # Make a change to mu, commit r2, update.
+ svntest.main.file_append(mu_path, "some rev2 mu text.\n")
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=2)
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ expected_output = svntest.wc.State(wc_dir, {})
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu',
+ contents="This is the file 'mu'.\nsome rev2 mu text.\n")
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ svntest.actions.run_and_verify_update(wc_dir, expected_output,
+ expected_disk, expected_status)
+
+ # Now delete the whole directory 'A', and commit as r3.
+ svntest.main.run_svn(None, 'rm', A_path)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(verb='Deleting'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.remove('A', 'A/B', 'A/B/E', 'A/B/E/beta', 'A/B/E/alpha',
+ 'A/B/F', 'A/B/lambda', 'A/D', 'A/D/G', 'A/D/G/rho',
+ 'A/D/G/pi', 'A/D/G/tau', 'A/D/H', 'A/D/H/psi',
+ 'A/D/H/omega', 'A/D/H/chi', 'A/D/gamma', 'A/mu',
+ 'A/C')
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Doing an 'svn diff -r1:2' on the URL of directory A should work,
+ # especially over the DAV layer.
+ the_url = sbox.repo_url + '/A'
+ diff_output = svntest.actions.run_and_verify_svn(None, [],
+ 'diff', '-r',
+ '1:2', the_url + "@2")
+
+
+#----------------------------------------------------------------------
+@Issue(2873)
+def diff_targets(sbox):
+ "select diff targets"
+
+ sbox.build()
+ os.chdir(sbox.wc_dir)
+
+ update_a_file()
+ add_a_file()
+
+ update_path = os.path.join('A', 'B', 'E', 'alpha')
+ add_path = os.path.join('A', 'B', 'E', 'theta')
+ parent_path = os.path.join('A', 'B', 'E')
+ update_url = sbox.repo_url + '/A/B/E/alpha'
+ parent_url = sbox.repo_url + '/A/B/E'
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ update_path,
+ add_path)
+ if check_update_a_file(diff_output) or check_add_a_file(diff_output):
+ raise svntest.Failure
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ update_path)
+ if check_update_a_file(diff_output) or not check_add_a_file(diff_output):
+ raise svntest.Failure
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(
+ None, 'diff', '--old', parent_path, 'alpha', 'theta')
+
+ if check_update_a_file(diff_output) or check_add_a_file(diff_output):
+ raise svntest.Failure
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(
+ None, 'diff', '--old', parent_path, 'theta')
+
+ if not check_update_a_file(diff_output) or check_add_a_file(diff_output):
+ raise svntest.Failure
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'ci',
+ '-m', 'log msg')
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(1, 'diff', '-r1:2',
+ update_path,
+ add_path)
+
+ if check_update_a_file(diff_output) or check_add_a_file(diff_output):
+ raise svntest.Failure
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(1,
+ 'diff', '-r1:2',
+ add_path)
+
+ if not check_update_a_file(diff_output) or check_add_a_file(diff_output):
+ raise svntest.Failure
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(
+ 1, 'diff', '-r1:2', '--old', parent_path, 'alpha', 'theta')
+
+ if check_update_a_file(diff_output) or check_add_a_file(diff_output):
+ raise svntest.Failure
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(
+ None, 'diff', '-r1:2', '--old', parent_path, 'alpha')
+
+ if check_update_a_file(diff_output) or not check_add_a_file(diff_output):
+ raise svntest.Failure
+
+
+#----------------------------------------------------------------------
+def diff_branches(sbox):
+ "diff for branches"
+
+ sbox.build()
+
+ A_url = sbox.repo_url + '/A'
+ A2_url = sbox.repo_url + '/A2'
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', '-m', 'log msg',
+ A_url, A2_url)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', sbox.wc_dir)
+
+ A_alpha = sbox.ospath('A/B/E/alpha')
+ A2_alpha = sbox.ospath('A2/B/E/alpha')
+
+ svntest.main.file_append(A_alpha, "\nfoo\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg', sbox.wc_dir)
+
+ svntest.main.file_append(A2_alpha, "\nbar\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg', sbox.wc_dir)
+
+ svntest.main.file_append(A_alpha, "zig\n")
+
+ # Compare repository file on one branch against repository file on
+ # another branch
+ rel_path = os.path.join('B', 'E', 'alpha')
+ exit_code, diff_output, err = svntest.actions.run_and_verify_svn(
+ None, [], 'diff', '--old', A_url, '--new', A2_url, rel_path)
+
+ verify_expected_output(diff_output, "-foo")
+ verify_expected_output(diff_output, "+bar")
+
+ # Same again but using whole branch
+ exit_code, diff_output, err = svntest.actions.run_and_verify_svn(
+ None, [], 'diff', '--old', A_url, '--new', A2_url)
+
+ verify_expected_output(diff_output, "-foo")
+ verify_expected_output(diff_output, "+bar")
+
+ # Compare two repository files on different branches
+ exit_code, diff_output, err = svntest.actions.run_and_verify_svn(
+ None, [],
+ 'diff', A_url + '/B/E/alpha', A2_url + '/B/E/alpha')
+
+ verify_expected_output(diff_output, "-foo")
+ verify_expected_output(diff_output, "+bar")
+
+ # Compare two versions of a file on a single branch
+ exit_code, diff_output, err = svntest.actions.run_and_verify_svn(
+ None, [],
+ 'diff', A_url + '/B/E/alpha@2', A_url + '/B/E/alpha@3')
+
+ verify_expected_output(diff_output, "+foo")
+
+ # Compare identical files on different branches
+ exit_code, diff_output, err = svntest.actions.run_and_verify_svn(
+ [], [],
+ 'diff', A_url + '/B/E/alpha@2', A2_url + '/B/E/alpha@3')
+
+
+#----------------------------------------------------------------------
+def diff_repos_and_wc(sbox):
+ "diff between repos URLs and WC paths"
+
+ sbox.build()
+
+ A_url = sbox.repo_url + '/A'
+ A2_url = sbox.repo_url + '/A2'
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', '-m', 'log msg',
+ A_url, A2_url)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', sbox.wc_dir)
+
+ A_alpha = sbox.ospath('A/B/E/alpha')
+ A2_alpha = sbox.ospath('A2/B/E/alpha')
+
+ svntest.main.file_append(A_alpha, "\nfoo\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg', sbox.wc_dir)
+
+ svntest.main.file_append(A2_alpha, "\nbar\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg', sbox.wc_dir)
+
+ svntest.main.file_append(A_alpha, "zig\n")
+
+ # Compare working file on one branch against repository file on
+ # another branch
+ A_path = sbox.ospath('A')
+ rel_path = os.path.join('B', 'E', 'alpha')
+ exit_code, diff_output, err = svntest.actions.run_and_verify_svn(
+ None, [],
+ 'diff', '--old', A2_url, '--new', A_path, rel_path)
+
+ verify_expected_output(diff_output, "-bar")
+ verify_expected_output(diff_output, "+foo")
+ verify_expected_output(diff_output, "+zig")
+
+ # Same again but using whole branch
+ exit_code, diff_output, err = svntest.actions.run_and_verify_svn(
+ None, [],
+ 'diff', '--old', A2_url, '--new', A_path)
+
+ verify_expected_output(diff_output, "-bar")
+ verify_expected_output(diff_output, "+foo")
+ verify_expected_output(diff_output, "+zig")
+
+#----------------------------------------------------------------------
+@Issue(1311)
+def diff_file_urls(sbox):
+ "diff between two file URLs"
+
+ sbox.build()
+
+ iota_path = sbox.ospath('iota')
+ iota_url = sbox.repo_url + '/iota'
+ iota_copy_path = sbox.ospath('A/iota')
+ iota_copy_url = sbox.repo_url + '/A/iota'
+ iota_copy2_url = sbox.repo_url + '/A/iota2'
+
+ # Put some different text into iota, and commit.
+ os.remove(iota_path)
+ svntest.main.file_append(iota_path, "foo\nbar\nsnafu\n")
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg', iota_path)
+
+ # Now, copy the file elsewhere, twice.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', '-m', 'log msg',
+ iota_url, iota_copy_url)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', '-m', 'log msg',
+ iota_url, iota_copy2_url)
+
+ # Update (to get the copies)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', sbox.wc_dir)
+
+ # Now, make edits to one of the copies of iota, and commit.
+ os.remove(iota_copy_path)
+ svntest.main.file_append(iota_copy_path, "foo\nsnafu\nabcdefg\nopqrstuv\n")
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg', iota_copy_path)
+
+ # Finally, do a diff between the first and second copies of iota,
+ # and verify that we got the expected lines. And then do it in reverse!
+ exit_code, out, err = svntest.actions.run_and_verify_svn(None, [],
+ 'diff',
+ iota_copy_url,
+ iota_copy2_url)
+
+ verify_expected_output(out, "+bar")
+ verify_expected_output(out, "-abcdefg")
+ verify_expected_output(out, "-opqrstuv")
+
+ exit_code, out, err = svntest.actions.run_and_verify_svn(None, [],
+ 'diff',
+ iota_copy2_url,
+ iota_copy_url)
+
+ verify_expected_output(out, "-bar")
+ verify_expected_output(out, "+abcdefg")
+ verify_expected_output(out, "+opqrstuv")
+
+#----------------------------------------------------------------------
+def diff_prop_change_local_edit(sbox):
+ "diff a property change plus a local edit"
+
+ sbox.build()
+
+ iota_path = sbox.ospath('iota')
+ iota_url = sbox.repo_url + '/iota'
+
+ # Change a property on iota, and commit.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'pname', 'pvalue', iota_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg', iota_path)
+
+ # Make local edits to iota.
+ svntest.main.file_append(iota_path, "\nMore text.\n")
+
+ # diff r1:COMMITTED should show the property change but not the local edit.
+ exit_code, out, err = svntest.actions.run_and_verify_svn(None, [],
+ 'diff',
+ '-r1:COMMITTED',
+ iota_path)
+ for line in out:
+ if line.find("+More text.") != -1:
+ raise svntest.Failure
+ verify_expected_output(out, "+pvalue")
+
+ # diff r1:BASE should show the property change but not the local edit.
+ exit_code, out, err = svntest.actions.run_and_verify_svn(None, [],
+ 'diff', '-r1:BASE',
+ iota_path)
+ for line in out:
+ if line.find("+More text.") != -1:
+ raise svntest.Failure # fails at r7481
+ verify_expected_output(out, "+pvalue") # fails at r7481
+
+ # diff r1:WC should show the local edit as well as the property change.
+ exit_code, out, err = svntest.actions.run_and_verify_svn(None, [],
+ 'diff', '-r1',
+ iota_path)
+ verify_expected_output(out, "+More text.") # fails at r7481
+ verify_expected_output(out, "+pvalue")
+
+#----------------------------------------------------------------------
+def check_for_omitted_prefix_in_path_component(sbox):
+ "check for omitted prefix in path component"
+
+ sbox.build()
+ svntest.actions.do_sleep_for_timestamps()
+
+ prefix_path = sbox.ospath('prefix_mydir')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mkdir', prefix_path)
+ other_prefix_path = sbox.ospath('prefix_other')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mkdir', other_prefix_path)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg', sbox.wc_dir)
+
+
+ file_path = os.path.join(prefix_path, "test.txt")
+ svntest.main.file_write(file_path, "Hello\nThere\nIota\n")
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'add', file_path)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg', sbox.wc_dir)
+
+
+ prefix_url = sbox.repo_url + "/prefix_mydir"
+ other_prefix_url = sbox.repo_url + "/prefix_other/mytag"
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', '-m', 'log msg', prefix_url,
+ other_prefix_url)
+
+ svntest.main.file_write(file_path, "Hello\nWorld\nIota\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg', prefix_path)
+
+ exit_code, out, err = svntest.actions.run_and_verify_svn(None, [],
+ 'diff', prefix_url,
+ other_prefix_url)
+
+ src = extract_diff_path(out[2])
+ dest = extract_diff_path(out[3])
+
+ good_src = ".../prefix_mydir"
+ good_dest = ".../prefix_other/mytag"
+
+ if ((src != good_src) or (dest != good_dest)):
+ logger.warn("src is '%s' instead of '%s' and dest is '%s' instead of '%s'" %
+ (src, good_src, dest, good_dest))
+ raise svntest.Failure
+
+#----------------------------------------------------------------------
+def diff_renamed_file(sbox):
+ "diff a file that has been renamed"
+
+ sbox.build()
+
+ os.chdir(sbox.wc_dir)
+
+ pi_path = os.path.join('A', 'D', 'G', 'pi')
+ pi2_path = os.path.join('A', 'D', 'pi2')
+ svntest.main.file_write(pi_path, "new pi")
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg')
+
+ svntest.main.file_append(pi_path, "even more pi")
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg')
+
+ svntest.main.run_svn(None, 'mv', pi_path, pi2_path)
+
+ # Repos->WC diff of the file
+ exit_code, diff_output, err_output = svntest.main.run_svn(None,
+ 'diff', '-r', '1',
+ pi2_path)
+ if check_diff_output(diff_output,
+ pi2_path,
+ 'M') :
+ raise svntest.Failure
+
+ # Repos->WC diff of the file showing copies as adds
+ exit_code, diff_output, err_output = svntest.main.run_svn(
+ None, 'diff', '-r', '1',
+ '--show-copies-as-adds', pi2_path)
+ if check_diff_output(diff_output,
+ pi2_path,
+ 'A') :
+ raise svntest.Failure
+
+ svntest.main.file_append(pi2_path, "new pi")
+
+ # Repos->WC of the containing directory
+ exit_code, diff_output, err_output = svntest.main.run_svn(
+ None, 'diff', '-r', '1', os.path.join('A', 'D'))
+
+ if check_diff_output(diff_output,
+ pi_path,
+ 'D') :
+ raise svntest.Failure
+
+ if check_diff_output(diff_output,
+ pi2_path,
+ 'M') :
+ raise svntest.Failure
+
+ # Repos->WC of the containing directory showing copies as adds
+ exit_code, diff_output, err_output = svntest.main.run_svn(
+ None, 'diff', '-r', '1', '--show-copies-as-adds', os.path.join('A', 'D'))
+
+ if check_diff_output(diff_output,
+ pi_path,
+ 'D') :
+ raise svntest.Failure
+
+ if check_diff_output(diff_output,
+ pi2_path,
+ 'A') :
+ raise svntest.Failure
+
+ # WC->WC of the file
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ pi2_path)
+ if check_diff_output(diff_output,
+ pi2_path,
+ 'M') :
+ raise svntest.Failure
+
+ # WC->WC of the file showing copies as adds
+ exit_code, diff_output, err_output = svntest.main.run_svn(
+ None, 'diff',
+ '--show-copies-as-adds', pi2_path)
+ if check_diff_output(diff_output,
+ pi2_path,
+ 'A') :
+ raise svntest.Failure
+
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg')
+
+ # Repos->WC diff of file after the rename
+ exit_code, diff_output, err_output = svntest.main.run_svn(None,
+ 'diff', '-r', '1',
+ pi2_path)
+ if check_diff_output(diff_output,
+ pi2_path,
+ 'M') :
+ raise svntest.Failure
+
+ # Repos->WC diff of file after the rename. The local file is not
+ # a copy anymore (it has schedule "normal"), so --show-copies-as-adds
+ # should have no effect.
+ exit_code, diff_output, err_output = svntest.main.run_svn(
+ None, 'diff', '-r', '1',
+ '--show-copies-as-adds', pi2_path)
+ if check_diff_output(diff_output,
+ pi2_path,
+ 'M') :
+ raise svntest.Failure
+
+ # Repos->repos diff after the rename
+ ### --show-copies-as-adds has no effect
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ '-r', '2:3',
+ pi2_path)
+ if check_diff_output(diff_output,
+ os.path.join('A', 'D', 'pi'),
+ 'M') :
+ raise svntest.Failure
+
+#----------------------------------------------------------------------
+def diff_within_renamed_dir(sbox):
+ "diff a file within a renamed directory"
+
+ sbox.build()
+
+ os.chdir(sbox.wc_dir)
+
+ svntest.main.run_svn(None, 'mv', os.path.join('A', 'D', 'G'),
+ os.path.join('A', 'D', 'I'))
+ # svntest.main.run_svn(None, 'ci', '-m', 'log_msg')
+ svntest.main.file_write(os.path.join('A', 'D', 'I', 'pi'), "new pi")
+
+ # Check a repos->wc diff
+ exit_code, diff_output, err_output = svntest.main.run_svn(
+ None, 'diff', os.path.join('A', 'D', 'I', 'pi'))
+
+ if check_diff_output(diff_output,
+ os.path.join('A', 'D', 'I', 'pi'),
+ 'M') :
+ raise svntest.Failure
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg')
+
+ # Check repos->wc after commit
+ exit_code, diff_output, err_output = svntest.main.run_svn(
+ None, 'diff', '-r', '1', os.path.join('A', 'D', 'I', 'pi'))
+
+ if check_diff_output(diff_output,
+ os.path.join('A', 'D', 'I', 'pi'),
+ 'M') :
+ raise svntest.Failure
+
+ # Test the diff while within the moved directory
+ os.chdir(os.path.join('A','D','I'))
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(None,
+ 'diff', '-r', '1')
+
+ if check_diff_output(diff_output, 'pi', 'M') :
+ raise svntest.Failure
+
+ # Test a repos->repos diff while within the moved directory
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ '-r', '1:2')
+
+ if check_diff_output(diff_output, 'pi', 'M') :
+ raise svntest.Failure
+
+#----------------------------------------------------------------------
+def diff_prop_on_named_dir(sbox):
+ "diff a prop change on a dir named explicitly"
+
+ # Diff of a property change or addition should contain a "+" line.
+ # Diff of a property change or deletion should contain a "-" line.
+ # On a diff between repository revisions (not WC) of a dir named
+ # explicitly, the "-" line was missing. (For a file, and for a dir
+ # recursed into, the result was correct.)
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ os.chdir(sbox.wc_dir)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'p', 'v', 'A')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', '')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propdel', 'p', 'A')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', '')
+
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ '-r2:3', 'A')
+ # Check that the result contains a "-" line.
+ verify_expected_output(diff_output, "-v")
+
+#----------------------------------------------------------------------
+def diff_keywords(sbox):
+ "ensure that diff won't show keywords"
+
+ sbox.build()
+
+ iota_path = sbox.ospath('iota')
+
+ # Enable keyword expansion on iota.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps',
+ 'svn:keywords',
+ 'Id Rev Date',
+ iota_path)
+
+ # Write both variable-length ($Date$) and fixed-length ($Date:: ...$)
+ # keyword forms into the file.
+ fp = open(iota_path, 'w')
+ fp.write("$Date$\n")
+ fp.write("$Id$\n")
+ fp.write("$Rev$\n")
+ fp.write("$Date::%s$\n" % (' ' * 80))
+ fp.write("$Id::%s$\n" % (' ' * 80))
+ fp.write("$Rev::%s$\n" % (' ' * 80))
+ fp.close()
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'keywords', sbox.wc_dir)
+
+ svntest.main.file_append(iota_path, "bar\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'added bar', sbox.wc_dir)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', sbox.wc_dir)
+
+ # Forward diff: the real change shows, expanded keywords do not.
+ exit_code, diff_output, err = svntest.actions.run_and_verify_svn(
+ None, [], 'diff', '-r', 'prev:head', sbox.wc_dir)
+
+ verify_expected_output(diff_output, "+bar")
+ verify_excluded_output(diff_output, "$Date:")
+ verify_excluded_output(diff_output, "$Rev:")
+ verify_excluded_output(diff_output, "$Id:")
+
+ # Reverse diff: same expectation, with the change shown as a deletion.
+ exit_code, diff_output, err = svntest.actions.run_and_verify_svn(
+ None, [], 'diff', '-r', 'head:prev', sbox.wc_dir)
+
+ verify_expected_output(diff_output, "-bar")
+ verify_excluded_output(diff_output, "$Date:")
+ verify_excluded_output(diff_output, "$Rev:")
+ verify_excluded_output(diff_output, "$Id:")
+
+ # Check fixed length keywords will show up
+ # when the length of keyword has changed
+ fp = open(iota_path, 'w')
+ fp.write("$Date$\n")
+ fp.write("$Id$\n")
+ fp.write("$Rev$\n")
+ fp.write("$Date::%s$\n" % (' ' * 79))
+ fp.write("$Id::%s$\n" % (' ' * 79))
+ fp.write("$Rev::%s$\n" % (' ' * 79))
+ fp.close()
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'keywords 2', sbox.wc_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', sbox.wc_dir)
+
+ exit_code, diff_output, err = svntest.actions.run_and_verify_svn(
+ None, [], 'diff', '-r', 'prev:head', sbox.wc_dir)
+
+ # these should show up
+ verify_expected_output(diff_output, "+$Id:: ")
+ verify_expected_output(diff_output, "-$Id:: ")
+ verify_expected_output(diff_output, "-$Rev:: ")
+ verify_expected_output(diff_output, "+$Rev:: ")
+ verify_expected_output(diff_output, "-$Date:: ")
+ verify_expected_output(diff_output, "+$Date:: ")
+ # ... and these won't
+ verify_excluded_output(diff_output, "$Date: ")
+ verify_excluded_output(diff_output, "$Rev: ")
+ verify_excluded_output(diff_output, "$Id: ")
+
+
+def diff_force(sbox):
+ "show diffs for binary files"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ iota_path = sbox.ospath('iota')
+
+ # Append a line to iota and make it binary.
+ svntest.main.file_append(iota_path, "new line")
+ svntest.main.run_svn(binary_mime_type_on_text_file_warning,
+ 'propset', 'svn:mime-type',
+ 'application/octet-stream', iota_path)
+
+ # Created expected output tree for 'svn ci'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+
+ # Create expected status tree
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'iota' : Item(status=' ', wc_rev=2),
+ })
+
+ # Commit iota, creating revision 2.
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Add another line, while keeping the file as binary.
+ svntest.main.file_append(iota_path, "another line")
+
+ # Commit creating rev 3.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'iota' : Item(status=' ', wc_rev=3),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Check that we get diff when the first, the second and both files
+ # are marked as binary. First we'll use --force. Then we'll use
+ # the configuration option 'diff-ignore-content-type'.
+
+ re_nodisplay = re.compile('^Cannot display:')
+
+ # Either option must suppress the "Cannot display" marker for every
+ # revision range (binary on one side, the other, or both).
+ for opt in ['--force',
+ '--config-option=config:miscellany:diff-ignore-content-type=yes']:
+ for range in ['-r1:2', '-r2:1', '-r2:3']:
+ exit_code, stdout, stderr = svntest.main.run_svn(None, 'diff', range,
+ iota_path, opt)
+ for line in stdout:
+ if (re_nodisplay.match(line)):
+ raise svntest.Failure
+
+#----------------------------------------------------------------------
+# Regression test for issue #2333: Renaming a directory should produce
+# deletion and addition diffs for each included file.
+@Issue(2333)
+def diff_renamed_dir(sbox):
+ "diff a renamed directory"
+
+ sbox.build()
+
+ os.chdir(sbox.wc_dir)
+
+ # Rename A/D/G to A/D/I (a move, i.e. copy + delete).
+ svntest.main.run_svn(None, 'mv', os.path.join('A', 'D', 'G'),
+ os.path.join('A', 'D', 'I'))
+
+ # Check a wc->wc diff
+ exit_code, diff_output, err_output = svntest.main.run_svn(
+ None, 'diff', '--show-copies-as-adds', os.path.join('A', 'D'))
+
+ if check_diff_output(diff_output,
+ os.path.join('A', 'D', 'G', 'pi'),
+ 'D') :
+ raise svntest.Failure
+ if check_diff_output(diff_output,
+ os.path.join('A', 'D', 'I', 'pi'),
+ 'A') :
+ raise svntest.Failure
+
+ # Check a repos->wc diff of the moved-here node before commit
+ exit_code, diff_output, err_output = svntest.main.run_svn(
+ None, 'diff', '-r', '1', '--show-copies-as-adds',
+ os.path.join('A', 'D', 'I'))
+ if check_diff_output(diff_output,
+ os.path.join('A', 'D', 'I', 'pi'),
+ 'A') :
+ raise svntest.Failure
+
+ # Check a repos->wc diff of the moved-away node before commit
+ exit_code, diff_output, err_output = svntest.main.run_svn(
+ None, 'diff', '-r', '1', os.path.join('A', 'D', 'G'))
+ if check_diff_output(diff_output,
+ os.path.join('A', 'D', 'G', 'pi'),
+ 'D') :
+ raise svntest.Failure
+
+ # Commit
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg')
+
+ # Check repos->wc after commit
+ exit_code, diff_output, err_output = svntest.main.run_svn(
+ None, 'diff', '-r', '1', os.path.join('A', 'D'))
+
+ if check_diff_output(diff_output,
+ os.path.join('A', 'D', 'G', 'pi'),
+ 'D') :
+ raise svntest.Failure
+ if check_diff_output(diff_output,
+ os.path.join('A', 'D', 'I', 'pi'),
+ 'A') :
+ raise svntest.Failure
+
+ # Test a repos->repos diff after commit
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ '-r', '1:2')
+ if check_diff_output(diff_output,
+ os.path.join('A', 'D', 'G', 'pi'),
+ 'D') :
+ raise svntest.Failure
+ if check_diff_output(diff_output,
+ os.path.join('A', 'D', 'I', 'pi'),
+ 'A') :
+ raise svntest.Failure
+
+ # repos->repos with explicit URL arg
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ '-r', '1:2',
+ '^/A')
+ if check_diff_output(diff_output,
+ os.path.join('D', 'G', 'pi'),
+ 'D') :
+ raise svntest.Failure
+ if check_diff_output(diff_output,
+ os.path.join('D', 'I', 'pi'),
+ 'A') :
+ raise svntest.Failure
+
+ # Go to the parent of the moved directory
+ os.chdir(os.path.join('A','D'))
+
+ # repos->wc diff in the parent
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ '-r', '1')
+
+ if check_diff_output(diff_output,
+ os.path.join('G', 'pi'),
+ 'D') :
+ raise svntest.Failure
+ if check_diff_output(diff_output,
+ os.path.join('I', 'pi'),
+ 'A') :
+ raise svntest.Failure
+
+ # repos->repos diff in the parent
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ '-r', '1:2')
+
+ if check_diff_output(diff_output,
+ os.path.join('G', 'pi'),
+ 'D') :
+ raise svntest.Failure
+ if check_diff_output(diff_output,
+ os.path.join('I', 'pi'),
+ 'A') :
+ raise svntest.Failure
+
+ # Go to the move target directory
+ os.chdir('I')
+
+ # repos->wc diff while within the moved directory (should be empty)
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ '-r', '1')
+ if diff_output:
+ raise svntest.Failure
+
+ # repos->repos diff while within the moved directory (should be empty)
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff',
+ '-r', '1:2')
+
+ if diff_output:
+ raise svntest.Failure
+
+
+#----------------------------------------------------------------------
+def diff_property_changes_to_base(sbox):
+ "diff to BASE with local property mods"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Each of these returns an expected diff as a list of lines.
+ def add_diff_A(r1, r2):
+ return (make_diff_header("A", r1, r2) +
+ make_diff_prop_header("A") +
+ make_diff_prop_added("dirprop", "r2value"))
+
+ def add_diff_iota(r1, r2):
+ return (make_diff_header("iota", r1, r2) +
+ make_diff_prop_header("iota") +
+ make_diff_prop_added("fileprop", "r2value"))
+
+ def del_diff_A(r1, r2):
+ return (make_diff_header("A", r1, r2) +
+ make_diff_prop_header("A") +
+ make_diff_prop_deleted("dirprop", "r2value"))
+
+ def del_diff_iota(r1, r2):
+ return (make_diff_header("iota", r1, r2) +
+ make_diff_prop_header("iota") +
+ make_diff_prop_deleted("fileprop", "r2value"))
+
+ # Each of these is an expected diff as a list of lines.
+ expected_output_r1_r2 = (add_diff_A('revision 1', 'revision 2') +
+ add_diff_iota('revision 1', 'revision 2'))
+ expected_output_r2_r1 = (del_diff_A('revision 2', 'revision 1') +
+ del_diff_iota('revision 2', 'revision 1'))
+ expected_output_r1 = (add_diff_A('revision 1', 'working copy') +
+ add_diff_iota('revision 1', 'working copy'))
+ expected_output_base_r1 = (del_diff_A('working copy', 'revision 1') +
+ del_diff_iota('working copy', 'revision 1'))
+
+ os.chdir(sbox.wc_dir)
+
+ # Add properties on iota and A, commit as r2.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset',
+ 'fileprop', 'r2value', 'iota')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset',
+ 'dirprop', 'r2value', 'A')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'empty-msg')
+
+ # Check that forward and reverse repos-repos diffs are as expected.
+ expected = svntest.verify.UnorderedOutput(expected_output_r1_r2)
+ svntest.actions.run_and_verify_svn(expected, [],
+ 'diff', '-r', '1:2')
+
+ expected = svntest.verify.UnorderedOutput(expected_output_r2_r1)
+ svntest.actions.run_and_verify_svn(expected, [],
+ 'diff', '-r', '2:1')
+
+ # Now check repos->WORKING, repos->BASE, and BASE->repos.
+ # (BASE is r1, and WORKING has no local mods, so this should produce
+ # the same output as above).
+ expected = svntest.verify.UnorderedOutput(expected_output_r1)
+ svntest.actions.run_and_verify_svn(expected, [],
+ 'diff', '-r', '1')
+
+ svntest.actions.run_and_verify_svn(expected, [],
+ 'diff', '-r', '1:BASE')
+
+ expected = svntest.verify.UnorderedOutput(expected_output_base_r1)
+ svntest.actions.run_and_verify_svn(expected, [],
+ 'diff', '-r', 'BASE:1')
+
+ # Modify some properties.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset',
+ 'fileprop', 'workingvalue', 'iota')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset',
+ 'dirprop', 'workingvalue', 'A')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset',
+ 'fileprop', 'workingvalue', 'A/mu')
+
+ # Check that the earlier diffs against BASE are unaffected by the
+ # presence of local mods (with the exception of diff header changes).
+ expected = svntest.verify.UnorderedOutput(expected_output_r1)
+ svntest.actions.run_and_verify_svn(expected, [],
+ 'diff', '-r', '1:BASE')
+
+ expected = svntest.verify.UnorderedOutput(expected_output_base_r1)
+ svntest.actions.run_and_verify_svn(expected, [],
+ 'diff', '-r', 'BASE:1')
+
+def diff_schedule_delete(sbox):
+ "scheduled deleted"
+
+ sbox.build()
+
+ expected_output_r2_working = make_diff_header("foo", "revision 2",
+ "nonexistent") + [
+ "@@ -1 +0,0 @@\n",
+ "-xxx\n"
+ ]
+
+ expected_output_r2_base = make_diff_header("foo", "revision 2",
+ "nonexistent") + [
+ "@@ -1 +0,0 @@\n",
+ "-xxx\n",
+ ]
+ expected_output_base_r2 = make_diff_header("foo", "nonexistent",
+ "revision 2") + [
+ "@@ -0,0 +1 @@\n",
+ "+xxx\n",
+ ]
+
+ expected_output_r1_base = make_diff_header("foo", "nonexistent",
+ "working copy") + [
+ "@@ -0,0 +1,2 @@\n",
+ "+xxx\n",
+ "+yyy\n"
+ ]
+ expected_output_base_r1 = make_diff_header("foo", "working copy",
+ "nonexistent") + [
+ "@@ -1,2 +0,0 @@\n",
+ "-xxx\n",
+ "-yyy\n"
+ ]
+ # BASE->WORKING header differs only in its two label lines.
+ expected_output_base_working = expected_output_base_r1[:]
+ expected_output_base_working[2] = "--- foo\t(revision 3)\n"
+ expected_output_base_working[3] = "+++ foo\t(nonexistent)\n"
+
+ wc_dir = sbox.wc_dir
+ os.chdir(wc_dir)
+
+ # Create 'foo' (r2), then modify it (r3).
+ svntest.main.file_append('foo', "xxx\n")
+ svntest.main.run_svn(None, 'add', 'foo')
+ svntest.main.run_svn(None,
+ 'ci', '-m', 'log msg r2')
+
+ svntest.main.file_append('foo', "yyy\n")
+ svntest.main.run_svn(None,
+ 'ci', '-m', 'log msg r3')
+
+ # Update everyone's BASE to r3, and mark 'foo' as schedule-deleted.
+ svntest.main.run_svn(None,
+ 'up')
+ svntest.main.run_svn(None, 'rm', 'foo')
+
+ # A file marked as schedule-delete should act as if it were not present
+ # in WORKING, but diffs against BASE should remain unaffected.
+
+ # 1. repos-wc diff: file not present in repos.
+ svntest.actions.run_and_verify_svn([], [],
+ 'diff', '-r', '1')
+ svntest.actions.run_and_verify_svn(expected_output_r1_base, [],
+ 'diff', '-r', '1:BASE')
+ svntest.actions.run_and_verify_svn(expected_output_base_r1, [],
+ 'diff', '-r', 'BASE:1')
+
+ # 2. repos-wc diff: file present in repos.
+ svntest.actions.run_and_verify_svn(expected_output_r2_working, [],
+ 'diff', '-r', '2')
+ svntest.actions.run_and_verify_svn(expected_output_r2_base, [],
+ 'diff', '-r', '2:BASE')
+ svntest.actions.run_and_verify_svn(expected_output_base_r2, [],
+ 'diff', '-r', 'BASE:2')
+
+ # 3. wc-wc diff.
+ svntest.actions.run_and_verify_svn(expected_output_base_working, [],
+ 'diff')
+
+#----------------------------------------------------------------------
+def diff_mime_type_changes(sbox):
+ "repos-wc diffs with local svn:mime-type prop mods"
+
+ sbox.build()
+
+ expected_output_r1_wc = make_diff_header("iota", "revision 1",
+ "working copy") + [
+ "@@ -1 +1,2 @@\n",
+ " This is the file 'iota'.\n",
+ "+revision 2 text.\n" ]
+
+ expected_output_wc_r1 = make_diff_header("iota", "working copy",
+ "revision 1") + [
+ "@@ -1,2 +1 @@\n",
+ " This is the file 'iota'.\n",
+ "-revision 2 text.\n" ]
+
+
+ os.chdir(sbox.wc_dir)
+
+ # Append some text to iota (r2).
+ svntest.main.file_append('iota', "revision 2 text.\n")
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log_msg')
+
+ # Check that forward and reverse repos-BASE diffs are as expected.
+ svntest.actions.run_and_verify_svn(expected_output_r1_wc, [],
+ 'diff', '-r', '1:BASE')
+
+ svntest.actions.run_and_verify_svn(expected_output_wc_r1, [],
+ 'diff', '-r', 'BASE:1')
+
+ # Mark iota as a binary file in the working copy.
+ svntest.actions.run_and_verify_svn2(None,
+ binary_mime_type_on_text_file_warning, 0,
+ 'propset', 'svn:mime-type',
+ 'application/octet-stream', 'iota')
+
+ # Check that the earlier diffs against BASE are unaffected by the
+ # presence of local svn:mime-type property mods.
+ svntest.actions.run_and_verify_svn(expected_output_r1_wc, [],
+ 'diff', '-r', '1:BASE')
+
+ svntest.actions.run_and_verify_svn(expected_output_wc_r1, [],
+ 'diff', '-r', 'BASE:1')
+
+ # Commit the change (r3) (so that BASE has the binary MIME type), then
+ # mark iota as a text file again in the working copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log_msg')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propdel', 'svn:mime-type', 'iota')
+
+ # Now diffs against BASE will fail, but diffs against WORKING should be
+ # fine.
+ svntest.actions.run_and_verify_svn(expected_output_r1_wc, [],
+ 'diff', '-r', '1')
+
+
+#----------------------------------------------------------------------
+# Test a repos-WORKING diff, with different versions of the same property
+# at repository, BASE, and WORKING.
+def diff_prop_change_local_propmod(sbox):
+ "diff a property change plus a local prop edit"
+
+ sbox.build()
+
+ expected_output_r2_wc = \
+ make_diff_header("A", "revision 2", "working copy") + \
+ make_diff_prop_header("A") + \
+ make_diff_prop_modified("dirprop", "r2value", "workingvalue") + \
+ make_diff_prop_added("newdirprop", "newworkingvalue") + \
+ make_diff_header("iota", "revision 2", "working copy") + \
+ make_diff_prop_header("iota") + \
+ make_diff_prop_modified("fileprop", "r2value", "workingvalue") + \
+ make_diff_prop_added("newfileprop", "newworkingvalue")
+
+ os.chdir(sbox.wc_dir)
+
+ # Set a property on A/ and iota, and commit them (r2).
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'dirprop',
+ 'r2value', 'A')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'fileprop',
+ 'r2value', 'iota')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log_msg')
+
+ # Change the property values on A/ and iota, and commit them (r3).
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'dirprop',
+ 'r3value', 'A')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'fileprop',
+ 'r3value', 'iota')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log_msg')
+
+ # Finally, change the property values one last time.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'dirprop',
+ 'workingvalue', 'A')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'fileprop',
+ 'workingvalue', 'iota')
+ # And also add some properties that only exist in WORKING.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'newdirprop',
+ 'newworkingvalue', 'A')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'newfileprop',
+ 'newworkingvalue', 'iota')
+
+ # Now, if we diff r2 to WORKING, we've got three property values
+ # to consider: r2value (in the repository), r3value (in BASE), and
+ # workingvalue (in WORKING).
+ # The diff should only show the r2->WORKING change.
+ #
+ # We also need to make sure that the 'new' (WORKING only) properties
+ # are included in the output, since they won't be listed in a simple
+ # BASE->r2 diff.
+ expected = svntest.verify.UnorderedOutput(expected_output_r2_wc)
+ svntest.actions.run_and_verify_svn(expected, [],
+ 'diff', '-r', '2')
+
+
+#----------------------------------------------------------------------
+# repos->wc and BASE->repos diffs that add files or directories with
+# properties should show the added properties.
+def diff_repos_wc_add_with_props(sbox):
+ "repos-wc diff showing added entries with props"
+
+ sbox.build()
+
+ # Expected content+prop fragments for each added node.
+ diff_foo = [
+ "@@ -0,0 +1 @@\n",
+ "+content\n",
+ ] + make_diff_prop_header("foo") + \
+ make_diff_prop_added("propname", "propvalue")
+ diff_X = \
+ make_diff_prop_header("X") + \
+ make_diff_prop_added("propname", "propvalue")
+ diff_X_bar = [
+ "@@ -0,0 +1 @@\n",
+ "+content\n",
+ ] + make_diff_prop_header("X/bar") + \
+ make_diff_prop_added("propname", "propvalue")
+
+ diff_X_r1_base = make_diff_header("X", "nonexistent",
+ "working copy") + diff_X
+ diff_X_base_r3 = make_diff_header("X", "nonexistent",
+ "revision 3") + diff_X
+ diff_foo_r1_base = make_diff_header("foo", "nonexistent",
+ "revision 3") + diff_foo
+ diff_foo_base_r3 = make_diff_header("foo", "nonexistent",
+ "revision 3") + diff_foo
+ diff_X_bar_r1_base = make_diff_header("X/bar", "nonexistent",
+ "revision 3") + diff_X_bar
+ diff_X_bar_base_r3 = make_diff_header("X/bar", "nonexistent",
+ "revision 3") + diff_X_bar
+
+ expected_output_r1_base = svntest.verify.UnorderedOutput(diff_X_r1_base +
+ diff_X_bar_r1_base +
+ diff_foo_r1_base)
+ expected_output_base_r3 = svntest.verify.UnorderedOutput(diff_foo_base_r3 +
+ diff_X_bar_base_r3 +
+ diff_X_base_r3)
+
+ os.chdir(sbox.wc_dir)
+
+ # Create directory X, file foo, and file X/bar, and commit them (r2).
+ os.makedirs('X')
+ svntest.main.file_append('foo', "content\n")
+ svntest.main.file_append(os.path.join('X', 'bar'), "content\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'add', 'X', 'foo')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log_msg')
+
+ # Set a property on all three items, and commit them (r3).
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'propname',
+ 'propvalue', 'X', 'foo',
+ os.path.join('X', 'bar'))
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log_msg')
+
+ # Now, if we diff r1 to WORKING or BASE, we should see the content
+ # addition for foo and X/bar, and property additions for all three.
+ svntest.actions.run_and_verify_svn(expected_output_r1_base, [],
+ 'diff', '-r', '1')
+ svntest.actions.run_and_verify_svn(expected_output_r1_base, [],
+ 'diff', '-r', '1:BASE')
+
+ # Update the BASE and WORKING revisions to r1.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '-r', '1')
+
+ # If we diff BASE to r3, we should see the same output as above.
+ svntest.actions.run_and_verify_svn(expected_output_base_r3, [],
+ 'diff', '-r', 'BASE:3')
+
+
+#----------------------------------------------------------------------
+# repos-wc diffs on a non-recursively checked out wc that would normally
+# (if recursively checked out) include a directory that is not present in
+# the repos version should not segfault.
+def diff_nonrecursive_checkout_deleted_dir(sbox):
+ "nonrecursive diff + deleted directories"
+ sbox.build()
+
+ url = sbox.repo_url
+ A_url = url + '/A'
+ A_prime_url = url + '/A_prime'
+
+ # Copy A to A_prime (r2), then add A_prime/Q (r3), directly in the repos.
+ svntest.main.run_svn(None,
+ 'cp', '-m', 'log msg', A_url, A_prime_url)
+
+ svntest.main.run_svn(None,
+ 'mkdir', '-m', 'log msg', A_prime_url + '/Q')
+
+ wc = sbox.add_wc_path('wc')
+
+ # Non-recursive checkout, so Q is absent from the wc.
+ svntest.main.run_svn(None,
+ 'co', '-N', A_prime_url, wc)
+
+ os.chdir(wc)
+
+ # We don't particularly care about the output here, just that it doesn't
+ # segfault.
+ svntest.main.run_svn(None,
+ 'diff', '-r1')
+
+
+#----------------------------------------------------------------------
+# repos->WORKING diffs that include directories with local mods that are
+# not present in the repos version should work as expected (and not, for
+# example, show an extraneous BASE->WORKING diff for the added directory
+# after the repos->WORKING output).
+def diff_repos_working_added_dir(sbox):
+ "repos->WORKING diff showing added modifed dir"
+
+ sbox.build()
+
+ expected_output_r1_BASE = make_diff_header("X/bar", "nonexistent",
+ "revision 2") + [
+ "@@ -0,0 +1 @@\n",
+ "+content\n" ]
+ expected_output_r1_WORKING = make_diff_header("X/bar", "nonexistent",
+ "working copy") + [
+ "@@ -0,0 +1,2 @@\n",
+ "+content\n",
+ "+more content\n" ]
+
+ os.chdir(sbox.wc_dir)
+
+ # Create directory X and file X/bar, and commit them (r2).
+ os.makedirs('X')
+ svntest.main.file_append(os.path.join('X', 'bar'), "content\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'add', 'X')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log_msg')
+
+ # Make a local modification to X/bar.
+ svntest.main.file_append(os.path.join('X', 'bar'), "more content\n")
+
+ # Now, if we diff r1 to WORKING or BASE, we should see the content
+ # addition for X/bar, and (for WORKING) the local modification.
+ svntest.actions.run_and_verify_svn(expected_output_r1_BASE, [],
+ 'diff', '-r', '1:BASE')
+ svntest.actions.run_and_verify_svn(expected_output_r1_WORKING, [],
+ 'diff', '-r', '1')
+
+
+#----------------------------------------------------------------------
+# A base->repos diff of a moved file used to output an all-lines-deleted diff
+def diff_base_repos_moved(sbox):
+ "base->repos diff of moved file"
+
+ sbox.build()
+
+ os.chdir(sbox.wc_dir)
+
+ oldfile = 'iota'
+ newfile = 'kappa'
+
+ # Move, modify and commit a file
+ svntest.main.run_svn(None, 'mv', oldfile, newfile)
+ svntest.main.file_write(newfile, "new content\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', '')
+
+ # Check that a base->repos diff with copyfrom shows deleted and added lines.
+ exit_code, out, err = svntest.actions.run_and_verify_svn(
+ svntest.verify.AnyOutput, [], 'diff', '-rBASE:1', newfile)
+
+ if check_diff_output(out, newfile, 'M'):
+ raise svntest.Failure
+
+ # Diff should recognise that the item's name has changed, and mention both
+ # the current and the old name in parentheses, in the right order.
+ # (out[2]/out[3] are the "---"/"+++" label lines of the diff header.)
+ if (out[2][:3] != '---' or out[2].find('kappa)') == -1 or
+ out[3][:3] != '+++' or out[3].find('iota)') == -1):
+ raise svntest.Failure
+
+#----------------------------------------------------------------------
+# A diff of an added file within an added directory should work, and
+# shouldn't produce an error.
+def diff_added_subtree(sbox):
+ "wc->repos diff of added subtree"
+
+ sbox.build()
+
+ os.chdir(sbox.wc_dir)
+
+ # Roll the wc back to r0 (i.e. an empty wc).
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '-r0')
+
+ # We shouldn't get any errors when we request a diff showing the
+ # addition of the greek tree. The diff contains additions of files
+ # and directories with parents that don't currently exist in the wc,
+ # which is what we're testing here.
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'diff', '-r', 'BASE:1')
+
+#----------------------------------------------------------------------
+def basic_diff_summarize(sbox):
+ "basic diff summarize"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ p = sbox.ospath
+
+ # Diff summarize of a newly added file
+ expected_diff = svntest.wc.State(wc_dir, {
+ 'iota': Item(status='A '),
+ })
+ svntest.actions.run_and_verify_diff_summarize(expected_diff,
+ p('iota'), '-c1')
+
+ # Reverse summarize diff of a newly added file
+ expected_diff = svntest.wc.State(wc_dir, {
+ 'iota': Item(status='D '),
+ })
+ svntest.actions.run_and_verify_diff_summarize(expected_diff,
+ p('iota'), '-c-1')
+
+ # Diff summarize of a newly added directory
+ expected_diff = svntest.wc.State(wc_dir, {
+ 'A/D': Item(status='A '),
+ 'A/D/gamma': Item(status='A '),
+ 'A/D/H': Item(status='A '),
+ 'A/D/H/chi': Item(status='A '),
+ 'A/D/H/psi': Item(status='A '),
+ 'A/D/H/omega': Item(status='A '),
+ 'A/D/G': Item(status='A '),
+ 'A/D/G/pi': Item(status='A '),
+ 'A/D/G/rho': Item(status='A '),
+ 'A/D/G/tau': Item(status='A '),
+ })
+ svntest.actions.run_and_verify_diff_summarize(expected_diff,
+ p('A/D'), '-c1')
+
+ # Reverse summarize diff of a newly added directory
+ expected_diff = svntest.wc.State(wc_dir, {
+ 'A/D': Item(status='D '),
+ 'A/D/gamma': Item(status='D '),
+ 'A/D/H': Item(status='D '),
+ 'A/D/H/chi': Item(status='D '),
+ 'A/D/H/psi': Item(status='D '),
+ 'A/D/H/omega': Item(status='D '),
+ 'A/D/G': Item(status='D '),
+ 'A/D/G/pi': Item(status='D '),
+ 'A/D/G/rho': Item(status='D '),
+ 'A/D/G/tau': Item(status='D '),
+ })
+ svntest.actions.run_and_verify_diff_summarize(expected_diff,
+ p('A/D'), '-c-1')
+
+ # Add props to some items that will be deleted, and commit.
+ sbox.simple_propset('prop', 'val',
+ 'A/C',
+ 'A/D/gamma',
+ 'A/D/H/chi')
+ sbox.simple_commit() # r2
+ sbox.simple_update()
+
+ # Now build up every kind of change for r3: content mods, prop mods,
+ # combined mods, file/dir additions and file/dir deletions.
+
+ # Content modification.
+ svntest.main.file_append(p('A/mu'), 'new text\n')
+
+ # Prop modification.
+ sbox.simple_propset('prop', 'val', 'iota')
+
+ # Both content and prop mods.
+ svntest.main.file_append(p('A/D/G/tau'), 'new text\n')
+ sbox.simple_propset('prop', 'val', 'A/D/G/tau')
+
+ # File addition.
+ svntest.main.file_append(p('newfile'), 'new text\n')
+ svntest.main.file_append(p('newfile2'), 'new text\n')
+ sbox.simple_add('newfile',
+ 'newfile2')
+ sbox.simple_propset('prop', 'val', 'newfile')
+
+ # File deletion.
+ sbox.simple_rm('A/B/lambda',
+ 'A/D/gamma')
+
+ # Directory addition.
+ os.makedirs(p('P'))
+ os.makedirs(p('Q/R'))
+ svntest.main.file_append(p('Q/newfile'), 'new text\n')
+ svntest.main.file_append(p('Q/R/newfile'), 'new text\n')
+ sbox.simple_add('P',
+ 'Q')
+ sbox.simple_propset('prop', 'val',
+ 'P',
+ 'Q/newfile')
+
+ # Directory deletion.
+ sbox.simple_rm('A/D/H',
+ 'A/C')
+
+ # Commit, because diff-summarize handles repos-repos only.
+ #svntest.main.run_svn(False, 'st', wc_dir)
+ sbox.simple_commit() # r3
+
+ # Get the differences between two versions of a file.
+ expected_diff = svntest.wc.State(wc_dir, {
+ 'iota': Item(status=' M'),
+ })
+ svntest.actions.run_and_verify_diff_summarize(expected_diff,
+ p('iota'), '-c3')
+ svntest.actions.run_and_verify_diff_summarize(expected_diff,
+ p('iota'), '-c-3')
+
+ # wc-wc diff summary for a directory.
+ expected_diff = svntest.wc.State(wc_dir, {
+ 'A/mu': Item(status='M '),
+ 'iota': Item(status=' M'),
+ 'A/D/G/tau': Item(status='MM'),
+ 'newfile': Item(status='A '),
+ 'newfile2': Item(status='A '),
+ 'P': Item(status='A '),
+ 'Q': Item(status='A '),
+ 'Q/newfile': Item(status='A '),
+ 'Q/R': Item(status='A '),
+ 'Q/R/newfile': Item(status='A '),
+ 'A/B/lambda': Item(status='D '),
+ 'A/C': Item(status='D '),
+ 'A/D/gamma': Item(status='D '),
+ 'A/D/H': Item(status='D '),
+ 'A/D/H/chi': Item(status='D '),
+ 'A/D/H/psi': Item(status='D '),
+ 'A/D/H/omega': Item(status='D '),
+ })
+
+ # Reverse direction: additions and deletions swap; mods stay mods.
+ expected_reverse_diff = svntest.wc.State(wc_dir, {
+ 'A/mu': Item(status='M '),
+ 'iota': Item(status=' M'),
+ 'A/D/G/tau': Item(status='MM'),
+ 'newfile': Item(status='D '),
+ 'newfile2': Item(status='D '),
+ 'P': Item(status='D '),
+ 'Q': Item(status='D '),
+ 'Q/newfile': Item(status='D '),
+ 'Q/R': Item(status='D '),
+ 'Q/R/newfile': Item(status='D '),
+ 'A/B/lambda': Item(status='A '),
+ 'A/C': Item(status='A '),
+ 'A/D/gamma': Item(status='A '),
+ 'A/D/H': Item(status='A '),
+ 'A/D/H/chi': Item(status='A '),
+ 'A/D/H/psi': Item(status='A '),
+ 'A/D/H/omega': Item(status='A '),
+ })
+
+ svntest.actions.run_and_verify_diff_summarize(expected_diff,
+ wc_dir, '-c3')
+ svntest.actions.run_and_verify_diff_summarize(expected_reverse_diff,
+ wc_dir, '-c-3')
+
+ # Get the differences between a deep newly added dir Issue(4421)
+ expected_diff = svntest.wc.State(wc_dir, {
+ 'Q/R' : Item(status='A '),
+ 'Q/R/newfile' : Item(status='A '),
+ })
+ expected_reverse_diff = svntest.wc.State(wc_dir, {
+ 'Q/R' : Item(status='D '),
+ 'Q/R/newfile' : Item(status='D '),
+ })
+ svntest.actions.run_and_verify_diff_summarize(expected_diff,
+ p('Q/R'), '-c3')
+ svntest.actions.run_and_verify_diff_summarize(expected_reverse_diff,
+ p('Q/R'), '-c-3')
+
+#----------------------------------------------------------------------
+def diff_weird_author(sbox):
+ "diff with svn:author that has < in it"
+
+ sbox.build()
+
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+
+ svntest.main.file_write(sbox.ospath('A/mu'),
+ "new content\n")
+
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ 'A/mu': Item(verb='Sending'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ expected_status.tweak("A/mu", wc_rev=2)
+
+ svntest.actions.run_and_verify_commit(sbox.wc_dir, expected_output,
+ expected_status)
+
+ # Rewrite r2's svn:author to a value containing '<' and '>'.
+ svntest.main.run_svn(None,
+ "propset", "--revprop", "-r", "2", "svn:author",
+ "J. Random <jrandom@example.com>", sbox.repo_url)
+
+ # NOTE: "-r" "2" below relies on implicit string concatenation,
+ # yielding the single argument "-r2".
+ svntest.actions.run_and_verify_svn(["J. Random <jrandom@example.com>\n"],
+ [],
+ "pget", "--revprop", "-r" "2",
+ "svn:author", sbox.repo_url)
+
+ expected_output = make_diff_header("A/mu", "revision 1", "revision 2") + [
+ "@@ -1 +1 @@\n",
+ "-This is the file 'mu'.\n",
+ "+new content\n"
+ ]
+
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'diff', '-r1:2', sbox.repo_url)
+
+# test for issue 2121, use -x -w option for ignoring whitespace during diff
+@Issue(2121)
+def diff_ignore_whitespace(sbox):
+ "ignore whitespace when diffing"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ file_name = "iota"
+ file_path = os.path.join(wc_dir, file_name)
+
+ # Commit a known baseline for iota (r2).
+ svntest.main.file_write(file_path,
+ "Aa\n"
+ "Bb\n"
+ "Cc\n")
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ None,)
+
+ # only whitespace changes, should return no changes
+ svntest.main.file_write(file_path,
+ " A a \n"
+ " B b \n"
+ " C c \n")
+
+ svntest.actions.run_and_verify_svn([], [],
+ 'diff', '-x', '-w', file_path)
+
+ # some changes + whitespace
+ svntest.main.file_write(file_path,
+ " A a \n"
+ "Xxxx X\n"
+ " Bb b \n"
+ " C c \n")
+ expected_output = make_diff_header(file_path, "revision 2",
+ "working copy") + [
+ "@@ -1,3 +1,4 @@\n",
+ " Aa\n",
+ "-Bb\n",
+ "+Xxxx X\n",
+ "+ Bb b \n",
+ " Cc\n" ]
+
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'diff', '-x', '-w', file_path)
+
+def diff_ignore_eolstyle(sbox):
+ "ignore eol styles when diffing"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ file_name = "iota"
+ file_path = os.path.join(wc_dir, file_name)
+
+ # Commit a known baseline for iota (r2).
+ svntest.main.file_write(file_path,
+ "Aa\n"
+ "Bb\n"
+ "Cc\n")
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ None)
+
+ # commit only eol changes
+ svntest.main.file_write(file_path,
+ "Aa\r"
+ "Bb\r"
+ "Cc")
+
+ # With eol differences ignored, only the lost trailing newline on the
+ # last line remains visible in the diff.
+ expected_output = make_diff_header(file_path, "revision 2",
+ "working copy") + [
+ "@@ -1,3 +1,3 @@\n",
+ " Aa\n",
+ " Bb\n",
+ "-Cc\n",
+ "+Cc\n",
+ "\ No newline at end of file\n" ]
+
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'diff', '-x', '--ignore-eol-style',
+ file_path)
+
+# test for issue 2600, diff revision of a file in a renamed folder
+@Issue(2600)
+def diff_in_renamed_folder(sbox):
+  "diff a revision of a file in a renamed folder"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  C_path = sbox.ospath('A/C')
+  D_path = sbox.ospath('A/D')
+  kappa_path = os.path.join(D_path, "C", "kappa")
+
+  # add a new file to a renamed (moved in this case) folder.
+  svntest.main.run_svn(None, 'mv', C_path, D_path)
+
+  svntest.main.file_append(kappa_path, "this is file kappa.\n")
+  svntest.main.run_svn(None, 'add', kappa_path)
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/C' : Item(verb='Deleting'),
+    'A/D/C' : Item(verb='Adding'),
+    'A/D/C/kappa' : Item(verb='Adding'),
+  })
+  ### right now, we cannot denote that kappa is a local-add rather than a
+  ### child of the A/D/C copy. thus, it appears in the status output as a
+  ### (M)odified child.
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        None)
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/D/C/kappa' : Item(verb='Sending'),
+  })
+
+  # modify the file two times so we have something to diff.
+  # Each iteration appends one line and commits it, creating r3 and r4.
+  for i in range(3, 5):
+    svntest.main.file_append(kappa_path, str(i) + "\n")
+    svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                          None)
+
+  expected_output = make_diff_header(kappa_path, "revision 3",
+                                     "revision 4") + [
+    "@@ -1,2 +1,3 @@\n",
+    " this is file kappa.\n",
+    " 3\n",
+    "+4\n"
+  ]
+
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'diff', '-r3:4', kappa_path)
+
+def diff_with_depth(sbox):
+  "test diffs at various depths"
+
+  # Exercises wc-wc, repos-repos and wc-repos diffs with each of the four
+  # --depth values, using property changes at '.', 'iota', 'A' and 'A/B'
+  # so that each depth level contributes exactly one more diff chunk.
+  sbox.build()
+  os.chdir(sbox.wc_dir)
+  sbox.wc_dir = ''
+  B_path = os.path.join('A', 'B')
+
+  sbox.simple_propset('foo1', 'bar1', '.')
+  sbox.simple_propset('foo2', 'bar2', 'iota')
+  sbox.simple_propset('foo3', 'bar3', 'A')
+  sbox.simple_propset('foo4', 'bar4', 'A/B')
+
+  def create_expected_diffs(r1, r2):
+    # Build the per-depth expected outputs for the prop additions above.
+    diff_dot = \
+      make_diff_header(".", r1, r2) + \
+      make_diff_prop_header(".") + \
+      make_diff_prop_added("foo1", "bar1")
+    diff_iota = \
+      make_diff_header('iota', r1, r2) + \
+      make_diff_prop_header("iota") + \
+      make_diff_prop_added("foo2", "bar2")
+    diff_A = \
+      make_diff_header('A', r1, r2) + \
+      make_diff_prop_header("A") + \
+      make_diff_prop_added("foo3", "bar3")
+    diff_AB = \
+      make_diff_header(B_path, r1, r2) + \
+      make_diff_prop_header("A/B") + \
+      make_diff_prop_added("foo4", "bar4")
+
+    expected = {}
+    expected['empty'] = svntest.verify.UnorderedOutput(diff_dot)
+    expected['files'] = svntest.verify.UnorderedOutput(diff_dot +
+                                                       diff_iota)
+    expected['immediates'] = svntest.verify.UnorderedOutput(diff_dot +
+                                                            diff_iota +
+                                                            diff_A)
+    expected['infinity'] = svntest.verify.UnorderedOutput(diff_dot +
+                                                          diff_iota +
+                                                          diff_A +
+                                                          diff_AB)
+    return expected
+
+  # Test wc-wc diff.
+  expected_diffs = create_expected_diffs("revision 1", "working copy")
+  for depth in ['empty', 'files', 'immediates', 'infinity']:
+    svntest.actions.run_and_verify_svn(expected_diffs[depth], [],
+                                       'diff', '--depth', depth)
+
+  # Commit the changes.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'ci', '-m', '')
+
+  # Test repos-repos diff.
+  expected_diffs = create_expected_diffs("revision 1", "revision 2")
+  for depth in ['empty', 'files', 'immediates', 'infinity']:
+    svntest.actions.run_and_verify_svn(expected_diffs[depth], [],
+                                       'diff', '-c2', '--depth', depth)
+
+  def create_expected_repos_wc_diffs():
+    # Expected outputs for diffing r2 (props = barN) against a working
+    # copy at r1 carrying new local mods (props = bazN, text appends).
+    diff_AB = \
+      make_diff_header("A/B", "revision 2", "working copy") + \
+      make_diff_prop_header("A/B") + \
+      make_diff_prop_modified("foo4", "bar4", "baz4")
+    diff_A = \
+      make_diff_header("A", "revision 2", "working copy") + \
+      make_diff_prop_header("A") + \
+      make_diff_prop_modified("foo3", "bar3", "baz3")
+    diff_mu = \
+      make_diff_header("A/mu", "revision 2", "working copy") + [
+      "@@ -1 +1,2 @@\n",
+      " This is the file 'mu'.\n",
+      "+new text\n",]
+    diff_iota = \
+      make_diff_header("iota", "revision 2", "working copy") + [
+      "@@ -1 +1,2 @@\n",
+      " This is the file 'iota'.\n",
+      "+new text\n",
+      ] + make_diff_prop_header("iota") + \
+      make_diff_prop_modified("foo2", "bar2", "baz2")
+    diff_dot = \
+      make_diff_header(".", "revision 2", "working copy") + \
+      make_diff_prop_header(".") + \
+      make_diff_prop_modified("foo1", "bar1", "baz1")
+
+    expected = {}
+    expected['empty'] = svntest.verify.UnorderedOutput(diff_dot)
+    expected['files'] = svntest.verify.UnorderedOutput(diff_iota +
+                                                       diff_dot)
+    expected['immediates'] = svntest.verify.UnorderedOutput(diff_A +
+                                                            diff_iota +
+                                                            diff_dot)
+    expected['infinity'] = svntest.verify.UnorderedOutput(diff_AB +
+                                                          diff_A +
+                                                          diff_mu +
+                                                          diff_iota +
+                                                          diff_dot)
+    return expected
+
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'up', '-r1')
+
+  sbox.simple_propset('foo1', 'baz1', '.')
+  sbox.simple_propset('foo2', 'baz2', 'iota')
+  sbox.simple_propset('foo3', 'baz3', 'A')
+  sbox.simple_propset('foo4', 'baz4', 'A/B')
+  svntest.main.file_append(os.path.join('A', 'mu'), "new text\n")
+  svntest.main.file_append('iota', "new text\n")
+
+  # Test wc-repos diff.
+  expected_diffs = create_expected_repos_wc_diffs()
+  for depth in ['empty', 'files', 'immediates', 'infinity']:
+    svntest.actions.run_and_verify_svn(expected_diffs[depth], [],
+                                       'diff', '-rHEAD', '--depth', depth)
+
+# test for issue 2920: ignore eol-style on empty lines
+@Issue(2920)
+def diff_ignore_eolstyle_empty_lines(sbox):
+  "ignore eol styles when diffing empty lines"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  file_name = "iota"
+  file_path = os.path.join(wc_dir, file_name)
+
+  # Commit a baseline containing empty lines, creating r2.
+  svntest.main.file_write(file_path,
+                          "Aa\n"
+                          "\n"
+                          "Bb\n"
+                          "\n"
+                          "Cc\n")
+  expected_output = svntest.wc.State(wc_dir, {
+      'iota' : Item(verb='Sending'),
+      })
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        None)
+
+  # sleep to guarantee timestamp change
+  time.sleep(1.1)
+
+  # commit only eol changes
+  # Mix LF (\012) and CR (\r) line endings, including on the empty lines;
+  # binary mode prevents the platform from normalizing them on write.
+  svntest.main.file_write(file_path,
+                          "Aa\012"
+                          "\012"
+                          "Bb\r"
+                          "\r"
+                          "Cc\012",
+                          mode="wb")
+
+  # With --ignore-eol-style there must be no diff at all.
+  svntest.actions.run_and_verify_svn([], [],
+                                     'diff', '-x', '--ignore-eol-style',
+                                     file_path)
+
+def diff_backward_repos_wc_copy(sbox):
+  "backward repos->wc diff with copied file"
+
+  # Diffing a newer revision (r2, which contains a copy) against an older
+  # working copy (r1) must report the copied file as a deletion.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  os.chdir(wc_dir)
+
+  # copy a file
+  mu_path = os.path.join('A', 'mu')
+  mucp_path = os.path.join('A', 'mucopy')
+  svntest.main.run_svn(None, 'cp', mu_path, mucp_path)
+
+  # commit r2 and update back to r1
+  svntest.main.run_svn(None,
+                       'ci', '-m', 'log msg')
+  svntest.main.run_svn(None, 'up', '-r1')
+
+  # diff r2 against working copy
+  diff_repos_wc = make_diff_header("A/mucopy", "revision 2", "nonexistent")
+  diff_repos_wc += [
+    "@@ -1 +0,0 @@\n",
+    "-This is the file 'mu'.\n",
+  ]
+
+  svntest.actions.run_and_verify_svn(diff_repos_wc, [],
+                                     'diff', '-r' , '2')
+
+#----------------------------------------------------------------------
+
+def diff_summarize_xml(sbox):
+  "xml diff summarize"
+
+  # Sets up one example of each change kind (text mod, prop mod, both,
+  # file add, file delete, dir add), then checks 'diff --summarize --xml'
+  # output for wc, revision-range and URL targets.
+  # NOTE: the numbered sub-tests below are deliberately not executed in
+  # numerical order; test 3 (working copy summarize) must run before the
+  # changes are committed.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # A content modification.
+  svntest.main.file_append(sbox.ospath('A/mu'), "New mu content")
+
+  # A prop modification.
+  svntest.main.run_svn(None,
+                       "propset", "prop", "val",
+                       sbox.ospath('iota'))
+
+  # Both content and prop mods.
+  tau_path = sbox.ospath('A/D/G/tau')
+  svntest.main.file_append(tau_path, "tautau")
+  svntest.main.run_svn(None,
+                       "propset", "prop", "val", tau_path)
+
+  # A file addition.
+  newfile_path = sbox.ospath('newfile')
+  svntest.main.file_append(newfile_path, 'newfile')
+  svntest.main.run_svn(None, 'add', newfile_path)
+
+  # A file deletion.
+  svntest.main.run_svn(None, "delete", os.path.join(wc_dir, 'A', 'B',
+                                                    'lambda'))
+
+  # A directory addition
+  svntest.main.run_svn(None, "mkdir", sbox.ospath('newdir'))
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/mu': Item(verb='Sending'),
+    'iota': Item(verb='Sending'),
+    'newfile': Item(verb='Adding'),
+    'A/D/G/tau': Item(verb='Sending'),
+    'A/B/lambda': Item(verb='Deleting'),
+    'newdir': Item(verb='Adding'),
+    })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'newfile': Item(status='  ', wc_rev=2),
+    'newdir': Item(status='  ', wc_rev=2),
+    })
+  expected_status.tweak("A/mu", "iota", "A/D/G/tau", "newfile", "newdir",
+                        wc_rev=2)
+  expected_status.remove("A/B/lambda")
+
+  # 3) Test working copy summarize
+  # Parallel lists: paths[i] has item-change items[i], node kind kinds[i]
+  # and property-change props[i].
+  paths = ['A/mu', 'iota', 'A/D/G/tau', 'newfile', 'A/B/lambda',
+           'newdir',]
+  items = ['modified', 'none', 'modified', 'added', 'deleted', 'added',]
+  kinds = ['file','file','file','file','file', 'dir',]
+  props = ['none', 'modified', 'modified', 'none', 'none', 'none',]
+
+  svntest.actions.run_and_verify_diff_summarize_xml(
+    [], wc_dir, paths, items, props, kinds, wc_dir)
+
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  # 1) Test --xml without --summarize
+  svntest.actions.run_and_verify_svn(
+    None, ".*--xml' option only valid with '--summarize' option",
+    'diff', wc_dir, '--xml')
+
+  # 2) Test --xml on invalid revision
+  svntest.actions.run_and_verify_diff_summarize_xml(
+    ".*No such revision 5555555",
+    None, wc_dir, None, None, None, '-r0:5555555', wc_dir)
+
+  # 4) Test --summarize --xml on -c2
+  paths_iota = ['iota',]
+  items_iota = ['none',]
+  kinds_iota = ['file',]
+  props_iota = ['modified',]
+
+  svntest.actions.run_and_verify_diff_summarize_xml(
+    [], wc_dir, paths_iota, items_iota, props_iota, kinds_iota, '-c2',
+    sbox.ospath('iota'))
+
+  # 5) Test --summarize --xml on -r1:2
+  svntest.actions.run_and_verify_diff_summarize_xml(
+    [], wc_dir, paths, items, props, kinds, '-r1:2', wc_dir)
+
+  # 6) Same as test #5 but ran against a URL instead of a WC path
+  svntest.actions.run_and_verify_diff_summarize_xml(
+    [], sbox.repo_url, paths, items, props, kinds, '-r1:2', sbox.repo_url)
+
+def diff_file_depth_empty(sbox):
+  "svn diff --depth=empty FILE_WITH_LOCAL_MODS"
+  # The bug was that no diff output would be generated. Check that some is.
+  sbox.build()
+  iota_path = sbox.ospath('iota')
+  svntest.main.file_append(iota_path, "new text in iota")
+  exit_code, out, err = svntest.main.run_svn(None, 'diff',
+                                             '--depth', 'empty', iota_path)
+  if err:
+    raise svntest.Failure
+  # A real diff has at least the Index line, separator, and hunk lines;
+  # fewer than 4 output lines means no diff body was produced.
+  if len(out) < 4:
+    raise svntest.Failure
+
+# This used to abort with ra_serf.
+def diff_wrong_extension_type(sbox):
+  "'svn diff -x wc -r#' should return error"
+
+  # Passing a path as the -x (extension) argument is invalid; expect the
+  # invalid-diff-option error rather than a crash.
+  # NOTE(review): 'err' here is assumed to be the svntest error-regex
+  # module imported at the top of this file — confirm against the imports.
+  sbox.build(read_only = True)
+  svntest.actions.run_and_verify_svn([], err.INVALID_DIFF_OPTION,
+                                     'diff', '-x', sbox.wc_dir, '-r', '1')
+
+# Check the order of the arguments for an external diff tool
+def diff_external_diffcmd(sbox):
+  "svn diff --diff-cmd provides the correct arguments"
+
+  sbox.build(read_only = True)
+  os.chdir(sbox.wc_dir)
+
+  iota_path = 'iota'
+  svntest.main.file_append(iota_path, "new text in iota")
+
+  # Create a small diff mock object that prints its arguments to stdout.
+  # (This path needs an explicit directory component to avoid searching.)
+  diff_script_path = os.path.join('.', 'diff')
+  # TODO: make the create function return the actual script name, and rename
+  # it to something more generic.
+  svntest.main.create_python_hook_script(diff_script_path, 'import sys\n'
+    'for arg in sys.argv[1:]:\n  print(arg)\n')
+  if sys.platform == 'win32':
+    diff_script_path = "%s.bat" % diff_script_path
+
+  # The mock prints one argument per line, so the expected output is the
+  # exact argument vector svn passes to the external diff command.
+  expected_output = svntest.verify.ExpectedOutput([
+    "Index: iota\n",
+    "===================================================================\n",
+    "-u\n",
+    "-L\n",
+    "iota\t(revision 1)\n",
+    "-L\n",
+    "iota\t(working copy)\n",
+    os.path.abspath(svntest.wc.text_base_path("iota")) + "\n",
+    os.path.abspath("iota") + "\n"])
+
+  # Check that the output of diff corresponds with the expected arguments,
+  # in the correct order.
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'diff', '--diff-cmd', diff_script_path,
+                                     iota_path)
+
+
+#----------------------------------------------------------------------
+# Diffing an unrelated repository URL against working copy with
+# local modifications (i.e. not committed). This is issue #3295 (diff
+# local changes against arbitrary URL@REV ignores local add).
+
+# Helper
+def make_file_edit_del_add(dir):
+  "make a file mod (M), a deletion (D) and an addition (A)."
+  # 'dir' is a working-copy directory laid out like the greek tree's 'A'
+  # (it must contain B/E/alpha and B/E/beta).  Note: 'dir' shadows the
+  # Python builtin of the same name.
+  alpha = os.path.join(dir, 'B', 'E', 'alpha')
+  beta = os.path.join(dir, 'B', 'E', 'beta')
+  theta = os.path.join(dir, 'B', 'E', 'theta')
+
+  # modify alpha, remove beta and add theta.
+  svntest.main.file_append(alpha, "Edited file alpha.\n")
+  svntest.main.run_svn(None, 'remove', beta)
+  svntest.main.file_append(theta, "Created file theta.\n")
+
+  svntest.main.run_svn(None, 'add', theta)
+
+
+@Issue(3295)
+def diff_url_against_local_mods(sbox):
+  "diff URL against working copy with local mods"
+
+  # A and a copy A2 receive the same edit/delete/add set — committed in A,
+  # left local in A2 — so diffing one against the other must be empty.
+  sbox.build()
+  os.chdir(sbox.wc_dir)
+
+  A = 'A'
+  A_url = sbox.repo_url + '/A'
+
+  # First, just make a copy.
+  A2 = 'A2'
+  A2_url = sbox.repo_url + '/A2'
+
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'cp', '-m', 'log msg',
+                                     A_url, A2_url)
+
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'up')
+
+  # In A, add, remove and change a file, and commit.
+  make_file_edit_del_add(A)
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'ci', '-m', 'committing A')
+
+  # In A2, do the same changes but leave uncommitted.
+  make_file_edit_del_add(A2)
+
+  # Diff Path of A against working copy of A2.
+  # Output using arbitrary diff handling should be empty.
+  expected_output = []
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'diff', '--old', A, '--new', A2)
+
+  # Diff URL of A against working copy of A2. Output should be empty.
+  expected_output = []
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'diff', '--old', A_url, '--new', A2)
+
+
+#----------------------------------------------------------------------
+# Diff against old revision of the parent directory of a removed and
+# locally re-added file.
+@Issue(3797)
+def diff_preexisting_rev_against_local_add(sbox):
+  "diff -r1 of dir with removed-then-readded file"
+  # beta exists in r1, is deleted in r2, then re-added locally with new
+  # content; diff -r1 must show both the old removal and the new addition.
+  sbox.build()
+  os.chdir(sbox.wc_dir)
+
+  beta = os.path.join('A', 'B', 'E', 'beta')
+
+  # remove
+  svntest.main.run_svn(None, 'remove', beta)
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'ci', '-m', 'removing beta')
+
+  # re-add, without committing
+  svntest.main.file_append(beta, "Re-created file beta.\n")
+  svntest.main.run_svn(None, 'add', beta)
+
+  # diff against -r1, the diff should show both removal and re-addition
+  exit_code, diff_output, err_output = svntest.main.run_svn(
+                        None, 'diff', '-r1', 'A')
+
+  verify_expected_output(diff_output, "-This is the file 'beta'.")
+  verify_expected_output(diff_output, "+Re-created file beta.")
+
+def diff_git_format_wc_wc(sbox):
+  "create a diff in git unidiff format for wc-wc"
+  # Covers text mod, add, delete, copy-without-edit and copy-with-edit in
+  # a single 'svn diff --git' run over the working copy.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  iota_path = sbox.ospath('iota')
+  mu_path = sbox.ospath('A/mu')
+  new_path = sbox.ospath('new')
+  lambda_path = sbox.ospath('A/B/lambda')
+  lambda_copied_path = sbox.ospath('A/B/lambda_copied')
+  alpha_path = sbox.ospath('A/B/E/alpha')
+  alpha_copied_path = sbox.ospath('A/B/E/alpha_copied')
+
+  svntest.main.file_append(iota_path, "Changed 'iota'.\n")
+  svntest.main.file_append(new_path, "This is the file 'new'.\n")
+  svntest.main.run_svn(None, 'add', new_path)
+  svntest.main.run_svn(None, 'rm', mu_path)
+  svntest.main.run_svn(None, 'cp', lambda_path, lambda_copied_path)
+  svntest.main.run_svn(None, 'cp', alpha_path, alpha_copied_path)
+  svntest.main.file_append(alpha_copied_path, "This is a copy of 'alpha'.\n")
+
+  ### We're not testing moved paths
+
+  expected_output = make_git_diff_header(
+                         alpha_copied_path, "A/B/E/alpha_copied",
+                         "revision 1", "working copy",
+                         copyfrom_path="A/B/E/alpha",
+                         copyfrom_rev='1', cp=True,
+                         text_changes=True) + [
+    "@@ -1 +1,2 @@\n",
+    " This is the file 'alpha'.\n",
+    "+This is a copy of 'alpha'.\n",
+  ] + make_git_diff_header(lambda_copied_path,
+                                         "A/B/lambda_copied",
+                                         "revision 1", "working copy",
+                                         copyfrom_path="A/B/lambda",
+                                         copyfrom_rev='1', cp=True,
+                                         text_changes=False) \
+  + make_git_diff_header(mu_path, "A/mu", "revision 1",
+                                         "nonexistent",
+                                         delete=True) + [
+    "@@ -1 +0,0 @@\n",
+    "-This is the file 'mu'.\n",
+  ] + make_git_diff_header(iota_path, "iota", "revision 1",
+                            "working copy") + [
+    "@@ -1 +1,2 @@\n",
+    " This is the file 'iota'.\n",
+    "+Changed 'iota'.\n",
+  ] + make_git_diff_header(new_path, "new", "nonexistent",
+                           "working copy", add=True) + [
+    "@@ -0,0 +1 @@\n",
+    "+This is the file 'new'.\n",
+  ]
+
+  expected = expected_output
+
+  svntest.actions.run_and_verify_svn(expected, [], 'diff',
+                                     '--git', wc_dir)
+
+@Issue(4294)
+def diff_git_format_wc_wc_dir_mv(sbox):
+  "create a diff in git unidiff format for wc dir mv"
+  # A moved directory must show up as per-file deletes from the old
+  # location plus contentless copies at the new location.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  g_path = sbox.ospath('A/D/G')
+  g2_path = sbox.ospath('A/D/G2')
+  pi_path = sbox.ospath('A/D/G/pi')
+  rho_path = sbox.ospath('A/D/G/rho')
+  tau_path = sbox.ospath('A/D/G/tau')
+  new_pi_path = sbox.ospath('A/D/G2/pi')
+  new_rho_path = sbox.ospath('A/D/G2/rho')
+  new_tau_path = sbox.ospath('A/D/G2/tau')
+
+  svntest.main.run_svn(None, 'mv', g_path, g2_path)
+
+  expected_output = make_git_diff_header(pi_path, "A/D/G/pi",
+                                         "revision 1", "nonexistent",
+                                         delete=True) \
+  + [
+    "@@ -1 +0,0 @@\n",
+    "-This is the file 'pi'.\n"
+  ] + make_git_diff_header(rho_path, "A/D/G/rho",
+                           "revision 1", "nonexistent",
+                           delete=True) \
+  + [
+    "@@ -1 +0,0 @@\n",
+    "-This is the file 'rho'.\n"
+  ] + make_git_diff_header(tau_path, "A/D/G/tau",
+                           "revision 1", "nonexistent",
+                           delete=True) \
+  + [
+    "@@ -1 +0,0 @@\n",
+    "-This is the file 'tau'.\n"
+  ] + make_git_diff_header(new_pi_path, "A/D/G2/pi", None, None, cp=True,
+                           copyfrom_path="A/D/G/pi", copyfrom_rev='1', text_changes=False) \
+  + make_git_diff_header(new_rho_path, "A/D/G2/rho", None, None, cp=True,
+                         copyfrom_path="A/D/G/rho", copyfrom_rev='1', text_changes=False) \
+  + make_git_diff_header(new_tau_path, "A/D/G2/tau", None, None, cp=True,
+                         copyfrom_path="A/D/G/tau", copyfrom_rev='1', text_changes=False)
+
+  expected = expected_output
+
+  svntest.actions.run_and_verify_svn(expected, [], 'diff',
+                                     '--git', wc_dir)
+
+def diff_git_format_url_wc(sbox):
+  "create a diff in git unidiff format for url-wc"
+  # Diff repo URL @1 (--old) against the updated working copy (--new).
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  repo_url = sbox.repo_url
+  iota_path = sbox.ospath('iota')
+  mu_path = sbox.ospath('A/mu')
+  new_path = sbox.ospath('new')
+  svntest.main.file_append(iota_path, "Changed 'iota'.\n")
+  svntest.main.file_append(new_path, "This is the file 'new'.\n")
+  svntest.main.run_svn(None, 'add', new_path)
+  svntest.main.run_svn(None, 'rm', mu_path)
+
+  ### We're not testing copied or moved paths
+
+  svntest.main.run_svn(None, 'commit', '-m', 'Committing changes', wc_dir)
+  svntest.main.run_svn(None, 'up', wc_dir)
+
+  expected_output = make_git_diff_header(new_path, "new", "nonexistent",
+                                         "revision 2", add=True) + [
+    "@@ -0,0 +1 @@\n",
+    "+This is the file 'new'.\n",
+  ] + make_git_diff_header(mu_path, "A/mu", "revision 1", "nonexistent",
+                           delete=True) + [
+    "@@ -1 +0,0 @@\n",
+    "-This is the file 'mu'.\n",
+  ] + make_git_diff_header(iota_path, "iota", "revision 1",
+                           "working copy") + [
+    "@@ -1 +1,2 @@\n",
+    " This is the file 'iota'.\n",
+    "+Changed 'iota'.\n",
+  ]
+
+  # The three per-file diffs may appear in any order.
+  expected = svntest.verify.UnorderedOutput(expected_output)
+
+  svntest.actions.run_and_verify_svn(expected, [], 'diff',
+                                     '--git',
+                                     '--old', repo_url + '@1', '--new',
+                                     wc_dir)
+
+def diff_git_format_url_url(sbox):
+  "create a diff in git unidiff format for url-url"
+  # Same change set as the url-wc test, but diffing URL@1 against URL@2.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  repo_url = sbox.repo_url
+  iota_path = sbox.ospath('iota')
+  mu_path = sbox.ospath('A/mu')
+  new_path = sbox.ospath('new')
+  svntest.main.file_append(iota_path, "Changed 'iota'.\n")
+  svntest.main.file_append(new_path, "This is the file 'new'.\n")
+  svntest.main.run_svn(None, 'add', new_path)
+  svntest.main.run_svn(None, 'rm', mu_path)
+
+  ### We're not testing copied or moved paths. When we do, we will not be
+  ### able to identify them as copies/moves until we have editor-v2.
+
+  svntest.main.run_svn(None, 'commit', '-m', 'Committing changes', wc_dir)
+  svntest.main.run_svn(None, 'up', wc_dir)
+
+  expected_output = make_git_diff_header("A/mu", "A/mu", "revision 1",
+                                         "nonexistent",
+                                         delete=True) + [
+    "@@ -1 +0,0 @@\n",
+    "-This is the file 'mu'.\n",
+  ] + make_git_diff_header("new", "new", "nonexistent", "revision 2",
+                           add=True) + [
+    "@@ -0,0 +1 @@\n",
+    "+This is the file 'new'.\n",
+  ] + make_git_diff_header("iota", "iota", "revision 1",
+                           "revision 2") + [
+    "@@ -1 +1,2 @@\n",
+    " This is the file 'iota'.\n",
+    "+Changed 'iota'.\n",
+  ]
+
+  # The three per-file diffs may appear in any order.
+  expected = svntest.verify.UnorderedOutput(expected_output)
+
+  svntest.actions.run_and_verify_svn(expected, [], 'diff',
+                                     '--git',
+                                     '--old', repo_url + '@1', '--new',
+                                     repo_url + '@2')
+
+# Regression test for an off-by-one error when printing intermediate context
+# lines.
+def diff_prop_missing_context(sbox):
+  "diff for property has missing context"
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Commit a seven-line property value as r2.
+  iota_path = sbox.ospath('iota')
+  prop_val = "".join([
+       "line 1\n",
+       "line 2\n",
+       "line 3\n",
+       "line 4\n",
+       "line 5\n",
+       "line 6\n",
+       "line 7\n",
+     ])
+  svntest.main.run_svn(None,
+                       "propset", "prop", prop_val, iota_path)
+
+  expected_output = svntest.wc.State(wc_dir, {
+      'iota' : Item(verb='Sending'),
+      })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('iota', wc_rev=2)
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  # Trim lines from both ends so the prop diff needs context lines between
+  # the leading and trailing deletions — the spot the off-by-one hit.
+  prop_val = "".join([
+               "line 3\n",
+               "line 4\n",
+               "line 5\n",
+               "line 6\n",
+             ])
+  svntest.main.run_svn(None,
+                       "propset", "prop", prop_val, iota_path)
+  expected_output = make_diff_header(iota_path, 'revision 2',
+                                     'working copy') + \
+                    make_diff_prop_header(iota_path) + [
+    "Modified: prop\n",
+    "## -1,7 +1,4 ##\n",
+    "-line 1\n",
+    "-line 2\n",
+    " line 3\n",
+    " line 4\n",
+    " line 5\n",
+    " line 6\n",
+    "-line 7\n",
+  ]
+
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'diff', iota_path)
+
+def diff_prop_multiple_hunks(sbox):
+  "diff for property with multiple hunks"
+  # Two insertions far enough apart in a 13-line property value must be
+  # rendered as two separate '##' hunks.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  iota_path = sbox.ospath('iota')
+  prop_val = "".join([
+       "line 1\n",
+       "line 2\n",
+       "line 3\n",
+       "line 4\n",
+       "line 5\n",
+       "line 6\n",
+       "line 7\n",
+       "line 8\n",
+       "line 9\n",
+       "line 10\n",
+       "line 11\n",
+       "line 12\n",
+       "line 13\n",
+     ])
+  svntest.main.run_svn(None,
+                       "propset", "prop", prop_val, iota_path)
+
+  expected_output = svntest.wc.State(wc_dir, {
+      'iota' : Item(verb='Sending'),
+      })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('iota', wc_rev=2)
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  # Insert one line after 'line 3' and one after 'line 10'.
+  prop_val = "".join([
+               "line 1\n",
+               "line 2\n",
+               "line 3\n",
+               "Add a line here\n",
+               "line 4\n",
+               "line 5\n",
+               "line 6\n",
+               "line 7\n",
+               "line 8\n",
+               "line 9\n",
+               "line 10\n",
+               "And add a line here\n",
+               "line 11\n",
+               "line 12\n",
+               "line 13\n",
+             ])
+  svntest.main.run_svn(None,
+                       "propset", "prop", prop_val, iota_path)
+  expected_output = make_diff_header(iota_path, 'revision 2',
+                                     'working copy') + \
+                    make_diff_prop_header(iota_path) + [
+    "Modified: prop\n",
+    "## -1,6 +1,7 ##\n",
+    " line 1\n",
+    " line 2\n",
+    " line 3\n",
+    "+Add a line here\n",
+    " line 4\n",
+    " line 5\n",
+    " line 6\n",
+    "## -8,6 +9,7 ##\n",
+    " line 8\n",
+    " line 9\n",
+    " line 10\n",
+    "+And add a line here\n",
+    " line 11\n",
+    " line 12\n",
+    " line 13\n",
+  ]
+
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'diff', iota_path)
+# Empty files must produce git headers but no text hunks.
+def diff_git_empty_files(sbox):
+  "create a diff in git format for empty files"
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  iota_path = sbox.ospath('iota')
+  new_path = sbox.ospath('new')
+  # Truncate iota to zero length.
+  svntest.main.file_write(iota_path, "")
+
+  # Now commit the local mod, creating rev 2.
+  expected_output = svntest.wc.State(wc_dir, {
+    'iota' : Item(verb='Sending'),
+    })
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'iota' : Item(status='  ', wc_rev=2),
+    })
+
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  # Add one empty file and delete the (now empty) iota.
+  svntest.main.file_write(new_path, "")
+  svntest.main.run_svn(None, 'add', new_path)
+  svntest.main.run_svn(None, 'rm', iota_path)
+
+  expected_output = make_git_diff_header(new_path, "new", "nonexistent",
+                                         "working copy",
+                                         add=True, text_changes=False) + [
+  ] + make_git_diff_header(iota_path, "iota", "revision 2", "nonexistent",
+                           delete=True, text_changes=False)
+
+  # Two files in diff may be in any order.
+  expected_output = svntest.verify.UnorderedOutput(expected_output)
+
+  svntest.actions.run_and_verify_svn(expected_output, [], 'diff',
+                                     '--git', wc_dir)
+
+def diff_git_with_props(sbox):
+  "create a diff in git format showing prop changes"
+  # Property-only changes on an added and an existing file must be shown
+  # with git headers plus the property diff sections.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  iota_path = sbox.ospath('iota')
+  new_path = sbox.ospath('new')
+  svntest.main.file_write(iota_path, "")
+
+  # Now commit the local mod, creating rev 2.
+  expected_output = svntest.wc.State(wc_dir, {
+    'iota' : Item(verb='Sending'),
+    })
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'iota' : Item(status='  ', wc_rev=2),
+    })
+
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  svntest.main.file_write(new_path, "")
+  svntest.main.run_svn(None, 'add', new_path)
+  svntest.main.run_svn(None, 'propset', 'svn:eol-style', 'native', new_path)
+  svntest.main.run_svn(None, 'propset', 'svn:keywords', 'Id', iota_path)
+
+  expected_output = make_git_diff_header(new_path, "new",
+                                         "nonexistent", "working copy",
+                                         add=True, text_changes=False) + \
+                    make_diff_prop_header("new") + \
+                    make_diff_prop_added("svn:eol-style", "native") + \
+                    make_git_diff_header(iota_path, "iota",
+                                         "revision 2", "working copy",
+                                         text_changes=False) + \
+                    make_diff_prop_header("iota") + \
+                    make_diff_prop_added("svn:keywords", "Id")
+
+  # Files in diff may be in any order.
+  expected_output = svntest.verify.UnorderedOutput(expected_output)
+
+  svntest.actions.run_and_verify_svn(expected_output, [], 'diff',
+                                     '--git', wc_dir)
+
+@Issue(4010)
+def diff_correct_wc_base_revnum(sbox):
+  "diff WC-WC shows the correct base rev num"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  iota_path = sbox.ospath('iota')
+  svntest.main.file_write(iota_path, "")
+
+  # Commit a local mod, creating rev 2.
+  expected_output = svntest.wc.State(wc_dir, {
+    'iota' : Item(verb='Sending'),
+    })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'iota' : Item(status='  ', wc_rev=2),
+    })
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  # Child's base is now 2; parent's is still 1.
+  # Make a local mod.
+  svntest.main.run_svn(None, 'propset', 'svn:keywords', 'Id', iota_path)
+
+  # The header must report iota's own base (2), not the parent's (1).
+  expected_output = make_git_diff_header(iota_path, "iota",
+                                         "revision 2", "working copy") + \
+                    make_diff_prop_header("iota") + \
+                    make_diff_prop_added("svn:keywords", "Id")
+
+  # Diff the parent.
+  svntest.actions.run_and_verify_svn(expected_output, [], 'diff',
+                                     '--git', wc_dir)
+
+  # The same again, but specifying the target explicitly. This should
+  # give the same output.
+  svntest.actions.run_and_verify_svn(expected_output, [], 'diff',
+                                     '--git', iota_path)
+
+def diff_git_with_props_on_dir(sbox):
+  "diff in git format showing prop changes on dir"
+  # Property additions on directories ('.' and 'A') must get git headers
+  # and property sections, just like files do.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Now commit the local mod, creating rev 2.
+  expected_output = svntest.wc.State(wc_dir, {
+    '.' : Item(verb='Sending'),
+    'A' : Item(verb='Sending'),
+    })
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    '' : Item(status='  ', wc_rev=2),
+    })
+  expected_status.tweak('A', wc_rev=2)
+
+  sbox.simple_propset('k','v', '', 'A')
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  was_cwd = os.getcwd()
+  os.chdir(wc_dir)
+  expected_output = make_git_diff_header("A", "A", "revision 1",
+                                         "revision 2",
+                                         add=False, text_changes=False) + \
+                    make_diff_prop_header("A") + \
+                    make_diff_prop_added("k", "v") + \
+                    make_git_diff_header(".", "", "revision 1",
+                                         "revision 2",
+                                         add=False, text_changes=False) + \
+                    make_diff_prop_header("") + \
+                    make_diff_prop_added("k", "v")
+
+  svntest.actions.run_and_verify_svn(expected_output, [], 'diff',
+                                     '-c2', '--git')
+  os.chdir(was_cwd)
+
+@Issue(3826)
+def diff_abs_localpath_from_wc_folder(sbox):
+  "diff absolute localpath from wc folder"
+  # Diffing an absolute path to a subdir while cwd is inside the working
+  # copy must not error; no changes exist, so no output is expected.
+  sbox.build(read_only = True)
+  wc_dir = sbox.wc_dir
+
+  A_path = sbox.ospath('A')
+  B_abs_path = os.path.abspath(sbox.ospath('A/B'))
+  os.chdir(os.path.abspath(A_path))
+  svntest.actions.run_and_verify_svn(None, [], 'diff', B_abs_path)
+
+@Issue(3449)
+def no_spurious_conflict(sbox):
+  "no spurious conflict on update"
+  # Commits three versions of a data file (r2-r4), merges r4 into a wc at
+  # r2, then updates; the update must not raise a text conflict.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  svntest.actions.do_sleep_for_timestamps()
+
+  data_dir = os.path.join(os.path.dirname(sys.argv[0]), 'diff_tests_data')
+  shutil.copyfile(os.path.join(data_dir, '3449_spurious_v1'),
+                  sbox.ospath('3449_spurious'))
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'add', sbox.ospath('3449_spurious'))
+  sbox.simple_commit()
+  shutil.copyfile(os.path.join(data_dir, '3449_spurious_v2'),
+                  sbox.ospath('3449_spurious'))
+  sbox.simple_commit()
+  shutil.copyfile(os.path.join(data_dir, '3449_spurious_v3'),
+                  sbox.ospath('3449_spurious'))
+  sbox.simple_commit()
+
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'update', '-r2', wc_dir)
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'merge', '-c4', '^/', wc_dir)
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+  expected_status.tweak('', status=' M')
+  expected_status.add({
+      '3449_spurious' : Item(status='M ', wc_rev=2),
+      })
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # This update produces a conflict in 1.6
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'update', '--accept', 'postpone', wc_dir)
+  expected_status.tweak(wc_rev=4)
+  expected_status.tweak('3449_spurious', status='  ')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
def diff_two_working_copies(sbox):
  "diff between two working copies"
  # Exercise 'svn diff --old WC1 --new WC2': a pristine checkout is compared
  # against a working copy containing adds, deletes, text and property
  # modifications, and file<->directory replacements.
  sbox.build()
  wc_dir = sbox.wc_dir

  # Create a pristine working copy that will remain mostly unchanged
  wc_dir_old = sbox.add_wc_path('old')
  svntest.main.run_svn(None, 'co', sbox.repo_url, wc_dir_old)
  # Add a property to A/B/F in the pristine working copy
  svntest.main.run_svn(None, 'propset', 'newprop', 'propval-old\n',
                       os.path.join(wc_dir_old, 'A', 'B', 'F'))

  # Make changes to the first working copy:

  # removed nodes
  sbox.simple_rm('A/mu')
  sbox.simple_rm('A/D/H')

  # new nodes
  sbox.simple_mkdir('newdir')
  svntest.main.file_append(sbox.ospath('newdir/newfile'), 'new text\n')
  sbox.simple_add('newdir/newfile')
  sbox.simple_mkdir('newdir/newdir2') # should not show up in the diff

  # modified nodes
  sbox.simple_propset('newprop', 'propval', 'A/D')
  sbox.simple_propset('newprop', 'propval', 'A/D/gamma')
  svntest.main.file_append(sbox.ospath('A/B/lambda'), 'new text\n')

  # replaced nodes (files vs. directories) with property mods
  sbox.simple_rm('A/B/F')
  svntest.main.file_append(sbox.ospath('A/B/F'), 'new text\n')
  sbox.simple_add('A/B/F')
  sbox.simple_propset('newprop', 'propval-new\n', 'A/B/F')
  sbox.simple_rm('A/D/G/pi')
  sbox.simple_mkdir('A/D/G/pi')
  sbox.simple_propset('newprop', 'propval', 'A/D/G/pi')

  # Build the expected unified diff; diff labels come from the two WC names.
  src_label = os.path.basename(wc_dir_old)
  dst_label = os.path.basename(wc_dir)
  expected_output = make_diff_header('newdir/newfile', 'nonexistent',
                                     'working copy',
                                     src_label, dst_label) + [
      "@@ -0,0 +1 @@\n",
      "+new text\n",
    ] + make_diff_header('A/mu', 'working copy',
                         'nonexistent',
                         src_label, dst_label) + [
      "@@ -1 +0,0 @@\n",
      "-This is the file 'mu'.\n",
    ] + make_diff_header('A/B/F', 'nonexistent',
                         'working copy',
                         src_label, dst_label) + [
      "@@ -0,0 +1 @@\n",
      "+new text\n",
    ] + make_diff_prop_header('A/B/F') + \
    make_diff_prop_added("newprop",
                         "propval-new\n") + \
    make_diff_header('A/B/lambda', 'working copy',
                     'working copy',
                     src_label, dst_label) + [
      "@@ -1 +1,2 @@\n",
      " This is the file 'lambda'.\n",
      "+new text\n",
    ] + make_diff_header('A/D', 'working copy', 'working copy',
                         src_label, dst_label) + \
    make_diff_prop_header('A/D') + \
    make_diff_prop_added("newprop", "propval") + \
    make_diff_header('A/D/gamma', 'working copy',
                     'working copy',
                     src_label, dst_label) + \
    make_diff_prop_header('A/D/gamma') + \
    make_diff_prop_added("newprop", "propval") + \
    make_diff_header('A/D/G/pi', 'working copy',
                     'nonexistent',
                     src_label, dst_label) + [
      "@@ -1 +0,0 @@\n",
      "-This is the file 'pi'.\n",
    ] + make_diff_header('A/D/G/pi', 'nonexistent',
                         'working copy',
                         src_label, dst_label) + \
    make_diff_prop_header('A/D/G/pi') + \
    make_diff_prop_added("newprop", "propval") + \
    make_diff_header('A/D/H/chi', 'working copy',
                     'nonexistent',
                     src_label, dst_label) + [
      "@@ -1 +0,0 @@\n",
      "-This is the file 'chi'.\n",
    ] + make_diff_header('A/D/H/omega', 'working copy',
                         'nonexistent',
                         src_label, dst_label) + [
      "@@ -1 +0,0 @@\n",
      "-This is the file 'omega'.\n",
    ] + make_diff_header('A/D/H/psi', 'working copy',
                         'nonexistent',
                         src_label, dst_label) + [
      "@@ -1 +0,0 @@\n",
      "-This is the file 'psi'.\n",
    ] + make_diff_header('A/B/F', 'working copy',
                         'nonexistent',
                         src_label, dst_label) + \
    make_diff_prop_header('A/B/F') + \
    make_diff_prop_deleted('newprop', 'propval-old\n')


  # Files in diff may be in any order. #### Not any more, but test order is wrong.
  expected_output = svntest.verify.UnorderedOutput(expected_output)
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', '--old', wc_dir_old,
                                     '--new', wc_dir)
+
def diff_deleted_url(sbox):
  "diff -cN of URL deleted in rN"
  # A URL->URL diff of the revision that deleted a directory should list
  # each deleted child as removed content.
  sbox.build()
  wc_dir = sbox.wc_dir

  # remove A/D/H in r2
  sbox.simple_rm("A/D/H")
  sbox.simple_commit()

  # A diff of r2 with target A/D/H should show the removed children
  expected_output = make_diff_header("chi", "revision 1", "nonexistent") + [
      "@@ -1 +0,0 @@\n",
      "-This is the file 'chi'.\n",
    ] + make_diff_header("omega", "revision 1",
                         "nonexistent") + [
      "@@ -1 +0,0 @@\n",
      "-This is the file 'omega'.\n",
    ] + make_diff_header("psi", "revision 1",
                         "nonexistent") + [
      "@@ -1 +0,0 @@\n",
      "-This is the file 'psi'.\n",
    ]

  # Files in diff may be in any order.
  expected_output = svntest.verify.UnorderedOutput(expected_output)
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', '-c2',
                                     sbox.repo_url + '/A/D/H')
+
def diff_arbitrary_files_and_dirs(sbox):
  "diff arbitrary files and dirs"
  # 'svn diff --old X --new Y' on two unrelated WC paths: first two files,
  # then two directory trees.
  sbox.build()
  wc_dir = sbox.wc_dir

  # diff iota with A/mu
  expected_output = make_diff_header("iota", "working copy", "working copy",
                                     "iota", "A/mu") + [
      "@@ -1 +1 @@\n",
      "-This is the file 'iota'.\n",
      "+This is the file 'mu'.\n"
    ]
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', '--old', sbox.ospath('iota'),
                                     '--new', sbox.ospath('A/mu'))

  # diff A/B/E with A/D
  expected_output = make_diff_header("G/pi", "nonexistent", "working copy",
                                     "B/E", "D") + [
      "@@ -0,0 +1 @@\n",
      "+This is the file 'pi'.\n"
    ] + make_diff_header("G/rho", "nonexistent",
                         "working copy", "B/E", "D") + [
      "@@ -0,0 +1 @@\n",
      "+This is the file 'rho'.\n"
    ] + make_diff_header("G/tau", "nonexistent",
                         "working copy", "B/E", "D") + [
      "@@ -0,0 +1 @@\n",
      "+This is the file 'tau'.\n"
    ] + make_diff_header("H/chi", "nonexistent",
                         "working copy", "B/E", "D") + [
      "@@ -0,0 +1 @@\n",
      "+This is the file 'chi'.\n"
    ] + make_diff_header("H/omega", "nonexistent",
                         "working copy", "B/E", "D") + [
      "@@ -0,0 +1 @@\n",
      "+This is the file 'omega'.\n"
    ] + make_diff_header("H/psi", "nonexistent",
                         "working copy", "B/E", "D") + [
      "@@ -0,0 +1 @@\n",
      "+This is the file 'psi'.\n"
    ] + make_diff_header("alpha", "working copy",
                         "nonexistent", "B/E", "D") + [
      "@@ -1 +0,0 @@\n",
      "-This is the file 'alpha'.\n"
    ] + make_diff_header("beta", "working copy",
                         "nonexistent", "B/E", "D") + [
      "@@ -1 +0,0 @@\n",
      "-This is the file 'beta'.\n"
    ] + make_diff_header("gamma", "nonexistent",
                         "working copy", "B/E", "D") + [
      "@@ -0,0 +1 @@\n",
      "+This is the file 'gamma'.\n"
    ]

  # Files in diff may be in any order.
  expected_output = svntest.verify.UnorderedOutput(expected_output)
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', '--old', sbox.ospath('A/B/E'),
                                     '--new', sbox.ospath('A/D'))
+
def diff_properties_only(sbox):
  "diff --properties-only"
  # With both a property change and a text change committed to 'iota',
  # 'svn diff --properties-only' must show only the property part.

  sbox.build()
  wc_dir = sbox.wc_dir

  expected_output = \
    make_diff_header("iota", "revision 1", "revision 2") + \
    make_diff_prop_header("iota") + \
    make_diff_prop_added("svn:eol-style", "native")

  expected_reverse_output = \
    make_diff_header("iota", "revision 2", "revision 1") + \
    make_diff_prop_header("iota") + \
    make_diff_prop_deleted("svn:eol-style", "native")

  expected_rev1_output = \
    make_diff_header("iota", "revision 1", "working copy") + \
    make_diff_prop_header("iota") + \
    make_diff_prop_added("svn:eol-style", "native")

  # Make a property change and a content change to 'iota'
  # Only the property change should be displayed by diff --properties-only
  sbox.simple_propset('svn:eol-style', 'native', 'iota')
  svntest.main.file_append(sbox.ospath('iota'), 'new text')

  sbox.simple_commit() # r2

  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', '--properties-only', '-r', '1:2',
                                     sbox.repo_url + '/iota')

  svntest.actions.run_and_verify_svn(expected_reverse_output, [],
                                     'diff', '--properties-only', '-r', '2:1',
                                     sbox.repo_url + '/iota')

  # NOTE(review): cwd is changed here and never restored; presumably the
  # test harness resets cwd between tests -- confirm.
  os.chdir(wc_dir)
  svntest.actions.run_and_verify_svn(expected_rev1_output, [],
                                     'diff', '--properties-only', '-r', '1',
                                     'iota')

  svntest.actions.run_and_verify_svn(expected_rev1_output, [],
                                     'diff', '--properties-only',
                                     '-r', 'PREV', 'iota')
+
def diff_properties_no_newline(sbox):
  "diff props; check no-newline-at-end messages"
  # Verify the "\ No newline at end of property" marker for every
  # combination of old/new property values with and without a trailing
  # newline.

  sbox.build()
  old_cwd = os.getcwd()
  os.chdir(sbox.wc_dir)
  sbox.wc_dir = ''

  no_nl = "\\ No newline at end of property\n"
  propchange_header = "Modified: p.*\n"

  # (property name, old value, new value) -- values differ in whether they
  # end with a newline.
  subtests = [
    ('p1', 'val1', 'val2' ),
    ('p2', 'val1', 'val2\n'),
    ('p3', 'val1\n', 'val2' ),
    ('p4', 'val1\n', 'val2\n'),
  ]

  # The "before" state.
  for pname, old_val, new_val in subtests:
    sbox.simple_propset(pname, old_val, 'iota')
  sbox.simple_commit() # r2

  # Test one change at a time. (Because, with multiple changes, the order
  # may not be predictable.)
  for pname, old_val, new_val in subtests:
    expected_output = \
      make_diff_header("iota", "revision 2", "working copy") + \
      make_diff_prop_header("iota") + \
      make_diff_prop_modified(pname, old_val, new_val)

    sbox.simple_propset(pname, new_val, 'iota')
    svntest.actions.run_and_verify_svn(expected_output, [], 'diff')
    svntest.actions.run_and_verify_svn(None, [], 'revert', 'iota')

  os.chdir(old_cwd)
+
def diff_arbitrary_same(sbox):
  "diff arbitrary files and dirs but same"
  # Make a directory and a copy of it with identical contents and
  # properties, then check that diffing one against the other is silent.

  sbox.build(read_only = True)

  sbox.simple_propset('k', 'v', 'A', 'A/mu', 'A/D/G/pi')
  svntest.main.file_write(sbox.ospath('A/mu'), "new mu")
  sbox.simple_copy('A', 'A2')

  old_dir = sbox.ospath('A')
  new_dir = sbox.ospath('A2')

  # Neither the full diff nor the summarized diff may report a change.
  svntest.actions.run_and_verify_svn([], [],
                                     'diff',
                                     '--old', old_dir,
                                     '--new', new_dir)
  svntest.actions.run_and_verify_svn([], [],
                                     'diff', '--summarize',
                                     '--old', old_dir,
                                     '--new', new_dir)
+
def simple_ancestry(sbox):
  "diff some simple ancestry changes"
  # Check --notice-ancestry handling of copies, deletes and replacements:
  # forward against r1, reversed against HEAD, and then with replacements
  # and delete-deletes mixed in. Only Index lines are checked because
  # --no-diff-added/--no-diff-deleted suppress the content.

  sbox.build()
  sbox.simple_copy('A/B/E', 'A/B/E_copied')
  sbox.simple_copy('A/D/G/pi', 'A/D/G/pi-2')
  sbox.simple_copy('A/D/G/rho', 'A/D/G/rho-2')
  sbox.simple_rm('A/B/F', 'A/B/E', 'A/D/G/rho', 'A/D/G/tau')
  sbox.simple_add_text('new', 'new')

  line = '===================================================================\n'

  expected_output = svntest.verify.UnorderedOutput([
    'Index: %s (added)\n' % sbox.path('new'),
    line,
    'Index: %s (deleted)\n' % sbox.path('A/B/E/alpha'),
    line,
    'Index: %s (deleted)\n' % sbox.path('A/B/E/beta'),
    line,
    'Index: %s (added)\n' % sbox.path('A/B/E_copied/beta'),
    line,
    'Index: %s (added)\n' % sbox.path('A/B/E_copied/alpha'),
    line,
    'Index: %s (added)\n' % sbox.path('A/D/G/pi-2'),
    line,
    'Index: %s (deleted)\n' % sbox.path('A/D/G/rho'),
    line,
    'Index: %s (added)\n' % sbox.path('A/D/G/rho-2'),
    line,
    'Index: %s (deleted)\n' % sbox.path('A/D/G/tau'),
    line,
  ])

  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', sbox.wc_dir,
                                     '-r', '1',
                                     '--notice-ancestry',
                                     '--no-diff-deleted',
                                     '--show-copies-as-adds',
                                     '--no-diff-added')

  # And try the same thing in reverse
  sbox.simple_commit()
  sbox.simple_update(revision=1)

  expected_output = svntest.verify.UnorderedOutput([
    'Index: %s (deleted)\n' % sbox.path('new'),
    line,
    'Index: %s (added)\n' % sbox.path('A/B/E/alpha'),
    line,
    'Index: %s (added)\n' % sbox.path('A/B/E/beta'),
    line,
    'Index: %s (deleted)\n' % sbox.path('A/B/E_copied/beta'),
    line,
    'Index: %s (deleted)\n' % sbox.path('A/B/E_copied/alpha'),
    line,
    'Index: %s (deleted)\n' % sbox.path('A/D/G/pi-2'),
    line,
    'Index: %s (added)\n' % sbox.path('A/D/G/rho'),
    line,
    'Index: %s (deleted)\n' % sbox.path('A/D/G/rho-2'),
    line,
    'Index: %s (added)\n' % sbox.path('A/D/G/tau'),
    line,
  ])

  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', sbox.wc_dir,
                                     '-r', 'HEAD',
                                     '--notice-ancestry',
                                     '--no-diff-deleted',
                                     '--show-copies-as-adds',
                                     '--no-diff-added')

  # Now introduce replacements and some delete-deletes
  sbox.simple_update()
  sbox.simple_mkdir('A/B/E')
  sbox.simple_add_text('New alpha', 'A/B/E/alpha')
  sbox.simple_add_text('New beta', 'A/B/E/beta')
  sbox.simple_add_text('New rho', 'A/D/G/rho')
  sbox.simple_add_text('New tau', 'A/D/G/tau')
  sbox.simple_rm('A/B/E_copied', 'A/D/G/pi-2', 'A/D/G/rho-2')

  expected_output = svntest.verify.UnorderedOutput([
    'Index: %s (added)\n' % sbox.path('new'),
    line,
    'Index: %s (deleted)\n' % sbox.path('A/B/E/alpha'),
    line,
    'Index: %s (deleted)\n' % sbox.path('A/B/E/beta'),
    line,
    'Index: %s (added)\n' % sbox.path('A/B/E/alpha'),
    line,
    'Index: %s (added)\n' % sbox.path('A/B/E/beta'),
    line,
    'Index: %s (deleted)\n' % sbox.path('A/D/G/rho'),
    line,
    'Index: %s (added)\n' % sbox.path('A/D/G/rho'),
    line,
    'Index: %s (deleted)\n' % sbox.path('A/D/G/tau'),
    line,
    'Index: %s (added)\n' % sbox.path('A/D/G/tau'),
    line,
  ])

  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', sbox.wc_dir,
                                     '-r', '1',
                                     '--notice-ancestry',
                                     '--no-diff-deleted',
                                     '--show-copies-as-adds',
                                     '--no-diff-added')

  # The same diff is expected after committing and updating.
  sbox.simple_commit()
  sbox.simple_update()

  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', sbox.wc_dir,
                                     '-r', '1',
                                     '--notice-ancestry',
                                     '--no-diff-deleted',
                                     '--show-copies-as-adds',
                                     '--no-diff-added')
+
def local_tree_replace(sbox):
  "diff a replaced tree"
  # Replace a whole tree (rm --keep-local + re-add), diff it with
  # --notice-ancestry, then feed the full ancestry diff to 'svn patch'
  # and check the reported node ordering.

  sbox.build()
  wc_dir = sbox.wc_dir

  sbox.simple_add_text('extra', 'A/B/F/extra')
  sbox.simple_commit()

  # Replace A/B in place: delete keeping the on-disk files, then re-add.
  svntest.actions.run_and_verify_svn(None, [],
                                     'rm', '--keep-local',
                                     sbox.ospath('A/B'))
  svntest.actions.run_and_verify_svn(None, [],
                                     'add', sbox.ospath('A/B'))

  # And now check with ancestry

  line = '===================================================================\n'

  expected_output = svntest.verify.UnorderedOutput([
    'Index: %s (deleted)\n' % sbox.path('A/B/lambda'),
    line,
    'Index: %s (deleted)\n' % sbox.path('A/B/E/alpha'),
    line,
    'Index: %s (deleted)\n' % sbox.path('A/B/E/beta'),
    line,
    'Index: %s (deleted)\n' % sbox.path('A/B/F/extra'),
    line,
    'Index: %s (added)\n' % sbox.path('A/B/lambda'),
    line,
    'Index: %s (added)\n' % sbox.path('A/B/E/alpha'),
    line,
    'Index: %s (added)\n' % sbox.path('A/B/E/beta'),
    line,
    'Index: %s (added)\n' % sbox.path('A/B/F/extra'),
    line,
  ])

  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', wc_dir,
                                     '-r', '2',
                                     '--notice-ancestry',
                                     '--show-copies-as-adds',
                                     '--no-diff-added',
                                     '--no-diff-deleted')

  # Now create patches to verify the tree ordering
  patch = os.path.abspath(os.path.join(wc_dir, 'ancestry.patch'))

  cwd = os.getcwd()
  os.chdir(wc_dir)
  _, out, _ = svntest.actions.run_and_verify_svn(None, [],
                                                 'diff', '.',
                                                 '-r', '2',
                                                 '--notice-ancestry',
                                                 '--show-copies-as-adds')
  svntest.main.file_append(patch, ''.join(out))
  os.chdir(cwd)

  # And try to apply it
  svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)

  expected_output = svntest.verify.UnorderedOutput([
    'D %s\n' % sbox.ospath('A/B/F/extra'),
    'D %s\n' % sbox.ospath('A/B/F'),
    'D %s\n' % sbox.ospath('A/B/E/beta'),
    'D %s\n' % sbox.ospath('A/B/E/alpha'),
    'D %s\n' % sbox.ospath('A/B/E'),
    'D %s\n' % sbox.ospath('A/B/lambda'),
    'D %s\n' % sbox.ospath('A/B'),
    'A %s\n' % sbox.ospath('A/B'),
    'A %s\n' % sbox.ospath('A/B/lambda'),
    'A %s\n' % sbox.ospath('A/B/F'),
    'A %s\n' % sbox.ospath('A/B/F/extra'),
    'A %s\n' % sbox.ospath('A/B/E'),
    'A %s\n' % sbox.ospath('A/B/E/beta'),
    'A %s\n' % sbox.ospath('A/B/E/alpha'),
  ])
  # And this currently fails because the ordering is broken, but also
  # because it hits an issue in 'svn patch'
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'patch', patch, wc_dir)
+
def diff_dir_replaced_by_file(sbox):
  "diff a directory replaced by a file"
  # Replace directory A/B/E with a file of the same name and check the
  # local diff: deletes of the old children followed by the new file's add.

  sbox.build()
  wc_dir = sbox.wc_dir

  sbox.simple_rm('A/B/E')
  sbox.simple_add_text('text', 'A/B/E')

  expected_output = [
    'Index: %s\n' % sbox.path('A/B/E/alpha'),
    '===================================================================\n',
    '--- %s\t(revision 1)\n' % sbox.path('A/B/E/alpha'),
    '+++ %s\t(nonexistent)\n' % sbox.path('A/B/E/alpha'),
    '@@ -1 +0,0 @@\n',
    '-This is the file \'alpha\'.\n',
    'Index: %s\n' % sbox.path('A/B/E/beta'),
    '===================================================================\n',
    '--- %s\t(revision 1)\n' % sbox.path('A/B/E/beta'),
    '+++ %s\t(nonexistent)\n' % sbox.path('A/B/E/beta'),
    '@@ -1 +0,0 @@\n',
    '-This is the file \'beta\'.\n',
    'Index: %s\n' % sbox.path('A/B/E'),
    '===================================================================\n',
    '--- %s\t(nonexistent)\n' % sbox.path('A/B/E'),
    '+++ %s\t(working copy)\n' % sbox.path('A/B/E'),
    '@@ -0,0 +1 @@\n',
    '+text\n',
    '\ No newline at end of file\n',
  ]

  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', wc_dir)
+
def diff_dir_replaced_by_dir(sbox):
  "diff a directory replaced by a directory tree"
  # Replace A/B/E with a fresh directory (new prop, new child) and compare
  # the diff with and without --notice-ancestry, plus the summarized forms.

  sbox.build()
  wc_dir = sbox.wc_dir

  sbox.simple_rm('A/B/E')
  sbox.simple_mkdir('A/B/E')
  sbox.simple_propset('a', 'b\n', 'A/B/E')
  sbox.simple_add_text('New beta\n', 'A/B/E/beta')

  # First check with ancestry (Tree replace)

  expected_output = [
    'Index: %s\n' % sbox.path('A/B/E/alpha'),
    '===================================================================\n',
    '--- %s\t(revision 1)\n' % sbox.path('A/B/E/alpha'),
    '+++ %s\t(nonexistent)\n' % sbox.path('A/B/E/alpha'),
    '@@ -1 +0,0 @@\n',
    '-This is the file \'alpha\'.\n',
    'Index: %s\n' % sbox.path('A/B/E/beta'),
    '===================================================================\n',
    '--- %s\t(revision 1)\n' % sbox.path('A/B/E/beta'),
    '+++ %s\t(nonexistent)\n' % sbox.path('A/B/E/beta'),
    '@@ -1 +0,0 @@\n',
    '-This is the file \'beta\'.\n',
    'Index: %s\n' % sbox.path('A/B/E/beta'),
    '===================================================================\n',
    '--- %s\t(nonexistent)\n' % sbox.path('A/B/E/beta'),
    '+++ %s\t(working copy)\n' % sbox.path('A/B/E/beta'),
    '@@ -0,0 +1 @@\n',
    '+New beta\n',
    'Index: %s\n' % sbox.path('A/B/E'),
    '===================================================================\n',
    '--- %s\t(nonexistent)\n' % sbox.path('A/B/E'),
    '+++ %s\t(working copy)\n' % sbox.path('A/B/E'),
    '\n',
    'Property changes on: %s\n' % sbox.path('A/B/E'),
    '___________________________________________________________________\n',
    'Added: a\n',
    '## -0,0 +1 ##\n',
    '+b\n',
  ]

  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', '--notice-ancestry', wc_dir)

  # And summarized. Currently produces directory adds after their children
  expected_output = svntest.verify.UnorderedOutput([
    'D %s\n' % sbox.ospath('A/B/E/alpha'),
    'D %s\n' % sbox.ospath('A/B/E/beta'),
    'D %s\n' % sbox.ospath('A/B/E'),
    'A %s\n' % sbox.ospath('A/B/E'),
    'A %s\n' % sbox.ospath('A/B/E/beta'),
  ])
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', '--summarize', wc_dir,
                                     '--notice-ancestry')

  # And now without (file delete, change + properties)
  expected_output = [
    'Index: %s\n' % sbox.path('A/B/E/alpha'),
    '===================================================================\n',
    '--- %s\t(revision 1)\n' % sbox.path('A/B/E/alpha'),
    '+++ %s\t(nonexistent)\n' % sbox.path('A/B/E/alpha'),
    '@@ -1 +0,0 @@\n',
    '-This is the file \'alpha\'.\n',
    'Index: %s\n' % sbox.path('A/B/E/beta'),
    '===================================================================\n',
    '--- %s\t(revision 1)\n' % sbox.path('A/B/E/beta'),
    '+++ %s\t(working copy)\n' % sbox.path('A/B/E/beta'),
    '@@ -1 +1 @@\n',
    '-This is the file \'beta\'.\n',
    '+New beta\n',
    'Index: %s\n' % sbox.path('A/B/E'),
    '===================================================================\n',
    '--- %s\t(revision 1)\n' % sbox.path('A/B/E'),
    '+++ %s\t(working copy)\n' % sbox.path('A/B/E'),
    '\n',
    'Property changes on: %s\n' % sbox.path('A/B/E'),
    '___________________________________________________________________\n',
    'Added: a\n',
    '## -0,0 +1 ##\n',
    '+b\n',
  ]

  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', wc_dir)

  expected_output = [
    'D %s\n' % sbox.ospath('A/B/E/alpha'),
    'M %s\n' % sbox.ospath('A/B/E/beta'),
    ' M %s\n' % sbox.ospath('A/B/E'),
  ]
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', '--summarize', wc_dir)
+
+
@Issue(4366)
def diff_repos_empty_file_addition(sbox):
  "repos diff of rev which adds empty file"
  # Diffing the revision that adds an empty file must emit only the Index
  # and separator lines, with no hunk.

  sbox.build()
  wc_dir = sbox.wc_dir

  # Add and commit an empty file.
  svntest.main.file_append(sbox.ospath('newfile'), "")
  svntest.main.run_svn(None, 'add', sbox.ospath('newfile'))
  expected_output = svntest.wc.State(sbox.wc_dir, {
    'newfile': Item(verb='Adding'),
    })
  expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
  expected_status.add({
    'newfile' : Item(status='  ', wc_rev=2),
    })
  svntest.actions.run_and_verify_commit(sbox.wc_dir, expected_output,
                                        expected_status)

  # Now diff the revision that added the empty file.
  expected_output = [
    'Index: newfile\n',
    '===================================================================\n',
    ]
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', '-c', '2', sbox.repo_url)
+
def diff_missing_tree_conflict_victim(sbox):
  "diff with missing tree-conflict victim in wc"
  # Set up an 'incoming edit vs. local missing' tree conflict via a merge,
  # then check that a plain WC diff reports no change instead of erroring.

  sbox.build()
  wc_dir = sbox.wc_dir

  # Produce an 'incoming edit vs. local missing' tree conflict:
  # r2: edit iota and commit the change
  svntest.main.file_append(sbox.ospath('iota'), "This is a change to iota.\n")
  sbox.simple_propset('k', 'v', 'A/C')
  sbox.simple_commit()
  # now remove iota
  sbox.simple_rm('iota', 'A/C')
  sbox.simple_commit()
  # update to avoid mixed-rev wc warning
  sbox.simple_update()
  # merge r2 into wc and verify that a tree conflict is flagged on iota
  expected_output = wc.State(wc_dir, {
      'iota' : Item(status='  ', treeconflict='C'),
      'A/C' : Item(status='  ', treeconflict='C')
  })
  expected_mergeinfo_output = wc.State(wc_dir, {})
  expected_elision_output = wc.State(wc_dir, {})
  expected_disk = svntest.main.greek_state.copy()
  expected_disk.remove('iota','A/C')
  expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
  expected_status.tweak('iota', 'A/C',
                        status='! ', treeconflict='C', wc_rev=None)
  expected_skip = wc.State('', { })
  svntest.actions.run_and_verify_merge(wc_dir, '1', '2',
                                       sbox.repo_url, None,
                                       expected_output,
                                       expected_mergeinfo_output,
                                       expected_elision_output,
                                       expected_disk,
                                       expected_status,
                                       expected_skip,
                                       [], False, False,
                                       '--ignore-ancestry', wc_dir)

  # 'svn diff' should show no change for the working copy
  # This currently fails because svn errors out with a 'node not found' error
  expected_output = [ ]
  svntest.actions.run_and_verify_svn(expected_output, [], 'diff', wc_dir)
+
@Issue(4396)
def diff_local_missing_obstruction(sbox):
  "diff local missing and obstructed files"
  # Missing ('iota' unlinked) and obstructed ('A/mu' replaced by a dir on
  # disk) files must not break 'svn diff': no output for their content,
  # but property changes on them still show.

  sbox.build(read_only=True)
  wc_dir = sbox.wc_dir

  os.unlink(sbox.ospath('iota'))
  os.unlink(sbox.ospath('A/mu'))
  os.mkdir(sbox.ospath('A/mu'))

  # Expect no output for missing and obstructed files
  expected_output = [
  ]
  svntest.actions.run_and_verify_svn(expected_output, [], 'diff', wc_dir)

  sbox.simple_propset('K', 'V', 'iota', 'A/mu')
  sbox.simple_append('IotA', 'Content')

  # But do expect a proper property diff
  expected_output = [
    'Index: %s\n' % (sbox.path('A/mu'),),
    '===================================================================\n',
    '--- %s\t(revision 1)\n' % (sbox.path('A/mu'),),
    '+++ %s\t(working copy)\n' % (sbox.path('A/mu'),),
    '\n',
    'Property changes on: %s\n' % (sbox.path('A/mu'),),
    '___________________________________________________________________\n',
    'Added: K\n',
    '## -0,0 +1 ##\n',
    '+V\n',
    '\ No newline at end of property\n',
    'Index: %s\n' % (sbox.path('iota'),),
    '===================================================================\n',
    '--- %s\t(revision 1)\n' % (sbox.path('iota'),),
    '+++ %s\t(working copy)\n' % (sbox.path('iota'),),
    '\n',
    'Property changes on: %s\n' % (sbox.path('iota'),),
    '___________________________________________________________________\n',
    'Added: K\n',
    '## -0,0 +1 ##\n',
    '+V\n',
    '\ No newline at end of property\n',
  ]
  svntest.actions.run_and_verify_svn(expected_output, [], 'diff', wc_dir)

  # Create an external. This produces an error in 1.8.0.
  sbox.simple_propset('svn:externals', 'AA/BB ' + sbox.repo_url + '/A', '.')
  sbox.simple_update()

  svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
                                     'diff', wc_dir)
+
+
@Issue(4444)
def diff_move_inside_copy(sbox):
  "diff copied-along child that contains a moved file"
  # Copy A/D, then move a file inside the copy and modify it; diffing the
  # copied-along parent directory used to trigger an assertion.
  sbox.build(read_only=True)

  copied_h = 'A/D-copy/H'
  sbox.simple_copy('A/D', 'A/D-copy')
  sbox.simple_move(copied_h + '/chi', copied_h + '/chi-moved')
  sbox.simple_append(copied_h + '/chi-moved', 'a new line')

  # The diff must produce some output rather than asserting.
  svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
                                     'diff', sbox.ospath(copied_h))
@XFail()
@Issue(4464)
def diff_repo_wc_copies(sbox):
  "diff repo to wc of a copy"
  # Diff a repository file against a local copy of it with
  # --show-copies-as-adds: the whole content should appear as added.
  sbox.build()

  iota_url = sbox.repo_url + '/iota'
  iota_copy = sbox.ospath('iota_copy')
  sbox.simple_copy('iota', 'iota_copy')

  expected_output = make_diff_header(iota_copy, "nonexistent", "working copy",
                                     iota_url, iota_copy) + [
    "@@ -0,0 +1 @@\n",
    "+This is the file 'iota'.\n" ]
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', '--show-copies-as-adds',
                                     iota_url, iota_copy)
+
@Issue(4460)
def diff_repo_wc_file_props(sbox):
  "diff repo to wc file target with props"
  # Repos->WC diffs of a single file carrying property changes, including
  # the reversed direction, copies, and an svn:eol-style change that also
  # alters the translated file content.
  sbox.build()
  iota = sbox.ospath('iota')

  # add a mime-type and a line to iota to test the binary check
  sbox.simple_propset('svn:mime-type', 'text/plain', 'iota')
  sbox.simple_append('iota','second line\n')

  # test that we get the line and the property add
  expected_output = make_diff_header(iota, 'revision 1', 'working copy') + \
                    [ '@@ -1 +1,2 @@\n',
                      " This is the file 'iota'.\n",
                      "+second line\n", ] + \
                    make_diff_prop_header(iota) + \
                    make_diff_prop_added('svn:mime-type', 'text/plain')
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', '-r1', iota)

  # reverse the diff, should get a property delete and line delete
  expected_output = make_diff_header(iota, 'working copy', 'revision 1') + \
                    [ '@@ -1,2 +1 @@\n',
                      " This is the file 'iota'.\n",
                      "-second line\n", ] + \
                    make_diff_prop_header(iota) + \
                    make_diff_prop_deleted('svn:mime-type', 'text/plain')
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', '--old', iota,
                                     '--new', iota + '@1')

  # copy iota to test with --show-copies as adds
  sbox.simple_copy('iota', 'iota_copy')
  iota_copy = sbox.ospath('iota_copy')

  # test that we get all lines as added and the property added
  # TODO: We only test that this test doesn't error out because of Issue #4464
  # if and when that issue is fixed this test should check output
  svntest.actions.run_and_verify_svn(None, [], 'diff',
                                     '--show-copies-as-adds', '-r1', iota_copy)

  # reverse the diff, should get all lines as a delete and no property
  # TODO: We only test that this test doesn't error out because of Issue #4464
  # if and when that issue is fixed this test should check output
  svntest.actions.run_and_verify_svn(None, [], 'diff',
                                     '--show-copies-as-adds',
                                     '--old', iota_copy,
                                     '--new', iota + '@1')

  # revert and commit with the eol-style of LF and then update so
  # that we can see a change on either windows or *nix.
  sbox.simple_revert('iota', 'iota_copy')
  sbox.simple_propset('svn:eol-style', 'LF', 'iota')
  sbox.simple_commit() #r2
  sbox.simple_update()

  # now that we have a LF file on disk switch to CRLF
  sbox.simple_propset('svn:eol-style', 'CRLF', 'iota')

  # test that not only the property but also the file changes
  # i.e. that the line endings substitution works
  if svntest.main.is_os_windows():
    # test suite normalizes crlf output into just lf on Windows.
    # so we have to assume it worked because there is an add and
    # remove line with the same content. Fortunately, it doesn't
    # do this on *nix so we can be pretty sure that it works right.
    # TODO: Provide a way to handle this better
    crlf = '\n'
  else:
    crlf = '\r\n'
  expected_output = make_diff_header(iota, 'revision 1', 'working copy') + \
                    [ '@@ -1 +1 @@\n',
                      "-This is the file 'iota'.\n",
                      "+This is the file 'iota'." + crlf ] + \
                    make_diff_prop_header(iota) + \
                    make_diff_prop_added('svn:eol-style', 'CRLF')

  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', '-r1', iota)
+
+
@Issue(4460)
def diff_repo_repo_added_file_mime_type(sbox):
  "diff repo to repo added file with mime-type"
  # Repos->repos diff across the addition (and, reversed, the deletion)
  # of a file that carries svn:mime-type.
  sbox.build()
  wc_dir = sbox.wc_dir
  newfile = sbox.ospath('newfile')

  # add a file with a mime-type
  sbox.simple_append('newfile', "This is the file 'newfile'.\n")
  sbox.simple_add('newfile')
  sbox.simple_propset('svn:mime-type', 'text/plain', 'newfile')
  sbox.simple_commit() # r2

  # try to diff across the addition
  expected_output = make_diff_header(newfile, 'nonexistent', 'revision 2') + \
                    [ '@@ -0,0 +1 @@\n',
                      "+This is the file 'newfile'.\n" ] + \
                    make_diff_prop_header(newfile) + \
                    make_diff_prop_added('svn:mime-type', 'text/plain')

  svntest.actions.run_and_verify_svn(expected_output, [], 'diff',
                                     '-r1:2', newfile)

  # reverse the diff to diff across a deletion
  expected_output = make_diff_header(newfile, 'revision 2', 'nonexistent') + \
                    [ '@@ -1 +0,0 @@\n',
                      "-This is the file 'newfile'.\n",
                      '\n',
                      'Property changes on: %s\n' % sbox.path('newfile'),
                      '__________________________________________________' +
                      '_________________\n',
                      'Deleted: svn:mime-type\n',
                      '## -1 +0,0 ##\n',
                      '-text/plain\n',
                      '\ No newline at end of property\n']
  svntest.actions.run_and_verify_svn(expected_output, [], 'diff',
                                     '-r2:1', newfile)
+
def diff_switched_file(sbox):
  "diff a switched file against repository"
  # Switch 'iota' to A/mu and modify it; a file-target diff compares the
  # switch origin, while a directory-target diff undoes the switch.

  sbox.build()
  svntest.actions.run_and_verify_svn(None, [], 'switch',
                                     sbox.repo_url + '/A/mu',
                                     sbox.ospath('iota'), '--ignore-ancestry')
  sbox.simple_append('iota', 'Mu????')

  # This diffs the file against its origin
  expected_output = [
    'Index: %s\n' % sbox.path('iota'),
    '===================================================================\n',
    '--- %s\t(.../A/mu)\t(revision 1)\n' % sbox.path('iota'),
    '+++ %s\t(.../iota)\t(working copy)\n' % sbox.path('iota'),
    '@@ -1 +1,2 @@\n',
    ' This is the file \'mu\'.\n',
    '+Mu????\n',
    '\ No newline at end of file\n',
  ]
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', '-r', '1', sbox.ospath('iota'))

  # And this undoes the switch for the diff
  expected_output = [
    'Index: %s\n' % sbox.path('iota'),
    '===================================================================\n',
    '--- %s\t(revision 1)\n' % sbox.path('iota'),
    '+++ %s\t(working copy)\n' % sbox.path('iota'),
    '@@ -1 +1,2 @@\n',
    '-This is the file \'iota\'.\n',
    '+This is the file \'mu\'.\n',
    '+Mu????\n',
    '\ No newline at end of file\n',
  ]
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'diff', '-r', '1', sbox.ospath(''))
+
def diff_parent_dir(sbox):
  "diff parent directory"
  # Run diff with '..' and '../..' as targets from inside the working copy;
  # non-canonical relpaths used to trigger an assertion.

  sbox.build()
  wc_dir = sbox.wc_dir

  # Create and propset A/ZZZ directly in the repository (r2).
  svntest.actions.run_and_verify_svnmucc(None, [],
                                         '-U', sbox.repo_url, '-m', 'Q',
                                         'mkdir', 'A/ZZZ',
                                         'propset', 'A', 'B', 'A/ZZZ')

  was_cwd = os.getcwd()
  os.chdir(os.path.join(wc_dir, 'A', 'B'))
  try:
    # This currently (1.8.9, 1.9.0 development) triggers an assertion failure
    # as a non canonical relpath ".." is used as diff target

    expected_output = [
      'Index: ../ZZZ\n',
      '===================================================================\n',
      '--- ../ZZZ	(revision 2)\n',
      '+++ ../ZZZ	(nonexistent)\n',
      '\n',
      'Property changes on: ../ZZZ\n',
      '___________________________________________________________________\n',
      'Deleted: A\n',
      '## -1 +0,0 ##\n',
      '-B\n',
      '\ No newline at end of property\n',
    ]

    svntest.actions.run_and_verify_svn(expected_output, [],
                                       'diff', '-r', '2', '..')

    expected_output = [
      'Index: ../../A/ZZZ\n',
      '===================================================================\n',
      '--- ../../A/ZZZ	(revision 2)\n',
      '+++ ../../A/ZZZ	(nonexistent)\n',
      '\n',
      'Property changes on: ../../A/ZZZ\n',
      '___________________________________________________________________\n',
      'Deleted: A\n',
      '## -1 +0,0 ##\n',
      '-B\n',
      '\ No newline at end of property\n',
    ]

    svntest.actions.run_and_verify_svn(expected_output, [],
                                       'diff', '-r', '2', '../..')
  finally:
    os.chdir(was_cwd)
+
def diff_deleted_in_move_against_repos(sbox):
  "diff deleted in move against repository"
  # Move a tree, move/delete children inside it, add a directory in the
  # repository, then check that several diff invocations all succeed.

  sbox.build()
  sbox.simple_move('A/B', 'BB')
  sbox.simple_move('BB/E/alpha', 'BB/q')
  sbox.simple_rm('BB/E/beta')

  svntest.actions.run_and_verify_svn(None, [],
                                     'mkdir', sbox.repo_url + '/BB/E',
                                     '--parents', '-m', 'Create dir')

  # Each invocation below must run without error, in this order:
  #  1. plain local diff
  #  2. walks nodes locally from wc-root, notices ancestry
  #  3. walks nodes locally from BB, notices ancestry
  #  4. walks nodes locally from wc-root
  #  5. walks nodes locally from BB (used to hit an assertion)
  invocations = [
    ['diff', sbox.wc_dir],
    ['diff', sbox.wc_dir, '-r1', '--notice-ancestry'],
    ['diff', sbox.wc_dir, '-r2', '--notice-ancestry'],
    ['diff', sbox.wc_dir, '-r1'],
    ['diff', sbox.wc_dir, '-r2'],
  ]
  for args in invocations:
    svntest.actions.run_and_verify_svn(None, [], *args)
+
+def diff_replaced_moved(sbox):
+ "diff against a replaced moved node"
+
+ sbox.build(read_only=True)
+ sbox.simple_move('A', 'AA')
+ sbox.simple_rm('AA/B')
+ sbox.simple_move('AA/D', 'AA/B')
+
+ # Ok
+ svntest.actions.run_and_verify_svn(None, [],
+ 'diff', sbox.ospath('.'), '-r1')
+
+ # Ok (rhuijben: Works through a hack assuming some BASE knowledge)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'diff', sbox.ospath('AA'), '-r1')
+
+ # Error (misses BASE node because the diff editor is driven incorrectly)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'diff', sbox.ospath('AA/B'), '-r1')
+
+# Regression test for the fix in r1619380. Prior to this (and in releases
+# 1.8.0 through 1.8.10) a local diff incorrectly showed a copied dir's
+# properties as added, whereas it should show only the changes against the
+# copy-source.
+def diff_local_copied_dir(sbox):
+ "local WC diff of copied dir"
+
+ sbox.build()
+
+ was_cwd = os.getcwd()
+ os.chdir(sbox.wc_dir)
+ sbox.wc_dir = ''
+
+ try:
+ sbox.simple_propset('p1', 'v1', 'A/C')
+ sbox.simple_commit()
+
+ # dir with no prop changes
+ sbox.simple_copy('A/C', 'C2')
+ # dir with prop changes
+ sbox.simple_copy('A/C', 'C3')
+ sbox.simple_propset('p2', 'v2', 'C3')
+
+ expected_output_C2 = []
+ expected_output_C3 = [
+ 'Index: C3\n',
+ '===================================================================\n',
+ '--- C3 (revision 2)\n',
+ '+++ C3 (working copy)\n',
+ '\n',
+ 'Property changes on: C3\n',
+ '___________________________________________________________________\n',
+ 'Added: p2\n',
+ '## -0,0 +1 ##\n',
+ '+v2\n',
+ '\ No newline at end of property\n',
+ ]
+
+ svntest.actions.run_and_verify_svn(expected_output_C2, [],
+ 'diff', 'C2')
+ svntest.actions.run_and_verify_svn(expected_output_C3, [],
+ 'diff', 'C3')
+ finally:
+ os.chdir(was_cwd)
+
+
+def diff_summarize_ignore_properties(sbox):
+ "diff --summarize --ignore-properties"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make a property change and a content change to 'iota'
+ sbox.simple_propset('svn:eol-style', 'native', 'iota')
+ svntest.main.file_append(sbox.ospath('iota'), 'new text')
+
+ # Make a property change to 'A/mu'
+ sbox.simple_propset('svn:eol-style', 'native', 'A/mu')
+
+ # Make a content change to 'A/B/lambda'
+ svntest.main.file_append(sbox.ospath('A/B/lambda'), 'new text')
+
+ # Add a file.
+ svntest.main.file_write(sbox.ospath('new'), 'new text')
+ sbox.simple_add('new')
+
+ # Delete a file
+ sbox.simple_rm('A/B/E/alpha')
+
+ expected_diff = svntest.wc.State(wc_dir, {
+ 'iota': Item(status='M '),
+ 'new': Item(status='A '),
+ 'A/B/lambda': Item(status='M '),
+ 'A/B/E/alpha': Item(status='D '),
+ })
+ svntest.actions.run_and_verify_diff_summarize(expected_diff,
+ '--ignore-properties',
+ sbox.wc_dir)
+
+ # test with --xml, too
+ paths = ['iota', 'new', 'A/B/lambda', 'A/B/E/alpha']
+ items = ['modified', 'added', 'modified', 'deleted' ]
+ kinds = ['file','file', 'file', 'file']
+ props = ['none', 'none', 'none', 'none']
+ svntest.actions.run_and_verify_diff_summarize_xml(
+ [], wc_dir, paths, items, props, kinds, wc_dir, '--ignore-properties')
+
+def diff_incomplete(sbox):
+ "diff incomplete directory"
+
+ sbox.build()
+ svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.repo_url + '/A',
+ '-m', '')
+
+ # This works ok
+ _, out1a, _ = svntest.actions.run_and_verify_svn(None, [], 'diff',
+ '-r', 'HEAD',
+ sbox.wc_dir,
+ '--notice-ancestry')
+
+ _, out1b, _ = svntest.actions.run_and_verify_svn(None, [], 'diff',
+ sbox.wc_dir,
+ '--notice-ancestry')
+
+
+ svntest.main.run_wc_incomplete_tester(sbox.ospath('A'), 1)
+
+ # And this used to miss certain changes
+ _, out2a, _ = svntest.actions.run_and_verify_svn(None, [], 'diff',
+ '-r', 'HEAD',
+ sbox.wc_dir,
+ '--notice-ancestry')
+
+ _, out2b, _ = svntest.actions.run_and_verify_svn(None, [], 'diff',
+ sbox.wc_dir,
+ '--notice-ancestry')
+
+ # Ordering may be different, but length should match
+ if len(out1a) != len(out2a):
+ raise svntest.Failure('Different output when incomplete against repos')
+
+ svntest.verify.compare_and_display_lines('local diff', 'local diff', out1b,
+ out2b)
+
+ # And add a replacement on top of the incomplete, server side
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ sbox.repo_url + '/A/D/H@1',
+ sbox.repo_url + '/A', '-m', '')
+
+ svntest.actions.run_and_verify_svn(None, [], 'diff',
+ '-r', 'HEAD',
+ sbox.wc_dir,
+ '--notice-ancestry')
+
+ # And client side
+ svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.ospath('A'),
+ '--force')
+ sbox.simple_mkdir('A')
+ svntest.actions.run_and_verify_svn(None, [], 'diff',
+ '-r', 'HEAD',
+ sbox.wc_dir,
+ '--notice-ancestry')
+
+ svntest.actions.run_and_verify_svn(None, [], 'diff',
+ sbox.wc_dir,
+ '--notice-ancestry')
+
+def diff_incomplete_props(sbox):
+ "incomplete set of properties"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_propset('r2-1', 'r2', 'iota', 'A')
+ sbox.simple_propset('r2-2', 'r2', 'iota', 'A')
+ sbox.simple_propset('r', 'r2', 'iota', 'A')
+ sbox.simple_commit() # r2
+
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-U', sbox.repo_url,
+ 'propset', 'r3-1', 'r3', 'iota',
+ 'propset', 'r3-1', 'r3', 'A',
+ 'propset', 'r3-2', 'r3', 'iota',
+ 'propset', 'r3-2', 'r3', 'A',
+ 'propset', 'r', 'r3', 'iota',
+ 'propset', 'r', 'r3', 'A',
+ 'propdel', 'r2-1', 'iota',
+ 'propdel', 'r2-1', 'A',
+ 'propdel', 'r2-2', 'iota',
+ 'propdel', 'r2-2', 'A',
+ '-m', 'r3')
+
+ _, out1, _ = svntest.actions.run_and_verify_svn(None, [], 'diff',
+ '-r', 'HEAD', wc_dir,
+ '--notice-ancestry')
+
+ # Now simulate a broken update to r3
+ svntest.actions.set_incomplete(wc_dir, 3)
+ svntest.actions.set_incomplete(sbox.ospath('A'), 3)
+
+ # The properties are still at r2
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota', 'A', props={'r2-1':'r2', 'r2-2':'r2', 'r':'r2'})
+ svntest.actions.verify_disk(wc_dir, expected_disk, True)
+
+ # But the working copy is incomplete at r3
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', wc_rev=2)
+ expected_status.tweak('', 'A', wc_rev=3, status='! ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(status=' U'),
+ 'iota' : Item(status=' U'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+ expected_disk = svntest.main.greek_state.copy()
+
+ # Expect that iota and A have the expected sets of properties
+ # The r2 set is properly deleted where necessary
+ expected_disk.tweak('iota', 'A', props={'r3-2':'r3', 'r':'r3', 'r3-1':'r3'})
+
+ _, out2, _ = svntest.actions.run_and_verify_svn(None, [], 'diff',
+ '-r', 'HEAD', wc_dir,
+ '--notice-ancestry')
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output, expected_disk,
+ expected_status, [], True)
+
+ # Ok, we tested that the update worked properly, but we also do this
+ # in the update tests... Let's see what the diffs said
+
+ _, out3, _ = svntest.actions.run_and_verify_svn(None, [], 'diff',
+ '-r', 'BASE:2', wc_dir,
+ '--notice-ancestry')
+
+ # Filter out all headers (which include revisions, etc.)
+ out1 = [i for i in out1 if i[0].isupper()]
+ out1.sort()
+
+ out2 = [i for i in out2 if i[0].isupper()]
+ out2.sort()
+
+ out3 = [i for i in out3 if i[0].isupper()]
+ out3.sort()
+
+ svntest.verify.compare_and_display_lines('base vs incomplete', 'local diff',
+ out1, out2)
+
+ svntest.verify.compare_and_display_lines('base vs after', 'local diff',
+ out1, out3)
+
+def diff_symlinks(sbox):
+ "diff some symlinks"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_add_symlink('iota', 'to-iota')
+
+ svntest.actions.run_and_verify_svn([
+ 'Index: %s\n' % sbox.path('to-iota'),
+ '===================================================================\n',
+ '--- %s\t(nonexistent)\n' % sbox.path('to-iota'),
+ '+++ %s\t(working copy)\n' % sbox.path('to-iota'),
+ '@@ -0,0 +1 @@\n',
+ '+link iota\n',
+ '\ No newline at end of file\n',
+ '\n',
+ 'Property changes on: %s\n' % sbox.path('to-iota'),
+ '___________________________________________________________________\n',
+ 'Added: svn:special\n',
+ '## -0,0 +1 ##\n',
+ '+*\n',
+ '\ No newline at end of property\n',
+ ], [], 'diff', wc_dir)
+
+ svntest.actions.run_and_verify_svn([
+ 'Index: %s\n' % sbox.path('to-iota'),
+ '===================================================================\n',
+ 'diff --git a/to-iota b/to-iota\n',
+ 'new file mode 120644\n',
+ '--- a/to-iota\t(nonexistent)\n',
+ '+++ b/to-iota\t(working copy)\n',
+ '@@ -0,0 +1 @@\n',
+ '+iota\n',
+ '\ No newline at end of file\n',
+ '\n',
+ 'Property changes on: to-iota\n',
+ '___________________________________________________________________\n',
+ 'Added: svn:special\n',
+ '## -0,0 +1 ##\n',
+ '+*\n',
+ '\ No newline at end of property\n',
+ ], [], 'diff', wc_dir, '--git')
+
+ sbox.simple_commit()
+ os.remove(sbox.ospath('to-iota'))
+ sbox.simple_symlink('A/B/E/alpha', 'to-iota')
+
+ svntest.actions.run_and_verify_svn([
+ 'Index: %s\n' % sbox.path('to-iota'),
+ '===================================================================\n',
+ '--- %s\t(revision 2)\n' % sbox.path('to-iota'),
+ '+++ %s\t(working copy)\n' % sbox.path('to-iota'),
+ '@@ -1 +1 @@\n',
+ '-link iota\n',
+ '\ No newline at end of file\n',
+ '+link A/B/E/alpha\n',
+ '\ No newline at end of file\n',
+ ], [], 'diff', wc_dir)
+
+ svntest.actions.run_and_verify_svn([
+ 'Index: %s\n' % sbox.path('to-iota'),
+ '===================================================================\n',
+ 'diff --git a/to-iota b/to-iota\n',
+ 'index 3ef26e44..9930f9a0 120644\n',
+ '--- a/to-iota\t(revision 2)\n',
+ '+++ b/to-iota\t(working copy)\n',
+ '@@ -1 +1 @@\n',
+ '-iota\n',
+ '\ No newline at end of file\n',
+ '+A/B/E/alpha\n',
+ '\ No newline at end of file\n',
+ ], [], 'diff', wc_dir, '--git')
+
+
+@Issue(4597)
+def diff_peg_resolve(sbox):
+ "peg resolving during diff"
+
+ sbox.build()
+ repo_url = sbox.repo_url
+ wc_dir = sbox.wc_dir
+
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-U', repo_url, '-m', 'Q',
+ 'mkdir', 'branches',
+ 'cp', 1, 'A', 'branches/A1',
+ 'cp', 1, 'A', 'branches/A2',
+ 'rm', 'A')
+
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-U', repo_url, '-m', 'Q2',
+ 'rm', 'branches/A1')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'diff', repo_url + '/branches/A1@2',
+ sbox.wc_dir,
+ '--notice-ancestry')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'diff',
+ '--old=' + repo_url + '/branches/A1@2',
+ '--new=' + sbox.wc_dir,
+ '--git')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'diff',
+ '--old=' + repo_url + '/branches/A1@2',
+ '--new=' + repo_url + '/A@1',
+ '--git')
+
+ svntest.actions.run_and_verify_svn(None, '.*E160005: Target path.*A1',
+ 'diff',
+ repo_url + '/branches/A1',
+ wc_dir,
+ '--summarize')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'diff',
+ repo_url + '/branches/A2',
+ wc_dir)
+
+ svntest.actions.run_and_verify_svn(None, '.*E200009: .*mix.*',
+ 'diff',
+ repo_url + '/branches/A2',
+ wc_dir, '-r1:2')
+
+@XFail()
+@Issue(4706)
+def diff_unversioned_files_git(sbox):
+ "diff unversioned files in git format"
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ svntest.main.file_write(sbox.ospath('foo'), "foo\n")
+ svntest.main.file_write(sbox.ospath('A/bar'), "bar\n")
+ expected_output = make_diff_header("foo", "working copy", "working copy",
+ "foo", "A/bar") + [
+ "@@ -1 +1 @@\n",
+ "-foo\n",
+ "+bar\n"
+ ]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'diff', '--git',
+ '--old', sbox.ospath('foo'),
+ '--new', sbox.ospath('A/bar'))
+
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ diff_update_a_file,
+ diff_add_a_file,
+ diff_add_a_file_in_a_subdir,
+ diff_replace_a_file,
+ diff_multiple_reverse,
+ diff_non_recursive,
+ diff_repo_subset,
+ diff_non_version_controlled_file,
+ diff_pure_repository_update_a_file,
+ diff_only_property_change,
+ dont_diff_binary_file,
+ diff_nonextant_urls,
+ diff_head_of_moved_file,
+ diff_base_to_repos,
+ diff_deleted_in_head,
+ diff_targets,
+ diff_branches,
+ diff_repos_and_wc,
+ diff_file_urls,
+ diff_prop_change_local_edit,
+ check_for_omitted_prefix_in_path_component,
+ diff_renamed_file,
+ diff_within_renamed_dir,
+ diff_prop_on_named_dir,
+ diff_keywords,
+ diff_force,
+ diff_schedule_delete,
+ diff_renamed_dir,
+ diff_property_changes_to_base,
+ diff_mime_type_changes,
+ diff_prop_change_local_propmod,
+ diff_repos_wc_add_with_props,
+ diff_nonrecursive_checkout_deleted_dir,
+ diff_repos_working_added_dir,
+ diff_base_repos_moved,
+ diff_added_subtree,
+ basic_diff_summarize,
+ diff_weird_author,
+ diff_ignore_whitespace,
+ diff_ignore_eolstyle,
+ diff_in_renamed_folder,
+ diff_with_depth,
+ diff_ignore_eolstyle_empty_lines,
+ diff_backward_repos_wc_copy,
+ diff_summarize_xml,
+ diff_file_depth_empty,
+ diff_wrong_extension_type,
+ diff_external_diffcmd,
+ diff_url_against_local_mods,
+ diff_preexisting_rev_against_local_add,
+ diff_git_format_wc_wc,
+ diff_git_format_url_wc,
+ diff_git_format_url_url,
+ diff_prop_missing_context,
+ diff_prop_multiple_hunks,
+ diff_git_empty_files,
+ diff_git_with_props,
+ diff_git_with_props_on_dir,
+ diff_abs_localpath_from_wc_folder,
+ no_spurious_conflict,
+ diff_correct_wc_base_revnum,
+ diff_two_working_copies,
+ diff_deleted_url,
+ diff_arbitrary_files_and_dirs,
+ diff_properties_only,
+ diff_properties_no_newline,
+ diff_arbitrary_same,
+ diff_git_format_wc_wc_dir_mv,
+ simple_ancestry,
+ local_tree_replace,
+ diff_dir_replaced_by_file,
+ diff_dir_replaced_by_dir,
+ diff_repos_empty_file_addition,
+ diff_missing_tree_conflict_victim,
+ diff_local_missing_obstruction,
+ diff_move_inside_copy,
+ diff_repo_wc_copies,
+ diff_repo_wc_file_props,
+ diff_repo_repo_added_file_mime_type,
+ diff_switched_file,
+ diff_parent_dir,
+ diff_deleted_in_move_against_repos,
+ diff_replaced_moved,
+ diff_local_copied_dir,
+ diff_summarize_ignore_properties,
+ diff_incomplete,
+ diff_incomplete_props,
+ diff_symlinks,
+ diff_peg_resolve,
+ diff_unversioned_files_git,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/diff_tests_data/3449_spurious_v1 b/subversion/tests/cmdline/diff_tests_data/3449_spurious_v1
new file mode 100644
index 0000000..72323a8
--- /dev/null
+++ b/subversion/tests/cmdline/diff_tests_data/3449_spurious_v1
@@ -0,0 +1,31 @@
+0A
+0B
+0C
+
+Line A
+Line B
+ Line C
+ Line D
+ Line E
+ Line F
+ Line G
+ Line H
+Line F
+ Line I
+
+ Line J
+ Line K
+ Line F
+ Line L
+ Line M
+ Line H
+Line H
+
+Blah
+
+0U
+0V
+0W
+0X
+0Y
+0Z
diff --git a/subversion/tests/cmdline/diff_tests_data/3449_spurious_v2 b/subversion/tests/cmdline/diff_tests_data/3449_spurious_v2
new file mode 100644
index 0000000..ba5e3b7
--- /dev/null
+++ b/subversion/tests/cmdline/diff_tests_data/3449_spurious_v2
@@ -0,0 +1,32 @@
+0A
+0B
+0C
+
+Line A
+Line B
+ Line C
+ Line D
+ Line E
+ Line F
+ Line G
+ Line H
+Line F
+ Line I
+
+ Line J
+ Line K
+ Line F
+ Line L
+ Line M
+ Line H
+Line H
+
+Blah
+
+0U
+0V
+0W
+ New
+0X
+0Y
+0Z
diff --git a/subversion/tests/cmdline/diff_tests_data/3449_spurious_v3 b/subversion/tests/cmdline/diff_tests_data/3449_spurious_v3
new file mode 100644
index 0000000..b24d471
--- /dev/null
+++ b/subversion/tests/cmdline/diff_tests_data/3449_spurious_v3
@@ -0,0 +1,31 @@
+0A
+0B
+0C
+
+Line I
+Line J
+ Line A
+ Line B
+ Line C
+ Line D
+ Line E
+ Line F
+ Line G
+ Line H
+ Line F
+ Line K
+ Line H
+Line F
+ New3A
+ New3B
+Line H
+
+Blah
+
+0U
+0V
+0W
+ New
+0X
+0Y
+0Z
diff --git a/subversion/tests/cmdline/entries-dump.c b/subversion/tests/cmdline/entries-dump.c
new file mode 100644
index 0000000..469a2ff
--- /dev/null
+++ b/subversion/tests/cmdline/entries-dump.c
@@ -0,0 +1,408 @@
+/*
+ * entries-dump.c : dump pre-1.6 svn_wc_* output for python
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdlib.h>
+#include <stdio.h>
+
+#include <apr_pools.h>
+#include <apr_general.h>
+
+#define SVN_DEPRECATED
+
+#include "svn_types.h"
+#include "svn_cmdline.h"
+#include "svn_pools.h"
+#include "svn_wc.h"
+#include "svn_dirent_uri.h"
+
+#include "private/svn_wc_private.h"
+
+#include "../../libsvn_wc/wc.h"
+#include "../../libsvn_wc/lock.h"
+
+static void
+str_value(const char *name, const char *value)
+{
+ if (value == NULL)
+ printf("e.%s = None\n", name);
+ else
+ printf("e.%s = '%s'\n", name, value);
+}
+
+
+static void
+int_value(const char *name, long int value)
+{
+ printf("e.%s = %ld\n", name, value);
+}
+
+
+static void
+bool_value(const char *name, svn_boolean_t value)
+{
+ if (value)
+ printf("e.%s = True\n", name);
+ else
+ printf("e.%s = False\n", name);
+}
+
+static svn_error_t *
+entries_dump(const char *dir_path, svn_wc_adm_access_t *related, apr_pool_t *pool)
+{
+ svn_wc_adm_access_t *adm_access = NULL;
+ apr_hash_t *entries;
+ apr_hash_index_t *hi;
+ svn_boolean_t locked;
+ svn_error_t *err;
+ svn_wc_context_t *wc_ctx = NULL;
+ const char *dir_abspath;
+
+ SVN_ERR(svn_dirent_get_absolute(&dir_abspath, dir_path, pool));
+
+ err = svn_wc_adm_open3(&adm_access, related, dir_path, FALSE, 0,
+ NULL, NULL, pool);
+ if (!err)
+ {
+ SVN_ERR(svn_wc__context_create_with_db(&wc_ctx, NULL,
+ svn_wc__adm_get_db(adm_access),
+ pool));
+
+ SVN_ERR(svn_wc_locked2(NULL, &locked, wc_ctx, dir_abspath, pool));
+ SVN_ERR(svn_wc_entries_read(&entries, adm_access, TRUE, pool));
+ }
+ else if (err && err->apr_err == SVN_ERR_WC_LOCKED
+ && related
+ && ! strcmp(dir_path, svn_wc_adm_access_path(related)))
+ {
+ /* Common caller error: Can't open a baton when there is one. */
+ svn_error_clear(err);
+
+ SVN_ERR(svn_wc__context_create_with_db(&wc_ctx, NULL,
+ svn_wc__adm_get_db(related),
+ pool));
+
+ SVN_ERR(svn_wc_locked2(NULL, &locked, wc_ctx, dir_abspath, pool));
+ SVN_ERR(svn_wc_entries_read(&entries, related, TRUE, pool));
+ }
+ else
+ {
+ const char *lockfile_path;
+ svn_node_kind_t kind;
+
+ /* ### Should svn_wc_adm_open3 be returning UPGRADE_REQUIRED? */
+ if (err->apr_err != SVN_ERR_WC_NOT_DIRECTORY)
+ return err;
+ svn_error_clear(err);
+ adm_access = NULL;
+ SVN_ERR(svn_wc__read_entries_old(&entries, dir_abspath, pool, pool));
+ lockfile_path = svn_dirent_join_many(pool, dir_path,
+ svn_wc_get_adm_dir(pool),
+ "lock", SVN_VA_NULL);
+ SVN_ERR(svn_io_check_path(lockfile_path, &kind, pool));
+ locked = (kind == svn_node_file);
+ }
+
+ for (hi = apr_hash_first(pool, entries); hi; hi = apr_hash_next(hi))
+ {
+ const char *key = apr_hash_this_key(hi);
+ const svn_wc_entry_t *entry = apr_hash_this_val(hi);
+
+ SVN_ERR_ASSERT(strcmp(key, entry->name) == 0);
+
+ printf("e = Entry()\n");
+ str_value("name", entry->name);
+ int_value("revision", entry->revision);
+ str_value("url", entry->url);
+ str_value("repos", entry->repos);
+ str_value("uuid", entry->uuid);
+ int_value("kind", entry->kind);
+ int_value("schedule", entry->schedule);
+ bool_value("copied", entry->copied);
+ bool_value("deleted", entry->deleted);
+ bool_value("absent", entry->absent);
+ bool_value("incomplete", entry->incomplete);
+ str_value("copyfrom_url", entry->copyfrom_url);
+ int_value("copyfrom_rev", entry->copyfrom_rev);
+ str_value("conflict_old", entry->conflict_old);
+ str_value("conflict_new", entry->conflict_new);
+ str_value("conflict_wrk", entry->conflict_wrk);
+ str_value("prejfile", entry->prejfile);
+ /* skip: text_time */
+ /* skip: prop_time */
+ /* skip: checksum */
+ int_value("cmt_rev", entry->cmt_rev);
+ /* skip: cmt_date */
+ str_value("cmt_author", entry->cmt_author);
+ str_value("lock_token", entry->lock_token);
+ str_value("lock_owner", entry->lock_owner);
+ str_value("lock_comment", entry->lock_comment);
+ /* skip: lock_creation_date */
+ /* skip: has_props */
+ /* skip: has_prop_mods */
+ /* skip: cachable_props */
+ /* skip: present_props */
+ str_value("changelist", entry->changelist);
+ /* skip: working_size */
+ /* skip: keep_local */
+ int_value("depth", entry->depth);
+ /* skip: tree_conflict_data */
+ bool_value("file_external", entry->file_external_path != NULL);
+ /* skip: file_external_peg_rev */
+ /* skip: file_external_rev */
+ bool_value("locked", locked && *entry->name == '\0');
+ printf("entries['%s'] = e\n", (const char *)key);
+ }
+
+ if (wc_ctx)
+ SVN_ERR(svn_wc_context_destroy(wc_ctx));
+
+ if (adm_access)
+ SVN_ERR(svn_wc_adm_close2(adm_access, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+/* baton for print_dir */
+struct directory_walk_baton
+{
+ svn_wc_context_t *wc_ctx;
+ const char *root_abspath;
+ const char *prefix_path;
+ svn_wc_adm_access_t *adm_access;
+};
+
+/* svn_wc__node_found_func_t implementation for directory_dump */
+static svn_error_t *
+print_dir(const char *local_abspath,
+ svn_node_kind_t kind,
+ void *walk_baton,
+ apr_pool_t *scratch_pool)
+{
+ struct directory_walk_baton *bt = walk_baton;
+ const char *path;
+
+ if (kind != svn_node_dir)
+ return SVN_NO_ERROR;
+
+ /* If LOCAL_ABSPATH is a child of or equal to ROOT_ABSPATH, then display
+ a relative path starting with PREFIX_PATH. */
+ path = svn_dirent_skip_ancestor(bt->root_abspath, local_abspath);
+ if (path)
+ path = svn_dirent_join(bt->prefix_path, path, scratch_pool);
+ else
+ path = local_abspath;
+ printf("%s\n", svn_dirent_local_style(path, scratch_pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+directory_dump_old(struct directory_walk_baton *bt,
+ const char *dir_abspath,
+ apr_pool_t *scratch_pool)
+{
+ apr_hash_t *entries;
+ apr_hash_index_t *hi;
+
+ SVN_ERR(svn_wc__read_entries_old(&entries, dir_abspath,
+ scratch_pool, scratch_pool));
+ for (hi = apr_hash_first(scratch_pool, entries); hi; hi = apr_hash_next(hi))
+ {
+ const svn_wc_entry_t *entry = apr_hash_this_val(hi);
+ const char *local_abspath;
+
+ if (entry->deleted || entry->absent || entry->kind != svn_node_dir)
+ continue;
+
+ local_abspath = svn_dirent_join(dir_abspath, entry->name, scratch_pool);
+ if (strcmp(entry->name, SVN_WC_ENTRY_THIS_DIR))
+ SVN_ERR(directory_dump_old(bt, local_abspath, scratch_pool));
+ else
+ SVN_ERR(print_dir(local_abspath, entry->kind, bt, scratch_pool));
+ }
+ return SVN_NO_ERROR;
+}
+
+/* Print all not-hidden subdirectories in the working copy, starting at PATH */
+static svn_error_t *
+directory_dump(const char *path,
+ apr_pool_t *scratch_pool)
+{
+ struct directory_walk_baton bt;
+ svn_error_t *err;
+
+ SVN_ERR(svn_wc_context_create(&bt.wc_ctx, NULL, scratch_pool, scratch_pool));
+ SVN_ERR(svn_dirent_get_absolute(&bt.root_abspath, path, scratch_pool));
+
+ bt.prefix_path = path;
+
+ err = svn_wc__internal_walk_children(bt.wc_ctx->db, bt.root_abspath, FALSE,
+ NULL, print_dir, &bt, svn_depth_infinity,
+ NULL, NULL, scratch_pool);
+ if (err)
+ {
+ const char *dir_abspath;
+
+ if (err->apr_err != SVN_ERR_WC_UPGRADE_REQUIRED)
+ return err;
+ svn_error_clear(err);
+ SVN_ERR(svn_dirent_get_absolute(&dir_abspath, path, scratch_pool));
+ SVN_ERR(directory_dump_old(&bt, dir_abspath, scratch_pool));
+ }
+
+ return svn_error_trace(svn_wc_context_destroy(bt.wc_ctx));
+}
+
+static svn_error_t *
+tree_dump_dir(const char *local_abspath,
+ svn_node_kind_t kind,
+ void *walk_baton,
+ apr_pool_t *scratch_pool)
+{
+ struct directory_walk_baton *bt = walk_baton;
+ const char *path;
+
+ if (kind != svn_node_dir)
+ return SVN_NO_ERROR;
+
+ if (strcmp(local_abspath, bt->root_abspath) != 0)
+ {
+ svn_boolean_t is_wcroot;
+ SVN_ERR(svn_wc__db_is_wcroot(&is_wcroot, bt->wc_ctx->db,
+ local_abspath, scratch_pool));
+
+ if (is_wcroot)
+ return SVN_NO_ERROR; /* Report the stub, but not the data */
+ }
+
+ /* If LOCAL_ABSPATH is a child of or equal to ROOT_ABSPATH, then display
+ a relative path starting with PREFIX_PATH. */
+ path = svn_dirent_skip_ancestor(bt->root_abspath, local_abspath);
+ if (path)
+ path = svn_dirent_join(bt->prefix_path, path, scratch_pool);
+ else
+ path = local_abspath;
+
+ printf("entries = {}\n");
+ SVN_ERR(entries_dump(path, bt->adm_access, scratch_pool));
+
+ printf("dirs['%s'] = entries\n", path);
+ return SVN_NO_ERROR;
+
+}
+
+static svn_error_t *
+tree_dump(const char *path,
+ apr_pool_t *scratch_pool)
+{
+ struct directory_walk_baton bt;
+ svn_sqlite__db_t *sdb;
+ svn_wc__db_t *db;
+
+ bt.prefix_path = path;
+
+ /* Obtain an access baton to allow re-using the same wc_db for all access */
+ SVN_ERR(svn_wc_adm_open3(&bt.adm_access, NULL, path, FALSE, 0, NULL, NULL,
+ scratch_pool));
+
+ db = svn_wc__adm_get_db(bt.adm_access);
+
+ SVN_ERR(svn_wc__context_create_with_db(&bt.wc_ctx, NULL, db, scratch_pool));
+
+ SVN_ERR(svn_dirent_get_absolute(&bt.root_abspath, path, scratch_pool));
+
+ /* And now get us a transaction on the database to avoid obtaining and
+ releasing locks all the time */
+ SVN_ERR(svn_wc__db_temp_borrow_sdb(&sdb, bt.wc_ctx->db, bt.root_abspath,
+ scratch_pool));
+
+ SVN_SQLITE__WITH_LOCK(
+ svn_wc__internal_walk_children(db, bt.root_abspath, FALSE,
+ NULL, tree_dump_dir, &bt,
+ svn_depth_infinity,
+ NULL, NULL, scratch_pool),
+ sdb);
+
+ /* And close everything we've opened */
+ SVN_ERR(svn_wc_context_destroy(bt.wc_ctx));
+ SVN_ERR(svn_wc_adm_close2(bt.adm_access, scratch_pool));
+
+ return SVN_NO_ERROR;
+}
+
+int
+main(int argc, const char *argv[])
+{
+ apr_pool_t *pool;
+ int exit_code = EXIT_SUCCESS;
+ svn_error_t *err;
+ const char *path;
+ const char *cmd;
+
+ if (argc < 2 || argc > 4)
+ {
+ fprintf(stderr, "USAGE: entries-dump [--entries|--subdirs|--tree-dump] DIR_PATH\n");
+ exit(1);
+ }
+
+ if (svn_cmdline_init("entries-dump", stderr) != EXIT_SUCCESS)
+ {
+ return EXIT_FAILURE;
+ }
+
+ /* Create our top-level pool. Use a separate mutexless allocator,
+ * given this application is single threaded.
+ */
+ pool = apr_allocator_owner_get(svn_pool_create_allocator(FALSE));
+
+ path = svn_dirent_internal_style(argv[argc-1], pool);
+
+ if (argc > 2)
+ cmd = argv[1];
+ else
+ cmd = NULL;
+
+ if (!cmd || !strcmp(cmd, "--entries"))
+ err = entries_dump(path, NULL, pool);
+ else if (!strcmp(cmd, "--subdirs"))
+ err = directory_dump(path, pool);
+ else if (!strcmp(cmd, "--tree-dump"))
+ err = tree_dump(path, pool);
+ else
+ err = svn_error_createf(SVN_ERR_INCORRECT_PARAMS, NULL,
+ "Invalid command '%s'",
+ cmd);
+ if (err)
+ {
+ svn_handle_error2(err, stderr, FALSE, "entries-dump: ");
+ svn_error_clear(err);
+ exit_code = EXIT_FAILURE;
+ }
+
+ /* Clean up, and get outta here */
+ svn_pool_destroy(pool);
+ apr_terminate();
+
+ return exit_code;
+}
diff --git a/subversion/tests/cmdline/entries_tests.py b/subversion/tests/cmdline/entries_tests.py
new file mode 100755
index 0000000..0fcdc1a
--- /dev/null
+++ b/subversion/tests/cmdline/entries_tests.py
@@ -0,0 +1,271 @@
+#!/usr/bin/env python
+#
+# entries_tests.py: test the old entries API using entries-dump
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+#
+# This test series is to validate the old entries API using the entries-dump
+# tool to see what the API reports. In particular, this test is designed to
+# try and exercise all "extraordinary" code paths in the read_entries()
+# function in libsvn_wc/entries.c. Much of that function is exercised by
+# the regular test suite and its secondary "status" via entries-dump. This
+# test tries to pick up the straggly little edge cases.
+#
+
+import os, logging
+
+logger = logging.getLogger()
+
+import svntest
+
+Item = svntest.wc.StateItem
+
+
+SCHEDULE_NORMAL = 0
+SCHEDULE_ADD = 1
+SCHEDULE_DELETE = 2
+SCHEDULE_REPLACE = 3
+
+
+def validate(entry, **kw):
+ for key, value in kw.items():
+ if getattr(entry, key) != value:
+ logger.warn("Entry '%s' has an incorrect value for .%s", entry.name, key)
+ logger.warn(" Expected: %s", value)
+ logger.warn(" Actual: %s", getattr(entry, key))
+ raise svntest.Failure
+
+
+def check_names(entries, *names):
+ if entries is None:
+ logger.warn('entries-dump probably exited with a failure.')
+ raise svntest.Failure
+ have = set(entries.keys())
+ want = set(names)
+ missing = want - have
+ if missing:
+ logger.warn("Entry name(s) not found: %s",
+ ', '.join("'%s'" % name for name in missing))
+ raise svntest.Failure
+
+
+def basic_entries(sbox):
+  "basic entries behavior"
+
+  # Exercise add, delete, replace, and copy scheduling, then verify what
+  # the old entries API reports for each case via entries-dump.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  alpha_path = os.path.join(wc_dir, 'A', 'B', 'E', 'alpha')
+  beta_path = os.path.join(wc_dir, 'A', 'B', 'E', 'beta')
+  added_path = os.path.join(wc_dir, 'A', 'B', 'E', 'added')
+  G_path = os.path.join(wc_dir, 'A', 'D', 'G')
+  G2_path = os.path.join(wc_dir, 'A', 'D', 'G2')
+  iota_path = os.path.join(wc_dir, 'iota')
+  iota2_path = os.path.join(wc_dir, 'A', 'B', 'E', 'iota2')
+
+  # Remove 'alpha'. When it is committed, it will be marked DELETED.
+  svntest.actions.run_and_verify_svn(None, [], 'rm', alpha_path)
+
+  # Tweak 'beta' in order to bump its revision to ensure the replacement
+  # gets the new revision (2), not the value from the parent (1).
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'ps', 'random-prop', 'propvalue',
+                                     beta_path)
+
+  # Commit r2: alpha is deleted, beta moves to r2.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/B/E/alpha' : Item(verb='Deleting'),
+    'A/B/E/beta' : Item(verb='Sending'),
+    })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.remove('A/B/E/alpha')
+  expected_status.tweak('A/B/E/beta', wc_rev=2)
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output, expected_status,
+                                        [],
+                                        alpha_path, beta_path)
+
+  # bump 'G' and iota another revision (3) for later testing
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'ps', 'random-prop', 'propvalue',
+                                     G_path, iota_path)
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/D/G' : Item(verb='Sending'),
+    'iota' : Item(verb='Sending'),
+    })
+  expected_status.tweak('A/D/G', 'iota', wc_rev=3)
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output, expected_status,
+                                        [],
+                                        G_path, iota_path)
+
+  # Add a file over the DELETED 'alpha'. It should be schedule-add.
+  open(alpha_path, 'w').write('New alpha contents\n')
+
+  # Delete 'beta', then add a file over it. Should be schedule-replace.
+  svntest.actions.run_and_verify_svn(None, [], 'rm', beta_path)
+  open(beta_path, 'w').write('New beta contents\n')
+
+  # Plain old add. Should have revision == 0.
+  open(added_path, 'w').write('Added file contents\n')
+
+  svntest.actions.run_and_verify_svn(None, [], 'add',
+                                     alpha_path, beta_path, added_path)
+
+  svntest.actions.run_and_verify_svn(None, [], 'cp',
+                                     iota_path, iota2_path)
+
+  # Dump the entries of A/B/E and check each scheduling outcome.
+  entries = svntest.main.run_entriesdump(os.path.join(wc_dir, 'A', 'B', 'E'))
+  check_names(entries, 'alpha', 'beta', 'added', 'iota2')
+
+  # plain add should be rev=0. over a DELETED, should be SCHEDULE_ADD
+  validate(entries['alpha'], schedule=SCHEDULE_ADD, revision=0, copied=False)
+
+  # should pick up the BASE node's revision
+  validate(entries['beta'], schedule=SCHEDULE_REPLACE, revision=2,
+           copied=False)
+
+  # plain add should be rev=0
+  validate(entries['added'], schedule=SCHEDULE_ADD, revision=0, copied=False)
+
+  # copyfrom_rev is (3), but we inherit the rev from the parent (1)
+  validate(entries['iota2'], schedule=SCHEDULE_ADD, revision=1, copied=True,
+           copyfrom_rev=3)
+
+  # Copy the (mixed-revision) directory A/D/G to A/D/G2.
+  svntest.actions.run_and_verify_svn(None, [], 'cp', G_path, G2_path)
+
+  entries = svntest.main.run_entriesdump(G2_path)
+  check_names(entries, 'pi', 'rho', 'tau')
+
+  # added, but revision should match the copyfrom_rev (directories don't
+  # inherit a revision like iota2 did above)
+  validate(entries[''], schedule=SCHEDULE_ADD, copied=True, revision=3)
+
+  # children should be SCHEDULE_NORMAL; still rev=1 because the copy
+  # source was mixed-revision.
+  validate(entries['pi'], schedule=SCHEDULE_NORMAL, copied=True, revision=1)
+
+
+def obstructed_entries(sbox):
+  "validate entries when obstructions exist"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  D_path = os.path.join(wc_dir, 'A', 'D')
+  H_path = os.path.join(wc_dir, 'A', 'D', 'H')
+
+  # Sanity check: before any damage, 'H' is present at revision 1.
+  entries = svntest.main.run_entriesdump(D_path)
+  check_names(entries, 'H')
+  validate(entries['H'], revision=1)
+
+  # Blast the directory from disk. (In the pre-single-db working copy
+  # format this presumably made the revision SVN_INVALID_REVNUM; with
+  # single-db the metadata survives the obstruction -- see below.)
+  svntest.main.safe_rmtree(H_path)
+
+  entries = svntest.main.run_entriesdump(D_path)
+  check_names(entries, 'H')
+
+  # Data is not missing in single-db, so the revision is unchanged.
+  validate(entries['H'], revision=1)
+
+  ### need to get svn_wc__db_read_info() to generate obstructed_add
+
+
+def deletion_details(sbox):
+  "various details about deleted nodes"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  iota_path = os.path.join(wc_dir, 'iota')
+  D_path = os.path.join(wc_dir, 'A', 'D')
+  D2_path = os.path.join(wc_dir, 'A', 'D2')
+  D2_G_path = os.path.join(wc_dir, 'A', 'D2', 'G')
+  E_path = os.path.join(wc_dir, 'A', 'B', 'E')
+  H_path = os.path.join(wc_dir, 'A', 'D', 'H')
+
+  # Capture iota's entry before deletion so we can compare afterwards.
+  entries = svntest.main.run_entriesdump(wc_dir)
+  check_names(entries, 'iota')
+  iota = entries['iota']
+
+  # blast iota, then verify the now-deleted entry still contains much of
+  # the same information.
+  svntest.actions.run_and_verify_svn(None, [], 'rm', iota_path)
+  entries = svntest.main.run_entriesdump(wc_dir)
+  check_names(entries, 'iota')
+  validate(entries['iota'], revision=iota.revision,
+           cmt_rev=iota.cmt_rev, cmt_author=iota.cmt_author)
+
+  # even deleted nodes have a URL
+  validate(entries['iota'], url='%s/iota' % sbox.repo_url)
+
+  # Copy A/D to A/D2, then delete the copied child A/D2/G.
+  svntest.actions.run_and_verify_svn(None, [], 'cp', D_path, D2_path)
+  svntest.actions.run_and_verify_svn(None, [], 'rm', D2_G_path)
+
+  entries = svntest.main.run_entriesdump(D2_path)
+  check_names(entries, 'gamma', 'G')
+
+  # copied nodes have URLs
+  validate(entries['gamma'], url='%s/A/D2/gamma' % sbox.repo_url,
+           copied=True, schedule=SCHEDULE_NORMAL)
+
+  entries = svntest.main.run_entriesdump(D2_G_path)
+  check_names(entries, 'pi')
+
+  # oh, and this sucker has a URL, too
+  validate(entries['pi'], url='%s/A/D2/G/pi' % sbox.repo_url,
+           copied=True, schedule=SCHEDULE_DELETE)
+
+  ### hmm. somehow, subtrees can be *added* over a *deleted* subtree.
+  ### maybe this can happen via 'svn merge' ? ... the operations below
+  ### will fail because E_path is scheduled for deletion, disallowing
+  ### any new node to sit on top of it. (tho it *should* allow it...)
+
+  ### for now... this test case is done. just return
+  # NOTE: everything past this return is intentionally disabled -- see
+  # the ### comments above for why it cannot run yet.
+  return
+
+  svntest.actions.run_and_verify_svn(None, [], 'rm', E_path)
+  svntest.actions.run_and_verify_svn(None, [], 'cp', H_path, E_path)
+
+  entries = svntest.main.run_entriesdump(E_path)
+  check_names(entries, 'chi', 'omega', 'psi', 'alpha', 'beta')
+
+  validate(entries['alpha'], schedule=SCHEDULE_DELETE)
+  validate(entries['chi'], schedule=SCHEDULE_NORMAL, copied=True)
+
+
+
+########################################################################
+# Run the tests
+
+# list all tests here, starting with None:
+# (the leading None placeholder presumably keeps test numbering
+# 1-based for the svntest harness -- see svntest.main)
+test_list = [ None,
+              basic_entries,
+              obstructed_entries,
+              deletion_details,
+             ]
+
+
+if __name__ == '__main__':
+  svntest.main.run_tests(test_list)
+  # NOTREACHED (run_tests() is expected not to return)
diff --git a/subversion/tests/cmdline/export_tests.py b/subversion/tests/cmdline/export_tests.py
new file mode 100755
index 0000000..a8ac7b8
--- /dev/null
+++ b/subversion/tests/cmdline/export_tests.py
@@ -0,0 +1,1168 @@
+#!/usr/bin/env python
+#
+# export_tests.py: testing export cases.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os
+import tempfile
+
+# Our testing module
+import svntest
+
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+
+#----------------------------------------------------------------------
+
+def export_empty_directory(sbox):
+ "export an empty directory"
+ sbox.build(create_wc = False, read_only = True)
+
+ svntest.main.safe_rmtree(sbox.wc_dir)
+ export_target = sbox.wc_dir
+ empty_dir_url = sbox.repo_url + '/A/C'
+ svntest.main.run_svn(None, 'export', empty_dir_url, export_target)
+ if not os.path.exists(export_target):
+ raise svntest.Failure
+
+def export_greek_tree(sbox):
+ "export the greek tree"
+ sbox.build(create_wc = False, read_only = True)
+
+ svntest.main.safe_rmtree(sbox.wc_dir)
+ export_target = sbox.wc_dir
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.wc_dir = sbox.wc_dir
+ expected_output.desc[''] = Item()
+ expected_output.tweak(contents=None, status='A ')
+
+ svntest.actions.run_and_verify_export(sbox.repo_url,
+ export_target,
+ expected_output,
+ svntest.main.greek_state.copy())
+
+def export_nonexistent_url(sbox):
+ "attempt to export a nonexistent URL"
+ sbox.build(create_wc = False, read_only = True)
+
+ svntest.main.safe_rmtree(sbox.wc_dir)
+ export_target = os.path.join(sbox.wc_dir, 'nonexistent')
+ nonexistent_url = sbox.repo_url + "/nonexistent"
+ svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
+ 'export', nonexistent_url, export_target)
+
+def export_working_copy(sbox):
+ "export working copy"
+ sbox.build(read_only = True)
+
+ export_target = sbox.add_wc_path('export')
+ expected_output = svntest.wc.State(export_target, {
+ 'A' : Item(status='A '),
+ 'A/B' : Item(status='A '),
+ 'A/B/E' : Item(status='A '),
+ 'A/B/E/alpha' : Item(status='A '),
+ 'A/B/E/beta' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ 'A/B/lambda' : Item(status='A '),
+ 'A/D' : Item(status='A '),
+ 'A/D/G' : Item(status='A '),
+ 'A/D/G/rho' : Item(status='A '),
+ 'A/D/G/pi' : Item(status='A '),
+ 'A/D/G/tau' : Item(status='A '),
+ 'A/D/H' : Item(status='A '),
+ 'A/D/H/chi' : Item(status='A '),
+ 'A/D/H/psi' : Item(status='A '),
+ 'A/D/H/omega' : Item(status='A '),
+ 'A/D/gamma' : Item(status='A '),
+ 'A/mu' : Item(status='A '),
+ 'A/C' : Item(status='A '),
+ 'iota' : Item(status='A '),
+ })
+
+ svntest.actions.run_and_verify_export(sbox.wc_dir,
+ export_target,
+ expected_output,
+ svntest.main.greek_state.copy())
+
+def export_working_copy_with_mods(sbox):
+ "export working copy with mods"
+ sbox.build(read_only = True)
+
+ wc_dir = sbox.wc_dir
+
+ # Make a couple of local mods to files
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+ rho_path = os.path.join(wc_dir, 'A', 'D', 'G', 'rho')
+ kappa_path = os.path.join(wc_dir, 'kappa')
+ gamma_path = os.path.join(wc_dir, 'A', 'D', 'gamma')
+ E_path = os.path.join(wc_dir, 'A', 'B', 'E')
+
+ svntest.main.file_append(mu_path, 'appended mu text')
+ svntest.main.file_append(rho_path, 'new appended text for rho')
+
+ svntest.main.file_append(kappa_path, "This is the file 'kappa'.")
+ svntest.main.run_svn(None, 'add', kappa_path)
+ svntest.main.run_svn(None, 'rm', E_path, gamma_path)
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu',
+ contents=expected_disk.desc['A/mu'].contents
+ + 'appended mu text')
+ expected_disk.tweak('A/D/G/rho',
+ contents=expected_disk.desc['A/D/G/rho'].contents
+ + 'new appended text for rho')
+ expected_disk.add({'kappa' : Item("This is the file 'kappa'.")})
+ expected_disk.remove('A/B/E/alpha', 'A/B/E/beta', 'A/B/E', 'A/D/gamma')
+
+ export_target = sbox.add_wc_path('export')
+
+ expected_output = svntest.wc.State(export_target, {
+ 'A' : Item(status='A '),
+ 'A/D' : Item(status='A '),
+ 'A/D/H' : Item(status='A '),
+ 'A/D/H/chi' : Item(status='A '),
+ 'A/D/H/omega' : Item(status='A '),
+ 'A/D/H/psi' : Item(status='A '),
+ 'A/D/G' : Item(status='A '),
+ 'A/D/G/rho' : Item(status='A '),
+ 'A/D/G/pi' : Item(status='A '),
+ 'A/D/G/tau' : Item(status='A '),
+ 'A/mu' : Item(status='A '),
+ 'A/B' : Item(status='A '),
+ #'A/B/E' : Item(status='A '), # Used to be reported as added
+ 'A/B/lambda' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ 'A/C' : Item(status='A '),
+ 'iota' : Item(status='A '),
+ 'kappa' : Item(status='A '),
+ })
+
+ svntest.actions.run_and_verify_export(sbox.wc_dir,
+ export_target,
+ expected_output,
+ expected_disk)
+
+def export_over_existing_dir(sbox):
+ "export over existing dir"
+ sbox.build(read_only = True)
+
+ export_target = sbox.add_wc_path('export')
+
+ # Create the target directory which should cause
+ # the export operation to fail.
+ os.mkdir(export_target)
+
+ svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
+ 'export', sbox.wc_dir, export_target)
+
+ # As an extra precaution, make sure export_target doesn't have
+ # anything in it.
+ if len(os.listdir(export_target)):
+ raise svntest.Failure("Unexpected files/directories in " + export_target)
+
+def export_keyword_translation(sbox):
+ "export with keyword translation"
+ sbox.build()
+
+ wc_dir = sbox.wc_dir
+
+ # Add a keyword to A/mu and set the svn:keywords property
+ # appropriately to make sure it's translated during
+ # the export operation
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+ svntest.main.file_append(mu_path, '$LastChangedRevision$')
+ svntest.main.run_svn(None, 'ps', 'svn:keywords',
+ 'LastChangedRevision', mu_path)
+ svntest.main.run_svn(None, 'ci',
+ '-m', 'Added keyword to mu', mu_path)
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu',
+ contents=expected_disk.desc['A/mu'].contents +
+ '$LastChangedRevision: 2 $')
+
+ export_target = sbox.add_wc_path('export')
+
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.wc_dir = export_target
+ expected_output.desc[''] = Item()
+ expected_output.tweak(contents=None, status='A ')
+
+ svntest.actions.run_and_verify_export(sbox.repo_url,
+ export_target,
+ expected_output,
+ expected_disk)
+
+def export_eol_translation(sbox):
+ "export with eol translation"
+ sbox.build()
+
+ wc_dir = sbox.wc_dir
+
+ # Set svn:eol-style to 'CR' to see if it's applied correctly in the
+ # export operation
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+ svntest.main.run_svn(None, 'ps', 'svn:eol-style',
+ 'CR', mu_path)
+ svntest.main.run_svn(None, 'ci',
+ '-m', 'Added eol-style prop to mu', mu_path)
+
+ expected_disk = svntest.main.greek_state.copy()
+ new_contents = expected_disk.desc['A/mu'].contents.replace("\n", "\r")
+ expected_disk.tweak('A/mu', contents=new_contents)
+
+ export_target = sbox.add_wc_path('export')
+
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.wc_dir = export_target
+ expected_output.desc[''] = Item()
+ expected_output.tweak(contents=None, status='A ')
+
+ svntest.actions.run_and_verify_export2(sbox.repo_url,
+ export_target,
+ expected_output,
+ expected_disk,
+ keep_eol_style=True)
+
+def export_working_copy_with_keyword_translation(sbox):
+ "export working copy with keyword translation"
+ sbox.build(read_only = True)
+
+ wc_dir = sbox.wc_dir
+
+ # Add a keyword to A/mu and set the svn:keywords property
+ # appropriately to make sure it's translated during
+ # the export operation
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+ svntest.main.file_append(mu_path, '$LastChangedRevision$')
+ svntest.main.run_svn(None, 'ps', 'svn:keywords',
+ 'LastChangedRevision', mu_path)
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu',
+ contents=expected_disk.desc['A/mu'].contents +
+ '$LastChangedRevision: 1M $')
+
+ export_target = sbox.add_wc_path('export')
+
+ expected_output = svntest.wc.State(export_target, {
+ 'A' : Item(status='A '),
+ 'A/B' : Item(status='A '),
+ 'A/B/E' : Item(status='A '),
+ 'A/B/E/alpha' : Item(status='A '),
+ 'A/B/E/beta' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ 'A/B/lambda' : Item(status='A '),
+ 'A/D' : Item(status='A '),
+ 'A/D/G' : Item(status='A '),
+ 'A/D/G/pi' : Item(status='A '),
+ 'A/D/G/rho' : Item(status='A '),
+ 'A/D/G/tau' : Item(status='A '),
+ 'A/D/H' : Item(status='A '),
+ 'A/D/H/psi' : Item(status='A '),
+ 'A/D/H/chi' : Item(status='A '),
+ 'A/D/H/omega' : Item(status='A '),
+ 'A/D/gamma' : Item(status='A '),
+ 'A/C' : Item(status='A '),
+ 'A/mu' : Item(status='A '),
+ 'iota' : Item(status='A '),
+ })
+
+ svntest.actions.run_and_verify_export(wc_dir,
+ export_target,
+ expected_output,
+ expected_disk)
+
+def export_working_copy_with_property_mods(sbox):
+ "export working copy with property mods"
+ sbox.build(read_only = True)
+
+ wc_dir = sbox.wc_dir
+
+ # Make a local property mod to A/mu
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+ svntest.main.run_svn(None, 'ps', 'svn:eol-style',
+ 'CR', mu_path)
+
+ expected_disk = svntest.main.greek_state.copy()
+ new_contents = expected_disk.desc['A/mu'].contents.replace("\n", "\r")
+ expected_disk.tweak('A/mu', contents=new_contents)
+
+ export_target = sbox.add_wc_path('export')
+
+ expected_output = svntest.wc.State(export_target, {
+ 'A' : Item(status='A '),
+ 'A/B' : Item(status='A '),
+ 'A/B/E' : Item(status='A '),
+ 'A/B/E/alpha' : Item(status='A '),
+ 'A/B/E/beta' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ 'A/B/lambda' : Item(status='A '),
+ 'A/D' : Item(status='A '),
+ 'A/D/G' : Item(status='A '),
+ 'A/D/G/pi' : Item(status='A '),
+ 'A/D/G/rho' : Item(status='A '),
+ 'A/D/G/tau' : Item(status='A '),
+ 'A/D/H' : Item(status='A '),
+ 'A/D/H/psi' : Item(status='A '),
+ 'A/D/H/chi' : Item(status='A '),
+ 'A/D/H/omega' : Item(status='A '),
+ 'A/D/gamma' : Item(status='A '),
+ 'A/C' : Item(status='A '),
+ 'A/mu' : Item(status='A '),
+ 'iota' : Item(status='A '),
+ })
+
+ svntest.actions.run_and_verify_export2(wc_dir,
+ export_target,
+ expected_output,
+ expected_disk,
+ keep_eol_style=True)
+
+@XFail()
+@Issue(3798)
+def export_working_copy_at_base_revision(sbox):
+ "export working copy at base revision"
+ sbox.build(read_only = True)
+
+ wc_dir = sbox.wc_dir
+
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+ C_path = os.path.join(wc_dir, 'A', 'C')
+ kappa_path = os.path.join(wc_dir, 'kappa')
+ K_path = os.path.join(wc_dir, 'K')
+ gamma_path = os.path.join(wc_dir, 'A', 'D', 'gamma')
+ E_path = os.path.join(wc_dir, 'A', 'B', 'E')
+ rho_path = os.path.join(wc_dir, 'A', 'D', 'G', 'rho')
+ H_path = os.path.join(wc_dir, 'A', 'D', 'H')
+ phi_path = os.path.join(wc_dir, 'A', 'D', 'H', 'phi')
+ chi_path = os.path.join(wc_dir, 'A', 'D', 'H', 'chi')
+
+ # Make some local modifications: modify mu and C, add kappa and K, delete
+ # gamma and E, and replace rho and H.
+ # These modifications should *not* get exported at the base revision.
+ svntest.main.file_append(mu_path, 'Appended text')
+ svntest.main.run_svn(None, 'propset', 'p', 'v', mu_path, C_path)
+ svntest.main.file_append(kappa_path, "This is the file 'kappa'.")
+ svntest.main.run_svn(None, 'add', kappa_path)
+ svntest.main.run_svn(None, 'mkdir', K_path)
+ svntest.main.run_svn(None, 'rm', E_path, gamma_path)
+ svntest.main.run_svn(None, 'rm', rho_path)
+ svntest.main.file_append(rho_path, "Replacement file 'rho'.")
+ svntest.main.run_svn(None, 'add', rho_path)
+ svntest.main.run_svn(None, 'rm', H_path)
+ svntest.main.run_svn(None, 'mkdir', H_path)
+ svntest.main.file_append(phi_path, "This is the file 'phi'.")
+ svntest.main.run_svn(None, 'add', phi_path)
+ svntest.main.file_append(chi_path, "Replacement file 'chi'.")
+ svntest.main.run_svn(None, 'add', chi_path)
+
+ # Note that we don't tweak the expected disk tree at all,
+ # since the modifications should not be present.
+ expected_disk = svntest.main.greek_state.copy()
+
+ export_target = sbox.add_wc_path('export')
+
+ expected_output = svntest.wc.State(export_target, {
+ 'A' : Item(status='A '),
+ 'A/C' : Item(status='A '),
+ 'A/D' : Item(status='A '),
+ 'A/D/gamma' : Item(status='A '),
+ 'A/D/G' : Item(status='A '),
+ 'A/D/G/pi' : Item(status='A '),
+ 'A/D/G/tau' : Item(status='A '),
+ 'A/D/G/rho' : Item(status='A '),
+ 'A/D/H' : Item(status='A '),
+ 'A/D/H/psi' : Item(status='A '),
+ 'A/D/H/omega' : Item(status='A '),
+ 'A/D/H/chi' : Item(status='A '),
+ 'A/B' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ 'A/B/lambda' : Item(status='A '),
+ 'A/B/E' : Item(status='A '),
+ 'A/B/E/beta' : Item(status='A '),
+ 'A/B/E/alpha' : Item(status='A '),
+ 'A/mu' : Item(status='A '),
+ 'K' : Item(status='A '),
+ 'iota' : Item(status='A '),
+ })
+
+ svntest.actions.run_and_verify_export(wc_dir,
+ export_target,
+ expected_output,
+ expected_disk,
+ '-rBASE')
+
+def export_native_eol_option(sbox):
+  "export with --native-eol"
+  sbox.build()
+
+  wc_dir = sbox.wc_dir
+
+  # Set svn:eol-style to 'native' on A/mu, then export with
+  # '--native-eol CR' and check that mu's LF line endings come out
+  # as CR.  (NOTE(review): an earlier comment claimed a '\n' was
+  # appended to A/mu, but no append happens here.)
+  mu_path = os.path.join(wc_dir, 'A', 'mu')
+  svntest.main.run_svn(None, 'ps', 'svn:eol-style',
+                       'native', mu_path)
+  svntest.main.run_svn(None, 'ci',
+                       '-m', 'Added eol-style prop to mu', mu_path)
+
+  # Expected disk: mu with CR line endings instead of LF.
+  expected_disk = svntest.main.greek_state.copy()
+  new_contents = expected_disk.desc['A/mu'].contents.replace("\n", "\r")
+  expected_disk.tweak('A/mu', contents=new_contents)
+
+  export_target = sbox.add_wc_path('export')
+
+  expected_output = svntest.main.greek_state.copy()
+  expected_output.wc_dir = export_target
+  expected_output.desc[''] = Item()
+  expected_output.tweak(contents=None, status='A ')
+
+  # The positional True matches keep_eol_style (cf. the keyword usage
+  # in export_eol_translation above), so CRs are compared literally.
+  svntest.actions.run_and_verify_export2(sbox.repo_url,
+                                         export_target,
+                                         expected_output,
+                                         expected_disk,
+                                         True,
+                                         '--native-eol','CR')
+
+def export_nonexistent_file(sbox):
+ "export nonexistent file"
+ sbox.build(read_only = True)
+
+ wc_dir = sbox.wc_dir
+
+ kappa_path = os.path.join(wc_dir, 'kappa')
+
+ export_target = sbox.add_wc_path('export')
+
+ svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
+ 'export', kappa_path, export_target)
+
+def export_unversioned_file(sbox):
+ "export unversioned file"
+ sbox.build(read_only = True)
+
+ wc_dir = sbox.wc_dir
+
+ kappa_path = os.path.join(wc_dir, 'kappa')
+ svntest.main.file_append(kappa_path, "This is the file 'kappa'.")
+
+ export_target = sbox.add_wc_path('export')
+
+ svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
+ 'export', kappa_path, export_target)
+
+def export_with_state_deleted(sbox):
+ "export with state deleted=true"
+ sbox.build()
+
+ wc_dir = sbox.wc_dir
+
+ # state deleted=true caused export to crash
+ alpha_path = os.path.join(wc_dir, 'A', 'B', 'E', 'alpha')
+ svntest.actions.run_and_verify_svn(None, [], 'rm', alpha_path)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E/alpha' : Item(verb='Deleting'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/B/E/alpha')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+
+ export_target = sbox.add_wc_path('export')
+ expected_output = svntest.wc.State(export_target, {
+ 'A' : Item(status='A '),
+ 'A/B' : Item(status='A '),
+ 'A/B/E' : Item(status='A '),
+ 'A/B/E/beta' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ 'A/B/lambda' : Item(status='A '),
+ 'A/D' : Item(status='A '),
+ 'A/D/G' : Item(status='A '),
+ 'A/D/G/rho' : Item(status='A '),
+ 'A/D/G/tau' : Item(status='A '),
+ 'A/D/G/pi' : Item(status='A '),
+ 'A/D/H' : Item(status='A '),
+ 'A/D/H/chi' : Item(status='A '),
+ 'A/D/H/psi' : Item(status='A '),
+ 'A/D/H/omega' : Item(status='A '),
+ 'A/D/gamma' : Item(status='A '),
+ 'A/C' : Item(status='A '),
+ 'A/mu' : Item(status='A '),
+ 'iota' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/E/alpha')
+ svntest.actions.run_and_verify_export(sbox.wc_dir,
+ export_target,
+ expected_output,
+ expected_disk)
+
+def export_creates_intermediate_folders(sbox):
+ "export and create some intermediate folders"
+ sbox.build(create_wc = False, read_only = True)
+
+ svntest.main.safe_rmtree(sbox.wc_dir)
+ export_target = os.path.join(sbox.wc_dir, 'a', 'b', 'c')
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.wc_dir = export_target
+ expected_output.desc[''] = Item()
+ expected_output.tweak(contents=None, status='A ')
+
+ svntest.actions.run_and_verify_export(sbox.repo_url,
+ export_target,
+ expected_output,
+ svntest.main.greek_state.copy())
+
+def export_HEADplus1_fails(sbox):
+  "export -r {HEAD+1} fails"
+
+  sbox.build(create_wc = False, read_only = True)
+
+  # 38956 is just an arbitrary revision number far beyond HEAD; the
+  # export must be rejected with 'No such revision'.
+  svntest.actions.run_and_verify_svn(None, '.*No such revision.*',
+                                     'export', sbox.repo_url, sbox.wc_dir,
+                                     '-r', 38956)
+
+def export_url_to_explicit_cwd(sbox):
+ "export a single file to '.', via url"
+ sbox.build(create_wc = False, read_only = True)
+
+ svntest.main.safe_rmtree(sbox.wc_dir)
+ expected_output = svntest.wc.State('', {
+ 'iota': Item(status='A '),
+ })
+ expected_disk = svntest.wc.State('', {
+ 'iota': Item(contents="This is the file 'iota'.\n"),
+ })
+
+ os.mkdir(sbox.wc_dir)
+ os.chdir(sbox.wc_dir)
+ svntest.actions.run_and_verify_export(sbox.repo_url + '/iota',
+ '.', expected_output,
+ expected_disk)
+
+def export_file_to_explicit_cwd(sbox):
+ "export a single file to '.', via wc"
+ sbox.build(create_wc = True, read_only = True)
+
+ iota_path = os.path.abspath(os.path.join(sbox.wc_dir, 'iota'))
+
+ tmpdir = sbox.get_tempname('file-exports')
+ expected_output = svntest.wc.State('', {
+ 'iota': Item(status='A '),
+ })
+ expected_disk = svntest.wc.State('', {
+ 'iota': Item(contents="This is the file 'iota'.\n"),
+ })
+
+ os.mkdir(tmpdir)
+ os.chdir(tmpdir)
+ svntest.actions.run_and_verify_export(iota_path,
+ '.', expected_output,
+ expected_disk)
+
+@Issue(3799)
+def export_file_overwrite_fails(sbox):
+ "exporting a file refuses to silently overwrite"
+ sbox.build(create_wc = True, read_only = True)
+
+ iota_path = os.path.abspath(os.path.join(sbox.wc_dir, 'iota'))
+ iota_url = sbox.repo_url + '/iota'
+ not_iota_contents = "This obstructs 'iota'.\n"
+
+ tmpdir = sbox.get_tempname('file-overwrites')
+ os.mkdir(tmpdir)
+
+ # Run it for source local
+ open(os.path.join(tmpdir, 'iota'), 'w').write(not_iota_contents)
+ svntest.actions.run_and_verify_svn([], '.*exist.*',
+ 'export', iota_path, tmpdir)
+
+ # Verify it failed
+ expected_disk = svntest.wc.State('', {
+ 'iota': Item(contents=not_iota_contents),
+ })
+ svntest.actions.verify_disk(tmpdir, expected_disk)
+
+ # Run it for source URL
+ open(os.path.join(tmpdir, 'iota'), 'w').write(not_iota_contents)
+ svntest.actions.run_and_verify_svn([], '.*exist.*',
+ 'export', iota_url, tmpdir)
+
+ # Verify it failed
+ expected_disk = svntest.wc.State('', {
+ 'iota': Item(contents=not_iota_contents),
+ })
+ svntest.actions.verify_disk(tmpdir, expected_disk)
+
+def export_ignoring_keyword_translation(sbox):
+ "export ignoring keyword translation"
+ sbox.build()
+
+ wc_dir = sbox.wc_dir
+
+ # Add a keyword to A/mu and set the svn:keywords property
+ # appropriately to make sure it's not translated during
+ # the export operation
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+ svntest.main.file_append(mu_path, '$LastChangedRevision$')
+ svntest.main.run_svn(None, 'ps', 'svn:keywords',
+ 'LastChangedRevision', mu_path)
+ svntest.main.run_svn(None, 'ci',
+ '-m', 'Added keyword to mu', mu_path)
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu',
+ contents=expected_disk.desc['A/mu'].contents +
+ '$LastChangedRevision$')
+
+ export_target = sbox.add_wc_path('export')
+
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.wc_dir = export_target
+ expected_output.desc[''] = Item()
+ expected_output.tweak(contents=None, status='A ')
+
+ svntest.actions.run_and_verify_export(sbox.repo_url,
+ export_target,
+ expected_output,
+ expected_disk,
+ "--ignore-keywords")
+
+def export_working_copy_ignoring_keyword_translation(sbox):
+ "export working copy ignoring keyword translation"
+ sbox.build(read_only = True)
+
+ wc_dir = sbox.wc_dir
+
+ # Add a keyword to A/mu and set the svn:keywords property
+ # appropriately to make sure it's not translated during
+ # the export operation
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+ svntest.main.file_append(mu_path, '$LastChangedRevision$')
+ svntest.main.run_svn(None, 'ps', 'svn:keywords',
+ 'LastChangedRevision', mu_path)
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu',
+ contents=expected_disk.desc['A/mu'].contents +
+ '$LastChangedRevision$')
+
+ export_target = sbox.add_wc_path('export')
+
+ expected_output = svntest.wc.State(export_target, {
+ 'A' : Item(status='A '),
+ 'A/D' : Item(status='A '),
+ 'A/D/G' : Item(status='A '),
+ 'A/D/G/rho' : Item(status='A '),
+ 'A/D/G/pi' : Item(status='A '),
+ 'A/D/G/tau' : Item(status='A '),
+ 'A/D/H' : Item(status='A '),
+ 'A/D/H/psi' : Item(status='A '),
+ 'A/D/H/omega' : Item(status='A '),
+ 'A/D/H/chi' : Item(status='A '),
+ 'A/D/gamma' : Item(status='A '),
+ 'A/B' : Item(status='A '),
+ 'A/B/E' : Item(status='A '),
+ 'A/B/E/alpha' : Item(status='A '),
+ 'A/B/E/beta' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ 'A/B/lambda' : Item(status='A '),
+ 'A/C' : Item(status='A '),
+ 'A/mu' : Item(status='A '),
+ 'iota' : Item(status='A '),
+ })
+
+ svntest.actions.run_and_verify_export(wc_dir,
+ export_target,
+ expected_output,
+ expected_disk,
+ "--ignore-keywords")
+
+# This is a test for issue #3683 - 'Escape unsafe characters in a URL during
+# export'
+@Issue(3683)
+def export_with_url_unsafe_characters(sbox):
+ "export file with URL unsafe characters"
+
+ ## See http://subversion.tigris.org/issues/show_bug.cgi?id=3683 ##
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Define the paths
+ url_unsafe_path = os.path.join(wc_dir, 'A', 'test- @#$&.txt')
+ url_unsafe_path_url = sbox.repo_url + '/A/test- @#$&.txt@'
+ export_target = os.path.join(wc_dir, 'test- @#$&.txt')
+
+ # Create the file with special name and commit it.
+ svntest.main.file_write(url_unsafe_path, 'This is URL unsafe path file.')
+ svntest.main.run_svn(None, 'add', url_unsafe_path + '@')
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg',
+ '--quiet', wc_dir)
+
+ # Export the file and verify it.
+ svntest.actions.run_and_verify_svn(None, [], 'export',
+ url_unsafe_path_url, export_target + '@')
+
+ if not os.path.exists(export_target):
+ raise svntest.Failure("export did not fetch file with URL unsafe path")
+
+@Issue(3800)
+def export_working_copy_with_depths(sbox):
+ "export working copy with different depths"
+ sbox.build(read_only = True)
+
+ expected_disk = svntest.wc.State('', {
+ 'A': Item(),
+ 'iota': Item(contents="This is the file 'iota'.\n"),
+ })
+ export_target = sbox.add_wc_path('immediates')
+ expected_output = svntest.wc.State(export_target, {
+ 'iota' : Item(status='A '),
+ 'A' : Item(status='A '),
+ })
+ svntest.actions.run_and_verify_export(sbox.wc_dir,
+ export_target,
+ expected_output,
+ expected_disk,
+ '--depth=immediates')
+
+ expected_disk.remove('A')
+ export_target = sbox.add_wc_path('files')
+ expected_output = svntest.wc.State(export_target, {
+ 'iota' : Item(status='A '),
+ })
+ svntest.actions.run_and_verify_export(sbox.wc_dir,
+ export_target,
+ expected_output,
+ expected_disk,
+ '--depth=files')
+
+ expected_disk.remove('iota')
+ expected_output = svntest.wc.State(export_target, {
+ })
+ export_target = sbox.add_wc_path('empty')
+ svntest.actions.run_and_verify_export(sbox.wc_dir,
+ export_target,
+ expected_output,
+ expected_disk,
+ '--depth=empty')
+
+def export_externals_with_native_eol(sbox):
+ "export externals with eol translation"
+ sbox.build()
+
+ wc_dir = sbox.wc_dir
+
+ # Set svn:eol-style to 'native' to see if it's applied correctly to
+ # externals in the export operation
+ alpha_path = os.path.join(wc_dir, 'A', 'B', 'E', 'alpha')
+ svntest.main.run_svn(None, 'ps', 'svn:eol-style', 'native', alpha_path)
+ svntest.main.run_svn(None, 'ci',
+ '-m', 'Added eol-style prop to alpha', alpha_path)
+
+ # Set 'svn:externals' property in 'A/C' to 'A/B/E/alpha'(file external),
+ # 'A/B/E'(directory external) & commit the property
+ C_path = os.path.join(wc_dir, 'A', 'C')
+ externals_prop = """^/A/B/E/alpha exfile_alpha
+ ^/A/B/E exdir_E"""
+
+ tmp_f = sbox.get_tempname('props')
+ svntest.main.file_append(tmp_f, externals_prop)
+ svntest.main.run_svn(None, 'ps', '-F', tmp_f, 'svn:externals', C_path)
+ svntest.main.run_svn(None,'ci', '-m', 'log msg', '--quiet', C_path)
+
+
+ # Update the working copy to receive all changes(file external and
+  # directory external changes) from repository
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+ # After export, expected_disk will have all those present in standard
+ # greek tree and new externals we added above.
+ # Update the expected disk tree to include all those externals.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/C/exfile_alpha' : Item("This is the file 'alpha'.\n"),
+ 'A/C/exdir_E' : Item(),
+ 'A/C/exdir_E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'A/C/exdir_E/beta' : Item("This is the file 'beta'.\n")
+ })
+
+ # We are exporting with '--native-eol CR' option.
+ # So change the contents of files under *expected_disk* tree
+ # which have svn:eol-style property set to 'native' to verify
+ # with the exported tree.
+ # Here A/B/E/alpha and its external manifestations A/C/exfile_alpha
+ # and A/C/exdir_E/alpha needs a tweak.
+ new_contents = expected_disk.desc['A/C/exfile_alpha'].contents.replace("\n",
+ "\r")
+ expected_disk.tweak('A/C/exfile_alpha', 'A/B/E/alpha','A/C/exdir_E/alpha',
+ contents=new_contents)
+
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.add({
+ 'A/C/exfile_alpha' : Item("This is the file 'alpha'.\r"),
+ 'A/C/exdir_E' : Item(),
+ 'A/C/exdir_E/alpha' : Item("This is the file 'alpha'.\r"),
+ 'A/C/exdir_E/beta' : Item("This is the file 'beta'.\n")
+ })
+
+ # Export the repository with '--native-eol CR' option
+ export_target = sbox.add_wc_path('export')
+ expected_output.wc_dir = export_target
+ expected_output.desc[''] = Item()
+ expected_output.tweak(contents=None, status='A ')
+ svntest.actions.run_and_verify_export2(sbox.repo_url,
+ export_target,
+ expected_output,
+ expected_disk,
+ True,
+ '--native-eol', 'CR')
+
+@Issue(3727)
+def export_to_current_dir(sbox):
+ "export to current dir"
+ # Issue 3727: Forced export in current dir creates unexpected subdir.
+ sbox.build(create_wc = False, read_only = True)
+
+ svntest.main.safe_rmtree(sbox.wc_dir)
+ os.mkdir(sbox.wc_dir)
+
+ orig_dir = os.getcwd()
+ os.chdir(sbox.wc_dir)
+
+ export_url = sbox.repo_url + '/A/B/E'
+ export_target = '.'
+ expected_output = svntest.wc.State('', {
+ '.' : Item(status='A '),
+ 'alpha' : Item(status='A '),
+ 'beta' : Item(status='A '),
+ })
+ expected_disk = svntest.wc.State('', {
+ 'alpha' : Item("This is the file 'alpha'.\n"),
+ 'beta' : Item("This is the file 'beta'.\n"),
+ })
+ svntest.actions.run_and_verify_export(export_url,
+ export_target,
+ expected_output,
+ expected_disk,
+ '--force')
+
+ os.chdir(orig_dir)
+
+def export_file_overwrite_with_force(sbox):
+ "exporting a file with force option"
+ sbox.build(create_wc = True, read_only = True)
+
+ iota_path = os.path.abspath(os.path.join(sbox.wc_dir, 'iota'))
+ iota_url = sbox.repo_url + '/iota'
+ not_iota_contents = "This obstructs 'iota'.\n"
+ iota_contents = "This is the file 'iota'.\n"
+
+ tmpdir = sbox.get_tempname('file-overwrites')
+ os.mkdir(tmpdir)
+
+ expected_disk = svntest.wc.State('', {
+ 'iota': Item(contents=iota_contents),
+ })
+
+ # Run it for WC export
+ open(os.path.join(tmpdir, 'iota'), 'w').write(not_iota_contents)
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput,
+ [], 'export', '--force',
+ iota_path, tmpdir)
+ svntest.actions.verify_disk(tmpdir, expected_disk)
+
+ # Run it for URL export
+ open(os.path.join(tmpdir, 'iota'), 'w').write(not_iota_contents)
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput,
+ [], 'export', '--force',
+ iota_url, tmpdir)
+ svntest.actions.verify_disk(tmpdir, expected_disk)
+
+def export_custom_keywords(sbox):
+ """export with custom keywords"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # 248=SVN_KEYWORD_MAX_LEN-7 where 7 is '$', 'Q', 'q', ':', ' ', ' ', '$'
+ alpha_content = ('[$Qq: %s $ $Pp: %s $]\n'
+ % (sbox.repo_url[:248],
+ (sbox.repo_url + '/A/B/E/alpha')[:248]))
+
+ sbox.simple_append('A/B/E/alpha', '[$Qq$ $Pp$]\n', truncate=True)
+ sbox.simple_propset('svn:keywords', 'Qq=%R Pp=%u', 'A/B/E/alpha')
+ sbox.simple_commit()
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/B/E/alpha', contents=alpha_content)
+ svntest.actions.verify_disk(sbox.wc_dir, expected_disk)
+
+ # Export a tree
+ export_target = sbox.add_wc_path('export')
+ expected_output = svntest.wc.State(export_target, {
+ '' : Item(status='A '),
+ 'alpha' : Item(status='A '),
+ 'beta' : Item(status='A '),
+ })
+ expected_disk = svntest.wc.State('', {
+ 'alpha': Item(contents=alpha_content),
+ 'beta' : Item(contents="This is the file 'beta'.\n"),
+ })
+ svntest.actions.run_and_verify_export(sbox.repo_url + '/A/B/E',
+ export_target,
+ expected_output,
+ expected_disk)
+
+ # Export a file
+ export_file = os.path.join(export_target, 'alpha')
+ os.remove(export_file)
+ expected_output = ['A %s\n' % export_file, 'Export complete.\n']
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'export', '--force',
+ sbox.repo_url + '/A/B/E/alpha',
+ export_target)
+
+ if open(export_file).read() != ''.join(alpha_content):
+ raise svntest.Failure("wrong keyword expansion")
+
+@Issue(4427)
+def export_file_external(sbox):
+ "export file external from WC and URL"
+ sbox.build()
+
+ wc_dir = sbox.wc_dir
+
+ # Set 'svn:externals' property in 'A/C' to 'A/B/E/alpha'(file external),
+ C_path = os.path.join(wc_dir, 'A', 'C')
+ externals_prop = "^/A/B/E/alpha exfile_alpha"
+
+ tmp_f = sbox.get_tempname('prop')
+ svntest.main.file_append(tmp_f, externals_prop)
+ svntest.main.run_svn(None, 'ps', '-F', tmp_f, 'svn:externals', C_path)
+ svntest.main.run_svn(None,'ci', '-m', 'log msg', '--quiet', C_path)
+
+ # Update the working copy to receive file external
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+ # Update the expected disk tree to include the external.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/C/exfile_alpha' : Item("This is the file 'alpha'.\n"),
+ })
+
+ # Export from URL
+ export_target = sbox.add_wc_path('export_url')
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.add({
+ 'A/C/exfile_alpha' : Item("This is the file 'alpha'.\r"),
+ })
+ expected_output.wc_dir = export_target
+ expected_output.desc[''] = Item()
+ expected_output.tweak(contents=None, status='A ')
+ svntest.actions.run_and_verify_export(sbox.repo_url,
+ export_target,
+ expected_output,
+ expected_disk)
+
+ # Export from WC
+ export_target = sbox.add_wc_path('export_wc')
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.add({
+ 'A/C/exfile_alpha' : Item("This is the file 'alpha'.\r"),
+ })
+ expected_output.wc_dir = export_target
+ expected_output.desc['A'] = Item()
+ expected_output.tweak(contents=None, status='A ')
+ svntest.actions.run_and_verify_export(wc_dir,
+ export_target,
+ expected_output,
+ expected_disk)
+
+@Issue(4427)
+def export_file_externals2(sbox):
+ "exporting file externals"
+
+ sbox.build()
+ sbox.simple_mkdir('DIR', 'DIR2')
+
+ sbox.simple_propset('svn:externals', '^/iota file', 'DIR')
+ sbox.simple_propset('svn:externals', '^/DIR TheDir', 'DIR2')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ tmp = sbox.add_wc_path('tmp')
+ os.mkdir(tmp)
+
+ expected_output = svntest.wc.State(tmp, {
+ 'file' : Item(status='A '),
+ })
+ expected_disk = svntest.wc.State('', {
+ 'file': Item(contents="This is the file 'iota'.\n")
+ })
+ # Fails in 1.8.8 and r1575909.
+ # Direct export of file external was just skipped
+ svntest.actions.run_and_verify_export(sbox.ospath('DIR/file'),
+ tmp,
+ expected_output,
+ expected_disk)
+
+ expected_output = svntest.wc.State(tmp, {
+ 'DIR/file' : Item(status='A '),
+ })
+ expected_disk = svntest.wc.State('', {
+ 'file': Item(contents="This is the file 'iota'.\n")
+ })
+ # Fails in 1.8.8 (doesn't export file), passes in r1575909
+ svntest.actions.run_and_verify_export(sbox.ospath('DIR'),
+ os.path.join(tmp, 'DIR'),
+ expected_output,
+ expected_disk)
+
+ expected_output = svntest.wc.State(tmp, {
+ 'DIR2/TheDir/file' : Item(status='A '),
+ })
+ expected_disk = svntest.wc.State('', {
+ 'TheDir' : Item(),
+ 'TheDir/file' : Item(contents="This is the file 'iota'.\n")
+ })
+ # Fails in 1.8.8 (doesn't export anything),
+ # Fails in r1575909 (exports file twice; once as file; once as external)
+ svntest.actions.run_and_verify_export(sbox.ospath('DIR2'),
+ os.path.join(tmp, 'DIR2'),
+ expected_output,
+ expected_disk)
+
+def export_revision_with_root_relative_external(sbox):
+ "export a revision with root-relative external"
+ sbox.build()
+
+ wc_dir = sbox.wc_dir
+
+ # Set 'svn:externals' property in 'A/C' to 'A/B/E/alpha'(file external),
+ C_path = os.path.join(wc_dir, 'A', 'C')
+ externals_prop = "^/A/B/E/alpha exfile_alpha"
+
+ tmp_f = sbox.get_tempname('prop')
+ svntest.main.file_append(tmp_f, externals_prop)
+ svntest.main.run_svn(None, 'ps', '-F', tmp_f, 'svn:externals', C_path)
+ svntest.main.run_svn(None,'ci', '-m', 'log msg', '--quiet', C_path)
+
+ # Update the working copy to receive file external
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+ # Update the expected disk tree to include the external.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/C/exfile_alpha' : Item("This is the file 'alpha'.\n"),
+ })
+
+ # Update the expected output to include the external.
+ expected_output = svntest.main.greek_state.copy()
+ expected_output.add({
+ 'A/C/exfile_alpha' : Item("This is the file 'alpha'.\r"),
+ })
+ expected_output.desc[''] = Item()
+ expected_output.tweak(contents=None, status='A ')
+
+ # Export revision 2 from URL
+ export_target = sbox.add_wc_path('export_url')
+ expected_output.wc_dir = export_target
+ svntest.actions.run_and_verify_export(sbox.repo_url,
+ export_target,
+ expected_output,
+ expected_disk,
+ '-r', 2)
+
+ # Export revision 2 from WC
+ export_target = sbox.add_wc_path('export_wc')
+ expected_output.wc_dir = export_target
+ svntest.actions.run_and_verify_export(sbox.wc_dir,
+ export_target,
+ expected_output,
+ expected_disk,
+ '-r', 2)
+
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ export_empty_directory,
+ export_greek_tree,
+ export_nonexistent_url,
+ export_working_copy,
+ export_working_copy_with_mods,
+ export_over_existing_dir,
+ export_keyword_translation,
+ export_eol_translation,
+ export_working_copy_with_keyword_translation,
+ export_working_copy_with_property_mods,
+ export_working_copy_at_base_revision,
+ export_native_eol_option,
+ export_nonexistent_file,
+ export_unversioned_file,
+ export_with_state_deleted,
+ export_creates_intermediate_folders,
+ export_HEADplus1_fails,
+ export_url_to_explicit_cwd,
+ export_file_to_explicit_cwd,
+ export_file_overwrite_fails,
+ export_ignoring_keyword_translation,
+ export_working_copy_ignoring_keyword_translation,
+ export_with_url_unsafe_characters,
+ export_working_copy_with_depths,
+ export_externals_with_native_eol,
+ export_to_current_dir,
+ export_file_overwrite_with_force,
+ export_custom_keywords,
+ export_file_external,
+ export_file_externals2,
+ export_revision_with_root_relative_external,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/externals_tests.py b/subversion/tests/cmdline/externals_tests.py
new file mode 100755
index 0000000..c0ac029
--- /dev/null
+++ b/subversion/tests/cmdline/externals_tests.py
@@ -0,0 +1,4524 @@
+#!/usr/bin/env python
+#
+#  externals_tests.py:  testing modules / external sources.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import sys
+import os
+import re
+import shutil
+
+# Our testing module
+import svntest
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+
+#----------------------------------------------------------------------
+
+### todo: it's inefficient to keep calling externals_test_setup() for
+### every test. It's slow. But it's very safe -- we're guaranteed to
+### have a clean repository, built from the latest Subversion, with
+### the svn:externals properties preset in a known way. Right now I
+### can't think of any other way to achieve that guarantee, so the
+### result is that each individual test is slow.
+
+def externals_test_setup(sbox):
+ """Set up a repository in which some directories have the externals property,
+ and set up another repository, referred to by some of those externals.
+ Both repositories contain greek trees with five revisions worth of
+ random changes, then in the sixth revision the first repository --
+ and only the first -- has some externals properties set. ### Later,
+ test putting externals on the second repository. ###
+
+ The arrangement of the externals in the first repository is:
+
+ /A/B/ ==> ^/A/D/gamma gamma
+ /A/C/ ==> exdir_G <scheme>:///<other_repos>/A/D/G
+ ../../../<other_repos_basename>/A/D/H@1 exdir_H
+
+ /A/D/ ==> ^/../<other_repos_basename>/A exdir_A
+ //<other_repos>/A/D/G/ exdir_A/G/
+ exdir_A/H -r 1 <scheme>:///<other_repos>/A/D/H
+ /<some_paths>/A/B x/y/z/blah
+
+ A dictionary is returned keyed by the directory created by the
+ external whose value is the URL of the external.
+ """
+
+ # The test itself will create a working copy
+ sbox.build(create_wc = False)
+
+ svntest.main.safe_rmtree(sbox.wc_dir)
+
+ wc_init_dir = sbox.add_wc_path('init') # just for setting up props
+ repo_dir = sbox.repo_dir
+ repo_url = sbox.repo_url
+ other_repo_dir, other_repo_url = sbox.add_repo_path('other')
+ other_repo_basename = os.path.basename(other_repo_dir)
+
+ # Get a scheme relative URL to the other repository.
+ scheme_relative_other_repo_url = other_repo_url[other_repo_url.find(':')+1:]
+
+ # Get a server root relative URL to the other repository by trimming
+ # off the first three /'s.
+ server_relative_other_repo_url = other_repo_url
+ for i in range(3):
+ j = server_relative_other_repo_url.find('/') + 1
+ server_relative_other_repo_url = server_relative_other_repo_url[j:]
+ server_relative_other_repo_url = '/' + server_relative_other_repo_url
+
+ # These files will get changed in revisions 2 through 5.
+ mu_path = os.path.join(wc_init_dir, "A/mu")
+ pi_path = os.path.join(wc_init_dir, "A/D/G/pi")
+ lambda_path = os.path.join(wc_init_dir, "A/B/lambda")
+ omega_path = os.path.join(wc_init_dir, "A/D/H/omega")
+
+ # These are the directories on which `svn:externals' will be set, in
+ # revision 6 on the first repo.
+ B_path = os.path.join(wc_init_dir, "A/B")
+ C_path = os.path.join(wc_init_dir, "A/C")
+ D_path = os.path.join(wc_init_dir, "A/D")
+
+ # Create a working copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_init_dir)
+
+ # Make revisions 2 through 5, but don't bother with pre- and
+ # post-commit status checks.
+
+ svntest.main.file_append(mu_path, "Added to mu in revision 2.\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg',
+ '--quiet', wc_init_dir)
+
+ svntest.main.file_append(pi_path, "Added to pi in revision 3.\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg',
+ '--quiet', wc_init_dir)
+
+ svntest.main.file_append(lambda_path, "Added to lambda in revision 4.\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg',
+ '--quiet', wc_init_dir)
+
+ svntest.main.file_append(omega_path, "Added to omega in revision 5.\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg',
+ '--quiet', wc_init_dir)
+
+ # Get the whole working copy to revision 5.
+ expected_output = svntest.wc.State(wc_init_dir, {
+ })
+ svntest.actions.run_and_verify_update(wc_init_dir,
+ expected_output, None, None)
+
+ # Now copy the initial repository to create the "other" repository,
+ # the one to which the first repository's `svn:externals' properties
+ # will refer. After this, both repositories have five revisions
+ # of random stuff, with no svn:externals props set yet.
+ svntest.main.copy_repos(repo_dir, other_repo_dir, 5)
+
+ # This is the returned dictionary.
+ external_url_for = { }
+
+ external_url_for["A/B/gamma"] = "^/A/D/gamma"
+ external_url_for["A/C/exdir_G"] = other_repo_url + "/A/D/G"
+ external_url_for["A/C/exdir_H"] = "../../../" + \
+ other_repo_basename + \
+ "/A/D/H@1"
+
+ # Set up the externals properties on A/B/, A/C/ and A/D/.
+ externals_desc = \
+ external_url_for["A/B/gamma"] + " gamma\n"
+
+ change_external(B_path, externals_desc, commit=False)
+
+ externals_desc = \
+ "exdir_G " + external_url_for["A/C/exdir_G"] + "\n" + \
+ external_url_for["A/C/exdir_H"] + " exdir_H\n"
+
+ change_external(C_path, externals_desc, commit=False)
+
+ external_url_for["A/D/exdir_A"] = "^/../" + other_repo_basename + "/A"
+ external_url_for["A/D/exdir_A/G/"] = scheme_relative_other_repo_url + \
+ "/A/D/G/"
+ external_url_for["A/D/exdir_A/H"] = other_repo_url + "/A/D/H"
+ external_url_for["A/D/x/y/z/blah"] = server_relative_other_repo_url + "/A/B"
+
+ externals_desc = \
+ external_url_for["A/D/exdir_A"] + " exdir_A" + \
+ "\n" + \
+ external_url_for["A/D/exdir_A/G/"] + " exdir_A/G/" + \
+ "\n" + \
+ "exdir_A/H -r 1 " + external_url_for["A/D/exdir_A/H"] + \
+ "\n" + \
+ external_url_for["A/D/x/y/z/blah"] + " x/y/z/blah" + \
+ "\n"
+
+ change_external(D_path, externals_desc, commit=False)
+
+ # Commit the property changes.
+
+ expected_output = svntest.wc.State(wc_init_dir, {
+ 'A/B' : Item(verb='Sending'),
+ 'A/C' : Item(verb='Sending'),
+ 'A/D' : Item(verb='Sending'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_init_dir, 5)
+ expected_status.tweak('A/B', 'A/C', 'A/D', wc_rev=6, status=' ')
+
+ svntest.actions.run_and_verify_commit(wc_init_dir,
+ expected_output,
+ expected_status)
+
+ return external_url_for
+
+def change_external(path, new_val, commit=True):
+ """Change the value of the externals property on PATH to NEW_VAL,
+ and commit the change unless COMMIT is False."""
+
+ svntest.actions.set_prop('svn:externals', new_val, path)
+ if commit:
+ svntest.actions.run_and_verify_svn(None, [], 'ci',
+ '-m', 'log msg', '--quiet', path)
+
+def change_external_expect_error(path, new_val, expected_err):
+ """Try to change the value of the externals property on PATH to NEW_VAL,
+ but expect to get an error message that matches EXPECTED_ERR."""
+
+ svntest.actions.set_prop('svn:externals', new_val, path,
+ expected_re_string=expected_err)
+
+
+def probe_paths_exist(paths):
+ """ Probe each one of PATHS to see if it exists, otherwise throw a
+ Failure exception. """
+
+ for path in paths:
+ if not os.path.exists(path):
+ raise svntest.Failure("Probing for " + path + " failed.")
+
+
+def probe_paths_missing(paths):
+  """ Probe each one of PATHS to see if it does not exist, otherwise throw a
+ Failure exception. """
+
+ for path in paths:
+ if os.path.exists(path):
+ raise svntest.Failure(path + " unexpectedly still exists.")
+
+
+#----------------------------------------------------------------------
+
+
+### todo: It would be great if everything used the new wc.py system to
+### check output/status. In fact, it would be great to do more output
+### and status checking period! But must first see how well the
+### output checkers deal with multiple summary lines. With external
+### modules, you can get the first "Updated to revision X" line, and
+### then there will be more "Updated to..." and "Checked out..." lines
+### following it, one line for each new or changed external.
+
+
+#----------------------------------------------------------------------
+
+def checkout_with_externals(sbox):
+ "test checkouts with externals"
+
+ externals_test_setup(sbox)
+
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # Create a working copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ # Probe the working copy a bit, see if it's as expected.
+ expected_existing_paths = [
+ sbox.ospath('A/B/gamma'),
+ sbox.ospath('A/C/exdir_G'),
+ sbox.ospath('A/C/exdir_G/pi'),
+ sbox.ospath('A/C/exdir_H'),
+ sbox.ospath('A/C/exdir_H/omega'),
+ sbox.ospath('A/D/x'),
+ sbox.ospath('A/D/x/y'),
+ sbox.ospath('A/D/x/y/z'),
+ sbox.ospath('A/D/x/y/z/blah'),
+ sbox.ospath('A/D/x/y/z/blah/E/alpha'),
+ sbox.ospath('A/D/x/y/z/blah/E/beta'),
+ ]
+ probe_paths_exist(expected_existing_paths)
+
+ # Pick a file at random, make sure it has the expected contents.
+ for path, contents in ((sbox.ospath('A/C/exdir_H/omega'),
+ "This is the file 'omega'.\n"),
+ (sbox.ospath('A/B/gamma'),
+ "This is the file 'gamma'.\n")):
+ if open(path).read() != contents:
+ raise svntest.Failure("Unexpected contents for rev 1 of " + path)
+
+#----------------------------------------------------------------------
+
+def update_receive_new_external(sbox):
+ "update to receive a new external module"
+
+ external_url_for = externals_test_setup(sbox)
+ wc_dir = sbox.wc_dir
+
+ other_wc_dir = sbox.add_wc_path('other')
+ repo_url = sbox.repo_url
+ other_repo_url = repo_url + ".other"
+
+ # Checkout two working copies.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, other_wc_dir)
+
+ # Add one new external item to the property on A/D. The new item is
+ # "exdir_E", deliberately added in the middle not at the end.
+ new_externals_desc = \
+ external_url_for["A/D/exdir_A"] + " exdir_A" + \
+ "\n" + \
+ external_url_for["A/D/exdir_A/G/"] + " exdir_A/G/" + \
+ "\n" + \
+ "exdir_E " + other_repo_url + "/A/B/E" + \
+ "\n" + \
+ "exdir_A/H -r 1 " + external_url_for["A/D/exdir_A/H"] + \
+ "\n" + \
+ external_url_for["A/D/x/y/z/blah"] + " x/y/z/blah" + \
+ "\n"
+
+ # Set and commit the property
+ change_external(sbox.ospath('A/D'), new_externals_desc)
+
+ # Update the other working copy, see if we get the new item.
+ expected_output = svntest.wc.State(other_wc_dir, {
+ 'A/D' : Item(status=' U'),
+ 'A/D/exdir_E/beta' : Item(status='A '),
+ 'A/D/exdir_E/alpha' : Item(status='A '),
+ })
+ svntest.actions.run_and_verify_update(other_wc_dir,
+ expected_output, None, None)
+
+ probe_paths_exist([os.path.join(other_wc_dir, "A", "D", "exdir_E")])
+
+#----------------------------------------------------------------------
+
+def update_lose_external(sbox):
+ "update to lose an external module"
+
+ external_url_for = externals_test_setup(sbox)
+ wc_dir = sbox.wc_dir
+
+ other_wc_dir = sbox.add_wc_path('other')
+ repo_url = sbox.repo_url
+
+ # Checkout two working copies.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, other_wc_dir)
+
+ # Lose one new external item from A/D. The lost item is
+ # "exdir_A", chosen because there are two other externals underneath
+ # it (G and H) which are not being removed. We expect them to
+ # remain -- in other words:
+ #
+ # BEFORE AFTER
+ # ------------ ------------
+ # A/D/exdir_A A/D/exdir_A
+ # A/D/exdir_A/.svn/... <GONE>
+ # A/D/exdir_A/mu <GONE>
+ # A/D/exdir_A/B/... <GONE>
+ # A/D/exdir_A/C/... <GONE>
+ # A/D/exdir_A/D/... <GONE>
+ # A/D/exdir_A/G/... A/D/exdir_A/G/...
+ # A/D/exdir_A/H/... A/D/exdir_A/H/...
+
+ new_externals_desc = \
+ external_url_for["A/D/exdir_A/G/"] + " exdir_A/G" + \
+ "\n" + \
+ "exdir_A/H -r 1 " + external_url_for["A/D/exdir_A/H"] + \
+ "\n" + \
+ external_url_for["A/D/x/y/z/blah"] + " x/y/z/blah" + \
+ "\n"
+
+ # Set and commit the property
+ change_external(sbox.ospath('A/D'), new_externals_desc)
+
+ # The code should handle a missing local externals item
+ svntest.main.safe_rmtree(os.path.join(other_wc_dir, "A", "D", "exdir_A", \
+ "D"))
+
+ # Update other working copy, see if lose & preserve things appropriately
+ expected_output = svntest.wc.State(other_wc_dir, {
+ 'A/D' : Item(status=' U'),
+ 'A/D/exdir_A' : Item(verb='Removed external'),
+ })
+ svntest.actions.run_and_verify_update(other_wc_dir,
+ expected_output, None, None)
+
+ expected_existing_paths = [
+ os.path.join(other_wc_dir, "A", "D", "exdir_A"),
+ os.path.join(other_wc_dir, "A", "D", "exdir_A", "G"),
+ os.path.join(other_wc_dir, "A", "D", "exdir_A", "H"),
+ ]
+ probe_paths_exist(expected_existing_paths)
+
+ expected_missing_paths = [
+ os.path.join(other_wc_dir, "A", "D", "exdir_A", "mu"),
+ os.path.join(other_wc_dir, "A", "D", "exdir_A", "B"),
+ os.path.join(other_wc_dir, "A", "D", "exdir_A", "C"),
+ os.path.join(other_wc_dir, "A", "D", "exdir_A", "D"),
+ ]
+ probe_paths_missing(expected_missing_paths)
+
+#----------------------------------------------------------------------
+
+def update_change_pristine_external(sbox):
+ "update change to an unmodified external module"
+
+ external_url_for = externals_test_setup(sbox)
+ wc_dir = sbox.wc_dir
+
+ other_wc_dir = sbox.add_wc_path('other')
+ repo_url = sbox.repo_url
+ other_repo_url = repo_url + ".other"
+
+ # Checkout two working copies.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, other_wc_dir)
+
+ # Change the "x/y/z/blah" external on A/D to point to a different
+ # URL. Since no changes were made to the old checked-out external,
+ # we should get a clean replace.
+ new_externals_desc = \
+ external_url_for["A/D/exdir_A"] + " exdir_A" + \
+ "\n" + \
+ external_url_for["A/D/exdir_A/G/"] + " exdir_A/G" + \
+ "\n" + \
+ "exdir_A/H -r 1 " + external_url_for["A/D/exdir_A/H"] + \
+ "\n" + \
+ "x/y/z/blah " + other_repo_url + "/A/B/F" + \
+ "\n"
+
+ # Set and commit the property
+ change_external(sbox.ospath('A/D'), new_externals_desc)
+
+ # Update other working copy, see if get the right change.
+ expected_output = svntest.wc.State(other_wc_dir, {
+ 'A/D' : Item(status=' U'),
+ 'A/D/x/y/z/blah/F' : Item(status='D '),
+ 'A/D/x/y/z/blah/E' : Item(status='D '),
+ 'A/D/x/y/z/blah/lambda': Item(status='D '),
+ })
+ svntest.actions.run_and_verify_update(other_wc_dir,
+ expected_output, None, None)
+
+ xyzb_path = os.path.join(other_wc_dir, "x", "y", "z", "blah")
+
+ expected_missing_paths = [
+ os.path.join(xyzb_path, "alpha"),
+ os.path.join(xyzb_path, "beta"),
+ ]
+ probe_paths_missing(expected_missing_paths)
+
+def update_change_modified_external(sbox):
+ "update changes to a modified external module"
+
+ # Verifies that when an svn:externals definition is redirected to a new
+ # URL while the old checked-out external contains local mods, an update
+ # replaces the external without losing track of the change.
+ external_url_for = externals_test_setup(sbox)
+ wc_dir = sbox.wc_dir
+
+ other_wc_dir = sbox.add_wc_path('other')
+ repo_url = sbox.repo_url
+ other_repo_url = repo_url + ".other"
+
+ # Checkout two working copies.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, other_wc_dir)
+
+ # Make a couple of mods in the "x/y/z/blah/" external.
+ alpha_path = os.path.join(other_wc_dir, "A", "D",
+ "x", "y", "z", "blah", "alpha")
+ svntest.main.file_append(alpha_path, "Some new text in alpha.\n")
+ new_file = os.path.join(other_wc_dir, "A", "D",
+ "x", "y", "z", "blah", "fish.txt")
+ svntest.main.file_append(new_file, "This is an unversioned file.\n")
+
+ # Change the "x/y/z/blah" external on A/D to point to a different
+ # URL. There are some local mods under the old checked-out external,
+ # so the old dir should be saved under a new name.
+ new_externals_desc = \
+ external_url_for["A/D/exdir_A"] + " exdir_A" + \
+ "\n" + \
+ external_url_for["A/D/exdir_A/G/"] + " exdir_A/G/" + \
+ "\n" + \
+ "exdir_A/H -r 1 " + external_url_for["A/D/exdir_A/H"] + \
+ "\n" + \
+ "x/y/z/blah " + other_repo_url + "/A/B/F" + \
+ "\n"
+
+ # Set and commit the property
+ change_external(sbox.ospath('A/D'), new_externals_desc)
+
+ # Update other working copy, see if we get the right change.
+ expected_output = svntest.wc.State(other_wc_dir, {
+ 'A/D' : Item(status=' U'),
+ 'A/D/x/y/z/blah/F' : Item(status='D '),
+ 'A/D/x/y/z/blah/lambda': Item(status='D '),
+ 'A/D/x/y/z/blah/E' : Item(status='D '),
+ })
+ svntest.actions.run_and_verify_update(other_wc_dir,
+ expected_output, None, None)
+
+
+ # NOTE(review): this joins other_wc_dir directly with "x/y/z/blah",
+ # omitting the "A", "D" components used everywhere above.  The external
+ # lives under A/D, so this path never exists and the missing-path probe
+ # below is presumably vacuously true — TODO confirm intent upstream.
+ xyzb_path = os.path.join(other_wc_dir, "x", "y", "z", "blah")
+
+ expected_missing_paths = [
+ os.path.join(xyzb_path, "alpha"),
+ os.path.join(xyzb_path, "beta"),
+ ]
+ probe_paths_missing(expected_missing_paths)
+
+def update_receive_change_under_external(sbox):
+ "update changes under an external module"
+
+ # Verifies that commits made in the external's source repository are
+ # pulled into the working copy by a plain update, even though no
+ # svn:externals *property* changed.
+ externals_test_setup(sbox)
+ wc_dir = sbox.wc_dir
+
+ other_wc_dir = sbox.add_wc_path('other')
+ repo_url = sbox.repo_url
+ other_repo_url = repo_url + ".other"
+
+ # Checkout two working copies.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ # The second checkout comes from the ".other" repository, which is the
+ # source repository of the externals set up by externals_test_setup().
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ other_repo_url, other_wc_dir)
+
+ # Commit some modifications from the other_wc.
+ other_gamma_path = os.path.join(other_wc_dir, 'A', 'D', 'gamma')
+ svntest.main.file_append(other_gamma_path, "New text in other gamma.\n")
+
+ expected_output = svntest.wc.State(other_wc_dir, {
+ 'A/D/gamma' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(other_wc_dir, 5)
+ expected_status.tweak('A/D/gamma', wc_rev=6)
+ svntest.actions.run_and_verify_commit(other_wc_dir,
+ expected_output,
+ expected_status)
+
+ # Now update the regular wc to see if we get the change. Note that
+ # none of the module *properties* in this wc have been changed; only
+ # the source repository of the modules has received a change, and
+ # we're verifying that an update here pulls that change.
+
+ # The output's going to be all screwy because of the module
+ # notifications, so don't bother parsing it, just run update
+ # directly.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/exdir_A/D/gamma': Item(status='U '),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output, None, None)
+
+ external_gamma_path = sbox.ospath('A/D/exdir_A/D/gamma')
+ contents = open(external_gamma_path).read()
+ if contents != ("This is the file 'gamma'.\n"
+ "New text in other gamma.\n"):
+ raise svntest.Failure("Unexpected contents for externally modified " +
+ external_gamma_path)
+
+ # Commit more modifications
+ other_rho_path = os.path.join(other_wc_dir, 'A', 'D', 'G', 'rho')
+ svntest.main.file_append(other_rho_path, "New text in other rho.\n")
+
+ expected_output = svntest.wc.State(other_wc_dir, {
+ 'A/D/G/rho' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(other_wc_dir, 5)
+ expected_status.tweak('A/D/gamma', wc_rev=6)
+ expected_status.tweak('A/D/G/rho', wc_rev=7)
+ svntest.actions.run_and_verify_commit(other_wc_dir,
+ expected_output,
+ expected_status)
+
+ # Updating just the A/C subtree must still pull the external's change.
+ expected_output = svntest.wc.State(sbox.ospath('A/C'), {
+ 'exdir_G/rho' : Item(status='U '),
+ })
+ svntest.actions.run_and_verify_update(sbox.ospath('A/C'),
+ expected_output, None, None)
+
+ external_rho_path = sbox.ospath('A/C/exdir_G/rho')
+ contents = open(external_rho_path).read()
+ if contents != ("This is the file 'rho'.\n"
+ "New text in other rho.\n"):
+ raise svntest.Failure("Unexpected contents for externally modified " +
+ external_rho_path)
+
+#----------------------------------------------------------------------
+
+def modify_and_update_receive_new_external(sbox):
+ "commit and update additional externals"
+
+ # Verifies that adding a new item to an existing svn:externals property
+ # and updating (from inside the directory) checks out the new external.
+ external_url_for = externals_test_setup(sbox)
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # Checkout a working copy
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ # Add one more external item
+ B_path = sbox.ospath('A/B')
+ externals_desc = \
+ external_url_for["A/D/exdir_A/G/"] + " exdir_G" + \
+ "\n" + \
+ "exdir_H -r 1 " + external_url_for["A/D/exdir_A/H"] + \
+ "\n" + \
+ "exdir_Z " + external_url_for["A/D/exdir_A/H"] + \
+ "\n"
+
+ change_external(B_path, externals_desc)
+
+ # Now cd into A/B and try updating
+ was_cwd = os.getcwd()
+ os.chdir(B_path)
+
+ # Once upon a time there was a core-dump here
+
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'up' )
+
+ # Restore the working directory before probing, since B_path is
+ # relative to the original cwd.
+ os.chdir(was_cwd)
+
+ probe_paths_exist([os.path.join(B_path, "exdir_Z")])
+
+#----------------------------------------------------------------------
+
+def disallow_dot_or_dotdot_directory_reference(sbox):
+ "error if external target dir involves '.' or '..'"
+
+ # Verifies that svn:externals target paths containing '.', '..', or
+ # absolute paths (including Windows drive specs) are rejected.
+ external_url_for = externals_test_setup(sbox)
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # Checkout a working copy
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ # Try to set illegal externals in the original WC.
+ # Helper: setting VAL on PATH must fail with the invalid-target error.
+ def set_externals_for_path_expect_error(path, val):
+ expected_err = ".*Invalid svn:externals property on '.*': target " + \
+ "'.*' is an absolute path or involves '..'.*"
+ change_external_expect_error(path, val, expected_err)
+
+ B_path = sbox.ospath('A/B')
+ G_path = sbox.ospath('A/D/G')
+ H_path = sbox.ospath('A/D/H')
+ C_path = sbox.ospath('A/C')
+ F_path = sbox.ospath('A/B/F')
+
+ external_urls = list(external_url_for.values())
+
+ # The external_urls contains some examples of relative urls that are
+ # ambiguous with these local test paths, so we have to use the
+ # <url> <path> ordering here to check the local path validator.
+
+ # Each pop() consumes a URL; the list is refilled whenever it runs dry
+ # so every case below always has a URL available.
+ externals_value_1 = external_urls.pop() + " ../foo\n"
+ if not external_urls: external_urls = list(external_url_for.values())
+ externals_value_2 = external_urls.pop() + " foo/bar/../baz\n"
+ if not external_urls: external_urls = list(external_url_for.values())
+ externals_value_3 = external_urls.pop() + " foo/..\n"
+ if not external_urls: external_urls = list(external_url_for.values())
+ externals_value_4 = external_urls.pop() + " .\n"
+ if not external_urls: external_urls = list(external_url_for.values())
+ externals_value_5 = external_urls.pop() + " ./\n"
+ if not external_urls: external_urls = list(external_url_for.values())
+ externals_value_6 = external_urls.pop() + " ..\n"
+ if not external_urls: external_urls = list(external_url_for.values())
+ externals_value_7 = external_urls.pop() + " ././/.///. \n"
+ if not external_urls: external_urls = list(external_url_for.values())
+ externals_value_8 = external_urls.pop() + " /foo \n"
+ if not external_urls: external_urls = list(external_url_for.values())
+ if svntest.main.is_os_windows():
+ externals_value_9 = external_urls.pop() + " D:/foo\n"
+ if not external_urls: external_urls = list(external_url_for.values())
+ externals_value_10 = external_urls.pop() + " D:\\foo\n"
+ if not external_urls: external_urls = list(external_url_for.values())
+ externals_value_11 = external_urls.pop() + " D:foo\n"
+ if not external_urls: external_urls = list(external_url_for.values())
+
+ set_externals_for_path_expect_error(B_path, externals_value_1)
+ set_externals_for_path_expect_error(G_path, externals_value_2)
+ set_externals_for_path_expect_error(H_path, externals_value_3)
+ set_externals_for_path_expect_error(C_path, externals_value_4)
+ set_externals_for_path_expect_error(F_path, externals_value_5)
+ set_externals_for_path_expect_error(B_path, externals_value_6)
+ set_externals_for_path_expect_error(G_path, externals_value_7)
+ set_externals_for_path_expect_error(H_path, externals_value_8)
+ if svntest.main.is_os_windows():
+ set_externals_for_path_expect_error(B_path, externals_value_9)
+ set_externals_for_path_expect_error(B_path, externals_value_10)
+ set_externals_for_path_expect_error(B_path, externals_value_11)
+
+
+#----------------------------------------------------------------------
+
+def export_with_externals(sbox):
+ "test exports with externals"
+
+ # Verifies that 'svn export' from a URL also exports the externals and
+ # that exported file contents are correct.
+ externals_test_setup(sbox)
+
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # Export the repository into wc_dir (an unversioned tree, despite the
+ # variable name).
+ svntest.actions.run_and_verify_svn(None, [],
+ 'export',
+ repo_url, wc_dir)
+
+ # Probe the exported tree a bit, see if it's as expected.
+ expected_existing_paths = [
+ sbox.ospath('A/C/exdir_G'),
+ sbox.ospath('A/C/exdir_G/pi'),
+ sbox.ospath('A/C/exdir_H'),
+ sbox.ospath('A/C/exdir_H/omega'),
+ sbox.ospath('A/D/x'),
+ sbox.ospath('A/D/x/y'),
+ sbox.ospath('A/D/x/y/z'),
+ sbox.ospath('A/D/x/y/z/blah'),
+ sbox.ospath('A/D/x/y/z/blah/E/alpha'),
+ sbox.ospath('A/D/x/y/z/blah/E/beta'),
+ ]
+ probe_paths_exist(expected_existing_paths)
+
+ # Pick some files, make sure their contents are as expected.
+ exdir_G_pi_path = sbox.ospath('A/C/exdir_G/pi')
+ contents = open(exdir_G_pi_path).read()
+ if contents != ("This is the file 'pi'.\n"
+ "Added to pi in revision 3.\n"):
+ raise svntest.Failure("Unexpected contents for rev 1 of " +
+ exdir_G_pi_path)
+
+ # exdir_H is pinned to -r 1 by the setup, so omega has its rev-1 text.
+ exdir_H_omega_path = sbox.ospath('A/C/exdir_H/omega')
+ contents = open(exdir_H_omega_path).read()
+ if contents != "This is the file 'omega'.\n":
+ raise svntest.Failure("Unexpected contents for rev 1 of " +
+ exdir_H_omega_path)
+
+#----------------------------------------------------------------------
+
+# Test for issue #2429
+@Issue(2429)
+def export_wc_with_externals(sbox):
+ "test exports from working copies with externals"
+
+ # Verifies that exporting a *working copy* includes externals, and that
+ # --ignore-externals omits them (issue #2429).
+ paths_dict = externals_test_setup(sbox)
+
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+ export_target = sbox.add_wc_path('export')
+
+ # Create a working copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+ # Export the working copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'export', wc_dir, export_target)
+
+ ### We should be able to check exactly the paths that externals_test_setup()
+ ### set up; however, --ignore-externals fails to ignore 'A/B/gamma' so this
+ ### doesn't work:
+ # paths = [ os.path.join(export_target, path) for path in paths_dict.keys() ]
+ ### Therefore currently we check only a particular selection of paths.
+ paths = [
+ os.path.join(export_target, "A", "C", "exdir_G"),
+ os.path.join(export_target, "A", "C", "exdir_G", "pi"),
+ os.path.join(export_target, "A", "C", "exdir_H"),
+ os.path.join(export_target, "A", "C", "exdir_H", "omega"),
+ os.path.join(export_target, "A", "D", "x"),
+ os.path.join(export_target, "A", "D", "x", "y"),
+ os.path.join(export_target, "A", "D", "x", "y", "z"),
+ os.path.join(export_target, "A", "D", "x", "y", "z", "blah"),
+ os.path.join(export_target, "A", "D", "x", "y", "z", "blah", "E", "alpha"),
+ os.path.join(export_target, "A", "D", "x", "y", "z", "blah", "E", "beta"),
+ ]
+ probe_paths_exist(paths)
+
+ svntest.main.safe_rmtree(export_target)
+
+ # Export it again, without externals.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'export', '--ignore-externals',
+ wc_dir, export_target)
+ probe_paths_missing(paths)
+
+#----------------------------------------------------------------------
+
+def external_with_peg_and_op_revision(sbox):
+ "use a peg revision to specify an external module"
+
+ # Verifies that an external defined with URL@PEG syntax resolves via the
+ # peg revision even after the source path was deleted in HEAD.
+ external_url_for = externals_test_setup(sbox)
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # Checkout a working copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ # remove A/D/H in the other repo
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm',
+ external_url_for["A/D/exdir_A/H"],
+ '-m', 'remove original A/D/H')
+
+ # Set an external property using peg revision syntax.
+ new_externals_desc = \
+ external_url_for["A/D/exdir_A/H"] + "@4 exdir_A/H" + \
+ "\n" + \
+ external_url_for["A/D/exdir_A/G/"] + " exdir_A/G" + \
+ "\n"
+
+ # Set and commit the property.
+ change_external(sbox.ospath('A/D'), new_externals_desc)
+
+ # Update the working copy, see if we get the right change.  Externals
+ # not in the new description are removed.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/x/y/z/blah' : Item(verb='Removed external'),
+ 'A/D/exdir_A' : Item(verb='Removed external'),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output, None, None)
+
+
+ # The peg revision @4 predates the deletion, so chi must exist.
+ external_chi_path = sbox.ospath('A/D/exdir_A/H/chi')
+ contents = open(external_chi_path).read()
+ if contents != "This is the file 'chi'.\n":
+ raise svntest.Failure("Unexpected contents for externally modified " +
+ external_chi_path)
+
+#----------------------------------------------------------------------
+
+def new_style_externals(sbox):
+ "check the new '-rN URL PATH' syntax"
+
+ # Verifies that both '-r N' and '-rN' operative-revision forms of the
+ # new-style externals syntax are accepted and checked out.
+ external_url_for = externals_test_setup(sbox)
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # Checkout a working copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ # Set an external property using the new '-rN URL PATH' syntax.
+ new_externals_desc = \
+ external_url_for["A/C/exdir_G"] + " exdir_G" + \
+ "\n" + \
+ "-r 1 " + external_url_for["A/C/exdir_H"] + " exdir_H" + \
+ "\n" + \
+ "-r1 " + external_url_for["A/C/exdir_H"] + " exdir_I" + \
+ "\n"
+
+ # Set and commit the property.
+ change_external(sbox.ospath('A/C'), new_externals_desc)
+
+ # Update the working copy; only the newly added exdir_I appears in the
+ # output (exdir_G and exdir_H were already present from the setup).
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/C/exdir_I/chi' : Item(status='A '),
+ 'A/C/exdir_I/omega' : Item(status='A '),
+ 'A/C/exdir_I/psi' : Item(status='A '),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output, None, None)
+ # Both externals are pinned to r1, so omega has its rev-1 contents.
+ for dir_name in ["exdir_H", "exdir_I"]:
+ exdir_X_omega_path = os.path.join(wc_dir, "A", "C", dir_name, "omega")
+ contents = open(exdir_X_omega_path).read()
+ if contents != "This is the file 'omega'.\n":
+ raise svntest.Failure("Unexpected contents for rev 1 of " +
+ exdir_X_omega_path)
+
+#----------------------------------------------------------------------
+
+def disallow_propset_invalid_formatted_externals(sbox):
+ "error if propset'ing external with invalid format"
+
+ # Verifies that malformed svn:externals values are rejected at propset
+ # time with a suitable parse error.  Values are bytes since they are
+ # passed through to 'svn propset' verbatim.
+ # Bootstrap
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A_path = sbox.ospath('A')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # It should not be possible to set these external properties on a
+ # directory.
+ # Wrong number of fields, or a dangling/misplaced -r option.
+ for ext in [ b'arg1',
+ b'arg1 arg2 arg3',
+ b'arg1 arg2 arg3 arg4',
+ b'arg1 arg2 arg3 arg4 arg5',
+ b'-r',
+ b'-r1',
+ b'-r 1',
+ b'-r1 arg1',
+ b'-r 1 arg1',
+ b'arg1 -r',
+ b'arg1 -r1',
+ b'arg1 -r 1',
+ ]:
+ change_external_expect_error(A_path, ext,
+ '.*Error parsing svn:externals.*')
+
+ # Non-numeric revision arguments.
+ for ext in [ b'-r abc arg1 arg2',
+ b'-rabc arg1 arg2',
+ b'arg1 -r abc arg2',
+ b'arg1 -rabc arg2',
+ ]:
+ change_external_expect_error(A_path, ext,
+ '.*Error parsing svn:externals.*')
+
+ # Two URLs — neither field can serve as the local target directory.
+ for ext in [ b'http://example.com/ http://example.com/',
+ b'-r1 http://example.com/ http://example.com/',
+ b'-r 1 http://example.com/ http://example.com/',
+ b'http://example.com/ -r1 http://example.com/',
+ b'http://example.com/ -r 1 http://example.com/',
+ ]:
+ change_external_expect_error(A_path, ext,
+ '.*cannot use two absolute URLs.*')
+
+ # URL placed where the target directory is expected.
+ for ext in [ b'http://example.com/ -r1 foo',
+ b'http://example.com/ -r 1 foo',
+ b'-r1 foo http://example.com/',
+ b'-r 1 foo http://example.com/'
+ ]:
+ change_external_expect_error(A_path, ext,
+ '.*cannot use a URL \'.*\' as the ' \
+ 'target directory for an external ' \
+ 'definition.*')
+
+#----------------------------------------------------------------------
+
+def old_style_externals_ignore_peg_reg(sbox):
+ "old 'PATH URL' format should ignore peg revisions"
+
+ # Verifies that in the old 'PATH URL' externals format, '@HEAD' is
+ # treated as a literal part of the URL (not peg syntax), so the update
+ # warns that the URL does not exist.
+ external_url_for = externals_test_setup(sbox)
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # Checkout a working copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ # Update the working copy.
+ expected_output = svntest.wc.State(wc_dir, {
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output, None, None)
+
+ # Set an external property using the old 'PATH URL' syntax with
+ # @HEAD in the URL.
+ ext = "exdir_G " + external_url_for["A/C/exdir_G"] + "@HEAD\n"
+
+ # Set and commit the property.
+ change_external(sbox.ospath('A'), ext)
+
+ # Update the working copy. This should succeed (exitcode 0) but
+ # should print warnings on the external because the URL with '@HEAD'
+ # does not exist.
+ expected_error = "|".join([".*Error handling externals definition.*",
+ ".*URL .*/A/D/G@HEAD' .* doesn't exist.*",
+ ])
+ # run_and_verify_svn2 allows checking the expected exit code (1 here
+ # for the external warning) alongside the stderr pattern.
+ svntest.actions.run_and_verify_svn2(None,
+ expected_error,
+ 1,
+ 'up',
+ wc_dir)
+
+
+#----------------------------------------------------------------------
+
+def cannot_move_or_remove_file_externals(sbox):
+ "should not be able to mv or rm a file external"
+
+ # Verifies that a file external cannot be deleted or moved directly,
+ # but that the directory containing it can still be deleted and
+ # committed.
+ external_url_for = externals_test_setup(sbox)
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # Checkout a working copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ # Should not be able to delete the file external.
+ svntest.actions.run_and_verify_svn(None,
+ ".*Cannot remove the external at "
+ ".*gamma.*; please .* "
+ "the svn:externals .*",
+ 'rm',
+ sbox.ospath('A/B/gamma'))
+
+ # Should not be able to move the file external.
+ svntest.actions.run_and_verify_svn(None,
+ ".*Cannot move the external at "
+ ".*gamma.*; please .*edit.*"
+ "svn:externals.*",
+ 'mv',
+ sbox.ospath('A/B/gamma'),
+ sbox.ospath('A/B/gamma1'))
+
+ # But the directory that contains it can be deleted.
+ # NOTE(review): this expected_status and the tweak below are dead code —
+ # the variable is unconditionally reassigned a few lines further down.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 6)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm',
+ sbox.ospath('A/B'))
+
+ expected_status.tweak('A/B', status='D ')
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B' : Item(verb='Deleting'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 6)
+ expected_status.remove('A/B', 'A/B/E', 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/B/F', 'A/B/lambda')
+
+ # The externals set up by externals_test_setup() appear in status as
+ # their own (mostly unmodified) subtrees.
+ expected_status.add({
+ 'A/D/exdir_A' : Item(status=' ', wc_rev='5', prev_status='X '),
+ 'A/D/exdir_A/D' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/D/gamma' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/D/G' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/D/G/pi' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/D/G/rho' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/D/G/tau' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/D/H' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/D/H/chi' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/D/H/psi' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/D/H/omega' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/B' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/B/E' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/B/E/beta' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/B/E/alpha' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/B/F' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/B/lambda' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/G' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/G/pi' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/G/tau' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/G/rho' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/H' : Item(status=' ', wc_rev='1'),
+ 'A/D/exdir_A/H/psi' : Item(status=' ', wc_rev='1'),
+ 'A/D/exdir_A/H/omega' : Item(status=' ', wc_rev='1'),
+ 'A/D/exdir_A/H/chi' : Item(status=' ', wc_rev='1'),
+ 'A/D/exdir_A/C' : Item(status=' ', wc_rev='5'),
+ 'A/D/exdir_A/mu' : Item(status=' ', wc_rev='5'),
+
+ 'A/C/exdir_G' : Item(status=' ', prev_status='X ', wc_rev='5'),
+ 'A/C/exdir_G/tau' : Item(status=' ', wc_rev='5'),
+ 'A/C/exdir_G/pi' : Item(status=' ', wc_rev='5'),
+ 'A/C/exdir_G/rho' : Item(status=' ', wc_rev='5'),
+
+ 'A/D/x' : Item(status='X '),
+ 'A/D/x/y/z/blah' : Item(status=' ', wc_rev='5'),
+ 'A/D/x/y/z/blah/lambda' : Item(status=' ', wc_rev='5'),
+ 'A/D/x/y/z/blah/E' : Item(status=' ', wc_rev='5'),
+ 'A/D/x/y/z/blah/E/beta' : Item(status=' ', wc_rev='5'),
+ 'A/D/x/y/z/blah/E/alpha': Item(status=' ', wc_rev='5'),
+ 'A/D/x/y/z/blah/F' : Item(status=' ', wc_rev='5'),
+
+ 'A/C/exdir_H' : Item(status=' ', prev_status='X ', wc_rev='1'),
+ 'A/C/exdir_H/omega' : Item(status=' ', wc_rev='1'),
+ 'A/C/exdir_H/chi' : Item(status=' ', wc_rev='1'),
+ 'A/C/exdir_H/psi' : Item(status=' ', wc_rev='1'),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+
+ # Bring the working copy up to date and check that the file the file
+ # external is switched to still exists.
+ expected_output = svntest.wc.State(wc_dir, {
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output, None, None)
+
+ # Opening the file raises if it is missing, which is the actual check.
+ open(sbox.ospath('A/D/gamma')).close()
+
+#----------------------------------------------------------------------
+
+def cant_place_file_external_into_dir_external(sbox):
+ "place a file external into a directory external"
+
+ # Verifies that defining a file external whose target lies inside a
+ # directory external is rejected during update (E205011).
+ external_url_for = externals_test_setup(sbox)
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+ other_repo_url = repo_url + ".other"
+
+ # Checkout a working copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ # Put a directory external into the same repository and then a file
+ # external into that.
+ ext = "^/A/D A/D-copy\n" + \
+ "^/A/B/E/beta A/D-copy/G/beta\n"
+ change_external(wc_dir, ext)
+
+ # The update must fail with an externals-definition error rather than
+ # placing the file external inside the directory external.
+ svntest.actions.run_and_verify_svn(None, 'svn: E205011: ' +
+ 'Failure occurred.*definitions',
+ 'up', wc_dir)
+
+#----------------------------------------------------------------------
+
+# Issue #2461.
+@Issue(2461)
+def external_into_path_with_spaces(sbox):
+ "allow spaces in external local paths"
+
+ # Verifies two quoting styles for spaces in external target paths:
+ # a double-quoted target and backslash-escaped spaces (issue #2461).
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # NOTE(review): '\ ' in this non-raw Python string is an invalid escape
+ # sequence (kept as a literal backslash-space today, but deprecated);
+ # a raw string or '\\ ' would be the safe spelling.
+ ext = '^/A/D "A/copy of D"\n' +\
+ '^/A/D A/another\ copy\ of\ D'
+ change_external(wc_dir, ext)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/another copy of D/G': Item(status='A '),
+ 'A/another copy of D/G/pi': Item(status='A '),
+ 'A/another copy of D/G/tau': Item(status='A '),
+ 'A/another copy of D/G/rho': Item(status='A '),
+ 'A/another copy of D/H': Item(status='A '),
+ 'A/another copy of D/H/chi': Item(status='A '),
+ 'A/another copy of D/H/omega': Item(status='A '),
+ 'A/another copy of D/H/psi': Item(status='A '),
+ 'A/another copy of D/gamma': Item(status='A '),
+ 'A/copy of D/H' : Item(status='A '),
+ 'A/copy of D/H/chi' : Item(status='A '),
+ 'A/copy of D/H/omega': Item(status='A '),
+ 'A/copy of D/H/psi' : Item(status='A '),
+ 'A/copy of D/gamma' : Item(status='A '),
+ 'A/copy of D/G' : Item(status='A '),
+ 'A/copy of D/G/rho' : Item(status='A '),
+ 'A/copy of D/G/tau' : Item(status='A '),
+ 'A/copy of D/G/pi' : Item(status='A '),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output, None, None)
+ probe_paths_exist([
+ sbox.ospath('A/copy of D'),
+ sbox.ospath('A/another copy of D'),
+ ])
+
+#----------------------------------------------------------------------
+
+# Issue #3368
+@Issue(3368)
+def binary_file_externals(sbox):
+ "binary file externals"
+
+ # Verifies that a file external pointing at a binary file is checked
+ # out with identical contents and svn:mime-type (issue #3368).
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add a binary file A/theta, write PNG file data into it.
+ theta_contents = open(os.path.join(sys.path[0], "theta.bin"), 'rb').read()
+ theta_path = sbox.ospath('A/theta')
+ svntest.main.file_write(theta_path, theta_contents, 'wb')
+
+ svntest.main.run_svn(None, 'add', theta_path)
+
+ # Commit the binary file
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/theta' : Item(verb='Adding (bin)'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/theta' : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+
+ # Create a file external on the binary file A/theta
+ C = sbox.ospath('A/C')
+ external = os.path.join(C, 'external')
+ externals_prop = "^/A/theta external\n"
+
+ # Set and commit the property.
+ change_external(C, externals_prop)
+
+
+ # Now, /A/C/external is designated as a file external pointing to
+ # the binary file /A/theta, but the external file is not there yet.
+ # Try to actually insert the external file via a verified update:
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/C/external' : Item(status='A '),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/theta' : Item(
+ theta_contents,
+ props={'svn:mime-type' : 'application/octet-stream'}),
+ 'A/C' : Item(props={'svn:externals':externals_prop}),
+ 'A/C/external' : Item(
+ theta_contents,
+ props={'svn:mime-type' : 'application/octet-stream'}),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+ expected_status.add({
+ 'A/theta' : Item(status=' ', wc_rev=3),
+ 'A/C/external' : Item(status=' ', wc_rev=3, switched='X'),
+ })
+
+ # check_props=True so the svn:mime-type on the external is verified too.
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ check_props=True)
+
+#----------------------------------------------------------------------
+
+# Issue #3351.
+@Issue(3351)
+def update_lose_file_external(sbox):
+ "delete a file external"
+
+ # Verifies that removing the svn:externals property and updating
+ # removes the file external from disk and status (issue #3351).
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+
+ # Create a file external in A/C/external on the file A/mu
+ C = sbox.ospath('A/C')
+ external = os.path.join(C, 'external')
+ externals_prop = "^/A/mu external\n"
+
+ # Set and commit the property.
+ change_external(C, externals_prop)
+
+
+ # Now, /A/C/external is designated as a file external pointing to
+ # the file /A/mu, but the external file is not there yet.
+ # Try to actually insert the external file via an update:
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/C/external' : Item(status='A '),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/C' : Item(props={'svn:externals':externals_prop}),
+ 'A/C/external' : Item("This is the file 'mu'.\n"),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'A/C/external' : Item(status=' ', wc_rev='2', switched='X'),
+ })
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ check_props=True)
+
+ # now remove the svn:external prop
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propdel', 'svn:externals', C)
+
+ # commit the property change
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/C' : Item(verb='Sending'),
+ })
+
+ # (re-use above expected_status)
+ expected_status.tweak('A/C', wc_rev = 3)
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # try to actually get rid of the external via an update
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/C/external' : Item(verb='Removed external')
+ })
+
+ # (re-use above expected_disk)
+ expected_disk.tweak('A/C', props = {})
+ expected_disk.remove('A/C/external')
+
+ # (re-use above expected_status)
+ expected_status.tweak(wc_rev = 3)
+
+ # And assume that the external will be removed.
+ expected_status.remove('A/C/external')
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ check_props=True)
+
+ # Belt-and-braces: the file must really be gone from disk.
+ probe_paths_missing([sbox.ospath('A/C/external')])
+
+
+#----------------------------------------------------------------------
+
+# Issue #3351.
+@Issue(3351)
+def switch_relative_external(sbox):
+ "switch a relative external"
+
+ # Verifies that a '../'-relative external re-resolves against the new
+ # location after 'svn switch' (issue #3351).
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # Create a relative external in A/D on ../B
+ A_path = sbox.ospath('A')
+ A_copy_path = sbox.ospath('A_copy')
+ A_copy_url = repo_url + '/A_copy'
+ D_path = os.path.join(A_path, 'D')
+ ext_path = os.path.join(D_path, 'ext')
+ externals_prop = "../B ext\n"
+ change_external(D_path, externals_prop)
+
+ # Update our working copy, and create a "branch" (A => A_copy)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/ext/E' : Item(status='A '),
+ 'A/D/ext/E/beta' : Item(status='A '),
+ 'A/D/ext/E/alpha' : Item(status='A '),
+ 'A/D/ext/F' : Item(status='A '),
+ 'A/D/ext/lambda' : Item(status='A '),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output, None, None)
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ '--quiet', A_path, A_copy_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg',
+ '--quiet', wc_dir)
+
+ # Okay. We now want to switch A to A_copy, which *should* cause
+ # A/D/ext to point to the URL for A_copy/B (instead of A/B).
+ svntest.actions.run_and_verify_svn(None, [], 'sw',
+ A_copy_url, A_path)
+
+ # 'svn info' must report both the switched dir and its external as
+ # rooted under A_copy now.
+ expected_infos = [
+ { 'Path' : re.escape(D_path),
+ 'URL' : sbox.repo_url + '/A_copy/D',
+ },
+ { 'Path' : re.escape(ext_path),
+ 'URL' : sbox.repo_url + '/A_copy/B',
+ },
+ ]
+ svntest.actions.run_and_verify_info(expected_infos, D_path, ext_path)
+
+#----------------------------------------------------------------------
+
+# A regression test for a bug in exporting externals from a mixed-depth WC.
+def export_sparse_wc_with_externals(sbox):
+ "export from a sparse working copy with externals"
+
+ # Regression test: exporting a mixed-depth WC must not wrongly process
+ # externals belonging to the (depth-empty) parent.
+ externals_test_setup(sbox)
+
+ repo_url = sbox.repo_url + '/A/B'
+ wc_dir = sbox.wc_dir
+ # /A/B contains (dir 'E', dir 'F', file 'lambda', external dir 'gamma').
+ children = [ 'E', 'F', 'lambda' ]
+ ext_children = [ 'gamma' ]
+
+ # Helper: map WC-relative names to absolute paths under wc_dir.
+ def wc_paths_of(relative_paths):
+ return [ os.path.join(wc_dir, path) for path in relative_paths ]
+
+ child_paths = wc_paths_of(children)
+ ext_child_paths = wc_paths_of(ext_children)
+
+ export_target = sbox.add_wc_path('export')
+
+ # Create a working copy with depth=empty itself but children that are
+ # depth=infinity.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout', '--depth=empty',
+ repo_url, wc_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'update', *child_paths)
+ # Export the working copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'export', wc_dir, export_target)
+ # It failed with "'gamma' is not under version control" because the
+ # depth-infinity children led it wrongly to try to process externals
+ # in the parent.
+
+ svntest.main.safe_rmtree(export_target)
+
+#----------------------------------------------------------------------
+
+# Change external from one repo to another
+def relegate_external(sbox):
+ "relegate external from one repo to another"
+
+ # Verifies that retargeting an external to a different repository
+ # "relegates" it: the old checkout is discarded and recreated from the
+ # new repository on update.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_dir = sbox.repo_dir
+ repo_url = sbox.repo_url
+ A_path = sbox.ospath('A')
+
+ # setup an external within the same repository
+ externals_desc = '^/A/B/E external'
+ change_external(A_path, externals_desc)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/external/alpha' : Item(status='A '),
+ 'A/external/beta' : Item(status='A '),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output, None, None)
+
+ # create another repository
+ other_repo_dir, other_repo_url = sbox.add_repo_path('other')
+ svntest.main.copy_repos(repo_dir, other_repo_dir, 2)
+
+ # point external to the other repository
+ externals_desc = other_repo_url + '/A/B/E external\n'
+ change_external(A_path, externals_desc)
+
+ # Update "relegates", i.e. throws-away and recreates, the external
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/external' : Item(), # No A?
+ 'A/external/alpha' : Item(status='A '),
+ 'A/external/beta' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A', props={'svn:externals' : externals_desc})
+ expected_disk.add({
+ 'A/external' : Item(),
+ 'A/external/alpha' : Item('This is the file \'alpha\'.\n'),
+ 'A/external/beta' : Item('This is the file \'beta\'.\n'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+ # The copied repository was made at r2, so the relegated external is
+ # at wc_rev 2 even though the primary WC is at r3.
+ expected_status.add({
+ 'A/external' : Item(status=' ', prev_status='X ', wc_rev='2'),
+ 'A/external/alpha' : Item(status=' ', wc_rev='2'),
+ 'A/external/beta' : Item(status=' ', wc_rev='2'),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ check_props=True)
+
+#----------------------------------------------------------------------
+
+# Issue #3552
+@Issue(3552)
+def wc_repos_file_externals(sbox):
+  "tag directory with file externals from wc to url"
+
+  # Issue #3552: a WC->URL copy ('svn cp WC_PATH URL') of a directory that
+  # defines a file external must yield a tag whose external also checks out
+  # on the next update.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  repo_url = sbox.repo_url
+
+  # Add a file A/theta.
+  theta_path = sbox.ospath('A/theta')
+  svntest.main.file_write(theta_path, 'theta', 'w')
+  svntest.main.run_svn(None, 'add', theta_path)
+
+  # Created expected output tree for 'svn ci'
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/theta' : Item(verb='Adding'),
+    })
+
+  # Create expected status tree
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'A/theta' : Item(status='  ', wc_rev=2),
+    })
+
+  # Commit the new file, creating revision 2.
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+
+  # Create a file external on the file A/theta
+  C = sbox.ospath('A/C')
+  # NOTE(review): 'external' is not used anywhere below.
+  external = os.path.join(C, 'theta')
+  externals_prop = "^/A/theta theta\n"
+
+  # Set and commit the property.
+  change_external(C, externals_prop)
+
+
+  # Now, /A/C/theta is designated as a file external pointing to
+  # the file /A/theta, but the external file is not there yet.
+  # Try to actually insert the external file via a verified update:
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/C/theta' : Item(status='A '),
+    })
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({
+    'A/theta' : Item('theta'),
+    'A/C' : Item(props={'svn:externals':externals_prop}),
+    'A/C/theta' : Item('theta'),
+    })
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+  expected_status.add({
+    'A/theta' : Item(status='  ', wc_rev=3),
+    'A/C/theta' : Item(status='  ', wc_rev=3, switched='X'),
+    })
+
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        check_props=True)
+
+  # Copy A/C to a new tag in the repos
+  tag_url = repo_url + '/A/I'
+  svntest.main.run_svn(None, 'cp', C, tag_url, '-m', 'create tag')
+
+  # Try to actually insert the external file (A/I/theta) via a verified update:
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/I' : Item(status='A '),
+    'A/I/theta' : Item(status='A '),
+    })
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({
+    'A/theta' : Item('theta'),
+    'A/C' : Item(props={'svn:externals':externals_prop}),
+    'A/C/theta' : Item('theta'),
+    'A/I' : Item(props={'svn:externals':externals_prop}),
+    'A/I/theta' : Item('theta'),
+    })
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 4)
+  expected_status.add({
+    'A/theta' : Item(status='  ', wc_rev=4),
+    'A/C/theta' : Item(status='  ', wc_rev=4, switched='X'),
+    'A/I' : Item(status='  ', wc_rev=4),
+    'A/I/theta' : Item(status='  ', wc_rev=4, switched='X'),
+    })
+
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        check_props=True)
+
+#----------------------------------------------------------------------
+@SkipUnless(svntest.main.server_has_mergeinfo)
+@Issue(3843)
+def merge_target_with_externals(sbox):
+  "merge target with externals"
+
+  # Test for a problem that plagued Subversion in the pre-1.7-single-DB world:
+  # Externals in a merge target would get meaningless explicit mergeinfo set
+  # on them. See http://svn.haxx.se/dev/archive-2010-08/0088.shtml
+  externals_test_setup(sbox)
+  wc_dir = sbox.wc_dir
+  repo_url = sbox.repo_url
+
+  # Some paths we'll care about
+  A_path = sbox.ospath('A')
+  A_branch_path = sbox.ospath('A-branch')
+  A_gamma_branch_path = sbox.ospath('A-branch/D/gamma')
+
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'checkout',
+                                     repo_url, wc_dir)
+
+  # Setup A/external as file external to A/mu
+  # and A/external-pinned as a pinned file external to A/mu
+  externals_prop = "^/A/mu external\n^/A/mu@6 external-pinned\n"
+  change_external(sbox.ospath('A'), externals_prop)
+
+  # Branch A@1 to A-branch and make a simple text change on the latter in r8.
+  svntest.actions.run_and_verify_svn(None, [], 'copy', A_path + '@1',
+                                     A_branch_path)
+  svntest.actions.run_and_verify_svn(None, [], 'ci',
+                                     '-m', 'make a copy', wc_dir)
+  svntest.main.file_write(A_gamma_branch_path, "The new gamma!\n")
+  svntest.actions.run_and_verify_svn(None, [], 'ci',
+                                     '-m', 'branch edit', wc_dir)
+  # Update so the two externals defined above actually appear in the WC.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/external' : Item(status='A '),
+    'A/external-pinned' : Item(status='A '),
+    })
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output, None, None)
+
+  # Merge r8 from A-branch back to A. There should be explicit mergeinfo
+  # only at the root of A; the externals should not get any.
+  svntest.actions.run_and_verify_svn(None, [], 'merge', '-c8',
+                                     repo_url + '/A-branch', A_path)
+  svntest.actions.run_and_verify_svn(
+    ["Properties on '" + A_path + "':\n",
+     "  svn:mergeinfo\n",
+     "    /A-branch:8\n"],
+    [], 'pg', svntest.main.SVN_PROP_MERGEINFO, '-vR', wc_dir)
+
+def update_modify_file_external(sbox):
+  "update that modifies a file external"
+
+  # Regression test: an update that carries new text into an existing file
+  # external must apply the change cleanly (this used to assert in
+  # update_editor.c, see below).
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Setup A/external as file external to A/mu
+  externals_prop = "^/A/mu external\n"
+  change_external(sbox.ospath('A'), externals_prop)
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/external' : Item(status='A '),
+    })
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({
+    'A' : Item(props={'svn:externals':externals_prop}),
+    'A/external' : Item("This is the file 'mu'.\n"),
+    })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+  expected_status.add({
+    'A/external' : Item(status='  ', wc_rev='2', switched='X'),
+    })
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        check_props=True)
+
+  # Modify A/mu
+  svntest.main.file_append(sbox.ospath('A/mu'), 'appended mu text')
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/mu' : Item(verb='Sending'),
+    })
+  expected_status.tweak('A/mu', wc_rev=3)
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status)
+
+  # Update to modify the file external, this asserts in update_editor.c
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/external' : Item(status='U '),
+    })
+  expected_disk.tweak('A/mu', 'A/external',
+                      contents=expected_disk.desc['A/mu'].contents
+                      + 'appended mu text')
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+  expected_status.add({
+    'A/external' : Item(status='  ', wc_rev='3', switched='X'),
+    })
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        check_props=True)
+
+# Test for issue #2267
+@Issue(2267)
+def update_external_on_locally_added_dir(sbox):
+  "update an external on a locally added dir"
+
+  # svn:externals is set (but NOT committed) on a locally added directory;
+  # a plain 'svn up' must still fetch the externals it defines.
+  external_url_for = externals_test_setup(sbox)
+  wc_dir = sbox.wc_dir
+
+  repo_url = sbox.repo_url
+  other_repo_url = repo_url + ".other"
+
+  # Checkout a working copy
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'checkout',
+                                     repo_url, wc_dir)
+
+  # Add one new external item to the property on A/foo. The new item is
+  # "exdir_E", deliberately added in the middle not at the end.
+  new_externals_desc = \
+           external_url_for["A/D/exdir_A"] + " exdir_A" + \
+           "\n" + \
+           external_url_for["A/D/exdir_A/G/"] + " exdir_A/G/" + \
+           "\n" + \
+           "exdir_E " + other_repo_url + "/A/B/E" + \
+           "\n" + \
+           "exdir_A/H -r 1 " + external_url_for["A/D/exdir_A/H"] + \
+           "\n" + \
+           external_url_for["A/D/x/y/z/blah"] + " x/y/z/blah" + \
+           "\n"
+
+  # Add A/foo and set the property on it
+  new_dir = sbox.ospath("A/foo")
+  sbox.simple_mkdir("A/foo")
+  # commit=False: the property stays a local modification on the added dir.
+  change_external(new_dir, new_externals_desc, commit=False)
+
+  # Update the working copy, see if we get the new item.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/foo/exdir_A/B' : Item(status='A '),
+    'A/foo/exdir_A/B/E' : Item(status='A '),
+    'A/foo/exdir_A/B/E/beta': Item(status='A '),
+    'A/foo/exdir_A/B/E/alpha': Item(status='A '),
+    'A/foo/exdir_A/B/F' : Item(status='A '),
+    'A/foo/exdir_A/B/lambda': Item(status='A '),
+    'A/foo/exdir_A/D' : Item(status='A '),
+    'A/foo/exdir_A/D/G' : Item(status='A '),
+    'A/foo/exdir_A/D/G/rho': Item(status='A '),
+    'A/foo/exdir_A/D/G/pi': Item(status='A '),
+    'A/foo/exdir_A/D/G/tau': Item(status='A '),
+    'A/foo/exdir_A/D/gamma': Item(status='A '),
+    'A/foo/exdir_A/D/H' : Item(status='A '),
+    'A/foo/exdir_A/D/H/chi': Item(status='A '),
+    'A/foo/exdir_A/D/H/omega': Item(status='A '),
+    'A/foo/exdir_A/D/H/psi': Item(status='A '),
+    'A/foo/exdir_A/C' : Item(status='A '),
+    'A/foo/exdir_A/mu' : Item(status='A '),
+    'A/foo/exdir_A/H/omega': Item(status='A '),
+    'A/foo/exdir_A/H/psi': Item(status='A '),
+    'A/foo/exdir_A/H/chi': Item(status='A '),
+    'A/foo/exdir_A/G/tau': Item(status='A '),
+    'A/foo/exdir_A/G/rho': Item(status='A '),
+    'A/foo/exdir_A/G/pi': Item(status='A '),
+    'A/foo/x/y/z/blah/F': Item(status='A '),
+    'A/foo/x/y/z/blah/E': Item(status='A '),
+    'A/foo/x/y/z/blah/E/beta': Item(status='A '),
+    'A/foo/x/y/z/blah/E/alpha': Item(status='A '),
+    'A/foo/x/y/z/blah/lambda': Item(status='A '),
+    'A/foo/exdir_E/beta': Item(status='A '),
+    'A/foo/exdir_E/alpha': Item(status='A '),
+    })
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output, None, None)
+
+  # The mid-list external is the interesting one; make sure it arrived.
+  probe_paths_exist([sbox.ospath('A/foo/exdir_E')])
+
+# Test for issue #2267
+@Issue(2267)
+def switch_external_on_locally_added_dir(sbox):
+  "switch an external on a locally added dir"
+
+  # Same setup as update_external_on_locally_added_dir, but the externals
+  # must also survive an 'svn switch' to a branch of A.
+  external_url_for = externals_test_setup(sbox)
+  wc_dir = sbox.wc_dir
+
+  repo_url = sbox.repo_url
+  other_repo_url = repo_url + ".other"
+  A_path = repo_url + "/A"
+  A_copy_path = repo_url + "/A_copy"
+
+  # Create a branch of A
+  # Checkout a working copy
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'copy',
+                                     A_path, A_copy_path,
+                                     '-m', 'Create branch of A')
+
+  # Checkout a working copy
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'checkout',
+                                     A_path, wc_dir)
+
+  # Add one new external item to the property on A/foo. The new item is
+  # "exdir_E", deliberately added in the middle not at the end.
+  new_externals_desc = \
+           external_url_for["A/D/exdir_A"] + " exdir_A" + \
+           "\n" + \
+           external_url_for["A/D/exdir_A/G/"] + " exdir_A/G/" + \
+           "\n" + \
+           "exdir_E " + other_repo_url + "/A/B/E" + \
+           "\n" + \
+           "exdir_A/H -r 1 " + external_url_for["A/D/exdir_A/H"] + \
+           "\n" + \
+           external_url_for["A/D/x/y/z/blah"] + " x/y/z/blah" + \
+           "\n"
+
+  # Add A/foo and set the property on it
+  new_dir = sbox.ospath("foo")
+  sbox.simple_mkdir("foo")
+  # commit=False: the property stays a local modification on the added dir.
+  change_external(new_dir, new_externals_desc, commit=False)
+
+  # Switch the working copy to the branch, see if we get the new item.
+  svntest.actions.run_and_verify_svn(None, [], 'sw', A_copy_path, wc_dir)
+
+  probe_paths_exist([sbox.ospath('foo/exdir_E')])
+
+@Issue(3819)
+def file_external_in_sibling(sbox):
+  "update a file external in sibling dir"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Setup A2/iota as file external to ^/iota
+  externals_prop = "^/iota iota\n"
+  sbox.simple_mkdir("A2")
+  change_external(sbox.ospath('A2'), externals_prop)
+  sbox.simple_update()
+
+  # Issue #3819: run 'svn update' from inside A; the file external defined
+  # by the sibling dir A2 must not break it.  Expect a no-op update to r2.
+  os.chdir(sbox.ospath("A"))
+  svntest.actions.run_and_verify_svn(svntest.actions.expected_noop_update_output(2),
+                                     [], 'update')
+
+@Issue(3823)
+def file_external_update_without_commit(sbox):
+  "update a file external without committing target"
+
+  sbox.build(read_only=True)
+
+  # Setup A2/iota as file external to ^/iota
+  externals_prop = "^/iota iota\n"
+  sbox.simple_mkdir("A2")
+  change_external(sbox.ospath('A2'), externals_prop, commit=False)
+  # A2/ is an uncommitted added dir with an svn:externals property set.
+  # Issue #3823: the update must cope with an external whose defining
+  # directory has not been committed yet.
+  sbox.simple_update()
+
+def incoming_file_on_file_external(sbox):
+  "bring in a new file over a file external"
+
+  # An incoming added file at the URL where a file external is checked out
+  # must be skipped, not merged onto the external.
+  sbox.build()
+  repo_url = sbox.repo_url
+  wc_dir = sbox.wc_dir
+
+  change_external(sbox.wc_dir, "^/A/B/lambda ext\n")
+  # And bring in the file external
+  sbox.simple_update()
+
+  # Commit a new versioned file at the path the external occupies.
+  svntest.main.run_svn(None, 'cp', repo_url + '/iota',
+                       repo_url + '/ext', '-m', 'copied')
+
+  # Until recently this took over the file external as 'E'xisting file, with
+  # a textual conflict.
+  expected_output = svntest.wc.State(wc_dir, {
+    'ext' : Item(verb='Skipped'),
+    })
+  svntest.actions.run_and_verify_update(wc_dir, expected_output, None, None)
+
+def incoming_file_external_on_file(sbox):
+  "bring in a new file external over a file"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Define a file external 'iota' on top of the already-versioned file iota.
+  change_external(sbox.wc_dir, "^/A/B/lambda iota\n")
+
+  # And bring in the file external
+  # Returns an error: WC status of external unchanged.
+  svntest.actions.run_and_verify_update(wc_dir, None, None, None,
+                                        '.*The file external.*overwrite.*')
+
+
+def exclude_externals(sbox):
+  "try to exclude externals"
+
+  # 'svn update --set-depth exclude' on an external must fail cleanly, and
+  # '--set-depth infinity' must bring all externals back.
+  external_url_for = externals_test_setup(sbox)
+  wc_dir = sbox.wc_dir
+  repo_url = sbox.repo_url
+
+  # Checkout a working copy.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'checkout',
+                                     repo_url, wc_dir)
+
+  # Excluding a file external should either fail (current behavior)
+  # or register the file external as excluded (preferred behavior)
+  svntest.actions.run_and_verify_update(sbox.ospath('A/B/gamma'),
+                                        None, None, None,
+                                        '.*Cannot exclude.*', False,
+                                        '--set-depth', 'exclude',
+                                        sbox.ospath('A/B/gamma'))
+
+  # Excluding a directory external should either fail (current behavior)
+  # or register the directory external as excluded (preferred behavior)
+  svntest.actions.run_and_verify_update(sbox.ospath('A/C/exdir_G'),
+                                        None, None, None,
+                                        '.*Cannot exclude.*', False,
+                                        '--set-depth', 'exclude',
+                                        sbox.ospath('A/C/exdir_G'))
+
+  # And after an update with --set-depth infinity all externals should
+  # be there again.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 6)
+  expected_status.add({
+    'A/B/gamma' : Item(status='  ', wc_rev='6', switched='X'),
+
+    'A/C/exdir_H' : Item(status='  ', prev_status='X ', wc_rev='1'),
+    'A/C/exdir_H/omega' : Item(status='  ', wc_rev='1'),
+    'A/C/exdir_H/chi' : Item(status='  ', wc_rev='1'),
+    'A/C/exdir_H/psi' : Item(status='  ', wc_rev='1'),
+
+    'A/C/exdir_G' : Item(status='  ', prev_status='X ', wc_rev='5'),
+    'A/C/exdir_G/pi' : Item(status='  ', wc_rev='5'),
+    'A/C/exdir_G/rho' : Item(status='  ', wc_rev='5'),
+    'A/C/exdir_G/tau' : Item(status='  ', wc_rev='5'),
+
+    'A/D/exdir_A' : Item(status='  ', prev_status='X ', wc_rev='5'),
+    'A/D/exdir_A/H' : Item(status='  ', wc_rev='1'),
+    'A/D/exdir_A/H/psi' : Item(status='  ', wc_rev='1'),
+    'A/D/exdir_A/H/chi' : Item(status='  ', wc_rev='1'),
+    'A/D/exdir_A/H/omega': Item(status='  ', wc_rev='1'),
+    'A/D/exdir_A/D' : Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/D/H' : Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/D/H/chi': Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/D/H/omega': Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/D/H/psi': Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/D/G' : Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/D/G/pi': Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/D/G/rho': Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/D/G/tau': Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/D/gamma': Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/B' : Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/B/F' : Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/B/E' : Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/B/E/beta': Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/B/E/alpha': Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/B/lambda': Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/C' : Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/G' : Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/G/tau' : Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/G/rho' : Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/G/pi' : Item(status='  ', wc_rev='5'),
+    'A/D/exdir_A/mu' : Item(status='  ', wc_rev='5'),
+
+    'A/D/x' : Item(status='X '),
+    'A/D/x/y/z/blah' : Item(status='  ', wc_rev='5'),
+    'A/D/x/y/z/blah/E' : Item(status='  ', wc_rev='5'),
+    'A/D/x/y/z/blah/E/alpha': Item(status='  ', wc_rev='5'),
+    'A/D/x/y/z/blah/E/beta': Item(status='  ', wc_rev='5'),
+    'A/D/x/y/z/blah/lambda': Item(status='  ', wc_rev='5'),
+    'A/D/x/y/z/blah/F' : Item(status='  ', wc_rev='5'),
+    })
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        None, None, expected_status,
+                                        [], False,
+                                        '--set-depth', 'infinity', wc_dir)
+
+def file_externals_different_url(sbox):
+  "update file externals via different url"
+
+  # File externals may only point inside their own repository; URLs spelled
+  # against a foreign repository are normalised to the WC's repository, and
+  # 'svn relocate' rewrites them silently.
+  sbox.build()
+
+  wc_dir = sbox.wc_dir
+  r1_url = sbox.repo_url
+
+  # Second repository: an identical copy of r1 at a different URL.
+  r2_dir, r2_url = sbox.add_repo_path('2')
+  svntest.main.copy_repos(sbox.repo_dir, r2_dir, 1, 0)
+
+
+  # Five file externals on the WC root: two via r1's URL, two via r2's URL,
+  # one repository-root-relative.
+  sbox.simple_propset('svn:externals',
+                      'r1-e-1 ' + r1_url + '/iota\n' +
+                      r1_url + '/iota r1-e-2\n' +
+                      'r2-e-1 ' + r2_url + '/iota\n' +
+                      r2_url + '/iota r2-e-2\n' +
+                      '^/iota rr-e-1\n', '')
+
+  # All file externals appear in the working copy, with normalised URLs.
+  expected_output = svntest.wc.State(wc_dir, {
+    'r1-e-1' : Item(status='A '),
+    'r1-e-2' : Item(status='A '),
+    'r2-e-1' : Item(status='A '),
+    'r2-e-2' : Item(status='A '),
+    'rr-e-1' : Item(status='A '),
+    })
+
+  expected_status = actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('', status=' M')
+  expected_status.add({
+    'r2-e-1' : Item(status='  ', wc_rev='1', switched='X'),
+    'r1-e-1' : Item(status='  ', wc_rev='1', switched='X'),
+    'r1-e-2' : Item(status='  ', wc_rev='1', switched='X'),
+    'rr-e-1' : Item(status='  ', wc_rev='1', switched='X'),
+    'r2-e-2' : Item(status='  ', wc_rev='1', switched='X'),
+    })
+
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output, None,
+                                        expected_status)
+
+  # Verify that all file external URLs are descendants of r1_url
+  for e in ['r1-e-1', 'r1-e-2', 'r2-e-1', 'r2-e-2', 'rr-e-1']:
+    actions.run_and_verify_info([{'Repository Root' : r1_url}],
+                                os.path.join(sbox.wc_dir, e))
+
+
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'relocate', r1_url, r2_url, wc_dir)
+
+
+  # URLs of existing file externals are silently rewritten
+  expected_output = svntest.wc.State(wc_dir, {
+    })
+
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output, None,
+                                        expected_status)
+
+  # Verify that all file external URLs are descendants of r2_url
+  for e in ['r1-e-1', 'r1-e-2', 'r2-e-1', 'r2-e-2', 'rr-e-1']:
+    actions.run_and_verify_info([{'Repository Root' : r2_url}],
+                                os.path.join(sbox.wc_dir, e))
+
+
+def file_external_in_unversioned(sbox):
+  "file external in unversioned dir"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # The external target path 'X/mu' names an intermediate directory X that
+  # does not exist in the repository; update has to create it.
+  sbox.simple_propset('svn:externals', '^/A/mu X/mu', 'A')
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/X/mu' : Item(status='A '),
+    })
+  svntest.actions.run_and_verify_update(wc_dir, expected_output, None, None)
+
+  # At one point this failed with SVN_DEBUG wcng consistency checks enabled
+  svntest.actions.run_and_verify_svn(None, [], 'cleanup', wc_dir)
+
+
+from svntest import verify, actions, main
+
+@Issue(3589, 4000)
+def copy_file_externals(sbox):
+  "a WC->WC copy should exclude file externals"
+
+  # Issues #3589/#4000: 'svn cp X X_copy' inside the WC must not copy the
+  # file externals xiota/xmu; they only reappear in X_copy via the
+  # subsequent update (verified at the end of this test).
+  # svntest.factory.make(sbox,"""
+  # svn mkdir X
+  # ### manual edit: add '\n ^/A/mu xmu' to externals definition:
+  # svn ps svn:externals "^/iota xiota" X
+  # """)
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  X = sbox.ospath('X')
+
+  # svn mkdir X
+  expected_stdout = ['A         ' + X + '\n']
+
+  actions.run_and_verify_svn2(expected_stdout, [], 0, 'mkdir', X)
+
+  # svn ps svn:externals "^/iota xiota" X
+  expected_stdout = ["property 'svn:externals' set on '" + X + "'\n"]
+
+  actions.run_and_verify_svn2(expected_stdout, [], 0, 'ps',
+    'svn:externals', '''
+    ^/iota xiota
+    ^/A/mu xmu
+    ''', X)
+
+  # svntest.factory.make(sbox, '''
+  # svn ci
+  # svn up
+  # # have a commit on one of the files
+  # echo mod >> X/xmu
+  # svn ci X/xmu
+  # svn up
+  # # now perform the WC->WC copy
+  # svn cp X X_copy
+  # ### manual edit: add a verify_disk(check_props=True) here
+  # svn ci
+  # ### manual edit: add check_props=True to below update
+  # svn up
+  # ''')
+
+  X = sbox.ospath('X')
+  X_copy = sbox.ospath('X_copy')
+  X_xmu = sbox.ospath('X/xmu')
+
+  # svn ci
+  expected_output = svntest.wc.State(wc_dir, {
+    'X' : Item(verb='Adding'),
+    })
+
+  expected_status = actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'X' : Item(status='  ', wc_rev='2'),
+    })
+
+  actions.run_and_verify_commit(wc_dir, expected_output, expected_status)
+
+  # svn up
+  expected_output = svntest.wc.State(wc_dir, {
+    'X/xmu' : Item(status='A '),
+    'X/xiota' : Item(status='A '),
+    })
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({
+    'X' : Item(),
+    'X/xiota' : Item(contents="This is the file 'iota'.\n"),
+    'X/xmu' : Item(contents="This is the file 'mu'.\n"),
+    })
+
+  expected_status.add({
+    'X/xiota' : Item(status='  ', wc_rev='2', switched='X'),
+    'X/xmu' : Item(status='  ', wc_rev='2', switched='X'),
+    })
+  expected_status.tweak(wc_rev='2')
+
+  actions.run_and_verify_update(wc_dir, expected_output, expected_disk,
+                                expected_status)
+
+  # have a commit on one of the files
+  # echo mod >> X/xmu
+  main.file_append(X_xmu, 'mod\n')
+
+  # svn ci X/xmu
+  expected_output = svntest.wc.State(wc_dir, {
+    'X/xmu' : Item(verb='Sending'),
+    })
+
+  expected_status.tweak('X/xmu', wc_rev='3')
+
+  actions.run_and_verify_commit(wc_dir, expected_output, expected_status,
+                                [], X_xmu)
+
+  # svn up
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/mu' : Item(status='U '),
+    })
+
+  expected_disk.tweak('A/mu', 'X/xmu',
+                      contents="This is the file 'mu'.\nmod\n")
+
+  expected_status.tweak(wc_rev='3')
+
+  actions.run_and_verify_update(wc_dir, expected_output, expected_disk,
+                                expected_status)
+
+  # now perform the WC->WC copy
+  # svn cp X X_copy
+  # Note: only X itself is reported added; the externals are not copied.
+  expected_stdout = ['A         ' + X_copy + '\n']
+
+  actions.run_and_verify_svn2(expected_stdout, [], 0, 'cp', X,
+    X_copy)
+
+  # svn ci
+  expected_output = svntest.wc.State(wc_dir, {
+    'X_copy' : Item(verb='Adding'),
+    })
+
+  expected_status.add({
+    'X_copy' : Item(status='  ', wc_rev='4'),
+    })
+
+  actions.run_and_verify_commit(wc_dir, expected_output, expected_status)
+
+  # verify disk state, also verifying props
+  expected_disk.add({
+    'X_copy' : Item(),
+    })
+  expected_disk.tweak('X', 'X_copy',
+                      props={'svn:externals' : '\n    ^/iota xiota\n    ^/A/mu xmu\n    \n'})
+
+  actions.verify_disk(wc_dir, expected_disk, True)
+
+  # svn up
+  # The externals now materialise in X_copy via the update, not the copy.
+  expected_output = svntest.wc.State(wc_dir, {
+    'X_copy/xmu' : Item(status='A '),
+    'X_copy/xiota' : Item(status='A '),
+    })
+
+  expected_disk.add({
+    'X_copy/xmu' : Item(contents="This is the file 'mu'.\nmod\n"),
+    'X_copy/xiota' : Item(contents="This is the file 'iota'.\n"),
+    })
+
+  expected_status.add({
+    'X_copy/xmu' : Item(status='  ', wc_rev='4', switched='X'),
+    'X_copy/xiota' : Item(status='  ', wc_rev='4', switched='X'),
+    })
+  expected_status.tweak(wc_rev='4')
+
+  actions.run_and_verify_update(wc_dir, expected_output, expected_disk,
+                                expected_status, check_props=True)
+
+def commit_include_externals(sbox):
+ "commit --include-externals"
+ # svntest.factory.make(sbox, """
+ # mkdir Z
+ # echo 'This is the file zeta.' > Z/zeta
+ # svn add Z
+ # svn mkdir --parents Xpegged X/Y
+ # svn ci
+ # svn up
+ # svn ps svn:externals "^/Z xZ" A/D/H
+ # svn ps svn:externals "^/iota@1 Xpegged/xiota" wc_dir
+ # # ^^^ manually set externals to:
+ # # ^/iota@1 Xpegged/xiota
+ # # -r1 ^/A/B/E Xpegged/xE
+ # # ^/A/mu X/xmu
+ # # ^/A/B/lambda X/Y/xlambda
+ # # ^/A/D/G X/xG
+ # # ^/A/D/H X/Y/xH
+ # """)
+ # exit(0)
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A_D_H = sbox.ospath('A/D/H')
+ X = sbox.ospath('X')
+ X_Y = sbox.ospath('X/Y')
+ Xpegged = sbox.ospath('Xpegged')
+ Z = sbox.ospath('Z')
+ Z_zeta = sbox.ospath('Z/zeta')
+
+ # mkdir Z
+ os.makedirs(Z)
+
+ # echo 'This is the file zeta.' > Z/zeta
+ main.file_write(Z_zeta, 'This is the file zeta.\n')
+
+ # svn add Z
+ expected_stdout = verify.UnorderedOutput([
+ 'A ' + Z + '\n',
+ 'A ' + Z_zeta + '\n',
+ ])
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'add', Z)
+
+ # svn mkdir --parents Xpegged X/Y
+ expected_stdout = verify.UnorderedOutput([
+ 'A ' + Xpegged + '\n',
+ 'A ' + X + '\n',
+ 'A ' + X_Y + '\n',
+ ])
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'mkdir',
+ '--parents', Xpegged, X_Y)
+
+ # svn ci
+ expected_output = svntest.wc.State(wc_dir, {
+ 'Z' : Item(verb='Adding'),
+ 'Z/zeta' : Item(verb='Adding'),
+ 'X' : Item(verb='Adding'),
+ 'X/Y' : Item(verb='Adding'),
+ 'Xpegged' : Item(verb='Adding'),
+ })
+
+ expected_status = actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'Z' : Item(status=' ', wc_rev='2'),
+ 'Z/zeta' : Item(status=' ', wc_rev='2'),
+ 'X' : Item(status=' ', wc_rev='2'),
+ 'X/Y' : Item(status=' ', wc_rev='2'),
+ 'Xpegged' : Item(status=' ', wc_rev='2'),
+ })
+
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status)
+
+ # svn up
+ expected_output = svntest.wc.State(wc_dir, {})
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'Z' : Item(),
+ 'Z/zeta' : Item(contents="This is the file zeta.\n"),
+ 'Xpegged' : Item(),
+ 'X' : Item(),
+ 'X/Y' : Item(),
+ })
+
+ expected_status.tweak(wc_rev='2')
+
+ actions.run_and_verify_update(wc_dir, expected_output, expected_disk,
+ expected_status)
+
+ # svn ps svn:externals "^/Z xZ" A/D/H
+ expected_stdout = ["property 'svn:externals' set on '" + A_D_H + "'\n"]
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'ps',
+ 'svn:externals', '^/Z xZ', A_D_H)
+
+ # svn ps svn:externals "^/iota@1 Xpegged/xiota" wc_dir
+ expected_stdout = ["property 'svn:externals' set on '" + wc_dir + "'\n"]
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'ps',
+ 'svn:externals',
+ '''
+ ^/iota@1 Xpegged/xiota
+ -r1 ^/A/B/E Xpegged/xE
+ ^/A/mu X/xmu
+ ^/A/B/lambda X/Y/xlambda
+ ^/A/D/G X/xG
+ ^/A/D/H X/Y/xH
+ ''', wc_dir)
+
+ # svntest.factory.make(sbox, prev_disk=expected_disk,
+ # prev_status=expected_status,
+ # commands = """
+ # svn ci
+ # svn up
+ # echo mod >> Xpegged/xE/alpha
+ # echo mod >> X/xmu
+ # echo mod >> X/Y/xlambda
+ # echo mod >> X/xG/pi
+ # echo mod >> X/Y/xH/chi
+ # echo mod >> X/Y/xH/xZ/zeta
+ # svn status
+ # # Expect no externals to be committed
+ # svn ci
+ # # Expect no externals to be committed, because pegged
+ # svn ci --include-externals Xpegged
+ # # Expect no externals to be committed, because of depth
+ # svn ci --depth=immediates --include-externals
+ # # Expect only unpegged externals to be committed (those in X/)
+ # svn ci --include-externals
+ # # ### Below, manually add:
+ # # expected_status.tweak('A/D/H/xZ', 'Xpegged/xE', 'X/Y/xH', 'X/xG',
+ # # wc_rev=None)
+ # svn up
+ # # new mods to check more cases
+ # echo mod >> X/xmu
+ # echo mod >> X/Y/xlambda
+ # echo mod >> X/xG/pi
+ # echo mod >> X/Y/xH/chi
+ # echo mod >> X/Y/xH/xZ/zeta
+ # svn status
+ # # Expect no externals to be committed, because of depth
+ # svn ci --include-externals --depth=empty X
+ # # Expect only file external xmu to be committed, because of depth
+ # svn ci --include-externals --depth=files X
+ # svn status
+ # # ### Below, manually add:
+ # # expected_status.tweak('A/D/H/xZ', 'Xpegged/xE', 'X/Y/xH', 'X/xG',
+ # # wc_rev=None)
+ # svn up
+ # echo mod >> X/xG/pi
+ # svn status
+ # # Expect explicit targets to be committed
+ # svn ci X/Y/xlambda X/xG
+ # svn status
+ # """)
+
+ X = sbox.ospath('X')
+ X_xG = sbox.ospath('X/xG')
+ X_xG_pi = sbox.ospath('X/xG/pi')
+ X_xmu = sbox.ospath('X/xmu')
+ X_Y_xH_chi = sbox.ospath('X/Y/xH/chi')
+ X_Y_xH_xZ_zeta = sbox.ospath('X/Y/xH/xZ/zeta')
+ X_Y_xlambda = sbox.ospath('X/Y/xlambda')
+ Xpegged = sbox.ospath('Xpegged')
+ Xpegged_xE_alpha = sbox.ospath('Xpegged/xE/alpha')
+
+ # svn ci
+ expected_output = svntest.wc.State(wc_dir, {
+ '' : Item(verb='Sending'),
+ 'A/D/H' : Item(verb='Sending'),
+ })
+
+ expected_status.tweak('', 'A/D/H', wc_rev='3')
+
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status)
+
+ # svn up
+ expected_output = svntest.wc.State(wc_dir, {
+ 'X/xmu' : Item(status='A '),
+ 'X/xG/tau' : Item(status='A '),
+ 'X/xG/rho' : Item(status='A '),
+ 'X/xG/pi' : Item(status='A '),
+ 'X/Y/xH' : Item(status=' U'),
+ 'X/Y/xH/psi' : Item(status='A '),
+ 'X/Y/xH/xZ/zeta' : Item(status='A '),
+ 'X/Y/xH/chi' : Item(status='A '),
+ 'X/Y/xH/omega' : Item(status='A '),
+ 'X/Y/xlambda' : Item(status='A '),
+ 'A/D/H/xZ/zeta' : Item(status='A '),
+ 'Xpegged/xiota' : Item(status='A '),
+ 'Xpegged/xE/alpha' : Item(status='A '),
+ 'Xpegged/xE/beta' : Item(status='A '),
+ })
+
+ expected_disk.add({
+ 'Xpegged/xE' : Item(),
+ 'Xpegged/xE/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'Xpegged/xE/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'Xpegged/xiota' : Item(contents="This is the file 'iota'.\n"),
+ 'A/D/H/xZ' : Item(),
+ 'A/D/H/xZ/zeta' : Item(contents="This is the file zeta.\n"),
+ 'X/Y/xlambda' : Item(contents="This is the file 'lambda'.\n"),
+ 'X/Y/xH' : Item(),
+ 'X/Y/xH/chi' : Item(contents="This is the file 'chi'.\n"),
+ 'X/Y/xH/xZ' : Item(),
+ 'X/Y/xH/xZ/zeta' : Item(contents="This is the file zeta.\n"),
+ 'X/Y/xH/psi' : Item(contents="This is the file 'psi'.\n"),
+ 'X/Y/xH/omega' : Item(contents="This is the file 'omega'.\n"),
+ 'X/xmu' : Item(contents="This is the file 'mu'.\n"),
+ 'X/xG' : Item(),
+ 'X/xG/tau' : Item(contents="This is the file 'tau'.\n"),
+ 'X/xG/rho' : Item(contents="This is the file 'rho'.\n"),
+ 'X/xG/pi' : Item(contents="This is the file 'pi'.\n"),
+ })
+
+ expected_status.tweak(wc_rev='3')
+ expected_status.add({
+ 'A/D/H/xZ' : Item(status=' ', prev_status='X ', wc_rev='3'),
+ 'A/D/H/xZ/zeta' : Item(status=' ', wc_rev='3'),
+
+ 'Xpegged/xiota' : Item(status=' ', wc_rev='1', switched='X'),
+ 'Xpegged/xE' : Item(status=' ', prev_status='X ', wc_rev='1'),
+ 'Xpegged/xE/alpha' : Item(status=' ', wc_rev='1'),
+ 'Xpegged/xE/beta' : Item(status=' ', wc_rev='1'),
+
+ 'X/Y/xH' : Item(status=' ', prev_status='X ', wc_rev='3'),
+ 'X/Y/xH/psi' : Item(status=' ', wc_rev='3'),
+ 'X/Y/xH/omega' : Item(status=' ', wc_rev='3'),
+ 'X/Y/xH/chi' : Item(status=' ', wc_rev='3'),
+ 'X/Y/xH/xZ' : Item(status=' ', prev_status='X ', wc_rev='3'),
+ 'X/Y/xH/xZ/zeta' : Item(status=' ', wc_rev='3'),
+
+ 'X/Y/xlambda' : Item(status=' ', wc_rev='3', switched='X'),
+ 'X/xmu' : Item(status=' ', wc_rev='3', switched='X'),
+
+ 'X/xG' : Item(status=' ', prev_status='X ', wc_rev='3'),
+ 'X/xG/rho' : Item(status=' ', wc_rev='3'),
+ 'X/xG/tau' : Item(status=' ', wc_rev='3'),
+ 'X/xG/pi' : Item(status=' ', wc_rev='3'),
+ })
+ expected_status.tweak('Xpegged/xiota', wc_rev='1')
+
+ actions.run_and_verify_update(wc_dir, expected_output, expected_disk,
+ expected_status)
+
+ # echo mod >> Xpegged/xE/alpha
+ main.file_append(Xpegged_xE_alpha, 'mod\n')
+
+ # echo mod >> X/xmu
+ main.file_append(X_xmu, 'mod\n')
+
+ # echo mod >> X/Y/xlambda
+ main.file_append(X_Y_xlambda, 'mod\n')
+
+ # echo mod >> X/xG/pi
+ main.file_append(X_xG_pi, 'mod\n')
+
+ # echo mod >> X/Y/xH/chi
+ main.file_append(X_Y_xH_chi, 'mod\n')
+
+ # echo mod >> X/Y/xH/xZ/zeta
+ main.file_append(X_Y_xH_xZ_zeta, 'mod\n')
+
+ # svn status
+ expected_status.tweak('X/Y/xlambda', 'X/xmu', 'X/Y/xH/chi',
+ 'X/Y/xH/xZ/zeta', 'Xpegged/xE/alpha',
+ 'X/xG/pi', status='M ')
+
+ actions.run_and_verify_unquiet_status(wc_dir, expected_status)
+
+ # Expect no externals to be committed
+ # svn ci
+ expected_output = svntest.wc.State(wc_dir, {})
+
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status)
+
+ # Expect no externals to be committed, because pegged
+ # svn ci --include-externals Xpegged
+ expected_output = svntest.wc.State(wc_dir, {})
+
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status,
+ [], '--include-externals', Xpegged)
+
+ # Expect no externals to be committed, because of depth
+ # svn ci --depth=immediates --include-externals
+ expected_output = svntest.wc.State(wc_dir, {})
+
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status,
+ [], '--depth=immediates', '--include-externals', wc_dir)
+
+ # Expect only unpegged externals to be committed (those in X/)
+ # svn ci --include-externals
+ expected_output = svntest.wc.State(wc_dir, {
+ 'X/xmu' : Item(verb='Sending'),
+ 'X/Y/xlambda' : Item(verb='Sending'),
+ 'X/Y/xH/xZ/zeta' : Item(verb='Sending'),
+ 'X/Y/xH/chi' : Item(verb='Sending'),
+ 'X/xG/pi' : Item(verb='Sending'),
+ })
+
+ expected_status.tweak(status=' ')
+ expected_status.tweak('X/xmu', 'X/Y/xlambda', 'X/Y/xH/xZ/zeta',
+ 'X/Y/xH/chi', 'X/xG/pi', wc_rev='4')
+
+ expected_status.tweak('Xpegged/xE/alpha', status='M ')
+
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status,
+ [], '--include-externals', wc_dir)
+
+ # svn up
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(status='U '),
+ 'A/D/H/chi' : Item(status='U '),
+ 'A/D/H/xZ/zeta' : Item(status='U '),
+ 'A/D/G/pi' : Item(status='U '),
+ 'A/B/lambda' : Item(status='U '),
+ 'Z/zeta' : Item(status='U '),
+ })
+
+ expected_disk.tweak('Xpegged/xE/alpha',
+ contents="This is the file 'alpha'.\nmod\n")
+ expected_disk.tweak('A/D/H/chi', 'X/Y/xH/chi',
+ contents="This is the file 'chi'.\nmod\n")
+ expected_disk.tweak('A/D/H/xZ/zeta', 'X/Y/xH/xZ/zeta', 'Z/zeta',
+ contents='This is the file zeta.\nmod\n')
+ expected_disk.tweak('A/D/G/pi', 'X/xG/pi',
+ contents="This is the file 'pi'.\nmod\n")
+ expected_disk.tweak('A/mu', 'X/xmu',
+ contents="This is the file 'mu'.\nmod\n")
+ expected_disk.tweak('A/B/lambda', 'X/Y/xlambda',
+ contents="This is the file 'lambda'.\nmod\n")
+
+
+ # Assume everything r4, except what is pegged
+ expected_status.tweak(wc_rev='4')
+ expected_status.tweak('Xpegged/xiota', 'Xpegged/xE', 'Xpegged/xE/alpha',
+ 'Xpegged/xE/beta', wc_rev=1)
+
+ actions.run_and_verify_update(wc_dir, expected_output, expected_disk,
+ expected_status)
+
+ # new mods to check more cases
+ # echo mod >> X/xmu
+ main.file_append(X_xmu, 'mod\n')
+
+ # echo mod >> X/Y/xlambda
+ main.file_append(X_Y_xlambda, 'mod\n')
+
+ # echo mod >> X/xG/pi
+ main.file_append(X_xG_pi, 'mod\n')
+
+ # echo mod >> X/Y/xH/chi
+ main.file_append(X_Y_xH_chi, 'mod\n')
+
+ # echo mod >> X/Y/xH/xZ/zeta
+ main.file_append(X_Y_xH_xZ_zeta, 'mod\n')
+
+ # svn status
+ expected_status.tweak('X/Y/xlambda', 'X/xmu', 'X/xG/pi',
+ 'X/Y/xH/chi', 'X/Y/xH/xZ/zeta', status='M ')
+
+ actions.run_and_verify_unquiet_status(wc_dir, expected_status)
+
+ # Expect no externals to be committed, because of depth
+ # svn ci --include-externals --depth=empty X
+ expected_output = svntest.wc.State(wc_dir, {})
+
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status,
+ [], '--include-externals', '--depth=empty', X)
+
+ # Expect only file external xmu to be committed, because of depth
+ # svn ci --include-externals --depth=files X
+ expected_output = svntest.wc.State(wc_dir, {
+ 'X/xmu' : Item(verb='Sending'),
+ })
+
+ expected_status.tweak(status=' ')
+ expected_status.tweak('X/xmu', wc_rev='5')
+ expected_status.tweak('X/Y/xlambda', 'X/xG/pi', 'X/Y/xH/chi',
+ 'X/Y/xH/xZ/zeta', 'Xpegged/xE/alpha', status='M ')
+
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status,
+ [], '--include-externals', '--depth=files', X)
+
+ # svn status
+ actions.run_and_verify_unquiet_status(wc_dir, expected_status)
+
+ # svn up
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(status='U '),
+ })
+
+ expected_disk.tweak('A/mu', 'X/xmu',
+ contents="This is the file 'mu'.\nmod\nmod\n")
+ expected_disk.tweak('X/Y/xlambda',
+ contents="This is the file 'lambda'.\nmod\nmod\n")
+ expected_disk.tweak('X/Y/xH/chi',
+ contents="This is the file 'chi'.\nmod\nmod\n")
+ expected_disk.tweak('X/Y/xH/xZ/zeta',
+ contents='This is the file zeta.\nmod\nmod\n')
+ expected_disk.tweak('X/xG/pi',
+ contents="This is the file 'pi'.\nmod\nmod\n")
+
+ expected_status.tweak(wc_rev='5')
+ expected_status.tweak('Xpegged/xiota', wc_rev='1')
+ expected_status.tweak('Xpegged/xiota', 'Xpegged/xE', 'Xpegged/xE/alpha',
+ 'Xpegged/xE/beta', wc_rev=1)
+
+ expected_status.tweak('X/Y/xH/chi', status='M ')
+
+ actions.run_and_verify_update(wc_dir, expected_output, expected_disk,
+ expected_status)
+
+ # echo mod >> X/xG/pi
+ main.file_append(X_xG_pi, 'mod\n')
+
+ # svn status
+ actions.run_and_verify_unquiet_status(wc_dir, expected_status)
+
+ # Expect explicit targets to be committed
+ # svn ci X/Y/xlambda X/xG
+ expected_output = svntest.wc.State(wc_dir, {
+ 'X/Y/xlambda' : Item(verb='Sending'),
+ 'X/xG/pi' : Item(verb='Sending'),
+ })
+
+ expected_status.tweak(status=' ')
+ expected_status.tweak('X/Y/xlambda', 'X/xG/pi', wc_rev='6')
+ expected_status.tweak('X/Y/xH/chi', 'X/Y/xH/xZ/zeta', 'Xpegged/xE/alpha',
+ status='M ')
+
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status,
+ [], X_Y_xlambda, X_xG)
+
+ # svn status
+ actions.run_and_verify_unquiet_status(wc_dir, expected_status)
+
+
+@Issue(4252)
+def include_immediate_dir_externals(sbox):
+ "commit --include-externals --depth=immediates"
+ # See also comment in append_externals_as_explicit_targets() in
+ # libsvn_client/commit.c, from r1198765.
+
+ # svntest.factory.make(sbox,"""
+ # svn mkdir X
+ # svn ci
+ # svn up
+ # svn ps svn:externals "^/A/B/E X/XE" wc_dir
+ # svn ci
+ # svn up
+ #
+ # svn ps some change X/XE
+ # echo mod >> X/XE/alpha
+ #
+ # svn st X/XE
+ # # Expect only the propset on X/XE to be committed.
+ # # Should be like 'svn commit --include-externals --depth=empty X/XE'.
+ # svn commit --include-externals --depth=immediates X
+ # """)
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ X = sbox.ospath('X')
+ X_XE = sbox.ospath('X/XE')
+ X_XE_alpha = sbox.ospath('X/XE/alpha')
+
+ # svn mkdir X
+ expected_stdout = ['A ' + X + '\n']
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'mkdir', X)
+
+ # svn ci
+ expected_output = svntest.wc.State(wc_dir, {
+ 'X' : Item(verb='Adding'),
+ })
+
+ expected_status = actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'X' : Item(status=' ', wc_rev='2'),
+ })
+
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status)
+
+ # svn up
+ expected_output = svntest.wc.State(wc_dir, {})
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'X' : Item(),
+ })
+
+ expected_status.tweak(wc_rev='2')
+
+ actions.run_and_verify_update(wc_dir, expected_output, expected_disk,
+ expected_status)
+
+ # svn ps svn:externals "^/A/B/E X/XE" wc_dir
+ expected_stdout = ["property 'svn:externals' set on '" + wc_dir + "'\n"]
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'ps',
+ 'svn:externals', '^/A/B/E X/XE', wc_dir)
+
+ # svn ci
+ expected_output = svntest.wc.State(wc_dir, {
+ '' : Item(verb='Sending'),
+ })
+
+ expected_status.tweak('', wc_rev='3')
+
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status)
+
+ # svn up
+ expected_output = svntest.wc.State(wc_dir, {
+ 'X/XE/alpha' : Item(status='A '),
+ 'X/XE/beta' : Item(status='A '),
+ })
+
+ expected_disk.add({
+ 'X/XE' : Item(),
+ 'X/XE/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'X/XE/beta' : Item(contents="This is the file 'beta'.\n"),
+ })
+
+ expected_status.tweak(wc_rev='3')
+ expected_status.add({
+ 'X/XE' : Item(status=' ', prev_status='X ', wc_rev='3'),
+ 'X/XE/beta' : Item(status=' ', wc_rev='3'),
+ 'X/XE/alpha' : Item(status=' ', wc_rev='3'),
+ })
+
+ actions.run_and_verify_update(wc_dir, expected_output, expected_disk,
+ expected_status)
+
+ sbox.simple_propset('some', 'change', 'X/XE')
+
+ # echo mod >> X/XE/alpha
+ main.file_append(X_XE_alpha, 'mod\n')
+
+ # svn st X/XE
+ expected_status.tweak('X/XE', status=' M')
+ expected_status.tweak('X/XE/alpha', status='M ')
+ actions.run_and_verify_unquiet_status(wc_dir, expected_status)
+
+ # Expect only the propset on X/XE to be committed.
+ # Should be like 'svn commit --include-externals --depth=empty X/XE'.
+ # svn commit --include-externals --depth=immediates X
+ expected_output = svntest.wc.State(wc_dir, {
+ 'X/XE' : Item(verb='Sending'),
+ })
+ expected_status.tweak('X/XE', status=' ', wc_rev=4)
+
+ # Currently this fails because nothing is committed.
+ #
+ # >svn st
+ # X X\XE
+ #
+ # Performing status on external item at 'X\XE':
+ # M C:\SVN\src-trunk\...\externals_tests-37\X\XE
+ # M C:\SVN\src-trunk\...\externals_tests-37\X\XE\alpha
+ #
+ # >svn ci -m "m" --include-externals --depth immediates X
+ #
+ # >
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status,
+ [], '--include-externals', '--depth=immediates', X)
+
+
+@Issue(4085)
+def shadowing(sbox):
+ "external shadows an existing dir"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Setup external: /A/B/F as 'C' child of /A
+ externals_prop = "^/A/B/F C\n"
+ change_external(sbox.ospath('A'), externals_prop, commit=False)
+
+ # An update errors out because the external is shadowed by an existing dir
+ svntest.main.run_svn("W205011: Error handling externals definition for '%s'"
+ % (sbox.wc_dir) + "/A/C", 'update', wc_dir)
+
+ # Remove the shadowed directory to unblock the external
+ svntest.main.run_svn(None, 'rm', sbox.repo_url + '/A/C', '-m', 'remove A/C')
+
+ # The next update should fetch the external and not error out
+ sbox.simple_update()
+
+
+# Test for issue #4093 'remapping a file external can segfault due to
+# "deleted" props'.
+@Issue(4093)
+def remap_file_external_with_prop_del(sbox):
+ "file external remap segfaults due to deleted props"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A_path = sbox.ospath('A')
+ mu_path = sbox.ospath('A/mu')
+
+ # Add a property to A/mu
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'propname', 'propval', mu_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'commit', '-m', 'New property on a file',
+ wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Add a new file external A/external pointing to ^/A/mu
+ externals_prop = "^/A/mu external\n"
+ change_external(A_path, externals_prop)
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Change A/external to point to ^/iota
+ externals_prop = "^/iota external\n"
+ change_external(A_path, externals_prop)
+
+ # Now update to bring the new external down.
+ # This previously segfaulted as described in
+ # http://subversion.tigris.org/issues/show_bug.cgi?id=4093#desc1
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+
+# Test for issue #4053 'svn:externals with explicit rev checks out HEAD'
+@Issue(4053)
+def dir_external_with_dash_r_only(sbox):
+ "whether '-r1 ^/A B' updates properly"
+ # svntest.factory.make(sbox,"""
+ # echo 'newer alpha' > A/B/E/alpha
+ # svn ci
+ # svn ps svn:externals ' -r1 ^/A/B/E E_ext' .
+ # svn up
+ # # ^ move the 'status.tweak(wc_rev=2)' above the 'add()' call
+ # svn info E_ext
+ # # ^ change the 'svn info' call to
+ # # expected_info = { 'Revision': '1' }
+ # # actions.run_and_verify_info([expected_info], E_ext)
+ # """)
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ url = sbox.repo_url
+
+ A_B_E_alpha = sbox.ospath('A/B/E/alpha')
+ E_ext = sbox.ospath('E_ext')
+
+ # echo 'newer alpha' > A/B/E/alpha
+ main.file_write(A_B_E_alpha, 'newer alpha\n')
+
+ # svn ci
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E/alpha' : Item(verb='Sending'),
+ })
+
+ expected_status = actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/E/alpha', wc_rev='2')
+
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status)
+
+ # svn ps svn:externals ' -r1 ^/A/B/E E_ext' .
+ expected_stdout = ["property 'svn:externals' set on '" + wc_dir + "'\n"]
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'ps',
+ 'svn:externals', ' -r1 ^/A/B/E E_ext', wc_dir)
+
+ # svn up
+ expected_output = svntest.wc.State(wc_dir, {
+ 'E_ext/beta' : Item(status='A '),
+ 'E_ext/alpha' : Item(status='A '),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'E_ext' : Item(),
+ 'E_ext/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'E_ext/beta' : Item(contents="This is the file 'beta'.\n"),
+ })
+ expected_disk.tweak('A/B/E/alpha', contents='newer alpha\n')
+
+ expected_status.tweak(wc_rev='2')
+ expected_status.tweak('', status=' M')
+ expected_status.add({
+ 'E_ext' : Item(status=' ', prev_status='X ', wc_rev=1),
+ 'E_ext/beta' : Item(status=' ', wc_rev='1'),
+ 'E_ext/alpha' : Item(status=' ', wc_rev='1'),
+ })
+
+ actions.run_and_verify_update(wc_dir, expected_output, expected_disk,
+ expected_status)
+
+ # svn info E_ext
+ expected_info = { 'Revision': '1' }
+ actions.run_and_verify_info([expected_info], E_ext)
+
+# Test for issue #4123 'URL-to-WC copy of externals fails on Windows'
+@Issue(4123)
+def url_to_wc_copy_of_externals(sbox):
+ "url-to-wc copy of externals"
+
+ sbox.build()
+
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # Create an external A/C/external pointing to ^/A/D/G.
+ svntest.actions.run_and_verify_svn(None, [], 'ps',
+ 'svn:externals', '^/A/D/G external',
+ sbox.ospath('A/C'))
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'create an external', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Copy ^/A/C to External-WC-to-URL-Copy.
+ #
+ # Previously this failed with:
+ # >svn copy ^^/A/C External-WC-to-URL-Copy
+ # U External-WC-to-URL-Copy
+ #
+ # Fetching external item into 'External-WC-to-URL-Copy\external':
+ # A External-WC-to-URL-Copy\external\pi
+ # A External-WC-to-URL-Copy\external\rho
+ # A External-WC-to-URL-Copy\external\tau
+ # Checked out external at revision 2.
+ #
+ # Checked out revision 2.
+ # ..\..\..\subversion\libsvn_client\copy.c:2249: (apr_err=720005)
+ # ..\..\..\subversion\libsvn_client\copy.c:1857: (apr_err=720005)
+ # ..\..\..\subversion\libsvn_client\copy.c:1857: (apr_err=720005)
+ # ..\..\..\subversion\libsvn_client\copy.c:1737: (apr_err=720005)
+ # ..\..\..\subversion\libsvn_client\copy.c:1737: (apr_err=720005)
+ # ..\..\..\subversion\libsvn_client\copy.c:1537: (apr_err=720005)
+ # ..\..\..\subversion\libsvn_subr\io.c:3416: (apr_err=720005)
+ # svn: E720005: Can't move 'C:\SVN\src-trunk-3\Debug\subversion\tests\
+ # cmdline\svn-test-work\working_copies\externals_tests-41\.svn\tmp\
+ # svn-F9E2C0EC' to 'C:\SVN\src-trunk-3\Debug\subversion\tests\cmdline\
+ # svn-test-work\working_copies\externals_tests-41\External-WC-to-URL-Copy':
+ # Access is denied.
+ external_root_path = sbox.ospath('External-WC-to-URL-Copy')
+ external_ex_path = os.path.join(wc_dir, "External-WC-to-URL-Copy",
+ "external")
+ external_pi_path = os.path.join(wc_dir, "External-WC-to-URL-Copy",
+ "external", "pi")
+ external_rho_path = os.path.join(wc_dir, "External-WC-to-URL-Copy",
+ "external", "rho")
+ external_tau_path = os.path.join(wc_dir, "External-WC-to-URL-Copy",
+ "external", "tau")
+ expected_stdout = verify.UnorderedOutput([
+ " U " + external_root_path + "\n",
+ "\n",
+ "Fetching external item into '" + external_ex_path + "':\n",
+ "A " + external_pi_path + "\n",
+ "A " + external_rho_path + "\n",
+ "A " + external_tau_path + "\n",
+ "Checked out external at revision 2.\n",
+ "\n",
+ "Checked out revision 2.\n",
+ "A " + external_root_path + "\n"
+ ])
+ exit_code, stdout, stderr = svntest.actions.run_and_verify_svn2(
+ expected_stdout, [], 0, 'copy', repo_url + '/A/C',
+ sbox.ospath('External-WC-to-URL-Copy'))
+
+@Issue(4227)
+def duplicate_targets(sbox):
+ "local path appears twice in one svn:external prop"
+
+ if False:
+ svntest.factory.make(sbox, r"""
+ svn ps svn:externals "^/A/B/E barf\n^/A/B/E barf" .
+ svn ps svn:externals "^/A/B/E barf\n^/A/D/G barf" .
+ svn ps svn:externals "^/A/B/E barf/.\n^/A/D/G ./barf" .
+ svn ps svn:externals "^/A/B/E ././barf\n^/A/D/G .//barf" .
+ svn pg svn:externals .
+ svn ps svn:externals "^/A/B/E ok" .
+ svn pg svn:externals .
+ """)
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ abs_wc_dir = os.path.abspath(sbox.wc_dir)
+
+ expected_stderr = verify.RegexOutput(
+ ".*Invalid svn:externals property on '" + re.escape(abs_wc_dir) +
+ "': target 'barf' appears more than once\n",
+ match_all=False)
+
+ # svn ps svn:externals "^/A/B/E barf\n^/A/B/E barf" .
+ actions.run_and_verify_svn2([], expected_stderr, 1, 'ps',
+ 'svn:externals', '^/A/B/E barf\n^/A/B/E barf', wc_dir)
+
+ # svn ps svn:externals "^/A/B/E barf\n^/A/D/G barf" .
+ actions.run_and_verify_svn2([], expected_stderr, 1, 'ps',
+ 'svn:externals', '^/A/B/E barf\n^/A/D/G barf', wc_dir)
+
+ # svn ps svn:externals "^/A/B/E barf/.\n^/A/D/G ./barf" .
+ actions.run_and_verify_svn2([], expected_stderr, 1, 'ps',
+ 'svn:externals', '^/A/B/E barf/.\n^/A/D/G ./barf', wc_dir)
+
+ # svn ps svn:externals "^/A/B/E ././barf\n^/A/D/G .//barf" .
+ actions.run_and_verify_svn2([], expected_stderr, 1, 'ps',
+ 'svn:externals', '^/A/B/E ././barf\n^/A/D/G .//barf', wc_dir)
+
+ # svn pg svn:externals .
+ expected_stderr = '.*W200017: Property.*not found'
+
+ actions.run_and_verify_svn2([], expected_stderr, 1, 'pg',
+ 'svn:externals', wc_dir)
+
+ # svn ps svn:externals "^/A/B/E ok" .
+ expected_stdout = ["property 'svn:externals' set on '" + wc_dir + "'\n"]
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'ps',
+ 'svn:externals', '^/A/B/E ok', wc_dir)
+
+ # svn pg svn:externals .
+ expected_stdout = verify.UnorderedOutput([
+ '^/A/B/E ok\n',
+ '\n'
+ ])
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'pg',
+ 'svn:externals', wc_dir)
+
+@Issue(4225)
+def list_include_externals(sbox):
+ "list with --include-externals"
+
+ externals_test_setup(sbox)
+
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ B_path = sbox.ospath("A/B")
+ C_path = sbox.ospath("A/C")
+
+ B_url = repo_url + "/A/B"
+ C_url = repo_url + "/A/C"
+
+ def list_external_string(path, url):
+ string = "Listing external" + " '" + path + "' " + "defined on" + " '" + \
+ url + "'" + ":"
+ return string
+
+ expected_stdout = verify.UnorderedOutput([
+ "E/" + "\n",
+ "F/" + "\n",
+ "lambda" + "\n",
+ list_external_string("gamma", B_url ) + "\n",
+ "gamma" + "\n"])
+
+ exit_code, stdout, stderr = svntest.actions.run_and_verify_svn2(
+ expected_stdout, [], 0, 'ls', '--include-externals', B_path)
+
+ exit_code, stdout, stderr = svntest.actions.run_and_verify_svn2(
+ expected_stdout, [], 0, 'ls', '--include-externals', B_url)
+
+ expected_stdout = verify.UnorderedOutput([
+ list_external_string("exdir_G", C_url)+ "\n",
+ "pi" + "\n",
+ "rho" + "\n",
+ "tau" + "\n",
+ list_external_string("exdir_H", C_url) + "\n",
+ "chi" + "\n",
+ "omega" + "\n",
+ "psi" + "\n"])
+
+ exit_code, stdout, stderr = svntest.actions.run_and_verify_svn2(
+ expected_stdout, [], 0, 'ls', '--include-externals', C_path)
+
+ exit_code, stdout, stderr = svntest.actions.run_and_verify_svn2(
+ expected_stdout, [], 0, 'ls', '--include-externals', C_url)
+
+@Issue(4293)
+@XFail()
+def move_with_file_externals(sbox):
+ "move with file externals"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ sbox.simple_propset('svn:externals', '^/A/mu@1 mu-1\n', 'A/D')
+ sbox.simple_commit()
+
+ sbox.simple_update()
+ sbox.simple_move('A/D', 'A/D_moved')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+@Issue(4185,4529)
+def pinned_externals(sbox):
+ "pinned external"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # Create X in r2
+ sbox.simple_copy('A', 'X')
+ sbox.simple_mkdir('Z')
+ sbox.simple_commit('')
+
+ repo_X_C = repo_url + '/X/C'
+ repo_X_mu = repo_url + '/X/mu'
+
+ expected_output = verify.RegexOutput(
+ '^ 1 jrandom .* mu$'
+ )
+
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'list', repo_X_mu, '-v')
+
+ # So, we copied A/mu to X/mu in r2, but its last changed revision is
+ # still r1. It existed as A/mu at r1.
+
+ # In the old format the -r is interpreted like an @1 on checkout.
+
+ sbox.simple_propset('svn:externals',
+ 'old-plain ' + repo_X_mu + '\n' +
+ 'old-rev -r 1 ' + repo_X_mu + '\n' +
+ repo_X_mu + ' new-plain\n' +
+ '-r1 ' + repo_X_mu + ' new-rev\n' +
+ repo_X_mu + '@1 new-peg\n'
+ '-r1 ' + repo_X_C + ' new-dir-rev\n',
+ 'Z')
+
+ expected_error = "svn: E205011: Failure.*externals"
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ # The interesting values
+ 'Z/old-plain' : Item(contents="This is the file 'mu'.\n"),
+ 'Z/new-plain' : Item(contents="This is the file 'mu'.\n"),
+ 'Z/new-rev' : Item(contents="This is the file 'mu'.\n"),
+ 'Z/new-dir-rev' : Item(),
+
+ # And verifying X
+ 'X/D/H/psi' : Item(contents="This is the file 'psi'.\n"),
+ 'X/D/H/chi' : Item(contents="This is the file 'chi'.\n"),
+ 'X/D/H/omega' : Item(contents="This is the file 'omega'.\n"),
+ 'X/D/G/tau' : Item(contents="This is the file 'tau'.\n"),
+ 'X/D/G/pi' : Item(contents="This is the file 'pi'.\n"),
+ 'X/D/G/rho' : Item(contents="This is the file 'rho'.\n"),
+ 'X/D/gamma' : Item(contents="This is the file 'gamma'.\n"),
+ 'X/B/E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'X/B/E/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'X/B/lambda' : Item(contents="This is the file 'lambda'.\n"),
+ 'X/B/F' : Item(),
+ 'X/C' : Item(),
+ 'X/mu' : Item(contents="This is the file 'mu'.\n"),
+ })
+
+
+ # ### Would be nice if verify update would still verify the result
+ # on exiting with an error. Why would you pass it?
+ svntest.actions.run_and_verify_update(wc_dir, None, None, None,
+ expected_error)
+
+ svntest.actions.verify_disk(wc_dir, expected_disk)
+
+# Test for issue #3741 'externals not removed when working copy is made shallow'
+@Issue(3741)
+def update_dir_external_shallow(sbox):
+ "shallow update should remove externals"
+
+ sbox.build()
+
+ # Create an external in r2
+ sbox.simple_propset('svn:externals', '^/A/D/H X', 'A/B/E')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Now make A/B/E shallow by updating with "--set-depth empty"
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ 'A/B/E/alpha' : Item(status='D '),
+ 'A/B/E/X' : Item(verb='Removed external'),
+ 'A/B/E/beta' : Item(status='D '),
+ })
+ svntest.actions.run_and_verify_update(sbox.wc_dir,
+ expected_output, None, None,
+ [], False,
+ '--set-depth=empty',
+ sbox.ospath('A/B/E'))
+
+ # And bring the external back by updating with "--set-depth infinity"
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ 'A/B/E/X/psi' : Item(status='A '),
+ 'A/B/E/X/chi' : Item(status='A '),
+ 'A/B/E/X/omega' : Item(status='A '),
+ 'A/B/E/alpha' : Item(status='A '),
+ 'A/B/E/beta' : Item(status='A '),
+ })
+ svntest.actions.run_and_verify_update(sbox.wc_dir,
+ expected_output, None, None,
+ [], False,
+ '--set-depth=infinity',
+ sbox.ospath('A/B/E'))
+
+@Issue(4411)
+def switch_parent_relative_file_external(sbox):
+ "switch parent-relative file external"
+
+ sbox.build()
+
+ # Create a parent-relative file external in r2
+ sbox.simple_propset('svn:externals', '../D/gamma gamma-ext', 'A/B')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Create a branch that contains the file external
+ sbox.simple_copy('A', 'A_copy')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Check out A/B_copy to a new working copy
+ branch_wc = sbox.add_wc_path("branch")
+ branch_url = sbox.repo_url + '/A_copy'
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout', branch_url,
+ branch_wc)
+
+ # Rename the branch
+ sbox.simple_move('A_copy', 'A_copy2')
+ sbox.simple_commit()
+
+ # Switch the branch working copy to the new branch URL
+ new_branch_url = sbox.repo_url + '/A_copy2'
+ svntest.actions.run_and_verify_svn(None, [],
+ 'switch', new_branch_url,
+ branch_wc)
+
+ # Bug: The branch working copy can no longer be updated.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'update', branch_wc)
+
+@Issue(4420)
+def file_external_unversioned_obstruction(sbox):
+ """file externals unversioned obstruction"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ expected_output = verify.RegexOutput(b'r2 committed .*')
+ svntest.actions.run_and_verify_svnmucc(expected_output, [],
+ '-U', sbox.repo_url, '-m', 'r2: set external',
+ 'propset', 'svn:externals', '^/A/mu mu-ext', 'A')
+
+ sbox.simple_append('A/mu-ext', 'unversioned obstruction')
+
+ # Update reports a tree-conflict but status doesn't show any such
+ # conflict. I'm not sure whether this is correct.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/mu-ext' : Item('unversioned obstruction'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ svntest.actions.run_and_verify_svn(
+ None,
+ ".*svn: warning: W155014: The file external '.*mu-ext'"
+ " can not be created because the node exists.*",
+ 'up', wc_dir)
+ svntest.actions.verify_disk(wc_dir, expected_disk)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ os.remove(sbox.ospath('A/mu-ext'))
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu-ext' : Item(status='A '),
+ })
+ expected_status.add({
+ 'A/mu-ext' : Item(status=' ', wc_rev='2', switched='X'),
+ })
+ expected_disk.tweak('A/mu-ext', contents="This is the file 'mu'.\n")
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output, expected_disk,
+ expected_status)
+
+@Issue(4001)
+@XFail()
+def file_external_versioned_obstruction(sbox):
+ """file externals versioned obstruction"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ expected_output = verify.RegexOutput('r2 committed .*')
+ svntest.actions.run_and_verify_svnmucc(expected_output, [],
+ '-U', sbox.repo_url, '-m', 'r2: set external',
+ 'propset', 'svn:externals', '^/A/mu mu-ext', 'A')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(status=' U'),
+ 'A/mu-ext' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/mu-ext' : Item('This is the file \'mu\'.\n'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'A/mu-ext' : Item(status=' ', wc_rev='2', switched='X'),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output, expected_disk,
+ expected_status)
+
+ # Update skips adding the versioned node because of the file
+ # external obstruction then when the external is deleted the
+ # versioned node is missing from disk and wc.db. Not really sure
+ # what should happen, perhaps a not-present node?
+ expected_output = verify.RegexOutput('r3 committed .*')
+ svntest.actions.run_and_verify_svnmucc(expected_output, [],
+ '-U', sbox.repo_url, '-m', 'r3: copy file',
+ 'cp', 'head', 'A/mu', 'A/mu-ext',
+ 'propdel', 'svn:externals', 'A')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(status=' U'),
+ 'A/mu-ext' : Item(verb='Removed external', prev_verb='Skipped'),
+ })
+ expected_disk.tweak('A/mu-ext', content='This is the file \'mu\'.\n')
+ expected_status.tweak(wc_rev=3)
+ expected_status.tweak('A/mu-ext', switched=None)
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output, expected_disk,
+ expected_status)
+
+@Issue(4495)
+def update_external_peg_rev(sbox):
+ "update external peg rev"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_rm('A/B/E/alpha')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ sbox.simple_propset('svn:externals', '^/A/B/E@1 xE', 'A/B/F')
+ sbox.simple_commit()
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/F/xE/alpha' : Item(status='A '),
+ 'A/B/F/xE/beta' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/E/alpha')
+ expected_disk.add({
+ 'A/B/F/xE' : Item(),
+ 'A/B/F/xE/alpha' : Item('This is the file \'alpha\'.\n'),
+ 'A/B/F/xE/beta' : Item('This is the file \'beta\'.\n'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+ expected_status.remove('A/B/E/alpha')
+ expected_status.add({
+ 'A/B/F/xE' : Item(status=' ', wc_rev='1', prev_status='X '),
+ 'A/B/F/xE/alpha' : Item(status=' ', wc_rev='1'),
+ 'A/B/F/xE/beta' : Item(status=' ', wc_rev='1'),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+ sbox.simple_propset('svn:externals', '^/A/B/E@2 xE', 'A/B/F')
+ sbox.simple_commit()
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/F/xE/alpha' : Item(status='D '),
+ })
+ expected_disk.remove('A/B/F/xE/alpha')
+ expected_status.remove('A/B/F/xE/alpha')
+ expected_status.tweak(wc_rev=4)
+ expected_status.tweak('A/B/F/xE', 'A/B/F/xE/beta', wc_rev=2)
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+ # XFAIL: EXTERNALS.def_revision and EXTERNALS.def_operational_revision
+ # are still r1 for 'A/B/F/xE' so status is not against the expected r2.
+ # No testsuite support for ood marker so examine status output manually.
+ expected_output = [
+ "X %s\n" % sbox.ospath('A/B/F/xE'),
+ "Status against revision: 4\n",
+ "\n",
+ "Performing status on external item at '%s':\n" % sbox.ospath('A/B/F/xE'),
+ "Status against revision: 2\n",
+ ]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'status', '-u', sbox.wc_dir)
+
+def update_deletes_file_external(sbox):
+ "update deletes a file external"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_propset('svn:externals', '../D/gamma gamma', 'A/C')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Create a branch
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy',
+ '-m', 'create branch',
+ sbox.repo_url + '/A',
+ sbox.repo_url + '/A_copy')
+
+ # Update the working copy
+ sbox.simple_update()
+
+ # Remove the branch
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm',
+ '-m', 'remove branch',
+ sbox.repo_url + '/A_copy')
+
+ # As of r1448345, this update fails:
+ # E000002: Can't remove directory '.../A_copy/C': No such file or directory
+ sbox.simple_update()
+
+
+@Issue(4519)
+def switch_relative_externals(sbox):
+ "switch relative externals"
+
+ sbox.build(create_wc=False)
+
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-U', sbox.repo_url, '-m', 'Q',
+ 'mkdir', 'branches',
+ 'cp', '1', 'A', 'trunk',
+ 'cp', '1', 'A', 'branches/A',
+ 'propset', 'svn:externals',
+ '../C dirExC\n ../mu fileExMu',
+ 'trunk/B',
+ 'propset', 'svn:externals',
+ '../C dirExC\n ../mu fileExMu',
+ 'branches/A/B')
+
+ wc = sbox.add_wc_path('wc')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'co', sbox.repo_url + '/trunk', wc)
+
+ # This forgets to update some externals data
+ svntest.actions.run_and_verify_svn(None, [],
+ 'switch', sbox.repo_url + '/branches/A', wc)
+
+ # This upgrade makes the following update fail
+ svntest.actions.run_and_verify_svn(None, [],
+ 'upgrade', wc)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', wc)
+
+
+def copy_file_external_to_repo(sbox):
+ "explicitly copy file external to repo"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ change_external(sbox.ospath('A'), '^/A/mu ext')
+ sbox.simple_update()
+
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ '--message', 'external copy',
+ sbox.ospath('A/ext'),
+ sbox.repo_url + '/ext_copy')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'ext_copy' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/ext' : Item('This is the file \'mu\'.\n'),
+ 'ext_copy' : Item('This is the file \'mu\'.\n'),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output, expected_disk, None)
+
+@Issue(4550)
+def replace_tree_with_foreign_external(sbox):
+ "replace tree with foreign external"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_dir = sbox.repo_dir
+
+ other_repo_dir, other_repo_url = sbox.add_repo_path('other')
+ svntest.main.copy_repos(repo_dir, other_repo_dir, 1)
+
+ sbox.simple_propset('svn:externals', other_repo_url + '/A/B X', 'A')
+ sbox.simple_commit()
+ sbox.simple_propdel('svn:externals', 'A')
+ sbox.simple_mkdir('A/X')
+ sbox.simple_mkdir('A/X/E')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/X' : Item(status='D '),
+ 'A' : Item(status=' U'),
+ 'A/X/lambda' : Item(status='A '),
+ 'A/X/E' : Item(status='A '),
+ 'A/X/E/alpha' : Item(status='A '),
+ 'A/X/E/beta' : Item(status='A '),
+ 'A/X/F' : Item(status='A '),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'A/X' : Item(status=' ', wc_rev=1, prev_status='X '),
+ 'A/X/E' : Item(status=' ', wc_rev=1, prev_status=' '),
+ 'A/X/E/alpha' : Item(status=' ', wc_rev=1),
+ 'A/X/E/beta' : Item(status=' ', wc_rev=1),
+ 'A/X/F' : Item(status=' ', wc_rev=1),
+ 'A/X/lambda' : Item(status=' ', wc_rev=1),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output, None, expected_status,
+ [], True,
+ '-r', '2', wc_dir)
+
+
+def verify_pinned_externals(sbox, external_url_for, base_path_or_url,
+ external_youngest_rev, other_external_youngest_rev):
+ "helper for pin-externals tests"
+
+ expected_output = [
+ '%s@%d gamma\n' % (external_url_for["A/B/gamma"],
+ external_youngest_rev),
+ '\n',
+ ]
+ if svntest.sandbox.is_url(base_path_or_url):
+ target = base_path_or_url + '/A_copy/B'
+ else:
+ target = sbox.ospath('A_copy/B')
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'propget', 'svn:externals',
+ target)
+ expected_output = [
+ 'exdir_G -r%d %s\n' % (other_external_youngest_rev,
+ external_url_for["A/C/exdir_G"]),
+ '%s exdir_H\n' % external_url_for["A/C/exdir_H"],
+ '\n',
+ ]
+ if svntest.sandbox.is_url(base_path_or_url):
+ target = base_path_or_url + '/A_copy/C'
+ else:
+ target = sbox.ospath('A_copy/C')
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'propget', 'svn:externals',
+ target)
+ expected_output = [
+ '%s@%d exdir_A\n' % (external_url_for["A/D/exdir_A"],
+ other_external_youngest_rev),
+ '%s@%d exdir_A/G\n' % (external_url_for["A/D/exdir_A/G/"],
+ other_external_youngest_rev),
+ 'exdir_A/H -r1 %s\n' % external_url_for["A/D/exdir_A/H"],
+ '%s@%d x/y/z/blah\n' % (external_url_for["A/D/x/y/z/blah"],
+ other_external_youngest_rev),
+ '\n',
+ ]
+ if svntest.sandbox.is_url(base_path_or_url):
+ target = base_path_or_url + '/A_copy/D'
+ else:
+ target = sbox.ospath('A_copy/D')
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'propget', 'svn:externals',
+ target)
+
+
+def copy_pin_externals_repos_repos(sbox):
+ "svn copy --pin-externals repos->repos"
+
+ external_url_for = externals_test_setup(sbox)
+
+ repo_url = sbox.repo_url
+ repo_dir = sbox.repo_dir
+ other_repo_dir = repo_dir + ".other"
+
+ external_youngest_rev = svntest.main.youngest(repo_dir)
+ other_external_youngest_rev = svntest.main.youngest(other_repo_dir)
+
+ # Perform a repos->repos copy, pinning externals
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy',
+ repo_url + '/A',
+ repo_url + '/A_copy',
+ '-m', 'copy',
+ '--pin-externals')
+ verify_pinned_externals(sbox, external_url_for, repo_url,
+ external_youngest_rev, other_external_youngest_rev)
+
+
+def copy_pin_externals_repos_wc(sbox):
+ "svn copy --pin-externals repos->wc"
+
+ external_url_for = externals_test_setup(sbox)
+
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+ repo_dir = sbox.repo_dir
+ other_repo_dir = repo_dir + ".other"
+
+ external_youngest_rev = svntest.main.youngest(repo_dir)
+ other_external_youngest_rev = svntest.main.youngest(other_repo_dir)
+
+ # Create a working copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ # Perform a repos->wc copy, pinning externals
+ external_youngest_rev = svntest.main.youngest(repo_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy',
+ repo_url + '/A',
+ os.path.join(wc_dir, 'A_copy'),
+ '--pin-externals')
+ verify_pinned_externals(sbox, external_url_for, wc_dir,
+ external_youngest_rev, other_external_youngest_rev)
+
+
+def copy_pin_externals_wc_repos(sbox):
+ "svn copy --pin-externals wc->repos"
+
+ external_url_for = externals_test_setup(sbox)
+
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+ repo_dir = sbox.repo_dir
+ other_repo_dir = repo_dir + ".other"
+
+ external_youngest_rev = svntest.main.youngest(repo_dir)
+ other_external_youngest_rev = svntest.main.youngest(other_repo_dir)
+
+ # Create a working copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ # Perform a wc->repos copy, pinning externals
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy',
+ os.path.join(wc_dir, 'A'),
+ repo_url + '/A_copy',
+ '-m', 'copy',
+ '--pin-externals')
+ verify_pinned_externals(sbox, external_url_for, repo_url,
+ external_youngest_rev, other_external_youngest_rev)
+
+
+def copy_pin_externals_wc_wc(sbox):
+ "svn copy --pin-externals wc->wc"
+
+ external_url_for = externals_test_setup(sbox)
+
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+ repo_dir = sbox.repo_dir
+ other_repo_dir = repo_dir + ".other"
+
+ external_youngest_rev = svntest.main.youngest(repo_dir)
+ other_external_youngest_rev = svntest.main.youngest(other_repo_dir)
+
+ # Create a working copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ # Perform a wc->wc copy, pinning externals
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy',
+ os.path.join(wc_dir, 'A'),
+ os.path.join(wc_dir, 'A_copy'),
+ '--pin-externals')
+ verify_pinned_externals(sbox, external_url_for, wc_dir,
+ external_youngest_rev, other_external_youngest_rev)
+
+
+def copy_pin_externals_moved_external(sbox):
+ "pin externals which were moved since last changed"
+
+ external_url_for = externals_test_setup(sbox)
+
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+ repo_dir = sbox.repo_dir
+ other_repo_dir = repo_dir + ".other"
+
+ external_youngest_rev = svntest.main.youngest(repo_dir)
+ other_external_youngest_rev = svntest.main.youngest(other_repo_dir)
+
+ # Create a working copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ # Test behaviour for external URLs which were moved since
+ # their last-changed revision.
+ sbox.simple_move('A/D/gamma', 'A/D/gamma-moved')
+ sbox.simple_commit()
+ change_external(sbox.ospath('A/B'), '^/A/D/gamma-moved gamma', commit=True)
+ sbox.simple_update()
+ external_youngest_rev = svntest.main.youngest(repo_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy',
+ os.path.join(wc_dir, 'A'),
+ os.path.join(wc_dir, 'A_copy'),
+ '--pin-externals')
+ external_url_for["A/B/gamma"] = '^/A/D/gamma-moved'
+ verify_pinned_externals(sbox, external_url_for, wc_dir,
+ external_youngest_rev, other_external_youngest_rev)
+
+
+def copy_pin_externals_removed_in_head(sbox):
+ "already pinned external which was removed in HEAD"
+
+ external_url_for = externals_test_setup(sbox)
+
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+ repo_dir = sbox.repo_dir
+ other_repo_url = repo_url + ".other"
+ other_repo_dir = repo_dir + ".other"
+
+ # Create a working copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ # Test an already pinned external which was removed in HEAD.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm',
+ other_repo_url + '/A/D/H',
+ '-m', 'remove A/D/H')
+ sbox.simple_update()
+ external_youngest_rev = svntest.main.youngest(repo_dir)
+ other_external_youngest_rev = svntest.main.youngest(other_repo_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy',
+ os.path.join(wc_dir, 'A'),
+ os.path.join(wc_dir, 'A_copy'),
+ '--pin-externals')
+ verify_pinned_externals(sbox, external_url_for, wc_dir,
+ external_youngest_rev, other_external_youngest_rev)
+
+
+def copy_pin_externals_from_old_rev(sbox):
+ "copy from an old revision with pinning"
+
+ external_url_for = externals_test_setup(sbox)
+
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+ repo_dir = sbox.repo_dir
+ other_repo_url = repo_url + ".other"
+ other_repo_dir = repo_dir + ".other"
+
+ # Create a working copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+ # Create a couple of revisions affecting 'A'.
+ for i in range(5):
+ svntest.main.file_append(sbox.ospath('A/mu'), 'a new line')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Test a copy from an old revision with pinning.
+ external_youngest_rev = svntest.main.youngest(repo_dir)
+ other_external_youngest_rev = svntest.main.youngest(other_repo_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy',
+ os.path.join(wc_dir, 'A@6'),
+ os.path.join(wc_dir, 'A_copy'),
+ '--pin-externals')
+ external_url_for["A/B/gamma"] = '^/A/D/gamma'
+ verify_pinned_externals(sbox, external_url_for, wc_dir,
+ external_youngest_rev, other_external_youngest_rev)
+
+
+def copy_pin_externals_wc_local_mods(sbox):
+ "cannot pin WC externals with local mods"
+
+ external_url_for = externals_test_setup(sbox)
+
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # Create a working copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ svntest.main.file_append(sbox.ospath('A/C/exdir_G/pi'), 'this file changed')
+ expected_stderr = verify.RegexOutput(".*Cannot pin.*local modifications.*",
+ match_all=False)
+ svntest.actions.run_and_verify_svn(None, expected_stderr,
+ 'copy',
+ os.path.join(wc_dir, 'A'),
+ os.path.join(wc_dir, 'A_copy'),
+ '--pin-externals')
+
+
+def copy_pin_externals_wc_switched_subtrees(sbox):
+ "cannot pin WC externals with switched subtrees"
+
+ external_url_for = externals_test_setup(sbox)
+
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # Create a working copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'switch', '--ignore-ancestry', '^/A/B',
+ sbox.ospath('A/D/exdir_A/C'))
+ expected_stderr = verify.RegexOutput(".*Cannot pin.*switched subtree.*",
+ match_all=False)
+ svntest.actions.run_and_verify_svn(None, expected_stderr,
+ 'copy',
+ os.path.join(wc_dir, 'A'),
+ os.path.join(wc_dir, 'A_copy'),
+ '--pin-externals')
+
+
+def copy_pin_externals_wc_mixed_revisions(sbox):
+ "cannot pin WC externals with mixed revisions"
+
+ external_url_for = externals_test_setup(sbox)
+
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # Create a working copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'update', '-r1',
+ sbox.ospath('A/D/exdir_A/mu'))
+ expected_stderr = verify.RegexOutput(".*Cannot pin.*mixed-revision.*",
+ match_all=False)
+ svntest.actions.run_and_verify_svn(None, expected_stderr,
+ 'copy',
+ os.path.join(wc_dir, 'A'),
+ os.path.join(wc_dir, 'A_copy'),
+ '--pin-externals')
+
+@Issue(4558)
+def copy_pin_externals_whitespace_dir(sbox):
+ "copy --pin-externals with whitespace dir"
+
+ sbox.build(empty=True)
+ repo_url = sbox.repo_url
+ wc_dir = sbox.wc_dir
+ ss_path = repo_url[repo_url.find('//'):]
+
+ extdef = sbox.get_tempname('extdef')
+ info = sbox.get_tempname('info')
+
+ open(extdef, 'w').write(
+ '"' + ss_path +'/deps/sqlite" ext/sqlite\n' +
+ '"^/deps/A P R" \'ext/A P R\'\n' +
+ '^/deps/B\ D\ B\' ext/B\ D\ B\'\n' +
+ repo_url + '/deps/wors%23+t ext/wors#+t')
+ open(info, 'w').write('info\n')
+
+ svntest.actions.run_and_verify_svnmucc(None, [], '-U', repo_url,
+ 'mkdir', 'trunk',
+ 'mkdir', 'branches',
+ 'mkdir', 'deps',
+ 'mkdir', 'deps/sqlite',
+ 'put', info, 'deps/sqlite/readme',
+ 'mkdir', 'deps/A P R',
+ 'put', info, 'deps/A P R/about',
+ 'mkdir', 'deps/B D B\'',
+ 'put', info, 'deps/B D B\'/copying',
+ 'mkdir', 'deps/wors#+t',
+ 'put', info, 'deps/wors#+t/brood',
+ 'propsetf', 'svn:externals', extdef,
+ 'trunk',
+ '-mm'
+ )
+
+ svntest.actions.run_and_verify_svn(None, [], 'update', sbox.ospath('trunk'),
+ '--ignore-externals')
+ sbox.simple_update('branches')
+
+ expected_status = svntest.wc.State(wc_dir, {
+ '' : Item(status=' ', wc_rev='0'),
+ 'trunk' : Item(status=' ', wc_rev='1'),
+ 'branches' : Item(status=' ', wc_rev='1'),
+ })
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ trunk_url = repo_url + '/trunk'
+ branches_url = repo_url + '/branches'
+ trunk_wc = sbox.ospath('trunk')
+
+  # Create a new revision to create interesting pinning revisions
+ sbox.simple_propset('A', 'B', 'trunk')
+ sbox.simple_commit('trunk')
+
+ # And let's copy/pin
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', '--pin-externals',
+ trunk_url, branches_url + '/url-url', '-mm')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', '--pin-externals',
+ trunk_url, sbox.ospath('branches/url-wc'))
+ sbox.simple_commit('branches/url-wc')
+
+ # Now try to copy without externals in the WC
+ expected_err = '.*E155035: Cannot pin external.*'
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'copy', '--pin-externals',
+ trunk_wc, branches_url + '/wc-url', '-mm')
+
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'copy', '--pin-externals',
+ trunk_wc, sbox.ospath('branches/wc-wc'))
+
+ # Bring in the externals on trunk
+ svntest.actions.run_and_verify_svn(None, [], 'update', sbox.ospath('trunk'))
+ expected_status = svntest.wc.State(wc_dir, {
+ 'trunk' : Item(status=' ', wc_rev='4'),
+ 'trunk/ext' : Item(status='X '),
+ 'trunk/ext/sqlite' : Item(status=' ', wc_rev='4'),
+ 'trunk/ext/sqlite/readme' : Item(status=' ', wc_rev='4'),
+ 'trunk/ext/A P R' : Item(status=' ', wc_rev='4'),
+ 'trunk/ext/A P R/about' : Item(status=' ', wc_rev='4'),
+ 'trunk/ext/B D B\'' : Item(status=' ', wc_rev='4'),
+ 'trunk/ext/B D B\'/copying' : Item(status=' ', wc_rev='4'),
+ 'trunk/ext/wors#+t' : Item(status=' ', wc_rev='4'),
+ 'trunk/ext/wors#+t/brood' : Item(status=' ', wc_rev='4'),
+ })
+ svntest.actions.run_and_verify_status(sbox.ospath('trunk'), expected_status)
+
+ # And copy again
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', '--pin-externals',
+ trunk_wc, branches_url + '/wc-url', '-mm')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', '--pin-externals',
+ trunk_wc, sbox.ospath('branches/wc-wc'))
+ sbox.simple_commit('branches/wc-wc')
+
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'branches/url-url' : Item(status='A '),
+ 'branches/url-url/ext/A P R/about' : Item(status='A '),
+ 'branches/url-url/ext/B D B\'/copying' : Item(status='A '),
+ 'branches/url-url/ext/wors#+t/brood' : Item(status='A '),
+ 'branches/url-url/ext/sqlite/readme' : Item(status='A '),
+
+ # url-wc is already up to date
+
+ 'branches/wc-url' : Item(status='A '),
+ 'branches/wc-url/ext/wors#+t/brood' : Item(status='A '),
+ 'branches/wc-url/ext/sqlite/readme' : Item(status='A '),
+ 'branches/wc-url/ext/B D B\'/copying' : Item(status='A '),
+ 'branches/wc-url/ext/A P R/about' : Item(status='A '),
+
+ ## branches/wc-wc should checkout its externals here
+ })
+ expected_status = svntest.wc.State(wc_dir, {
+ 'branches' : Item(status=' ', wc_rev='6'),
+
+ 'branches/url-url' : Item(status=' ', wc_rev='6'),
+ 'branches/url-url/ext' : Item(status='X '),
+ 'branches/url-url/ext/A P R' : Item(status=' ', wc_rev='2'),
+ 'branches/url-url/ext/A P R/about' : Item(status=' ', wc_rev='2'),
+ 'branches/url-url/ext/sqlite' : Item(status=' ', wc_rev='2'),
+ 'branches/url-url/ext/sqlite/readme' : Item(status=' ', wc_rev='2'),
+ 'branches/url-url/ext/wors#+t' : Item(status=' ', wc_rev='2'),
+ 'branches/url-url/ext/wors#+t/brood' : Item(status=' ', wc_rev='2'),
+ 'branches/url-url/ext/B D B\'' : Item(status=' ', wc_rev='2'),
+ 'branches/url-url/ext/B D B\'/copying' : Item(status=' ', wc_rev='2'),
+
+ 'branches/url-wc' : Item(status=' ', wc_rev='6'),
+ 'branches/url-wc/ext' : Item(status='X '),
+ 'branches/url-wc/ext/wors#+t' : Item(status=' ', wc_rev='3'),
+ 'branches/url-wc/ext/wors#+t/brood' : Item(status=' ', wc_rev='3'),
+ 'branches/url-wc/ext/B D B\'' : Item(status=' ', wc_rev='3'),
+ 'branches/url-wc/ext/B D B\'/copying' : Item(status=' ', wc_rev='3'),
+ 'branches/url-wc/ext/sqlite' : Item(status=' ', wc_rev='3'),
+ 'branches/url-wc/ext/sqlite/readme' : Item(status=' ', wc_rev='3'),
+ 'branches/url-wc/ext/A P R' : Item(status=' ', wc_rev='3'),
+ 'branches/url-wc/ext/A P R/about' : Item(status=' ', wc_rev='3'),
+
+ 'branches/wc-url' : Item(status=' ', wc_rev='6'),
+ 'branches/wc-url/ext' : Item(status='X '),
+ 'branches/wc-url/ext/wors#+t' : Item(status=' ', wc_rev='4'),
+ 'branches/wc-url/ext/wors#+t/brood' : Item(status=' ', wc_rev='4'),
+ 'branches/wc-url/ext/sqlite' : Item(status=' ', wc_rev='4'),
+ 'branches/wc-url/ext/sqlite/readme' : Item(status=' ', wc_rev='4'),
+ 'branches/wc-url/ext/B D B\'' : Item(status=' ', wc_rev='4'),
+ 'branches/wc-url/ext/B D B\'/copying' : Item(status=' ', wc_rev='4'),
+ 'branches/wc-url/ext/A P R' : Item(status=' ', wc_rev='4'),
+ 'branches/wc-url/ext/A P R/about' : Item(status=' ', wc_rev='4'),
+
+ 'branches/wc-wc' : Item(status=' ', wc_rev='6'),
+ 'branches/wc-wc/ext' : Item(status='X '),
+ 'branches/wc-wc/ext/wors#+t' : Item(status=' ', wc_rev='4'),
+ 'branches/wc-wc/ext/wors#+t/brood' : Item(status=' ', wc_rev='4'),
+ 'branches/wc-wc/ext/sqlite' : Item(status=' ', wc_rev='4'),
+ 'branches/wc-wc/ext/sqlite/readme' : Item(status=' ', wc_rev='4'),
+ 'branches/wc-wc/ext/B D B\'' : Item(status=' ', wc_rev='4'),
+ 'branches/wc-wc/ext/B D B\'/copying' : Item(status=' ', wc_rev='4'),
+ 'branches/wc-wc/ext/A P R' : Item(status=' ', wc_rev='4'),
+ 'branches/wc-wc/ext/A P R/about' : Item(status=' ', wc_rev='4'),
+ })
+ svntest.actions.run_and_verify_update(wc_dir + '/branches', expected_output,
+ None, expected_status)
+
+ # Now let's use our existing setup to perform some copies with dynamic
+ # destinations
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', '--parents', '--pin-externals',
+ repo_url + '/branches/wc-url',
+ repo_url + '/branches/url-url',
+ trunk_url,
+ branches_url + '/3x-url-url',
+ '-mm')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', '--parents', '--pin-externals',
+ repo_url + '/branches/wc-url',
+ repo_url + '/branches/url-url',
+ trunk_url,
+ sbox.ospath('branches/3x-url-wc'))
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', '--parents', '--pin-externals',
+ sbox.ospath('branches/wc-url'),
+ sbox.ospath('branches/url-url'),
+ sbox.ospath('trunk'),
+ branches_url + '/3x-wc-url',
+ '-mm')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', '--parents', '--pin-externals',
+ sbox.ospath('branches/wc-url'),
+ sbox.ospath('branches/url-url'),
+ sbox.ospath('trunk'),
+ sbox.ospath('branches/3x-wc-wc'))
+
+def nested_notification(sbox):
+ "notification for nested externals"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_dir = sbox.repo_dir
+
+ sbox.simple_mkdir('D1')
+ sbox.simple_mkdir('D2')
+ sbox.simple_mkdir('D3')
+ sbox.simple_mkdir('D4')
+ sbox.simple_propset('svn:externals', '^/D2 X', 'D1')
+ sbox.simple_propset('svn:externals', '^/D3 X', 'D2')
+ sbox.simple_propset('svn:externals', '^/D4 X', 'D3')
+ sbox.simple_commit()
+ expected_output = [
+ 'Updating \'' + sbox.ospath('D1') + '\':\n',
+ '\n',
+ 'Fetching external item into \'' + sbox.ospath('D1/X') + '\':\n',
+ ' U ' + sbox.ospath('D1/X') + '\n',
+ '\n',
+ 'Fetching external item into \'' + sbox.ospath('D1/X/X') + '\':\n',
+ ' U ' + sbox.ospath('D1/X/X') + '\n',
+ '\n',
+ 'Fetching external item into \'' + sbox.ospath('D1/X/X/X') + '\':\n',
+ 'Updated external to revision 2.\n',
+ '\n',
+ 'External at revision 2.\n',
+ '\n',
+ 'External at revision 2.\n',
+ '\n',
+ 'At revision 2.\n'
+ ]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'update', sbox.ospath('D1'))
+
+def file_external_to_normal_file(sbox):
+ "change a file external to a normal file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ sbox.simple_propset('svn:externals', '^/iota iota', 'A')
+ sbox.simple_commit()
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'A/iota' : Item(status=' ', wc_rev='2', switched='X'),
+ })
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/iota' : Item(status='A '),
+ })
+
+ svntest.actions.run_and_verify_update(wc_dir, expected_output, None,
+ expected_status)
+
+ # Create second working copy in this state
+ sbox2 = sbox.clone_dependent(copy_wc=True)
+
+ sbox.simple_propdel('svn:externals', 'A')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/iota' : Item(verb='Removed external'),
+ })
+ expected_status.remove('A/iota')
+ expected_status.tweak('A', status=' M')
+ svntest.actions.run_and_verify_update(wc_dir, expected_output, None,
+ expected_status)
+
+ sbox.simple_copy('iota', 'A/iota')
+ sbox.simple_commit()
+
+ expected_output = svntest.wc.State(wc_dir, {
+ })
+ expected_status.tweak(wc_rev=3)
+ expected_status.tweak('A', status=' ')
+ expected_status.add({
+    # This case used to trigger a switched status in 1.8.x before this
+    # test (and the fix for this problem) were added.
+ 'A/iota' : Item(status=' ', wc_rev='3'),
+ })
+ svntest.actions.run_and_verify_update(wc_dir, expected_output, None,
+ expected_status)
+
+
+ wc_dir = sbox2.wc_dir
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(status=' U'),
+ 'A/iota' : Item(verb='Removed external', prev_verb='Skipped'),
+ })
+ # This reports an obstruction and removes the file external
+ svntest.actions.run_and_verify_update(wc_dir, expected_output, None,
+ expected_status)
+
+ expected_status.add({
+ 'A/iota' : Item(status=' ', wc_rev='3'),
+ })
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/iota' : Item(status='A '),
+ })
+ # This should bring in the new file
+ svntest.actions.run_and_verify_update(wc_dir, expected_output, None,
+ expected_status)
+
+@Issue(4580)
+def file_external_recorded_info(sbox):
+ "check file external recorded info"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # r2 - Create file external
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-U', sbox.repo_url,
+ '-m', '',
+ 'propset', 'svn:externals',
+ '^/iota i', '')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ '' : Item(status=' U'),
+ 'i' : Item(status='A '),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'i' : Item(status=' ', wc_rev='2', switched='X')
+ })
+ svntest.actions.run_and_verify_update(wc_dir, expected_output, None,
+ expected_status, [], False,
+ '-r', 2, wc_dir)
+
+ expected_infos = [{
+ 'Path': re.escape(sbox.ospath('i')),
+ 'Relative URL': re.escape('^/iota'),
+ 'Revision': '2',
+ 'Last Changed Rev': '1',
+ 'Last Changed Author': 'jrandom'
+ }]
+ svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('i'))
+
+ # r3 - No-op change
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-U', sbox.repo_url,
+ '-m', '',
+ 'cp', '1', 'iota', 'iotb')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iotb' : Item(status='A '),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+ expected_status.add({
+ 'i' : Item(status=' ', wc_rev='3', switched='X'),
+ 'iotb' : Item(status=' ', wc_rev='3')
+ })
+ svntest.actions.run_and_verify_update(wc_dir, expected_output, None,
+ expected_status, [], False,
+ '-r', 3, wc_dir)
+
+ expected_infos = [{
+ 'Path': re.escape(sbox.ospath('i')),
+ 'Relative URL': re.escape('^/iota'),
+ 'Revision': '3',
+ 'Last Changed Rev': '1',
+ 'Last Changed Author': 'jrandom'
+ }]
+ svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('i'))
+
+ # r4 - Update url
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-U', sbox.repo_url,
+ '-m', '',
+ 'propset', 'svn:externals',
+ '^/iotb i', '')
+
+
+ expected_output = svntest.wc.State(wc_dir, {
+ '' : Item(status=' U'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 4)
+ expected_status.add({
+ 'i' : Item(status=' ', wc_rev='4', switched='X'),
+ 'iotb' : Item(status=' ', wc_rev='4')
+ })
+ svntest.actions.run_and_verify_update(wc_dir, expected_output, None,
+ expected_status, [], False,
+ '-r', 4, wc_dir)
+
+ expected_infos = [{
+ 'Path': re.escape(sbox.ospath('i')),
+ 'Relative URL': re.escape('^/iotb'),
+ 'Revision': '4',
+ 'Last Changed Rev': '3',
+ 'Last Changed Author': 'jrandom'
+ }]
+ svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('i'))
+
+ # r5 - Replace file
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-U', sbox.repo_url,
+ '-m', '',
+ 'rm', 'iotb',
+ 'cp', '3', 'A/mu', 'iotb')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'i' : Item(status='U '),
+ 'iotb' : Item(status='A ', prev_status='D '),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 5)
+ expected_status.add({
+ 'i' : Item(status=' ', wc_rev='5', switched='X'),
+ 'iotb' : Item(status=' ', wc_rev='5')
+ })
+ svntest.actions.run_and_verify_update(wc_dir, expected_output, None,
+ expected_status, [], False,
+ '-r', 5, wc_dir)
+
+ expected_infos = [{
+ 'Path': re.escape(sbox.ospath('i')),
+ 'Relative URL': re.escape('^/iotb'),
+ 'Revision': '5',
+ 'Last Changed Rev': '5',
+ 'Last Changed Author': 'jrandom'
+ }]
+ svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('i'))
+
+ # Back to r2. But with a conflict
+ sbox.simple_append('i', 'i')
+ expected_output = svntest.wc.State(wc_dir, {
+ '' : Item(status=' U'),
+ 'iotb' : Item(status='D '),
+ 'i' : Item(status='C '),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'i' : Item(status='C ', wc_rev='5', switched='X'),
+ })
+ svntest.actions.run_and_verify_update(wc_dir, expected_output, None,
+ expected_status, [], False,
+ '-r', 2, wc_dir)
+
+ expected_infos = [{
+ 'Path': re.escape(sbox.ospath('i')),
+ 'Relative URL': re.escape('^/iota'),
+ 'Revision': '5',
+ 'Last Changed Rev': '1',
+ 'Last Changed Author': 'jrandom',
+ 'Conflict Details': re.escape('incoming file edit upon switch'
+ ' Source left: (file) ^/iotb@5'
+ ' Source right: (file) ^/iota@5')
+ }]
+ svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('i'))
+
+def external_externally_removed(sbox):
+ "external externally removed"
+
+ sbox.build(read_only = True)
+
+ sbox.simple_propset('svn:externals', '^/A/B B', '')
+
+ # Try fetching the external with a versioned obstruction
+ sbox.simple_mkdir('B')
+ expected_err = ".*W155035: The external.*B' is already a versioned path"
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'up', sbox.wc_dir)
+ sbox.simple_rm('B')
+
+
+ os.makedirs(sbox.ospath('B'))
+ expected_err2 = "svn: warning: W155007:.*B'"
+ svntest.actions.run_and_verify_svn(None, expected_err2,
+ 'up', sbox.wc_dir)
+ os.rmdir(sbox.ospath('B'))
+
+ # Fetch the external
+ sbox.simple_update()
+
+ svntest.main.safe_rmtree(sbox.ospath('B'))
+ sbox.simple_update() # Fetched again
+ if not os.path.isdir(sbox.ospath('B')):
+ raise svntest.Failure("B not recreated")
+
+ svntest.main.safe_rmtree(sbox.ospath('B'))
+ sbox.simple_propdel('svn:externals', '')
+
+ expected_output = [
+ "Updating '%s':\n" % sbox.wc_dir,
+ "Removed external '%s'\n" % sbox.ospath('B'),
+ "Updated to revision 1.\n"
+ ]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'up', sbox.wc_dir)
+
+
+ sbox.simple_propset('svn:externals', '^/A/B B', '')
+ sbox.simple_update()
+ svntest.main.safe_rmtree(sbox.ospath('B'))
+ sbox.simple_mkdir('B')
+
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'up', sbox.wc_dir)
+
+ sbox.simple_propdel('svn:externals', '')
+ sbox.simple_update() # Should succeed
+
+def invalid_uris_in_repo(sbox):
+ "invalid URIs in repo"
+
+ sbox.build(empty=True),
+
+ # Using a dump file because the client may not allow adding invalid URIs.
+ svntest.actions.load_repo(sbox,
+ os.path.join(os.path.dirname(sys.argv[0]),
+ 'externals_tests_data',
+ 'invalid_uris_in_repo.dump'),
+ create_wc=False)
+
+  # 'foo:://host:-/path X'
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ '' : Item(status=' U')
+ })
+ expected_disk = svntest.wc.State('', {
+ })
+ expected_error = ".*warning: W205011: Error handling externals definition.*"
+
+  # A repository might have invalid URIs and the client used to SEGV.
+  # Per the accompanying invalid_uris_in_repo.dump:
+  # r1 has 'foo:://host:-/path X'
+  # r2 has 'foo://host::/path X'
+  # r3 has 'foo://host:123xx/path X'
+  # r4 has 'foo://host:123:123/path X'
+  # NOTE(review): the loop below uses range(1,4), which visits only
+  # r1..r3 -- r4 appears to go untested; confirm whether range(1,5)
+  # was intended.
+ for revision in range(1,4):
+ svntest.actions.run_and_verify_checkout(sbox.repo_url, sbox.wc_dir,
+ expected_output,
+ expected_disk,
+ expected_error,
+ "-r", revision)
+ svntest.main.safe_rmtree(sbox.wc_dir)
+
+# Like issue #3741 'externals not removed when working copy is made shallow'
+# but with --set-depth=exclude instead of --set-depth=empty.
+def update_dir_external_exclude(sbox):
+ "exclude update should remove externals"
+
+ sbox.build()
+
+ # Create an external in r2
+ sbox.simple_propset('svn:externals', '^/A/D/H X', 'A/B/E')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Now make A/B/E shallow by updating with "--set-depth exclude"
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ 'A/B/E' : Item(status='D '),
+ 'A/B/E/X' : Item(verb='Removed external'),
+ })
+ svntest.actions.run_and_verify_update(sbox.wc_dir,
+ expected_output, None, None,
+ [], False,
+ '--set-depth=exclude',
+ sbox.ospath('A/B/E'))
+
+ # And bring the external back by updating with "--set-depth infinity"
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ 'A/B/E' : Item(status='A '),
+ 'A/B/E/alpha' : Item(status='A '),
+ 'A/B/E/beta' : Item(status='A '),
+ 'A/B/E/X/chi' : Item(status='A '),
+ 'A/B/E/X/omega' : Item(status='A '),
+ 'A/B/E/X/psi' : Item(status='A '),
+ })
+ svntest.actions.run_and_verify_update(sbox.wc_dir,
+ expected_output, None, None,
+ [], False,
+ '--set-depth=infinity',
+ sbox.ospath('A/B/E'))
+
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ checkout_with_externals,
+ update_receive_new_external,
+ update_lose_external,
+ update_change_pristine_external,
+ update_change_modified_external,
+ update_receive_change_under_external,
+ modify_and_update_receive_new_external,
+ disallow_dot_or_dotdot_directory_reference,
+ export_with_externals,
+ export_wc_with_externals,
+ external_with_peg_and_op_revision,
+ new_style_externals,
+ disallow_propset_invalid_formatted_externals,
+ old_style_externals_ignore_peg_reg,
+ cannot_move_or_remove_file_externals,
+ cant_place_file_external_into_dir_external,
+ external_into_path_with_spaces,
+ binary_file_externals,
+ update_lose_file_external,
+ switch_relative_external,
+ export_sparse_wc_with_externals,
+ relegate_external,
+ wc_repos_file_externals,
+ merge_target_with_externals,
+ update_modify_file_external,
+ update_external_on_locally_added_dir,
+ switch_external_on_locally_added_dir,
+ file_external_in_sibling,
+ file_external_update_without_commit,
+ incoming_file_on_file_external,
+ incoming_file_external_on_file,
+ exclude_externals,
+ file_externals_different_url,
+ file_external_in_unversioned,
+ copy_file_externals,
+ commit_include_externals,
+ include_immediate_dir_externals,
+ shadowing,
+ remap_file_external_with_prop_del,
+ dir_external_with_dash_r_only,
+ url_to_wc_copy_of_externals,
+ duplicate_targets,
+ list_include_externals,
+ move_with_file_externals,
+ pinned_externals,
+ update_dir_external_shallow,
+ switch_parent_relative_file_external,
+ file_external_unversioned_obstruction,
+ file_external_versioned_obstruction,
+ update_external_peg_rev,
+ update_deletes_file_external,
+ switch_relative_externals,
+ copy_file_external_to_repo,
+ replace_tree_with_foreign_external,
+ copy_pin_externals_repos_repos,
+ copy_pin_externals_repos_wc,
+ copy_pin_externals_wc_repos,
+ copy_pin_externals_wc_wc,
+ copy_pin_externals_moved_external,
+ copy_pin_externals_removed_in_head,
+ copy_pin_externals_from_old_rev,
+ copy_pin_externals_wc_local_mods,
+ copy_pin_externals_wc_switched_subtrees,
+ copy_pin_externals_wc_mixed_revisions,
+ copy_pin_externals_whitespace_dir,
+ nested_notification,
+ file_external_to_normal_file,
+ file_external_recorded_info,
+ external_externally_removed,
+ invalid_uris_in_repo,
+ update_dir_external_exclude,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/externals_tests_data/invalid_uris_in_repo.dump b/subversion/tests/cmdline/externals_tests_data/invalid_uris_in_repo.dump
new file mode 100644
index 0000000..52807d4
--- /dev/null
+++ b/subversion/tests/cmdline/externals_tests_data/invalid_uris_in_repo.dump
@@ -0,0 +1,142 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 9402b9d1-cda2-4b37-b4d4-cbe5373b6650
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2017-07-30T21:45:44.035017Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 97
+Content-length: 97
+
+K 10
+svn:author
+V 2
+pm
+K 8
+svn:date
+V 27
+2017-07-30T21:45:44.071811Z
+K 7
+svn:log
+V 1
+m
+PROPS-END
+
+Node-path:
+Node-kind: dir
+Node-action: change
+Prop-content-length: 56
+Content-length: 56
+
+K 13
+svn:externals
+V 21
+foo:://host:-/path X
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 97
+Content-length: 97
+
+K 10
+svn:author
+V 2
+pm
+K 8
+svn:date
+V 27
+2017-07-30T21:45:44.117957Z
+K 7
+svn:log
+V 1
+m
+PROPS-END
+
+Node-path:
+Node-kind: dir
+Node-action: change
+Prop-content-length: 55
+Content-length: 55
+
+K 13
+svn:externals
+V 20
+foo://host::/path X
+
+PROPS-END
+
+
+Revision-number: 3
+Prop-content-length: 97
+Content-length: 97
+
+K 10
+svn:author
+V 2
+pm
+K 8
+svn:date
+V 27
+2017-07-30T21:45:44.157176Z
+K 7
+svn:log
+V 1
+m
+PROPS-END
+
+Node-path:
+Node-kind: dir
+Node-action: change
+Prop-content-length: 59
+Content-length: 59
+
+K 13
+svn:externals
+V 24
+foo://host:123xx/path X
+
+PROPS-END
+
+
+Revision-number: 4
+Prop-content-length: 97
+Content-length: 97
+
+K 10
+svn:author
+V 2
+pm
+K 8
+svn:date
+V 27
+2017-07-30T21:45:44.199565Z
+K 7
+svn:log
+V 1
+m
+PROPS-END
+
+Node-path:
+Node-kind: dir
+Node-action: change
+Prop-content-length: 61
+Content-length: 61
+
+K 13
+svn:externals
+V 26
+foo://host:123:123/path X
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/getopt_tests.py b/subversion/tests/cmdline/getopt_tests.py
new file mode 100755
index 0000000..f4e6398
--- /dev/null
+++ b/subversion/tests/cmdline/getopt_tests.py
@@ -0,0 +1,260 @@
+#!/usr/bin/env python
+#
+# getopt_tests.py: testing the svn command line processing
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import sys, re, os.path, logging
+
+logger = logging.getLogger()
+
+# Our testing module
+import svntest
+
+
+######################################################################
+# Tests
+
+#----------------------------------------------------------------------
+
+# This directory contains all the expected output from svn.
+getopt_output_dir = os.path.join(os.path.dirname(sys.argv[0]),
+ 'getopt_tests_data')
+
+# Naming convention for golden files: take the svn command line as a
+# single string and apply the following sed transformations:
+# echo svn option1 option2 ... | sed -e 's/ /_/g' -e 's/_--/--/g'
+# Then append either _stdout or _stderr for the file descriptor to
+# compare against.
+
+def load_expected_output(basename):
+ "load the expected standard output and standard error"
+
+ stdout_filename = os.path.join(getopt_output_dir, basename + '_stdout')
+ stderr_filename = os.path.join(getopt_output_dir, basename + '_stderr')
+
+ exp_stdout = open(stdout_filename, 'r').readlines()
+ exp_stderr = open(stderr_filename, 'r').readlines()
+
+ return exp_stdout, exp_stderr
+
+# With plaintext password storage enabled, `svn --version' emits a warning:
+warn_line_re = re.compile("WARNING: Plaintext password storage")
+
+# This is a list of lines to delete.
+del_lines_res = [
+ # In 'svn --version', the date line is variable, for example:
+ # "compiled Apr 5 2002, 10:08:45"
+ re.compile(r'\s+compiled\s+'),
+
+ # Also for 'svn --version':
+ re.compile(r"\* ra_(neon|local|svn|serf) :"),
+ re.compile(r" - handles '(https?|file|svn)' scheme"),
+ re.compile(r" - with Cyrus SASL authentication"),
+ re.compile(r" - using serf \d+\.\d+\.\d+"),
+ re.compile(r"\* fs_(base|fs) :"),
+
+ # Remove 'svn --version' list of platform-specific
+ # auth cache providers.
+ re.compile(r"\* Wincrypt cache.*"),
+ re.compile(r"\* Plaintext cache.*"),
+ re.compile(r"\* Gnome Keyring"),
+ re.compile(r"\* GPG-Agent"),
+ re.compile(r"\* Mac OS X Keychain"),
+ re.compile(r"\* KWallet \(KDE\)"),
+ ]
+
+# This is a list of lines to search and replace text on.
+rep_lines_res = [
+ # In 'svn --version', this line varies, for example:
+ # "Subversion Client, version 0.10.2-dev (under development)"
+ # "Subversion Client, version 0.10.2 (r1729)"
+ (re.compile(r'version \d+\.\d+\.\d+(-[^ ]*)? \(.*\)'),
+ 'version X.Y.Z '),
+ # The copyright end date keeps changing; fix forever.
+ (re.compile(r'Copyright \(C\) 20\d\d The Apache '
+ 'Software Foundation\.'),
+ 'Copyright (C) YYYY The Apache Software Foundation'),
+ # In 'svn --version --quiet', we print only the version
+ # number in a single line.
+ (re.compile(r'^\d+\.\d+\.\d+(-[a-zA-Z0-9]+)?$'), 'X.Y.Z\n'),
+ ]
+
+# This is a trigger pattern that selects the secondary set of
+# delete/replace patterns
+switch_res_line = 'System information:'
+
+# This is a list of lines to delete after having seen switch_res_line.
+switched_warn_line_re = None
+switched_del_lines_res = [
+ # In svn --version --verbose, dependent libs loaded
+ # shared libs are optional.
+ re.compile(r'^\* (loaded|linked)'),
+ # In svn --version --verbose, remove everything from
+ # the extended lists
+ re.compile(r'^ - '),
+ ]
+
+# This is a list of lines to search and replace text on after having
+# seen switch_res_line.
+switched_rep_lines_res = [
+ # We don't care about the actual canonical host
+ (re.compile('^\* running on.*$'), '* running on'),
+ ]
+
+def process_lines(lines):
+ "delete lines that should not be compared and search and replace the rest"
+ output = [ ]
+ warn_re = warn_line_re
+ del_res = del_lines_res
+ rep_res = rep_lines_res
+
+ skip_next_line = 0
+ for line in lines:
+ if skip_next_line:
+ skip_next_line = 0
+ continue
+
+ if line.startswith(switch_res_line):
+ warn_re = switched_warn_line_re
+ del_res = switched_del_lines_res
+ rep_res = switched_rep_lines_res
+
+ # Skip these lines from the output list.
+ delete_line = 0
+ if warn_re and warn_re.match(line):
+ delete_line = 1
+ skip_next_line = 1 # Ignore the empty line after the warning
+ else:
+ for delete_re in del_res:
+ if delete_re.match(line):
+ delete_line = 1
+ break
+ if delete_line:
+ continue
+
+ # Search and replace text on the rest.
+ for replace_re, replace_str in rep_res:
+ line = replace_re.sub(replace_str, line)
+
+ output.append(line)
+
+ return output
+
+def run_one_test(sbox, basename, *varargs):
+ "run svn with args and compare against the specified output files"
+
+ ### no need to use sbox.build() -- we don't need a repos or working copy
+ ### for these tests.
+
+ exp_stdout, exp_stderr = load_expected_output(basename)
+
+ # special case the 'svn' test so that no extra arguments are added
+ if basename != 'svn':
+ exit_code, actual_stdout, actual_stderr = svntest.main.run_svn(1, *varargs)
+ else:
+ exit_code, actual_stdout, actual_stderr = svntest.main.run_command(svntest.main.svn_binary,
+ 1, False, *varargs)
+
+ # Delete and perform search and replaces on the lines from the
+ # actual and expected output that may differ between build
+ # environments.
+ exp_stdout = process_lines(exp_stdout)
+ exp_stderr = process_lines(exp_stderr)
+ actual_stdout = process_lines(actual_stdout)
+ actual_stderr = process_lines(actual_stderr)
+
+ svntest.verify.compare_and_display_lines("Standard output does not match.",
+ "STDOUT", exp_stdout, actual_stdout)
+
+ svntest.verify.compare_and_display_lines("Standard error does not match.",
+ "STDERR", exp_stderr, actual_stderr)
+
+def getopt_no_args(sbox):
+ "run svn with no arguments"
+ run_one_test(sbox, 'svn')
+
+def getopt__version(sbox):
+ "run svn --version"
+ run_one_test(sbox, 'svn--version', '--version')
+
+def getopt__version__quiet(sbox):
+ "run svn --version --quiet"
+ run_one_test(sbox, 'svn--version--quiet', '--version', '--quiet')
+
+def getopt__version__verbose(sbox):
+ "run svn --version --verbose"
+ run_one_test(sbox, 'svn--version--verbose', '--version', '--verbose')
+
+def getopt__help(sbox):
+ "run svn --help"
+ run_one_test(sbox, 'svn--help', '--help')
+
+def getopt_help(sbox):
+ "run svn help"
+ run_one_test(sbox, 'svn_help', 'help')
+
+def getopt_help_log_switch(sbox):
+ "run svn help log switch"
+ run_one_test(sbox, 'svn_help_log_switch', 'help', 'log', 'switch')
+
+def getopt_help_bogus_cmd(sbox):
+ "run svn help bogus-cmd"
+ run_one_test(sbox, 'svn_help_bogus-cmd', 'help', 'bogus-cmd')
+
+def getopt_config_option(sbox):
+ "--config-option's spell checking"
+ sbox.build(create_wc=False, read_only=True)
+ expected_stderr = '.*W205000.*did you mean.*'
+ expected_stdout = svntest.verify.AnyOutput
+ svntest.actions.run_and_verify_svn2(expected_stdout, expected_stderr, 0,
+ 'info',
+ '--config-option',
+ 'config:miscellanous:diff-extensions=' +
+ '-u -p',
+ sbox.repo_url)
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ getopt_no_args,
+ getopt__version,
+ getopt__version__quiet,
+ getopt__version__verbose,
+ getopt__help,
+ getopt_help,
+ getopt_help_bogus_cmd,
+ getopt_help_log_switch,
+ getopt_config_option,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/getopt_tests_data/svn--help_stderr b/subversion/tests/cmdline/getopt_tests_data/svn--help_stderr
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/subversion/tests/cmdline/getopt_tests_data/svn--help_stderr
diff --git a/subversion/tests/cmdline/getopt_tests_data/svn--help_stdout b/subversion/tests/cmdline/getopt_tests_data/svn--help_stdout
new file mode 100644
index 0000000..05f0aa8
--- /dev/null
+++ b/subversion/tests/cmdline/getopt_tests_data/svn--help_stdout
@@ -0,0 +1,55 @@
+usage: svn <subcommand> [options] [args]
+Subversion command-line client.
+Type 'svn help <subcommand>' for help on a specific subcommand.
+Type 'svn --version' to see the program version and RA modules,
+ 'svn --version --verbose' to see dependency versions as well,
+ 'svn --version --quiet' to see just the version number.
+
+Most subcommands take file and/or directory arguments, recursing
+on the directories. If no arguments are supplied to such a
+command, it recurses on the current directory (inclusive) by default.
+
+Available subcommands:
+ add
+ auth
+ blame (praise, annotate, ann)
+ cat
+ changelist (cl)
+ checkout (co)
+ cleanup
+ commit (ci)
+ copy (cp)
+ delete (del, remove, rm)
+ diff (di)
+ export
+ help (?, h)
+ import
+ info
+ list (ls)
+ lock
+ log
+ merge
+ mergeinfo
+ mkdir
+ move (mv, rename, ren)
+ patch
+ propdel (pdel, pd)
+ propedit (pedit, pe)
+ propget (pget, pg)
+ proplist (plist, pl)
+ propset (pset, ps)
+ relocate
+ resolve
+ resolved
+ revert
+ status (stat, st)
+ switch (sw)
+ unlock
+ update (up)
+ upgrade
+ x-shelve (shelve)
+ x-unshelve (unshelve)
+ x-shelves (shelves)
+
+Subversion is a tool for version control.
+For additional information, see http://subversion.apache.org/
diff --git a/subversion/tests/cmdline/getopt_tests_data/svn--version--quiet_stderr b/subversion/tests/cmdline/getopt_tests_data/svn--version--quiet_stderr
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/subversion/tests/cmdline/getopt_tests_data/svn--version--quiet_stderr
diff --git a/subversion/tests/cmdline/getopt_tests_data/svn--version--quiet_stdout b/subversion/tests/cmdline/getopt_tests_data/svn--version--quiet_stdout
new file mode 100644
index 0000000..a803cc2
--- /dev/null
+++ b/subversion/tests/cmdline/getopt_tests_data/svn--version--quiet_stdout
@@ -0,0 +1 @@
+0.14.0
diff --git a/subversion/tests/cmdline/getopt_tests_data/svn--version--verbose_stderr b/subversion/tests/cmdline/getopt_tests_data/svn--version--verbose_stderr
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/subversion/tests/cmdline/getopt_tests_data/svn--version--verbose_stderr
diff --git a/subversion/tests/cmdline/getopt_tests_data/svn--version--verbose_stdout b/subversion/tests/cmdline/getopt_tests_data/svn--version--verbose_stdout
new file mode 100644
index 0000000..bb30055
--- /dev/null
+++ b/subversion/tests/cmdline/getopt_tests_data/svn--version--verbose_stdout
@@ -0,0 +1,97 @@
+svn, version 1.9.0-dev (under development)
+ compiled Feb 26 2014, 15:15:42 on x86_64-unknown-openbsd5.5
+
+Copyright (C) 2012 The Apache Software Foundation.
+This software consists of contributions made by many people;
+see the NOTICE file for more information.
+Subversion is open source software, see http://subversion.apache.org/
+
+The following repository access (RA) modules are available:
+
+* ra_svn : Module for accessing a repository using the svn network protocol.
+ - with Cyrus SASL authentication
+ - handles 'svn' scheme
+* ra_local : Module for accessing a repository on local disk.
+ - handles 'file' scheme
+* ra_serf : Module for accessing a repository via WebDAV protocol using serf.
+ - using serf 1.3.3
+ - handles 'http' scheme
+ - handles 'https' scheme
+
+The following authentication credential caches are available:
+
+* Plaintext cache in /home/stsp/.subversion
+* Gnome Keyring
+* GPG-Agent
+
+System information:
+
+* running on i386-apple-darwin11.4.0
+ - Mac OS X 10.7.4 Lion, build 11E53
+* linked dependencies:
+ - APR 1.4.2 (compiled with 1.4.2)
+ - APR-Util 1.3.10 (compiled with 1.3.10)
+ - SQLite 3.7.13 (compiled with 3.7.13)
+* loaded shared libraries:
+ - /opt/subversion/bin/svn (Intel 64-bit)
+ - /opt/subversion/lib/libsvn_client-1.0.dylib (Intel 64-bit)
+ - /opt/subversion/lib/libsvn_wc-1.0.dylib (Intel 64-bit)
+ - /opt/subversion/lib/libsvn_ra-1.0.dylib (Intel 64-bit)
+ - /opt/subversion/lib/libsvn_diff-1.0.dylib (Intel 64-bit)
+ - /opt/subversion/lib/libsvn_ra_local-1.0.dylib (Intel 64-bit)
+ - /opt/subversion/lib/libsvn_repos-1.0.dylib (Intel 64-bit)
+ - /opt/subversion/lib/libsvn_fs-1.0.dylib (Intel 64-bit)
+ - /opt/subversion/lib/libsvn_fs_fs-1.0.dylib (Intel 64-bit)
+ - /opt/subversion/lib/libsvn_fs_base-1.0.dylib (Intel 64-bit)
+ - /usr/local/lib/libdb-5.3.dylib (Intel 64-bit)
+ - /opt/subversion/lib/libsvn_fs_util-1.0.dylib (Intel 64-bit)
+ - /opt/subversion/lib/libsvn_ra_svn-1.0.dylib (Intel 64-bit)
+ - /usr/lib/libsasl2.2.dylib (Intel 64-bit)
+ - /opt/subversion/lib/libsvn_ra_serf-1.0.dylib (Intel 64-bit)
+ - /usr/local/lib/libserf-1.0.dylib (Intel 64-bit)
+ - /opt/subversion/lib/libsvn_delta-1.0.dylib (Intel 64-bit)
+ - /opt/subversion/lib/libsvn_subr-1.0.dylib (Intel 64-bit)
+ - /usr/lib/libexpat.1.dylib (Intel 64-bit)
+ - /usr/lib/libz.1.dylib (Intel 64-bit)
+ - /usr/local/lib/libsqlite3.0.8.6.dylib (Intel 64-bit)
+ - /usr/local/lib/libmagic.1.dylib (Intel 64-bit)
+ - /usr/lib/libaprutil-1.0.dylib (Intel 64-bit)
+ - /usr/lib/libapr-1.0.dylib (Intel 64-bit)
+ - /usr/lib/libSystem.B.dylib (Intel 64-bit)
+ - /usr/lib/libiconv.2.dylib (Intel 64-bit)
+ - /usr/lib/libpq.5.dylib (Intel 64-bit)
+ - /usr/lib/libsqlite3.dylib (Intel 64-bit)
+ - /usr/lib/libresolv.9.dylib (Intel 64-bit)
+ - /usr/lib/libssl.0.9.8.dylib (Intel 64-bit)
+ - /usr/lib/libcrypto.0.9.8.dylib (Intel 64-bit)
+ - /usr/lib/libicucore.A.dylib (Intel 64-bit)
+ - /usr/lib/libauto.dylib (Intel 64-bit)
+ - /usr/lib/libobjc.A.dylib (Intel 64-bit)
+ - /usr/lib/libstdc++.6.dylib (Intel 64-bit)
+ - /usr/lib/libpam.2.dylib (Intel 64-bit)
+ - /usr/lib/libbsm.0.dylib (Intel 64-bit)
+ - /usr/lib/libxar-nossl.dylib (Intel 64-bit)
+ - /usr/lib/libc++.1.dylib (Intel 64-bit)
+ - /usr/lib/libc++abi.dylib (Intel 64-bit)
+ - /usr/lib/libDiagnosticMessagesClient.dylib (Intel 64-bit)
+ - /usr/lib/libbz2.1.0.dylib (Intel 64-bit)
+ - /usr/lib/libxml2.2.dylib (Intel 64-bit)
+ - /usr/lib/liblangid.dylib (Intel 64-bit)
+ - /usr/lib/libCRFSuite.dylib (Intel 64-bit)
+ - /usr/lib/libxslt.1.dylib (Intel 64-bit)
+ - /usr/lib/sasl2/apop.so (Intel 64-bit)
+ - /usr/lib/sasl2/dhx.so (Intel 64-bit)
+ - /usr/lib/sasl2/digestmd5WebDAV.so (Intel 64-bit)
+ - /usr/lib/sasl2/libanonymous.2.so (Intel 64-bit)
+ - /usr/lib/sasl2/libcrammd5.2.so (Intel 64-bit)
+ - /usr/lib/sasl2/libdigestmd5.2.so (Intel 64-bit)
+ - /usr/lib/sasl2/libgssapiv2.2.so (Intel 64-bit)
+ - /usr/lib/sasl2/login.so (Intel 64-bit)
+ - /usr/lib/sasl2/libntlm.so (Intel 64-bit)
+ - /usr/lib/sasl2/libotp.2.so (Intel 64-bit)
+ - /usr/lib/sasl2/libplain.2.so (Intel 64-bit)
+ - /usr/lib/sasl2/libpps.so (Intel 64-bit)
+ - /usr/lib/sasl2/mschapv2.so (Intel 64-bit)
+ - /usr/lib/sasl2/shadow_auxprop.so (Intel 64-bit)
+ - /usr/lib/sasl2/smb_nt.so (Intel 64-bit)
+ - /usr/lib/sasl2/smb_ntlmv2.so (Intel 64-bit)
diff --git a/subversion/tests/cmdline/getopt_tests_data/svn--version_stderr b/subversion/tests/cmdline/getopt_tests_data/svn--version_stderr
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/subversion/tests/cmdline/getopt_tests_data/svn--version_stderr
diff --git a/subversion/tests/cmdline/getopt_tests_data/svn--version_stdout b/subversion/tests/cmdline/getopt_tests_data/svn--version_stdout
new file mode 100644
index 0000000..513ec11
--- /dev/null
+++ b/subversion/tests/cmdline/getopt_tests_data/svn--version_stdout
@@ -0,0 +1,26 @@
+svn, version 1.9.0-dev (under development)
+ compiled Feb 26 2014, 15:15:42 on x86_64-unknown-openbsd5.5
+
+Copyright (C) 2014 The Apache Software Foundation.
+This software consists of contributions made by many people;
+see the NOTICE file for more information.
+Subversion is open source software, see http://subversion.apache.org/
+
+The following repository access (RA) modules are available:
+
+* ra_svn : Module for accessing a repository using the svn network protocol.
+ - with Cyrus SASL authentication
+ - handles 'svn' scheme
+* ra_local : Module for accessing a repository on local disk.
+ - handles 'file' scheme
+* ra_serf : Module for accessing a repository via WebDAV protocol using serf.
+ - using serf 1.3.3
+ - handles 'http' scheme
+ - handles 'https' scheme
+
+The following authentication credential caches are available:
+
+* Plaintext cache in /home/stsp/.subversion
+* Gnome Keyring
+* GPG-Agent
+
diff --git a/subversion/tests/cmdline/getopt_tests_data/svn_help--version_stderr b/subversion/tests/cmdline/getopt_tests_data/svn_help--version_stderr
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/subversion/tests/cmdline/getopt_tests_data/svn_help--version_stderr
diff --git a/subversion/tests/cmdline/getopt_tests_data/svn_help--version_stdout b/subversion/tests/cmdline/getopt_tests_data/svn_help--version_stdout
new file mode 100644
index 0000000..0bd1126
--- /dev/null
+++ b/subversion/tests/cmdline/getopt_tests_data/svn_help--version_stdout
@@ -0,0 +1,8 @@
+help (?, h): Describe the usage of this program or its subcommands.
+usage: help [SUBCOMMAND...]
+
+Valid options:
+ --version : print client version info
+ -q [--quiet] : print nothing, or only summary information
+ --config-dir ARG : read user configuration files from directory ARG
+
diff --git a/subversion/tests/cmdline/getopt_tests_data/svn_help_bogus-cmd_stderr b/subversion/tests/cmdline/getopt_tests_data/svn_help_bogus-cmd_stderr
new file mode 100644
index 0000000..db22b2a
--- /dev/null
+++ b/subversion/tests/cmdline/getopt_tests_data/svn_help_bogus-cmd_stderr
@@ -0,0 +1,2 @@
+"bogus-cmd": unknown command.
+
diff --git a/subversion/tests/cmdline/getopt_tests_data/svn_help_bogus-cmd_stdout b/subversion/tests/cmdline/getopt_tests_data/svn_help_bogus-cmd_stdout
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/subversion/tests/cmdline/getopt_tests_data/svn_help_bogus-cmd_stdout
diff --git a/subversion/tests/cmdline/getopt_tests_data/svn_help_log_switch_stderr b/subversion/tests/cmdline/getopt_tests_data/svn_help_log_switch_stderr
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/subversion/tests/cmdline/getopt_tests_data/svn_help_log_switch_stderr
diff --git a/subversion/tests/cmdline/getopt_tests_data/svn_help_log_switch_stdout b/subversion/tests/cmdline/getopt_tests_data/svn_help_log_switch_stdout
new file mode 100644
index 0000000..5dddc70
--- /dev/null
+++ b/subversion/tests/cmdline/getopt_tests_data/svn_help_log_switch_stdout
@@ -0,0 +1,249 @@
+log: Show the log messages for a set of revision(s) and/or path(s).
+usage: 1. log [PATH][@REV]
+ 2. log URL[@REV] [PATH...]
+
+ 1. Print the log messages for the URL corresponding to PATH
+ (default: '.'). If specified, REV is the revision in which the
+ URL is first looked up, and the default revision range is REV:1.
+ If REV is not specified, the default revision range is BASE:1,
+ since the URL might not exist in the HEAD revision.
+
+ 2. Print the log messages for the PATHs (default: '.') under URL.
+ If specified, REV is the revision in which the URL is first
+ looked up, and the default revision range is REV:1; otherwise,
+ the URL is looked up in HEAD, and the default revision range is
+ HEAD:1.
+
+ Multiple '-c' or '-r' options may be specified (but not a
+ combination of '-c' and '-r' options), and mixing of forward and
+ reverse ranges is allowed.
+
+ With -v, also print all affected paths with each log message.
+ Each changed path is preceded with a symbol describing the change:
+ A: The path was added or copied.
+ D: The path was deleted.
+ R: The path was replaced (deleted and re-added in the same revision).
+ M: The path's file and/or property content was modified.
+ If an added or replaced path was copied from somewhere else, the copy
+ source path and revision are shown in parentheses.
+ If a file or directory was moved from one path to another with 'svn move'
+ the old path will be listed as deleted and the new path will be listed
+ as copied from the old path at a prior revision.
+
+ With -q, don't print the log message body itself (note that this is
+ compatible with -v).
+
+ Each log message is printed just once, even if more than one of the
+ affected paths for that revision were explicitly requested. Logs
+ follow copy history by default. Use --stop-on-copy to disable this
+ behavior, which can be useful for determining branchpoints.
+
+ The --depth option is only valid in combination with the --diff option
+ and limits the scope of the displayed diff to the specified depth.
+
+ If the --search option is used, log messages are displayed only if the
+ provided search pattern matches any of the author, date, log message
+ text (unless --quiet is used), or, if the --verbose option is also
+ provided, a changed path.
+ The search pattern may include "glob syntax" wildcards:
+ ? matches any single character
+ * matches a sequence of arbitrary characters
+ [abc] matches any of the characters listed inside the brackets
+ If multiple --search options are provided, a log message is shown if
+ it matches any of the provided search patterns. If the --search-and
+ option is used, that option's argument is combined with the pattern
+ from the previous --search or --search-and option, and a log message
+ is shown only if it matches the combined search pattern.
+ If --limit is used in combination with --search, --limit restricts the
+ number of log messages searched, rather than restricting the output
+ to a particular number of matching log messages.
+
+ Examples:
+
+ Show the latest 5 log messages for the current working copy
+ directory and display paths changed in each commit:
+ svn log -l 5 -v
+
+ Show the log for bar.c as of revision 42:
+ svn log bar.c@42
+
+ Show log messages and diffs for each commit to foo.c:
+ svn log --diff http://www.example.com/repo/project/foo.c
+ (Because the above command uses a full URL it does not require
+ a working copy.)
+
+ Show log messages for the children foo.c and bar.c of the directory
+ '/trunk' as it appeared in revision 50, using the ^/ URL shortcut:
+ svn log ^/trunk@50 foo.c bar.c
+
+ Show the log messages for any incoming changes to foo.c during the
+ next 'svn update':
+ svn log -r BASE:HEAD foo.c
+
+ Show the log message for the revision in which /branches/foo
+ was created:
+ svn log --stop-on-copy --limit 1 -r0:HEAD ^/branches/foo
+
+ If ^/trunk/foo.c was moved to ^/trunk/bar.c' in revision 22, 'svn log -v'
+ shows a deletion and a copy in its changed paths list, such as:
+ D /trunk/foo.c
+ A /trunk/bar.c (from /trunk/foo.c:21)
+
+Valid options:
+ -r [--revision] ARG : ARG (some commands also take ARG1:ARG2 range)
+ A revision argument can be one of:
+ NUMBER revision number
+ '{' DATE '}' revision at start of the date
+ 'HEAD' latest in repository
+ 'BASE' base rev of item's working copy
+ 'COMMITTED' last commit at or before BASE
+ 'PREV' revision just before COMMITTED
+ -c [--change] ARG : the change made in revision ARG
+ -q [--quiet] : do not print the log message
+ -v [--verbose] : also print all affected paths
+ -g [--use-merge-history] : use/display additional information from merge
+ history
+ --targets ARG : pass contents of file ARG as additional args
+ --stop-on-copy : do not cross copies while traversing history
+ --incremental : give output suitable for concatenation
+ --xml : output in XML
+ -l [--limit] ARG : maximum number of log entries
+ --with-all-revprops : retrieve all revision properties
+ --with-no-revprops : retrieve no revision properties
+ --with-revprop ARG : retrieve revision property ARG
+ --depth ARG : limit operation by depth ARG ('empty', 'files',
+ 'immediates', or 'infinity')
+ --diff : produce diff output
+ --diff-cmd ARG : use ARG as diff command
+ --internal-diff : override diff-cmd specified in config file
+ -x [--extensions] ARG : Specify differencing options for external diff or
+ internal diff or blame. Default: '-u'. Options are
+ separated by spaces. Internal diff and blame take:
+ -u, --unified: Show 3 lines of unified context
+ -b, --ignore-space-change: Ignore changes in
+ amount of white space
+ -w, --ignore-all-space: Ignore all white space
+ --ignore-eol-style: Ignore changes in EOL style
+ -U ARG, --context ARG: Show ARG lines of context
+ -p, --show-c-function: Show C function name
+ --search ARG : use ARG as search pattern (glob syntax, case-
+ and accent-insensitive, may require quotation marks
+ to prevent shell expansion)
+ --search-and ARG : combine ARG with the previous search pattern
+
+Global options:
+ --username ARG : specify a username ARG
+ --password ARG : specify a password ARG (caution: on many operating
+ systems, other users will be able to see this)
+ --password-from-stdin : read password from stdin
+ --no-auth-cache : do not cache authentication tokens
+ --non-interactive : do no interactive prompting (default is to prompt
+ only if standard input is a terminal device)
+ --force-interactive : do interactive prompting even if standard input
+ is not a terminal device
+ --trust-server-cert : deprecated; same as
+ --trust-server-cert-failures=unknown-ca
+ --trust-server-cert-failures ARG : with --non-interactive, accept SSL server
+ certificates with failures; ARG is comma-separated
+ list of 'unknown-ca' (Unknown Authority),
+ 'cn-mismatch' (Hostname mismatch), 'expired'
+ (Expired certificate), 'not-yet-valid' (Not yet
+ valid certificate) and 'other' (all other not
+ separately classified certificate errors).
+ --config-dir ARG : read user configuration files from directory ARG
+ --config-option ARG : set user configuration option in the format:
+ FILE:SECTION:OPTION=[VALUE]
+ For example:
+ servers:global:http-library=serf
+
+switch (sw): Update the working copy to a different URL within the same repository.
+usage: 1. switch URL[@PEGREV] [PATH]
+ 2. switch --relocate FROM-PREFIX TO-PREFIX [PATH...]
+
+ 1. Update the working copy to mirror a new URL within the repository.
+ This behavior is similar to 'svn update', and is the way to
+ move a working copy to a branch or tag within the same repository.
+ If specified, PEGREV determines in which revision the target is first
+ looked up.
+
+ If --force is used, unversioned obstructing paths in the working
+ copy do not automatically cause a failure if the switch attempts to
+ add the same path. If the obstructing path is the same type (file
+ or directory) as the corresponding path in the repository it becomes
+ versioned but its contents are left 'as-is' in the working copy.
+ This means that an obstructing directory's unversioned children may
+ also obstruct and become versioned. For files, any content differences
+ between the obstruction and the repository are treated like a local
+ modification to the working copy. All properties from the repository
+ are applied to the obstructing path.
+
+ Use the --set-depth option to set a new working copy depth on the
+ targets of this operation.
+
+ By default, Subversion will refuse to switch a working copy path to
+ a new URL with which it shares no common version control ancestry.
+ Use the '--ignore-ancestry' option to override this sanity check.
+
+ 2. The '--relocate' option is deprecated. This syntax is equivalent to
+ 'svn relocate FROM-PREFIX TO-PREFIX [PATH]'.
+
+ See also 'svn help update' for a list of possible characters
+ reporting the action taken.
+
+ Examples:
+ svn switch ^/branches/1.x-release
+ svn switch --relocate http:// svn://
+ svn switch --relocate http://www.example.com/repo/project \
+ svn://svn.example.com/repo/project
+
+Valid options:
+ -r [--revision] ARG : ARG (some commands also take ARG1:ARG2 range)
+ A revision argument can be one of:
+ NUMBER revision number
+ '{' DATE '}' revision at start of the date
+ 'HEAD' latest in repository
+ 'BASE' base rev of item's working copy
+ 'COMMITTED' last commit at or before BASE
+ 'PREV' revision just before COMMITTED
+ -N [--non-recursive] : obsolete; try --depth=files or --depth=immediates
+ --depth ARG : limit operation by depth ARG ('empty', 'files',
+ 'immediates', or 'infinity')
+ --set-depth ARG : set new working copy depth to ARG ('exclude',
+ 'empty', 'files', 'immediates', or 'infinity')
+ -q [--quiet] : print nothing, or only summary information
+ --diff3-cmd ARG : use ARG as merge command
+ --ignore-externals : ignore externals definitions
+ --ignore-ancestry : allow switching to a node with no common ancestor
+ --force : handle unversioned obstructions as changes
+ --accept ARG : specify automatic conflict resolution action
+ ('postpone', 'working', 'base', 'mine-conflict',
+ 'theirs-conflict', 'mine-full', 'theirs-full',
+ 'edit', 'launch', 'recommended') (shorthand:
+ 'p', 'mc', 'tc', 'mf', 'tf', 'e', 'l', 'r')
+ --relocate : deprecated; use 'svn relocate'
+
+Global options:
+ --username ARG : specify a username ARG
+ --password ARG : specify a password ARG (caution: on many operating
+ systems, other users will be able to see this)
+ --password-from-stdin : read password from stdin
+ --no-auth-cache : do not cache authentication tokens
+ --non-interactive : do no interactive prompting (default is to prompt
+ only if standard input is a terminal device)
+ --force-interactive : do interactive prompting even if standard input
+ is not a terminal device
+ --trust-server-cert : deprecated; same as
+ --trust-server-cert-failures=unknown-ca
+ --trust-server-cert-failures ARG : with --non-interactive, accept SSL server
+ certificates with failures; ARG is comma-separated
+ list of 'unknown-ca' (Unknown Authority),
+ 'cn-mismatch' (Hostname mismatch), 'expired'
+ (Expired certificate), 'not-yet-valid' (Not yet
+ valid certificate) and 'other' (all other not
+ separately classified certificate errors).
+ --config-dir ARG : read user configuration files from directory ARG
+ --config-option ARG : set user configuration option in the format:
+ FILE:SECTION:OPTION=[VALUE]
+ For example:
+ servers:global:http-library=serf
+
diff --git a/subversion/tests/cmdline/getopt_tests_data/svn_help_stderr b/subversion/tests/cmdline/getopt_tests_data/svn_help_stderr
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/subversion/tests/cmdline/getopt_tests_data/svn_help_stderr
diff --git a/subversion/tests/cmdline/getopt_tests_data/svn_help_stdout b/subversion/tests/cmdline/getopt_tests_data/svn_help_stdout
new file mode 100644
index 0000000..05f0aa8
--- /dev/null
+++ b/subversion/tests/cmdline/getopt_tests_data/svn_help_stdout
@@ -0,0 +1,55 @@
+usage: svn <subcommand> [options] [args]
+Subversion command-line client.
+Type 'svn help <subcommand>' for help on a specific subcommand.
+Type 'svn --version' to see the program version and RA modules,
+ 'svn --version --verbose' to see dependency versions as well,
+ 'svn --version --quiet' to see just the version number.
+
+Most subcommands take file and/or directory arguments, recursing
+on the directories. If no arguments are supplied to such a
+command, it recurses on the current directory (inclusive) by default.
+
+Available subcommands:
+ add
+ auth
+ blame (praise, annotate, ann)
+ cat
+ changelist (cl)
+ checkout (co)
+ cleanup
+ commit (ci)
+ copy (cp)
+ delete (del, remove, rm)
+ diff (di)
+ export
+ help (?, h)
+ import
+ info
+ list (ls)
+ lock
+ log
+ merge
+ mergeinfo
+ mkdir
+ move (mv, rename, ren)
+ patch
+ propdel (pdel, pd)
+ propedit (pedit, pe)
+ propget (pget, pg)
+ proplist (plist, pl)
+ propset (pset, ps)
+ relocate
+ resolve
+ resolved
+ revert
+ status (stat, st)
+ switch (sw)
+ unlock
+ update (up)
+ upgrade
+ x-shelve (shelve)
+ x-unshelve (unshelve)
+ x-shelves (shelves)
+
+Subversion is a tool for version control.
+For additional information, see http://subversion.apache.org/
diff --git a/subversion/tests/cmdline/getopt_tests_data/svn_stderr b/subversion/tests/cmdline/getopt_tests_data/svn_stderr
new file mode 100644
index 0000000..dce55a2
--- /dev/null
+++ b/subversion/tests/cmdline/getopt_tests_data/svn_stderr
@@ -0,0 +1 @@
+Type 'svn help' for usage.
diff --git a/subversion/tests/cmdline/getopt_tests_data/svn_stdout b/subversion/tests/cmdline/getopt_tests_data/svn_stdout
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/subversion/tests/cmdline/getopt_tests_data/svn_stdout
diff --git a/subversion/tests/cmdline/history_tests.py b/subversion/tests/cmdline/history_tests.py
new file mode 100755
index 0000000..31588e2
--- /dev/null
+++ b/subversion/tests/cmdline/history_tests.py
@@ -0,0 +1,217 @@
+#!/usr/bin/env python
+#
+# history_tests.py: testing history-tracing code
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os
+
+# Our testing module
+import svntest
+from svntest import wc
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = wc.StateItem
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+#----------------------------------------------------------------------
+
+def cat_traces_renames(sbox):
+ "verify that 'svn cat' traces renames"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ rho_path = os.path.join(wc_dir, 'A', 'D', 'G', 'rho')
+ pi_path = os.path.join(wc_dir, 'A', 'D', 'G', 'pi')
+ bloo_path = os.path.join(wc_dir, 'A', 'D', 'G', 'bloo')
+
+ # rename rho to bloo. commit r2.
+ svntest.main.run_svn(None, 'mv', rho_path, bloo_path)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/rho' : Item(verb='Deleting'),
+ 'A/D/G/bloo' : Item(verb='Adding')
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/D/G/rho')
+ expected_status.add({ 'A/D/G/bloo' :
+ Item(wc_rev=2, status=' ') })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # rename pi to rho. commit r3.
+ svntest.main.run_svn(None, 'mv', pi_path, rho_path)
+
+ # svn cat -r1 rho --> should show pi's contents.
+ svntest.actions.run_and_verify_svn([ "This is the file 'pi'.\n"], [],
+ 'cat', '-r', '1', rho_path)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/pi' : Item(verb='Deleting'),
+ 'A/D/G/rho' : Item(verb='Adding')
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/D/G/pi')
+ expected_status.tweak('A/D/G/rho', wc_rev=3)
+ expected_status.add({ 'A/D/G/bloo' :
+ Item(wc_rev=2, status=' ') })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # update whole wc to HEAD
+ expected_output = svntest.wc.State(wc_dir, { }) # no output
+ expected_status.tweak(wc_rev=3)
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/D/G/pi', 'A/D/G/rho')
+ expected_disk.add({
+ 'A/D/G/rho' : Item("This is the file 'pi'.\n"),
+ })
+ expected_disk.add({
+ 'A/D/G/bloo' : Item("This is the file 'rho'.\n"),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+ # 'svn cat bloo' --> should show rho's contents.
+ svntest.actions.run_and_verify_svn([ "This is the file 'rho'.\n"], [],
+ 'cat', bloo_path)
+
+ # svn cat -r1 bloo --> should still show rho's contents.
+ svntest.actions.run_and_verify_svn([ "This is the file 'rho'.\n"], [],
+ 'cat', '-r', '1', bloo_path)
+
+ # svn cat -r1 rho --> should show pi's contents.
+ svntest.actions.run_and_verify_svn([ "This is the file 'pi'.\n"], [],
+ 'cat', '-r', '1', rho_path)
+
+ # svn up -r1
+ svntest.actions.run_and_verify_svn(None, [], 'up', '-r', '1', wc_dir)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # svn cat -rHEAD rho --> should see 'unrelated object' error.
+ svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
+ 'cat', '-r', 'HEAD', rho_path)
+
+@Issue(1970)
+def cat_avoids_false_identities(sbox):
+ "verify that 'svn cat' avoids false identities"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Issue #1970
+ #
+ # Highlight a bug in the client side use of the repository's
+ # location searching algorithm.
+ #
+ # The buggy history-following algorithm determines the paths that a
+ # line of history would be *expected to be* found in a given revision,
+ # but doesn't treat copies as gaps in the historical sequence. If
+ # some other object fills those gaps at the same expected path, the
+ # client will find the wrong object.
+ #
+ # In the recipe below, iota gets created in r1. In r2, it is
+ # deleted and replaced with an unrelated object at the same path.
+ # In r3, the interloper is deleted. In r4, the original iota is
+ # resurrected via a copy from r1.
+ #
+ # ,- - - - - - --.
+ # o---| o---| o o----->
+ #
+ # | | | |
+ # r1 r2 r3 r4
+ #
+ # In a working copy at r4, running
+ #
+ # $ svn cat -r2 iota
+ #
+ # should result in an error, but with the bug it instead cats the r2
+ # interloper.
+ #
+ # To reassure yourself that that's wrong, recall that the above
+ # command is equivalent to
+ #
+ # $ svn cat -r2 iota@4
+ #
+ # Now do you see the evil that lies within us?
+
+ iota_path = os.path.join(wc_dir, 'iota')
+ iota_url = sbox.repo_url + '/iota'
+
+ # r2
+ svntest.main.run_svn(None, 'del', iota_path)
+ svntest.main.file_append(iota_path, "YOU SHOULD NOT SEE THIS\n")
+ svntest.main.run_svn(None, 'add', iota_path)
+ sbox.simple_commit(message='log msg')
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+ # r3
+ svntest.main.run_svn(None, 'del', iota_path)
+ sbox.simple_commit(message='log msg')
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+ # r4
+ svntest.main.run_svn(None, 'cp', iota_url + '@1', wc_dir)
+ sbox.simple_commit(message='log msg')
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+ # 'svn cat -r2 iota' should error, because the line of history
+ # currently identified by /iota did not exist in r2, even though a
+ # totally unrelated file of the same name did.
+ svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
+ 'cat', '-r', '2', iota_path)
+
+
+########################################################################
+# Run the tests
+
+# list all tests here, starting with None:
+test_list = [ None,
+ cat_traces_renames,
+ cat_avoids_false_identities,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/import_tests.py b/subversion/tests/cmdline/import_tests.py
new file mode 100755
index 0000000..1f33186
--- /dev/null
+++ b/subversion/tests/cmdline/import_tests.py
@@ -0,0 +1,616 @@
+#!/usr/bin/env python
+#
+# import_tests.py: import tests
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import re, os.path, sys, stat
+
+# Our testing module
+import svntest
+from svntest import wc
+from prop_tests import create_inherited_ignores_config
+from svntest.main import SVN_PROP_INHERITABLE_IGNORES
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = wc.StateItem
+exp_noop_up_out = svntest.actions.expected_noop_update_output
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+#----------------------------------------------------------------------
+# this test should be SKIPped on systems without the executable bit
+@SkipUnless(svntest.main.is_posix_os)
+def import_executable(sbox):
+ "import of executable files"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # create a new directory with files of various permissions
+ xt_path = os.path.join(wc_dir, "XT")
+ os.makedirs(xt_path)
+ all_path = os.path.join(wc_dir, "XT/all_exe")
+ none_path = os.path.join(wc_dir, "XT/none_exe")
+ user_path = os.path.join(wc_dir, "XT/user_exe")
+ group_path = os.path.join(wc_dir, "XT/group_exe")
+ other_path = os.path.join(wc_dir, "XT/other_exe")
+
+ for path in [all_path, none_path, user_path, group_path, other_path]:
+ svntest.main.file_append(path, "some text")
+
+ # set executable bits
+ os.chmod(all_path, svntest.main.S_ALL_RWX)
+ os.chmod(none_path, svntest.main.S_ALL_RW)
+ os.chmod(user_path, svntest.main.S_ALL_RW | stat.S_IXUSR)
+ os.chmod(group_path, svntest.main.S_ALL_RW | stat.S_IXGRP)
+ os.chmod(other_path, svntest.main.S_ALL_RW | stat.S_IXOTH)
+
+ # import new files into repository
+ url = sbox.repo_url
+ exit_code, output, errput = svntest.actions.run_and_verify_svn(
+ None, [], 'import',
+ '-m', 'Log message for new import', xt_path, url)
+
+ lastline = output.pop().strip()
+ cm = re.compile ("(Committed|Imported) revision [0-9]+.")
+ match = cm.search (lastline)
+ if not match:
+ ### we should raise a less generic error here. which?
+ raise svntest.Failure
+
+ # remove (uncontrolled) local files
+ svntest.main.safe_rmtree(xt_path)
+
+ # Create expected disk tree for the update (disregarding props)
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'all_exe' : Item('some text', props={'svn:executable' : ''}),
+ 'none_exe' : Item('some text'),
+ 'user_exe' : Item('some text', props={'svn:executable' : ''}),
+ 'group_exe' : Item('some text'),
+ 'other_exe' : Item('some text'),
+ })
+
+ # Create expected status tree for the update (disregarding props).
+ # Newly imported file should be at revision 2.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'all_exe' : Item(status=' ', wc_rev=2),
+ 'none_exe' : Item(status=' ', wc_rev=2),
+ 'user_exe' : Item(status=' ', wc_rev=2),
+ 'group_exe' : Item(status=' ', wc_rev=2),
+ 'other_exe' : Item(status=' ', wc_rev=2),
+ })
+
+ # Create expected output tree for the update.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'all_exe' : Item(status='A '),
+ 'none_exe' : Item(status='A '),
+ 'user_exe' : Item(status='A '),
+ 'group_exe' : Item(status='A '),
+ 'other_exe' : Item(status='A '),
+ })
+ # do update and check three ways
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ check_props=True)
+
+#----------------------------------------------------------------------
+def import_ignores(sbox):
+ 'do not import ignored files in imported dirs'
+
+ # The bug was that
+ #
+ # $ svn import dir
+ #
+ # where dir contains some items that match the ignore list and some
+ # do not would add all items, ignored or not.
+ #
+ # This has been fixed by testing each item with the new
+ # svn_wc_is_ignored function.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ dir_path = os.path.join(wc_dir, 'dir')
+ foo_c_path = os.path.join(dir_path, 'foo.c')
+ foo_o_path = os.path.join(dir_path, 'foo.o')
+
+ os.mkdir(dir_path, svntest.main.S_ALL_RX | stat.S_IWUSR)
+ open(foo_c_path, 'w')
+ open(foo_o_path, 'w')
+
+ # import new dir into repository
+ url = sbox.repo_url + '/dir'
+
+ exit_code, output, errput = svntest.actions.run_and_verify_svn(
+ None, [], 'import',
+ '-m', 'Log message for new import',
+ dir_path, url)
+
+ lastline = output.pop().strip()
+ cm = re.compile ("(Committed|Imported) revision [0-9]+.")
+ match = cm.search (lastline)
+ if not match:
+ ### we should raise a less generic error here. which?
+ raise svntest.verify.SVNUnexpectedOutput
+
+ # remove (uncontrolled) local dir
+ svntest.main.safe_rmtree(dir_path)
+
+ # Create expected disk tree for the update (disregarding props)
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'dir/foo.c' : Item(''),
+ })
+
+ # Create expected status tree for the update (disregarding props).
+ # Newly imported file should be at revision 2.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'dir' : Item(status=' ', wc_rev=2),
+ 'dir/foo.c' : Item(status=' ', wc_rev=2),
+ })
+
+ # Create expected output tree for the update.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'dir' : Item(status='A '),
+ 'dir/foo.c' : Item(status='A '),
+ })
+
+ # do update and check three ways
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ check_props=True)
+
+#----------------------------------------------------------------------
+def import_no_ignores(sbox):
+ 'import ignored files in imported dirs'
+
+ # import ignored files using the "--no-ignore" option
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ dir_path = os.path.join(wc_dir, 'dir')
+ foo_c_path = os.path.join(dir_path, 'foo.c')
+ foo_o_path = os.path.join(dir_path, 'foo.o')
+ foo_lo_path = os.path.join(dir_path, 'foo.lo')
+ foo_rej_path = os.path.join(dir_path, 'foo.rej')
+
+ os.mkdir(dir_path, svntest.main.S_ALL_RX | stat.S_IWUSR)
+ open(foo_c_path, 'w')
+ open(foo_o_path, 'w')
+ open(foo_lo_path, 'w')
+ open(foo_rej_path, 'w')
+
+ # import new dir into repository
+ url = sbox.repo_url + '/dir'
+
+ exit_code, output, errput = svntest.actions.run_and_verify_svn(
+ None, [], 'import',
+ '-m', 'Log message for new import', '--no-ignore',
+ dir_path, url)
+
+ lastline = output.pop().strip()
+ cm = re.compile ("(Committed|Imported) revision [0-9]+.")
+ match = cm.search (lastline)
+ if not match:
+ raise svntest.Failure
+
+ # remove (uncontrolled) local dir
+ svntest.main.safe_rmtree(dir_path)
+
+ # Create expected disk tree for the update (disregarding props)
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'dir/foo.c' : Item(''),
+ 'dir/foo.o' : Item(''),
+ 'dir/foo.lo' : Item(''),
+ 'dir/foo.rej' : Item(''),
+ })
+
+ # Create expected status tree for the update (disregarding props).
+ # Newly imported file should be at revision 2.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'dir' : Item(status=' ', wc_rev=2),
+ 'dir/foo.c' : Item(status=' ', wc_rev=2),
+ 'dir/foo.o' : Item(status=' ', wc_rev=2),
+ 'dir/foo.lo' : Item(status=' ', wc_rev=2),
+ 'dir/foo.rej' : Item(status=' ', wc_rev=2),
+ })
+
+ # Create expected output tree for the update.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'dir' : Item(status='A '),
+ 'dir/foo.c' : Item(status='A '),
+ 'dir/foo.o' : Item(status='A '),
+ 'dir/foo.lo' : Item(status='A '),
+ 'dir/foo.rej' : Item(status='A '),
+ })
+
+ # do update and check three ways
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ check_props=True)
+#----------------------------------------------------------------------
+def import_avoid_empty_revision(sbox):
+ "avoid creating empty revisions with import"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # create a new directory
+ empty_dir = os.path.join(wc_dir, "empty_dir")
+ os.makedirs(empty_dir)
+
+ url = sbox.repo_url
+ svntest.actions.run_and_verify_svn(None, [], 'import',
+ '-m', 'Log message for new import',
+ empty_dir, url)
+
+ svntest.main.safe_rmtree(empty_dir)
+
+ # Verify that an empty revision has not been created
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(1),
+ [], "update",
+ empty_dir)
+#----------------------------------------------------------------------
+
+# test for issue 2433: "import" does not handle eol-style correctly
+# and for normalising files with mixed line-endings upon import (r1205193)
+@Issue(2433)
+def import_eol_style(sbox):
+ "import should honor the eol-style property"
+
+ sbox.build()
+ os.chdir(sbox.wc_dir)
+
+ # setup a custom config, we need autoprops
+ config_contents = '''\
+[auth]
+password-stores =
+
+[miscellany]
+enable-auto-props = yes
+
+[auto-props]
+*.dsp = svn:eol-style=CRLF
+*.txt = svn:eol-style=native
+'''
+ config_dir = sbox.create_config_dir(config_contents)
+
+ # create a new file and import it
+ file_name = "test.dsp"
+ file_path = file_name
+ imp_dir_path = 'dir'
+ imp_file_path = os.path.join(imp_dir_path, file_name)
+
+ os.mkdir(imp_dir_path, svntest.main.S_ALL_RX | stat.S_IWUSR)
+ svntest.main.file_write(imp_file_path, "This is file test.dsp.\n")
+
+ svntest.actions.run_and_verify_svn(None, [], 'import',
+ '-m', 'Log message for new import',
+ imp_dir_path,
+ sbox.repo_url,
+ '--config-dir', config_dir)
+
+ svntest.main.run_svn(None, 'update', '.', '--config-dir', config_dir)
+
+ # change part of the file
+ svntest.main.file_append(file_path, "Extra line\n")
+
+ # get a diff of the file, if the eol style is handled correctly, we'll
+ # only see our added line here.
+ # Before the issue was fixed, we would have seen something like this:
+ # @@ -1 +1,2 @@
+ # -This is file test.dsp.
+ # +This is file test.dsp.
+ # +Extra line
+
+  # eol style of test.dsp is CRLF, so diff will use that too. Make sure we
+ # define CRLF in a platform independent way.
+ # CRLF is a string that will match a CRLF sequence read from a text file.
+ # ### On Windows, we assume CRLF will be read as LF, so it's a poor test.
+ if os.name == 'nt':
+ crlf = '\n'
+ else:
+ crlf = '\r\n'
+
+ expected_output = [
+ "Index: test.dsp\n",
+ "===================================================================\n",
+ "--- test.dsp\t(revision 2)\n",
+ "+++ test.dsp\t(working copy)\n",
+ "@@ -1 +1,2 @@\n",
+ " This is file test.dsp." + crlf,
+ "+Extra line" + crlf
+ ]
+
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'diff',
+ file_path,
+ '--config-dir', config_dir)
+
+ # create a file with inconsistent EOLs and eol-style=native, and import it
+ file_name = "test.txt"
+ file_path = file_name
+ imp_dir_path = 'dir2'
+ imp_file_path = os.path.join(imp_dir_path, file_name)
+
+ os.mkdir(imp_dir_path, svntest.main.S_ALL_RX | stat.S_IWUSR)
+ svntest.main.file_append_binary(imp_file_path,
+ "This is file test.txt.\n" + \
+ "The second line.\r\n" + \
+ "The third line.\r")
+
+ # The import should succeed and not error out
+ svntest.actions.run_and_verify_svn(None, [], 'import',
+ '-m', 'Log message for new import',
+ imp_dir_path,
+ sbox.repo_url,
+ '--config-dir', config_dir)
+
+
+#----------------------------------------------------------------------
+@Issue(3983)
+def import_into_foreign_repo(sbox):
+ "import into a foreign repo"
+
+ sbox.build(read_only=True)
+
+ other_repo_dir, other_repo_url = sbox.add_repo_path('other')
+ svntest.main.safe_rmtree(other_repo_dir, 1)
+ svntest.main.create_repos(other_repo_dir)
+
+ svntest.actions.run_and_verify_svn(None, [], 'import',
+ '-m', 'Log message for new import',
+ sbox.ospath('A/mu'), other_repo_url + '/f')
+
+#----------------------------------------------------------------------
+def import_inherited_ignores(sbox):
+ 'import and inherited ignores'
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Create this config file:
+ #
+ # [miscellany]
+ # global-ignores = *.boo *.goo
+ config_dir = create_inherited_ignores_config(sbox)
+
+ # Set some ignore properties.
+ sbox.simple_propset(SVN_PROP_INHERITABLE_IGNORES, '*.voo *.noo *.loo', '.')
+ sbox.simple_propset(SVN_PROP_INHERITABLE_IGNORES, '*.yoo\t*.doo', 'A/B')
+ sbox.simple_propset(SVN_PROP_INHERITABLE_IGNORES, '*.moo', 'A/D')
+ sbox.simple_propset('svn:ignore', '*.zoo\n*.foo\n*.poo', 'A/B/E')
+ sbox.simple_commit()
+
+ # Use this tree for importing:
+ #
+ # DIR1.noo
+ # DIR2.doo
+ # file1.txt
+ # DIR3.foo
+ # file2.txt
+ # DIR4.goo
+ # file3.txt
+ # file4.noo
+ # DIR5.moo
+ # file5.txt
+ # DIR6
+ # file6.foo
+ # DIR7
+ # file7.foo
+ # DIR8.noo
+ tmp_dir = os.path.abspath(svntest.main.temp_dir)
+ import_tree_dir = os.path.join(tmp_dir, 'import_tree_' + sbox.name)
+
+ # Relative WC paths of the imported tree.
+ dir1_path = os.path.join('DIR1.noo')
+ dir2_path = os.path.join('DIR2.doo')
+ file1_path = os.path.join('DIR2.doo', 'file1.txt')
+ dir3_path = os.path.join('DIR3.foo')
+ file2_path = os.path.join('DIR3.foo', 'file2.txt')
+ dir4_path = os.path.join('DIR4.goo')
+ file3_path = os.path.join('DIR4.goo', 'file3.txt')
+ file4_path = os.path.join('DIR4.goo', 'file4.noo')
+ dir5_path = os.path.join('DIR5.moo')
+ file5_path = os.path.join('DIR5.moo', 'file5.txt')
+ dir6_path = os.path.join('DIR6')
+ file6_path = os.path.join('DIR6', 'file6.foo')
+ dir7_path = os.path.join('DIR6', 'DIR7')
+ file7_path = os.path.join('DIR6', 'DIR7', 'file7.foo')
+ dir8_path = os.path.join('DIR6', 'DIR7', 'DIR8.noo')
+
+ import_dirs = [
+ dir1_path,
+ dir2_path,
+ dir3_path,
+ dir4_path,
+ dir5_path,
+ dir6_path,
+ dir7_path,
+ dir8_path,
+ ]
+ import_files = [
+ file1_path,
+ file2_path,
+ file3_path,
+ file4_path,
+ file5_path,
+ file6_path,
+ file7_path,
+ ]
+
+ # Create the (unversioned) tree to be imported.
+ os.mkdir(import_tree_dir)
+ for p in import_dirs:
+ os.mkdir(os.path.join(import_tree_dir, p))
+ for p in import_files:
+ svntest.main.file_write(os.path.join(import_tree_dir, p), 'A file')
+
+ # Import the tree to ^/A/B/E.
+ # We should not see any *.noo paths because those are blocked at the
+ # root of the repository by the svn:global-ignores property. Likewise
+ # *.doo paths are blocked by the svn:global-ignores on ^/A/B. Nor
+  # should we see any *.boo or *.goo paths, as those are blocked by the
+ # global-ignores config. Lastly, ^/A/B/E should not get any *.foo paths
+ # because of the svn:ignore property on ^/A/B/E, but non-immediate children
+ # of ^/A/B/E are permitted *.foo paths.
+ svntest.actions.run_and_verify_svn(None, [], 'import',
+ '--config-dir', config_dir,
+ import_tree_dir,
+ sbox.repo_url + '/A/B/E',
+ '-m', 'import')
+ E_path = os.path.join(wc_dir, 'A', 'B', 'E')
+ expected_output = svntest.verify.UnorderedOutput(
+ ["Updating '" + wc_dir + "':\n",
+ 'A ' + os.path.join(E_path, dir5_path) + '\n',
+ 'A ' + os.path.join(E_path, file5_path) + '\n',
+ 'A ' + os.path.join(E_path, dir6_path) + '\n',
+ 'A ' + os.path.join(E_path, file6_path) + '\n',
+ 'A ' + os.path.join(E_path, dir7_path) + '\n',
+ 'A ' + os.path.join(E_path, file7_path) + '\n',
+ 'Updated to revision 3.\n'])
+ svntest.actions.run_and_verify_svn(expected_output, [], 'up', wc_dir)
+
+ # Import the tree to ^/A/B/E/Z. The only difference from above is that
+ # DIR3.foo and its child file2.txt are also imported. Why? Because now
+ # we are creating a new directory in ^/A/B/E, so the svn:ignore property
+ # set on ^/A/B/E doesn't apply.
+ svntest.actions.run_and_verify_svn(None, [], 'import',
+ '--config-dir', config_dir,
+ import_tree_dir,
+ sbox.repo_url + '/A/B/E/Z',
+ '-m', 'import')
+ Z_path = os.path.join(wc_dir, 'A', 'B', 'E', 'Z')
+ expected_output = svntest.verify.UnorderedOutput(
+ ["Updating '" + wc_dir + "':\n",
+ 'A ' + os.path.join(Z_path) + '\n',
+ 'A ' + os.path.join(Z_path, dir5_path) + '\n',
+ 'A ' + os.path.join(Z_path, file5_path) + '\n',
+ 'A ' + os.path.join(Z_path, dir6_path) + '\n',
+ 'A ' + os.path.join(Z_path, file6_path) + '\n',
+ 'A ' + os.path.join(Z_path, dir7_path) + '\n',
+ 'A ' + os.path.join(Z_path, file7_path) + '\n',
+ 'A ' + os.path.join(Z_path, dir3_path) + '\n',
+ 'A ' + os.path.join(Z_path, file2_path) + '\n',
+ 'Updated to revision 4.\n'])
+ svntest.actions.run_and_verify_svn(expected_output, [], 'up', wc_dir)
+
+ # Import the tree to ^/A/B/F with the --no-ignore option.
+ # No ignores should be considered and the whole tree should
+ # be imported.
+ svntest.actions.run_and_verify_svn(None, [], 'import',
+ '--config-dir', config_dir,
+ '--no-ignore', import_tree_dir,
+ sbox.repo_url + '/A/B/F',
+ '-m', 'import')
+ F_path = os.path.join(wc_dir, 'A', 'B', 'F')
+ expected_output = svntest.verify.UnorderedOutput(
+ ["Updating '" + wc_dir + "':\n",
+ 'A ' + os.path.join(F_path, dir1_path) + '\n',
+ 'A ' + os.path.join(F_path, dir2_path) + '\n',
+ 'A ' + os.path.join(F_path, file1_path) + '\n',
+ 'A ' + os.path.join(F_path, dir3_path) + '\n',
+ 'A ' + os.path.join(F_path, file2_path) + '\n',
+ 'A ' + os.path.join(F_path, dir4_path) + '\n',
+ 'A ' + os.path.join(F_path, file3_path) + '\n',
+ 'A ' + os.path.join(F_path, file4_path) + '\n',
+ 'A ' + os.path.join(F_path, dir5_path) + '\n',
+ 'A ' + os.path.join(F_path, file5_path) + '\n',
+ 'A ' + os.path.join(F_path, dir6_path) + '\n',
+ 'A ' + os.path.join(F_path, file6_path) + '\n',
+ 'A ' + os.path.join(F_path, dir7_path) + '\n',
+ 'A ' + os.path.join(F_path, file7_path) + '\n',
+ 'A ' + os.path.join(F_path, dir8_path) + '\n',
+ 'Updated to revision 5.\n'])
+ svntest.actions.run_and_verify_svn(expected_output, [], 'up', wc_dir)
+
+ # Try importing a single file into a directory which has svn:ignore set
+ # on it with a matching pattern of the imported file. The import should
+ # be a no-op.
+ svntest.actions.run_and_verify_svn([], [], 'import',
+ '--config-dir', config_dir,
+ os.path.join(import_tree_dir,
+ 'DIR6', 'file6.foo'),
+ sbox.repo_url + '/A/B/E/file6.foo',
+ '-m', 'This import should fail!')
+
+ # Try the above, but this time with --no-ignore, this time the import
+ # should succeed.
+ svntest.actions.run_and_verify_svn(None, [], 'import', '--no-ignore',
+ '--config-dir', config_dir,
+ os.path.join(import_tree_dir,
+ 'DIR6', 'file6.foo'),
+ sbox.repo_url + '/A/B/E/file6.foo',
+ '-m', 'import')
+ expected_output = svntest.verify.UnorderedOutput(
+ ["Updating '" + wc_dir + "':\n",
+ 'A ' + os.path.join(E_path, 'file6.foo') + '\n',
+ 'Updated to revision 6.\n'])
+ svntest.actions.run_and_verify_svn(expected_output, [], 'up', wc_dir)
+
+#----------------------------------------------------------------------
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ import_executable,
+ import_ignores,
+ import_avoid_empty_revision,
+ import_no_ignores,
+ import_eol_style,
+ import_into_foreign_repo,
+ import_inherited_ignores,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/info_tests.py b/subversion/tests/cmdline/info_tests.py
new file mode 100755
index 0000000..187cd7f
--- /dev/null
+++ b/subversion/tests/cmdline/info_tests.py
@@ -0,0 +1,778 @@
+#!/usr/bin/env python
+#
+# info_tests.py: testing the svn info command
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# See basic_tests.py for more svn info tests.
+
+# General modules
+import shutil, stat, re, os, logging
+
+logger = logging.getLogger()
+
+# Our testing module
+import svntest
+
+from prop_tests import binary_mime_type_on_text_file_warning
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+#----------------------------------------------------------------------
+
+# Helpers for XML output
+def verify_xml_elements(lines, exprs):
+ """Verify that each of the given expressions matches exactly one XML
+ element in the list of lines. Each expression is a tuple containing
+ a name (a string), a set of attribute name-value pairs (a dict of
+ string->string), and element content (a string). The attribute dict
+ and the content string are by default empty.
+
+ Expression format: [ ('name', {'att': 'val', ...}, 'text') , ...]
+
+ Limitations:
+ We don't verify that the input is a valid XML document.
+ We can't verify text mixed with child elements.
+ We don't handle XML comments.
+ All of these are taken care of by the Relax NG schemas.
+ """
+ xml_version_re = re.compile(r"<\?xml\s+[^?]+\?>")
+
+ str = ""
+ for line in lines:
+ str += line
+ m = xml_version_re.match(str)
+ if m:
+ str = str[m.end():] # skip xml version tag
+ (unmatched_str, unmatched_exprs) = match_xml_element(str, exprs)
+ if unmatched_exprs:
+ logger.warn("Failed to find the following expressions:")
+ for expr in unmatched_exprs:
+ logger.warn(expr)
+ raise svntest.tree.SVNTreeUnequal
+
+def match_xml_element(str, exprs):
+ """Read from STR until the start of an element. If no element is found,
+ return the arguments. Get the element name, attributes and text content.
+ If not empty, call recursively on the text content. Compare the current
+ element to all expressions in EXPRS. If no elements were found in the
+ current element's text, include the text in the comparison (i.e., we
+ don't support mixed content). Return the unmatched part of the string
+ and any unmatched expressions.
+ """
+ start_tag_re = re.compile(r"[^<]*<(?P<name>[\w-]+)", re.M)
+ atttribute_re = re.compile(
+ r"\s+(?P<key>[\w-]+)\s*=\s*(['\"])(?P<val>[^'\"]*)\2", re.M)
+ self_closing_re = re.compile(r"\s*/>", re.M)
+  content_re_str = "\\s*>(?P<content>.*?)</%s\\s*>"
+
+ m = start_tag_re.match(str)
+ if not m:
+ return (str, exprs)
+ name = m.group('name')
+ str = str[m.end():]
+ atts = {}
+ while True:
+ m = atttribute_re.match(str)
+ if not m:
+ break
+ else:
+ atts[m.group('key')] = m.group('val')
+ str = str[m.end():]
+ m = self_closing_re.match(str)
+ if m:
+ content = ''
+ str = str[m.end():]
+ else:
+ content_re = re.compile(content_re_str % name, re.DOTALL)
+ m = content_re.match(str)
+ if not m:
+ logger.warn("No XML end-tag for '%s' found in '%s...'" % (name, str[:100]))
+ raise(svntest.tree.SVNTreeUnequal)
+ content = m.group('content')
+ str = str[m.end():]
+ if content != '':
+ while True:
+ (new_content, exprs) = match_xml_element(content, exprs)
+ if new_content == content:
+ # there are no (more) child elements
+ break
+ else:
+ content = new_content
+ if exprs:
+ for expr in exprs:
+ # compare element names
+ e_name = expr[0]
+ if (e_name != name):
+ continue
+ # compare element attributes
+ e_atts = {}
+ if len(expr) > 1:
+ e_atts = expr[1]
+ if e_atts != atts:
+ continue
+ # compare element content (text only)
+ e_content = ''
+ if len(expr) > 2:
+ e_content = expr[2]
+ if (not re.search(e_content, content)):
+ continue
+ # success!
+ exprs.remove(expr)
+ return (str, exprs)
+
+def info_with_tree_conflicts(sbox):
+ "info with tree conflicts"
+
+ # Info messages reflecting tree conflict status.
+ # These tests correspond to use cases 1-3 in
+ # notes/tree-conflicts/use-cases.txt.
+
+ svntest.actions.build_greek_tree_conflicts(sbox)
+ wc_dir = sbox.wc_dir
+ G = os.path.join(wc_dir, 'A', 'D', 'G')
+
+ scenarios = [
+ # (filename, action, reason)
+ ('pi', 'edit', 'delete'),
+ ('rho', 'delete', 'edit'),
+ ('tau', 'delete', 'delete'),
+ ]
+
+ for fname, action, reason in scenarios:
+ path = os.path.join(G, fname)
+
+ # check plain info
+ expected_str1 = ".*local file %s, incoming file %s.*" % (reason, action)
+ expected_info = { 'Tree conflict' : expected_str1 }
+ svntest.actions.run_and_verify_info([expected_info], path)
+
+ # check XML info
+ exit_code, output, error = svntest.actions.run_and_verify_svn(None,
+ [], 'info',
+ path,
+ '--xml')
+
+ # In the XML, action and reason are past tense: 'edited' not 'edit'.
+ verify_xml_elements(output,
+ [('tree-conflict', {'victim' : fname,
+ 'kind' : 'file',
+ 'operation': 'update',
+ 'action' : action,
+ 'reason' : reason,
+ },
+ )])
+
+ # Check recursive info.
+ expected_infos = [{ 'Path' : re.escape(G) }]
+ for fname, action, reason in scenarios:
+ path = os.path.join(G, fname)
+ tree_conflict_re = ".*local file %s, incoming file %s.*" % (reason, action)
+ expected_infos.append({ 'Path' : re.escape(path),
+ 'Tree conflict' : tree_conflict_re })
+ expected_infos.sort(key=lambda info: info['Path'])
+ svntest.actions.run_and_verify_info(expected_infos, G, '-R')
+
+def info_on_added_file(sbox):
+ """info on added file"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # create new file
+ new_file = os.path.join(wc_dir, 'new_file')
+ svntest.main.file_append(new_file, '')
+
+ svntest.main.run_svn(None, 'add', new_file)
+
+ uuid_regex = '[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}'
+
+ # check that we have a Repository Root and Repository UUID
+ expected = {'Path' : re.escape(new_file),
+ 'Name' : 'new_file',
+ 'URL' : '.*/new_file',
+ 'Relative URL' : '.*/new_file',
+ 'Repository Root' : '.*',
+ 'Node Kind' : 'file',
+ 'Schedule' : 'add',
+ 'Repository UUID' : uuid_regex,
+ }
+
+ svntest.actions.run_and_verify_info([expected], new_file)
+
+ # check XML info
+ exit_code, output, error = svntest.actions.run_and_verify_svn(None,
+ [], 'info',
+ new_file,
+ '--xml')
+
+ verify_xml_elements(output,
+ [('entry', {'kind' : 'file',
+ 'path' : new_file,
+ 'revision' : 'Resource is not under version control.'}),
+ ('url', {}, '.*/new_file'),
+ ('relative-url', {}, '.*/new_file'),
+ ('root', {}, '.*'),
+ ('uuid', {}, uuid_regex),
+ ('depth', {}, 'infinity'),
+ ('schedule', {}, 'add')])
+
+def info_on_mkdir(sbox):
+ """info on new dir with mkdir"""
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # create a new directory using svn mkdir
+ new_dir = os.path.join(wc_dir, 'new_dir')
+ svntest.main.run_svn(None, 'mkdir', new_dir)
+
+ uuid_regex = '[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}'
+
+ # check that we have a Repository Root and Repository UUID
+ expected = {'Path' : re.escape(new_dir),
+ 'URL' : '.*/new_dir',
+ 'Relative URL' : '.*/new_dir',
+ 'Repository Root' : '.*',
+ 'Node Kind' : 'directory',
+ 'Schedule' : 'add',
+ 'Repository UUID' : uuid_regex,
+ }
+
+ svntest.actions.run_and_verify_info([expected], new_dir)
+
+ # check XML info
+ exit_code, output, error = svntest.actions.run_and_verify_svn(None,
+ [], 'info',
+ new_dir,
+ '--xml')
+ verify_xml_elements(output,
+ [('entry', {'kind' : 'dir',
+ 'path' : new_dir,
+ 'revision' : 'Resource is not under version control.'}),
+ ('url', {}, '.*/new_dir'),
+ ('relative-url', {}, '.*/new_dir'),
+ ('root', {}, '.*'),
+ ('uuid', {}, uuid_regex),
+ ('depth', {}, 'infinity'),
+ ('schedule', {}, 'add')])
+
+def info_wcroot_abspaths(sbox):
+ """wc root paths in 'svn info' output"""
+
+ def check_wcroot_paths(lines, wcroot_abspath):
+ "check that paths found on input lines beginning 'Path: ' are as expected"
+ path = None
+ target = None
+ for line in lines:
+ if line.startswith('Path: '):
+ target = line[6:].rstrip()
+ if line.startswith('Working Copy Root Path: '):
+ path = line[24:].rstrip()
+ if target is not None and path is not None:
+ break
+
+ if target is None:
+ target = "(UNKNOWN)"
+
+ if path is None:
+ logger.warn("No WC root path for '%s'", target)
+ raise svntest.Failure
+
+ if path != wcroot_abspath:
+ logger.warn("For target '%s'...", target)
+ logger.warn(" Reported WC root path: %s", path)
+ logger.warn(" Expected WC root path: %s", wcroot_abspath)
+ raise svntest.Failure
+
+ sbox.build(read_only=True)
+ exit_code, output, errput = svntest.main.run_svn(None, 'info', '-R', sbox.wc_dir)
+ check_wcroot_paths(output, os.path.abspath(sbox.wc_dir))
+
+def info_url_special_characters(sbox):
+ """special characters in svn info URL"""
+ sbox.build(create_wc = False)
+ wc_dir = sbox.wc_dir
+
+ special_urls = [sbox.repo_url + '/A' + '/%2E',
+ sbox.repo_url + '%2F' + 'A']
+
+ expected = {'Path' : 'A',
+ 'Repository Root' : re.escape(sbox.repo_url),
+ 'Revision' : '1',
+ 'Node Kind' : 'dir',
+ }
+
+ for url in special_urls:
+ svntest.actions.run_and_verify_info([expected], url)
+
+def info_multiple_targets(sbox):
+ "info multiple targets"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ def multiple_wc_targets():
+ "multiple wc targets"
+
+ alpha = sbox.ospath('A/B/E/alpha')
+ beta = sbox.ospath('A/B/E/beta')
+ non_existent_path = os.path.join(wc_dir, 'non-existent')
+
+ # All targets are existing
+ svntest.actions.run_and_verify_svn2(None, [],
+ 0, 'info', alpha, beta)
+
+ # One non-existing target
+ expected_err = ".*W155010.*\n\n.*E200009.*"
+ expected_err_re = re.compile(expected_err, re.DOTALL)
+
+ exit_code, output, error = svntest.main.run_svn(1, 'info', alpha,
+ non_existent_path, beta)
+
+ # Verify error
+ if not expected_err_re.match("".join(error)):
+ raise svntest.Failure('info failed: expected error "%s", but received '
+ '"%s"' % (expected_err, "".join(error)))
+
+ def multiple_url_targets():
+ "multiple url targets"
+
+ alpha = sbox.repo_url + '/A/B/E/alpha'
+ beta = sbox.repo_url + '/A/B/E/beta'
+ non_existent_url = sbox.repo_url + '/non-existent'
+
+ # All targets are existing
+ svntest.actions.run_and_verify_svn2(None, [],
+ 0, 'info', alpha, beta)
+
+ # One non-existing target
+ expected_err = ".*W170000.*\n\n.*E200009.*"
+ expected_err_re = re.compile(expected_err, re.DOTALL)
+
+ exit_code, output, error = svntest.main.run_svn(1, 'info', alpha,
+ non_existent_url, beta)
+
+ # Verify error
+ if not expected_err_re.match("".join(error)):
+ raise svntest.Failure('info failed: expected error "%s", but received '
+ '"%s"' % (expected_err, "".join(error)))
+ # Test one by one
+ multiple_wc_targets()
+ multiple_url_targets()
+
+def info_repos_root_url(sbox):
+ """verify values for repository root"""
+ sbox.build(create_wc = False)
+ wc_dir = sbox.wc_dir
+
+ expected_info = [
+ {
+ 'Path' : re.escape(os.path.basename(sbox.repo_dir)),
+ 'Repository Root' : re.escape(sbox.repo_url),
+ 'URL' : re.escape(sbox.repo_url),
+ 'Relative URL' : '\^/', # escape ^ -- this isn't a regexp
+ 'Revision' : '1',
+ 'Node Kind' : 'directory',
+ 'Last Changed Rev' : '1',
+ },
+ {
+ 'Path' : 'iota',
+ 'Name' : 'iota',
+ 'Repository Root' : re.escape(sbox.repo_url),
+ 'URL' : re.escape(sbox.repo_url + '/iota'),
+ 'Relative URL' : '\^/iota', # escape ^ -- this isn't a regexp
+ 'Revision' : '1',
+ 'Node Kind' : 'file',
+ 'Last Changed Rev' : '1',
+ }
+ ]
+
+ svntest.actions.run_and_verify_info(expected_info, sbox.repo_url,
+ '--depth', 'files')
+
+@Issue(3787)
+def info_show_exclude(sbox):
+ "tests 'info --depth' variants on excluded node"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A_path = os.path.join(wc_dir, 'A')
+ iota = os.path.join(wc_dir, 'iota')
+ svntest.main.run_svn(None, 'up', '--set-depth', 'exclude', A_path)
+ wc_uuid = svntest.actions.get_wc_uuid(wc_dir)
+
+ expected_info = [{
+ 'Path' : re.escape(wc_dir),
+ 'Repository Root' : sbox.repo_url,
+ 'Repository UUID' : wc_uuid,
+ }]
+
+ svntest.actions.run_and_verify_info(expected_info, '--depth', 'empty',
+ wc_dir)
+
+ expected_info = [{
+ 'Path' : '.*%sA' % re.escape(os.sep),
+ 'Repository Root' : sbox.repo_url,
+ 'Repository UUID' : wc_uuid,
+ 'Depth' : 'exclude',
+ }]
+
+ svntest.actions.run_and_verify_info(expected_info, '--depth',
+ 'empty', A_path)
+ svntest.actions.run_and_verify_info(expected_info, '--depth',
+ 'infinity', A_path)
+ svntest.actions.run_and_verify_info(expected_info, '--depth',
+ 'immediates', A_path)
+
+ expected_info = [{
+ 'Path' : '.*%siota' % re.escape(os.sep),
+ 'Repository Root' : sbox.repo_url,
+ 'Repository UUID' : wc_uuid,
+ }]
+ svntest.main.run_svn(None, 'up', '--set-depth', 'exclude', iota)
+ svntest.actions.run_and_verify_info(expected_info, iota)
+
+ # And now get iota back, to allow testing other states
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(status='A '),
+ })
+
+ expected_status = svntest.wc.State(iota, {
+ '' : Item(status=' ', wc_rev='1')
+ })
+
+ svntest.actions.run_and_verify_update(iota,
+ expected_output, None, expected_status)
+
+ sbox.simple_rm('iota')
+ sbox.simple_commit()
+
+ expected_error = 'svn: E200009: Could not display info for all targets.*'
+
+ # Expect error on iota (status = not-present)
+ svntest.actions.run_and_verify_svn([], expected_error, 'info', iota)
+
+ sbox.simple_update()
+
+ # Expect error on iota (unversioned)
+ svntest.actions.run_and_verify_svn([], expected_error, 'info', iota)
+
+@Issue(3998)
+def binary_tree_conflict(sbox):
+ "svn info shouldn't crash on conflict"
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ svntest.main.run_svn(binary_mime_type_on_text_file_warning,
+ 'propset', 'svn:mime-type', 'binary/octet-stream',
+ sbox.ospath('iota'))
+ sbox.simple_commit()
+
+ iota = sbox.ospath('iota')
+
+ svntest.main.file_write(iota, 'something-else')
+ sbox.simple_commit()
+
+ svntest.main.file_write(iota, 'third')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(status='C '),
+ })
+ expected_status = svntest.wc.State(iota, {
+ '' : Item(status='C ', wc_rev='2')
+ })
+ svntest.actions.run_and_verify_update(iota,
+ expected_output, None, expected_status,
+ [], False,
+ iota, '-r', '2')
+
+ expected_info = [{
+ 'Path' : '%s' % re.escape(iota),
+ 'Conflict Previous Base File' : re.escape(iota + '.r3'),
+ 'Conflict Current Base File' : re.escape(iota + '.r2'),
+ }]
+ svntest.actions.run_and_verify_info(expected_info, iota)
+
+ expected_info = [{
+ 'Path' : '%s' % re.escape(wc_dir),
+ }]
+ svntest.actions.run_and_verify_info(expected_info, wc_dir)
+
+def relpath_escaping(sbox):
+ "relpath escaping should be usable as-is"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ name = 'path with space, +, % and #'
+ name2 = 'path with %20'
+ sbox.simple_copy('iota', name)
+ sbox.simple_copy('iota', name2)
+ sbox.simple_commit()
+
+ testpath = sbox.ospath(name)
+
+ expected = {'Path' : re.escape(testpath),
+ 'URL' : '.*/path.*with.*space.*',
+ 'Relative URL' : '.*/path.*with.*space.*',
+ }
+
+ svntest.actions.run_and_verify_info([expected], sbox.ospath(name))
+
+ info = svntest.actions.run_and_parse_info(sbox.ospath(name), sbox.ospath(name2))
+
+ # And now verify that the returned URL and relative url are usable
+
+ # Also test the local path (to help resolving the relative path) and an
+ # unescaped path which the client should automatically encode
+ svntest.actions.run_and_verify_svn(None, [], 'info',
+ info[0]['Relative URL'],
+ info[0]['URL'],
+ testpath,
+ '^/' + name,
+
+ info[1]['Relative URL'],
+ info[1]['URL'])
+
+ # And now do the same thing with a the file external handling
+ sbox.simple_propset('svn:externals',
+ info[0]['Relative URL'] + " f1\n" +
+ info[0]['URL'] + " f2\n" +
+ '"^/' + name + "\" f3\n" +
+
+ info[1]['Relative URL'] + " g1\n" +
+ info[1]['URL'] + " g2\n",
+ ''
+ )
+
+ # And now we expect to see 3 file externals
+ expected_output = svntest.wc.State(wc_dir, {
+ 'f1' : Item(status='A '),
+ 'f2' : Item(status='A '),
+ 'f3' : Item(status='A '),
+
+ 'g1' : Item(status='A '),
+ 'g2' : Item(status='A '),
+ })
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output, None, None)
+
+def node_hidden_info(sbox):
+ "fetch svn info on 'hidden' nodes"
+
+ sbox.build()
+
+ sbox.simple_rm('A/B/E/alpha')
+ sbox.simple_commit()
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '--set-depth', 'exclude',
+ sbox.ospath('A/B/E/beta'))
+
+ sbox.simple_copy('A/B/E', 'E')
+
+ # Running info on BASE not-present fails
+ expected_err = '.*(E|W)155010: The node \'.*alpha\' was not found.*'
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'info', sbox.ospath('A/B/E/alpha'))
+
+ expected_info = [
+ {
+ 'Path': re.escape(sbox.ospath('A/B/E/beta')),
+ 'Schedule': 'normal',
+ 'Depth': 'exclude',
+ 'Node Kind': 'file',
+ },
+ {
+ 'Path': re.escape(sbox.ospath('E/alpha')),
+ 'Schedule': 'delete',
+ 'Depth': 'exclude',
+ 'Node Kind': 'unknown',
+ },
+ {
+ 'Path': re.escape(sbox.ospath('E/beta')),
+ 'Schedule': 'normal',
+ 'Depth': 'exclude',
+ 'Node Kind': 'file',
+ }
+ ]
+
+ svntest.actions.run_and_verify_info(expected_info,
+ sbox.ospath('A/B/E/beta'),
+ sbox.ospath('E/alpha'),
+ sbox.ospath('E/beta'))
+
+
+def info_item_simple(sbox):
+ "show one info item"
+
+ sbox.build(read_only=True)
+ svntest.actions.run_and_verify_svn(
+ ['1'], [],
+ 'info', '--show-item=revision', '--no-newline',
+ sbox.ospath(''))
+
+
+def info_item_simple_multiple(sbox):
+ "show one info item with multiple targets"
+
+ sbox.build(read_only=True)
+
+ svntest.actions.run_and_verify_svn(
+ r'^jrandom\s+\S+(/|\\)info_tests-\d+((/|\\)[^/\\]+)?$', [],
+ 'info', '--show-item=last-changed-author',
+ '--depth=immediates', sbox.ospath(''))
+
+ svntest.actions.run_and_verify_svn(
+ r'^1\s+\S+(/|\\)info_tests-\d+(/|\\)[^/\\]+$', [],
+ 'info', '--show-item=last-changed-revision',
+ sbox.ospath('A'), sbox.ospath('iota'))
+
+
+def info_item_url(sbox):
+ "show one info item with URL targets"
+
+ sbox.build(create_wc=False, read_only=True)
+
+ svntest.actions.run_and_verify_svn(
+ '1', [],
+ 'info', '--show-item=last-changed-revision',
+ sbox.repo_url)
+
+
+ svntest.actions.run_and_verify_svn(
+ r'^1\s+[^/:]+://.+/repos/[^/]+$', [],
+ 'info', '--show-item=last-changed-revision',
+ sbox.repo_url + '/A', sbox.repo_url + '/iota')
+
+
+ # Empty working copy root on URL targets
+ svntest.actions.run_and_verify_svn(
+ '', [],
+ 'info', '--show-item=wc-root',
+ sbox.repo_url)
+
+
+def info_item_uncommmitted(sbox):
+ "show one info item on uncommitted targets"
+
+ sbox.build()
+
+ svntest.main.file_write(sbox.ospath('newfile'), 'newfile')
+ sbox.simple_add('newfile')
+ sbox.simple_mkdir('newdir')
+
+ svntest.actions.run_and_verify_svn(
+ '', [],
+ 'info', '--show-item=last-changed-revision',
+ sbox.ospath('newfile'))
+
+ svntest.actions.run_and_verify_svn(
+ '', [],
+ 'info', '--show-item=last-changed-author',
+ sbox.ospath('newdir'))
+
+ svntest.actions.run_and_verify_svn(
+ r'\s+\S+(/|\\)new(file|dir)', [],
+ 'info', '--show-item=last-changed-date',
+ sbox.ospath('newfile'), sbox.ospath('newdir'))
+
+ svntest.actions.run_and_verify_svn(
+ r'\^/new(file|dir)\s+\S+(/|\\)new(file|dir)', [],
+ 'info', '--show-item=relative-url',
+ sbox.ospath('newfile'), sbox.ospath('newdir'))
+
+
+def info_item_failures(sbox):
+ "failure modes of 'svn info --show-item'"
+
+ sbox.build(read_only=True)
+
+ svntest.actions.run_and_verify_svn(
+ None, r'.*E200009:.*',
+ 'info', '--show-item=revision',
+ sbox.ospath('not-there'))
+
+ svntest.actions.run_and_verify_svn(
+ None, r".*E205000: .*; did you mean 'wc-root'\?",
+ 'info', '--show-item=root',
+ sbox.ospath(''))
+
+ svntest.actions.run_and_verify_svn(
+ None, (r".*E205000: --show-item is not valid in --xml mode"),
+ 'info', '--show-item=revision', '--xml',
+ sbox.ospath(''))
+
+ svntest.actions.run_and_verify_svn(
+ None, (r".*E205000: --incremental is only valid in --xml mode"),
+ 'info', '--show-item=revision', '--incremental',
+ sbox.ospath(''))
+
+ svntest.actions.run_and_verify_svn(
+ None, (r".*E205000: --no-newline is only available.*"),
+ 'info', '--show-item=revision', '--no-newline',
+ sbox.ospath('A'), sbox.ospath('iota'))
+
+
+########################################################################
+# Run the tests
+
+# list all tests here, starting with None:
+test_list = [ None,
+ info_with_tree_conflicts,
+ info_on_added_file,
+ info_on_mkdir,
+ info_wcroot_abspaths,
+ info_url_special_characters,
+ info_multiple_targets,
+ info_repos_root_url,
+ info_show_exclude,
+ binary_tree_conflict,
+ relpath_escaping,
+ node_hidden_info,
+ info_item_simple,
+ info_item_simple_multiple,
+ info_item_url,
+ info_item_uncommmitted,
+ info_item_failures,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/input_validation_tests.py b/subversion/tests/cmdline/input_validation_tests.py
new file mode 100755
index 0000000..e1d74a6
--- /dev/null
+++ b/subversion/tests/cmdline/input_validation_tests.py
@@ -0,0 +1,333 @@
+#!/usr/bin/env python
+#
+# input_validation_tests.py: testing input validation
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+import os
+import svntest
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+
+######################################################################
+# Utilities
+
+# Common URL targets to pass where only path arguments are expected.
+_invalid_wc_path_targets = ['file:///', '^/']
+
+def run_and_verify_svn_in_wc(sbox, expected_stderr, *varargs):
+ """Like svntest.actions.run_and_verify_svn, but temporarily
+ changes the current working directory to the sandboxes'
+ working copy and only checks the expected error output."""
+
+ wc_dir = sbox.wc_dir
+ old_dir = os.getcwd()
+ try:
+ os.chdir(wc_dir)
+ svntest.actions.run_and_verify_svn([], expected_stderr,
+ *varargs)
+ finally:
+ os.chdir(old_dir)
+
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+#----------------------------------------------------------------------
+
+def invalid_wcpath_add(sbox):
+ "non-working copy paths for 'add'"
+ sbox.build(read_only=True)
+ for target in _invalid_wc_path_targets:
+ run_and_verify_svn_in_wc(sbox, "svn:.*is not a local path", 'add', target)
+
+def invalid_wcpath_changelist(sbox):
+ "non-working copy paths for 'changelist'"
+ sbox.build(read_only=True)
+ for target in _invalid_wc_path_targets:
+ run_and_verify_svn_in_wc(sbox, "svn:.*is not a local path", 'changelist',
+ 'foo', target)
+ run_and_verify_svn_in_wc(sbox, "svn:.*is not a local path", 'changelist',
+ '--remove', target)
+
+def invalid_wcpath_cleanup(sbox):
+ "non-working copy paths for 'cleanup'"
+ sbox.build(read_only=True)
+ for target in _invalid_wc_path_targets:
+ run_and_verify_svn_in_wc(sbox, "svn:.*is not a local path", 'cleanup',
+ target)
+
+def invalid_wcpath_commit(sbox):
+ "non-working copy paths for 'commit'"
+ sbox.build(read_only=True)
+ for target in _invalid_wc_path_targets:
+ run_and_verify_svn_in_wc(sbox, "svn: E205000: '.*' is not a local path", 'commit', target)
+
+def invalid_copy_sources(sbox):
+ "invalid sources for 'copy'"
+ sbox.build(read_only=True)
+ for (src1, src2) in [("iota", "^/"), ("^/", "iota"), ("file://", "iota")]:
+ run_and_verify_svn_in_wc(sbox, "svn: E200007: Cannot mix repository and working " +
+ "copy sources", 'copy', src1, src2, "A")
+
+def invalid_copy_target(sbox):
+ "invalid target for 'copy'"
+ sbox.build(read_only=True)
+ mu_path = os.path.join('A', 'mu')
+ C_path = os.path.join('A', 'C')
+ run_and_verify_svn_in_wc(sbox, "svn: E155(007|010): Path '.*' is not a directory",
+ 'copy', mu_path, C_path, "iota")
+
+def invalid_delete_targets(sbox):
+ "invalid targets for 'delete'"
+ sbox.build(read_only=True)
+ for (target1, target2) in [("iota", "^/"), ("file://", "iota")]:
+ run_and_verify_svn_in_wc(sbox, "svn: E200009: Cannot mix repository and working "
+ "copy targets", 'delete', target1, target2)
+
+def invalid_diff_targets(sbox):
+ "invalid targets for 'diff'"
+ sbox.build(read_only=True)
+ for (target1, target2, target3) in [("iota", "^/", "A/mu"), ("file://", "iota", "A/mu")]:
+ run_and_verify_svn_in_wc(sbox, "svn: E200009: Cannot mix repository and working "
+ "copy targets", 'diff', target1, target2, target3)
+
+def invalid_export_targets(sbox):
+ "invalid targets for 'export'"
+ sbox.build(read_only=True)
+ run_and_verify_svn_in_wc(sbox, "svn: (E000017|E720183): Can't create directory '.*iota':.*",
+ 'export', '.', 'iota')
+ for target in ["^/", "file://"]:
+ run_and_verify_svn_in_wc(sbox, "svn:.*is not a local path",
+ 'export', '.', target)
+
+def invalid_import_args(sbox):
+ "invalid arguments for 'import'"
+ sbox.build(read_only=True)
+ run_and_verify_svn_in_wc(sbox, "svn:.*is not a local path",
+ 'import', '^/', '^/')
+ run_and_verify_svn_in_wc(sbox, "svn:.*is not a local path",
+ 'import', '^/', 'iota')
+ run_and_verify_svn_in_wc(sbox, "svn: E205000: Invalid URL 'iota'",
+ 'import', 'iota', 'iota')
+
+def invalid_log_targets(sbox):
+ "invalid targets for 'log'"
+ sbox.build(read_only=True)
+ for (target1, target2) in [('^/', '/a/b/c'), ('^/', '^/'), ('^/', 'file://')]:
+ run_and_verify_svn_in_wc(sbox, "svn: E205000: Only relative paths can be " +
+ "specified after a URL for 'svn log', but.*is " +
+ "not a relative path", 'log', target1, target2)
+
+def invalid_merge_args(sbox):
+ "invalid arguments for 'merge'"
+ sbox.build(read_only=True)
+ for args in [('iota', 'A/mu@HEAD'),
+ ('iota@BASE', 'A/mu@HEAD')]:
+ run_and_verify_svn_in_wc(sbox, "svn: E195002: .* working copy .* revision",
+ 'merge', *args)
+ for args in [(sbox.repo_url, 'A@1', 'A'),
+ ('^/A', 'A@HEAD', 'A'),
+ ('A@HEAD', '^/A', 'A'),
+ ('A@HEAD', '^/A')]:
+ run_and_verify_svn_in_wc(sbox, "svn: E205000: Merge sources must both "
+ "be either paths or URLs", 'merge', *args)
+ run_and_verify_svn_in_wc(sbox, "svn: E155010: .* was not found",
+ 'merge', '^/@0', '^/@1', 'nonexistent')
+ run_and_verify_svn_in_wc(sbox, "svn: E205000: Too many arguments given",
+ 'merge', '-c42', '^/A/B', '^/A/C', 'iota')
+ run_and_verify_svn_in_wc(sbox, "svn: E205000: Cannot specify a revision range with" +
+ " two URLs", 'merge', '-c42', '^/mu', '^/')
+ run_and_verify_svn_in_wc(sbox, "svn: E155010: .* was not found",
+ 'merge', '-c42', '^/mu', 'nonexistent')
+
+def invalid_wcpath_upgrade(sbox):
+ "non-working copy paths for 'upgrade'"
+ sbox.build(read_only=True)
+ for target in _invalid_wc_path_targets:
+ run_and_verify_svn_in_wc(sbox, "svn:.*is not a local path", 'upgrade',
+ target, target)
+
+def invalid_resolve_targets(sbox):
+ "non-working copy paths for 'resolve'"
+ sbox.build(read_only=True)
+ for target in _invalid_wc_path_targets:
+ run_and_verify_svn_in_wc(sbox, "svn:.*is not a local path", 'resolve',
+ '--accept', 'base', target)
+
+def invalid_resolved_targets(sbox):
+ "non-working copy paths for 'resolved'"
+ sbox.build(read_only=True)
+ for target in _invalid_wc_path_targets:
+ run_and_verify_svn_in_wc(sbox, "svn:.*is not a local path", 'resolved',
+ target)
+
+def invalid_revert_targets(sbox):
+ "non-working copy paths for 'revert'"
+ sbox.build(read_only=True)
+ for target in _invalid_wc_path_targets:
+ run_and_verify_svn_in_wc(sbox, "svn:.*is not a local path", 'revert',
+ target)
+
+def invalid_lock_targets(sbox):
+ "wc paths and repo URL target mixture for 'lock'"
+ sbox.build(read_only=True)
+ for (target1, target2) in [("iota", "^/"), ("file://", "iota")]:
+ run_and_verify_svn_in_wc(sbox, "svn: E200009: Cannot mix repository and working "
+ "copy targets", 'lock', target1, target2)
+
+def invalid_unlock_targets(sbox):
+ "wc paths and repo URL target mixture for 'unlock'"
+ sbox.build(read_only=True)
+ for (target1, target2) in [("iota", "^/"), ("file://", "iota")]:
+ run_and_verify_svn_in_wc(sbox, "svn: E200009: Cannot mix repository and working "
+ "copy targets", 'unlock', target1, target2)
+
+def invalid_status_targets(sbox):
+ "non-working copy paths for 'status'"
+ sbox.build(read_only=True)
+ for target in _invalid_wc_path_targets:
+ run_and_verify_svn_in_wc(sbox, "svn:.*is not a local path", 'status',
+ target)
+
+def invalid_patch_targets(sbox):
+ "non-working copy paths for 'patch'"
+ sbox.build(read_only=True)
+ for (target1, target2) in [("foo", "^/"), ("^/", "^/"), ("^/", "foo")]:
+ run_and_verify_svn_in_wc(sbox, "svn:.*is not a local path", 'patch',
+ target1, target2)
+
+def invalid_switch_targets(sbox):
+ "non-working copy paths for 'switch'"
+ sbox.build(read_only=True)
+ run_and_verify_svn_in_wc(sbox, "svn:.*is not a local path", 'switch',
+ "^/", "^/")
+
+def invalid_relocate_targets(sbox):
+ "non-working copy paths for 'relocate'"
+ sbox.build(read_only=True)
+ run_and_verify_svn_in_wc(sbox, "svn:.*is not a local path", 'relocate',
+ "^/", "^/", "^/")
+
+# See also basic_tests.py:basic_mkdir_mix_targets(), which tests
+# the same thing the other way around.
+def invalid_mkdir_targets(sbox):
+ "invalid targets for 'mkdir'"
+ sbox.build(read_only=True)
+ run_and_verify_svn_in_wc(sbox, "svn: E200009: Cannot mix repository and working "
+ "copy targets", 'mkdir', "folder", "^/folder")
+
+def invalid_update_targets(sbox):
+ "non-working copy paths for 'update'"
+ sbox.build(read_only=True)
+ run_and_verify_svn_in_wc(sbox, "svn:.*is not a local path", 'update',
+ "^/")
+
+def delete_repos_root(sbox):
+ "do stupid things with the repository root"
+
+ sbox.build(read_only=True)
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ expected_status.tweak('A/D/G', switched='S')
+ expected_status.remove('A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau')
+ svntest.actions.run_and_verify_switch(sbox.wc_dir, sbox.ospath('A/D/G'),
+ repo_url,
+ None, None, expected_status,
+ [], False,
+ '--set-depth', 'empty', '--ignore-ancestry')
+
+ expected_status.tweak('A/B/F', switched='S')
+ svntest.actions.run_and_verify_switch(sbox.wc_dir, sbox.ospath('A/B/F'),
+ repo_url,
+ None, None, expected_status,
+ [], False,
+ '--depth', 'empty', '--ignore-ancestry')
+
+ # Delete the wcroot (which happens to be the repository root)
+ expected_error = 'svn: E155035: \'.*\' is the root of a working copy ' + \
+ 'and cannot be deleted'
+ svntest.actions.run_and_verify_svn([], expected_error,
+ 'rm', wc_dir)
+
+ # This should produce some error, because we can never commit this
+ expected_error = '.*repository root.*'
+ svntest.actions.run_and_verify_svn(None, expected_error,
+ 'mv', sbox.ospath('A/D/G'),
+ sbox.ospath('Z'))
+
+ # And this currently fails with another nasty error about a wc-lock
+ expected_error = '.*repository root.*'
+ svntest.actions.run_and_verify_svn([], expected_error,
+ 'rm', sbox.ospath('A/B/F'))
+
+########################################################################
+# Run the tests
+
+# list all tests here, starting with None:
+test_list = [ None,
+ invalid_wcpath_add,
+ invalid_wcpath_changelist,
+ invalid_wcpath_cleanup,
+ invalid_wcpath_commit,
+ invalid_copy_sources,
+ invalid_copy_target,
+ invalid_delete_targets,
+ invalid_diff_targets,
+ invalid_export_targets,
+ invalid_import_args,
+ invalid_log_targets,
+ invalid_merge_args,
+ invalid_wcpath_upgrade,
+ invalid_resolve_targets,
+ invalid_resolved_targets,
+ invalid_revert_targets,
+ invalid_lock_targets,
+ invalid_unlock_targets,
+ invalid_status_targets,
+ invalid_patch_targets,
+ invalid_switch_targets,
+ invalid_relocate_targets,
+ invalid_mkdir_targets,
+ invalid_update_targets,
+ delete_repos_root,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/iprop_authz_tests.py b/subversion/tests/cmdline/iprop_authz_tests.py
new file mode 100755
index 0000000..835cd37
--- /dev/null
+++ b/subversion/tests/cmdline/iprop_authz_tests.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python
+#
+# iprop_authz_tests.py: iprop tests that need to write an authz file
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os
+
+# Our testing module
+import svntest
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+
+from svntest.main import write_restrictive_svnserve_conf
+from svntest.main import write_authz_file
+
+######################################################################
+# Tests
+
+#----------------------------------------------------------------------
+# Property inheritance with read restrictions on parent paths.
+@Skip(svntest.main.is_ra_type_file)
+def iprops_authz(sbox):
+ "property inheritance and read restricted parents"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # r2 - Set properties at various levels.
+ sbox.simple_propset('RootProp', 'Root-Prop-Val', '.')
+ sbox.simple_propset('BranchProp', 'Branch-Prop-Val', 'A')
+ sbox.simple_propset('RandomProp1', 'Random-Prop-Val-1', 'A/D')
+ sbox.simple_propset('RandomProp2', 'Random-Prop-Val-2', 'A/D/H')
+ sbox.simple_propset('FileProp1', 'File-Prop-Val-1', 'A/D/H/psi')
+ svntest.main.run_svn(None, 'commit', '-m', 'Add some properties',
+ wc_dir)
+
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ # Check that a restricted user can only see inherited props from
+ # parent paths which he has read access to.
+
+ # Grant access only to ^/A/D/H/psi. No inherited properties should
+ # be shown.
+ write_authz_file(sbox, {
+ "/A/D/H/psi" : svntest.main.wc_author + "=rw",})
+
+ expected_iprops = {}
+ expected_explicit_props = {'FileProp1' : 'File-Prop-Val-1'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D/H/psi', expected_iprops, expected_explicit_props)
+
+ # Grant access to ^/A/D/H/psi and the repos root but not the intermediate
+ # paths between the two.
+ write_authz_file(sbox, {
+ "/" : svntest.main.wc_author + "=rw",
+ "/A" : svntest.main.wc_author + "=",
+ "/A/D/H/psi" : svntest.main.wc_author + "=rw",})
+
+ expected_iprops = {
+ sbox.repo_url : {'RootProp' : 'Root-Prop-Val'}}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D/H/psi', expected_iprops, expected_explicit_props)
+
+ # Grant access to ^/A/D/H/psi, the repos root, and the intermediate path
+ # ^/A/D. Everything else is still blocked.
+ write_authz_file(sbox, {
+ "/" : svntest.main.wc_author + "=rw",
+ "/A" : svntest.main.wc_author + "=",
+ "/A/D" : svntest.main.wc_author + "=rw",
+ "/A/D/H" : svntest.main.wc_author + "=",
+ "/A/D/H/psi" : svntest.main.wc_author + "=rw",})
+
+ expected_iprops = {
+ sbox.repo_url : {'RootProp' : 'Root-Prop-Val'},
+ sbox.repo_url + '/A/D': {'RandomProp1' : 'Random-Prop-Val-1'}}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D/H/psi', expected_iprops, expected_explicit_props)
+
+ # Grant read access to everything except ^/A/D/H/psi. In this case we
+ # should get an authorization failed error. It doesn't matter that we can
+ # read the parents.
+ write_authz_file(sbox, {
+ "/" : svntest.main.wc_author + "=rw",
+ "/A/D/H/psi" : svntest.main.wc_author + "=",})
+ if sbox.repo_url.startswith("http"):
+ expected_err = ".*[Ff]orbidden.*"
+ else:
+ expected_err = ".*svn: E170001: Authorization failed.*"
+ svntest.actions.run_and_verify_svn(
+ None, expected_err, 'proplist', '-v',
+ '--show-inherited-props', sbox.repo_url + '/A/D/H/psi')
+
+########################################################################
+# Run the tests
+
+# list all tests here, starting with None:
+test_list = [ None,
+ iprops_authz,
+ ]
+
+serial_only = True
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+### End of file.
diff --git a/subversion/tests/cmdline/iprop_tests.py b/subversion/tests/cmdline/iprop_tests.py
new file mode 100755
index 0000000..bff8a38
--- /dev/null
+++ b/subversion/tests/cmdline/iprop_tests.py
@@ -0,0 +1,1692 @@
+#!/usr/bin/env python
+#
+# iprop_tests.py: testing versioned inherited properties
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os
+
+# Our testing module
+import svntest
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+######################################################################
+# Tests
+
+#----------------------------------------------------------------------
+# Working property inheritance, uniform revision WC.
+def iprops_basic_working(sbox):
+ "basic inherited working properties"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Paths of note.
+ mu_path = sbox.ospath('A/mu')
+ D_path = sbox.ospath('A/D')
+ psi_path = sbox.ospath('A/D/H/psi')
+ iota_path = sbox.ospath('iota')
+ alpha_path = sbox.ospath('A/B/E/alpha')
+ G_path = sbox.ospath('A/D/G')
+ rho_path = sbox.ospath('A/D/G/rho')
+
+ sbox.simple_propset('RootProp1', 'Root-Prop-Val1', '.')
+ sbox.simple_propset('RootProp2', 'Root-Prop-Val2', '.')
+ sbox.simple_propset('DirProp2', 'Dir-Prop-Val-Root', '.')
+ sbox.simple_propset('FileProp1', 'File-Prop-Val1', 'iota')
+ sbox.simple_propset('FileProp2', 'File-Prop-Val2', 'A/D/H/psi')
+ sbox.simple_propset('DirProp1', 'Dir-Prop-Val1', 'A/D')
+ sbox.simple_propset('DirProp2', 'Dir-Prop-Val2', 'A/D')
+ sbox.simple_propset('DirProp3', 'Dir-Prop-Val3', 'A/D')
+ sbox.simple_propset('SomeProp', 'Some-Prop-Val1', 'A/D/G')
+ sbox.simple_propset('SomeProp', 'Some-Prop-Val2', 'A/D/G/rho')
+
+ ### Proplist Directory Targets
+
+ # Proplist directory target with only explicit props.
+ expected_iprops = {}
+ expected_explicit_props = {'DirProp2' : 'Dir-Prop-Val-Root',
+ 'RootProp1' : 'Root-Prop-Val1',
+ 'RootProp2' : 'Root-Prop-Val2'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ wc_dir, expected_iprops, expected_explicit_props)
+
+ # Proplist directory target with only inherited props.
+ expected_iprops = {wc_dir : {'DirProp2' : 'Dir-Prop-Val-Root',
+ 'RootProp1' : 'Root-Prop-Val1',
+ 'RootProp2' : 'Root-Prop-Val2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ alpha_path, expected_iprops, expected_explicit_props)
+
+ # Proplist directory target with inherited and explicit props.
+ expected_iprops = {wc_dir : {'RootProp1' : 'Root-Prop-Val1',
+ 'RootProp2' : 'Root-Prop-Val2',
+ 'DirProp2' : 'Dir-Prop-Val-Root'}}
+ expected_explicit_props = {'DirProp1' : 'Dir-Prop-Val1',
+ 'DirProp2' : 'Dir-Prop-Val2',
+ 'DirProp3' : 'Dir-Prop-Val3'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props)
+
+ ### Propget Directory Targets
+
+ # Propget directory target with only explicit props.
+ expected_iprops = {}
+ expected_explicit_props = {'RootProp2' : 'Root-Prop-Val2'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ wc_dir, expected_iprops, expected_explicit_props, 'RootProp2')
+
+ # Propget directory target with only inherited props.
+ expected_iprops = {wc_dir : {'RootProp2': 'Root-Prop-Val2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ alpha_path, expected_iprops, expected_explicit_props, 'RootProp2')
+
+ # Propget directory target with inherited and explicit props.
+ expected_iprops = {wc_dir : {'DirProp2' : 'Dir-Prop-Val-Root',}}
+ expected_explicit_props = {'DirProp2' : 'Dir-Prop-Val2'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'DirProp2')
+
+ ### Propget File Targets
+
+ # Propget file target with only explicit props.
+ expected_iprops = {}
+ expected_explicit_props = {'FileProp1' : 'File-Prop-Val1'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ iota_path, expected_iprops, expected_explicit_props, 'FileProp1')
+
+ # Propget file target with only inherited props.
+ expected_iprops = {wc_dir : {'RootProp2': 'Root-Prop-Val2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ alpha_path, expected_iprops, expected_explicit_props, 'RootProp2')
+
+ # Propget file target with inherited and explicit props.
+ expected_iprops = {G_path : {'SomeProp' : 'Some-Prop-Val1',}}
+ expected_explicit_props = {'SomeProp' : 'Some-Prop-Val2'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ rho_path, expected_iprops, expected_explicit_props, 'SomeProp')
+
+ ### Proplist File Targets
+
+ # Proplist file target with only inherited props.
+ expected_iprops = {wc_dir : {'DirProp2' : 'Dir-Prop-Val-Root',
+ 'RootProp1' : 'Root-Prop-Val1',
+ 'RootProp2' : 'Root-Prop-Val2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ mu_path, expected_iprops, expected_explicit_props)
+
+ # Proplist file target with inherited and explicit props.
+ expected_iprops = {wc_dir : {'RootProp1' : 'Root-Prop-Val1',
+ 'RootProp2' : 'Root-Prop-Val2',
+ 'DirProp2' : 'Dir-Prop-Val-Root'},
+ D_path : {'DirProp1' : 'Dir-Prop-Val1',
+ 'DirProp2' : 'Dir-Prop-Val2',
+ 'DirProp3' : 'Dir-Prop-Val3'}}
+ expected_explicit_props = {'FileProp2' : 'File-Prop-Val2'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ psi_path, expected_iprops, expected_explicit_props)
+
+ # Proplist file target with only explicit props.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', wc_dir)
+ expected_iprops = {}
+ expected_explicit_props = {'FileProp1' : 'File-Prop-Val1'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ iota_path, expected_iprops, expected_explicit_props)
+
+#----------------------------------------------------------------------
+# Property inheritance with repository targets.
+def iprops_basic_repos(sbox):
+ "basic inherited repository properties"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Paths of note.
+ D_path = sbox.ospath('A/D')
+ alpha_path = sbox.ospath('A/B/E/alpha')
+
+ sbox.simple_propset('FileProp1', 'File-Prop-Val1', 'iota')
+ sbox.simple_propset('FileProp2', 'File-Prop-Val2', 'A/D/H/psi')
+ sbox.simple_propset('SomeProp', 'Some-Prop-Val2', 'A/D/G/rho')
+ svntest.main.run_svn(None, 'commit', '-m', 'Add some file properties',
+ wc_dir)
+
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ sbox.simple_propset('RootProp1', 'Root-Prop-Val1', '.')
+ sbox.simple_propset('RootProp2', 'Root-Prop-Val2', '.')
+ sbox.simple_propset('DirProp2', 'Dir-Prop-Val-Root', '.')
+ sbox.simple_propset('DirProp1', 'Dir-Prop-Val1', 'A/D')
+ sbox.simple_propset('DirProp2', 'Dir-Prop-Val2', 'A/D')
+ sbox.simple_propset('DirProp3', 'Dir-Prop-Val3', 'A/D')
+ sbox.simple_propset('SomeProp', 'Some-Prop-Val1', 'A/D/G')
+ svntest.main.run_svn(None, 'commit', '-m', 'Add some dir properties',
+ wc_dir)
+
+ ### Proplist Directory Targets
+
+ # Proplist directory target with only explicit props.
+ expected_iprops = {}
+ expected_explicit_props = {'DirProp2' : 'Dir-Prop-Val-Root',
+ 'RootProp1' : 'Root-Prop-Val1',
+ 'RootProp2' : 'Root-Prop-Val2'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url, expected_iprops, expected_explicit_props)
+
+ # Proplist directory target with only inherited props.
+ expected_iprops = {sbox.repo_url : {'DirProp2' : 'Dir-Prop-Val-Root',
+ 'RootProp1' : 'Root-Prop-Val1',
+ 'RootProp2' : 'Root-Prop-Val2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/B/E/alpha', expected_iprops, expected_explicit_props)
+
+ # Proplist directory target with inherited and explicit props.
+ expected_iprops = {sbox.repo_url : {'RootProp1' : 'Root-Prop-Val1',
+ 'RootProp2' : 'Root-Prop-Val2',
+ 'DirProp2' : 'Dir-Prop-Val-Root'}}
+ expected_explicit_props = {'DirProp1' : 'Dir-Prop-Val1',
+ 'DirProp2' : 'Dir-Prop-Val2',
+ 'DirProp3' : 'Dir-Prop-Val3'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D', expected_iprops, expected_explicit_props)
+
+ ### Propget Directory Targets
+
+ # Propget directory target with only explicit props.
+ expected_iprops = {}
+ expected_explicit_props = {'RootProp2' : 'Root-Prop-Val2'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url, expected_iprops, expected_explicit_props, 'RootProp2')
+
+ # Propget directory target with only inherited props.
+ expected_iprops = {sbox.repo_url : {'RootProp2': 'Root-Prop-Val2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/B/E/alpha', expected_iprops, expected_explicit_props,
+ 'RootProp2')
+
+ # Propget directory target with inherited and explicit props.
+ expected_iprops = {sbox.repo_url : {'DirProp2' : 'Dir-Prop-Val-Root',}}
+ expected_explicit_props = {'DirProp2' : 'Dir-Prop-Val2'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D', expected_iprops, expected_explicit_props,
+ 'DirProp2')
+
+ ### Proplist File Targets
+
+ # Proplist file target with only explicit props.
+ expected_iprops = {}
+ expected_explicit_props = {'FileProp1' : 'File-Prop-Val1'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/iota', expected_iprops, expected_explicit_props,
+ 'FileProp1', 2)
+
+ # Proplist file target with only inherited props.
+ expected_iprops = {sbox.repo_url : {'RootProp1' : 'Root-Prop-Val1'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/mu', expected_iprops, expected_explicit_props,
+ 'RootProp1')
+
+ # Proplist file target with inherited and explicit props.
+ expected_iprops = {sbox.repo_url : {'RootProp1' : 'Root-Prop-Val1',
+ 'RootProp2' : 'Root-Prop-Val2',
+ 'DirProp2' : 'Dir-Prop-Val-Root'},
+ sbox.repo_url + '/A/D' : {'DirProp1' : 'Dir-Prop-Val1',
+ 'DirProp2' : 'Dir-Prop-Val2',
+ 'DirProp3' : 'Dir-Prop-Val3'}}
+ expected_explicit_props = {'FileProp2' : 'File-Prop-Val2'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D/H/psi', expected_iprops, expected_explicit_props)
+
+ ### Propget File Targets
+
+ # Propget file target with only explicit props.
+ expected_iprops = {}
+ expected_explicit_props = {'FileProp1' : 'File-Prop-Val1'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/iota', expected_iprops, expected_explicit_props,
+ 'FileProp1', 2)
+
+ # Propget file target with only inherited props.
+ expected_iprops = {sbox.repo_url : {'RootProp2': 'Root-Prop-Val2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/B/E/alpha', expected_iprops, expected_explicit_props,
+ 'RootProp2')
+
+ # Propget file target with inherited and explicit props.
+ expected_iprops = {sbox.repo_url + '/A/D/G' : {
+ 'SomeProp' : 'Some-Prop-Val1',}}
+ expected_explicit_props = {'SomeProp' : 'Some-Prop-Val2'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D/G/rho', expected_iprops, expected_explicit_props,
+ 'SomeProp')
+
+#----------------------------------------------------------------------
+# Property inheritance in a WC with switched subtrees.
+def iprops_switched_subtrees(sbox):
+ "inherited properties in switched subtrees"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Paths of note.
+ branch2_path = sbox.ospath('branch2')
+ branch2_B_path = sbox.ospath('branch2/B')
+ branch2_lambda_path = sbox.ospath('branch2/B/lambda')
+
+ # r2-3 - Create two branches
+ svntest.main.run_svn(None, 'copy', sbox.repo_url + '/A',
+ sbox.repo_url + '/branch1', '-m', 'Make branch1')
+
+ svntest.main.run_svn(None, 'copy', sbox.repo_url + '/A',
+ sbox.repo_url + '/branch2', '-m', 'Make branch2')
+
+ # Create a root property and two branch properties
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ sbox.simple_propset('Root-Prop-1', 'Root-Prop-Val1', '.')
+ sbox.simple_propset('Branch-Name', 'Feature #1', 'branch1')
+ sbox.simple_propset('Branch-Name', 'Feature #2', 'branch2')
+
+ # Switch a subtree of branch2 to branch1:
+ svntest.main.run_svn(None, 'switch', sbox.repo_url + '/branch1/B',
+ branch2_B_path)
+
+ # Check for inherited props on branch2/B/lambda. Since the prop changes
+ # made above have not been committed, there should be none.
+ expected_iprops = {}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ branch2_B_path, expected_iprops, expected_explicit_props)
+
+ # r4 - Commit the prop changes made above.
+ svntest.main.run_svn(None, 'commit', '-m', 'Add some dir properties',
+ wc_dir)
+
+ # Again check for inherited props on branch2/B/lambda. And again there
+ # should be none because branch2/B is switched to ^/branch1/B@3.
+ expected_iprops = {}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ branch2_lambda_path, expected_iprops, expected_explicit_props)
+
+ # Now update the WC, now branch2/B is switched to ^/branch1/B@4
+ # which does inherit properties from ^/branch1 and ^/. The inherited
+ # properties cache should be updated to reflect this when asking what
+ # properties branch2/B/lambda inherits.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_iprops = {
+ sbox.repo_url : {'Root-Prop-1' : 'Root-Prop-Val1'},
+ sbox.repo_url + '/branch1' : {'Branch-Name' : 'Feature #1'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ branch2_lambda_path, expected_iprops, expected_explicit_props)
+
+ # Now update the WC back to r3, where there are no properties. The
+ # inheritable properties cache for the WC-root at branch2/B should be
+ # cleared and no inheritable properties found for branch2/B/lambda.
+ svntest.actions.run_and_verify_svn(None, [], 'up', '-r3', wc_dir)
+ expected_iprops = {}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ branch2_lambda_path, expected_iprops, expected_explicit_props)
+ # Update back to HEAD=r4 before continuing.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Now unswitch branch2/B and check branch2/B/lambda's inherited props.
+ # Now no iprop cache for branch2/B should exist and branch2/B/lambda
+ # should inherit from branch2 and '.'.
+ svntest.main.run_svn(None, 'switch', sbox.repo_url + '/branch2/B',
+ branch2_B_path)
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_iprops = {
+ ### Working copy parents! ###
+ wc_dir : {'Root-Prop-1' : 'Root-Prop-Val1'},
+ branch2_path : {'Branch-Name' : 'Feature #2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ branch2_lambda_path, expected_iprops, expected_explicit_props)
+
+ # Now switch the root of the WC to ^/branch2 and check the inherited
+ # properties on B/lambda. It should inherit the explicit property
+ # on the WC path '.' (i.e. ^/branch2) and the property on the root
+ # of the repos via the inherited props cache.
+ svntest.main.run_svn(None, 'switch', '--ignore-ancestry',
+ sbox.repo_url + '/branch2', wc_dir)
+ expected_iprops = {
+ ### Root if a repos parent ###
+ sbox.repo_url : {'Root-Prop-1' : 'Root-Prop-Val1'},
+ ### Branch root is a working copy parent ###
+ wc_dir : {'Branch-Name' : 'Feature #2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.ospath('B/lambda'),
+ expected_iprops, expected_explicit_props)
+
+ # Check that switched files have properties cached too.
+ # Switch the root of the WC to ^/A, then switch mu to ^/branch1/mu.
+ svntest.main.run_svn(None, 'switch', sbox.repo_url + '/A', wc_dir)
+ svntest.main.run_svn(None, 'switch', sbox.repo_url + '/branch1/mu',
+ sbox.ospath('mu'))
+ expected_iprops = {
+ sbox.repo_url : {'Root-Prop-1' : 'Root-Prop-Val1'},
+ sbox.repo_url + '/branch1' : {'Branch-Name' : 'Feature #1'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.ospath('mu'),
+ expected_iprops, expected_explicit_props)
+
+#----------------------------------------------------------------------
+# Property inheritance with pegged wc and repos targets.
+def iprops_pegged_wc_targets(sbox):
+ "iprops of pegged wc targets at operative revs"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Paths of note.
+ C_path = sbox.ospath('A/C')
+ D_path = sbox.ospath('A/D')
+ G_path = sbox.ospath('A/D/G')
+ alpha_path = sbox.ospath('A/B/E/alpha')
+ replaced_alpha_path = sbox.ospath('A/D/G/E/alpha')
+
+ # r2 - Set some root properties and a property on A/D, and make an edit
+ # to A/B/E/alpha.
+ sbox.simple_propset('RootProp1', 'Root-Prop-Val-1-set-in-r2', '.')
+ sbox.simple_propset('RootProp2', 'Root-Prop-Val-2-set-in-r2', '.')
+ sbox.simple_propset('D-Prop', 'D-Prop-Val-set-in-r2', 'A/D')
+ svntest.main.file_write(alpha_path, "Edit in r2.\n")
+ svntest.main.run_svn(None, 'commit', '-m', 'Add some properties',
+ wc_dir)
+
+ # r3 - Change all of the properties.
+ sbox.simple_propset('RootProp1', 'Root-Prop-Val-1-set-in-r3', '.')
+ sbox.simple_propset('RootProp2', 'Root-Prop-Val-2-set-in-r3', '.')
+ sbox.simple_propset('D-Prop', 'D-Prop-Val-set-in-r3', 'A/D')
+ svntest.main.run_svn(None, 'commit', '-m', 'Modify some properties',
+ wc_dir)
+
+ # Set some working properties.
+ sbox.simple_propset('RootProp1', 'Root-Prop-Val-1-WORKING', '.')
+ sbox.simple_propset('RootProp2', 'Root-Prop-Val-2-WORKING', '.')
+ sbox.simple_propset('D-Prop', 'D-Prop-Val-WORKING', 'A/D')
+
+ ### Peg Revision = HEAD
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | HEAD | unspecified
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r3'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, 'HEAD')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | HEAD | unspecified
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', 'HEAD')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | HEAD | revision=2
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r2'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r2'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, 'HEAD', '-r2')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | HEAD | revision=2
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', 'HEAD',
+ '-r2')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | HEAD | COMMITTED
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r3'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, 'HEAD',
+ '-rCOMMITTED')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | HEAD | COMMITTED
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', 'HEAD',
+ '-rCOMMITTED')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | HEAD | PREV
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r2'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r2'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, 'HEAD',
+ '-rPREV')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | HEAD | PREV
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', 'HEAD',
+ '-rPREV')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | HEAD | BASE
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r3'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, 'HEAD',
+ '-rBASE')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | HEAD | BASE
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', 'HEAD',
+ '-rBASE')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | HEAD | HEAD
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r3'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, 'HEAD',
+ '-rHEAD')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | HEAD | HEAD
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', 'HEAD',
+ '-rHEAD')
+
+ ### Peg Revision = Unspecified
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | unspecified | unspecified
+ expected_iprops = {
+ wc_dir : {'RootProp1' : 'Root-Prop-Val-1-WORKING',
+ 'RootProp2' : 'Root-Prop-Val-2-WORKING'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-WORKING'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props)
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | unspecified | unspecified
+ expected_iprops = {
+ wc_dir : {'RootProp1' : 'Root-Prop-Val-1-WORKING'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | unspecified | revision=2
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r2'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r2'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, None, '-r2')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | unspecified | revision=2
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', None,
+ '-r2')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | unspecified | revision=COMMITTED (i.e. r3)
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r3'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, None,
+ '-rCOMMITTED')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | unspecified | COMMITTED
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', None,
+ '-rCOMMITTED')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | unspecified | revision=PREV (i.e. r2)
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r2'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r2'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, None, '-rPREV')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | unspecified | revision=2
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', None,
+ '-rPREV')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | unspecified | revision=BASE (i.e. r3)
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r3'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, None, '-rBASE')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | unspecified | revision=BASE (i.e. r3)
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', None,
+ '-rBASE')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | unspecified | revision=HEAD (i.e. r3)
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r3'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, None, '-rHEAD')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | unspecified | revision=HEAD (i.e. r3)
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', None,
+ '-rHEAD')
+
+ ### Peg Revision = rN
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | revision=1 | unspecified
+ expected_iprops = {}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, '1')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | revision=1 | unspecified
+ expected_iprops = {}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', '1')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | revision=1 | revision=2
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r2'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r2'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, '1', '-r2')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | revision=1 | revision=2
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', '1', '-r2')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | revision=1 | COMMITTED
+ # The last committed revision for A/D is r3.
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r3'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, '1',
+ '-rCOMMITTED')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | revision=1 | COMMITTED
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', '1',
+ '-rCOMMITTED')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | revision=3 | PREV
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r2'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r2'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, '3', '-rPREV')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | revision=3 | PREV
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', '3',
+ '-rPREV')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | revision=1 | BASE
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r3'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, '1', '-rBASE')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | revision=1 | BASE
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', '1',
+ '-rBASE')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | revision=2 | HEAD
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r3'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, '2', '-rHEAD')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | revision=1 | HEAD
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', '1',
+ '-rHEAD')
+
+ ### Peg Revision = PREV
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | PREV | unspecified
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r2'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r2'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, 'PREV')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | PREV | unspecified
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', 'PREV')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | PREV | revision=3
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r3'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, 'PREV', '-r3')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | PREV | revision=3
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', 'PREV',
+ '-r3')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | PREV | COMMITTED
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r3'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, 'PREV',
+ '-rCOMMITTED')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | PREV | COMMITTED
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', 'PREV',
+ '-rCOMMITTED')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | PREV | PREV
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r2'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r2'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, 'PREV', '-rPREV')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | PREV | PREV
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', 'PREV',
+ '-rPREV')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | PREV | BASE
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r3'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, 'PREV', '-rBASE')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | PREV | BASE
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', 'PREV',
+ '-rBASE')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | PREV | HEAD
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r3'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, None, 'PREV', '-rHEAD')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | PREV | HEAD
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ D_path, expected_iprops, expected_explicit_props, 'RootProp1', 'PREV',
+ '-rHEAD')
+
+ ### Peg Revision = BASE
+
+ # Replace A/D/G with a copy of ^/A/B.
+ # Check inherited props on base of A/D/G/E/alpha.
+ # Inherited props should always come from the repository parent of
+ # ^/A/B/E/alpha and so should not include the property (working or
+ # otherwise) on A/D.
+ svntest.actions.run_and_verify_svn(None, [], 'delete', G_path)
+ svntest.actions.run_and_verify_svn(None, [], 'copy',
+ sbox.repo_url + '/A/B', G_path)
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | BASE | unspecified
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props, None,
+ 'BASE')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | BASE | unspecified
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props,
+ 'RootProp1', 'BASE')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | BASE | revision=2
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props, None,
+ 'BASE', '-r2')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | BASE | revision=2
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props,
+ 'RootProp1', 'BASE', '-r2')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | BASE | COMMITTED
+ # The most recent change on the copy source, ^/A/B/E/alpha is r2
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props, None,
+ 'BASE', '-rCOMMITTED')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | BASE | COMMITTED
+ # The most recent change on the copy source, ^/A/B/E/alpha is r2
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props,
+ 'RootProp1', 'BASE', '-rCOMMITTED')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | BASE | PREV
+ # The most recent change on the copy source, ^/A/B/E/alpha is r2
+ # so PREV=r1, but there are no properties at all in r1.
+ expected_iprops = {}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props, None,
+ 'BASE', '-rPREV')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | BASE | PREV
+ # The most recent change on the copy source, ^/A/B/E/alpha is r2
+ # so PREV=r1, but there are no properties at all in r1.
+ expected_iprops = {}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props,
+ 'RootProp1', 'BASE', '-rPREV')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | BASE | BASE
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props, None,
+ 'BASE', '-rBASE')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | BASE | BASE
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props,
+ 'RootProp1', 'BASE', '-rBASE')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | BASE | HEAD
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props, None,
+ 'BASE', '-rHEAD')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | BASE | HEAD
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props,
+ 'RootProp1', 'BASE', '-rHEAD')
+
+ ### Peg Revision = COMMITTED
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | COMMITTED | unspecified
+ # The most recent change on the copy source, ^/A/B/E/alpha is r2
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props, None,
+ 'COMMITTED')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | COMMITTED | unspecified
+ # The most recent change on the copy source, ^/A/B/E/alpha is r2
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props,
+ 'RootProp1', 'COMMITTED')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | COMMITTED | revision=3
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props, None,
+ 'COMMITTED', '-r3')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | COMMITTED | revision=3
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props,
+ 'RootProp1', 'COMMITTED', '-r3')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | COMMITTED | COMMITTED
+ # The most recent change on the copy source, ^/A/B/E/alpha is r2
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props, None,
+ 'COMMITTED', '-rCOMMITTED')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | COMMITTED | COMMITTED
+ # The most recent change on the copy source, ^/A/B/E/alpha is r2
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props,
+ 'RootProp1', 'COMMITTED', '-rCOMMITTED')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | COMMITTED | PREV
+ # The most recent change on the copy source, ^/A/B/E/alpha is r2
+ # so PREV=r1, but there are no properties at all in r1.
+ expected_iprops = {}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props, None,
+ 'COMMITTED', '-rPREV')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | COMMITTED | PREV
+ # The most recent change on the copy source, ^/A/B/E/alpha is r2
+ # so PREV=r1, but there are no properties at all in r1.
+ expected_iprops = {}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props,
+ 'RootProp1', 'COMMITTED', '-rPREV')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | COMMITTED | BASE
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props, None,
+ 'COMMITTED', '-rBASE')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | COMMITTED | BASE
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props,
+ 'RootProp1', 'COMMITTED', '-rBASE')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | WC | COMMITTED | HEAD
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props, None,
+ 'COMMITTED', '-rHEAD')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | WC | COMMITTED | HEAD
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props,
+ 'RootProp1', 'COMMITTED', '-rHEAD')
+
+ # Revert the replacement with history of A/D/G and once again
+ # replace A/D/G, but this time without history (using an export
+ # of A/B).
+ svntest.actions.run_and_verify_svn(None, [], 'revert', G_path, '-R')
+ svntest.actions.run_and_verify_svn(None, [], 'delete', G_path)
+ svntest.actions.run_and_verify_svn(None, [], 'export',
+ sbox.repo_url + '/A/B', G_path)
+ svntest.actions.run_and_verify_svn(None, [], 'add', G_path)
+ # Set a working prop on a file within the replaced tree, we should *never*
+ # see this property if asking about the
+ # file@[HEAD | PREV | COMMITTED | BASE]
+ sbox.simple_propset('FileProp', 'File-Prop-WORKING-NO-BASE',
+ 'A/D/G/E/alpha')
+
+ # There are no HEAD, PREV, COMMITTED, or BASE revs for A/D/G/E/alpha in this
+ # case, so be sure requests for such error out or return nothing as per the
+ # existing behavior for proplist and propget sans the --show-inherited-props
+ # option.
+ #
+ # proplist/propget WC-PATH@HEAD
+ svntest.actions.run_and_verify_svn(
+ None,
+ ".*Unknown node kind for '" + sbox.repo_url + "/A/D/G/E/alpha'\n",
+ 'pl', '-v', '--show-inherited-props', replaced_alpha_path + '@HEAD')
+ svntest.actions.run_and_verify_svn(
+ None,
+ ".*'" + sbox.repo_url + "/A/D/G/E/alpha' does not exist in revision 3\n",
+ 'pg', 'RootProp1', '-v', '--show-inherited-props',
+ replaced_alpha_path + '@HEAD')
+ # proplist/propget WC-PATH@PREV
+ svntest.actions.run_and_verify_svn(
+ None,
+ ".*Path '.*alpha' has no committed revision\n",
+ 'pl', '-v', '--show-inherited-props', replaced_alpha_path + '@PREV')
+ svntest.actions.run_and_verify_svn(
+ None,
+ ".*Path '.*alpha' has no committed revision\n",
+ 'pg', 'RootProp1', '-v', '--show-inherited-props', replaced_alpha_path + '@PREV')
+ # proplist/propget WC-PATH@COMMITTED
+ expected_iprops = {}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props, None,
+ 'COMMITTED')
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props,
+ 'RootProp1', 'COMMITTED')
+ # proplist/propget WC-PATH@BASE
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props, None,
+ 'BASE')
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ replaced_alpha_path, expected_iprops, expected_explicit_props,
+ 'RootProp1', 'BASE')
+
+#----------------------------------------------------------------------
+# Property inheritance with pegged repos targets at operative revs.
+# Exercises proplist/propget --show-inherited-props against a URL target
+# (^/A/D) for each combination of peg revision (unspecified, rN, HEAD)
+# and operative revision (unspecified, rN, HEAD).
+def iprops_pegged_url_targets(sbox):
+ "iprops of pegged url targets at operative revs"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # r2 - Set some root properties and some properties on A/D.
+ # Note that 'DirProp' is set on both the root and A/D, so in the checks
+ # below it appears both as an inherited prop (root value) and as an
+ # explicit prop (A/D value).
+ sbox.simple_propset('RootProp1', 'Root-Prop-Val-1-set-in-r2', '.')
+ sbox.simple_propset('RootProp2', 'Root-Prop-Val-2-set-in-r2', '.')
+ sbox.simple_propset('DirProp', 'Dir-Prop-Val-set-in-r2', '.')
+ sbox.simple_propset('DirProp', 'Dir-Prop-Val-set-in-r2-on-D', 'A/D')
+ sbox.simple_propset('D-Prop', 'D-Prop-Val-set-in-r2', 'A/D')
+ svntest.main.run_svn(None, 'commit', '-m', 'Add some properties',
+ wc_dir)
+
+ # r3 - Make another change to all of the properties set in r2.
+ sbox.simple_propset('RootProp1', 'Root-Prop-Val-1-set-in-r3', '.')
+ sbox.simple_propset('RootProp2', 'Root-Prop-Val-2-set-in-r3', '.')
+ sbox.simple_propset('DirProp', 'Dir-Prop-Val-set-in-r3', '.')
+ sbox.simple_propset('DirProp', 'Dir-Prop-Val-set-in-r3-on-D', 'A/D')
+ sbox.simple_propset('D-Prop', 'D-Prop-Val-set-in-r3', 'A/D')
+ svntest.main.run_svn(None, 'commit', '-m', 'Modify some properties',
+ wc_dir)
+
+ ### Peg Revision = Unspecified
+ # With no peg revision the URL target resolves to the youngest revision
+ # (r3 here), unless an operative revision (-rN) selects an older one.
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | URL | unspecified | unspecified
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3',
+ 'DirProp' : 'Dir-Prop-Val-set-in-r3'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r3',
+ 'DirProp' : 'Dir-Prop-Val-set-in-r3-on-D'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D', expected_iprops, expected_explicit_props)
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | URL | unspecified | unspecified
+ expected_iprops = {
+ sbox.repo_url : {'DirProp' : 'Dir-Prop-Val-set-in-r3'}}
+ expected_explicit_props = {'DirProp' : 'Dir-Prop-Val-set-in-r3-on-D'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D', expected_iprops, expected_explicit_props,
+ 'DirProp')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | URL | unspecified | revision=2
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r2',
+ 'DirProp' : 'Dir-Prop-Val-set-in-r2'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r2',
+ 'DirProp' : 'Dir-Prop-Val-set-in-r2-on-D'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D', expected_iprops, expected_explicit_props, None,
+ None, '-r2')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | URL | unspecified | revision=2
+ expected_iprops = {
+ sbox.repo_url : {'DirProp' : 'Dir-Prop-Val-set-in-r2'}}
+ expected_explicit_props = {'DirProp' : 'Dir-Prop-Val-set-in-r2-on-D'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D', expected_iprops, expected_explicit_props,
+ 'DirProp', None, '-r2')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | URL | unspecified | HEAD
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3',
+ 'DirProp' : 'Dir-Prop-Val-set-in-r3'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r3',
+ 'DirProp' : 'Dir-Prop-Val-set-in-r3-on-D'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D', expected_iprops, expected_explicit_props,
+ None, None, '-rHEAD')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | URL | unspecified | HEAD
+ expected_iprops = {
+ sbox.repo_url : {'DirProp' : 'Dir-Prop-Val-set-in-r3'}}
+ expected_explicit_props = {'DirProp' : 'Dir-Prop-Val-set-in-r3-on-D'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D', expected_iprops, expected_explicit_props,
+ 'DirProp', None, '-rHEAD')
+
+ ### Peg Revision = rN
+ # Pegged at r2: expect the r2 property values unless the operative
+ # revision selects a different revision (e.g. -rHEAD below).
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | URL | revision=2 | unspecified
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r2',
+ 'DirProp' : 'Dir-Prop-Val-set-in-r2'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r2',
+ 'DirProp' : 'Dir-Prop-Val-set-in-r2-on-D'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D', expected_iprops, expected_explicit_props,
+ None, '2')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | URL | revision=2 | unspecified
+ expected_iprops = {
+ sbox.repo_url : {'DirProp' : 'Dir-Prop-Val-set-in-r2'}}
+ expected_explicit_props = {'DirProp' : 'Dir-Prop-Val-set-in-r2-on-D'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D', expected_iprops, expected_explicit_props,
+ 'DirProp', '2')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | URL | revision=2 | revision=2
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r2',
+ 'DirProp' : 'Dir-Prop-Val-set-in-r2'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r2',
+ 'DirProp' : 'Dir-Prop-Val-set-in-r2-on-D'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D', expected_iprops, expected_explicit_props, None,
+ '2', '-r2')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | URL | revision=2 | revision=2
+ expected_iprops = {
+ sbox.repo_url : {'DirProp' : 'Dir-Prop-Val-set-in-r2'}}
+ expected_explicit_props = {'DirProp' : 'Dir-Prop-Val-set-in-r2-on-D'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D', expected_iprops, expected_explicit_props,
+ 'DirProp', '2', '-r2')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | URL | revision=2 | HEAD
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3',
+ 'DirProp' : 'Dir-Prop-Val-set-in-r3'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r3',
+ 'DirProp' : 'Dir-Prop-Val-set-in-r3-on-D'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D', expected_iprops, expected_explicit_props,
+ None, '2', '-rHEAD')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | URL | revision=2 | HEAD
+ expected_iprops = {
+ sbox.repo_url : {'DirProp' : 'Dir-Prop-Val-set-in-r3'}}
+ expected_explicit_props = {'DirProp' : 'Dir-Prop-Val-set-in-r3-on-D'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D', expected_iprops, expected_explicit_props,
+ 'DirProp', '2', '-rHEAD')
+
+ ### Peg Revision = HEAD
+ # Pegged at HEAD: same expectations as an unspecified peg revision.
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | URL | HEAD | unspecified
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3',
+ 'DirProp' : 'Dir-Prop-Val-set-in-r3'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r3',
+ 'DirProp' : 'Dir-Prop-Val-set-in-r3-on-D'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D', expected_iprops, expected_explicit_props,
+ None, 'HEAD')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | URL | HEAD | unspecified
+ expected_iprops = {
+ sbox.repo_url : {'DirProp' : 'Dir-Prop-Val-set-in-r3'}}
+ expected_explicit_props = {'DirProp' : 'Dir-Prop-Val-set-in-r3-on-D'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D', expected_iprops, expected_explicit_props,
+ 'DirProp', 'HEAD')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | URL | HEAD | revision=2
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r2',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r2',
+ 'DirProp' : 'Dir-Prop-Val-set-in-r2'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r2',
+ 'DirProp' : 'Dir-Prop-Val-set-in-r2-on-D'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D', expected_iprops, expected_explicit_props, None,
+ 'HEAD', '-r2')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | URL | HEAD | revision=2
+ expected_iprops = {
+ sbox.repo_url : {'DirProp' : 'Dir-Prop-Val-set-in-r2'}}
+ expected_explicit_props = {'DirProp' : 'Dir-Prop-Val-set-in-r2-on-D'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D', expected_iprops, expected_explicit_props,
+ 'DirProp', 'HEAD', '-r2')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # proplist | URL | HEAD | HEAD
+ expected_iprops = {
+ sbox.repo_url : {'RootProp1' : 'Root-Prop-Val-1-set-in-r3',
+ 'RootProp2' : 'Root-Prop-Val-2-set-in-r3',
+ 'DirProp' : 'Dir-Prop-Val-set-in-r3'}}
+ expected_explicit_props = {'D-Prop' : 'D-Prop-Val-set-in-r3',
+ 'DirProp' : 'Dir-Prop-Val-set-in-r3-on-D'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D', expected_iprops, expected_explicit_props,
+ None, 'HEAD', '-rHEAD')
+
+ # Operation | Target | Peg Revision | Operative Revision
+ # propget | URL | HEAD | HEAD
+ expected_iprops = {
+ sbox.repo_url : {'DirProp' : 'Dir-Prop-Val-set-in-r3'}}
+ expected_explicit_props = {'DirProp' : 'Dir-Prop-Val-set-in-r3-on-D'}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.repo_url + '/A/D', expected_iprops, expected_explicit_props,
+ 'DirProp', 'HEAD', '-rHEAD')
+#----------------------------------------------------------------------
+# Inherited property caching during shallow updates.
+def iprops_shallow_operative_depths(sbox):
+  "iprop caching works with shallow updates"
+
+  # A shallow (--depth) update must refresh the cached inherited props
+  # only for nodes it actually visits; switched subtrees and files keep
+  # the iprops of whatever revision they were last updated to.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # r2 - Create a branch..
+  svntest.main.run_svn(None, 'copy', sbox.repo_url + '/A',
+                       sbox.repo_url + '/branch1', '-m', 'Make branch1')
+  svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+  # r3 - Create a root property and some branch properties
+  sbox.simple_propset('Root-Prop-1', 'Root-Prop-Val1', '.')
+  sbox.simple_propset('Branch-Name', 'Feature #1', 'branch1')
+  sbox.simple_propset('Branch-Name', 'Trunk', 'A')
+  svntest.main.run_svn(None, 'commit', '-m', 'Add some properties',
+                       wc_dir)
+
+  # r4 - Change the root and a branch properties added in r3.
+  sbox.simple_propset('Root-Prop-1', 'Root-Prop-Val1.1', '.')
+  sbox.simple_propset('Branch-Name', 'Feature No. 1', 'branch1')
+  sbox.simple_propset('Branch-Name', 'Trunk Branch', 'A')
+  svntest.main.run_svn(None, 'commit', '-m', 'Change some properties',
+                       wc_dir)
+
+  svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+  # Switch the WC to ^/branch1:
+  svntest.main.run_svn(None, 'switch', '--ignore-ancestry',
+                       sbox.repo_url + '/branch1', wc_dir)
+  # Switch the B to ^/A/B:
+  svntest.main.run_svn(None, 'switch', sbox.repo_url + '/A/B',
+                       sbox.ospath('B'))
+  # Switch the mu to ^/A/mu:
+  svntest.main.run_svn(None, 'switch', sbox.repo_url + '/A/mu',
+                       sbox.ospath('mu'))
+  # Update the whole WC back to r3.
+  svntest.actions.run_and_verify_svn(None, [], 'up', '-r3', wc_dir)
+
+  # Check the inherited props on B/E within the switched subtree
+  # and the switched file mu. The props should all be inherited
+  # from repository locations and reflect the values at r3.
+  expected_iprops = {
+    sbox.repo_url : {'Root-Prop-1' : 'Root-Prop-Val1'},
+    sbox.repo_url + '/A' : {'Branch-Name' : 'Trunk'}}
+  expected_explicit_props = {}
+  svntest.actions.run_and_verify_inherited_prop_xml(
+    sbox.ospath('B/E'), expected_iprops, expected_explicit_props)
+  svntest.actions.run_and_verify_inherited_prop_xml(
+    sbox.ospath('mu'), expected_iprops, expected_explicit_props)
+
+  # Update only the root of the WC (to HEAD=r4) using a shallow update.
+  # Again check the inherited props on B/E. This shouldn't affect the
+  # switched subtree at all, the props it inherits should still reflect
+  # the values at r3.
+  svntest.actions.run_and_verify_svn(None, [], 'up',
+                                     '--depth=empty', wc_dir)
+  svntest.actions.run_and_verify_inherited_prop_xml(
+    sbox.ospath('B/E'), expected_iprops, expected_explicit_props)
+  svntest.actions.run_and_verify_inherited_prop_xml(
+    sbox.ospath('mu'), expected_iprops, expected_explicit_props)
+
+  # Update the root of the WC (to HEAD=r4) at depth=files. B/E should
+  # still inherit values from r3, but mu should now inherit props from r4.
+  svntest.actions.run_and_verify_svn(None, [], 'up',
+                                     '--depth=files', wc_dir)
+  svntest.actions.run_and_verify_inherited_prop_xml(
+    sbox.ospath('B/E'), expected_iprops, expected_explicit_props)
+  expected_iprops = {
+    sbox.repo_url : {'Root-Prop-1' : 'Root-Prop-Val1.1'},
+    sbox.repo_url + '/A' : {'Branch-Name' : 'Trunk Branch'}}
+  expected_explicit_props = {}
+  svntest.actions.run_and_verify_inherited_prop_xml(
+    sbox.ospath('mu'), expected_iprops, expected_explicit_props)
+
+  # Update the root of the WC (to HEAD=r4) at depth=immediates. Now both B/E
+  # and mu inherit props from r4.
+  svntest.actions.run_and_verify_svn(None, [], 'up',
+                                     '--depth=immediates', wc_dir)
+  svntest.actions.run_and_verify_inherited_prop_xml(
+    sbox.ospath('B/E'), expected_iprops, expected_explicit_props)
+  svntest.actions.run_and_verify_inherited_prop_xml(
+    sbox.ospath('mu'), expected_iprops, expected_explicit_props)
+
+#----------------------------------------------------------------------
+# Inherited property caching by directory externals.
+def iprops_with_directory_externals(sbox):
+  "iprop caching works with directory externals"
+
+  # A directory external must inherit properties from the root of the
+  # repository it comes from, not from the WC that physically hosts it.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Create a second repository with the original greek tree
+  repo_dir = sbox.repo_dir
+  other_repo_dir, other_repo_url = sbox.add_repo_path("other")
+  other_wc_dir = sbox.add_wc_path("other")
+  svntest.main.copy_repos(repo_dir, other_repo_dir, 1, 1)
+  svntest.actions.run_and_verify_svn(None, [], 'co', other_repo_url,
+                                     other_wc_dir)
+
+  # Create a root property on the first WC.
+  sbox.simple_propset('Prime-Root-Prop', 'Root-Prop-Val1', '.')
+  svntest.main.run_svn(None, 'commit', '-m', 'Add a root property',
+                       wc_dir)
+
+  # Create a root property on the "other" WC.
+  svntest.actions.run_and_verify_svn(None, [], 'ps', 'Other-Root-Prop',
+                                     'Root-Prop-Val-from-other', other_wc_dir)
+  svntest.main.run_svn(None, 'commit', '-m', 'Add a root property',
+                       other_wc_dir)
+
+  # Switch the root of the first WC to a repository non-root, it will
+  # now have cached iprops from the first repos.
+  svntest.main.run_svn(None, 'switch', sbox.repo_url + '/A/B',
+                       wc_dir, '--ignore-ancestry')
+
+  # Create an external in the first WC that points to a location in the
+  # "other" WC.
+  sbox.simple_propset('svn:externals',
+                      other_repo_url + '/A/D/G X-Other-Repos',
+                      'E')
+  svntest.actions.run_and_verify_svn(None, [], 'ci',
+                                     '-m', 'Add external point to other WC',
+                                     wc_dir)
+  svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+  # Create an external in the first WC that points to a location in the
+  # same WC.
+  sbox.simple_propset('svn:externals',
+                      sbox.repo_url + '/A/D/H X-Same-Repos',
+                      'F')
+  svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+                                     'Add external pointing to same repos',
+                                     wc_dir)
+  svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+  # Check the properties inherited by the external from the same repository.
+  # It should inherit the props from the root of the same repository.
+  expected_iprops = {
+    sbox.repo_url : {'Prime-Root-Prop' : 'Root-Prop-Val1'}}
+  expected_explicit_props = {}
+  svntest.actions.run_and_verify_inherited_prop_xml(
+    sbox.ospath('F/X-Same-Repos'), expected_iprops, expected_explicit_props)
+
+  # Check the properties inherited by the external from the "other"
+  # repository. It should inherit from the root of the other repos,
+  # despite being located in the first repository's WC.
+  expected_iprops = {
+    other_repo_url : {'Other-Root-Prop' : 'Root-Prop-Val-from-other'}}
+  expected_explicit_props = {}
+  svntest.actions.run_and_verify_inherited_prop_xml(
+    sbox.ospath('E/X-Other-Repos'), expected_iprops, expected_explicit_props)
+
+#----------------------------------------------------------------------
+# Inherited property caching by file externals.
+def iprops_with_file_externals(sbox):
+  "iprop caching works with file externals"
+
+  # A file external pegged to a fixed revision must keep the inherited
+  # props of that revision across updates; an unpegged external follows
+  # HEAD and picks up new inherited prop values on update.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Create a root property.
+  sbox.simple_propset('Prime-Root-Prop', 'Root-Prop-Val1', '.')
+  svntest.main.run_svn(None, 'commit', '-m', 'Add a root property',
+                       wc_dir)
+
+  # Create a "branch" property on 'A/D'.
+  sbox.simple_propset('Prime-Branch-Prop', 'Branch-Prop-Val1', 'A/D')
+  svntest.main.run_svn(None, 'commit', '-m', 'Add a branch property',
+                       wc_dir)
+
+  # Create two file externals, one pegged to a fixed revision.
+  sbox.simple_propset('svn:externals',
+                      sbox.repo_url + '/A/D/H/psi file-external',
+                      'A/B/E')
+  sbox.simple_propset('svn:externals',
+                      sbox.repo_url + '/A/D/H/psi@4 file-external-pegged',
+                      'A/B/F')
+  svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+                                     'Add a file external', wc_dir)
+  svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+  # Check the properties inherited by the external files. Both should
+  # inherit the properties from ^/ and ^/A/D.
+  expected_iprops = {
+    sbox.repo_url : {'Prime-Root-Prop' : 'Root-Prop-Val1'},
+    sbox.repo_url + '/A/D' : {'Prime-Branch-Prop' : 'Branch-Prop-Val1'}}
+  expected_explicit_props = {}
+  svntest.actions.run_and_verify_inherited_prop_xml(
+    sbox.ospath('A/B/E/file-external'), expected_iprops,
+    expected_explicit_props)
+  svntest.actions.run_and_verify_inherited_prop_xml(
+    sbox.ospath('A/B/F/file-external-pegged'), expected_iprops,
+    expected_explicit_props)
+
+  # Modify the "branch" property on 'A/D'.
+  sbox.simple_propset('Prime-Branch-Prop', 'Branch-Prop-Val2', 'A/D')
+  svntest.main.run_svn(None, 'commit', '-m', 'Add a branch property',
+                       wc_dir)
+
+  # There should be no change in the external file's
+  # inherited properties until...
+  svntest.actions.run_and_verify_inherited_prop_xml(
+    sbox.ospath('A/B/E/file-external'), expected_iprops,
+    expected_explicit_props)
+  svntest.actions.run_and_verify_inherited_prop_xml(
+    sbox.ospath('A/B/F/file-external-pegged'), expected_iprops,
+    expected_explicit_props)
+
+  # ...We update the external:
+  svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+  # The pegged file external's iprops should remain unchanged.
+  svntest.actions.run_and_verify_inherited_prop_xml(
+    sbox.ospath('A/B/F/file-external-pegged'), expected_iprops,
+    expected_explicit_props)
+  # But the other's should be updated.
+  expected_iprops = {
+    sbox.repo_url : {'Prime-Root-Prop' : 'Root-Prop-Val1'},
+    sbox.repo_url + '/A/D' : {'Prime-Branch-Prop' : 'Branch-Prop-Val2'}}
+  svntest.actions.run_and_verify_inherited_prop_xml(
+    sbox.ospath('A/B/E/file-external'), expected_iprops,
+    expected_explicit_props)
+
+def iprops_survive_commit(sbox):
+  "verify that iprops survive a commit"
+
+  # Committing from switched paths must not invalidate the inherited
+  # props cached on those paths: the iprops from ^/A/B are checked both
+  # before and after the commit of 'new' below.
+  sbox.build()
+  sbox.simple_propset('key', 'D', 'A/B',)
+  sbox.simple_commit()
+
+  # Switch a directory and a file to locations under the propset parent,
+  # so both inherit 'key' from ^/A/B.
+  svntest.main.run_svn(None, 'switch', sbox.repo_url + '/A/B/E',
+                       sbox.ospath('A/D'), '--ignore-ancestry')
+  svntest.main.run_svn(None, 'switch', sbox.repo_url + '/A/B/F',
+                       sbox.ospath('iota'), '--ignore-ancestry')
+  expected_iprops = {
+    sbox.repo_url + '/A/B' : {'key' : 'D'},
+  }
+
+  expected_explicit_props = {}
+  svntest.actions.run_and_verify_inherited_prop_xml(sbox.ospath('A/D'),
+                                                    expected_iprops,
+                                                    expected_explicit_props)
+  svntest.actions.run_and_verify_inherited_prop_xml(sbox.ospath('iota'),
+                                                    expected_iprops,
+                                                    expected_explicit_props)
+
+  sbox.simple_propset('new', 'V', 'A/D', 'iota')
+  sbox.simple_commit()
+
+  # Inherited props must be unchanged; only the explicit prop was added.
+  expected_explicit_props = {'new': 'V'}
+  svntest.actions.run_and_verify_inherited_prop_xml(sbox.ospath('A/D'),
+                                                    expected_iprops,
+                                                    expected_explicit_props)
+  svntest.actions.run_and_verify_inherited_prop_xml(sbox.ospath('iota'),
+                                                    expected_iprops,
+                                                    expected_explicit_props)
+
+########################################################################
+# Run the tests
+
+# list all tests here, starting with None:
+test_list = [ None,
+ iprops_basic_working,
+ iprops_basic_repos,
+ iprops_switched_subtrees,
+ iprops_pegged_wc_targets,
+ iprops_pegged_url_targets,
+ iprops_shallow_operative_depths,
+ iprops_with_directory_externals,
+ iprops_with_file_externals,
+ iprops_survive_commit,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+### End of file.
diff --git a/subversion/tests/cmdline/legacy/utf8_tests.py b/subversion/tests/cmdline/legacy/utf8_tests.py
new file mode 100755
index 0000000..9b903a8
--- /dev/null
+++ b/subversion/tests/cmdline/legacy/utf8_tests.py
@@ -0,0 +1,170 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# utf8_tests.py: testing the svn client's utf8 (i18n) handling
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import sys, re, os, locale
+
+# Our testing module
+import svntest
+from svntest import wc
+
+# (abbreviation)
+Item = wc.StateItem
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+
+#--------------------------------------------------------------------
+# Data
+
+# Here's a filename and a log message which contain some high-ascii
+# data. In theory this data has different interpretations when
+# converting from 2 different charsets into UTF-8.
+
+### "b" in ISO-8859-1 encoding:
+i18n_filename = 'b\xd4\xe7\xc5'
+
+### "drieëntwintig keer was één keer teveel" in ISO-8859-1 encoding:
+i18n_logmsg = 'drie\xc3\xabntwintig keer was \xc3\xa9\xc3\xa9n keer teveel'
+
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+@Skip()
+def basic_utf8_conversion(sbox):
+  "conversion of paths and logs to/from utf8"
+
+  # NOTE(review): permanently @Skip()'d legacy test.  It still uses the
+  # obsolete run_and_verify_svn() calling convention (a leading failure
+  # message argument before expected stdout/stderr) -- confirm against
+  # the current svntest.actions signature before ever re-enabling.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Create the new i18n file and schedule it for addition
+  svntest.main.file_append(os.path.join(wc_dir, i18n_filename), "hi")
+  svntest.actions.run_and_verify_svn(
+    "Failed to schedule i18n filename for addition", None, [],
+    'add', os.path.join(wc_dir, i18n_filename))
+
+  svntest.actions.run_and_verify_svn(
+    "Failed to commit i18n filename", None, [],
+    'commit', '-m', i18n_logmsg, wc_dir)
+
+# Here's how the test should really work:
+
+# 1. sh LC_ALL=ISO-8859-1 svn commit <filename> -m "<logmsg>"
+
+# 2. sh LC_ALL=UTF-8 svn log -rHEAD > output
+
+# 3. verify that output is the exact UTF-8 data that we expect.
+
+# 4. repeat the process using some other locale other than ISO8859-1,
+# preferably some locale which will convert the high-ascii data to
+# *different* UTF-8.
+
+
+
+#----------------------------------------------------------------------
+
+########################################################################
+# Run the tests
+
+# Module-level setup: force an ISO-8859-1 locale so the high-ascii test
+# data has a predictable UTF-8 interpretation.  Any failure here simply
+# leaves the platform locale in place; the checks below then disable
+# the test via 'localematch'.
+try:
+  # Generic setlocale so that getlocale returns something sensible
+  locale.setlocale(locale.LC_ALL, '')
+
+  # Try to make these tests run in an ISO-8859-1 environment, otherwise
+  # they would run in whatever random locale the testing platform
+  # happens to have, and then we couldn't predict the exact results.
+  if svntest.main.windows:
+    # In this case, it would probably be "english_usa.1252", but you should
+    # be able to set just the encoding by using ".1252" (that's codepage
+    # 1252, which is almost but not quite entirely unlike tea; um, I mean
+    # it's very similar to ISO-8859-1).
+    # -- Branko Čibej <brane@xbc.nu>
+    locale.setlocale(locale.LC_ALL, '.1252')
+  else:
+    locale.setlocale(locale.LC_ALL, 'en_US.ISO8859-1')
+
+  if os.putenv:
+    # propagate to the svn* executables, so they do the correct translation
+    # the line below works for Linux systems if they have the particular
+    # locale installed
+    os.environ['LC_ALL'] = "en_US.ISO8859-1"
+except:
+  pass
+
+# Check to see if the locale uses ISO-8859-1 encoding. The regex is necessary
+# because some systems omit the first hyphen or use lowercase letters for ISO.
+if sys.platform == 'win32':
+  localematch = 1
+else:
+  localeenc = locale.getlocale()[1]
+  if localeenc:
+    localeregex = re.compile('^ISO-?8859-1$', re.I)
+    localematch = localeregex.search(localeenc)
+    try:
+      svntest.actions.run_and_verify_svn(None, svntest.SVNAnyOutput, [],"help")
+    except:
+      # We won't be able to run the client; this might be because the
+      # system does not support the iso-8859-1 locale. Anyhow, it makes
+      # no sense to run the test.
+      localematch = None
+  else:
+    localematch = None
+
+# Also check that the environment contains the expected locale settings
+# either by default, or because we set them above.
+if localematch:
+  localeregex = re.compile('^en_US\.ISO-?8859-1$', re.I)
+  for env in [ 'LC_ALL', 'LC_CTYPE', 'LANG' ]:
+    env_value = os.getenv(env)
+    if env_value:
+      if localeregex.search(env_value):
+        break
+      else:
+        localematch = None
+        break
+
+
+########################################################################
+# Run the tests
+
+# list all tests here, starting with None:
+test_list = [ None,
+ basic_utf8_conversion,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/lock-helper.c b/subversion/tests/cmdline/lock-helper.c
new file mode 100644
index 0000000..47a333c
--- /dev/null
+++ b/subversion/tests/cmdline/lock-helper.c
@@ -0,0 +1,75 @@
+/*
+ * lock-helper.c : create locks with an expiry date
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdlib.h>
+
+#include "svn_pools.h"
+#include "svn_dirent_uri.h"
+#include "svn_fs.h"
+
+int
+main(int argc, const char *argv[])
+{
+  apr_pool_t *pool;
+  const char *fs_path, *file_path, *username;
+  apr_int64_t seconds;
+  apr_time_t expiration_date;
+  svn_fs_t *fs;
+  svn_fs_access_t *access;
+  svn_lock_t *lock;
+
+  /* Expect exactly four arguments: repo_path file_path user seconds. */
+  if (argc != 5)
+    {
+      fprintf(stderr, "usage: lock-helper repo_path file_path user seconds\n");
+      exit(1);
+    }
+
+  if (apr_initialize() != APR_SUCCESS)
+    {
+      fprintf(stderr, "apr_initialize() failed.\n");
+      exit(1);
+    }
+
+  pool = svn_pool_create(NULL);
+
+  /* The filesystem lives in the repository's db/ subdirectory. */
+  fs_path = svn_dirent_internal_style(argv[1], pool);
+  fs_path = svn_dirent_join(fs_path, "db", pool);
+  file_path = svn_dirent_canonicalize(argv[2], pool);
+  username = argv[3];
+  SVN_INT_ERR(svn_cstring_atoi64(&seconds, argv[4]));
+
+  /* Open the FS directly (bypassing the repos layer) and impersonate
+     USERNAME so the created lock is owned by that user. */
+  SVN_INT_ERR(svn_fs_open2(&fs, fs_path, NULL, pool, pool));
+  SVN_INT_ERR(svn_fs_create_access(&access, username, pool));
+  SVN_INT_ERR(svn_fs_set_access(fs, access));
+
+  /* The point of this helper: a lock that expires SECONDS from now. */
+  expiration_date = apr_time_now() + apr_time_from_sec(seconds);
+
+  SVN_INT_ERR(svn_fs_lock(&lock, fs, file_path, NULL, "created by lock-helper",
+                          FALSE, expiration_date, SVN_INVALID_REVNUM, FALSE,
+                          pool));
+
+  svn_pool_destroy(pool);
+  apr_terminate();
+
+  return EXIT_SUCCESS;
+}
diff --git a/subversion/tests/cmdline/lock_tests.py b/subversion/tests/cmdline/lock_tests.py
new file mode 100755
index 0000000..cd8e0d2
--- /dev/null
+++ b/subversion/tests/cmdline/lock_tests.py
@@ -0,0 +1,2557 @@
+#!/usr/bin/env python
+# encoding=utf-8
+#
+# lock_tests.py: testing repository and working-copy lock behavior
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import re, os, stat, logging
+
+logger = logging.getLogger()
+
+# Our testing module
+import svntest
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+######################################################################
+# Helpers
+
+def check_writability(path, writable):
+  "Raise svntest.Failure unless PATH's writability matches WRITABLE."
+  # Any of the owner/group/other write bits counts as "writable".
+  bits = stat.S_IWGRP | stat.S_IWOTH | stat.S_IWRITE
+  mode = os.stat(path)[0]
+  if bool(mode & bits) != writable:
+    raise svntest.Failure("path '%s' is unexpectedly %s (mode %o)"
+                          % (path, ["writable", "read-only"][writable], mode))
+
+def is_writable(path):
+  "Raise if PATH is not writable."
+  # Convenience wrapper around check_writability().
+  check_writability(path, True)
+
+def is_readonly(path):
+  "Raise if PATH is not readonly."
+  # Convenience wrapper around check_writability().
+  check_writability(path, False)
+
+######################################################################
+# Tests
+
+#----------------------------------------------------------------------
+# Each test refers to a section in
+# notes/locking/locking-functional-spec.txt
+
+# II.A.2, II.C.2.a: Lock a file in wc A as user FOO and make sure we
+# have a representation of it. Checkout wc B as user BAR. Verify
+# that user BAR cannot commit changes to the file nor its properties.
+def lock_file(sbox):
+  "lock a file and verify that it's locked"
+
+  # Lock iota as the primary author (jrandom), then verify that a second
+  # user (jconstant, wc_author2) can commit neither a text change nor a
+  # property change to it from a second working copy.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Make a second copy of the working copy
+  wc_b = sbox.add_wc_path('_b')
+  svntest.actions.duplicate_dir(wc_dir, wc_b)
+
+  # lock a file as wc_author
+  file_path = sbox.ospath('iota')
+  file_path_b = sbox.ospath('iota', wc_dir=wc_b)
+
+  svntest.main.file_append(file_path, "This represents a binary file\n")
+  svntest.main.run_svn(None, 'commit',
+                       '-m', '', file_path)
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', '', file_path)
+
+  # --- Meanwhile, in our other working copy... ---
+  # Either error code is accepted; NOTE(review): which one fires
+  # presumably depends on the RA layer in use -- confirm.
+  err_re = "(svn\: E195022\: File '.*iota' is locked in another)|" + \
+           "(svn\: E160039: User '?jconstant'? does not own lock on path.*iota')"
+
+  svntest.main.run_svn(None, 'update', wc_b)
+  # -- Try to change a file --
+  # change the locked file
+  svntest.main.file_append(file_path_b, "Covert tweak\n")
+
+  # attempt (and fail) to commit as user Sally
+  svntest.actions.run_and_verify_commit(wc_b, None, None, err_re,
+                                        '--username',
+                                        svntest.main.wc_author2,
+                                        '-m', '', file_path_b)
+
+  # Revert our change that we failed to commit
+  svntest.main.run_svn(None, 'revert', file_path_b)
+
+  # -- Try to change a property --
+  # change the locked file's properties
+  svntest.main.run_svn(None, 'propset', 'sneakyuser', 'Sally', file_path_b)
+
+  err_re = "(svn\: E195022\: File '.*iota' is locked in another)|" + \
+           "(svn\: E160039\: User '?jconstant'? does not own lock on path)"
+
+  # attempt (and fail) to commit as user Sally
+  svntest.actions.run_and_verify_commit(wc_b, None, None, err_re,
+                                        '--username',
+                                        svntest.main.wc_author2,
+                                        '-m', '', file_path_b)
+
+
+
+
+#----------------------------------------------------------------------
+# II.C.2.b.[12]: Lock a file and commit using the lock. Make sure the
+# lock is released. Repeat, but request that the lock not be
+# released. Make sure the lock is retained.
+def commit_file_keep_lock(sbox):
+  "commit a file and keep lock"
+
+  # With --no-unlock the commit must succeed AND the lock must survive
+  # ('K' = locally locked in the status output).
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # lock 'A/mu' as wc_author
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', 'some lock comment',
+                                     sbox.ospath('A/mu'))
+
+  # make a change and commit it, holding lock
+  sbox.simple_append('A/mu', 'Tweak!\n')
+  svntest.main.run_svn(None, 'commit', '-m', '', '--no-unlock',
+                       sbox.ospath('A/mu'))
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/mu', wc_rev=2, writelocked='K')
+
+  # Make sure the file is still locked
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+def commit_file_unlock(sbox):
+  "commit a file and release lock"
+
+  # A plain commit releases held locks -- even on iota, which is locked
+  # but has no modification and so is not part of the commit itself.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # lock A/mu and iota as wc_author
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', 'some lock comment',
+                                     sbox.ospath('A/mu'),
+                                     sbox.ospath('iota'))
+
+  # make a change and commit it, allowing lock to be released
+  sbox.simple_append('A/mu', 'Tweak!\n')
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/mu' : Item(verb='Sending'),
+    })
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/mu', wc_rev=2)
+
+  # Make sure both iota and mu are unlocked, but only mu is bumped
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status)
+
+#----------------------------------------------------------------------
+def commit_propchange(sbox):
+  "commit a locked file with a prop change"
+
+  # A property-only commit must release the lock just like a text commit.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # lock A/mu as wc_author
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', 'some lock comment',
+                                     sbox.ospath('A/mu'))
+
+  # make a property change and commit it, allowing lock to be released
+  sbox.simple_propset('blue', 'azul', 'A/mu')
+  sbox.simple_commit('A/mu')
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/mu', wc_rev=2)
+
+  # Make sure the file is unlocked
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+# II.C.2.c: Lock a file in wc A as user FOO. Attempt to unlock same
+# file in same wc as user BAR. Should fail.
+#
+# Attempt again with --force. Should succeed.
+#
+# II.C.2.c: Lock a file in wc A as user FOO. Attempt to unlock same
+# file in wc B as user FOO. Should fail.
+#
+# Attempt again with --force. Should succeed.
+def break_lock(sbox):
+  "lock a file and verify lock breaking behavior"
+
+  # Unlocking from a WC that doesn't hold the lock token must fail;
+  # 'unlock --force' must break the lock anyway.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Make a second copy of the working copy
+  wc_b = sbox.add_wc_path('_b')
+  svntest.actions.duplicate_dir(wc_dir, wc_b)
+
+  # lock a file as wc_author
+  file_path = sbox.ospath('iota')
+  file_path_b = sbox.ospath('iota', wc_dir=wc_b)
+
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', '', file_path)
+
+  # --- Meanwhile, in our other working copy... ---
+
+  svntest.main.run_svn(None, 'update', wc_b)
+
+  # attempt (and fail) to unlock file
+
+  # This should give a "iota' is not locked in this working copy" error
+  svntest.actions.run_and_verify_svn(None, ".*not locked",
+                                     'unlock',
+                                     file_path_b)
+
+  svntest.actions.run_and_verify_svn(".*unlocked", [],
+                                     'unlock', '--force',
+                                     file_path_b)
+
+#----------------------------------------------------------------------
+# II.C.2.d: Lock a file in wc A as user FOO. Attempt to lock same
+# file in wc B as user BAR. Should fail.
+#
+# Attempt again with --force. Should succeed.
+#
+# II.C.2.d: Lock a file in wc A as user FOO. Attempt to lock same
+# file in wc B as user FOO. Should fail.
+#
+# Attempt again with --force. Should succeed.
+def steal_lock(sbox):
+  "lock a file and verify lock stealing behavior"
+
+  # Locking an already-locked file must fail; 'lock --force' must steal
+  # the existing lock.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Make a second copy of the working copy
+  wc_b = sbox.add_wc_path('_b')
+  svntest.actions.duplicate_dir(wc_dir, wc_b)
+
+  # lock a file as wc_author
+  file_path = sbox.ospath('iota')
+  file_path_b = sbox.ospath('iota', wc_dir=wc_b)
+
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', '', file_path)
+
+  # --- Meanwhile, in our other working copy... ---
+
+  svntest.main.run_svn(None, 'update', wc_b)
+
+  # attempt (and fail) to lock file
+
+  # This should give a "iota' is already locked error
+  svntest.actions.run_and_verify_svn(None,
+                                     ".*already locked",
+                                     'lock',
+                                     '-m', 'trying to break', file_path_b)
+
+  svntest.actions.run_and_verify_svn(".*locked by user", [],
+                                     'lock', '--force',
+                                     '-m', 'trying to break', file_path_b)
+
+#----------------------------------------------------------------------
+# II.B.2, II.C.2.e: Lock a file in wc A. Query wc for the
+# lock and verify that all lock fields are present and correct.
+def examine_lock(sbox):
+  "examine the fields of a lockfile for correctness"
+
+  # run_and_validate_lock() locks the path itself and then checks the
+  # lock's fields (owner, token, comment, dates) via 'svn info'.
+  sbox.build()
+
+  # lock a file as wc_author
+  svntest.actions.run_and_validate_lock(sbox.ospath('iota'),
+                                        svntest.main.wc_author)
+
+#----------------------------------------------------------------------
+# II.C.1: Lock a file in wc A. Check out wc B. Break the lock in wc
+# B. Verify that wc A gracefully cleans up the lock via update as
+# well as via commit.
+def handle_defunct_lock(sbox):
+  "verify behavior when a lock in a wc is defunct"
+
+  # The lock is destroyed from a second WC; updating the first WC must
+  # quietly drop the now-defunct lock token (status shows no lock).
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # set up our expected status
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+  # lock the file
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', '', sbox.ospath('iota'))
+
+  # Make a second copy of the working copy
+  wc_b = sbox.add_wc_path('_b')
+  svntest.actions.duplicate_dir(wc_dir, wc_b)
+  file_path_b = sbox.ospath('iota', wc_dir=wc_b)
+
+  # --- Meanwhile, in our other working copy... ---
+
+  # Try unlocking the file in the second wc.
+  svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock',
+                                     file_path_b)
+
+
+  # update the 1st wc, which should clear the lock there
+  sbox.simple_update()
+
+  # Make sure the file is unlocked
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+
+#----------------------------------------------------------------------
+# II.B.1: Set "svn:needs-lock" property on file in wc A. Checkout wc
+# B and verify that that file is set as read-only.
+#
+# Tests propset, propdel, lock, and unlock
+def enforce_lock(sbox):
+ "verify svn:needs-lock read-only behavior"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ iota_path = sbox.ospath('iota')
+ lambda_path = sbox.ospath('A/B/lambda')
+ mu_path = sbox.ospath('A/mu')
+
+ # svn:needs-lock value should be forced to a '*'
+ svntest.actions.set_prop('svn:needs-lock', 'foo', iota_path)
+ svntest.actions.set_prop('svn:needs-lock', '*', lambda_path)
+ expected_err = ".*svn: warning: W125005: To turn off the svn:needs-lock property,.*"
+ svntest.actions.set_prop('svn:needs-lock', ' ', mu_path, expected_err)
+
+ # Check svn:needs-lock
+ svntest.actions.check_prop('svn:needs-lock', iota_path, [b'*'])
+ svntest.actions.check_prop('svn:needs-lock', lambda_path, [b'*'])
+ svntest.actions.check_prop('svn:needs-lock', mu_path, [b'*'])
+
+ svntest.main.run_svn(None, 'commit',
+ '-m', '', iota_path, lambda_path, mu_path)
+
+ # Now make sure that the perms were flipped on all files
+ if os.name == 'posix':
+ mode = stat.S_IWGRP | stat.S_IWOTH | stat.S_IWRITE
+ if ((os.stat(iota_path)[0] & mode)
+ or (os.stat(lambda_path)[0] & mode)
+ or (os.stat(mu_path)[0] & mode)):
+ logger.warn("Setting 'svn:needs-lock' property on a file failed to set")
+ logger.warn("file mode to read-only.")
+ raise svntest.Failure
+
+ # obtain a lock on one of these files...
+ svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+ '-m', '', iota_path)
+
+ # ...and verify that the write bit gets set...
+ if not (os.stat(iota_path)[0] & mode):
+ logger.warn("Locking a file with 'svn:needs-lock' failed to set write bit.")
+ raise svntest.Failure
+
+ # ...and unlock it...
+ svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock',
+ iota_path)
+
+ # ...and verify that the write bit gets unset
+ if (os.stat(iota_path)[0] & mode):
+ logger.warn("Unlocking a file with 'svn:needs-lock' failed to unset write bit.")
+ raise svntest.Failure
+
+ # Verify that removing the property restores the file to read-write
+ svntest.main.run_svn(None, 'propdel', 'svn:needs-lock', iota_path)
+ if not (os.stat(iota_path)[0] & mode):
+ logger.warn("Deleting 'svn:needs-lock' failed to set write bit.")
+ raise svntest.Failure
+
+#----------------------------------------------------------------------
+# Test that updating a file with the "svn:needs-lock" property works,
+# especially on Windows, where renaming A to B fails if B already
+# exists and has its read-only bit set. See also issue #2278.
+@Issue(2278)
+def update_while_needing_lock(sbox):
+  "update handles svn:needs-lock correctly"
+
+  sbox.build()
+
+  # r2: add svn:needs-lock to iota, making it read-only on disk.
+  sbox.simple_propset('svn:needs-lock', 'foo', 'iota')
+  sbox.simple_commit('iota')
+  sbox.simple_update()
+
+  # Lock, modify, commit, unlock, to create r3.
+  # (The appended text says "r2" but this commit is in fact r3; the text
+  # content is irrelevant to the test.)
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', '', sbox.ospath('iota'))
+  sbox.simple_append('iota', 'This line added in r2.\n')
+  sbox.simple_commit('iota') # auto-unlocks
+
+  # Backdate to r2.
+  sbox.simple_update(revision=2)
+
+  # Try updating forward to r3 again. This is where the bug happened:
+  # on Windows, renaming over an existing read-only file fails.
+  sbox.simple_update(revision=3)
+
+
+#----------------------------------------------------------------------
+# Tests update / checkout with changing props
+def defunct_lock(sbox):
+  "verify svn:needs-lock behavior with defunct lock"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Make a second copy of the working copy
+  wc_b = sbox.add_wc_path('_b')
+  svntest.actions.duplicate_dir(wc_dir, wc_b)
+
+  iota_path = sbox.ospath('iota')
+  iota_path_b = sbox.ospath('iota', wc_dir=wc_b)
+
+  # Write-permission bits that svn:needs-lock should keep cleared.
+  mode = stat.S_IWGRP | stat.S_IWOTH | stat.S_IWRITE
+
+  # Set the prop in wc A
+  sbox.simple_propset('svn:needs-lock', 'foo', 'iota')
+
+  # commit r2
+  sbox.simple_commit('iota')
+
+  # update wc_b
+  svntest.main.run_svn(None, 'update', wc_b)
+
+  # lock iota in wc_b
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', '', iota_path_b)
+
+  # break the lock on iota from wc A ('--force' steals it), which makes
+  # the lock token still held by wc_b defunct
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '--force',
+                                     '-m', '', iota_path)
+  # update wc_b; this should notice the defunct lock and drop it
+  svntest.main.run_svn(None, 'update', wc_b)
+
+  # make sure that iota got set to read-only
+  if (os.stat(iota_path_b)[0] & mode):
+    logger.warn("Upon removal of a defunct lock, a file with 'svn:needs-lock'")
+    logger.warn("was not set back to read-only")
+    raise svntest.Failure
+
+
+
+#----------------------------------------------------------------------
+# Tests dealing with a lock on a deleted path
+def deleted_path_lock(sbox):
+  "verify lock removal on a deleted path"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  iota_path = sbox.ospath('iota')
+  iota_url = sbox.repo_url + '/iota'
+
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', '', iota_path)
+
+  # Delete and commit with '--no-unlock' so the repository lock survives
+  # the removal of the path.
+  sbox.simple_rm('iota')
+  svntest.actions.run_and_verify_svn(None, [], 'commit',
+                                     '--no-unlock',
+                                     '-m', '', iota_path)
+
+  # Now make sure that we can delete the lock from iota via a URL
+  svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock',
+                                     iota_url)
+
+
+
+#----------------------------------------------------------------------
+# Tests dealing with locking and unlocking
+def lock_unlock(sbox):
+  "lock and unlock some files"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  pi_path = sbox.ospath('A/D/G/pi')
+  rho_path = sbox.ospath('A/D/G/rho')
+  tau_path = sbox.ospath('A/D/G/tau')
+
+  # 'K' in the lock column means this WC holds the lock token.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau', writelocked='K')
+
+  # Lock all three files in one invocation.
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', '', pi_path, rho_path, tau_path)
+
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  expected_status.tweak('A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau', writelocked=None)
+
+  # Unlock them all again and verify the lock column is cleared.
+  svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock',
+                                     pi_path, rho_path, tau_path)
+
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+# Tests dealing with directory deletion and locks
+def deleted_dir_lock(sbox):
+  "verify removal of a directory with locks inside"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  pi_path = sbox.ospath('A/D/G/pi')
+  rho_path = sbox.ospath('A/D/G/rho')
+  tau_path = sbox.ospath('A/D/G/tau')
+
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', '', pi_path, rho_path, tau_path)
+
+  # Deleting the parent directory of locked files and committing with
+  # '--no-unlock' must succeed (the interesting part is that the commit
+  # does not trip over the locks held on the children).
+  sbox.simple_rm('A/D/G') # the parent directory
+  svntest.actions.run_and_verify_svn(None, [], 'commit',
+                                     '--no-unlock',
+                                     '-m', '', sbox.ospath('A/D/G'))
+
+#----------------------------------------------------------------------
+# III.c : Lock a file and check the output of 'svn stat' from the same
+# working copy and another.
+def lock_status(sbox):
+  "verify status of lock in working copy"
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Make a second copy of the working copy
+  wc_b = sbox.add_wc_path('_b')
+  svntest.actions.duplicate_dir(wc_dir, wc_b)
+
+  # lock a file as wc_author
+  # (fname/file_path appear unused below -- leftovers from an older
+  # version of this test; sbox.ospath('iota') is used directly instead.)
+  fname = 'iota'
+  file_path = os.path.join(sbox.wc_dir, fname)
+
+  sbox.simple_append('iota', "This is a spreadsheet\n")
+  sbox.simple_commit('iota')
+
+  svntest.main.run_svn(None, 'lock', '-m', '', sbox.ospath('iota'))
+
+  # 'K' == lock token held in this working copy.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('iota', wc_rev=2, writelocked='K')
+
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Verify status again after modifying the file
+  sbox.simple_append('iota', 'check stat output after mod')
+
+  expected_status.tweak('iota', status='M ')
+
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Verify status of lock from another working copy:
+  # 'O' == locked, but the token is held elsewhere ("Other").
+  svntest.main.run_svn(None, 'update', wc_b)
+  expected_status = svntest.actions.get_virginal_state(wc_b, 2)
+  expected_status.tweak('iota', writelocked='O')
+
+  svntest.actions.run_and_verify_status(wc_b, expected_status)
+
+#----------------------------------------------------------------------
+# III.c : Steal lock on a file from another working copy with 'svn lock
+# --force', and check the status of lock in the repository from the
+# working copy in which the file was initially locked.
+def stolen_lock_status(sbox):
+  "verify status of stolen lock"
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Make a second copy of the working copy
+  wc_b = sbox.add_wc_path('_b')
+  svntest.actions.duplicate_dir(wc_dir, wc_b)
+
+  # lock a file as wc_author
+  fname = 'iota'
+  file_path = os.path.join(sbox.wc_dir, fname)
+  file_path_b = os.path.join(wc_b, fname)
+
+  svntest.main.file_append(file_path, "This is a spreadsheet\n")
+  svntest.main.run_svn(None, 'commit',
+                       '-m', '', file_path)
+
+  svntest.main.run_svn(None, 'lock',
+                       '-m', '', file_path)
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak(fname, wc_rev=2)
+  expected_status.tweak(fname, writelocked='K')
+
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Forcibly lock same file (steal lock) from another working copy
+  svntest.main.run_svn(None, 'update', wc_b)
+  svntest.main.run_svn(None, 'lock',
+                       '-m', '', '--force', file_path_b)
+
+  # Verify status from working copy where file was initially locked:
+  # 'T' == the token held here is sTolen (repository lock differs).
+  expected_status.tweak(fname, writelocked='T')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+# III.c : Break lock from another working copy with 'svn unlock --force'
+# and verify the status of the lock in the repository with 'svn stat -u'
+# from the working copy in which the file was initially locked
+def broken_lock_status(sbox):
+  "verify status of broken lock"
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Make a second copy of the working copy
+  wc_b = sbox.add_wc_path('_b')
+  svntest.actions.duplicate_dir(wc_dir, wc_b)
+
+  # lock a file as wc_author
+  fname = 'iota'
+  file_path = os.path.join(sbox.wc_dir, fname)
+  file_path_b = os.path.join(wc_b, fname)
+
+  svntest.main.file_append(file_path, "This is a spreadsheet\n")
+  svntest.main.run_svn(None, 'commit',
+                       '-m', '', file_path)
+  svntest.main.run_svn(None, 'lock',
+                       '-m', '', file_path)
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak(fname, wc_rev=2)
+  expected_status.tweak(fname, writelocked='K')
+
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Forcibly unlock the same file (break lock) from another working copy
+  svntest.main.run_svn(None, 'update', wc_b)
+  svntest.main.run_svn(None, 'unlock',
+                       '--force', file_path_b)
+
+  # Verify status from working copy where file was initially locked:
+  # 'B' == the token held here is Broken (no lock in the repository).
+  expected_status.tweak(fname, writelocked='B')
+
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+# Invalid input test - lock non-existent file
+def lock_non_existent_file(sbox):
+  "verify error on locking non-existent file"
+
+  sbox.build()
+  fname = 'A/foo'
+  file_path = os.path.join(sbox.wc_dir, fname)
+
+  # First argument 1 == a non-zero exit code is expected.
+  exit_code, output, error = svntest.main.run_svn(1, 'lock',
+                                                  '-m', '', file_path)
+
+  # Scan stderr for the expected message; the for/else idiom runs the
+  # else-branch only when no line matched.
+  error_msg = "The node '%s' was not found." % os.path.abspath(file_path)
+  for line in error:
+    if line.find(error_msg) != -1:
+      break
+  else:
+    logger.warn("Error: %s : not found in: %s" % (error_msg, error))
+    raise svntest.Failure
+
+#----------------------------------------------------------------------
+# Check that locking an out-of-date file fails.
+def out_of_date(sbox):
+  "lock an out-of-date file and ensure failure"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Make a second copy of the working copy
+  wc_b = sbox.add_wc_path('_b')
+  svntest.actions.duplicate_dir(wc_dir, wc_b)
+
+  fname = 'iota'
+  file_path = os.path.join(sbox.wc_dir, fname)
+  file_path_b = os.path.join(wc_b, fname)
+
+  # Make a new revision of the file in the first WC.
+  svntest.main.file_append(file_path, "This represents a binary file\n")
+  svntest.main.run_svn(None, 'commit',
+                       '-m', '', file_path)
+
+  # --- Meanwhile, in our other working copy... ---
+  # The file there is still at r1, so locking it must fail with an
+  # out-of-date error (expected on stderr, no stdout expected).
+  svntest.actions.run_and_verify_svn(None,
+                                     ".*newer version of '/iota' exists",
+                                     'lock',
+                                     '--username', svntest.main.wc_author2,
+                                     '-m', '', file_path_b)
+
+#----------------------------------------------------------------------
+# Tests reverting a svn:needs-lock file
+def revert_lock(sbox):
+  "verify svn:needs-lock behavior with revert"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  iota_path = sbox.ospath('iota')
+
+  # Write-permission bits that svn:needs-lock should keep cleared.
+  mode = stat.S_IWGRP | stat.S_IWOTH | stat.S_IWRITE
+
+  # set the prop in wc
+  svntest.actions.run_and_verify_svn(None, [], 'propset',
+                                     'svn:needs-lock', 'foo', iota_path)
+
+  # commit r2
+  svntest.actions.run_and_verify_svn(None, [], 'commit',
+                                     '-m', '', iota_path)
+
+  # make sure that iota got set to read-only
+  if (os.stat(iota_path)[0] & mode):
+    logger.warn("Committing a file with 'svn:needs-lock'")
+    logger.warn("did not set the file to read-only")
+    raise svntest.Failure
+
+  # verify status is as we expect
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('iota', wc_rev=2)
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # remove read-only-ness
+  svntest.actions.run_and_verify_svn(None, [], 'propdel',
+                                     'svn:needs-lock', iota_path)
+
+  # make sure that iota got read-only-ness removed
+  if (os.stat(iota_path)[0] & mode == 0):
+    logger.warn("Deleting the 'svn:needs-lock' property ")
+    logger.warn("did not remove read-only-ness")
+    raise svntest.Failure
+
+  # revert the change (the local propdel)
+  svntest.actions.run_and_verify_svn(None, [], 'revert', iota_path)
+
+  # make sure that iota got set back to read-only
+  if (os.stat(iota_path)[0] & mode):
+    logger.warn("Reverting a file with 'svn:needs-lock'")
+    logger.warn("did not set the file back to read-only")
+    raise svntest.Failure
+
+  # try propdel and revert from a different directory so
+  # full filenames are used
+  # (extra_name appears unused below -- presumably a leftover; TODO confirm)
+  extra_name = 'xx'
+
+  # now lock the file
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', '', iota_path)
+
+  # modify it
+  svntest.main.file_append(iota_path, "This line added\n")
+
+  expected_status.tweak(wc_rev=1)
+  expected_status.tweak('iota', wc_rev=2)
+  expected_status.tweak('iota', status='M ', writelocked='K')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # revert it
+  svntest.actions.run_and_verify_svn(None, [], 'revert', iota_path)
+
+  # make sure it is still writable since we have the lock
+  if (os.stat(iota_path)[0] & mode == 0):
+    logger.warn("Reverting a 'svn:needs-lock' file (with lock in wc) ")
+    logger.warn("did not leave the file writable")
+    raise svntest.Failure
+
+
+#----------------------------------------------------------------------
+def examine_lock_via_url(sbox):
+  "examine the fields of a lock from a URL"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # (comment/file_path are unused below -- only file_url is passed on.)
+  fname = 'iota'
+  comment = 'This is a lock test.'
+  file_path = os.path.join(sbox.wc_dir, fname)
+  file_url = sbox.repo_url + '/' + fname
+
+  # lock the file url and check the contents of lock
+  svntest.actions.run_and_validate_lock(file_url,
+                                        svntest.main.wc_author2)
+
+#----------------------------------------------------------------------
+def lock_several_files(sbox):
+  "lock/unlock several files in one go"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Deliberately have no direct child of A as a target
+  iota_path = os.path.join(sbox.wc_dir, 'iota')
+  lambda_path = os.path.join(sbox.wc_dir, 'A', 'B', 'lambda')
+  alpha_path = os.path.join(sbox.wc_dir, 'A', 'B', 'E', 'alpha')
+
+  # Lock all three targets in a single invocation, as wc_author2.
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '--username', svntest.main.wc_author2,
+                                     '-m', 'lock several',
+                                     iota_path, lambda_path, alpha_path)
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('iota', 'A/B/lambda', 'A/B/E/alpha', writelocked='K')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Unlock them all in one go as well.
+  svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock',
+                                     '--username', svntest.main.wc_author2,
+                                     iota_path, lambda_path, alpha_path)
+
+  expected_status.tweak('iota', 'A/B/lambda', 'A/B/E/alpha', writelocked=None)
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+def lock_switched_files(sbox):
+  "lock/unlock switched files"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  gamma_path = sbox.ospath('A/D/gamma')
+  lambda_path = sbox.ospath('A/B/lambda')
+  iota_URL = sbox.repo_url + '/iota'
+  alpha_URL = sbox.repo_url + '/A/B/E/alpha'
+
+  # Switch two WC paths to unrelated repository files.
+  svntest.actions.run_and_verify_svn(None, [], 'switch',
+                                     iota_URL, gamma_path,
+                                     '--ignore-ancestry')
+  svntest.actions.run_and_verify_svn(None, [], 'switch',
+                                     alpha_URL, lambda_path,
+                                     '--ignore-ancestry')
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/D/gamma', 'A/B/lambda', switched='S')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Lock via the switched paths.
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', 'lock several',
+                                     gamma_path, lambda_path)
+
+  expected_status.tweak('A/D/gamma', 'A/B/lambda', writelocked='K')
+
+  # In WC-NG locks are kept per working copy, not per file, so the
+  # unswitched paths pointing at the same repository nodes show 'K' too.
+  expected_status.tweak('A/B/E/alpha', 'iota', writelocked='K')
+
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock',
+                                     gamma_path, lambda_path)
+
+  expected_status.tweak('A/D/gamma', 'A/B/lambda', writelocked=None)
+  expected_status.tweak('A/B/E/alpha', 'iota', writelocked=None)
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+def lock_uri_encoded(sbox):
+  "lock and unlock a file with an URI-unsafe name"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # lock a file as wc_author; the space in the name needs URI-encoding.
+  fname = 'amazing space'
+  file_path = sbox.ospath(fname)
+
+  svntest.main.file_append(file_path, "This represents a binary file\n")
+  svntest.actions.run_and_verify_svn(None, [], "add", file_path)
+
+  expected_output = svntest.wc.State(wc_dir, {
+    fname : Item(verb='Adding'),
+    })
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({ fname: Item(wc_rev=2, status='  ') })
+
+  # Commit the file.
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status,
+                                        [],
+                                        file_path)
+
+  # Lock via the WC path.
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', '', file_path)
+
+  # Make sure that the file was locked.
+  expected_status.tweak(fname, writelocked='K')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock',
+                                     file_path)
+
+  # Make sure it was successfully unlocked again.
+  expected_status.tweak(fname, writelocked=None)
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # And now the URL case.
+  file_url = sbox.repo_url + '/' + fname
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', '', file_url)
+
+  # Make sure that the file was locked ('O': the token is not in this WC
+  # because the lock was taken via URL).
+  expected_status.tweak(fname, writelocked='O')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock',
+                                     file_url)
+
+  # Make sure it was successfully unlocked again.
+  expected_status.tweak(fname, writelocked=None)
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+#----------------------------------------------------------------------
+# A regression test for a bug when svn:needs-lock and svn:executable
+# interact badly. The bug was fixed in trunk @ r854933.
+@SkipUnless(svntest.main.is_posix_os)
+def lock_and_exebit1(sbox):
+  "svn:needs-lock and svn:executable, part I"
+
+  # Owner permission bits checked throughout (POSIX-only test).
+  mode_w = stat.S_IWUSR
+  mode_x = stat.S_IXUSR
+  mode_r = stat.S_IRUSR
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  gamma_path = sbox.ospath('A/D/gamma')
+
+  # Set both properties; a non-'*' value triggers the W125005 warning
+  # but the property is still set.
+  expected_err = ".*svn: warning: W125005: To turn off the svn:needs-lock property,.*"
+  svntest.actions.run_and_verify_svn2(None, expected_err, 0,
+                                      'ps', 'svn:needs-lock', ' ', gamma_path)
+
+  expected_err = ".*svn: warning: W125005: To turn off the svn:executable property,.*"
+  svntest.actions.run_and_verify_svn2(None, expected_err, 0,
+                                      'ps', 'svn:executable', ' ', gamma_path)
+
+  # commit
+  svntest.actions.run_and_verify_svn(None, [], 'commit',
+                                     '-m', '', gamma_path)
+  # mode should be +r, -w, +x
+  gamma_stat = os.stat(gamma_path)[0]
+  if (not gamma_stat & mode_r
+      or gamma_stat & mode_w
+      or not gamma_stat & mode_x):
+    logger.warn("Committing a file with 'svn:needs-lock, svn:executable'")
+    logger.warn("did not set the file to read-only, executable")
+    raise svntest.Failure
+
+  # lock
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', '', gamma_path)
+  # mode should be +r, +w, +x
+  gamma_stat = os.stat(gamma_path)[0]
+  if (not gamma_stat & mode_r
+      or not gamma_stat & mode_w
+      or not gamma_stat & mode_x):
+    logger.warn("Locking a file with 'svn:needs-lock, svn:executable'")
+    logger.warn("did not set the file to read-write, executable")
+    raise svntest.Failure
+
+  # modify
+  svntest.main.file_append(gamma_path, "check stat output after mod & unlock")
+
+  # unlock
+  svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock',
+                                     gamma_path)
+
+  # Mode should be +r, -w, +x
+  gamma_stat = os.stat(gamma_path)[0]
+  if (not gamma_stat & mode_r
+      or gamma_stat & mode_w
+      or not gamma_stat & mode_x):
+    logger.warn("Unlocking a file with 'svn:needs-lock, svn:executable'")
+    logger.warn("did not set the file to read-only, executable")
+    raise svntest.Failure
+
+  # ci
+  svntest.actions.run_and_verify_svn(None, [], 'commit',
+                                     '-m', '', gamma_path)
+
+  # Mode should be still +r, -w, +x
+  gamma_stat = os.stat(gamma_path)[0]
+  if (not gamma_stat & mode_r
+      or gamma_stat & mode_w
+      or not gamma_stat & mode_x):
+    logger.warn("Commiting a file with 'svn:needs-lock, svn:executable'")
+    logger.warn("after unlocking modified file's permissions")
+    raise svntest.Failure
+
+
+#----------------------------------------------------------------------
+# A variant of lock_and_exebit1: same test without unlock
+@SkipUnless(svntest.main.is_posix_os)
+def lock_and_exebit2(sbox):
+  "svn:needs-lock and svn:executable, part II"
+
+  # Owner permission bits checked throughout (POSIX-only test).
+  mode_w = stat.S_IWUSR
+  mode_x = stat.S_IXUSR
+  mode_r = stat.S_IRUSR
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  gamma_path = sbox.ospath('A/D/gamma')
+
+  expected_err = ".*svn: warning: W125005: To turn off the svn:needs-lock property,.*"
+  svntest.actions.run_and_verify_svn2(None, expected_err, 0,
+                                      'ps', 'svn:needs-lock', ' ', gamma_path)
+
+  expected_err = ".*svn: warning: W125005: To turn off the svn:executable property,.*"
+  svntest.actions.run_and_verify_svn2(None, expected_err, 0,
+                                      'ps', 'svn:executable', ' ', gamma_path)
+
+  # commit
+  svntest.actions.run_and_verify_svn(None, [], 'commit',
+                                     '-m', '', gamma_path)
+  # mode should be +r, -w, +x
+  gamma_stat = os.stat(gamma_path)[0]
+  if (not gamma_stat & mode_r
+      or gamma_stat & mode_w
+      or not gamma_stat & mode_x):
+    logger.warn("Committing a file with 'svn:needs-lock, svn:executable'")
+    logger.warn("did not set the file to read-only, executable")
+    raise svntest.Failure
+
+  # lock
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', '', gamma_path)
+  # mode should be +r, +w, +x
+  gamma_stat = os.stat(gamma_path)[0]
+  if (not gamma_stat & mode_r
+      or not gamma_stat & mode_w
+      or not gamma_stat & mode_x):
+    logger.warn("Locking a file with 'svn:needs-lock, svn:executable'")
+    logger.warn("did not set the file to read-write, executable")
+    raise svntest.Failure
+
+  # modify
+  svntest.main.file_append(gamma_path, "check stat output after mod & unlock")
+
+  # commit (unlike part I, no explicit unlock first: the commit itself
+  # releases the lock)
+  svntest.actions.run_and_verify_svn(None, [], 'commit',
+                                     '-m', '', gamma_path)
+
+  # Mode should be +r, -w, +x
+  gamma_stat = os.stat(gamma_path)[0]
+  if (not gamma_stat & mode_r
+      or gamma_stat & mode_w
+      or not gamma_stat & mode_x):
+    logger.warn("Commiting a file with 'svn:needs-lock, svn:executable'")
+    logger.warn("did not set the file to read-only, executable")
+    raise svntest.Failure
+
+def commit_xml_unsafe_file_unlock(sbox):
+  "commit file with xml-unsafe name and release lock"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # '&' must be escaped in the XML-based WC/DAV data paths.
+  fname = 'foo & bar'
+  file_path = os.path.join(sbox.wc_dir, fname)
+  svntest.main.file_append(file_path, "Initial data.\n")
+  svntest.main.run_svn(None, 'add', file_path)
+  svntest.main.run_svn(None,
+                       'commit', '-m', '', file_path)
+
+  # lock fname as wc_author
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', 'some lock comment', file_path)
+
+  # make a change and commit it, allowing lock to be released
+  svntest.main.file_append(file_path, "Followup data.\n")
+  svntest.main.run_svn(None,
+                       'commit', '-m', '', file_path)
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({ fname : Item(status='  ', wc_rev=3), })
+
+  # Make sure the file is unlocked
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+def repos_lock_with_info(sbox):
+  "verify info path@X or path -rY return repos lock"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  fname = 'iota'
+  comment = 'This is a lock test.'
+  file_path = os.path.join(sbox.wc_dir, fname)
+  file_url = sbox.repo_url + '/' + fname
+
+  # lock wc file
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '--username', svntest.main.wc_author2,
+                                     '-m', comment, file_path)
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak(fname, writelocked='K')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Steal lock on wc file via its URL, so the WC token becomes stale ('T').
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '--username', svntest.main.wc_author2,
+                                     '--force',
+                                     '-m', comment, file_url)
+  expected_status.tweak(fname, writelocked='T')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Get repository lock token
+  repos_lock_token \
+    = svntest.actions.run_and_parse_info(file_url)[0]['Lock Token']
+
+  # info with revision option: must report the repository's (stolen)
+  # token, not the stale one recorded in the WC.
+  expected_infos = [
+      { 'Lock Token' : repos_lock_token },
+    ]
+  svntest.actions.run_and_verify_info(expected_infos, file_path, '-r1')
+
+  # info with peg revision
+  svntest.actions.run_and_verify_info(expected_infos, file_path + '@1')
+
+
+#----------------------------------------------------------------------
+@Issue(4126)
+def unlock_already_unlocked_files(sbox):
+  "(un)lock set of files, one already (un)locked"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Deliberately have no direct child of A as a target
+  iota_path = sbox.ospath('iota')
+  lambda_path = sbox.ospath('A/B/lambda')
+  alpha_path = sbox.ospath('A/B/E/alpha')
+  gamma_path = sbox.ospath('A/D/gamma')
+
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '--username', svntest.main.wc_author2,
+                                     '-m', 'lock several',
+                                     iota_path, lambda_path, alpha_path)
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('iota', 'A/B/lambda', 'A/B/E/alpha', writelocked='K')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Locking a mixed set: alpha is already locked (warning expected on
+  # stderr) but gamma must still be locked successfully.
+  error_msg = ".*Path '/A/B/E/alpha' is already locked by user '" + \
+              svntest.main.wc_author2 + "'.*"
+  svntest.actions.run_and_verify_svn(None, error_msg,
+                                     'lock',
+                                     '--username', svntest.main.wc_author2,
+                                     alpha_path, gamma_path)
+  expected_status.tweak('A/D/gamma', writelocked='K')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock',
+                                     '--username', svntest.main.wc_author2,
+                                     lambda_path)
+
+  expected_status.tweak('A/B/lambda', writelocked=None)
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Unlocking a mixed set: lambda is already unlocked (warning expected,
+  # wording differs between RA layers) but iota/alpha must still unlock.
+  error_msg = "(.*No lock on path '/A/B/lambda'.*)" + \
+              "|(.*'A/B/lambda' is not locked.*)"
+  svntest.actions.run_and_verify_svn(None, error_msg,
+                                     'unlock',
+                                     '--username', svntest.main.wc_author2,
+                                     '--force',
+                                     iota_path, lambda_path, alpha_path)
+
+  expected_status.tweak('iota', 'A/B/E/alpha', writelocked=None)
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+def info_moved_path(sbox):
+  "show correct lock info on moved path"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  fname = sbox.ospath("iota")
+  fname2 = sbox.ospath("iota2")
+
+  # Move iota, creating r2.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     "mv", fname, fname2)
+  expected_output = svntest.wc.State(wc_dir, {
+    'iota2' : Item(verb='Adding'),
+    'iota' : Item(verb='Deleting'),
+    })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    "iota2" : Item(status='  ', wc_rev=2)
+    })
+  expected_status.remove("iota")
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status)
+
+  # Create a new, unrelated iota, creating r3.
+  svntest.main.file_append(fname, "Another iota")
+  svntest.actions.run_and_verify_svn(None, [],
+                                     "add", fname)
+  expected_output = svntest.wc.State(wc_dir, {
+    'iota' : Item(verb='Adding'),
+    })
+  expected_status.add({
+    "iota" : Item(status='  ', wc_rev=3)
+    })
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status)
+
+  # Lock the new iota.
+  svntest.actions.run_and_verify_svn(".*locked by user", [],
+                                     "lock", fname)
+  expected_status.tweak("iota", writelocked="K")
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Get info for old iota at r1. This shouldn't give us any lock info:
+  # the lock belongs to the new, unrelated iota.
+  expected_infos = [
+      { 'URL' : '.*' ,
+        'Lock Token' : None },
+    ]
+  svntest.actions.run_and_verify_info(expected_infos, fname2, '-r1')
+
+#----------------------------------------------------------------------
+def ls_url_encoded(sbox):
+  "ls locked path needing URL encoding"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  dirname = sbox.ospath("space dir")
+  fname = os.path.join(dirname, "f")
+
+  # Create a dir with a space in its name and a file therein.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     "mkdir", dirname)
+  svntest.main.file_append(fname, "someone was here")
+  svntest.actions.run_and_verify_svn(None, [],
+                                     "add", fname)
+  expected_output = svntest.wc.State(wc_dir, {
+    'space dir' : Item(verb='Adding'),
+    'space dir/f' : Item(verb='Adding'),
+    })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    "space dir" : Item(status='  ', wc_rev=2),
+    "space dir/f" : Item(status='  ', wc_rev=2),
+    })
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status)
+
+  # Lock the file.
+  svntest.actions.run_and_verify_svn(".*locked by user",
+                                     [], "lock", fname)
+
+  # Make sure ls shows it being locked ('O' flag in the verbose listing;
+  # the alternation also accepts the parent-dir line).
+  # NOTE(review): '\.' in a non-raw string relies on Python leaving
+  # unknown escapes intact -- a raw string would be cleaner; confirm
+  # before touching the regex.
+  expected_output = " +2 " + re.escape(svntest.main.wc_author) + " +O .+f|" \
+                    " +2 " + re.escape(svntest.main.wc_author) + " .+\./"
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     "list", "-v", dirname)
+
+#----------------------------------------------------------------------
+# Make sure unlocking a path with the wrong lock token fails.
+@Issue(3794)
+def unlock_wrong_token(sbox):
+  "verify unlocking with wrong lock token"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # lock a file as wc_author
+  fname = 'iota'
+  file_path = os.path.join(sbox.wc_dir, fname)
+  file_url = sbox.repo_url + "/iota"
+
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     file_path)
+
+  # Steal the lock as the same author, but using a URL to keep the old token
+  # in the WC.
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     "--force", file_url)
+
+  # Then, unlocking the WC path should fail, because the WC's token no
+  # longer matches the repository's.
+  ### The error message returned is actually this, but let's worry about that
+  ### another day...
+  svntest.actions.run_and_verify_svn(None, ".*(No lock on path)",
+                                     'unlock', file_path)
+
+#----------------------------------------------------------------------
+# Verify that info shows lock info for locked files with URI-unsafe names
+# when run in recursive mode.
+def examine_lock_encoded_recurse(sbox):
+  "verify recursive info shows lock info"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # File name contains a space, so it needs URI encoding.
+  fname = 'A/B/F/one iota'
+  file_path = os.path.join(sbox.wc_dir, fname)
+
+  svntest.main.file_append(file_path, "This represents a binary file\n")
+  svntest.actions.run_and_verify_svn(None, [], "add", file_path)
+
+  expected_output = svntest.wc.State(wc_dir, {
+    fname : Item(verb='Adding'),
+    })
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({ fname: Item(wc_rev=2, status='  ') })
+
+  # Commit the file.
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status,
+                                        [],
+                                        file_path)
+
+  # lock the file and validate the contents
+  svntest.actions.run_and_validate_lock(file_path,
+                                        svntest.main.wc_author)
+
+# Trying to unlock someone else's lock with --force should fail.
+@Issue(3801)
+def unlocked_lock_of_other_user(sbox):
+  "unlock file locked by other user"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # lock a file with user jrandom
+  pi_path = sbox.ospath('A/D/G/pi')
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/D/G/pi', writelocked='K')
+
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', '', pi_path)
+
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # now try to unlock with user jconstant, should fail but exit 0.
+  expected_err = "svn: warning: W160039: User '%s' is trying to use a lock owned by "\
+                 "'%s'.*" % (svntest.main.wc_author2, svntest.main.wc_author)
+  svntest.actions.run_and_verify_svn([], expected_err,
+                                     'unlock',
+                                     '--username', svntest.main.wc_author2,
+                                     pi_path)
+  # The lock must still be intact afterwards.
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+#----------------------------------------------------------------------
+def lock_funky_comment_chars(sbox):
+ "lock a file using a comment with xml special chars"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # lock a file as wc_author
+ fname = 'iota'
+ file_path = os.path.join(sbox.wc_dir, fname)
+
+ svntest.main.file_append(file_path, "This represents a binary file\n")
+ svntest.main.run_svn(None, 'commit',
+ '-m', '', file_path)
+ svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+ '-m', 'lock & load', file_path)
+
+#----------------------------------------------------------------------
+# Check that the svn:needs-lock usage applies to a specific location
+# in a working copy, not to the working copy overall.
+def lock_twice_in_one_wc(sbox):
+ "try to lock a file twice in one working copy"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ mu_path = sbox.ospath('A/mu')
+ mu2_path = sbox.ospath('A/B/mu')
+
+ # Create a needs-lock file
+ svntest.actions.set_prop('svn:needs-lock', '*', mu_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'commit', wc_dir, '-m', '')
+
+ # Mark the file readonly
+ svntest.actions.run_and_verify_svn(None, [],
+ 'update', wc_dir)
+
+ # Switch a second location for the same file in the same working copy
+ svntest.actions.run_and_verify_svn(None, [],
+ 'switch', sbox.repo_url + '/A',
+ sbox.ospath('A/B'),
+ '--ignore-ancestry')
+
+ # Lock location 1
+ svntest.actions.run_and_verify_svn(None, [],
+ 'lock', mu_path, '-m', 'Locked here')
+
+ # Locking in location 2 should fail
+ svntest.actions.run_and_verify_svn(None, ".*is already locked.*",
+ 'lock', '-m', '', mu2_path)
+
+ # Change the file anyway
+ os.chmod(mu2_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
+ svntest.main.file_append(mu2_path, "Updated text")
+
+ # Commit will just succeed as the DB owns the lock. It's a user decision
+ # to commit the other target instead of the one originally locked
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'commit', mu2_path, '-m', '')
+
+#----------------------------------------------------------------------
+# Test for issue #3524 'Locking path via ra_serf which doesn't exist in
+# HEAD triggers assert'
+@Issue(3524)
+def lock_path_not_in_head(sbox):
+ "lock path that does not exist in HEAD"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ D_path = sbox.ospath('A/D')
+ lambda_path = sbox.ospath('A/B/lambda')
+
+ # Commit deletion of A/D and A/B/lambda as r2, then update the WC
+ # back to r1. Then attempt to lock some paths that no longer exist
+ # in HEAD. These should fail gracefully.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'delete', lambda_path, D_path)
+ svntest.actions.run_and_verify_svn(None, [], 'commit',
+ '-m', 'Some deletions', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'up', '-r1', wc_dir)
+ expected_lock_fail_err_re = "svn: warning: W160042: " \
+ "(Path .* doesn't exist in HEAD revision)"
+ # Issue #3524 These lock attempts were triggering an assert over ra_serf:
+ #
+ # working_copies\lock_tests-37>svn lock A\D
+ # ..\..\..\subversion\libsvn_client\ra.c:275: (apr_err=235000)
+ # svn: In file '..\..\..\subversion\libsvn_ra_serf\util.c' line 1120:
+ # assertion failed (ctx->status_code)
+ #
+ # working_copies\lock_tests-37>svn lock A\B\lambda
+ # ..\..\..\subversion\libsvn_client\ra.c:275: (apr_err=235000)
+ # svn: In file '..\..\..\subversion\libsvn_ra_serf\util.c' line 1120:
+ # assertion failed (ctx->status_code)
+ svntest.actions.run_and_verify_svn(None, expected_lock_fail_err_re,
+ 'lock', lambda_path)
+
+ expected_err = 'svn: E155008: The node \'.*D\' is not a file'
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'lock', D_path)
+
+
+#----------------------------------------------------------------------
+def verify_path_escaping(sbox):
+ "verify escaping of lock paths"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add test paths using two characters that need escaping in a url, but
+ # are within the normal ascii range
+ file1 = sbox.ospath('file #1')
+ file2 = sbox.ospath('file #2')
+ file3 = sbox.ospath('file #3')
+
+ svntest.main.file_write(file1, 'File 1')
+ svntest.main.file_write(file2, 'File 2')
+ svntest.main.file_write(file3, 'File 3')
+
+ svntest.main.run_svn(None, 'add', file1, file2, file3)
+
+ sbox.simple_commit(message='commit')
+
+ svntest.main.run_svn(None, 'lock', '-m', 'lock 1', file1)
+ svntest.main.run_svn(None, 'lock', '-m', 'lock 2', sbox.repo_url + '/file%20%232')
+ svntest.main.run_svn(None, 'lock', '-m', 'lock 3', file3)
+ svntest.main.run_svn(None, 'unlock', sbox.repo_url + '/file%20%233')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add(
+ {
+ 'file #1' : Item(status=' ', writelocked='K', wc_rev='2'),
+ 'file #2' : Item(status=' ', writelocked='O', wc_rev='2'),
+ 'file #3' : Item(status=' ', writelocked='B', wc_rev='2')
+ })
+
+ # Make sure the file locking is reported correctly
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+#----------------------------------------------------------------------
+# Issue #3674: Replace + propset of locked file fails over DAV
+@Issue(3674)
+def replace_and_propset_locked_path(sbox):
+ "test replace + propset of locked file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ mu_path = sbox.ospath('A/mu')
+ G_path = sbox.ospath('A/D/G')
+ rho_path = sbox.ospath('A/D/G/rho')
+
+ # Lock mu and A/D/G/rho.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'lock', mu_path, rho_path,
+ '-m', 'Locked')
+
+ # Now replace and propset on mu.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', '--keep-local', mu_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'add', mu_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'bar', mu_path)
+
+ # Commit mu.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'commit', '-m', '', mu_path)
+
+ # Let's try this again where directories are involved, shall we?
+ # Replace A/D/G and A/D/G/rho, propset on A/D/G/rho.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', G_path)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mkdir', G_path)
+ svntest.main.file_append(rho_path, "This is the new file 'rho'.\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'add', rho_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'bar', rho_path)
+
+ # And commit G.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'commit', '-m', '', G_path)
+
+
+#----------------------------------------------------------------------
+def cp_isnt_ro(sbox):
+ "uncommitted svn:needs-lock add/cp not read-only"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ mu_URL = sbox.repo_url + '/A/mu'
+ mu_path = sbox.ospath('A/mu')
+ mu2_path = sbox.ospath('A/mu2')
+ mu3_path = sbox.ospath('A/mu3')
+ kappa_path = sbox.ospath('kappa')
+ open(kappa_path, 'w').write("This is the file 'kappa'.\n")
+
+ ## added file
+ sbox.simple_add('kappa')
+ svntest.actions.set_prop('svn:needs-lock', 'yes', kappa_path)
+ is_writable(kappa_path)
+ sbox.simple_commit('kappa')
+ is_readonly(kappa_path)
+
+ ## versioned file
+ svntest.actions.set_prop('svn:needs-lock', 'yes', mu_path)
+ is_writable(mu_path)
+ sbox.simple_commit('A/mu')
+ is_readonly(mu_path)
+
+ # At this point, mu has 'svn:needs-lock' set
+
+ ## wc->wc copied file
+ svntest.main.run_svn(None, 'copy', mu_path, mu2_path)
+ is_writable(mu2_path)
+ sbox.simple_commit('A/mu2')
+ is_readonly(mu2_path)
+
+ ## URL->wc copied file
+ svntest.main.run_svn(None, 'copy', mu_URL, mu3_path)
+ is_writable(mu3_path)
+ sbox.simple_commit('A/mu3')
+ is_readonly(mu3_path)
+
+
+#----------------------------------------------------------------------
+# Issue #3525: Locked file which is scheduled for delete causes tree
+# conflict
+@Issue(3525)
+def update_locked_deleted(sbox):
+ "updating locked scheduled-for-delete file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ iota_path = sbox.ospath('iota')
+ mu_path = sbox.ospath('A/mu')
+ alpha_path = sbox.ospath('A/B/E/alpha')
+
+ svntest.main.run_svn(None, 'lock', '-m', 'locked', mu_path, iota_path,
+ alpha_path)
+ sbox.simple_rm('iota')
+ sbox.simple_rm('A/mu')
+ sbox.simple_rm('A/B/E')
+
+ # Create expected output tree for an update.
+ expected_output = svntest.wc.State(wc_dir, {
+ })
+
+ # Create expected status tree for the update.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/E', status='D ')
+ expected_status.tweak('iota', 'A/mu', 'A/B/E/alpha',
+ status='D ', writelocked='K')
+ expected_status.tweak('A/B/E/beta', status='D ')
+
+ svntest.actions.run_and_verify_update(wc_dir, expected_output,
+ None, expected_status)
+
+ # Now we steal the lock of iota and A/mu via URL and retry
+ svntest.main.run_svn(None, 'lock', '-m', 'locked', sbox.repo_url + '/iota',
+ '--force', sbox.repo_url + '/A/mu',
+ sbox.repo_url + '/A/B/E/alpha')
+
+ expected_status.tweak('iota', 'A/mu', 'A/B/E/alpha',
+ status='D ', writelocked='O')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(status='B '),
+ 'A/B/E/alpha' : Item(status='B '),
+ 'iota' : Item(status='B '),
+ })
+
+ svntest.actions.run_and_verify_update(wc_dir, expected_output,
+ None, expected_status)
+
+
+#----------------------------------------------------------------------
+def block_unlock_if_pre_unlock_hook_fails(sbox):
+ "block unlock operation if pre-unlock hook fails"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_dir = sbox.repo_dir
+
+ svntest.actions.create_failing_hook(repo_dir, "pre-unlock", "error text")
+
+ # lock a file.
+ pi_path = sbox.ospath('A/D/G/pi')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/G/pi', writelocked='K')
+
+ svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+ '-m', '', pi_path)
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Make sure the unlock operation fails as pre-unlock hook blocks it.
+ expected_unlock_fail_err_re = ".*error text"
+ svntest.actions.run_and_verify_svn(None, expected_unlock_fail_err_re,
+ 'unlock', pi_path)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+def lock_invalid_token(sbox):
+ "verify pre-lock hook returning invalid token"
+
+ sbox.build()
+
+ hook_path = os.path.join(sbox.repo_dir, 'hooks', 'pre-lock')
+ svntest.main.create_python_hook_script(hook_path,
+ '# encoding=utf-8\n'
+ 'import sys\n'
+ 'if sys.version_info < (3, 0):\n'
+ ' sys.stdout.write("тест")\n'
+ 'else:\n'
+ ' sys.stdout.buffer.write(("тест").encode("utf-8"))\n'
+ 'sys.exit(0)\n')
+
+ fname = 'iota'
+ file_path = os.path.join(sbox.wc_dir, fname)
+
+ svntest.actions.run_and_verify_svn(None,
+ "svn: warning: W160037: " \
+ ".*scheme.*'opaquelocktoken'",
+ 'lock', '-m', '', file_path)
+
+@Issue(3105)
+def lock_multi_wc(sbox):
+ "obtain locks in multiple working copies in one go"
+
+ sbox.build()
+
+ sbox2 = sbox.clone_dependent(copy_wc=True)
+
+ wc_name = os.path.basename(sbox.wc_dir)
+ wc2_name = os.path.basename(sbox2.wc_dir)
+
+ expected_output = svntest.verify.UnorderedOutput([
+ '\'%s\' locked by user \'jrandom\'.\n' % sbox.ospath('iota'),
+ '\'%s\' locked by user \'jrandom\'.\n' % sbox2.ospath('A/mu'),
+ ])
+
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'lock', sbox.ospath('iota'),
+ sbox2.ospath('A/mu'))
+
+ expected_output = svntest.verify.UnorderedOutput([
+ '\'%s\' unlocked.\n' % sbox.ospath('iota'),
+ '\'%s\' unlocked.\n' % sbox2.ospath('A/mu'),
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'unlock', sbox.ospath('iota'),
+ sbox2.ospath('A/mu'))
+
+@Issue(3378)
+def locks_stick_over_switch(sbox):
+ "locks are kept alive over switching"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', sbox.ospath('A'), repo_url + '/AA',
+ '-m', '')
+
+ expected_output = svntest.verify.UnorderedOutput([
+ '\'iota\' locked by user \'jrandom\'.\n',
+ '\'%s\' locked by user \'jrandom\'.\n' % os.path.join('A', 'D', 'H', 'chi'),
+ '\'%s\' locked by user \'jrandom\'.\n' % os.path.join('A', 'mu'),
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'lock', sbox.ospath('A/D/H/chi'),
+ sbox.ospath('A/mu'),
+ sbox.ospath('iota'))
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/H/chi', 'A/mu', 'iota', writelocked='K')
+
+ # Make sure the file is still locked
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ })
+
+ expected_status.tweak(wc_rev=2)
+ expected_status.tweak('', wc_rev=1)
+ expected_status.tweak('iota', writelocked='K', wc_rev=1)
+
+ switched_status = expected_status.copy()
+ switched_status.tweak(writelocked=None)
+ switched_status.tweak('iota', writelocked='K')
+ switched_status.tweak('A', switched='S')
+
+ svntest.actions.run_and_verify_switch(wc_dir, sbox.ospath('A'),
+ repo_url + '/AA',
+ expected_output, None, switched_status)
+
+ # And now switch back to verify that the locks reappear
+ expected_output = svntest.wc.State(wc_dir, {
+ })
+ svntest.actions.run_and_verify_switch(wc_dir, sbox.ospath('A'),
+ repo_url + '/A',
+ expected_output, None, expected_status)
+
+@Issue(4304)
+def lock_unlock_deleted(sbox):
+ "lock/unlock a deleted file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', sbox.ospath('A/mu'))
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='D ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_output = '\'mu\' locked by user \'jrandom\'.'
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'lock', sbox.ospath('A/mu'))
+ expected_status.tweak('A/mu', writelocked='K')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_output = '\'mu\' unlocked.'
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'unlock', sbox.ospath('A/mu'))
+ expected_status.tweak('A/mu', writelocked=None)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+@Issue(4369)
+def commit_stolen_lock(sbox):
+ "commit with a stolen lock"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_append('A/mu', 'zig-zag')
+ sbox.simple_lock('A/mu')
+
+ expected_output = '\'.*mu\' locked by user \'jrandom\'.'
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'lock', '--force',
+ sbox.repo_url + '/A/mu')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', writelocked='T')
+ err_re = "(.*E160037: Cannot verify lock on path '/A/mu')|" + \
+ "(.*E160038: '/.*/A/mu': no lock token available)"
+ svntest.actions.run_and_verify_commit(wc_dir,
+ [],
+ expected_status,
+ err_re)
+
+# When removing directories, the locks of contained files were not
+# correctly removed from the working copy database, thus they later
+# magically reappeared when new files or directories with the same
+# paths were added.
+@Issue(4364)
+def drop_locks_on_parent_deletion(sbox):
+ "drop locks when the parent is deleted"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # lock some files, and remove them.
+ sbox.simple_lock('A/B/lambda')
+ sbox.simple_lock('A/B/E/alpha')
+ sbox.simple_lock('A/B/E/beta')
+ sbox.simple_rm('A/B')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove_subtree('A/B')
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ [],
+ expected_status)
+
+ # now re-add entities to the deleted paths.
+ sbox.simple_mkdir('A/B')
+ sbox.simple_add_text('new file replacing old file', 'A/B/lambda')
+ sbox.simple_add_text('file replacing former dir', 'A/B/F')
+ # The bug also resurrected locks on directories when their path
+ # matched a former file.
+ sbox.simple_mkdir('A/B/E', 'A/B/E/alpha')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B',
+ 'A/B/E',
+ 'A/B/E/alpha',
+ 'A/B/F',
+ 'A/B/lambda',
+ wc_rev='3')
+ expected_status.remove('A/B/E/beta')
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ [],
+ expected_status)
+
+
+def copy_with_lock(sbox):
+ """copy with lock on source"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ lock_url = sbox.repo_url + '/A/B/E/alpha'
+
+ svntest.actions.run_and_validate_lock(lock_url, svntest.main.wc_author)
+ sbox.simple_copy('A/B/E', 'A/B/E2')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E2' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/E/alpha', writelocked='O')
+ expected_status.add({
+ 'A/B/E2' : Item(status=' ', wc_rev=2),
+ 'A/B/E2/alpha' : Item(status=' ', wc_rev=2),
+ 'A/B/E2/beta' : Item(status=' ', wc_rev=2),
+ })
+
+ # This is really a regression test for httpd: 2.2.25 and 2.4.6, and
+ # earlier, have a bug that causes mod_dav to check for locks on the
+ # copy source and so the commit fails.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+def lock_hook_messages(sbox):
+ "verify (un)lock message is transferred correctly"
+
+ sbox.build(create_wc = False)
+ repo_dir = sbox.repo_dir
+
+ iota_url = sbox.repo_url + "/iota"
+ mu_url = sbox.repo_url + "/A/mu"
+
+ svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+ iota_url)
+
+ error_msg = "Text with <angle brackets> & ampersand"
+ svntest.actions.create_failing_hook(repo_dir, "pre-lock", error_msg)
+ svntest.actions.create_failing_hook(repo_dir, "pre-unlock", error_msg)
+
+ _, _, actual_stderr = svntest.actions.run_and_verify_svn(
+ [], svntest.verify.AnyOutput,
+ 'lock', mu_url)
+ if len(actual_stderr) > 4:
+ actual_stderr = actual_stderr[-4:-2] + actual_stderr[-1:]
+ expected_err = [
+ 'svn: warning: W165001: ' + svntest.actions.hook_failure_message('pre-lock'),
+ error_msg + "\n",
+ "svn: E200009: One or more locks could not be obtained\n",
+ ]
+ svntest.verify.compare_and_display_lines(None, 'STDERR',
+ expected_err, actual_stderr)
+
+
+ _, _, actual_stderr = svntest.actions.run_and_verify_svn(
+ [], svntest.verify.AnyOutput,
+ 'unlock', iota_url)
+ if len(actual_stderr) > 4:
+ actual_stderr = actual_stderr[-4:-2] + actual_stderr[-1:]
+ expected_err = [
+ 'svn: warning: W165001: ' + svntest.actions.hook_failure_message('pre-unlock'),
+ error_msg + "\n",
+ "svn: E200009: One or more locks could not be released\n",
+ ]
+ svntest.verify.compare_and_display_lines(None, 'STDERR',
+ expected_err, actual_stderr)
+
+
+def failing_post_hooks(sbox):
+ "locking with failing post-lock and post-unlock"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_dir = sbox.repo_dir
+
+ svntest.actions.create_failing_hook(repo_dir, "post-lock", "error text")
+ svntest.actions.create_failing_hook(repo_dir, "post-unlock", "error text")
+
+ pi_path = sbox.ospath('A/D/G/pi')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/G/pi', writelocked='K')
+
+ if svntest.main.is_ra_type_dav():
+ expected_lock_err = []
+ expected_unlock_err = '.*svn: E165009: Unlock succeeded.*' #
+ else:
+ expected_unlock_err = expected_lock_err = ".*error text"
+
+ # Failing post-lock doesn't stop lock being created.
+ svntest.actions.run_and_verify_svn("'pi' locked by user",
+ expected_lock_err,
+ 'lock', '-m', '', pi_path)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_status.tweak('A/D/G/pi', writelocked=None)
+
+ # Failing post-unlock doesn't stop lock being removed.
+ svntest.actions.run_and_verify_svn("'pi' unlocked",
+ expected_unlock_err,
+ 'unlock', pi_path)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+def break_delete_add(sbox):
+ "break a lock, delete and add the file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ svntest.actions.run_and_verify_svn(".*locked by user", [],
+ 'lock',
+ '-m', 'some lock comment',
+ sbox.ospath('A/mu'))
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', writelocked='K')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ svntest.actions.run_and_verify_svn(".*unlocked", [],
+ 'unlock', '--force',
+ sbox.repo_url + '/A/mu')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm',
+ '-m', 'delete file',
+ sbox.repo_url + '/A/mu')
+
+ # Update removes the locked file and should remove the lock token.
+ sbox.simple_update()
+
+ # Lock token not visible on newly added file.
+ sbox.simple_append('A/mu', 'another mu')
+ sbox.simple_add('A/mu')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('A/mu', status='A ', wc_rev='-')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ ### XFAIL Broken lock token now visible in status.
+ sbox.simple_commit()
+ expected_status.tweak('A/mu', status=' ', wc_rev=3)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+def dav_lock_timeout(sbox):
+ "unlock a lock with timeout"
+
+ # Locks with timeouts are only created by generic DAV clients but a
+ # Subversion client may need to view or unlock one over any RA
+ # layer.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ svntest.main.run_lock_helper(sbox.repo_dir, 'iota', 'some_user', 999)
+ # Lock should have an expiration date
+ expiration_date = svntest.actions.run_and_parse_info(sbox.repo_url + '/iota')[0]['Lock Expires']
+
+ # Verify that there is a lock, by trying to obtain one
+ svntest.actions.run_and_verify_svn(None, ".*locked by user",
+ 'lock', '-m', '', sbox.ospath('iota'))
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', writelocked='O')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # This used to fail over serf with a parse error of the timeout.
+ expected_err = "svn: warning: W160039:"
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'unlock', sbox.repo_url + '/iota')
+
+ # Force unlock via working copy, this also used to fail over serf.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'unlock', sbox.ospath('iota'), '--force')
+ expected_status.tweak('iota', writelocked=None)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Lock again
+ svntest.main.run_lock_helper(sbox.repo_dir, 'iota', 'some_user', 999)
+ expected_status.tweak('iota', writelocked='O')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Force unlock via URL, this also used to fail over serf
+ svntest.actions.run_and_verify_svn(None, [],
+ 'unlock', sbox.repo_url + '/iota',
+ '--force')
+ expected_status.tweak('iota', writelocked=None)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Lock again
+ svntest.main.run_lock_helper(sbox.repo_dir, 'iota', 'some_user', 999)
+ expected_status.tweak('iota', writelocked='O')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Force lock via working copy, this also used to fail over serf.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'lock', sbox.ospath('iota'), '--force')
+ expected_status.tweak('iota', writelocked='K')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+@SkipUnless(svntest.main.is_ra_type_dav)
+def create_dav_lock_timeout(sbox):
+ "create generic DAV lock with timeout"
+
+ import base64
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ h = svntest.main.create_http_connection(sbox.repo_url)
+
+ lock_body = '<?xml version="1.0" encoding="utf-8" ?>' \
+ '<D:lockinfo xmlns:D="DAV:">' \
+ ' <D:lockscope><D:exclusive/></D:lockscope>' \
+ ' <D:locktype><D:write/></D:locktype>' \
+ ' <D:owner>' \
+ ' <D:href>http://a/test</D:href>' \
+ ' </D:owner>' \
+ '</D:lockinfo>'
+
+ lock_headers = {
+ 'Authorization': 'Basic ' + base64.b64encode(b'jrandom:rayjandom').decode(),
+ 'Timeout': 'Second-86400'
+ }
+
+ h.request('LOCK', sbox.repo_url + '/iota', lock_body, lock_headers)
+
+ r = h.getresponse()
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', writelocked='O')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Lock should have an expiration date
+ expiration_date = svntest.actions.run_and_parse_info(sbox.repo_url + '/iota')[0]['Lock Expires']
+
+def non_root_locks(sbox):
+ "locks for working copies not at repos root"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', sbox.repo_url, sbox.repo_url + '/X',
+ '-m', 'copy greek tree')
+
+ sbox.simple_switch(sbox.repo_url + '/X')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Lock a file
+ svntest.actions.run_and_verify_svn(".*locked by user", [],
+ 'lock', sbox.ospath('A/D/G/pi'),
+ '-m', '')
+ expected_status.tweak('A/D/G/pi', writelocked='K')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Updates don't break the lock
+ sbox.simple_update('A/D')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+ sbox.simple_update('')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Break the lock
+ svntest.actions.run_and_verify_svn(None, [],
+ 'unlock', sbox.repo_url + '/X/A/D/G/pi')
+
+ # Subdir update reports the break
+ sbox.simple_update('A/D')
+ expected_status.tweak('A/D/G/pi', writelocked=None)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Relock and break
+ svntest.actions.run_and_verify_svn(".*locked by user", [],
+ 'lock', sbox.ospath('A/D/G/pi'),
+ '-m', '')
+ expected_status.tweak('A/D/G/pi', writelocked='K')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'unlock', sbox.repo_url + '/X/A/D/G/pi')
+
+ # Root update reports the break
+ sbox.simple_update('')
+ expected_status.tweak('A/D/G/pi', writelocked=None)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+def many_locks_hooks(sbox):
+ "many locks with hooks"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Prevent locking '/A/D/G/pi'.
+ svntest.main.create_python_hook_script(os.path.join(sbox.repo_dir,
+ 'hooks', 'pre-lock'),
+ 'import sys\n'
+ 'if sys.argv[2] == "/A/D/G/pi":\n'
+ ' sys.exit(1)\n'
+ 'sys.exit(0)\n')
+
+ # Prevent unlocking '/A/mu'.
+ svntest.main.create_python_hook_script(os.path.join(sbox.repo_dir,
+ 'hooks', 'pre-unlock'),
+ 'import sys\n'
+ 'if sys.argv[2] == "/A/mu":\n'
+ ' sys.exit(1)\n'
+ 'sys.exit(0)\n')
+
+ svntest.actions.run_and_verify_svn(".* locked",
+ "svn: warning: W165001: .*",
+ 'lock',
+ sbox.ospath('iota'),
+ sbox.ospath('A/mu'),
+ sbox.ospath('A/B/E/alpha'),
+ sbox.ospath('A/D/G/pi'),
+ sbox.ospath('A/D/G/rho'))
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', 'A/mu', 'A/B/E/alpha', 'A/D/G/rho',
+ writelocked='K')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ svntest.actions.run_and_verify_svn(".* unlocked",
+ "svn: warning: W165001: .*",
+ 'unlock',
+ sbox.ospath('iota'),
+ sbox.ospath('A/mu'),
+ sbox.ospath('A/B/E/alpha'),
+ sbox.ospath('A/D/G/rho'))
+
+ expected_status.tweak('iota', 'A/B/E/alpha', 'A/D/G/rho',
+ writelocked=None)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+
+@Issue(3515)
+@SkipUnless(svntest.main.is_ra_type_dav)
+def dav_lock_refresh(sbox):
+ "refresh timeout of DAV lock"
+
+ try:
+ # Python <3.0
+ import httplib
+ except ImportError:
+ # Python >=3.0
+ import http.client as httplib
+
+ import base64
+
+ sbox.build(create_wc = False)
+
+ # Acquire lock on 'iota'
+ svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+ sbox.repo_url + '/iota')
+
+ # Try to refresh lock using 'If' header
+ h = svntest.main.create_http_connection(sbox.repo_url)
+
+ lock_token = svntest.actions.run_and_parse_info(sbox.repo_url + '/iota')[0]['Lock Token']
+
+ lock_headers = {
+ 'Authorization': 'Basic ' + base64.b64encode(b'jrandom:rayjandom').decode(),
+ 'If': '(<' + lock_token + '>)',
+ 'Timeout': 'Second-7200'
+ }
+
+ h.request('LOCK', sbox.repo_url + '/iota', '', lock_headers)
+
+ # XFAIL Refreshing of DAV lock fails with error '412 Precondition Failed'
+ r = h.getresponse()
+ if r.status != httplib.OK:
+ raise svntest.Failure('Lock refresh failed: %d %s' % (r.status, r.reason))
+
+@SkipUnless(svntest.main.is_ra_type_dav)
+def delete_locked_file_with_percent(sbox):
+ "lock and delete a file called 'a %( ) .txt'"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ locked_filename = 'a %( ) .txt'
+ locked_path = sbox.ospath(locked_filename)
+ svntest.main.file_write(locked_path, "content\n")
+ sbox.simple_add(locked_filename)
+ sbox.simple_commit()
+
+ sbox.simple_lock(locked_filename)
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'a %( ) .txt' : Item(status=' ', wc_rev='2', writelocked='K')
+ })
+ expected_infos = [
+ { 'Lock Owner' : 'jrandom' },
+ ]
+ svntest.actions.run_and_verify_info(expected_infos, sbox.path('a %( ) .txt'),
+ '-rHEAD')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ sbox.simple_rm(locked_filename)
+
+ # XFAIL: With a 1.8.x client, this commit fails with:
+ # svn: E175002: Unexpected HTTP status 400 'Bad Request' on '/svn-test-work/repositories/lock_tests-52/!svn/txr/2-2/a%20%25(%20)%20.txt'
+ # and the following error in the httpd error log:
+ # Invalid percent encoded URI in tagged If-header [400, #104]
+ sbox.simple_commit()
+
+def lock_commit_bump(sbox):
+ "a commit should not bump just locked files"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ sbox.simple_lock('iota')
+
+ changed_file = sbox.ospath('changed')
+ sbox.simple_append('changed', 'Changed!')
+
+ svntest.actions.run_and_verify_svn(None, [], 'unlock', '--force',
+ sbox.repo_url + '/iota')
+
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-U', sbox.repo_url, '-m', 'Q',
+ 'put', changed_file, 'iota')
+
+ sbox.simple_append('A/mu', 'GOAAAAAAAAL!')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=3)
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # We explicitly check both the Revision and Last Changed Revision.
+ expected_infos = [ {
+ 'Revision' : '1' ,
+ 'Last Changed Rev' : '1' ,
+ 'URL' : '.*',
+ 'Lock Token' : None, }
+ ]
+ svntest.actions.run_and_verify_info(expected_infos,
+ sbox.ospath('iota'))
+
+def copy_dir_with_locked_file(sbox):
+ "copy a directory containing a locked file"
+
+ sbox.build()
+ AA_url = sbox.repo_url + '/AA'
+ AA2_url = sbox.repo_url + '/AA2'
+ A_url = sbox.repo_url + '/A'
+ mu_url = A_url + '/mu'
+
+ svntest.main.run_svn(None, 'lock', '-m', 'locked', mu_url)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', A_url, AA_url,
+ '-m', '')
+
+ expected_err = "svn: E160037: .*no matching lock-token available"
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'mv', A_url, AA2_url,
+ '-m', '')
+
+@Issue(4557)
+def delete_dir_with_lots_of_locked_files(sbox):
+ "delete a directory containing lots of locked files"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # A lot of paths.
+ nfiles = 75 # NOTE: test XPASSES with 50 files!!!
+ locked_paths = []
+ for i in range(nfiles):
+ locked_paths.append(sbox.ospath("A/locked_files/file-%i" % i))
+
+ # Create files at these paths
+ os.mkdir(sbox.ospath("A/locked_files"))
+ for file_path in locked_paths:
+ svntest.main.file_write(file_path, "This is '%s'.\n" % (file_path,))
+ sbox.simple_add("A/locked_files")
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # lock all the files
+ svntest.actions.run_and_verify_svn(None, [], 'lock',
+ '-m', 'All locks',
+ *locked_paths)
+ # Locally delete A (regression against earlier versions, which
+ # always used a special non-standard request)
+ sbox.simple_rm("A")
+
+ # Commit the deletion
+ # XFAIL: As of 1.8.10, this commit fails with:
+ # svn: E175002: Unexpected HTTP status 400 'Bad Request' on '<path>'
+ # and the following error in the httpd error log:
+ # request failed: error reading the headers
+ # This problem was introduced on the 1.8.x branch in r1606976.
+ sbox.simple_commit()
+
+def delete_locks_on_depth_commit(sbox):
+ "delete locks on depth-limited commit"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ svntest.actions.run_and_verify_svn(None, [], 'lock',
+ '-m', 'All files',
+ *(sbox.ospath(x)
+ for x in ['iota', 'A/B/E/alpha',
+ 'A/B/E/beta', 'A/B/lambda',
+ 'A/D/G/pi', 'A/D/G/rho',
+ 'A/D/G/tau', 'A/D/H/chi',
+ 'A/D/H/omega', 'A/D/H/psi',
+ 'A/D/gamma', 'A/mu']))
+
+ sbox.simple_rm("A")
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(verb='Deleting'),
+ })
+
+ expected_status = svntest.wc.State(wc_dir, {
+ '' : Item(status=' ', wc_rev='1'),
+ 'iota' : Item(status=' ', wc_rev='1'),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status, [],
+ wc_dir, '--depth', 'immediates')
+
+ sbox.simple_update() # r2
+
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ sbox.repo_url + '/A@1', sbox.ospath('A'))
+
+ expected_output = [
+ 'Adding %s\n' % sbox.ospath('A'),
+ 'svn: The depth of this commit is \'immediates\', but copies ' \
+ 'are always performed recursively in the repository.\n',
+ 'Committing transaction...\n',
+ 'Committed revision 3.\n',
+ ]
+
+ # Verifying the warning line... so can't use verify_commit()
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'commit', wc_dir, '--depth', 'immediates',
+ '-mm')
+
+ # Verify that all locks are gone at the server and at the client
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+ expected_status.tweak('', 'iota', wc_rev=2)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+@Issue(4634)
+@XFail(svntest.main.is_ra_type_dav)
+def replace_dir_with_lots_of_locked_files(sbox):
+ "replace directory containing lots of locked files"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # A lot of paths.
+ nfiles = 75 # NOTE: test XPASSES with 50 files!!!
+ locked_paths = []
+ for i in range(nfiles):
+ locked_paths.append(sbox.ospath("A/locked_files/file-%i" % i))
+
+ # Create files at these paths
+ os.mkdir(sbox.ospath("A/locked_files"))
+ for file_path in locked_paths:
+ svntest.main.file_write(file_path, "This is '%s'.\n" % (file_path,))
+ sbox.simple_add("A/locked_files")
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # lock all the files
+ svntest.actions.run_and_verify_svn(None, [], 'lock',
+ '-m', 'All locks',
+ *locked_paths)
+ # Locally delete A (regression against earlier versions, which
+ # always used a special non-standard request)
+ sbox.simple_rm("A")
+
+ # But a further replacement never worked
+ sbox.simple_mkdir("A")
+ # And an additional propset didn't work either
+ # (but doesn't require all lock tokens recursively)
+ sbox.simple_propset("k", "v", "A")
+
+ # Commit the deletion
+ # XFAIL: As of 1.8.10, this commit fails with:
+ # svn: E175002: Unexpected HTTP status 400 'Bad Request' on '<path>'
+ # and the following error in the httpd error log:
+ # request failed: error reading the headers
+ # This problem was introduced on the 1.8.x branch in r1606976.
+ sbox.simple_commit()
+
+########################################################################
+# Run the tests
+
+# list all tests here, starting with None:
+test_list = [ None,
+ lock_file,
+ commit_file_keep_lock,
+ commit_file_unlock,
+ commit_propchange,
+ break_lock,
+ steal_lock,
+ examine_lock,
+ handle_defunct_lock,
+ enforce_lock,
+ defunct_lock,
+ deleted_path_lock,
+ lock_unlock,
+ deleted_dir_lock,
+ lock_status,
+ stolen_lock_status,
+ broken_lock_status,
+ lock_non_existent_file,
+ out_of_date,
+ update_while_needing_lock,
+ revert_lock,
+ examine_lock_via_url,
+ lock_several_files,
+ lock_switched_files,
+ lock_uri_encoded,
+ lock_and_exebit1,
+ lock_and_exebit2,
+ commit_xml_unsafe_file_unlock,
+ repos_lock_with_info,
+ unlock_already_unlocked_files,
+ info_moved_path,
+ ls_url_encoded,
+ unlock_wrong_token,
+ examine_lock_encoded_recurse,
+ unlocked_lock_of_other_user,
+ lock_funky_comment_chars,
+ lock_twice_in_one_wc,
+ lock_path_not_in_head,
+ verify_path_escaping,
+ replace_and_propset_locked_path,
+ cp_isnt_ro,
+ update_locked_deleted,
+ block_unlock_if_pre_unlock_hook_fails,
+ lock_invalid_token,
+ lock_multi_wc,
+ locks_stick_over_switch,
+ lock_unlock_deleted,
+ commit_stolen_lock,
+ drop_locks_on_parent_deletion,
+ copy_with_lock,
+ lock_hook_messages,
+ failing_post_hooks,
+ break_delete_add,
+ dav_lock_timeout,
+ create_dav_lock_timeout,
+ non_root_locks,
+ many_locks_hooks,
+ dav_lock_refresh,
+ delete_locked_file_with_percent,
+ lock_commit_bump,
+ copy_dir_with_locked_file,
+ delete_dir_with_lots_of_locked_files,
+ delete_locks_on_depth_commit,
+ replace_dir_with_lots_of_locked_files,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/log_tests.py b/subversion/tests/cmdline/log_tests.py
new file mode 100755
index 0000000..484103a
--- /dev/null
+++ b/subversion/tests/cmdline/log_tests.py
@@ -0,0 +1,2840 @@
+#!/usr/bin/env python
+#
+# log_tests.py: testing "svn log"
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import re, os, sys
+
+# Our testing module
+import svntest
+from svntest import wc
+
+from svntest.main import server_has_mergeinfo
+from svntest.main import SVN_PROP_MERGEINFO
+from svntest.mergetrees import set_up_branch
+from svntest.verify import make_diff_header, make_no_diff_deleted_header
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+exp_noop_up_out = svntest.actions.expected_noop_update_output
+
+######################################################################
+#
+# The Plan:
+#
+# Get a repository, commit about 6 or 7 revisions to it, each
+# involving different kinds of operations. Make sure to have some
+# add, del, mv, cp, as well as file modifications, and make sure that
+# some files are modified more than once.
+#
+# Give each commit a recognizable log message. Test all combinations
+# of -r options, including none. Then test with -v, which will
+# (presumably) show changed paths as well.
+#
+######################################################################
+
+
+
+######################################################################
+# Globals
+#
+
+# These variables are set by guarantee_repos_and_wc().
+max_revision = 0 # Highest revision in the repos
+
+# What separates log msgs from one another in raw log output.
+msg_separator = '------------------------------------' \
+ + '------------------------------------\n'
+
+
+# (abbreviation)
+Item = svntest.wc.StateItem
+
+
+######################################################################
+# Utilities
+#
+
+def guarantee_repos_and_wc(sbox):
+ "Make a repos and wc, commit max_revision revs."
+ global max_revision
+
+ sbox.build()
+ wc_path = sbox.wc_dir
+ msg_file=os.path.join(sbox.repo_dir, 'log-msg')
+ msg_file=os.path.abspath(msg_file)
+
+ # Now we have a repos and wc at revision 1.
+
+ was_cwd = os.getcwd()
+ os.chdir(wc_path)
+
+ # Set up the paths we'll be using most often.
+ iota_path = os.path.join('iota')
+ mu_path = os.path.join('A', 'mu')
+ B_path = os.path.join('A', 'B')
+ omega_path = os.path.join('A', 'D', 'H', 'omega')
+ pi_path = os.path.join('A', 'D', 'G', 'pi')
+ rho_path = os.path.join('A', 'D', 'G', 'rho')
+ alpha_path = os.path.join('A', 'B', 'E', 'alpha')
+ beta_path = os.path.join('A', 'B', 'E', 'beta')
+ psi_path = os.path.join('A', 'D', 'H', 'psi')
+ epsilon_path = os.path.join('A', 'C', 'epsilon')
+
+ # Do a varied bunch of commits. No copies yet, we'll wait till Ben
+ # is done for that.
+
+ # Revision 2: edit iota
+ msg=""" Log message for revision 2
+ but with multiple lines
+ to test the code"""
+ svntest.main.file_write(msg_file, msg)
+ svntest.main.file_append(iota_path, "2")
+ svntest.main.run_svn(None,
+ 'ci', '-F', msg_file)
+ svntest.main.run_svn(None,
+ 'up')
+
+ # Revision 3: edit A/D/H/omega, A/D/G/pi, A/D/G/rho, and A/B/E/alpha
+ svntest.main.file_append(omega_path, "3")
+ svntest.main.file_append(pi_path, "3")
+ svntest.main.file_append(rho_path, "3")
+ svntest.main.file_append(alpha_path, "3")
+ svntest.main.run_svn(None,
+ 'ci', '-m', "Log message for revision 3")
+ svntest.main.run_svn(None,
+ 'up')
+
+ # Revision 4: edit iota again, add A/C/epsilon
+ msg=""" Log message for revision 4
+ but with multiple lines
+ to test the code"""
+ svntest.main.file_write(msg_file, msg)
+ svntest.main.file_append(iota_path, "4")
+ svntest.main.file_append(epsilon_path, "4")
+ svntest.main.run_svn(None, 'add', epsilon_path)
+ svntest.main.run_svn(None,
+ 'ci', '-F', msg_file)
+ svntest.main.run_svn(None,
+ 'up')
+
+ # Revision 5: edit A/C/epsilon, delete A/D/G/rho
+ svntest.main.file_append(epsilon_path, "5")
+ svntest.main.run_svn(None, 'rm', rho_path)
+ svntest.main.run_svn(None,
+ 'ci', '-m', "Log message for revision 5")
+ svntest.main.run_svn(None,
+ 'up')
+
+ # Revision 6: prop change on A/B, edit A/D/H/psi
+ msg=""" Log message for revision 6
+ but with multiple lines
+ to test the code"""
+ svntest.main.file_write(msg_file, msg)
+ svntest.main.run_svn(None, 'ps', 'blue', 'azul', B_path)
+ svntest.main.file_append(psi_path, "6")
+ svntest.main.run_svn(None,
+ 'ci', '-F', msg_file)
+ svntest.main.run_svn(None,
+ 'up')
+
+ # Revision 7: edit A/mu, prop change on A/mu
+ svntest.main.file_append(mu_path, "7")
+ svntest.main.run_svn(None, 'ps', 'red', 'burgundy', mu_path)
+ svntest.main.run_svn(None,
+ 'ci', '-m', "Log message for revision 7")
+ svntest.main.run_svn(None,
+ 'up')
+
+ # Revision 8: edit iota yet again, re-add A/D/G/rho
+ msg=""" Log message for revision 8
+ but with multiple lines
+ to test the code"""
+ svntest.main.file_write(msg_file, msg)
+ svntest.main.file_append(iota_path, "8")
+ svntest.main.file_append(rho_path, "88") # More than one char so libmagic
+ # treats it as text.
+ svntest.main.run_svn(None, 'add', rho_path)
+ svntest.main.run_svn(None,
+ 'ci', '-F', msg_file)
+ svntest.main.run_svn(None,
+ 'up')
+
+ # Revision 9: edit A/B/E/beta, delete A/B/E/alpha
+ svntest.main.file_append(beta_path, "9")
+ svntest.main.run_svn(None, 'rm', alpha_path)
+ svntest.main.run_svn(None,
+ 'ci', '-m', "Log message for revision 9")
+ svntest.main.run_svn(None,
+ 'up')
+
+ max_revision = 9
+
+ # Restore.
+ os.chdir(was_cwd)
+
+ # Let's run 'svn status' and make sure the working copy looks
+ # exactly the way we think it should. Start with a generic
+ # greek-tree-list, where every local and repos revision is at 9.
+ expected_status = svntest.actions.get_virginal_state(wc_path, 9)
+ expected_status.remove('A/B/E/alpha')
+ expected_status.add({
+ 'A/C/epsilon' : Item(status=' ', wc_rev=9),
+ })
+
+ # props exist on A/B and A/mu
+ expected_status.tweak('A/B', 'A/mu', status=' ')
+
+ # Run 'svn st -uv' and compare the actual results with our tree.
+ svntest.actions.run_and_verify_status(wc_path, expected_status)
+
+
+def merge_history_repos(sbox):
+ """Make a repos with varied and interesting merge history, similar
+ to the repos found at: log_tests_data/merge_history_repo.png"""
+
+ upsilon_path = os.path.join('A', 'upsilon')
+ omicron_path = os.path.join('blocked', 'omicron')
+ branch_a = os.path.join('branches', 'a')
+ branch_b = os.path.join('branches', 'b')
+ branch_c = os.path.join('branches', 'c')
+
+ # Create an empty repository - r0
+ sbox.build(empty=True)
+ was_cwd = os.getcwd()
+ os.chdir(sbox.wc_dir)
+
+ # Create trunk/tags/branches - r1
+ svntest.main.run_svn(None, 'mkdir', 'trunk')
+ svntest.main.run_svn(None, 'mkdir', 'tags')
+ svntest.main.run_svn(None, 'mkdir', 'branches')
+ svntest.main.run_svn(None, 'ci', '-m',
+ 'Add trunk/tags/branches structure.')
+
+ # Import greek tree to trunk - r2
+ svntest.main.greek_state.write_to_disk('trunk')
+ svntest.main.run_svn(None, 'add', os.path.join('trunk', 'A'),
+ os.path.join('trunk', 'iota'))
+ svntest.main.run_svn(None, 'ci', '-m',
+ 'Import greek tree into trunk.')
+
+ # Update from the repository to avoid a mix-rev working copy
+ svntest.main.run_svn(None, 'up')
+
+ # Create a branch - r3
+ svntest.main.run_svn(None, 'cp', 'trunk', branch_a)
+ svntest.main.run_svn(None, 'ci', '-m',
+ 'Create branches/a from trunk.',
+ '--username', svntest.main.wc_author2)
+
+ # Some changes on the branch - r4
+ svntest.main.file_append_binary(os.path.join(branch_a, 'iota'),
+ "'A' has changed a bit.\n")
+ svntest.main.file_append_binary(os.path.join(branch_a, 'A', 'mu'),
+ "Don't forget to look at 'upsilon', too.")
+ svntest.main.file_write(os.path.join(branch_a, upsilon_path),
+ "This is the file 'upsilon'.\n", "wb")
+ svntest.main.run_svn(None, 'add',
+ os.path.join(branch_a, upsilon_path))
+ svntest.main.run_svn(None, 'ci', '-m',
+ "Add the file 'upsilon', and change some other files.")
+
+ # Create another branch - r5
+ svntest.main.run_svn(None, 'cp', 'trunk', branch_c)
+ svntest.main.run_svn(None, 'ci', '-m',
+ 'Create branches/c from trunk.',
+ '--username', svntest.main.wc_author2)
+
+  # Do some merging - r6
+ # From branch_a to trunk: add 'upsilon' and modify 'iota' and 'mu'.
+ #
+ # Mergeinfo changes on /trunk:
+ # Merged /branches/a:r3-5
+ os.chdir('trunk')
+ svntest.main.run_svn(None, 'merge', os.path.join('..', branch_a) + '@HEAD')
+ svntest.main.run_svn(None, 'ci', '-m',
+ 'Merged branches/a to trunk.',
+ '--username', svntest.main.wc_author2)
+ os.chdir('..')
+
+ # Add 'blocked/omicron' to branches/a - r7
+ svntest.main.run_svn(None, 'mkdir', os.path.join(branch_a, 'blocked'))
+ svntest.main.file_write(os.path.join(branch_a, omicron_path),
+ "This is the file 'omicron'.\n")
+ svntest.main.run_svn(None, 'add',
+ os.path.join(branch_a, omicron_path))
+ svntest.main.run_svn(None, 'ci', '-m',
+ "Add omicron to branches/a. " +
+ "It will be blocked from merging in r8.")
+
+ # Block r7 from being merged to trunk - r8
+ #
+ # Mergeinfo changes on /trunk:
+ # Merged /branches/a:r7
+ os.chdir('trunk')
+ svntest.main.run_svn(None, 'merge', '--allow-mixed-revisions',
+ '--record-only', '-r6:7',
+ os.path.join('..', branch_a))
+ svntest.main.run_svn(None, 'ci', '-m',
+ "Block r7 from merging to trunk.",
+ '--username', svntest.main.wc_author2)
+ os.chdir('..')
+
+ # Wording change in mu - r9
+ svntest.main.file_write(os.path.join('trunk', 'A', 'mu'),
+ "This is the file 'mu'.\n" +
+ "Don't forget to look at 'upsilon', as well.", "wb")
+ svntest.main.run_svn(None, 'ci', '-m',
+ "Wording change in mu.")
+
+ # Update from the repository to avoid a mix-rev working copy
+ svntest.main.run_svn(None, 'up')
+
+ # Create another branch - r10
+ svntest.main.run_svn(None, 'cp', 'trunk', branch_b)
+ svntest.main.run_svn(None, 'ci', '-m',
+ "Create branches/b from trunk",
+ '--username', svntest.main.wc_author2)
+
+ # Add another file, make some changes on branches/a - r11
+ svntest.main.file_append_binary(os.path.join(branch_a, upsilon_path),
+ "There is also the file 'xi'.")
+ svntest.main.file_write(os.path.join(branch_a, 'A', 'xi'),
+ "This is the file 'xi'.\n", "wb")
+ svntest.main.run_svn(None, 'add',
+ os.path.join(branch_a, 'A', 'xi'))
+ svntest.main.file_write(os.path.join(branch_a, 'iota'),
+ "This is the file 'iota'.\n" +
+ "'A' has changed a bit, with 'upsilon', and 'xi'.",
+ "wb")
+ svntest.main.run_svn(None, 'ci', '-m',
+ "Added 'xi' to branches/a, made a few other changes.")
+
+ # Merge branches/a to branches/b - r12
+ #
+ # Mergeinfo changes on /branches/b:
+ # Merged /branches/a:r6,8-11
+ os.chdir(branch_b)
+ svntest.main.run_svn(None, 'merge', os.path.join('..', 'a') + '@HEAD')
+ svntest.main.run_svn(None, 'ci', '-m',
+ "Merged branches/a to branches/b.",
+ '--username', svntest.main.wc_author2)
+ os.chdir(os.path.join('..', '..'))
+
+ # More wording changes - r13
+ svntest.main.file_append_binary(os.path.join(branch_b, 'A', 'D', 'gamma'),
+ "Watch out for the rays!")
+ svntest.main.run_svn(None, 'ci', '-m',
+ "Modify 'gamma' on branches/b.")
+
+ # More merging - r14
+ #
+ # Mergeinfo changes on /trunk:
+ # Merged /branches/a:r6,8-11
+ # Merged /branches/b:r10-13
+ os.chdir('trunk')
+ svntest.main.run_svn(None, 'merge', os.path.join('..', branch_b) + '@HEAD')
+ svntest.main.run_svn(None, 'ci', '-m',
+ "Merged branches/b to trunk.",
+ '--username', svntest.main.wc_author2)
+ os.chdir('..')
+
+ # Even more merging - r15
+ #
+ # Mergeinfo changes on /branches/c:
+ # Merged /trunk:r3-14
+ # Merged /branches/a:r3-11
+ # Merged /branches/b:r10-13
+ os.chdir(branch_c)
+ svntest.main.run_svn(None, 'merge',
+ os.path.join('..', '..', 'trunk') + '@HEAD')
+ svntest.main.run_svn(None, 'ci', '-m',
+ "Bring branches/c up to date with trunk.",
+ '--username', svntest.main.wc_author2)
+ os.chdir(os.path.join('..', '..'))
+
+ # Modify a file on branches/c - r16
+ svntest.main.file_append_binary(os.path.join(branch_c, 'A', 'mu'),
+ "\nThis is yet more content in 'mu'.")
+ svntest.main.run_svn(None, 'ci', '-m',
+ "Modify 'mu' on branches/c.")
+
+ # Merge branches/c to trunk - r17
+ #
+ # Mergeinfo changes on /trunk:
+ # Merged /branches/c:r5-16
+ os.chdir('trunk')
+ svntest.main.run_svn(None, 'up')
+ svntest.main.run_svn(None, 'merge', '--reintegrate',
+ os.path.join('..', branch_c) + '@HEAD')
+ svntest.main.run_svn(None, 'ci', '-m',
+ "Merge branches/c to trunk.",
+ '--username', svntest.main.wc_author2)
+ os.chdir('..')
+
+ # Restore working directory
+ os.chdir(was_cwd)
+
+# For errors seen while parsing log data.
+class SVNLogParseError(Exception):
+ pass
+
+
+def parse_log_output(log_lines, with_diffs=False):
+ """Return a log chain derived from LOG_LINES.
+ A log chain is a list of hashes; each hash represents one log
+ message, in the order it appears in LOG_LINES (the first log
+ message in the data is also the first element of the list, and so
+ on).
+
+ Each hash contains the following keys/values:
+
+ 'revision' ===> number
+ 'author' ===> string
+ 'date' ===> string
+ 'msg' ===> string (the log message itself)
+ 'lines' ===> number (so that it may be checked against rev)
+
+ If LOG_LINES contains changed-path information, then the hash
+ also contains
+
+ 'paths' ===> list of tuples of the form (X, PATH), where X is the
+ first column of verbose output, and PATH is the affected path.
+
+ If LOG_LINES contains merge result information, then the hash also contains
+
+ 'merges' ===> list of forward-merging revisions that resulted in this
+ log being part of the list of messages.
+
+ 'reverse_merges' ===> list of reverse-merging revisions that resulted
+ in this log being part of the list of messages.
+
+ If LOG_LINES contains diffs and WITH_DIFFS=True, then the hash also contains
+
+ 'diff_lines' ===> list of strings (diffs)
+ """
+
+ # Here's some log output to look at while writing this function:
+
+ # ------------------------------------------------------------------------
+ # r5 | kfogel | Tue 6 Nov 2001 17:18:19 | 1 line
+ #
+ # Log message for revision 5.
+ # ------------------------------------------------------------------------
+ # r4 | kfogel | Tue 6 Nov 2001 17:18:18 | 3 lines
+ #
+ # Log message for revision 4
+ # but with multiple lines
+ # to test the code.
+ # ------------------------------------------------------------------------
+ # r3 | kfogel | Tue 6 Nov 2001 17:18:17 | 1 line
+ #
+ # Log message for revision 3.
+ # ------------------------------------------------------------------------
+ # r2 | kfogel | Tue 6 Nov 2001 17:18:16 | 3 lines
+ #
+ # Log message for revision 2
+ # but with multiple lines
+ # to test the code.
+ # ------------------------------------------------------------------------
+ # r1 | foo | Tue 6 Nov 2001 15:27:57 | 1 line
+ #
+ # Log message for revision 1.
+ # ------------------------------------------------------------------------
+
+ # Regular expression to match the header line of a log message, with
+ # these groups: (revision number), (author), (date), (num lines).
+ header_re = re.compile('^r([0-9]+) \| ' \
+ + '([^|]*) \| ([^|]*) \| ([0-9]+) lines?')
+
+ # The log chain to return.
+ chain = []
+
+ # Filter debug lines from the output.
+ log_lines = [line for line in log_lines if not line.startswith('DBG:')]
+
+ this_item = None
+ while True:
+ try:
+ this_line = log_lines.pop(0)
+ except IndexError:
+ return chain
+
+ match = header_re.search(this_line)
+ if match and match.groups():
+ is_result = 0
+ is_result_reverse = 0
+ this_item = {}
+ this_item['revision'] = int(match.group(1))
+ this_item['author'] = match.group(2)
+ this_item['date'] = match.group(3)
+ lines = int(match.group(4))
+ this_item['lines'] = lines
+
+ # Parse verbose output, starting with "Changed paths"
+ next_line = log_lines.pop(0)
+ if next_line.strip() == 'Changed paths:':
+ paths = []
+ path_line = log_lines.pop(0).strip()
+
+ # Stop on either a blank line or a "(Reverse) Merged via: ..." line
+ while (path_line != ''
+ and path_line[0:6] != 'Merged'
+ and path_line[0:14] != 'Reverse merged'):
+ paths.append( (path_line[0], path_line[2:]) )
+ path_line = log_lines.pop(0).strip()
+
+ this_item['paths'] = paths
+
+ if path_line[0:6] == 'Merged':
+ is_result = 1
+ result_line = path_line
+ elif path_line[0:14] == 'Reverse merged':
+ is_result_reverse = 1
+ result_line = path_line
+
+ elif next_line[0:6] == 'Merged':
+ is_result = 1
+ result_line = next_line.strip()
+ elif next_line[0:14] == 'Reverse merged':
+ is_result_reverse = 1
+ result_line = next_line.strip()
+
+ # Parse output of "Merged via: ..." line
+ if is_result:
+ merges = []
+ prefix_len = len('Merged via: ')
+ for rev_str in result_line[prefix_len:].split(','):
+ merges.append(int(rev_str.strip()[1:]))
+ this_item['merges'] = merges
+
+ # Eat blank line
+ log_lines.pop(0)
+
+ # Parse output of "Reverse merged via: ..." line
+ if is_result_reverse:
+ reverse_merges = []
+ prefix_len = len('Reverse merged via: ')
+ for rev_str in result_line[prefix_len:].split(','):
+ reverse_merges.append(int(rev_str.strip()[1:]))
+ this_item['reverse_merges'] = reverse_merges
+
+ # Eat blank line
+ log_lines.pop(0)
+
+ # Accumulate the log message
+ msg = ''
+ for line in log_lines[0:lines]:
+ msg += line
+ del log_lines[0:lines]
+
+ # Maybe accumulate a diff.
+ # If there is a diff, there is a blank line before and after it.
+ if with_diffs and len(log_lines) >= 2 and log_lines[0] == '\n':
+ log_lines.pop(0)
+ diff_lines = []
+ while len(log_lines) and log_lines[0] != msg_separator:
+ diff_lines.append(log_lines.pop(0))
+ if diff_lines[-1] == '\n':
+ diff_lines.pop()
+ else:
+ raise SVNLogParseError("no blank line after diff in log")
+ this_item['diff_lines'] = diff_lines
+
+ elif this_line == msg_separator:
+ if this_item:
+ this_item['msg'] = msg
+ chain.append(this_item)
+ else: # if didn't see separator now, then something's wrong
+ print(this_line)
+ raise SVNLogParseError("trailing garbage after log message")
+
+ return chain
+
+
+class SVNUnexpectedLogs(svntest.Failure):
+ "Exception raised if a set of log messages doesn't meet expectations."
+
+ def __init__(self, msg, chain, field_selector = 'revision'):
+ """Stores the log chain for later use. FIELD_SELECTOR indicates
+ which individual field to display when turning the exception into
+ text."""
+ svntest.Failure.__init__(self, msg)
+ self.chain = chain
+ self.field_selector = field_selector
+
+ def __str__(self):
+ msg = svntest.Failure.__str__(self)
+ if self.chain:
+ chain_data = list(self.chain)
+ for i in range(0, len(self.chain)):
+ chain_data[i] = self.chain[i][self.field_selector]
+ msg = msg + ': Actual %s list was %s' % (self.field_selector, chain_data)
+ return msg
+
+
+def check_log_chain(chain, revlist, path_counts=[]):
+ """Verify that log chain CHAIN contains the right log messages for
+ revisions START to END (see documentation for parse_log_output() for
+ more about log chains).
+
+ Do nothing if the log chain's messages run from revision START to END
+ and each log message contains a line of the form
+
+ 'Log message for revision N'
+
+ where N is the revision number of that commit. Verify that
+ author and date are present and look sane, but don't check them too
+ carefully.
+ Also verify that even numbered commit messages have three lines.
+
+ If the length of PATH_COUNTS is greater than zero, make sure that each
+ log has that number of paths.
+
+ Raise an error if anything looks wrong.
+ """
+
+ nbr_expected = len(revlist)
+ if len(chain) != nbr_expected:
+ raise SVNUnexpectedLogs('Number of elements in log chain and revision ' +
+ 'list %s not equal' % revlist, chain)
+ if path_counts and len(path_counts) != nbr_expected:
+ raise SVNUnexpectedLogs('Number of elements in log chain and path ' +
+ 'counts %s not equal' % path_counts, chain)
+ missing_revs = []
+ for i in range(0, nbr_expected):
+ expect_rev = revlist[i]
+ log_item = chain[i]
+ saw_rev = log_item['revision']
+ date = log_item['date']
+ author = log_item['author']
+ msg = log_item['msg']
+ # The most important check is that the revision is right:
+ if expect_rev != saw_rev:
+ missing_revs.append(expect_rev)
+ continue
+ # Check that date looks at least vaguely right:
+ date_re = re.compile('[0-9]+')
+ if not date_re.search(date):
+ raise SVNUnexpectedLogs('Malformed date', chain, 'date')
+ # Authors are a little harder, since they might not exist over ra-dav.
+ # Well, it's not much of a check, but we'll do what we can.
+ author_re = re.compile('[a-zA-Z]+')
+ if (not (author_re.search(author)
+ or author == ''
+ or author == '(no author)')):
+ raise SVNUnexpectedLogs('Malformed author', chain, 'author')
+
+ # Verify the expectation that even-numbered revisions in the Greek
+ # tree tweaked by the log tests have 3-line log messages.
+ if (saw_rev % 2 == 0 and log_item['lines'] != 3):
+ raise SVNUnexpectedLogs('Malformed log line counts', chain, 'lines')
+
+ # Check that the log message looks right:
+ pattern = 'Log message for revision ' + repr(saw_rev)
+ msg_re = re.compile(pattern)
+ if not msg_re.search(msg):
+ raise SVNUnexpectedLogs("Malformed log message, expected '%s'" % msg,
+ chain)
+
+ # If path_counts, check the number of changed paths
+ if path_counts:
+ if (not 'paths' in log_item) or (not log_item['paths']):
+ raise SVNUnexpectedLogs("No changed path information", chain)
+ if path_counts[i] != len(log_item['paths']):
+ raise SVNUnexpectedLogs("Changed paths counts not equal for " +
+ "revision %d" % (i + 1), chain)
+
+ nbr_missing_revs = len(missing_revs)
+ if nbr_missing_revs > 0:
+ raise SVNUnexpectedLogs('Unable to find expected revision(s) %s' %
+ missing_revs, chain)
+
+
+def parse_diff(output):
+ """Return a set containing the various diff bits, broken up by file."""
+
+ diff_set = []
+ current_diff = []
+ for line in output:
+ if line.startswith('Index: ') and current_diff:
+ diff_set.append(current_diff)
+ current_diff = []
+ current_diff.append(line)
+ diff_set.append(current_diff)
+
+ return diff_set
+
+
+def setify(diff_list):
+ """Take a list of lists and make it a set of tuples."""
+ s = set()
+ for diff in diff_list:
+ s.add(tuple(diff))
+ return s
+
+
+def compare_diff_output(expected_diffs, output):
+ """Compare the diffs in EXPECTED_DIFFS (which is a Python set) with the
+ text in OUTPUT, remembering that there is no canonical ordering for diffs."""
+
+ diffs = parse_diff(output)
+ diffs = setify(diffs)
+ expected_diffs = setify(expected_diffs)
+
+ if diffs.issubset(expected_diffs) and diffs.issuperset(expected_diffs):
+ return
+
+ print("=============== DIFFS NOT EQUAL ===================")
+ print("Expected")
+ for line in expected_diffs:
+ print(line)
+ print("Actual:")
+ for line in output:
+ print(line)
+ raise svntest.Failure("Diffs not equal")
+
+
+######################################################################
+# Tests
+#
+
+#----------------------------------------------------------------------
+def plain_log(sbox):
+ "'svn log', no args, top of wc"
+
+ guarantee_repos_and_wc(sbox)
+
+ os.chdir(sbox.wc_dir)
+
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log')
+
+ log_chain = parse_log_output(output)
+ check_log_chain(log_chain, list(range(max_revision, 1 - 1, -1)))
+
+
+#----------------------------------------------------------------------
+def log_with_empty_repos(sbox):
+ "'svn log' on an empty repository"
+
+ # Create virgin repos
+ sbox.build(create_wc=False, empty=True)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'log',
+ sbox.repo_url)
+
+#----------------------------------------------------------------------
+def log_where_nothing_changed(sbox):
+ "'svn log -rN some_dir_unchanged_in_N'"
+ sbox.build()
+
+ # Fix bug whereby running 'svn log -rN SOMEPATH' would result in an
+ # xml protocol error if there were no changes in revision N
+ # underneath SOMEPATH. This problem was introduced in revision
+ # 3811, which didn't cover the case where svn_repos_get_logs might
+ # invoke log_receiver zero times. Since the receiver never ran, the
+ # lrb->needs_header flag never got cleared. Control would proceed
+ # without error to the end of dav_svn__log_report(), which would
+ # send a closing tag even though no opening tag had ever been sent.
+
+ rho_path = os.path.join(sbox.wc_dir, 'A', 'D', 'G', 'rho')
+ svntest.main.file_append(rho_path, "some new material in rho")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m',
+ 'log msg', rho_path)
+
+ # Now run 'svn log -r2' on a directory unaffected by revision 2.
+ H_path = os.path.join(sbox.wc_dir, 'A', 'D', 'H')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'log', '-r', '2', H_path)
+
+
+#----------------------------------------------------------------------
+def log_to_revision_zero(sbox):
+ "'svn log -v -r 1:0 wc_root'"
+ sbox.build(read_only = True)
+
+ # This used to segfault the server.
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'log', '-v',
+ '-r', '1:0', sbox.wc_dir)
+
+#----------------------------------------------------------------------
+def log_with_path_args(sbox):
+ "'svn log', with args, top of wc"
+
+ guarantee_repos_and_wc(sbox)
+
+ os.chdir(sbox.wc_dir)
+
+ exit_code, output, err = svntest.actions.run_and_verify_svn(
+ None, [],
+ 'log', sbox.repo_url, 'A/D/G', 'A/D/H')
+
+ log_chain = parse_log_output(output)
+ check_log_chain(log_chain, [8, 6, 5, 3, 1])
+
+#----------------------------------------------------------------------
+def dynamic_revision(sbox):
+ "'svn log -r COMMITTED' of dynamic/local WC rev"
+
+ guarantee_repos_and_wc(sbox)
+ os.chdir(sbox.wc_dir)
+
+ revprops = [{'svn:author': 'jrandom',
+ 'svn:date': '', 'svn:log': 'Log message for revision 9'}]
+ for rev in ('HEAD', 'BASE', 'COMMITTED'):
+ svntest.actions.run_and_verify_log_xml(expected_revprops=revprops,
+ args=['-r', rev])
+ revprops[0]['svn:log'] = ('Log message for revision 8\n'
+ ' but with multiple lines\n'
+ ' to test the code')
+ svntest.actions.run_and_verify_log_xml(expected_revprops=revprops,
+ args=['-r', 'PREV'])
+
+#----------------------------------------------------------------------
+def log_wc_with_peg_revision(sbox):
+ "'svn log wc_target@N'"
+ guarantee_repos_and_wc(sbox)
+ my_path = os.path.join(sbox.wc_dir, "A", "B", "E", "beta") + "@8"
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', my_path)
+ check_log_chain(parse_log_output(output), [1])
+
+#----------------------------------------------------------------------
+def url_missing_in_head(sbox):
+ "'svn log target@N' when target removed from HEAD"
+
+ guarantee_repos_and_wc(sbox)
+
+ my_url = sbox.repo_url + "/A/B/E/alpha" + "@8"
+
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', my_url)
+ check_log_chain(parse_log_output(output), [3, 1])
+
+#----------------------------------------------------------------------
+def log_through_copyfrom_history(sbox):
+ "'svn log TGT' with copyfrom history"
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ msg_file=os.path.join(sbox.repo_dir, 'log-msg')
+ msg_file=os.path.abspath(msg_file)
+
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+ mu2_path = os.path.join(wc_dir, 'A', 'mu2')
+ mu_URL = sbox.repo_url + '/A/mu'
+ mu2_URL = sbox.repo_url + '/A/mu2'
+
+ msg2=""" Log message for revision 2
+ but with multiple lines
+ to test the code"""
+
+ msg4=""" Log message for revision 4
+ but with multiple lines
+ to test the code"""
+
+ msg6=""" Log message for revision 6
+ but with multiple lines
+ to test the code"""
+
+ svntest.main.file_write(msg_file, msg2)
+ svntest.main.file_append(mu_path, "2")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', wc_dir,
+ '-F', msg_file)
+ svntest.main.file_append(mu2_path, "this is mu2")
+ svntest.actions.run_and_verify_svn(None, [], 'add', mu2_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', wc_dir,
+ '-m', "Log message for revision 3")
+ svntest.actions.run_and_verify_svn(None, [], 'rm', mu2_path)
+ svntest.main.file_write(msg_file, msg4)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', wc_dir,
+ '-F', msg_file)
+ svntest.main.file_append(mu_path, "5")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', wc_dir,
+ '-m', "Log message for revision 5")
+
+ svntest.main.file_write(msg_file, msg6)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', '-r', '5', mu_URL, mu2_URL,
+ '-F', msg_file)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', wc_dir)
+
+ # The full log for mu2 is relatively unsurprising
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', mu2_path)
+ log_chain = parse_log_output(output)
+ check_log_chain(log_chain, [6, 5, 2, 1])
+
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', mu2_URL)
+ log_chain = parse_log_output(output)
+ check_log_chain(log_chain, [6, 5, 2, 1])
+
+ # First "oddity", the full log for mu2 doesn't include r3, but the -r3
+ # log works!
+ peg_mu2_path = mu2_path + "@3"
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', '-r', '3',
+ peg_mu2_path)
+ log_chain = parse_log_output(output)
+ check_log_chain(log_chain, [3])
+
+ peg_mu2_URL = mu2_URL + "@3"
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', '-r', '3',
+ peg_mu2_URL)
+ log_chain = parse_log_output(output)
+ check_log_chain(log_chain, [3])
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', '-r', '2',
+ mu2_path)
+ log_chain = parse_log_output(output)
+ check_log_chain(log_chain, [2])
+
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', '-r', '2',
+ mu2_URL)
+ log_chain = parse_log_output(output)
+ check_log_chain(log_chain, [2])
+
+#----------------------------------------------------------------------
+def escape_control_chars(sbox):
+ "mod_dav_svn must escape invalid XML control chars"
+
+ dump_str = b"""SVN-fs-dump-format-version: 2
+
+UUID: ffcae364-69ee-0310-a980-ca5f10462af2
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2005-01-24T10:09:21.759592Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 128
+Content-length: 128
+
+K 7
+svn:log
+V 100
+This msg contains a Ctrl-T (\x14) and a Ctrl-I (\t).
+The former might be escaped, but the latter never.
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2005-01-24T10:09:22.012524Z
+PROPS-END
+"""
+
+ # load dumpfile with control character into repos to get
+ # a log with control char content
+ svntest.actions.load_repo(sbox, dump_str=dump_str)
+
+ URL = sbox.repo_url
+
+ # run log
+ exit_code, output, errput = svntest.actions.run_and_verify_svn(
+ None, [], 'log', URL)
+
+ # Verify the output contains either the expected fuzzy escape
+ # sequence, or the literal control char.
+ match_unescaped_ctrl_re = "This msg contains a Ctrl-T \(.\) " \
+ "and a Ctrl-I \(\t\)\."
+ match_escaped_ctrl_re = "^This msg contains a Ctrl-T \(\?\\\\020\) " \
+ "and a Ctrl-I \(\t\)\."
+ matched = None
+ for line in output:
+ if re.match(match_unescaped_ctrl_re, line) \
+ or re.match(match_escaped_ctrl_re, line):
+ matched = 1
+
+ if not matched:
+ raise svntest.Failure("log message not transmitted properly:" +
+ str(output) + "\n" + "error: " + str(errput))
+
+#----------------------------------------------------------------------
+def log_xml_empty_date(sbox):
+ "svn log --xml must not print empty date elements"
+ sbox.build()
+
+ # Create the revprop-change hook for this test
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+
+ date_re = re.compile('<date')
+
+ # Ensure that we get a date before we delete the property.
+ exit_code, output, errput = svntest.actions.run_and_verify_svn(
+ None, [], 'log', '--xml', '-r1', sbox.wc_dir)
+
+ matched = 0
+ for line in output:
+ if date_re.search(line):
+ matched = 1
+ if not matched:
+ raise svntest.Failure("log contains no date element")
+
+  # Delete the svn:date revprop on revision 1.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'pdel', '--revprop', '-r1', 'svn:date',
+ sbox.wc_dir)
+
+ exit_code, output, errput = svntest.actions.run_and_verify_svn(
+ None, [], 'log', '--xml', '-r1', sbox.wc_dir)
+
+ for line in output:
+ if date_re.search(line):
+ raise svntest.Failure("log contains date element when svn:date is empty")
+
+#----------------------------------------------------------------------
+def log_limit(sbox):
+ "svn log --limit"
+ guarantee_repos_and_wc(sbox)
+
+ exit_code, out, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log',
+ '--limit', '2',
+ sbox.repo_url)
+ log_chain = parse_log_output(out)
+ check_log_chain(log_chain, [9, 8])
+
+ exit_code, out, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log',
+ '--limit', '2',
+ sbox.repo_url,
+ 'A/B')
+ log_chain = parse_log_output(out)
+ check_log_chain(log_chain, [9, 6])
+
+ exit_code, out, err = svntest.actions.run_and_verify_svn(
+ None, [],
+ 'log', '--limit', '2', '--revision', '2:HEAD', sbox.repo_url, 'A/B')
+
+ log_chain = parse_log_output(out)
+ check_log_chain(log_chain, [3, 6])
+
+ # Use -l instead of --limit to test both option forms.
+ exit_code, out, err = svntest.actions.run_and_verify_svn(
+ None, [],
+ 'log', '-l', '2', '--revision', '1', sbox.repo_url, 'A/B')
+
+ log_chain = parse_log_output(out)
+ check_log_chain(log_chain, [1])
+
+ must_be_positive = ".*Argument to --limit must be positive.*"
+
+ # error expected when limit <= 0
+ svntest.actions.run_and_verify_svn(None, must_be_positive,
+ 'log', '--limit', '0', '--revision', '1',
+ sbox.repo_url, 'A/B')
+
+ svntest.actions.run_and_verify_svn(None, must_be_positive,
+ 'log', '--limit', '-1', '--revision', '1',
+ sbox.repo_url, 'A/B')
+
+def log_base_peg(sbox):
+ "run log on an @BASE target"
+ guarantee_repos_and_wc(sbox)
+
+ target = os.path.join(sbox.wc_dir, 'A', 'B', 'E', 'beta') + '@BASE'
+
+ exit_code, out, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', target)
+
+ log_chain = parse_log_output(out)
+ check_log_chain(log_chain, [9, 1])
+
+ svntest.actions.run_and_verify_svn(None, [], 'update', '-r', '1',
+ sbox.wc_dir)
+
+ exit_code, out, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', target)
+
+ log_chain = parse_log_output(out)
+ check_log_chain(log_chain, [1])
+
+
+def log_verbose(sbox):
+ "run log with verbose output"
+ guarantee_repos_and_wc(sbox)
+
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', '-v',
+ sbox.wc_dir)
+
+ log_chain = parse_log_output(output)
+ path_counts = [2, 2, 1, 2, 2, 2, 4, 1, 20]
+ check_log_chain(log_chain, list(range(max_revision, 1 - 1, -1)), path_counts)
+
+
+def log_parser(sbox):
+ "meta-test for the log parser"
+
+ logs = ['''------------------------------------------------------------------------
+r24 | chuck | 2007-04-30 10:18:01 -0500 (Mon, 16 Apr 2007) | 1 line
+Changed paths:
+ M /trunk/death-ray.c
+ M /trunk/frobnicator/frapnalyzer.c
+
+Merge r12 and r14 from branch to trunk.
+------------------------------------------------------------------------
+r14 | bob | 2007-04-16 18:50:29 -0500 (Mon, 16 Apr 2007) | 1 line
+Changed paths:
+ M /trunk/death-ray.c
+Merged via: r24
+
+Remove inadvertent changes to Death-Ray-o-Matic introduced in r12.
+------------------------------------------------------------------------
+r12 | alice | 2007-04-16 19:02:48 -0500 (Mon, 16 Apr 2007) | 1 line
+Changed paths:
+ M /trunk/frobnicator/frapnalyzer.c
+ M /trunk/death-ray.c
+Merged via: r24
+
+Fix frapnalyzer bug in frobnicator.
+------------------------------------------------------------------------''',
+ '''------------------------------------------------------------------------
+r24 | chuck | 2007-04-30 10:18:01 -0500 (Mon, 16 Apr 2007) | 1 line
+
+Merge r12 and r14 from branch to trunk.
+------------------------------------------------------------------------
+r14 | bob | 2007-04-16 18:50:29 -0500 (Mon, 16 Apr 2007) | 1 line
+Merged via: r24
+
+Remove inadvertent changes to Death-Ray-o-Matic introduced in r12.
+------------------------------------------------------------------------
+r12 | alice | 2007-04-16 19:02:48 -0500 (Mon, 16 Apr 2007) | 1 line
+Merged via: r24
+
+Fix frapnalyzer bug in frobnicator.
+------------------------------------------------------------------------
+r10 | alice | 2007-04-16 19:02:28 -0500 (Mon, 16 Apr 2007) | 1 line
+Merged via: r12, r24
+
+Fix frapnalyzer documentation.
+------------------------------------------------------------------------
+r9 | bob | 2007-04-16 19:01:48 -0500 (Mon, 16 Apr 2007) | 1 line
+Merged via: r12, r24
+
+Whitespace fixes. No functional change.
+------------------------------------------------------------------------''',
+ '''------------------------------------------------------------------------
+r5 | kfogel | Tue 6 Nov 2001 17:18:19 | 1 line
+
+Log message for revision 5.
+------------------------------------------------------------------------
+r4 | kfogel | Tue 6 Nov 2001 17:18:18 | 3 lines
+
+Log message for revision 4
+but with multiple lines
+to test the code.
+------------------------------------------------------------------------
+r3 | kfogel | Tue 6 Nov 2001 17:18:17 | 1 line
+
+Log message for revision 3.
+------------------------------------------------------------------------''',
+ ] # end of log list
+
+ for log in logs:
+ log_chain = parse_log_output([line+"\n" for line in log.split("\n")])
+
+
+def check_merge_results(log_chain, expected_merges=None,
+ expected_reverse_merges=None):
+ '''Check LOG_CHAIN to see if the log information contains 'Merged via'
+ and/or 'Reverse Merged via' information indicated by EXPECTED_MERGES and
+ EXPECTED_REVERSE_MERGES respectively. EXPECTED_MERGES and
+ EXPECTED_REVERSE_MERGES are dictionaries whose keys are the merged
+ revisions, and whose values are the merging revisions.'''
+
+  # Check to see if the number and values of the revisions are correct
+ for log in log_chain:
+ if not ((expected_merges and log['revision'] in expected_merges)
+ or (expected_reverse_merges
+ and log['revision'] in expected_reverse_merges)):
+ raise SVNUnexpectedLogs("Found unexpected revision %d" %
+ log['revision'], log_chain)
+
+ # Check to see that each rev in expected_merges contains the correct data
+ if expected_merges:
+ for rev in expected_merges:
+ try:
+ log = [x for x in log_chain if x['revision'] == rev][0]
+ if 'merges' in log.keys():
+ actual = log['merges']
+ else:
+ actual = []
+ expected = expected_merges[rev]
+
+ if actual != expected:
+ raise SVNUnexpectedLogs(("Merging revisions in rev %d not " +
+ "correct; expecting %s, found %s") %
+ (rev, str(expected), str(actual)), log_chain)
+ except IndexError:
+ raise SVNUnexpectedLogs("Merged revision '%d' missing" % rev,
+ log_chain)
+
+  # Check to see that each rev in expected_reverse_merges contains the correct data
+ if expected_reverse_merges:
+ for rev in expected_reverse_merges:
+ try:
+ log = [x for x in log_chain if x['revision'] == rev][0]
+ if 'reverse_merges' in log.keys():
+ actual = log['reverse_merges']
+ else:
+ actual = []
+ expected = expected_reverse_merges[rev]
+
+ if actual != expected:
+ raise SVNUnexpectedLogs(("Reverse merging revisions in rev %d not " +
+ "correct; expecting %s, found %s") %
+ (rev, str(expected), str(actual)), log_chain)
+ except IndexError:
+ raise SVNUnexpectedLogs("Reverse merged revision '%d' missing" % rev,
+ log_chain)
+
+
+@SkipUnless(server_has_mergeinfo)
+def merge_sensitive_log_single_revision(sbox):
+ "test 'svn log -g' on a single revision"
+
+ merge_history_repos(sbox)
+
+ # Paths we care about
+ wc_dir = sbox.wc_dir
+ TRUNK_path = os.path.join(wc_dir, "trunk")
+ BRANCH_B_path = os.path.join(wc_dir, "branches", "b")
+
+ # Run the merge sensitive log, and compare results
+ saved_cwd = os.getcwd()
+
+ expected_merges = {
+ 14 : [],
+ 13 : [14],
+ 12 : [14],
+ 11 : [14, 12],
+ 10 : [14],
+ }
+ os.chdir(TRUNK_path)
+ # First try a single rev using -rN
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', '-g',
+ '-r14')
+
+
+ log_chain = parse_log_output(output)
+ check_merge_results(log_chain, expected_merges)
+ # Then try a single rev using --limit 1
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', '-g',
+ '--limit', '1',
+ '-r14:1')
+
+
+ log_chain = parse_log_output(output)
+ check_merge_results(log_chain, expected_merges)
+ os.chdir(saved_cwd)
+
+ expected_merges = {
+ 12 : [],
+ 11 : [12],
+ }
+ # First try a single rev using -rN
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', '-g',
+ '-r12',
+ BRANCH_B_path)
+ log_chain = parse_log_output(output)
+ check_merge_results(log_chain, expected_merges)
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', '-g',
+ '--limit', '1',
+ '-r12:1',
+ BRANCH_B_path)
+ log_chain = parse_log_output(output)
+ check_merge_results(log_chain, expected_merges)
+
+
+@SkipUnless(server_has_mergeinfo)
+def merge_sensitive_log_branching_revision(sbox):
+ "test 'svn log -g' on a branching revision"
+
+ merge_history_repos(sbox)
+
+ # Paths we care about
+ wc_dir = sbox.wc_dir
+ BRANCH_B_path = os.path.join(wc_dir, "branches", "b")
+
+ # Run log on a copying revision
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', '-g',
+ '-r10',
+ BRANCH_B_path)
+
+ # Parse and check output. There should be no extra revisions.
+ log_chain = parse_log_output(output)
+ expected_merges = {
+ 10 : [],
+ }
+ check_merge_results(log_chain, expected_merges)
+
+
+@SkipUnless(server_has_mergeinfo)
+def merge_sensitive_log_non_branching_revision(sbox):
+ "test 'svn log -g' on a non-branching revision"
+
+ merge_history_repos(sbox)
+
+ TRUNK_path = os.path.join(sbox.wc_dir, "trunk")
+
+ # Run log on a non-copying revision that adds mergeinfo
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', '-g',
+ '-r6',
+ TRUNK_path)
+
+  # Parse and check output. There should be two extra merged revisions.
+ log_chain = parse_log_output(output)
+ expected_merges = {
+ 6 : [],
+ 4 : [6],
+ 3 : [6],
+ }
+ check_merge_results(log_chain, expected_merges)
+
+
+@SkipUnless(server_has_mergeinfo)
+def merge_sensitive_log_added_path(sbox):
+ "test 'svn log -g' a path added before merge"
+
+ merge_history_repos(sbox)
+
+ XI_path = os.path.join(sbox.wc_dir, "trunk", "A", "xi")
+
+ # Run log on a non-copying revision that adds mergeinfo
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', '-g',
+ XI_path)
+
+ # Parse and check output. There should be one extra revision.
+ log_chain = parse_log_output(output)
+ expected_merges = {
+ 14 : [],
+ 12 : [],
+ 11 : [],
+ }
+ check_merge_results(log_chain, expected_merges)
+
+ revprops = [{'svn:author': 'jconstant', 'svn:date': '',
+ 'svn:log': 'Merged branches/b to trunk.'},
+ {'svn:author': 'jconstant', 'svn:date': '',
+ 'svn:log': 'Merged branches/a to branches/b.'},
+ {'svn:author': 'jrandom', 'svn:date': '',
+ 'svn:log': "Added 'xi' to branches/a,"
+ ' made a few other changes.'}]
+ svntest.actions.run_and_verify_log_xml(expected_revprops=revprops,
+ args=['-g', XI_path])
+
+
+def log_single_change(sbox):
+ "test log -c for a single change"
+
+ guarantee_repos_and_wc(sbox)
+ repo_url = sbox.repo_url
+
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', '-c',
+ 4, repo_url)
+ log_chain = parse_log_output(output)
+ check_log_chain(log_chain, [4])
+
+def log_changes_range(sbox):
+ "test log -c on range of changes"
+
+ guarantee_repos_and_wc(sbox)
+ repo_url = sbox.repo_url
+
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', '-c',
+ '2-5', repo_url)
+
+ log_chain = parse_log_output(output)
+ check_log_chain(log_chain, [2, 3, 4, 5])
+
+def log_changes_list(sbox):
+ "test log -c on comma-separated list of changes"
+
+ guarantee_repos_and_wc(sbox)
+ repo_url = sbox.repo_url
+
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', '-c',
+ '2,5,7',
+ repo_url)
+
+ log_chain = parse_log_output(output)
+ check_log_chain(log_chain, [2, 5, 7])
+
+def log_changes_complex(sbox):
+ "test log -c on complex set of ranges"
+
+ guarantee_repos_and_wc(sbox)
+ repo_url = sbox.repo_url
+
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'log', '-c',
+ '2,5-3,-8,6-7', repo_url)
+
+ log_chain = parse_log_output(output)
+ check_log_chain(log_chain, [2, 5, 4, 3, 8, 6, 7])
+
+#----------------------------------------------------------------------
+def only_one_wc_path(sbox):
+ "svn log of two wc paths is disallowed"
+
+ sbox.build(read_only = True)
+ os.chdir(sbox.wc_dir)
+
+ svntest.actions.run_and_verify_log_xml(
+ expected_stderr=('.*When specifying working copy paths,'
+ ' only one target may be given'),
+ args=['A/mu', 'iota'])
+
+#----------------------------------------------------------------------
+def retrieve_revprops(sbox):
+ "test revprop retrieval"
+
+ sbox.build()
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+
+ # test properties
+ author = 'jrandom'
+ msg1 = 'Log message for revision 1.'
+ msg2 = 'Log message for revision 2.'
+ custom_name = 'retrieve_revprops'
+ custom_value = 'foo bar'
+
+ # Commit a change.
+ wc_dir = sbox.wc_dir
+ cwd = os.getcwd()
+ os.chdir(wc_dir)
+ svntest.main.file_append(os.path.join('A', 'D', 'H', 'omega'), "new otext")
+ os.chdir(cwd)
+ omega_path = os.path.join(wc_dir, 'A', 'D', 'H', 'omega')
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/H/omega' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/H/omega', wc_rev=2, status=' ')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status,
+ [],
+ '-m', msg2,
+ omega_path)
+
+ os.chdir(wc_dir)
+
+ # Set custom property on r1 and r2.
+ svntest.actions.run_and_verify_svn(
+ None, [], # message, expected_stdout, expected_stderr
+ 'ps', '--revprop', '-r1', custom_name, custom_value, sbox.repo_url)
+ svntest.actions.run_and_verify_svn(
+ None, [], # message, expected_stdout, expected_stderr
+ 'ps', '--revprop', '-r2', custom_name, custom_value, sbox.repo_url)
+
+ # Can't set revprops with log.
+ svntest.actions.run_and_verify_log_xml(
+ expected_stderr=(".*cannot assign with 'with-revprop' option"
+ " \(drop the '='\)"),
+ args=['--with-revprop=foo=bar'])
+
+ # basic test without revprop options
+ svntest.actions.run_and_verify_log_xml(
+ expected_revprops=[{'svn:author': author, 'svn:date': '', 'svn:log': msg1}],
+ args=['-r1'])
+
+ # basic test without revprop options, with multiple revisions
+ svntest.actions.run_and_verify_log_xml(
+ expected_revprops=[{'svn:author': author, 'svn:date': '', 'svn:log': msg1},
+ {'svn:author': author, 'svn:date': '', 'svn:log': msg2}])
+
+ # -q with no revprop options must suppress svn:log only.
+ svntest.actions.run_and_verify_log_xml(
+ expected_revprops=[{'svn:author': author, 'svn:date': ''}],
+ args=['-q', '-r1'])
+
+ # Request svn:date, svn:log, and a non-existent property.
+ svntest.actions.run_and_verify_log_xml(
+ expected_revprops=[{'svn:date': '', 'svn:log': msg1}],
+ args=['-r1', '--with-revprop=svn:date', '--with-revprop', 'svn:log',
+ '--with-revprop', 'nosuchprop'])
+
+ # Get all revprops.
+ svntest.actions.run_and_verify_log_xml(
+ expected_revprops=[{'svn:author': author, 'svn:date': '',
+ 'svn:log': msg1, custom_name: custom_value}],
+ args=['-r1', '--with-all-revprops'])
+
+ # Get all revprops, with multiple revisions.
+ svntest.actions.run_and_verify_log_xml(
+ expected_revprops=[{'svn:author': author, 'svn:date': '',
+ 'svn:log': msg1, custom_name: custom_value},
+ {'svn:author': author, 'svn:date': '',
+ 'svn:log': msg2, custom_name: custom_value}],
+ args=['--with-all-revprops'])
+
+ # Get only the custom property.
+ svntest.actions.run_and_verify_log_xml(
+ expected_revprops=[{custom_name: custom_value}],
+ args=['-r1', '--with-revprop', custom_name])
+
+
+@Issue(2866)
+def log_xml_with_bad_data(sbox):
+ "log --xml escapes non-utf8 data"
+ svntest.actions.load_repo(sbox, os.path.join(os.path.dirname(sys.argv[0]),
+ 'log_tests_data',
+ 'xml-invalid-chars.dump'))
+ r0_props = {
+ 'svn:date' : '',
+ 'svn:log' : 'After the colon are a space, 3 bad chars, '
+ + '2 good chars, and a period: '
+ + '?\\021?\\022?\\017\t\n.' }
+ svntest.actions.run_and_verify_log_xml(
+ expected_revprops=(r0_props,), args=[sbox.repo_url])
+
+@SkipUnless(server_has_mergeinfo)
+@Issue(3172)
+def merge_sensitive_log_target_with_bogus_mergeinfo(sbox):
+ "'svn log -g target_with_bogus_mergeinfo'"
+  # A test for issue #3172 "'svn log -g' seems to encounter error on server":
+  # 'log -g' fails the moment it encounters bogus mergeinfo which claims a
+  # merge from a non-existent path@REV1-REV2.
+ #
+ # ### Present test: test that 'svn log -g' does not report an error.
+ # ### Desirable test: test that 'svn log -g' produces the results expected
+ # from ignoring all such revisions and reporting on all revisions that
+ # are valid.
+
+ # In r2, create /A/B-copied as a copy of something that existed at r1, and
+ # /A/B-new as something new. Manually set mergeinfo on /A/C@2 saying it
+ # was merged from the non-existent r1 of /A/B-copied, and on /A/D@2 saying
+ # it was merged from the non-existent r1 of /A/B-new.
+ sbox.build()
+ wc_path = sbox.wc_dir
+ B_copied_path = os.path.join(wc_path, 'A', 'B-copied')
+ B_new_path = os.path.join(wc_path, 'A', 'B-new')
+ B_path = os.path.join(wc_path, 'A', 'B')
+ C_path = os.path.join(wc_path, 'A', 'C')
+ D_path = os.path.join(wc_path, 'A', 'D')
+ svntest.main.run_svn(None, 'cp', B_path, B_copied_path)
+ svntest.main.run_svn(None, 'ps', SVN_PROP_MERGEINFO, '/A/B-copied:1', C_path)
+ svntest.main.run_svn(None, 'mkdir', B_new_path)
+ svntest.main.run_svn(None, 'ps', SVN_PROP_MERGEINFO, '/A/B-new:1', D_path)
+ svntest.main.run_svn(None, 'ci', '-m', 'setting bogus mergeinfo', wc_path)
+
+ # The tests: Check that 'svn log -g' doesn't error on these.
+ svntest.actions.run_and_verify_svn(None, [], 'log', '-g', C_path)
+ svntest.actions.run_and_verify_svn(None, [], 'log', '-g', D_path)
+
+@SkipUnless(server_has_mergeinfo)
+@Issue(3235)
+def merge_sensitive_log_added_mergeinfo_replaces_inherited(sbox):
+ "log -g and explicit mergeinfo replacing inherited"
+
+ # Test that log -g reports the correct merged revisions when
+ # a merge results in added explicit mergeinfo on a path, but that
+ # path previously inherited mergeinfo (rather than had no explicit
+ # or inherited mergeinfo). See issue #3235, specifically
+ # http://subversion.tigris.org/issues/show_bug.cgi?id=3235#desc8.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ # Some paths we'll care about
+ D_COPY_path = os.path.join(wc_dir, "A_COPY", "D")
+ H_COPY_path = os.path.join(wc_dir, "A_COPY", "D", "H")
+
+ # Merge all available changes from 'A/D' to 'A_COPY/D' and commit as r7.
+ expected_output = wc.State(D_COPY_path, {
+ 'H/psi' : Item(status='U '),
+ 'G/rho' : Item(status='U '),
+ 'H/omega' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(D_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(D_COPY_path, {
+ })
+ expected_status = wc.State(D_COPY_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'G' : Item(status=' ', wc_rev=2),
+ 'G/pi' : Item(status=' ', wc_rev=2),
+ 'G/rho' : Item(status='M ', wc_rev=2),
+ 'G/tau' : Item(status=' ', wc_rev=2),
+ 'H' : Item(status=' ', wc_rev=2),
+ 'H/chi' : Item(status=' ', wc_rev=2),
+ 'H/psi' : Item(status='M ', wc_rev=2),
+ 'H/omega' : Item(status='M ', wc_rev=2),
+ 'gamma' : Item(status=' ', wc_rev=2),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D:2-6'}),
+ 'G' : Item(),
+ 'G/pi' : Item("This is the file 'pi'.\n"),
+ 'G/rho' : Item("New content"),
+ 'G/tau' : Item("This is the file 'tau'.\n"),
+ 'H' : Item(),
+ 'H/chi' : Item("This is the file 'chi'.\n"),
+ 'H/psi' : Item("New content"),
+ 'H/omega' : Item("New content"),
+ 'gamma' : Item("This is the file 'gamma'.\n")
+ })
+ expected_skip = wc.State(D_COPY_path, { })
+ svntest.actions.run_and_verify_merge(D_COPY_path, None, None,
+ sbox.repo_url + '/A/D', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Commit the merge.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A_COPY/D' : Item(verb='Sending'),
+ 'A_COPY/D/G/rho' : Item(verb='Sending'),
+ 'A_COPY/D/H/omega' : Item(verb='Sending'),
+ 'A_COPY/D/H/psi' : Item(verb='Sending'),
+ })
+ wc_status.tweak('A_COPY/D',
+ 'A_COPY/D/G/rho',
+ 'A_COPY/D/H/omega',
+ 'A_COPY/D/H/psi',
+ wc_rev=7)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+ wc_disk.tweak("A_COPY/D",
+ props={SVN_PROP_MERGEINFO : '/A/D:2-6'})
+ wc_disk.tweak("A_COPY/D/G/rho", "A_COPY/D/H/omega", "A_COPY/D/H/psi",
+ contents="New content")
+
+ # Reverse merge r3 from 'A/D/H' to 'A_COPY/D/H' and commit as r8.
+ # First update the wc so mergeinfo inheritance can occur. This is
+ # necessary so A_COPY/D/H 'knows' that r3 has been merged into it.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(7), [],
+ 'up', wc_dir)
+ wc_status.tweak(wc_rev=7)
+ expected_output = wc.State(H_COPY_path, {
+ 'psi' : Item(status='U ')
+ })
+ expected_mergeinfo_output = wc.State(H_COPY_path, {
+ '' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(H_COPY_path, {
+ })
+ expected_status = wc.State(H_COPY_path, {
+ '' : Item(status=' M', wc_rev=7),
+ 'psi' : Item(status='M ', wc_rev=7),
+ 'omega' : Item(status=' ', wc_rev=7),
+ 'chi' : Item(status=' ', wc_rev=7),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:2,4-6'}),
+ 'psi' : Item("This is the file 'psi'.\n"),
+ 'omega' : Item("New content"),
+ 'chi' : Item("This is the file 'chi'.\n"),
+ })
+ expected_skip = wc.State(H_COPY_path, { })
+ svntest.actions.run_and_verify_merge(H_COPY_path, '3', '2',
+ sbox.repo_url + '/A/D/H', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip,
+ check_props=True)
+
+ # Commit the merge.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A_COPY/D/H' : Item(verb='Sending'),
+ 'A_COPY/D/H/psi' : Item(verb='Sending'),
+ })
+ wc_status.tweak('A_COPY/D/H',
+ 'A_COPY/D/H/psi',
+ wc_rev=8)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+ wc_disk.tweak("A_COPY/D/H",
+ props={SVN_PROP_MERGEINFO : '/A/D:2,4-6'})
+ wc_disk.tweak("A_COPY/D/G/rho", "A_COPY/D/H/omega", "A_COPY/D/H/psi",
+ contents="New content")
+
+ # Check that outputs of,
+ #
+ # log -g -r8 wc_dir
+ # log -g -r8 wc_dir/A_COPY
+ # log -g -r8 wc_dir/A_COPY/D
+ # log -g -r8 wc_dir/A_COPY/D/H
+ # log -g -r8 wc_dir/A_COPY/D/H/psi
+ #
+ # all show that r3 was merged via r8.
+
+ def run_log_g_r8(log_target):
+ expected_merges = {
+ 8 : []}
+ expected_reverse_merges = {
+ 3 : [8]}
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None,
+ [],
+ 'log', '-g',
+ '-r8',
+ log_target)
+ log_chain = parse_log_output(output)
+ check_merge_results(log_chain, expected_merges, expected_reverse_merges)
+
+ run_log_g_r8(wc_dir)
+ run_log_g_r8(os.path.join(wc_dir, "A_COPY"))
+ run_log_g_r8(os.path.join(wc_dir, "A_COPY", "D"))
+ run_log_g_r8(os.path.join(wc_dir, "A_COPY", "D", "H"))
+ run_log_g_r8(os.path.join(wc_dir, "A_COPY", "D", "H", "psi"))
+
+#----------------------------------------------------------------------
+
+@SkipUnless(server_has_mergeinfo)
+@Issue(3285)
+def merge_sensitive_log_propmod_merge_inheriting_path(sbox):
+ "log -g and simple propmod to merge-inheriting path"
+
+ # Issue #3285 (http://subversion.tigris.org/issues/show_bug.cgi?id=3285)
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ A_path = os.path.join(wc_dir, 'A')
+ A_COPY_path = os.path.join(wc_dir, 'A_COPY')
+ A_COPY_psi_path = os.path.join(wc_dir, 'A_COPY', 'D', 'H', 'psi')
+
+ # Merge the post-copy changes to A into A_COPY
+ svntest.main.run_svn(None, 'up', wc_dir)
+ svntest.main.run_svn(None, 'merge', '-r2:6', A_path, A_COPY_path)
+ svntest.main.run_svn(None, 'ci', '-m', 'Merge changes from A.', wc_dir)
+
+  # Now, tweak a non-mergeinfo property on A_COPY/D/H/psi.
+ svntest.main.run_svn(None, 'up', wc_dir)
+ svntest.main.run_svn(None, 'propset', 'foo', 'bar', A_COPY_psi_path)
+ svntest.main.run_svn(None, 'ci', '-m',
+ 'Set property "foo" to "bar" on A_COPY/D/H/psi', wc_dir)
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+ # Check that log -g -r7 on wc_dir/A_COPY and parents show merges of r3-r6.
+ def run_log_g_r7(log_target):
+ expected_merges = {
+ 7 : [],
+ 6 : [7],
+ 5 : [7],
+ 4 : [7],
+ 3 : [7],
+ }
+ exit_code, output, err = svntest.actions.run_and_verify_svn(
+ None, [], 'log', '-g', '-r7', log_target)
+ log_chain = parse_log_output(output)
+ check_merge_results(log_chain, expected_merges)
+ run_log_g_r7(wc_dir)
+ run_log_g_r7(A_COPY_path)
+
+ # Check that log -g -r8 on wc_dir/A_COPY/D/H/psi and parents show no merges.
+ def run_log_g_r8(log_target):
+ expected_merges = { 8 : [] }
+ exit_code, output, err = svntest.actions.run_and_verify_svn(
+ None, [], 'log', '-g', '-r8', log_target)
+ log_chain = parse_log_output(output)
+ check_merge_results(log_chain, expected_merges)
+ run_log_g_r8(wc_dir)
+ run_log_g_r8(A_COPY_path)
+ run_log_g_r8(A_COPY_psi_path)
+
#----------------------------------------------------------------------
# Should be able to run 'svn log' against an uncommitted copy or move
# destination. See http://svn.haxx.se/dev/archive-2010-01/0492.shtml.
def log_of_local_copy(sbox):
  "svn log on an uncommitted copy"

  guarantee_repos_and_wc(sbox)

  C_path = os.path.join(sbox.wc_dir, "A", "C")
  C_moved_path = os.path.join(sbox.wc_dir, "A", "C_MOVED")
  psi_path = os.path.join(sbox.wc_dir, "A", "D", "H", "psi")
  psi_moved_path = os.path.join(sbox.wc_dir, "A", "D", "H", "psi_moved")

  # For a directory and for a file: log the source, move the node, then
  # log the (still uncommitted) move destination and require that the two
  # logs agree.
  #
  # The destination log used to fail with:
  #
  #   svn log -v log_tests-29\A\C_MOVED
  #    ..\..\..\subversion\svn\log-cmd.c:600: (apr_err=160013)
  #    ..\..\..\subversion\libsvn_client\log.c:627: (apr_err=160013)
  #    ..\..\..\subversion\libsvn_repos\log.c:1449: (apr_err=160013)
  #    ..\..\..\subversion\libsvn_repos\log.c:1092: (apr_err=160013)
  #    ..\..\..\subversion\libsvn_fs_fs\tree.c:2818: (apr_err=160013)
  #   svn: File not found: revision 9, path '/A/C_MOVED'
  for src, dst in ((C_path, C_moved_path), (psi_path, psi_moved_path)):
    exit_code, src_log_out, err = svntest.actions.run_and_verify_svn(
      None, [], 'log', '-v', src)
    svntest.actions.run_and_verify_svn(None, [], 'mv', src, dst)
    exit_code, dst_log_out, err = svntest.actions.run_and_verify_svn(
      None, [], 'log', '-v', dst)
    if src_log_out != dst_log_out:
      raise svntest.Failure("Log on uncommitted move destination '%s' "
                            "differs from that on move source '%s'"
                            % (dst, src))
+
#----------------------------------------------------------------------

@SkipUnless(server_has_mergeinfo)
@Issue(3176)
def merge_sensitive_log_reverse_merges(sbox):
  "log -g differentiates forward and reverse merges"

  sbox.build()
  wc_dir = sbox.wc_dir
  wc_disk, wc_status = set_up_branch(sbox)

  A_path = os.path.join(wc_dir, 'A')
  A_COPY_path = os.path.join(wc_dir, 'A_COPY')
  D_COPY_path = os.path.join(wc_dir, 'A_COPY', 'D')

  # Merge -c3,5 from A to A_COPY, commit as r7
  svntest.main.run_svn(None, 'up', wc_dir)
  svntest.main.run_svn(None, 'merge', '-c3,5', A_path, A_COPY_path)
  sbox.simple_commit(message='Merge -c3,5 from A to A_COPY')

  # Merge -c-3,-5,4,6 from A to A_COPY, commit as r8.  This reverses the
  # r7 merge of r3 and r5 and forward-merges r4 and r6.
  svntest.main.run_svn(None, 'up', wc_dir)
  svntest.main.run_svn(None, 'merge', '-c-3,4,-5,6', A_path, A_COPY_path)
  sbox.simple_commit(message='Merge -c-3,-5,4,6 from A to A_COPY')

  # Update so the whole working copy is at a single uniform revision.
  svntest.main.run_svn(None, 'up', wc_dir)

  # Run log -g on path with explicit mergeinfo (A_COPY).
  exit_code, out, err = svntest.actions.run_and_verify_svn(None, [],
                                                           'log', '-g', '-r8',
                                                           A_COPY_path)
  log_chain = parse_log_output(out)
  # r4 and r6 were merged in r8; r3 and r5 were reverse-merged in r8.
  expected_merges = {
    8 : [],
    6 : [8],
    4 : [8],
  }
  expected_reverse_merges = {
    5 : [8],
    3 : [8],
  }
  check_merge_results(log_chain, expected_merges, expected_reverse_merges)

  # Run log -g on path with inherited mergeinfo (A_COPY/D).
  exit_code, out, err = svntest.actions.run_and_verify_svn(None, [],
                                                           'log', '-g', '-r8',
                                                           D_COPY_path)
  log_chain = parse_log_output(out)
  # expected_merges is the same as before.
  expected_reverse_merges = {
    # 5 : [8], r5 only affects A_COPY/B/E/beta
    3 : [8],
  }
  check_merge_results(log_chain, expected_merges, expected_reverse_merges)
+
#----------------------------------------------------------------------

@SkipUnless(server_has_mergeinfo)
@Issue(3650)
def merge_sensitive_log_ignores_cyclic_merges(sbox):
  "log -g should ignore cyclic merges"

  # Build a history with repeated trunk->branch syncs and branch->trunk
  # reintegrations (a "cyclic" merge pattern), then verify that log -g on
  # the trunk reports each revision only once.
  sbox.build()
  wc_dir = sbox.wc_dir
  wc_disk, wc_status = set_up_branch(sbox)

  A_path = os.path.join(wc_dir, 'A')
  X_path = os.path.join(wc_dir, 'A', 'C', 'X')
  kappa_path = os.path.join(wc_dir, 'A', 'C', 'X', 'kappa')
  chi_path = os.path.join(wc_dir, 'A', 'D', 'H', 'chi')
  A_COPY_path = os.path.join(wc_dir, 'A_COPY')
  mu_COPY_path = os.path.join(wc_dir, 'A_COPY', 'mu')
  tau_COPY_path = os.path.join(wc_dir, 'A_COPY', 'D', 'G', 'tau')
  Z_COPY_path = os.path.join(wc_dir, 'A_COPY', 'C', 'Z')
  nu_COPY_path = os.path.join(wc_dir, 'A_COPY', 'C', 'Z', 'nu')

  # Make an edit on the "branch" to A_COPY/mu, commit as r7.
  svntest.main.file_write(mu_COPY_path, "Branch edit.\n")
  sbox.simple_commit(message='Branch edit')

  # Make an edit on both the "trunk" and the "branch", commit as r8.
  svntest.main.file_write(chi_path, "Trunk edit.\n")
  svntest.main.file_write(tau_COPY_path, "Branch edit.\n")
  sbox.simple_commit(message='Branch and trunk edits in one rev')

  # Sync merge A to A_COPY, commit as r9
  svntest.main.run_svn(None, 'up', wc_dir)
  svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A', A_COPY_path)
  sbox.simple_commit(message='Sync merge A to A_COPY')

  # Reintegrate A_COPY to A, commit as r10
  svntest.main.run_svn(None, 'up', wc_dir)
  svntest.main.run_svn(None, 'merge', '--reintegrate',
                       sbox.repo_url + '/A_COPY', A_path)
  sbox.simple_commit(message='Reintegrate A_COPY to A')

  # Do a --record-only merge of r10 from A to A_COPY, commit as r11.
  # This will allow us to continue using the branch without deleting it.
  svntest.main.run_svn(None, 'up', wc_dir)
  svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A', A_COPY_path)
  sbox.simple_commit(message='--record-only merge r10 from A to A_COPY')

  # Make an edit on the "branch"; add A_COPY/C and A_COPY/C/Z/nu,
  # commit as r12.
  svntest.main.run_svn(None, 'mkdir', Z_COPY_path)
  svntest.main.file_write(nu_COPY_path, "A new branch file.\n")
  svntest.main.run_svn(None, 'add', nu_COPY_path)
  sbox.simple_commit(message='Branch edit: Add a subtree')

  # Make an edit on the "trunk"; add A/C/X and A/C/X/kappa,
  # commit as r13.
  svntest.main.run_svn(None, 'mkdir', X_path)
  svntest.main.file_write(kappa_path, "A new trunk file.\n")
  svntest.main.run_svn(None, 'add', kappa_path)
  sbox.simple_commit(message='Trunk edit: Add a subtree')
  svntest.main.run_svn(None, 'up', wc_dir)

  # Sync merge A to A_COPY, commit as r14
  svntest.main.run_svn(None, 'up', wc_dir)
  svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A', A_COPY_path)
  sbox.simple_commit(message='Sync merge A to A_COPY')

  # Reintegrate A_COPY to A, commit as r15
  svntest.main.run_svn(None, 'up', wc_dir)
  svntest.main.run_svn(None, 'merge', '--reintegrate',
                       sbox.repo_url + '/A_COPY', A_path)
  sbox.simple_commit(message='2nd reintegrate of A_COPY to A')

  # Run 'svn log -g A'. We expect to see r13, r10, r6, r5, r4, and r3 only
  # once, as part of A's own history, not as merged in from A_COPY.
  expected_merges = {
    15 : [],
    14 : [15],
    13 : [],
    12 : [15],
    11 : [15],
    10 : [],
    9 : [15,11],
    8 : [15,11,9],
    7 : [15,11],
    6 : [],
    5 : [],
    4 : [],
    3 : [],
    2 : [15,11],
    1 : [],
  }
  svntest.main.run_svn(None, 'up', wc_dir)
  exit_code, out, err = svntest.actions.run_and_verify_svn(None, [],
                                                           'log', '-g',
                                                           A_path)
  log_chain = parse_log_output(out)
  check_merge_results(log_chain, expected_merges)
+
#----------------------------------------------------------------------
@Issue(3931,3936)
def log_with_unrelated_peg_and_operative_revs(sbox):
  "log with unrelated peg and operative rev targets"

  guarantee_repos_and_wc(sbox)

  # /A/D/G/rho was deleted in revision 5 and recreated in revision 8, so
  # pegged at r2 it is unrelated to the recreated node: each of these
  # operative ranges must produce an error rather than bogus log output.
  target = sbox.repo_url + '/A/D/G/rho@2'

  cases = [
    (".*(File|path) not found.*",                 '6:7'),
    (".*(File|path) not found.*",                 '7:6'),
    (".*Unable to find repository location for.*", '2:9'),
    (".*Unable to find repository location for.*", '9:2'),
    (".*Unable to find repository location for.*", '2:HEAD'),
    (".*Unable to find repository location for.*", 'HEAD:2'),
  ]
  for expected_error, rev_range in cases:
    svntest.actions.run_and_verify_svn(None, expected_error,
                                       'log', '-r', rev_range, target)
+
#----------------------------------------------------------------------
@Issue(3937)
def log_on_nonexistent_path_and_valid_rev(sbox):
  "log on nonexistent path does not error out"

  sbox.build(create_wc=False)
  real_path_real_rev = sbox.repo_url + '/A@1'
  real_path_bad_rev = sbox.repo_url + '/A@99'
  bad_url_bad_rev = sbox.repo_url + '/Z@99'
  bad_path_real_rev = sbox.repo_url + '/Z@1'
  bad_path_default_rev = sbox.repo_url + '/Z'

  # A real path at a real revision logs without error.
  svntest.actions.run_and_verify_svn(None, [],
                                     'log', '-q', real_path_real_rev)

  # A bogus peg revision fails with 'no such revision', whether or not the
  # path itself exists.  (The pattern previously read '99*': the '*'
  # quantified the second '9', so it would also have matched
  # 'No such revision 9'.)
  expected_error = ".*No such revision 99.*"
  svntest.actions.run_and_verify_svn(None, expected_error,
                                     'log', '-q', real_path_bad_rev)
  svntest.actions.run_and_verify_svn(None, expected_error,
                                     'log', '-q', bad_url_bad_rev)

  # A bogus path at a valid (or default) revision fails with 'not found'.
  expected_error = ".*not found.*"
  svntest.actions.run_and_verify_svn(None, expected_error,
                                     'log', '-q', bad_path_real_rev)
  svntest.actions.run_and_verify_svn(None, expected_error,
                                     'log', '-q', bad_path_default_rev)
+
#----------------------------------------------------------------------
# Test for issue #4022 'svn log -g interprets change in inherited mergeinfo
# due to move as a merge'.
@SkipUnless(server_has_mergeinfo)
@Issue(4022)
def merge_sensitive_log_copied_path_inherited_mergeinfo(sbox):
  "log -g on copied path with inherited mergeinfo"

  sbox.build()
  wc_dir = sbox.wc_dir
  wc_disk, wc_status = set_up_branch(sbox, branch_only=True)

  A_path = os.path.join(wc_dir, 'A')
  gamma_COPY_path = os.path.join(wc_dir, 'A_COPY', 'D', 'gamma')
  old_gamma_path = os.path.join(wc_dir, 'A', 'D', 'gamma')
  new_gamma_path = os.path.join(wc_dir, 'A', 'C', 'gamma')

  # r3 - Modify a file (A_COPY/D/gamma) on the branch
  svntest.main.file_write(gamma_COPY_path, "Branch edit.\n")
  sbox.simple_commit(message='Branch edit')

  # r4 - Reintegrate A_COPY to A
  svntest.main.run_svn(None, 'up', wc_dir)
  svntest.main.run_svn(None, 'merge', '--reintegrate',
                       sbox.repo_url + '/A_COPY', A_path)
  sbox.simple_commit(message='Reintegrate A_COPY to A')

  # r5 - Move file modified by reintegrate (A/D/gamma to A/C/gamma).
  svntest.main.run_svn(None, 'move', old_gamma_path, new_gamma_path)
  sbox.simple_commit(message='Move file')

  # 'svn log -g --stop-on-copy ^/A/C/gamma' should return *only* r5
  # Previously this test failed because the change in gamma's inherited
  # mergeinfo between r4 and r5, due to the move, was understood as a merge:
  #
  # >svn log -v -g --stop-on-copy ^^/A/C/gamma
  # ------------------------------------------------------------------------
  # r5 | jrandom | 2011-10-11 14:37:57 -0700 (Tue, 11 Oct 2011) | 1 line #
  # Changed paths:
  #    A /A/C/gamma (from /A/D/gamma:4)
  #    D /A/D/gamma
  #
  # Move file
  # ------------------------------------------------------------------------
  # r3 | jrandom | 2011-10-11 14:37:56 -0700 (Tue, 11 Oct 2011) | 1 line
  # Changed paths:
  #    M /A_COPY/D/gamma
  # Reverse merged via: r5
  #
  # Branch edit
  # ------------------------------------------------------------------------
  # r2 | jrandom | 2011-10-11 14:37:56 -0700 (Tue, 11 Oct 2011) | 1 line
  # Changed paths:
  #    A /A_COPY (from /A:1)
  # Reverse merged via: r5
  #
  # log msg
  # ------------------------------------------------------------------------
  expected_merges = {5 : []}
  svntest.main.run_svn(None, 'up', wc_dir)
  exit_code, out, err = svntest.actions.run_and_verify_svn(
    None, [], 'log', '-g', '--stop-on-copy',
    sbox.repo_url + '/A/C/gamma')
  log_chain = parse_log_output(out)
  check_merge_results(log_chain, expected_merges)
+
#----------------------------------------------------------------------
def log_diff(sbox):
  "'svn log --diff'"

  guarantee_repos_and_wc(sbox)

  was_cwd = os.getcwd()
  os.chdir(sbox.wc_dir)

  exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
                                                              'log', '--diff')
  os.chdir(was_cwd)

  # for/else: the 'else' branch runs only if no 'Index:' line was found,
  # i.e. the log output contained no diffs at all.
  for line in output:
    if line.startswith('Index:'):
      break
  else:
    raise SVNLogParseError("no diffs found in log output")

  # After a copy, a log of the copy destination used to fail because the
  # diff tried to use the head-revision URL with the old revision numbers
  # without using the correct peg revision.

  sbox.simple_copy('A', 'A2')
  sbox.simple_commit()

  os.chdir(sbox.wc_dir)
  exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
                                                              'log', '--diff',
                                                              '-r10:8', 'A2')
  os.chdir(was_cwd)

  # Expected per-revision diffs for the copied path (r10 is the copy
  # itself, so only r9 and r8 carry content diffs).
  r9diff = [ make_no_diff_deleted_header('A2/B/E/alpha', 8, 9),
             make_diff_header('A2/B/E/beta', 'revision 8', 'revision 9')
             + [ "@@ -1 +1,2 @@\n",
                 " This is the file 'beta'.\n",
                 "+9\n",
                 "\ No newline at end of file\n",
               ]
           ]
  r8diff = [ make_diff_header('A2/D/G/rho', 'nonexistent', 'revision 8')
             + [ "@@ -0,0 +1 @@\n",
                 "+88\n",
                 "\ No newline at end of file\n",
               ]
           ]
  log_chain = parse_log_output(output, with_diffs=True)
  if len(log_chain) != 3:
    raise SVNLogParseError("%d logs found, 3 expected" % len(log_chain))
  compare_diff_output(r9diff, log_chain[1]['diff_lines'])
  compare_diff_output(r8diff, log_chain[2]['diff_lines'])
+
@Skip(svntest.main.is_fs_type_fsx)
def log_xml_old(sbox):
  "log --xml shows kind for old style repository"

  # Build a repository in the old (1.5) format.
  sbox.build(minor_version=5)

  sbox.simple_copy('A/B', 'A/B2')
  sbox.simple_rm('A/B/lambda', 'A/B/E', 'A/B2/lambda', 'A/B2/E')
  sbox.simple_commit()

  os.chdir(sbox.wc_dir)
  # Every changed path in r2 must carry the correct 'kind' attribute.
  changed_paths = {
    '/A/B/E'       : [{'kind': 'dir',  'action': 'D'}],
    '/A/B/lambda'  : [{'kind': 'file', 'action': 'D'}],
    '/A/B2'        : [{'kind': 'dir',  'action': 'A'}],
    '/A/B2/E'      : [{'kind': 'dir',  'action': 'D'}],
    '/A/B2/lambda' : [{'kind': 'file', 'action': 'D'}],
  }
  svntest.actions.run_and_verify_log_xml(args=['-r', '2', '-v'],
                                         expected_paths=[changed_paths])
+
+
@Issue(4153)
def log_diff_moved(sbox):
  "log --diff on moved file"

  # Build a history where mu is moved+edited twice:
  #   r2: A/mu  -> A/mu2  (plus edit)
  #   r3: A/mu2 -> A/mu3  (plus edit)
  sbox.build()

  sbox.simple_move('A/mu', 'A/mu2')
  svntest.main.file_append(sbox.ospath('A/mu2'), "now mu2\n")
  sbox.simple_commit()
  sbox.simple_move('A/mu2', 'A/mu3')
  svntest.main.file_append(sbox.ospath('A/mu3'), "now mu3\n")
  sbox.simple_commit()

  mu_at_1 = sbox.repo_url + '/A/mu@1'
  mu3_at_3 = sbox.repo_url + '/A/mu3@3'

  r1diff = [make_diff_header('mu', 'nonexistent', 'revision 1')
            + ["@@ -0,0 +1 @@\n",
               "+This is the file 'mu'.\n"]]

  # The mu2@2 and mu3@3 diffs show diffs relative to the copy source
  r2diff = [make_diff_header('mu',
                             '.../mu)\t(revision 1',
                             '.../mu2)\t(revision 2')
            + ["@@ -1 +1,2 @@\n",
               " This is the file 'mu'.\n",
               "+now mu2\n"]]

  r3diff = [make_diff_header('mu2',
                             '.../mu2)\t(revision 2',
                             '.../mu3)\t(revision 3')
            + ["@@ -1,2 +1,3 @@\n",
               " This is the file 'mu'.\n",
               " now mu2\n",
               "+now mu3\n"]]

  # Log of the original node at its pre-move name/revision: one entry, r1.
  exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
                                                              'log', '--diff',
                                                              mu_at_1)
  log_chain = parse_log_output(output, with_diffs=True)
  if len(log_chain) != 1:
    raise SVNLogParseError("%d logs found, 1 expected" % len(log_chain))
  compare_diff_output(r1diff, log_chain[0]['diff_lines'])

  # Single revision on the final name: just the r3 diff.
  exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
                                                              'log', '--diff',
                                                              '-r3', mu3_at_3)
  log_chain = parse_log_output(output, with_diffs=True)
  if len(log_chain) != 1:
    raise SVNLogParseError("%d logs found, 1 expected" % len(log_chain))
  compare_diff_output(r3diff, log_chain[0]['diff_lines'])

  # Range spanning one rename: r3 then r2 diffs.
  exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
                                                              'log', '--diff',
                                                              '-r3:2', mu3_at_3)
  log_chain = parse_log_output(output, with_diffs=True)
  if len(log_chain) != 2:
    raise SVNLogParseError("%d logs found, 2 expected" % len(log_chain))
  compare_diff_output(r3diff, log_chain[0]['diff_lines'])
  compare_diff_output(r2diff, log_chain[1]['diff_lines'])

  # Full history spanning both renames: r3, r2, r1 diffs in that order.
  exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
                                                              'log', '--diff',
                                                              mu3_at_3)
  log_chain = parse_log_output(output, with_diffs=True)
  if len(log_chain) != 3:
    raise SVNLogParseError("%d logs found, 3 expected" % len(log_chain))
  compare_diff_output(r3diff, log_chain[0]['diff_lines'])
  compare_diff_output(r2diff, log_chain[1]['diff_lines'])
  compare_diff_output(r1diff, log_chain[2]['diff_lines'])
+
+
#----------------------------------------------------------------------
def log_search(sbox):
  "'svn log --search'"

  guarantee_repos_and_wc(sbox)

  os.chdir(sbox.wc_dir)

  # Run 'svn log' with LOG_ARGS and require exactly REVS in the output.
  def expect_revs(revs, *log_args):
    exit_code, output, err = svntest.actions.run_and_verify_svn(
      None, [], 'log', *log_args)
    check_log_chain(parse_log_output(output), revs)

  expect_revs([7, 6, 3], '--search', 'for revision [367]')

  # search is case-insensitive
  expect_revs([7, 6, 3], '--search', 'FOR REVISION [367]')

  # multi-pattern search
  expect_revs([7, 6, 3],
              '--search', 'for revision 3',
              '--search', 'for revision 6',
              '--search', 'for revision 7')

  # combined pattern search
  expect_revs([8, 6], '--verbose',
              '--search', 'for revision 8',
              '--search-and', 'test the code',
              '--search', 'for revision 7',
              '--search-and', 'this won\'t match ',
              '--search', 'psi',
              '--search-and', 'multiple lines',
              '--search-and', 'revision 6') # don't match r4

  expect_revs([7, 4], '--verbose',
              '--search', 'for revision 8',
              '--search-and', 'this won\'t match ',
              '--search', 'for revision 7',
              '--search', 'psi',
              '--search-and', 'multiple lines',
              '--search-and', 'revision 4') # don't match r6
+
+
@SkipUnless(server_has_mergeinfo)
def merge_sensitive_log_with_search(sbox):
  "test 'svn log -g --search'"

  merge_history_repos(sbox)

  trunk_path = os.path.join(sbox.wc_dir, "trunk")

  # Run log -g on a non-copying revision that adds mergeinfo, with a
  # search pattern that only matches the merged revision.
  exit_code, output, err = svntest.actions.run_and_verify_svn(
    None, [], 'log', '-g', '-r6', '--search', 'upsilon', trunk_path)

  # The only revision reported should be r4 (the merge rev), via r6.
  log_chain = parse_log_output(output)
  check_merge_results(log_chain, { 4 : [6] })
+
# Helper function for a few tests
def create_renaming_history_repos(sbox):
  "create a repository containing renames and a suitable working copy"

  # Resulting history:
  #   r2 - edit A/mu (multi-line log message)
  #   r3 - rename A -> trunk
  #   r4 - edit trunk/mu (multi-line log message)
  #   r5 - cyclically exchange trunk/C and trunk/D
  sbox.build()
  wc_dir = sbox.wc_dir
  msg_file=os.path.join(sbox.repo_dir, 'log-msg')
  msg_file=os.path.abspath(msg_file)
  mu_path1 = os.path.join(wc_dir, 'A', 'mu')
  mu_path2 = os.path.join(wc_dir, 'trunk', 'mu')

  # r2 - Change a file.
  msg=""" Log message for revision 2
  but with multiple lines
  to test the code"""
  svntest.main.file_write(msg_file, msg)
  svntest.main.file_append(mu_path1, "2")
  svntest.main.run_svn(None, 'ci', '-F', msg_file, wc_dir)

  # r3 - Rename that file's parent.
  svntest.main.run_svn(None, 'up', wc_dir)
  sbox.simple_move('A', 'trunk')
  svntest.main.run_svn(None, 'ci', '-m', "Log message for revision 3",
                       wc_dir)

  # r4 - Change the file again.
  msg=""" Log message for revision 4
  but with multiple lines
  to test the code"""
  svntest.main.file_write(msg_file, msg)
  svntest.main.file_append(mu_path2, "4")
  svntest.main.run_svn(None, 'ci', '-F', msg_file, wc_dir)
  svntest.main.run_svn(None, 'up', wc_dir)

  # r5 - Cyclic exchange.
  svntest.main.run_svn(None, 'up', wc_dir)
  sbox.simple_move(os.path.join('trunk', 'D'), os.path.join('trunk', 'X'))
  sbox.simple_move(os.path.join('trunk', 'C'), os.path.join('trunk', 'D'))
  sbox.simple_move(os.path.join('trunk', 'X'), os.path.join('trunk', 'C'))
  svntest.main.run_svn(None, 'ci', '-m', "Log message for revision 5",
                       wc_dir)
+
+
#----------------------------------------------------------------------
# Test for issue #4355 'svn_client_log5 broken with multiple revisions
# which span a rename'.
@Issue(4355)
@SkipUnless(server_has_mergeinfo)
def log_multiple_revs_spanning_rename(sbox):
  "log for multiple revs which span a rename"

  trunk_path = sbox.ospath('trunk')

  # History (see create_renaming_history_repos): A renamed to trunk in r3,
  # with edits to (A|trunk)/mu in r2 and r4.
  create_renaming_history_repos(sbox)

  # Check that log can handle a revision range that spans a rename.
  exit_code, output, err = svntest.actions.run_and_verify_svn(
    None, [], 'log', '-r2:4', sbox.repo_url + '/trunk/mu')
  log_chain = parse_log_output(output)
  check_log_chain(log_chain, [2,3,4])

  # Check that log can handle discrete revisions that don't span a rename.
  exit_code, output, err = svntest.actions.run_and_verify_svn(
    None, [], 'log', '-c3,4', sbox.repo_url + '/trunk/mu')
  log_chain = parse_log_output(output)
  check_log_chain(log_chain, [3,4])

  # Check that log can handle discrete revisions that span a rename.
  #
  # Previously this failed with:
  #
  #   >svn log ^/trunk -c2,3,1
  #   ------------------------------------------------------------------------
  #   r2 | jrandom | 2013-04-18 19:58:47 -0400 (Thu, 18 Apr 2013) | 3 lines
  #
  #    Log message for revision 2
  #    but with multiple lines
  #    to test the code
  #   ------------------------------------------------------------------------
  #   r3 | jrandom | 2013-04-18 19:58:47 -0400 (Thu, 18 Apr 2013) | 1 line
  #
  #   Log message for revision 3
  #   ..\..\..\subversion\svn\log-cmd.c:868,
  #   ..\..\..\subversion\libsvn_client\log.c:641,
  #   ..\..\..\subversion\libsvn_repos\log.c:1931,
  #   ..\..\..\subversion\libsvn_repos\log.c:1358,
  #   ..\..\..\subversion\libsvn_fs\fs-loader.c:979,
  #   ..\..\..\subversion\libsvn_fs_fs\tree.c:3205:
  #   (apr_err=SVN_ERR_FS_NOT_FOUND)
  #   svn: E160013: File not found: revision 1, path '/trunk'
  exit_code, output, err = svntest.actions.run_and_verify_svn(
    None, [], 'log', '-c2,3,1', sbox.repo_url + '/trunk/mu')
  log_chain = parse_log_output(output)
  check_log_chain(log_chain, [2,3,1])

  mu_path2 = sbox.ospath('trunk/mu')

  # Should work with a WC target too.
  exit_code, output, err = svntest.actions.run_and_verify_svn(
    None, [], 'log', '-c2,3,1', mu_path2)
  log_chain = parse_log_output(output)
  check_log_chain(log_chain, [2,3,1])

  # Discrete revision *ranges* which span a rename should work too.
  exit_code, output, err = svntest.actions.run_and_verify_svn(
    None, [], 'log', '-r1', '-r4:2', sbox.repo_url + '/trunk')
  log_chain = parse_log_output(output)
  check_log_chain(log_chain, [1,4,3,2])

  # As above, but revision ranges from younger to older.  Previously this
  # failed with:
  #
  #   >svn log ^/trunk -r1:1 -r2:4
  #   ------------------------------------------------------------------------
  #   r1 | jrandom | 2013-04-18 19:58:46 -0400 (Thu, 18 Apr 2013) | 1 line
  #
  #   Log message for revision 1.
  #   ..\..\..\subversion\svn\log-cmd.c:868,
  #   ..\..\..\subversion\libsvn_client\log.c:678,
  #   ..\..\..\subversion\libsvn_repos\log.c:1931,
  #   ..\..\..\subversion\libsvn_repos\log.c:1358,
  #   ..\..\..\subversion\libsvn_fs\fs-loader.c:979,
  #   ..\..\..\subversion\libsvn_fs_fs\tree.c:3205:
  #   (apr_err=SVN_ERR_FS_NOT_FOUND)
  #   svn: E160013: File not found: revision 4, path '/A'
  exit_code, output, err = svntest.actions.run_and_verify_svn(
    None, [], 'log', '-r1', '-r2:4', sbox.repo_url + '/trunk')
  log_chain = parse_log_output(output)
  check_log_chain(log_chain, [1,2,3,4])

  # Discrete revs with WC-only opt revs shouldn't cause any problems.
  exit_code, output, err = svntest.actions.run_and_verify_svn(
    None, [], 'log', '-r1', '-rPREV', trunk_path)
  log_chain = parse_log_output(output)
  check_log_chain(log_chain, [1,3])

  exit_code, output, err = svntest.actions.run_and_verify_svn(
    None, [], 'log', '-r1', '-rCOMMITTED', trunk_path)
  log_chain = parse_log_output(output)
  check_log_chain(log_chain, [1,4])

  exit_code, output, err = svntest.actions.run_and_verify_svn(
    None, [], 'log', '-r1', '-rBASE', trunk_path)
  log_chain = parse_log_output(output)
  check_log_chain(log_chain, [1,4])
+
@SkipUnless(server_has_mergeinfo)
def mergeinfo_log(sbox):
  "'mergeinfo --log' on a path with mergeinfo"

  sbox.build()
  wc_dir = sbox.wc_dir

  # make a branch 'A2' (r2)
  sbox.simple_repo_copy('A', 'A2')
  # make a change in branch 'A' (r3)
  sbox.simple_mkdir('A/newdir')
  sbox.simple_commit(message="Log message for revision 3.")
  sbox.simple_update()

  # Dummy up some mergeinfo.
  svntest.actions.run_and_verify_svn(None, [],
                                     'ps', SVN_PROP_MERGEINFO, '/A:3',
                                     sbox.ospath('A2'))

  # Run 'svn mergeinfo --show-revs=merged --log [EXTRA...] ^/A A2'.
  def run_mergeinfo_log(*extra):
    return svntest.actions.run_and_verify_svn(
      None, [], 'mergeinfo', '--show-revs=merged', '--log',
      *(extra + (sbox.repo_url + '/A', sbox.ospath('A2'))))

  # test --log
  exit_code, output, err = run_mergeinfo_log()
  check_log_chain(parse_log_output(output), [3])

  # test --log -v
  exit_code, output, err = run_mergeinfo_log('-v')
  check_log_chain(parse_log_output(output), [3], [1])

  # test --log -q
  run_mergeinfo_log('-q')
  # TODO: Validate the output, the check_log_chain() function assumes it
  # gets the output of the message
+
@SkipUnless(server_has_mergeinfo)
@Issue(4463)
def merge_sensitive_log_xml_reverse_merges(sbox):
  "log -g --xml differentiates forward/reverse merges"

  sbox.build()
  wc_dir = sbox.wc_dir
  wc_disk, wc_status = set_up_branch(sbox)

  A_path = os.path.join(wc_dir, 'A')
  A_COPY_path = os.path.join(wc_dir, 'A_COPY')
  D_COPY_path = os.path.join(wc_dir, 'A_COPY', 'D')

  # Merge -c3,5 from A to A_COPY, commit as r7
  svntest.main.run_svn(None, 'up', wc_dir)
  svntest.main.run_svn(None, 'merge', '-c3,5', A_path, A_COPY_path)
  sbox.simple_commit(message='Merge -c3,5 from A to A_COPY')

  # Merge -c-3,-5,4,6 from A to A_COPY, commit as r8.  This reverses the
  # r7 merge of r3 and r5 and forward-merges r4 and r6.
  svntest.main.run_svn(None, 'up', wc_dir)
  svntest.main.run_svn(None, 'merge', '-c-3,4,-5,6', A_path, A_COPY_path)
  sbox.simple_commit(message='Merge -c-3,-5,4,6 from A to A_COPY')

  # Update so the whole working copy is at a single uniform revision.
  svntest.main.run_svn(None, 'up', wc_dir)

  # Run log -g --xml on path with explicit mergeinfo (A_COPY).
  # Forward merges carry reverse-merge="false", reverse merges "true".
  log_attrs = [
    {
      u'revision': u'8',
    },
    {
      u'revision': u'6',
      u'reverse-merge': u'false',
    },
    {
      u'revision': u'5',
      u'reverse-merge': u'true',
    },
    {
      u'revision': u'4',
      u'reverse-merge': u'false',
    },
    {
      u'revision': u'3',
      u'reverse-merge': u'true',
    }]
  svntest.actions.run_and_verify_log_xml(expected_log_attrs=log_attrs,
                                         args=['-g', '-r8', A_COPY_path])

  # Run log -g --xml on path with inherited mergeinfo (A_COPY/D).
  # r5 only affects A_COPY/B/E/beta so not listed
  log_attrs = [
    {
      u'revision': u'8',
    },
    {
      u'revision': u'6',
      u'reverse-merge': u'false',
    },
    {
      u'revision': u'4',
      u'reverse-merge': u'false',
    },
    {
      u'revision': u'3',
      u'reverse-merge': u'true',
    }]
  svntest.actions.run_and_verify_log_xml(expected_log_attrs=log_attrs,
                                         args=['-g', '-r8', D_COPY_path])
+
def log_revision_move_copy(sbox):
  "log revision handling over move/copy"

  sbox.build()

  # Local (uncommitted) move and edit of iota.
  sbox.simple_move('iota', 'iotb')
  sbox.simple_append('iotb', 'new line\n')

  # Local copy and edit of A/mu.
  sbox.simple_copy('A/mu', 'mutb')
  sbox.simple_append('mutb', 'mutb\n')

  # Chained local moves: A/B/E -> E, then E/alpha -> alpha.
  sbox.simple_move('A/B/E', 'E')
  sbox.simple_move('E/alpha', 'alpha')

  #r2
  svntest.actions.run_and_verify_svn(None, [],
                                     'rm', sbox.repo_url + '/A/D', '-mm')

  sbox.simple_commit() #r3

  # This introduces a copy and a move in r3, but check how the history
  # of these nodes behaves in r2.

  # This one might change behavior once we improve move handling
  expected_output = [
    '------------------------------------------------------------------------\n'
  ]
  expected_err = []
  svntest.actions.run_and_verify_svn(expected_output, expected_err,
                                     'log', '-v',sbox.ospath('iotb'),
                                     '-r2')

  # While this one (the copy destination) errors out: no repository
  # location can be found for it at r2.
  expected_output = []
  expected_err = '.*E195012: Unable to find repository location.*'
  svntest.actions.run_and_verify_svn(expected_output, expected_err,
                                     'log', '-v', sbox.ospath('mutb'),
                                     '-r2')

  # And just for fun, do the same thing for blame
  expected_output = [
    '     1    jrandom This is the file \'iota\'.\n'
  ]
  expected_err = []
  svntest.actions.run_and_verify_svn(expected_output, expected_err,
                                     'blame', sbox.ospath('iotb'),
                                     '-r2')

  expected_output = None
  expected_err = '.*E195012: Unable to find repository location.*'
  svntest.actions.run_and_verify_svn(expected_output, expected_err,
                                     'blame', sbox.ospath('mutb'),
                                     '-r2')

  # Verify how r3's changed-paths report the moves and the copy.
  expected_output = svntest.verify.RegexListOutput([
    '-+\\n',
    'r3\ .*\n',
    re.escape('Changed paths:\n'),
    re.escape('   D /A/B/E\n'),
    re.escape('   A /E (from /A/B/E:2)\n'), # Patched - Direct move
    re.escape('   D /E/alpha\n'),
    re.escape('   A /alpha (from /A/B/E/alpha:1)\n'), # Indirect move - Not patched
    re.escape('   D /iota\n'),
    re.escape('   A /iotb (from /iota:2)\n'), # Patched - Direct move
    re.escape('   A /mutb (from /A/mu:1)\n'), # Copy (always r1)
    '-+\\n'
  ])
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'log', '-v', '-q', sbox.wc_dir,
                                     '-c3')
+
def log_on_deleted_deep(sbox):
  "log on deleted deep path"

  # Build: r2 and r3 each touch a child of A/B/E; r4 moves A/B to B,
  # leaving /A/B/E addressable only via peg revisions <= 3.
  sbox.build()
  sbox.simple_propset('k', 'v', 'A/B/E/alpha')
  sbox.simple_commit() #r2
  sbox.simple_propset('k', 'v', 'A/B/E/beta')
  sbox.simple_commit() #r3

  sbox.simple_update() # Or commit fails
  sbox.simple_move('A/B', 'B')
  sbox.simple_commit() #r4

  # The ranges 1-2,2-3 overlap at r2, so r2 is reported twice.
  expected_output = svntest.verify.RegexListOutput([
    r'-+\n',
    r'r1 .*\n',
    r'-+\n',
    r'r2 .*\n',
    r'-+\n',
    r'r2 .*\n',
    r'-+\n',
    r'r3 .*\n',
    r'-+\n',
  ])
  # In deleted location
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'log', sbox.repo_url + '/A/B/E@3',
                                     '-q', '-c', '1-2,2-3')

  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'log', sbox.repo_url + '/A/B/E@3',
                                     'alpha', 'beta',
                                     '-q', '-c', '1-2,2-3')

  # In new location
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'log', sbox.repo_url + '/B/E',
                                     '-q', '-c', '1-2,2-3')

  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'log', sbox.repo_url + '/B/E@4',
                                     'alpha', 'beta',
                                     '-q', '-c', '1-2,2-3')

  # Restricting to alpha (or the directory itself), only r1 and r2 apply.
  expected_output = svntest.verify.RegexListOutput([
    r'-+\n',
    r'r1 .*\n',
    r'-+\n',
    r'r2 .*\n',
    r'-+\n',
  ])
  # 2 ranges
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'log', sbox.repo_url + '/A/B/E@3',
                                     'alpha',
                                     '-q', '-c', '1,2')

  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'log', sbox.repo_url + '/B/E',
                                     'alpha',
                                     '-q', '-c', '1,2')

  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'log', sbox.repo_url + '/A/B/E@3',
                                     '',
                                     '-q', '-c', '1,2')

  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'log', sbox.repo_url + '/B/E',
                                     '',
                                     '-q', '-c', '1,2')

  # 1 range
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'log', sbox.repo_url + '/A/B/E@3',
                                     'alpha',
                                     '-q', '-c', '1-2')

  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'log', sbox.repo_url + '/B/E',
                                     'alpha',
                                     '-q', '-c', '1-2')

  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'log', sbox.repo_url + '/A/B/E@3',
                                     '',
                                     '-q', '-c', '1-2')

  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'log', sbox.repo_url + '/B/E',
                                     '',
                                     '-q', '-c', '1-2')
+
+
########################################################################
# Run the tests

# list all tests here, starting with None:
# NOTE(review): test numbering appears to follow list position (svntest
# convention) -- confirm before reordering or inserting entries mid-list.
test_list = [ None,
              plain_log,
              log_with_empty_repos,
              log_where_nothing_changed,
              log_to_revision_zero,
              dynamic_revision,
              log_with_path_args,
              log_wc_with_peg_revision,
              url_missing_in_head,
              log_through_copyfrom_history,
              escape_control_chars,
              log_xml_empty_date,
              log_limit,
              log_base_peg,
              log_verbose,
              log_parser,
              merge_sensitive_log_single_revision,
              merge_sensitive_log_branching_revision,
              merge_sensitive_log_non_branching_revision,
              merge_sensitive_log_added_path,
              log_single_change,
              log_changes_range,
              log_changes_list,
              log_changes_complex,
              only_one_wc_path,
              retrieve_revprops,
              log_xml_with_bad_data,
              merge_sensitive_log_target_with_bogus_mergeinfo,
              merge_sensitive_log_added_mergeinfo_replaces_inherited,
              merge_sensitive_log_propmod_merge_inheriting_path,
              log_of_local_copy,
              merge_sensitive_log_reverse_merges,
              merge_sensitive_log_ignores_cyclic_merges,
              log_with_unrelated_peg_and_operative_revs,
              log_on_nonexistent_path_and_valid_rev,
              merge_sensitive_log_copied_path_inherited_mergeinfo,
              log_diff,
              log_xml_old,
              log_diff_moved,
              log_search,
              merge_sensitive_log_with_search,
              log_multiple_revs_spanning_rename,
              mergeinfo_log,
              merge_sensitive_log_xml_reverse_merges,
              log_revision_move_copy,
              log_on_deleted_deep,
             ]
+
if __name__ == '__main__':
  # Hand the list to the shared test harness; it exits the process itself.
  svntest.main.run_tests(test_list)
  # NOTREACHED


### End of file.
diff --git a/subversion/tests/cmdline/log_tests_data/merge_history_repo.png b/subversion/tests/cmdline/log_tests_data/merge_history_repo.png
new file mode 100644
index 0000000..017a3e3
--- /dev/null
+++ b/subversion/tests/cmdline/log_tests_data/merge_history_repo.png
Binary files differ
diff --git a/subversion/tests/cmdline/log_tests_data/xml-invalid-chars.dump b/subversion/tests/cmdline/log_tests_data/xml-invalid-chars.dump
new file mode 100644
index 0000000..065d7cc
--- /dev/null
+++ b/subversion/tests/cmdline/log_tests_data/xml-invalid-chars.dump
@@ -0,0 +1,19 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 9677aabc-7113-11dc-9ed1-dff27aa61d95
+
+Revision-number: 0
+Prop-content-length: 150
+Content-length: 150
+
+K 7
+svn:log
+V 76
+After the colon are a space, 3 bad chars, 2 good chars, and a period: 
+.
+K 8
+svn:date
+V 27
+2007-10-02T18:16:25.707165Z
+PROPS-END
+
diff --git a/subversion/tests/cmdline/merge_authz_tests.py b/subversion/tests/cmdline/merge_authz_tests.py
new file mode 100755
index 0000000..8e14089
--- /dev/null
+++ b/subversion/tests/cmdline/merge_authz_tests.py
@@ -0,0 +1,918 @@
+#!/usr/bin/env python
+#
+# merge_authz_tests.py: merge tests that need to write an authz file
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import shutil, sys, re, os
+import time
+
+# Our testing module
+import svntest
+from svntest import wc
+
+# (abbreviation)
+Item = wc.StateItem
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+
+from svntest.mergetrees import set_up_branch
+from svntest.mergetrees import expected_merge_output
+from svntest.main import SVN_PROP_MERGEINFO
+from svntest.main import write_restrictive_svnserve_conf
+from svntest.main import write_authz_file
+from svntest.main import is_ra_type_dav
+from svntest.main import is_ra_type_svn
+from svntest.main import server_has_mergeinfo
+from svntest.actions import fill_file_with_lines
+from svntest.actions import make_conflict_marker_text
+from svntest.actions import inject_conflict_into_expected_state
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+
+#----------------------------------------------------------------------
+
+# Test for issues
+#
+# #2893 - Handle merge info for portions of a tree not checked out due
+# to insufficient authz.
+#
+# #2997 - If skipped paths come first in operative merge mergeinfo
+# is incomplete
+#
+# #2829 - Improve handling for skipped paths encountered during a merge.
+# This is *not* a full test of issue #2829, see also merge_tests.py,
+# search for "2829". This tests the problem where a merge adds a path
+# with a missing sibling and so needs its own explicit mergeinfo.
+#
+# #4056 - Don't record non-inheritable mergeinfo if missing subtrees are not
+# touched by the full-depth diff
+@Issues(2893,2997,2829,4056)
+@SkipUnless(svntest.main.server_has_mergeinfo)
+@Skip(svntest.main.is_ra_type_file)
+def mergeinfo_and_skipped_paths(sbox):
+ "skipped paths get overriding mergeinfo"
+
+ # Test that we override the mergeinfo for child paths which weren't
+ # actually merged because they were skipped.
+ #
+ # This test covers paths skipped because:
+ #
+ # 1) The source of a merge is inaccessible due to authz restrictions.
+ # 2) Destination of merge is inaccessible due to authz restrictions.
+ # 3) Source *and* destination of merge is inaccessible due to authz
+ # restrictions.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox, False, 3)
+
+ # Create a restrictive authz where part of the merge source and part
+  # of the target are inaccessible.
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+ write_authz_file(sbox, {"/" : svntest.main.wc_author +"=rw",
+ # Make a directory in the merge source inaccessible.
+ "/A/B/E" : svntest.main.wc_author + "=",
+ # Make a file and dir in the merge destination
+ # inaccessible.
+ "/A_COPY_2/D/H/psi" : svntest.main.wc_author + "=",
+ "/A_COPY_2/D/G" : svntest.main.wc_author + "=",
+ # Make the source and destination inaccessible.
+ "/A_COPY_3/B/E" : svntest.main.wc_author + "=",
+ })
+
+ # Checkout just the branch under the newly restricted authz.
+ wc_restricted = sbox.add_wc_path('restricted')
+ svntest.actions.run_and_verify_svn(None, [], 'checkout',
+ sbox.repo_url,
+ wc_restricted)
+
+ # Some paths we'll use in the second WC.
+ A_COPY_path = os.path.join(wc_restricted, "A_COPY")
+ A_COPY_2_path = os.path.join(wc_restricted, "A_COPY_2")
+ A_COPY_2_H_path = os.path.join(wc_restricted, "A_COPY_2", "D", "H")
+ A_COPY_3_path = os.path.join(wc_restricted, "A_COPY_3")
+ omega_path = os.path.join(wc_restricted, "A_COPY", "D", "H", "omega")
+ zeta_path = sbox.ospath("A/D/H/zeta")
+
+ # Merge r4:8 into the restricted WC's A_COPY.
+ #
+ # We expect A_COPY/B/E to be skipped because we can't access the source
+ # and A_COPY/D/H/omega because it is missing. Since we have A_COPY/B/E
+  # we should override its inherited mergeinfo, giving it just what it
+ # inherited from A_COPY before the merge.
+ expected_output = wc.State(A_COPY_path, {
+ 'D/G/rho' : Item(status='U '),
+ 'D/H/psi' : Item(status='U '),
+ 'D/H/omega' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'B/E' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=8),
+ 'D/H/chi' : Item(status=' ', wc_rev=8),
+ 'D/H/psi' : Item(status='M ', wc_rev=8),
+ 'D/H/omega' : Item(status='M ', wc_rev=8),
+ 'D/H' : Item(status=' ', wc_rev=8),
+ 'D/G/pi' : Item(status=' ', wc_rev=8),
+ 'D/G/rho' : Item(status='M ', wc_rev=8),
+ 'D/G/tau' : Item(status=' ', wc_rev=8),
+ 'D/G' : Item(status=' ', wc_rev=8),
+ 'D/gamma' : Item(status=' ', wc_rev=8),
+ 'D' : Item(status=' ', wc_rev=8),
+ 'B/lambda' : Item(status=' ', wc_rev=8),
+ 'B/E' : Item(status=' M', wc_rev=8),
+ 'B/E/alpha' : Item(status=' ', wc_rev=8),
+ 'B/E/beta' : Item(status=' ', wc_rev=8),
+ 'B/F' : Item(status=' ', wc_rev=8),
+ 'B' : Item(status=' ', wc_rev=8),
+ 'mu' : Item(status=' ', wc_rev=8),
+ 'C' : Item(status=' ', wc_rev=8),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:5-8'}),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/omega' : Item("New content"),
+ 'D/H' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/G' : Item(),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D' : Item(),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/E' : Item(props={SVN_PROP_MERGEINFO : ''}),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/F' : Item(),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'C' : Item(),
+ })
+ expected_skip = wc.State(A_COPY_path, {
+ 'B/E' : Item(verb='Skipped missing target'),
+ })
+ svntest.actions.run_and_verify_merge(A_COPY_path, '4', '8',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Merge r4:8 into the restricted WC's A_COPY_2.
+ #
+ # As before we expect A_COPY_2/B/E to be skipped because we can't access the
+ # source but now the destination paths A_COPY_2/D/G, A_COPY_2/D/G/rho, and
+ # A_COPY_2/D/H/psi should also be skipped because our test user doesn't have
+ # access.
+ #
+ # After the merge the parents of the missing dest paths, A_COPY_2/D and
+ # A_COPY_2/D/H get non-inheritable mergeinfo. Those parents' children that
+ # *are* present and are affected by the merge, only A_COPY_2/D/H/omega in
+ # this case, get their own mergeinfo. Note that A_COPY_2/D/H is both the
+  # parent of a missing child and the sibling of a missing child, but the former
+ # always takes precedence in terms of getting *non*-inheritable mergeinfo.
+ expected_output = wc.State(A_COPY_2_path, {
+ 'D/H/omega' : Item(status='U '),
+ # Below the skip
+ 'D/G/rho' : Item(status=' ', treeconflict='U'),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_2_path, {
+ '' : Item(status=' U'),
+ 'D' : Item(status=' U'),
+ 'D/H' : Item(status=' U'),
+ 'D/H/omega' : Item(status=' U'),
+ 'B/E' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_2_path, {
+ })
+ expected_status = wc.State(A_COPY_2_path, {
+ '' : Item(status=' M', wc_rev=8),
+ 'D/H/chi' : Item(status=' ', wc_rev=8),
+ 'D/H/omega' : Item(status='MM', wc_rev=8),
+ 'D/H' : Item(status=' M', wc_rev=8),
+ 'D/gamma' : Item(status=' ', wc_rev=8),
+ 'D' : Item(status=' M', wc_rev=8),
+ 'B/lambda' : Item(status=' ', wc_rev=8),
+ 'B/E' : Item(status=' M', wc_rev=8),
+ 'B/E/alpha' : Item(status=' ', wc_rev=8),
+ 'B/E/beta' : Item(status=' ', wc_rev=8),
+ 'B/F' : Item(status=' ', wc_rev=8),
+ 'B' : Item(status=' ', wc_rev=8),
+ 'mu' : Item(status=' ', wc_rev=8),
+ 'C' : Item(status=' ', wc_rev=8),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:5-8'}),
+ 'D/H/omega' : Item("New content",
+ props={SVN_PROP_MERGEINFO : '/A/D/H/omega:5-8'}),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:5-8*'}),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D' : Item(props={SVN_PROP_MERGEINFO : '/A/D:5-8*'}),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/E' : Item(props={SVN_PROP_MERGEINFO : ''}),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/F' : Item(),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'C' : Item(),
+ })
+ expected_skip = wc.State(A_COPY_2_path, {
+ 'B/E' : Item(verb='Skipped missing target'),
+ 'D/G' : Item(verb='Skipped missing target'),
+ 'D/H/psi' : Item(verb='Skipped missing target'),
+ })
+ svntest.actions.run_and_verify_merge(A_COPY_2_path, '4', '8',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Merge r5:7 into the restricted WC's A_COPY_3.
+ #
+ # Again A_COPY_3/B/E should be skipped, but because we can't access the
+ # source *or* the destination we expect its parent A_COPY_3/B to get
+  # non-inheritable mergeinfo.  A_COPY_3/B's two existing siblings,
+ # A_COPY_3/B/F and A_COPY_3/B/lambda are untouched by the merge so
+ # neither gets any mergeinfo recorded.
+ expected_output = wc.State(A_COPY_3_path, {
+ 'D/G/rho' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_3_path, {
+ '' : Item(status=' U'),
+ 'B' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_3_path, {
+ })
+ expected_status = wc.State(A_COPY_3_path, {
+ '' : Item(status=' M', wc_rev=8),
+ 'D/H/chi' : Item(status=' ', wc_rev=8),
+ 'D/H/omega' : Item(status=' ', wc_rev=8),
+ 'D/H/psi' : Item(status=' ', wc_rev=8),
+ 'D/H' : Item(status=' ', wc_rev=8),
+ 'D/gamma' : Item(status=' ', wc_rev=8),
+ 'D' : Item(status=' ', wc_rev=8),
+ 'D/G' : Item(status=' ', wc_rev=8),
+ 'D/G/pi' : Item(status=' ', wc_rev=8),
+ 'D/G/rho' : Item(status='M ', wc_rev=8),
+ 'D/G/tau' : Item(status=' ', wc_rev=8),
+ 'B/lambda' : Item(status=' ', wc_rev=8),
+ 'B/F' : Item(status=' ', wc_rev=8),
+ 'B' : Item(status=' M', wc_rev=8),
+ 'mu' : Item(status=' ', wc_rev=8),
+ 'C' : Item(status=' ', wc_rev=8),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:6-7'}),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H' : Item(),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'B' : Item(props={SVN_PROP_MERGEINFO : '/A/B:6-7*'}),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'C' : Item(),
+ })
+ expected_skip = wc.State(A_COPY_3_path,
+ {'B/E' : Item(verb='Skipped missing target')})
+ svntest.actions.run_and_verify_merge(A_COPY_3_path, '5', '7',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive',
+ wc_restricted)
+
+ # Test issue #2997. If a merge requires two separate editor drives and the
+ # first is non-operative we should still update the mergeinfo to reflect
+ # this.
+ #
+ # Merge -c5 -c8 to the restricted WC's A_COPY_2/D/H. r5 gets merged first
+  # but is a no-op, r8 gets merged next and is operative so the mergeinfo
+ # should be updated on the merge target to reflect both merges.
+ expected_output = wc.State(A_COPY_2_H_path, {
+ 'omega' : Item(status='U '),
+ })
+ expected_elision_output = wc.State(A_COPY_2_H_path, {
+ })
+ expected_status = wc.State(A_COPY_2_H_path, {
+ '' : Item(status=' M', wc_rev=8),
+ 'chi' : Item(status=' ', wc_rev=8),
+ 'omega' : Item(status='MM', wc_rev=8),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:5*,8*'}),
+ 'omega' : Item("New content",
+ props={SVN_PROP_MERGEINFO : '/A/D/H/omega:8'}),
+ 'chi' : Item("This is the file 'chi'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_2_H_path, {
+ 'psi' : Item(verb='Skipped missing target'),
+ })
+ # Note we don't bother checking expected mergeinfo output because the
+ # multiple merges being performed here, -c5 and -c8, will result in
+ # first ' U' and then ' G' mergeinfo notifications. Our expected
+ # tree structures can't handle checking for multiple values for the
+ # same key.
+ svntest.actions.run_and_verify_merge(A_COPY_2_H_path, '4', '5',
+ sbox.repo_url + '/A/D/H', None,
+ expected_output,
+ None, # expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False,
+ '-c5', '-c8',
+ A_COPY_2_H_path)
+
+ # Test issue #2829 'Improve handling for skipped paths encountered
+ # during a merge'
+
+ # Revert previous changes to restricted WC
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive',
+ wc_restricted)
+ # Add new path 'A/D/H/zeta'
+ svntest.main.file_write(zeta_path, "This is the file 'zeta'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'add', zeta_path)
+ expected_output = wc.State(wc_dir, {'A/D/H/zeta' : Item(verb='Adding')})
+ wc_status.add({'A/D/H/zeta' : Item(status=' ', wc_rev=9)})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+
+ # Merge -r7:9 to the restricted WC's A_COPY_2/D/H.
+ #
+ # r9 adds a path, 'A_COPY_2/D/H/zeta', which has a missing sibling 'psi',
+ # but since 'psi' is untouched by the merge it isn't skipped, and since it
+ # isn't skipped, its parent 'A_COPY_2/D/H' won't get non-inheritable
+ # mergeinfo set on it to describe the merge, so none of the parent's
+ # children will get explicit mergeinfo -- see issue #4056.
+ expected_output = wc.State(A_COPY_2_H_path, {
+ 'omega' : Item(status='U '),
+ 'zeta' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_2_H_path, {
+ '' : Item(status=' U'),
+ 'omega' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_2_H_path, {
+ 'omega' : Item(status=' U'),
+ })
+ expected_status = wc.State(A_COPY_2_H_path, {
+ '' : Item(status=' M', wc_rev=8),
+ 'chi' : Item(status=' ', wc_rev=8),
+ 'omega' : Item(status='M ', wc_rev=8),
+ 'zeta' : Item(status='A ', copied='+', wc_rev='-'),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:8-9'}),
+ 'omega' : Item("New content"),
+ 'chi' : Item("This is the file 'chi'.\n"),
+ 'zeta' : Item("This is the file 'zeta'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_2_H_path, {})
+ svntest.actions.run_and_verify_merge(A_COPY_2_H_path, '7', '9',
+ sbox.repo_url + '/A/D/H', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Merge -r4:9 to the restricted WC's A_COPY_2/D/H.
+ #
+ # r9 adds a path, 'A_COPY_2/D/H/zeta', which has a parent with
+ # non-inheritable mergeinfo (due to the fact 'A_COPY_2/D/H/psi' is missing
+ # and skipped). 'A_COPY_2/D/H/zeta' must therefore get its own explicit
+ # mergeinfo from this merge.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive',
+ wc_restricted)
+ expected_output = wc.State(A_COPY_2_H_path, {
+ 'omega' : Item(status='U '),
+ 'zeta' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_2_H_path, {
+ '' : Item(status=' U'),
+ 'omega' : Item(status=' U'),
+ 'zeta' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_2_H_path, {
+ })
+ expected_status = wc.State(A_COPY_2_H_path, {
+ '' : Item(status=' M', wc_rev=8),
+ 'chi' : Item(status=' ', wc_rev=8),
+ 'omega' : Item(status='MM', wc_rev=8),
+ 'zeta' : Item(status='A ', copied='+', wc_rev='-'),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:5-9*'}),
+ 'omega' : Item("New content",
+ props={SVN_PROP_MERGEINFO : '/A/D/H/omega:5-9'}),
+ 'chi' : Item("This is the file 'chi'.\n"),
+ 'zeta' : Item("This is the file 'zeta'.\n",
+ props={SVN_PROP_MERGEINFO : '/A/D/H/zeta:9'}),
+ })
+ expected_skip = wc.State(A_COPY_2_H_path, {
+ 'psi' : Item(verb='Skipped missing target'),
+ })
+ svntest.actions.run_and_verify_merge(A_COPY_2_H_path, '4', '9',
+ sbox.repo_url + '/A/D/H', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+@SkipUnless(server_has_mergeinfo)
+@Issue(2876)
+def merge_fails_if_subtree_is_deleted_on_src(sbox):
+ "merge fails if subtree is deleted on src"
+
+ ## See http://subversion.tigris.org/issues/show_bug.cgi?id=2876. ##
+
+ # Create a WC
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ if is_ra_type_svn() or is_ra_type_dav():
+ write_authz_file(sbox, {"/" : "* = rw",
+ "/unrelated" : ("* =\n" +
+ svntest.main.wc_author2 + " = rw")})
+
+ # Some paths we'll care about
+ Acopy_path = sbox.ospath('A_copy')
+ gamma_path = sbox.ospath('A/D/gamma')
+ Acopy_gamma_path = sbox.ospath('A_copy/D/gamma')
+ Acopy_D_path = sbox.ospath('A_copy/D')
+ A_url = sbox.repo_url + '/A'
+ Acopy_url = sbox.repo_url + '/A_copy'
+
+ # Contents to be added to 'gamma'
+ new_content = "line1\nline2\nline3\nline4\nline5\n"
+
+ svntest.main.file_write(gamma_path, new_content)
+
+ # Create expected output tree for commit
+ expected_output = wc.State(wc_dir, {
+ 'A/D/gamma' : Item(verb='Sending'),
+ })
+
+ # Create expected status tree for commit
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/gamma', wc_rev=2)
+
+ # Commit the new content
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ svntest.actions.run_and_verify_svn(None, [], 'cp', A_url, Acopy_url,
+ '-m', 'create a new copy of A')
+
+ # Update working copy
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ svntest.main.file_substitute(gamma_path, "line1", "this is line1")
+ # Create expected output tree for commit
+ expected_output = wc.State(wc_dir, {
+ 'A/D/gamma' : Item(verb='Sending'),
+ })
+
+ # Create expected status tree for commit
+ expected_status.tweak(wc_rev=3)
+ expected_status.tweak('A/D/gamma', wc_rev=4)
+ expected_status.add({
+ 'A_copy' : Item(status=' ', wc_rev=3),
+ 'A_copy/B' : Item(status=' ', wc_rev=3),
+ 'A_copy/B/lambda' : Item(status=' ', wc_rev=3),
+ 'A_copy/B/E' : Item(status=' ', wc_rev=3),
+ 'A_copy/B/E/alpha': Item(status=' ', wc_rev=3),
+ 'A_copy/B/E/beta' : Item(status=' ', wc_rev=3),
+ 'A_copy/B/F' : Item(status=' ', wc_rev=3),
+ 'A_copy/mu' : Item(status=' ', wc_rev=3),
+ 'A_copy/C' : Item(status=' ', wc_rev=3),
+ 'A_copy/D' : Item(status=' ', wc_rev=3),
+ 'A_copy/D/gamma' : Item(status=' ', wc_rev=3),
+ 'A_copy/D/G' : Item(status=' ', wc_rev=3),
+ 'A_copy/D/G/pi' : Item(status=' ', wc_rev=3),
+ 'A_copy/D/G/rho' : Item(status=' ', wc_rev=3),
+ 'A_copy/D/G/tau' : Item(status=' ', wc_rev=3),
+ 'A_copy/D/H' : Item(status=' ', wc_rev=3),
+ 'A_copy/D/H/chi' : Item(status=' ', wc_rev=3),
+ 'A_copy/D/H/omega': Item(status=' ', wc_rev=3),
+ 'A_copy/D/H/psi' : Item(status=' ', wc_rev=3),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Delete A/D/gamma from working copy
+ svntest.actions.run_and_verify_svn(None, [], 'delete', gamma_path)
+ # Create expected output tree for commit
+ expected_output = wc.State(wc_dir, {
+ 'A/D/gamma' : Item(verb='Deleting'),
+ })
+
+ expected_status.remove('A/D/gamma')
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status,
+ [],
+ wc_dir, wc_dir)
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[3,4]],
+ ['U ' + Acopy_gamma_path + '\n',
+ ' U ' + Acopy_gamma_path + '\n']),
+ [], 'merge', '-r1:4',
+ A_url + '/D/gamma' + '@4',
+ Acopy_gamma_path)
+
+ # r6: create an empty (unreadable) commit.
+ # Empty or unreadable revisions used to crash a svn 1.6+ client when
+ # used with a 1.5 server:
+ # http://svn.haxx.se/dev/archive-2009-04/0476.shtml
+ svntest.main.run_svn(None, 'mkdir', sbox.repo_url + '/unrelated',
+ '--username', svntest.main.wc_author2,
+ '-m', 'creating a rev with no paths.')
+
+  # A delete merged on top of a modified file is normally a tree conflict,
+ # see notes/tree-conflicts/detection.txt, but --force currently avoids
+ # this.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[3,6]],
+ ['D ' + Acopy_gamma_path + '\n',
+ ' U ' + Acopy_path + '\n']),
+ [], 'merge', '-r1:6', '--force',
+ A_url, Acopy_path)
+
+@SkipUnless(svntest.main.server_has_mergeinfo)
+@Skip(svntest.main.is_ra_type_file)
+@Issue(3242)
+def reintegrate_fails_if_no_root_access(sbox):
+ "reintegrate fails if no root access"
+
+ # If a user is authorized to a reintegrate source and target, they
+ # should be able to reintegrate, regardless of what authorization
+ # they have to parents of the source and target.
+ #
+ # See http://subversion.tigris.org/issues/show_bug.cgi?id=3242#desc78
+
+ # Some paths we'll care about
+ wc_dir = sbox.wc_dir
+ A_path = sbox.ospath('A')
+ A_COPY_path = sbox.ospath('A_COPY')
+ beta_COPY_path = sbox.ospath('A_COPY/B/E/beta')
+ rho_COPY_path = sbox.ospath('A_COPY/D/G/rho')
+ omega_COPY_path = sbox.ospath('A_COPY/D/H/omega')
+ psi_COPY_path = sbox.ospath('A_COPY/D/H/psi')
+
+ # Copy A@1 to A_COPY in r2, and then make some changes to A in r3-6.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ expected_disk, expected_status = set_up_branch(sbox)
+
+ # Make a change on the branch, to A_COPY/mu, commit in r7.
+ svntest.main.file_write(sbox.ospath("A_COPY/mu"),
+ "Changed on the branch.")
+ expected_output = wc.State(wc_dir, {'A_COPY/mu' : Item(verb='Sending')})
+ expected_status.tweak('A_COPY/mu', wc_rev=7)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ expected_disk.tweak('A_COPY/mu', contents='Changed on the branch.')
+
+ # Update the WC.
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+
+ # Sync A_COPY with A.
+ expected_output = expected_merge_output([[2,7]],
+ ['U ' + beta_COPY_path + '\n',
+ 'U ' + rho_COPY_path + '\n',
+ 'U ' + omega_COPY_path + '\n',
+ 'U ' + psi_COPY_path + '\n',
+ # Mergeinfo notification
+ ' U ' + A_COPY_path + '\n'])
+ svntest.actions.run_and_verify_svn(expected_output, [], 'merge',
+ sbox.repo_url + '/A', A_COPY_path)
+ sbox.simple_commit(message='synch A_COPY with A')
+
+ # Update so we are ready for reintegrate.
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+ # Change authz file so everybody has access to everything but the root.
+ if is_ra_type_svn() or is_ra_type_dav():
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+ write_authz_file(sbox, {"/" : "* =",
+ "/A" : "* = rw",
+ "/A_COPY" : "* = rw",
+ "/iota" : "* = rw"})
+
+ # Now reintegrate A_COPY back to A. The lack of access to the root of the
+ # repository shouldn't be a problem.
+ expected_output = wc.State(A_path, {
+ 'mu' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_path, {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A_COPY:2-8'}),
+ 'B' : Item(),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/F' : Item(),
+ 'mu' : Item("Changed on the branch."),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/omega' : Item("New content"),
+ 'D/H/psi' : Item("New content"),
+ })
+ expected_status = wc.State(A_path, {
+ "B" : Item(status=' ', wc_rev=8),
+ "B/lambda" : Item(status=' ', wc_rev=8),
+ "B/E" : Item(status=' ', wc_rev=8),
+ "B/E/alpha" : Item(status=' ', wc_rev=8),
+ "B/E/beta" : Item(status=' ', wc_rev=8),
+ "B/F" : Item(status=' ', wc_rev=8),
+ "mu" : Item(status='M ', wc_rev=8),
+ "C" : Item(status=' ', wc_rev=8),
+ "D" : Item(status=' ', wc_rev=8),
+ "D/gamma" : Item(status=' ', wc_rev=8),
+ "D/G" : Item(status=' ', wc_rev=8),
+ "D/G/pi" : Item(status=' ', wc_rev=8),
+ "D/G/rho" : Item(status=' ', wc_rev=8),
+ "D/G/tau" : Item(status=' ', wc_rev=8),
+ "D/H" : Item(status=' ', wc_rev=8),
+ "D/H/chi" : Item(status=' ', wc_rev=8),
+ "D/H/omega" : Item(status=' ', wc_rev=8),
+ "D/H/psi" : Item(status=' ', wc_rev=8),
+ "" : Item(status=' M', wc_rev=8),
+ })
+ expected_skip = wc.State(A_path, {})
+ svntest.actions.run_and_verify_merge(A_path, None, None,
+ sbox.repo_url + '/A_COPY', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True,
+ '--reintegrate', A_path)
+
+def diff_unauth_parent(sbox):
+ "diff directory without reading parent"
+
+ sbox.build(create_wc=False)
+
+ # Create r2: Change A a bit
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ 'propset', 'k', 'v',
+ sbox.repo_url + '/A',
+ '-m', 'set prop')
+
+ # Create r3 Mark E and G
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ 'propset', 'this-is', 'E',
+ sbox.repo_url + '/A/B/E',
+ 'propset', 'this-is', 'G',
+ sbox.repo_url + '/A/D/G',
+ '-m', 'set prop')
+
+ # Create r4: Replace A/B/E with A/D/G
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ 'rm', sbox.repo_url + '/A/B/E',
+ 'cp', '3', sbox.repo_url + '/A/D/G',
+ sbox.repo_url + '/A/B/E',
+ '-m', 'replace A/B/E')
+
+
+ if is_ra_type_svn() or is_ra_type_dav():
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+ write_authz_file(sbox, {"/" : "* =",
+ "/A" : "* = rw"})
+
+ # Diff the property change
+ expected_output = [
+ 'Index: .\n',
+ '===================================================================\n',
+ '--- .\t(revision 1)\n',
+ '+++ .\t(revision 2)\n',
+ '\n',
+ 'Property changes on: .\n',
+ '___________________________________________________________________\n',
+ 'Added: k\n',
+ '## -0,0 +1 ##\n',
+ '+v\n',
+ '\ No newline at end of property\n'
+ ]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'diff', sbox.repo_url + '/A', '-c', '2')
+
+ if is_ra_type_svn() or is_ra_type_dav():
+ write_authz_file(sbox, {"/" : "* =",
+ "/A/B/E" : "* = rw"})
+
+ # Diff the replacement
+ expected_output = [
+ 'Index: alpha\n',
+ '===================================================================\n',
+ '--- alpha\t(revision 3)\n',
+ '+++ alpha\t(nonexistent)\n',
+ '@@ -1 +0,0 @@\n',
+ '-This is the file \'alpha\'.\n',
+ 'Index: beta\n',
+ '===================================================================\n',
+ '--- beta\t(revision 3)\n',
+ '+++ beta\t(nonexistent)\n',
+ '@@ -1 +0,0 @@\n',
+ '-This is the file \'beta\'.\n',
+ 'Index: tau\n',
+ '===================================================================\n',
+ '--- tau\t(nonexistent)\n',
+ '+++ tau\t(revision 4)\n',
+ '@@ -0,0 +1 @@\n',
+ '+This is the file \'tau\'.\n',
+ 'Index: rho\n',
+ '===================================================================\n',
+ '--- rho\t(nonexistent)\n',
+ '+++ rho\t(revision 4)\n',
+ '@@ -0,0 +1 @@\n',
+ '+This is the file \'rho\'.\n',
+ 'Index: pi\n',
+ '===================================================================\n',
+ '--- pi\t(nonexistent)\n',
+ '+++ pi\t(revision 4)\n',
+ '@@ -0,0 +1 @@\n',
+ '+This is the file \'pi\'.\n',
+ ]
+
+ if is_ra_type_svn() or is_ra_type_dav():
+ # Because we can't anchor above C we see just a changed C, not a
+ # replacement
+ expected_output += [
+ 'Index: .\n',
+ '===================================================================\n',
+ '--- .\t(revision 3)\n',
+ '+++ .\t(revision 4)\n',
+ '\n',
+ 'Property changes on: .\n',
+ '___________________________________________________________________\n',
+ 'Modified: this-is\n',
+ '## -1 +1 ##\n',
+ '-E\n',
+ '\ No newline at end of property\n',
+ '+G\n',
+ '\ No newline at end of property\n',
+ ]
+ else:
+ # ### We should also see a property deletion here!
+ expected_output += [
+ 'Index: .\n',
+ '===================================================================\n',
+ '--- .\t(revision 3)\n',
+ '+++ .\t(nonexistent)\n',
+ '\n',
+ 'Property changes on: .\n',
+ '___________________________________________________________________\n',
+ 'Deleted: this-is\n',
+ '## -1 +0,0 ##\n',
+ '-E\n',
+ '\ No newline at end of property\n',
+ 'Index: .\n',
+ '===================================================================\n',
+ '--- .\t(nonexistent)\n',
+ '+++ .\t(revision 4)\n',
+ '\n',
+ 'Property changes on: .\n',
+ '___________________________________________________________________\n',
+ 'Added: this-is\n',
+ '## -0,0 +1 ##\n',
+ '+G\n',
+ '\ No newline at end of property\n',
+ ]
+
+ # Use two url diff, because 'svn diff url -c' uses copyfrom to diff against
+ expected_output = svntest.verify.UnorderedOutput(expected_output)
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'diff', sbox.repo_url + '/A/B/E@3',
+ sbox.repo_url + '/A/B/E@4',
+ '--notice-ancestry')
+
+ # Do the same thing with summarize to really see directory deletes and adds
+ if is_ra_type_svn() or is_ra_type_dav():
+ # With no rights on the parent directory we just see a property change on E
+ expected_output = [
+ 'D %s/A/B/E/alpha\n' % sbox.repo_url,
+ 'D %s/A/B/E/beta\n' % sbox.repo_url,
+ 'A %s/A/B/E/tau\n' % sbox.repo_url,
+ 'A %s/A/B/E/rho\n' % sbox.repo_url,
+ 'A %s/A/B/E/pi\n' % sbox.repo_url,
+ ' M %s/A/B/E\n' % sbox.repo_url,
+ ]
+ else:
+ # But with rights on the parent we see a replacement of E
+ expected_output = [
+ 'D %s/A/B/E/alpha\n' % sbox.repo_url,
+ 'D %s/A/B/E/beta\n' % sbox.repo_url,
+ 'D %s/A/B/E\n' % sbox.repo_url,
+ 'A %s/A/B/E/tau\n' % sbox.repo_url,
+ 'A %s/A/B/E/rho\n' % sbox.repo_url,
+ 'A %s/A/B/E/pi\n' % sbox.repo_url,
+ 'A %s/A/B/E\n' % sbox.repo_url,
+ ]
+
+ expected_output = svntest.verify.UnorderedOutput(expected_output)
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'diff', sbox.repo_url + '/A/B/E@3',
+ sbox.repo_url + '/A/B/E@4',
+ '--notice-ancestry', '--summarize')
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ mergeinfo_and_skipped_paths,
+ merge_fails_if_subtree_is_deleted_on_src,
+ reintegrate_fails_if_no_root_access,
+ diff_unauth_parent,
+ ]
+serial_only = True
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list, serial_only = serial_only)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/merge_automatic_tests.py b/subversion/tests/cmdline/merge_automatic_tests.py
new file mode 100755
index 0000000..f4bb231
--- /dev/null
+++ b/subversion/tests/cmdline/merge_automatic_tests.py
@@ -0,0 +1,1440 @@
+#!/usr/bin/env python
+#
+# merge_automatic_tests.py: testing "automatic merge" scenarios
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import shutil, sys, re, os
+import time
+
+# Our testing module
+import svntest
+from svntest import main, wc, verify, actions
+
+# (abbreviation)
+Item = wc.StateItem
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+
+from svntest.main import SVN_PROP_MERGEINFO
+from svntest.main import server_has_mergeinfo
+from svntest.mergetrees import local_path
+from svntest.mergetrees import expected_merge_output
+from svntest.mergetrees import svn_merge
+from svntest.mergetrees import set_up_branch
+
+#----------------------------------------------------------------------
+
+# Merging scenarios to test
+#
+# Merge once
+#
+# A (--?---
+# ( \
+# B (--?--x
+#
+# Merge twice in same direction
+#
+# A (--o-----?---
+# ( \ \
+# B (--o--x--?--x
+#
+# Merge to and fro
+#
+# A (--o-----?--x
+# ( \ /
+# B (--o--x--?---
+#
+# A (--o-----o-----?--x
+# ( \ \ /
+# B (--o--x--o--x--?---
+#
+# A (--o-----o--x--?--x
+# ( \ / /
+# B (--o--x--o-----?---
+#
+# A (--o-----o--x--?---
+# ( \ / \
+# B (--o--x--o-----?--x
+#
+# Merge with cherry-picks
+# (This set of six cases represents all of the topologically distinct
+# scenarios involving one cherry-pick between two automatic merges.)
+#
+# Cherry1, fwd
+# A (--o-----o-[o]----o---
+# ( \ \ \
+# B (--o--x--?-----c-----x
+#
+# Cherry2, fwd
+# A (--o-----?-----c--o---
+# ( \ / \
+# B (--o--x--o-[o]-------x
+#
+# Cherry3, fwd
+# A (--o-----?-------c--o----
+# ( \_____ / \
+# ( \ / \
+# B (--o--o-[o]-x-/---------x
+# \__/
+#
+# Cherry1, back
+# A (--o-----o-[o]-------x
+# ( \ \ /
+# B (--o--x--?-----c--o---
+#
+# Cherry2, back
+# A (--o-----?-----c-----x
+# ( \ / /
+# B (--o--x--o-[o]----o---
+#
+# Cherry3, back
+# A (--o-----?-------c------x
+# ( \_____ / /
+# ( \ / /
+# B (--o--o-[o]-x-/-----o----
+# \__/
+#
+# Criss-cross merge
+#
+# A (--o--?--x--?----
+# ( \ / \
+# ( X \
+# ( / \ \
+# B (--o--?--x--?---x
+#
+# Subtree mergeinfo
+#
+# subtree to, fro
+# A (--o-o-o-o---------x
+# ( \ \ /
+# ( \ \ /
+# B ( o--o------s--
+#
+# merge to, reverse cherry subtree to, merge to
+# A (--o-o-o-o------------------
+# ( \ \ \ \
+# ( \ \ \ \
+# B ( o--o------x-------rcs----x
+#
+# Sparse WC
+#
+# ...
+#
+# Mixed-rev WC
+#
+# ...
+#
+#
+# Key to diagrams:
+#
+# o - an original change
+# ? - an original change or no-op (test both)
+# x - a branch root merge
+# c - a cherry-pick merge
+# [o] - source range of a cherry-pick merge
+# s - a subtree merge
+# r - reverse merge
+
+
+########################################################################
+
+def assert_equal(a, b):
+ """Assert that two generic Python objects are equal. If not, raise an
+ exception giving their values. Rationale: During debugging, it's
+ easier to see what's wrong if we see the values rather than just
+ an indication that the assertion failed."""
+ if a != b:
+ raise Exception("assert_equal failed: a = (%s), b = (%s)" % (a, b))
+
+def logical_changes_in_branch(sbox, branch):
+ """Return the set of logical changes that are actually in branch BRANCH
+ (at its current working version), by examining the state of the
+ branch files and directories rather than its mergeinfo.
+
+ Each logical change is described by its branch and revision number
+ as a string such as 'A1'."""
+ changes = set()
+ for propname in sbox.simple_proplist(branch + '/D').keys():
+ if propname.startswith('prop-'):
+ changes.add(propname[5:])
+ return changes
+
+def get_mergeinfo_change(sbox, target):
+ """Return a list of revision numbers representing the mergeinfo change
+ on TARGET (working version against base). Non-recursive."""
+ exit, out, err = actions.run_and_verify_svn(None, [],
+ 'diff', '--depth=empty',
+ sbox.ospath(target))
+ merged_revs = []
+ for line in out:
+ match = re.match(r' Merged /(\w+):r(.*)', line)
+ if match:
+ for r_range in match.group(2).split(','):
+ if '-' in r_range:
+ r_start, r_end = r_range.split('-')
+ else:
+ r_start = r_end = r_range
+ merged_revs += range(int(r_start), int(r_end) + 1)
+ return merged_revs
+
+def get_3ways_from_output(output):
+ """Scan the list of lines OUTPUT for indications of 3-way merges.
+ Return a list of (base, source-right) tuples."""
+ ### Problem: test suite strips debugging output within run_and_verify_...()
+ ### so we don't see it here. And relying on debug output is a temporary
+ ### measure only. Better to access svn_client_find_automatic_merge()
+ ### directly, via bindings?
+
+ merges = []
+ for line in output:
+ sys.stdout.write("## " + line + " ")
+ # Extract "A1" from a line like "DBG: merge.c:11336: base svn://.../A@1"
+ match = re.search(r'merge\.c:.* base .* /(\w+)@([0-9-]+)', line)
+ if match:
+ base = match.group(1) + match.group(2)
+ match = re.search(r'merge\.c:.* right.* /(\w+)@([0-9-]+)', line)
+ if match:
+ right = match.group(1) + match.group(2)
+ assert base is not None
+ merges.append((base, right))
+ base = None
+ return merges
+
+def make_branches(sbox):
+ """Make branches A and B."""
+ sbox.build()
+ sbox.simple_copy('A', 'B')
+ sbox.simple_commit()
+ os.chdir(sbox.wc_dir)
+ sbox.wc_dir = ''
+
+def modify_branch(sbox, branch, number, conflicting=False):
+ """Commit a modification to branch BRANCH. The actual modification depends
+ on NUMBER. If CONFLICTING=True, the change will be of a kind that
+ conflicts with any other change that has CONFLICTING=True. We don't
+ modify (properties on) the branch root node itself, to make it easier
+ for the tests to distinguish mergeinfo changes from these mods."""
+ uniq = branch + str(number) # something like 'A1' or 'B2'
+ if conflicting:
+ sbox.simple_propset('conflict', uniq, branch + '/C')
+ else:
+ # Make some changes. We add a property, which we will read later in
+ # logical_changes_in_branch() to check that the correct logical
+ # changes were merged. We add a file, so that we will notice if
+ # Subversion tries to merge this same logical change into a branch
+ # that already has it (it will raise a tree conflict).
+ sbox.simple_propset('prop-' + uniq, uniq, branch + '/D')
+ sbox.simple_copy(branch + '/mu', branch + '/mu-' + uniq)
+ sbox.simple_commit()
+
+def expected_automatic_merge_output(target, expect_3ways):
+ """Calculate the expected output."""
+
+ # (This is rather specific to the current implementation.)
+
+ # Match a notification for each rev-range.
+ if expect_3ways:
+ rev_ranges = []
+ for base, right in expect_3ways:
+ if base[0] == right[0]:
+ base_rev = int(base[1:])
+ right_rev = int(right[1:])
+ rev_ranges += [(base_rev + 1, right_rev)];
+ else:
+ rev_ranges = None
+
+ # Match any content modifications; but not of the root of the branch
+ # because we don't intentionally modify the branch root node in most
+ # tests and we don't want to accidentally overlook a mergeinfo change.
+ lines = ["(A |D |[UG] | [UG]|[UG][UG]) " + target + os.path.sep + ".*\n"]
+
+ # Match mergeinfo changes. (### Subtrees are not yet supported here.)
+ lines += [" [UG] " + target + "\n"]
+
+ # At the moment, the automatic merge code sometimes says 'Merging
+ # differences between repository URLs' and sometimes 'Merging r3 through
+ # r5', but it's not trivial to predict which, so expect either form.
+ lines += ["--- Merging .* into '%s':\n" % (target,),
+ "--- Recording mergeinfo for merge .* into '%s':\n" % (target,)]
+
+ return expected_merge_output(rev_ranges, lines, target=target)
+
+def automatic_merge(sbox, source, target, args=[],
+ expect_changes=None, expect_mi=None, expect_3ways=None):
+ """Do a complete, automatic merge from path SOURCE to path TARGET, and
+ commit. Verify the output and that there is no error.
+ ### TODO: Verify the changes made.
+
+ ARGS are additional arguments passed to svn merge."""
+
+ source = local_path(source)
+ target = local_path(target)
+
+ # First, update the WC target because mixed-rev is not fully supported.
+ sbox.simple_update(target)
+
+ before_changes = logical_changes_in_branch(sbox, target)
+
+ exp_out = expected_automatic_merge_output(target, expect_3ways)
+ exit, out, err = svntest.actions.run_and_verify_svn(exp_out, [],
+ 'merge',
+ '^/' + source, target,
+ *args)
+
+ if expect_changes is not None:
+ after_changes = logical_changes_in_branch(sbox, target)
+ merged_changes = after_changes - before_changes
+ assert_equal(merged_changes, set(expect_changes))
+ reversed_changes = before_changes - after_changes
+ assert_equal(reversed_changes, set())
+
+ if expect_mi is not None:
+ actual_mi_change = get_mergeinfo_change(sbox, target)
+ assert_equal(actual_mi_change, expect_mi)
+
+ if expect_3ways is not None:
+ ### actual_3ways = get_3ways_from_output(out)
+ ### assert_equal(actual_3ways, expect_3ways)
+ pass
+
+ sbox.simple_commit()
+
+def three_way_merge(base_node, source_right_node):
+ return (base_node, source_right_node)
+
+def three_way_merge_no_op(base_node, source_right_node):
+ return (base_node, source_right_node)
+
+def cherry_pick(sbox, rev, source, target):
+ """Cherry-pick merge revision REV from branch SOURCE to branch TARGET
+ (both WC-relative paths), and commit."""
+ svn_merge(rev, source, target)
+ sbox.simple_commit()
+
+no_op_commit__n = 0
+def no_op_commit(sbox):
+ """Commit a new revision that does not affect the branches under test."""
+
+ global no_op_commit__n
+ sbox.simple_propset('foo', str(no_op_commit__n), 'iota')
+ no_op_commit__n += 1
+ sbox.simple_commit('iota')
+
+
+#----------------------------------------------------------------------
+
+def init_mod_merge_mod(sbox, mod_6, mod_7):
+ """Modify both branches, merge A -> B, optionally modify again.
+ MOD_6 is True to modify A in r6, MOD_7 is True to modify B in r7,
+ otherwise make no-op commits for r6 and/or r7."""
+
+ # A (--o------?-
+ # ( \
+ # B (---o--x---?
+ # 2 34 5 67
+
+ make_branches(sbox)
+ modify_branch(sbox, 'A', 3)
+ modify_branch(sbox, 'B', 4)
+
+ automatic_merge(sbox, 'A', 'B',
+ expect_changes=['A3'],
+ expect_mi=[2, 3, 4],
+ expect_3ways=[three_way_merge('A1', 'A4')])
+
+ if mod_6:
+ modify_branch(sbox, 'A', 6)
+ else:
+ no_op_commit(sbox) # r6
+
+ if mod_7:
+ modify_branch(sbox, 'B', 7)
+ else:
+ no_op_commit(sbox) # r7
+
+########################################################################
+
+# Merge once
+
+@SkipUnless(server_has_mergeinfo)
+def merge_once_1(sbox):
+ """merge_once_1"""
+
+ # A (------
+ # ( \
+ # B (-----x
+ # 2 34 5
+
+ make_branches(sbox)
+ no_op_commit(sbox) # r3
+ no_op_commit(sbox) # r4
+
+ automatic_merge(sbox, 'A', 'B',
+ expect_changes=[],
+ expect_mi=[2, 3, 4],
+ expect_3ways=[three_way_merge_no_op('A1', 'A4')])
+
+@SkipUnless(server_has_mergeinfo)
+def merge_once_2(sbox):
+ """merge_once_2"""
+
+ # A (-o----
+ # ( \
+ # B (-----x
+ # 2 34 5
+
+ make_branches(sbox)
+ modify_branch(sbox, 'A', 3)
+ no_op_commit(sbox) # r4
+
+ automatic_merge(sbox, 'A', 'B',
+ expect_changes=['A3'],
+ expect_mi=[2, 3, 4],
+ expect_3ways=[three_way_merge('A1', 'A4')])
+
+@SkipUnless(server_has_mergeinfo)
+def merge_once_3(sbox):
+ """merge_once_3"""
+
+ # A (------
+ # ( \
+ # B (--o--x
+ # 2 34 5
+
+ make_branches(sbox)
+ no_op_commit(sbox) # r3
+ modify_branch(sbox, 'B', 4)
+
+ automatic_merge(sbox, 'A', 'B',
+ expect_changes=[],
+ expect_mi=[2, 3, 4],
+ expect_3ways=[three_way_merge_no_op('A1', 'A4')])
+
+@SkipUnless(server_has_mergeinfo)
+def merge_once_4(sbox):
+ """merge_once_4"""
+
+ # A (-o----
+ # ( \
+ # B (--o--x
+ # 2 34 5
+
+ make_branches(sbox)
+ modify_branch(sbox, 'A', 3)
+ modify_branch(sbox, 'B', 4)
+
+ automatic_merge(sbox, 'A', 'B',
+ expect_changes=['A3'],
+ expect_mi=[2, 3, 4],
+ expect_3ways=[three_way_merge('A1', 'A4')])
+
+#----------------------------------------------------------------------
+
+# Merge twice in same direction
+
+@SkipUnless(server_has_mergeinfo)
+def merge_twice_same_direction_1(sbox):
+ """merge_twice_same_direction_1"""
+
+ # A (--o-----------
+ # ( \ \
+ # B (---o--x------x
+ # 2 34 5 67 8
+
+ init_mod_merge_mod(sbox, mod_6=False, mod_7=False)
+
+ automatic_merge(sbox, 'A', 'B',
+ expect_changes=[],
+ expect_mi=[5, 6, 7],
+ expect_3ways=[three_way_merge_no_op('A4', 'A7')])
+
+@SkipUnless(server_has_mergeinfo)
+def merge_twice_same_direction_2(sbox):
+ """merge_twice_same_direction_2"""
+
+ # A (--o------o----
+ # ( \ \
+ # B (---o--x---o--x
+ # 2 34 5 67 8
+
+ init_mod_merge_mod(sbox, mod_6=True, mod_7=True)
+
+ automatic_merge(sbox, 'A', 'B',
+ expect_changes=['A6'],
+ expect_mi=[5, 6, 7],
+ expect_3ways=[three_way_merge('A4', 'A7')])
+
+#----------------------------------------------------------------------
+
+# Merge to and fro
+
+@SkipUnless(server_has_mergeinfo)
+def merge_to_and_fro_1_1(sbox):
+ """merge_to_and_fro_1_1"""
+
+ # A (--o----------x
+ # ( \ /
+ # B (---o--x-------
+ # 2 34 5 67 8
+
+ init_mod_merge_mod(sbox, mod_6=False, mod_7=False)
+
+ automatic_merge(sbox, 'B', 'A',
+ expect_changes=['B4'],
+ expect_mi=[2, 3, 4, 5, 6, 7],
+ expect_3ways=[three_way_merge('A4', 'B7')])
+
+@SkipUnless(server_has_mergeinfo)
+def merge_to_and_fro_1_2(sbox):
+ """merge_to_and_fro_1_2"""
+
+ # A (--o------o---x
+ # ( \ /
+ # B (---o--x---o---
+ # 2 34 5 67 8
+
+ init_mod_merge_mod(sbox, mod_6=True, mod_7=True)
+
+ automatic_merge(sbox, 'B', 'A',
+ expect_changes=['B4', 'B7'],
+ expect_mi=[2, 3, 4, 5, 6, 7],
+ expect_3ways=[three_way_merge('A4', 'B7')])
+
+def init_merge_to_and_fro_2(sbox, mod_9, mod_10):
+ """Set up branches A and B for the merge_to_and_fro_2 scenarios.
+ MOD_9 is True to modify A in r9, MOD_10 is True to modify B in r10,
+ otherwise make no-op commits for r9 and/or r10."""
+
+ # A (--o------o------?-
+ # ( \ \
+ # B (---o--x---o--x---?
+ # 2 34 5 67 8--90
+
+ init_mod_merge_mod(sbox, mod_6=True, mod_7=True)
+
+ automatic_merge(sbox, 'A', 'B',
+ expect_changes=['A6'],
+ expect_mi=[5, 6, 7],
+ expect_3ways=[three_way_merge('A4', 'A7')])
+
+ if mod_9:
+ modify_branch(sbox, 'A', 9)
+ else:
+ no_op_commit(sbox) # r9
+
+ if mod_10:
+ modify_branch(sbox, 'B', 10)
+ else:
+ no_op_commit(sbox) # r10
+
+@SkipUnless(server_has_mergeinfo)
+def merge_to_and_fro_2_1(sbox):
+ """merge_to_and_fro_2_1"""
+
+ # A (--o------o----------x
+ # ( \ \ /
+ # B (---o--x---o--x-------
+ # 2 34 5 67 8 90 1
+
+ init_merge_to_and_fro_2(sbox, mod_9=False, mod_10=False)
+
+ automatic_merge(sbox, 'B', 'A',
+ expect_changes=['B4', 'B7'],
+ expect_mi=[2, 3, 4, 5, 6, 7, 8, 9, 10],
+ expect_3ways=[three_way_merge('A7', 'B10')])
+
+@SkipUnless(server_has_mergeinfo)
+def merge_to_and_fro_2_2(sbox):
+ """merge_to_and_fro_2_2"""
+
+ # A (--o------o------o---x
+ # ( \ \ /
+ # B (---o--x---o--x---o---
+ # 2 34 5 67 8 90 1
+
+ init_merge_to_and_fro_2(sbox, mod_9=True, mod_10=True)
+
+ automatic_merge(sbox, 'B', 'A',
+ expect_changes=['B4', 'B7', 'B10'],
+ expect_mi=[2, 3, 4, 5, 6, 7, 8, 9, 10],
+ expect_3ways=[three_way_merge('A7', 'B10')])
+
+def init_merge_to_and_fro_3(sbox, mod_9, mod_10):
+ """Set up branches A and B for the merge_to_and_fro_3/4 scenarios.
+ MOD_9 is True to modify A in r9, MOD_10 is True to modify B in r10,
+ otherwise make no-op commits for r9 and/or r10."""
+
+ # A (--o------o---x--?-
+ # ( \ /
+ # B (---o--x---o------?
+ # 2 34 5 67 8 90
+
+ init_mod_merge_mod(sbox, mod_6=True, mod_7=True)
+
+ automatic_merge(sbox, 'B', 'A',
+ expect_changes=['B4', 'B7'],
+ expect_mi=[2, 3, 4, 5, 6, 7],
+ expect_3ways=[three_way_merge('A4', 'B7')])
+
+ if mod_9:
+ modify_branch(sbox, 'A', 9)
+ else:
+ no_op_commit(sbox) # r9
+
+ if mod_10:
+ modify_branch(sbox, 'B', 10)
+ else:
+ no_op_commit(sbox) # r10
+
+@SkipUnless(server_has_mergeinfo)
+def merge_to_and_fro_3_1(sbox):
+ """merge_to_and_fro_3_1"""
+
+ # A (--o------o---x------x
+ # ( \ / /
+ # B (---o--x---o----------
+ # 2 34 5 67 8 90 1
+
+ init_merge_to_and_fro_3(sbox, mod_9=False, mod_10=False)
+
+ automatic_merge(sbox, 'B', 'A',
+ expect_changes=[],
+ expect_mi=[8, 9, 10],
+ expect_3ways=[three_way_merge_no_op('B7', 'B10')])
+
+@SkipUnless(server_has_mergeinfo)
+def merge_to_and_fro_3_2(sbox):
+ """merge_to_and_fro_3_2"""
+
+ # A (--o------o---x--o---x
+ # ( \ / /
+ # B (---o--x---o------o---
+ # 2 34 5 67 8 90 1
+
+ init_merge_to_and_fro_3(sbox, mod_9=True, mod_10=True)
+
+ automatic_merge(sbox, 'B', 'A',
+ expect_changes=['B10'],
+ expect_mi=[8, 9, 10],
+ expect_3ways=[three_way_merge('B7', 'B10')])
+
+@SkipUnless(server_has_mergeinfo)
+def merge_to_and_fro_4_1(sbox):
+ """merge_to_and_fro_4_1"""
+
+ # A (--o------o---x-------
+ # ( \ / \
+ # B (---o--x---o---------x
+ # 2 34 5 67 8 90 1
+
+ init_merge_to_and_fro_3(sbox, mod_9=False, mod_10=False)
+
+ automatic_merge(sbox, 'A', 'B',
+ expect_changes=['A6'],
+ expect_mi=[5, 6, 7, 8, 9, 10],
+ expect_3ways=[three_way_merge_no_op('B7', 'A10')])
+
+@SkipUnless(server_has_mergeinfo)
+def merge_to_and_fro_4_2(sbox):
+ """merge_to_and_fro_4_2"""
+
+ # A (--o------o---x--o----
+ # ( \ / \
+ # B (---o--x---o------o--x
+ # 2 34 5 67 8 90 1
+
+ init_merge_to_and_fro_3(sbox, mod_9=True, mod_10=True)
+
+ automatic_merge(sbox, 'A', 'B',
+ expect_changes=['A6', 'A9'],
+ expect_mi=[5, 6, 7, 8, 9, 10],
+ expect_3ways=[three_way_merge('B7', 'A10')])
+
+#----------------------------------------------------------------------
+
+# Cherry-pick scenarios
+
+@SkipUnless(server_has_mergeinfo)
+def cherry1_fwd(sbox):
+ """cherry1_fwd"""
+
+ # A (--o------o--[o]----o---
+ # ( \ \ \
+ # B (---o--x---------c-----x
+ # 2 34 5 67 8 9 0 1
+
+ init_mod_merge_mod(sbox, mod_6=True, mod_7=False)
+ sbox.simple_update()
+ modify_branch(sbox, 'A', 8)
+ cherry_pick(sbox, 8, 'A', 'B')
+ modify_branch(sbox, 'A', 10)
+
+ automatic_merge(sbox, 'A', 'B',
+ expect_changes=['A6', 'A10'], # and NOT A8
+ expect_mi=[5, 6, 7, 9, 10],
+ expect_3ways=[three_way_merge('A4', 'A7'),
+ three_way_merge('A8', 'A10')])
+
+@SkipUnless(server_has_mergeinfo)
+@XFail()
+@Issue(4255)
+def cherry2_fwd(sbox):
+ """cherry2_fwd"""
+
+ # A (--o-------------c--o---
+ # ( \ / \
+ # B (---o--x---o-[o]-------x
+ # 2 34 5 67 8 9 0 1
+
+ init_mod_merge_mod(sbox, mod_6=False, mod_7=True)
+ modify_branch(sbox, 'B', 8)
+ cherry_pick(sbox, 8, 'B', 'A')
+ modify_branch(sbox, 'A', 10)
+
+ automatic_merge(sbox, 'A', 'B',
+ expect_changes=['A10'], # and NOT A9
+ expect_mi=[5, 6, 7, 8, 9, 10],
+ expect_3ways=[three_way_merge('A9', 'A10')])
+
+@SkipUnless(server_has_mergeinfo)
+@XFail()
+@Issue(4255)
+def cherry3_fwd(sbox):
+ """cherry3_fwd"""
+
+ # A (--o--------------c--o----
+ # ( \ / \
+ # ( \ / \
+ # B (---o--o-[o]-x-/---------x
+ # \__/
+ # 2 34 5 6 7 8 9 0
+
+ make_branches(sbox)
+ modify_branch(sbox, 'A', 3)
+ modify_branch(sbox, 'B', 4)
+ modify_branch(sbox, 'B', 5)
+ modify_branch(sbox, 'B', 6)
+
+ automatic_merge(sbox, 'A', 'B',
+ expect_changes=['A3'],
+ expect_mi=[2, 3, 4, 5, 6],
+ expect_3ways=[three_way_merge('A1', 'A6')])
+
+ cherry_pick(sbox, 6, 'B', 'A')
+ modify_branch(sbox, 'A', 9)
+
+ automatic_merge(sbox, 'A', 'B',
+ expect_changes=['A9'], # and NOT A8
+ expect_mi=[7, 8, 9],
+ expect_3ways=[three_way_merge('A8', 'A9')])
+
+#----------------------------------------------------------------------
+# Automatic merges ignore subtree mergeinfo during reintegrate.
+@SkipUnless(server_has_mergeinfo)
+@Issue(4258)
+def subtree_to_and_fro(sbox):
+ "reintegrate considers source subtree mergeinfo"
+
+ # A (-----o-o-o-o------------x
+ # ( \ \ /
+ # ( \ \ /
+ # A_COPY ( o---------o--s--o--
+ # 2 3 4 5 6 7 8 9
+
+ # Some paths we'll care about.
+ A_COPY_gamma_path = sbox.ospath('A_COPY/D/gamma')
+ psi_path = sbox.ospath('A/D/H/psi')
+ A_COPY_D_path = sbox.ospath('A_COPY/D')
+ A_path = sbox.ospath('A')
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Setup a simple 'trunk & branch': Copy ^/A to ^/A_COPY in r2 and then
+ # make a few edits under A in r3-6 (edits r3, r4, r6 are under subtree 'D'):
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ # r7 - Edit a file on the branch.
+ svntest.main.file_write(A_COPY_gamma_path, "Branch edit to 'gamma'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir,
+ '-m', 'Edit a file on our branch')
+
+ # r8 - Do a subtree sync merge from ^/A/D to A_COPY/D.
+ # Note that among other things this changes A_COPY/D/H/psi.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ sbox.repo_url + '/A/D', A_COPY_D_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir,
+ '-m', 'Automatic subtree merge')
+
+ # r9 - Make an edit to A/D/H/psi.
+ svntest.main.file_write(psi_path, "Trunk Edit to 'psi'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir,
+ '-m', 'Edit a file on our trunk')
+
+ # Now reintegrate ^/A_COPY back to A. Prior to issue #4258's fix, the
+ # the subtree merge to A_COPY/D just looks like any other branch edit and
+ # was not considered a merge. So the changes which exist on A/D and were
+ # merged to A_COPY/D, were merged *back* to A, resulting in a conflict:
+ #
+ # C:\...\working_copies\merge_automatic_tests-18>svn merge ^^/A_COPY A
+ # DBG: merge.c:11461: base on source: ^/A@1
+ # DBG: merge.c:11462: base on target: ^/A@1
+ # DBG: merge.c:11567: yca ^/A@1
+ # DBG: merge.c:11568: base ^/A@1
+ # DBG: merge.c:11571: right ^/A_COPY@8
+ # Conflict discovered in file 'A\D\H\psi'.
+ # Select: (p) postpone, (df) diff-full, (e) edit,
+ # (mc) mine-conflict, (tc) theirs-conflict,
+ # (s) show all options: p
+ # --- Merging r2 through r9 into 'A':
+ # C A\D\H\psi
+ # U A\D\gamma
+ # --- Recording mergeinfo for merge of r2 through r9 into 'A':
+ # U A
+ # Summary of conflicts:
+ # Text conflicts: 1
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ exit_code, out, err = svntest.actions.run_and_verify_svn(
+ [], svntest.verify.AnyOutput,
+ 'merge', sbox.repo_url + '/A_COPY', A_path)
+
+ # Better to produce the same warning that explicitly using the
+ # --reintegrate option would produce:
+ svntest.verify.verify_outputs("Automatic Reintegrate failed, but not "
+ "in the way expected",
+ err, None,
+ "(svn: E195016: Reintegrate can only be used if "
+ "revisions 2 through 9 were previously "
+ "merged from .*/A to the reintegrate source, "
+ "but this is not the case:\n)"
+ "|( A_COPY\n)"
+ "|( Missing ranges: /A:5\n)"
+ "|(\n)"
+ "|" + svntest.main.stack_trace_regexp,
+ None,
+ True) # Match *all* lines of stdout
+
+#----------------------------------------------------------------------
+# Automatic merges ignore subtree mergeinfo gaps older than the last rev
+# synced to the target root.
+@SkipUnless(server_has_mergeinfo)
+def merge_to_reverse_cherry_subtree_to_merge_to(sbox):
+ "sync merge considers target subtree mergeinfo"
+
+ # A (--o-o-o-o------------------
+ # ( \ \ \ \
+ # ( \ \ \ \
+ # B ( o--o------x-------rc-----x
+
+ # Some paths we'll care about.
+ A_COPY_path = sbox.ospath('A_COPY')
+ A_COPY_B_path = sbox.ospath('A_COPY/B')
+ A_COPY_beta_path = sbox.ospath('A_COPY/B/E/beta')
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Setup a simple 'trunk & branch': Copy ^/A to ^/A_COPY in r2 and then
+ # make a few edits under A in r3-6:
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ # Sync merge ^/A to A_COPY, then reverse merge r5 from ^/A/B to A_COPY/B.
+ # This results in mergeinfo on the target which makes it appear that the
+ # branch is synced up to r6, but the subtree mergeinfo on A_COPY/B reveals
+ # that r5 has not been merged to that subtree:
+ #
+ # Properties on 'A_COPY':
+ # svn:mergeinfo
+ # /A:2-6
+ # Properties on 'A_COPY\B':
+ # svn:mergeinfo
+ # /A/B:2-4,6
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ sbox.repo_url + '/A', A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'merge', '-c-5',
+ sbox.repo_url + '/A/B',
+ A_COPY_B_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-m',
+ 'sync merge and reverse subtree merge')
+
+ # Try an automatic sync merge from ^/A to A_COPY. Revision 5 should be
+ # merged to A_COPY/B as its subtree mergeinfo reveals that rev is missing,
+ # like so:
+ #
+ # >svn merge ^/A A_COPY
+ # --- Merging r5 into 'A_COPY\B':
+ # U A_COPY\B\E\beta
+ # --- Recording mergeinfo for merge of r5 through r7 into 'A_COPY':
+ # U A_COPY
+ # --- Recording mergeinfo for merge of r5 through r7 into 'A_COPY\B':
+ # U A_COPY\B
+ # --- Eliding mergeinfo from 'A_COPY\B':
+ # U A_COPY\B
+ #
+ # But the merge ignores the subtree mergeinfo and considers
+ # only the mergeinfo on the target itself (and thus is a no-op but for
+ # the mergeinfo change on the root of the merge target):
+ #
+ # >svn merge ^/A A_COPY
+ # --- Recording mergeinfo for merge of r7 into 'A_COPY':
+ # U A_COPY
+ #
+ # >svn diff
+ # Index: A_COPY
+ # ===================================================================
+ # --- A_COPY (revision 7)
+ # +++ A_COPY (working copy)
+ #
+ # Property changes on: A_COPY
+ # ___________________________________________________________________
+ # Modified: svn:mergeinfo
+ # Merged /A:r7
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_output = wc.State(A_COPY_path, {
+ 'B/E/beta' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'B' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ 'B' : Item(status=' U'),
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' M'),
+ 'mu' : Item(status=' '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status='M '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev='7')
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:2-7'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, None, None,
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False,
+ A_COPY_path)
+
+#----------------------------------------------------------------------
+# Automatic merges should notice ancestory for replaced files
+@SkipUnless(server_has_mergeinfo)
+def merge_replacement(sbox):
+ "notice ancestory for replaced files"
+
+ A_path = sbox.ospath('A')
+ A_COPY_path = sbox.ospath('A_copy')
+ A_COPY_mu_path = sbox.ospath('A_copy/mu')
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ sbox.simple_copy('A', 'A_copy')
+ # Commit as r2
+ sbox.simple_commit()
+
+ sbox.simple_rm('A_copy/B/lambda')
+ sbox.simple_copy('A_copy/D/gamma', 'A_copy/B/lambda')
+
+ sbox.simple_rm('A_copy/mu')
+ svntest.main.file_write(A_COPY_mu_path, "Branch edit to 'mu'.\n")
+ sbox.simple_add('A_copy/mu')
+
+ # Commit as r3
+ sbox.simple_commit()
+
+ expected_output = wc.State(A_path, {
+ 'B/lambda' : Item(status='R '),
+ 'mu' : Item(status='R '),
+ })
+ expected_mergeinfo_output = wc.State(A_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_path, {
+ })
+
+ expected_status = wc.State(A_path, {
+ '' : Item(status=' M', wc_rev='1'),
+ 'B' : Item(status=' ', wc_rev='1'),
+ 'mu' : Item(status='R ', copied='+', wc_rev='-'),
+ 'B/E' : Item(status=' ', wc_rev='1'),
+ 'B/E/alpha' : Item(status=' ', wc_rev='1'),
+ 'B/E/beta' : Item(status=' ', wc_rev='1'),
+ 'B/lambda' : Item(status='R ', copied='+', wc_rev='-'),
+ 'B/F' : Item(status=' ', wc_rev='1'),
+ 'C' : Item(status=' ', wc_rev='1'),
+ 'D' : Item(status=' ', wc_rev='1'),
+ 'D/G' : Item(status=' ', wc_rev='1'),
+ 'D/G/pi' : Item(status=' ', wc_rev='1'),
+ 'D/G/rho' : Item(status=' ', wc_rev='1'),
+ 'D/G/tau' : Item(status=' ', wc_rev='1'),
+ 'D/gamma' : Item(status=' ', wc_rev='1'),
+ 'D/H' : Item(status=' ', wc_rev='1'),
+ 'D/H/chi' : Item(status=' ', wc_rev='1'),
+ 'D/H/psi' : Item(status=' ', wc_rev='1'),
+ 'D/H/omega' : Item(status=' ', wc_rev='1'),
+ })
+
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A_copy:2-3'}),
+ 'B' : Item(),
+ 'mu' : Item("Branch edit to 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'gamma'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ })
+
+ expected_skip = wc.State(A_COPY_path, { })
+
+ svntest.actions.run_and_verify_merge(A_path, None, None,
+ sbox.repo_url + '/A_copy', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False,
+ A_path)
+
+@SkipUnless(server_has_mergeinfo)
+@Issue(4313)
+
+# Test for issue #4313 'replaced merges source causes assertion during
+# automatic merge'
+def auto_merge_handles_replacements_in_merge_source(sbox):
+ "automerge handles replacements in merge source"
+
+ sbox.build()
+
+ A_path = sbox.ospath('A')
+ branch1_path = sbox.ospath('branch-1')
+ branch2_path = sbox.ospath('branch-2')
+
+ # r2 - Make two branches.
+ sbox.simple_copy('A', 'branch-1')
+ sbox.simple_copy('A', 'branch-2')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # r3 - Replace 'A' with 'branch-1'.
+ svntest.main.run_svn(None, 'del', A_path)
+ svntest.main.run_svn(None, 'copy', branch1_path, A_path)
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Merge^/A to branch-2, it should be a no-op but for mergeinfo changes,
+ # but it *should* work. Previously this failed because automatic merges
+ # weren't adhering to the merge source normalization rules, resulting in
+ # this assertion:
+ #
+ # >svn merge ^/A branch-2
+ # ..\..\..\subversion\libsvn_client\merge.c:4568: (apr_err=235000)
+ # svn: E235000: In file '..\..\..\subversion\libsvn_client\merge.c'
+ # line 4568: assertion failed (apr_hash_count(implicit_src_mergeinfo)
+ # == 1)
+ #
+ # This application has requested the Runtime to terminate it in an
+ # unusual way.
+ # Please contact the application's support team for more information.
+ svntest.actions.run_and_verify_svn(
+ ["--- Recording mergeinfo for merge of r2 into '" + branch2_path + "':\n",
+ " U " + branch2_path + "\n",
+ "--- Recording mergeinfo for merge of r3 into '" + branch2_path + "':\n",
+ " G " + branch2_path + "\n"],
+ [], 'merge', sbox.repo_url + '/A', branch2_path)
+
+# Test for issue #4329 'automatic merge uses reintegrate type merge if
+# source is fully synced'
+@SkipUnless(server_has_mergeinfo)
+@Issue(4329)
+def effective_sync_results_in_reintegrate(sbox):
+ "an effectively synced branch gets reintegrated"
+
+ sbox.build()
+
+ iota_path = sbox.ospath('iota')
+ A_path = sbox.ospath('A')
+ psi_path = sbox.ospath('A/D/H/psi')
+ mu_path = sbox.ospath('A/mu')
+ branch_path = sbox.ospath('branch')
+ psi_branch_path = sbox.ospath('branch/D/H/psi')
+
+ # r2 - Make a branch.
+ sbox.simple_copy('A', 'branch')
+ sbox.simple_commit()
+
+ # r3 - An edit to a file on the trunk.
+ sbox.simple_append('A/mu', "Trunk edit to 'mu'\n", True)
+ sbox.simple_commit()
+
+ # r4 - An edit to a file on the branch
+ sbox.simple_append('branch/D/H/psi', "Branch edit to 'psi'\n", True)
+ sbox.simple_commit()
+
+ # r5 - Effectively sync all changes on trunk to the branch. We do this
+ # not via an automatic sync merge, but with a cherry pick that effectively
+ # merges the same changes (i.e. r3).
+ sbox.simple_update()
+ cherry_pick(sbox, 3, A_path, branch_path)
+
+ # r6 - Make another edit to the file on the trunk.
+ sbox.simple_append('A/mu', "2nd trunk edit to 'mu'\n", True)
+ sbox.simple_commit()
+
+ # Now try an explicit --reintegrate merge from ^/branch to A.
+ # This should work because since the resolution of
+ # http://subversion.tigris.org/issues/show_bug.cgi?id=3577
+ # if B is *effectively* synced with A, then B can be reintegrated
+ # to A.
+ sbox.simple_update()
+ expected_output = [
+ "--- Merging differences between repository URLs into '" +
+ A_path + "':\n",
+ "U " + psi_path + "\n",
+ "--- Recording mergeinfo for merge between repository URLs into '" +
+ A_path + "':\n",
+ " U " + A_path + "\n"]
+ svntest.actions.run_and_verify_svn(expected_output, [], 'merge',
+ sbox.repo_url + '/branch', A_path,
+ '--reintegrate')
+
+ # Revert the merge and try it again, this time without the --reintegrate
+ # option. The merge should still work with the same results.
+ #
+ # Previously this failed because the reintegrate code path is not followed,
+ # rather the automatic merge attempts a sync style merge of the yca (^/A@1)
+ # through the HEAD of the branch (^/branch@7). This results in a spurious
+ # conflict on A/mu as the edit made in r3 is reapplied.
+ #
+ # >svn merge ^/branch A
+ # --- Merging r2 through r6 into 'A':
+ # C A\mu
+ # U A\D\H\psi
+ # --- Recording mergeinfo for merge of r2 through r6 into 'A':
+ # U A
+ # Summary of conflicts:
+ # Text conflicts: 1
+ # Conflict discovered in file 'A\mu'.
+ # Select: (p) postpone, (df) diff-full, (e) edit, (m) merge,
+ # (mc) mine-conflict, (tc) theirs-conflict, (s) show all options: p
+ svntest.actions.run_and_verify_svn(None, [], 'revert', A_path, '-R')
+ svntest.actions.run_and_verify_svn(expected_output, [], 'merge',
+ sbox.repo_url + '/branch', A_path)
+
+@Issue(4481)
+def reintegrate_subtree_not_updated(sbox):
+ "reintegrate subtree not updated"
+
+ sbox.build()
+
+ # Create change on branch 'D_1'
+ sbox.simple_copy('A/D', 'D_1')
+ sbox.simple_commit()
+ sbox.simple_append('D_1/G/pi', "D_1/G pi edit\n")
+ sbox.simple_append('D_1/H/chi', "D_1/H chi edit\n")
+ sbox.simple_commit()
+
+ # Merge back to 'D' with two subtree merges
+ expected_output = [
+ "--- Merging r2 through r3 into '"
+ + sbox.ospath('A/D/G') + "':\n",
+ "U "
+ + sbox.ospath('A/D/G/pi') + "\n",
+ "--- Recording mergeinfo for merge of r2 through r3 into '"
+ + sbox.ospath('A/D/G') + "':\n",
+ " U "
+ + sbox.ospath('A/D/G') + "\n"]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'merge',
+ sbox.repo_url + '/D_1/G',
+ sbox.ospath('A/D/G'))
+ expected_output = [
+ "--- Merging r2 through r3 into '"
+ + sbox.ospath('A/D/H') + "':\n",
+ "U "
+ + sbox.ospath('A/D/H/chi') + "\n",
+ "--- Recording mergeinfo for merge of r2 through r3 into '"
+ + sbox.ospath('A/D/H') + "':\n",
+ " U "
+ + sbox.ospath('A/D/H') + "\n"]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'merge',
+ sbox.repo_url + '/D_1/H',
+ sbox.ospath('A/D/H'))
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Create branch 'D_2'
+ sbox.simple_copy('A/D', 'D_2')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Create change on 'D_2'
+ sbox.simple_append('D_2/G/pi', "D_2/G pi edit\n")
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Create change on 'D'
+ sbox.simple_append('A/D/G/rho', "D/G rho edit\n")
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Sync merge to 'D_2' (doesn't record mergeinfo on 'D_2/H' subtree)
+ expected_output = [
+ "--- Merging r5 through r7 into '"
+ + sbox.ospath('D_2') + "':\n",
+ "U "
+ + sbox.ospath('D_2/G/rho') + "\n",
+ "--- Recording mergeinfo for merge of r5 through r7 into '"
+ + sbox.ospath('D_2') + "':\n",
+ " U "
+ + sbox.ospath('D_2') + "\n",
+ "--- Recording mergeinfo for merge of r5 through r7 into '"
+ + sbox.ospath('D_2/G') + "':\n",
+ " U "
+ + sbox.ospath('D_2/G') + "\n"]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'merge',
+ sbox.repo_url + '/A/D',
+ sbox.ospath('D_2'))
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Reintegrate 'D_2' to 'D'
+ expected_output = [
+ "--- Merging differences between repository URLs into '"
+ + sbox.ospath('A/D') + "':\n",
+ "U "
+ + sbox.ospath('A/D/G/pi') + "\n",
+ " U "
+ + sbox.ospath('A/D/G') + "\n",
+ "--- Recording mergeinfo for merge between repository URLs into '"
+ + sbox.ospath('A/D') + "':\n",
+ " U "
+ + sbox.ospath('A/D') + "\n",
+ " U "
+ + sbox.ospath('A/D/G') + "\n"]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'merge',
+ sbox.repo_url + '/D_2',
+ sbox.ospath('A/D'))
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # merge to 'D_2'. This merge previously failed with this error:
+ #
+ # svn: E195016: Reintegrate can only be used if revisions 5 through 9 were
+ # previously merged from [URL]/D_2 to the reintegrate source, but this is
+ # not the case:
+ # A/D/G
+ # Missing ranges: /A/D/G:7
+ #
+ expected_output = [
+ "--- Merging differences between repository URLs into '"
+ + sbox.ospath('D_2') + "':\n",
+ " U "
+ + sbox.ospath('D_2/G') + "\n",
+ "--- Recording mergeinfo for merge between repository URLs into '"
+ + sbox.ospath('D_2') + "':\n",
+ " U "
+ + sbox.ospath('D_2') + "\n",
+ " G "
+ + sbox.ospath('D_2/G') + "\n"]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'merge',
+ sbox.repo_url + '/A/D',
+ sbox.ospath('D_2'))
+ sbox.simple_commit()
+ sbox.simple_update()
+
+def merge_to_copy_and_add(sbox):
+ "merge peg to a copy and add"
+
+ sbox.build()
+
+ sbox.simple_copy('A', 'AA')
+ sbox.simple_append('A/mu', 'A/mu')
+ sbox.simple_commit('A')
+
+ # This is the scenario the code is supposed to support; a copy
+ svntest.actions.run_and_verify_svn(None, [],
+ 'merge', '^/A', sbox.ospath('AA'))
+
+ sbox.simple_mkdir('A3')
+ # And this case currently segfaults, because merge doesn't check
+ # if the path has a repository location
+ expected_err = ".*svn: E195012: Can't perform .*A3'.*added.*"
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'merge', '^/A', sbox.ospath('A3'))
+ # Try the same merge with --reintegrate, for completeness' sake.
+ expected_err = ".*svn: E195012: Can't reintegrate into .*A3'.*added.*"
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'merge', '--reintegrate', '^/A',
+ sbox.ospath('A3'))
+
+def merge_delete_crlf_file(sbox):
+ "merge the deletion of a strict CRLF file"
+
+ sbox.build()
+
+ sbox.simple_copy('A', 'AA')
+
+ # Let commit fix the eols
+ sbox.simple_add_text('with\rCRLF\rhere!', 'A/crlf')
+ sbox.simple_add_text('with\rnative\r\eol', 'A/native')
+ sbox.simple_add_text('with\rCR\r\eol', 'A/cr')
+ sbox.simple_add_text('with\rLF\r\eol', 'A/lf')
+
+ # And apply the magic property
+ sbox.simple_propset('svn:eol-style', 'CRLF', 'A/crlf')
+ sbox.simple_propset('svn:eol-style', 'native', 'A/native')
+ sbox.simple_propset('svn:eol-style', 'CR', 'A/cr')
+ sbox.simple_propset('svn:eol-style', 'LF', 'A/lf')
+
+ sbox.simple_commit('A') # r2
+
+ # Merge the addition of the files
+ svntest.actions.run_and_verify_svn(None, [],
+ 'merge', '^/A', sbox.ospath('AA'))
+ sbox.simple_commit('AA') # r3
+
+ sbox.simple_rm('A/D', 'A/mu', 'A/crlf', 'A/native', 'A/cr', 'A/lf')
+ sbox.simple_commit('A') # r4
+
+ sbox.simple_update('') # Make single revision r4
+
+ # And now merge the deletes
+ expected_output = svntest.verify.UnorderedOutput([
+ '--- Merging r3 through r4 into \'%s\':\n' % sbox.ospath('AA'),
+ 'D %s\n' % sbox.ospath('AA/cr'),
+ 'D %s\n' % sbox.ospath('AA/crlf'),
+ 'D %s\n' % sbox.ospath('AA/lf'),
+ 'D %s\n' % sbox.ospath('AA/native'),
+ 'D %s\n' % sbox.ospath('AA/mu'),
+ 'D %s\n' % sbox.ospath('AA/D'),
+ '--- Recording mergeinfo for merge of r3 through r4 into \'%s\':\n'
+ % sbox.ospath('AA'),
+ ' U %s\n' % sbox.ospath('AA')
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'merge', '^/A', sbox.ospath('AA'))
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ merge_once_1,
+ merge_once_2,
+ merge_once_3,
+ merge_once_4,
+ merge_twice_same_direction_1,
+ merge_twice_same_direction_2,
+ merge_to_and_fro_1_1,
+ merge_to_and_fro_1_2,
+ merge_to_and_fro_2_1,
+ merge_to_and_fro_2_2,
+ merge_to_and_fro_3_1,
+ merge_to_and_fro_3_2,
+ merge_to_and_fro_4_1,
+ merge_to_and_fro_4_2,
+ cherry1_fwd,
+ cherry2_fwd,
+ cherry3_fwd,
+ subtree_to_and_fro,
+ merge_to_reverse_cherry_subtree_to_merge_to,
+ merge_replacement,
+ auto_merge_handles_replacements_in_merge_source,
+ effective_sync_results_in_reintegrate,
+ reintegrate_subtree_not_updated,
+ merge_to_copy_and_add,
+ merge_delete_crlf_file,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/merge_reintegrate_tests.py b/subversion/tests/cmdline/merge_reintegrate_tests.py
new file mode 100755
index 0000000..7a27373
--- /dev/null
+++ b/subversion/tests/cmdline/merge_reintegrate_tests.py
@@ -0,0 +1,2893 @@
+#!/usr/bin/env python
+#
+# merge_reintegrate_tests.py: testing merge --reintegrate
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import shutil, sys, re, os
+import time
+
+# Our testing module
+import svntest
+from svntest import main, wc, verify, actions
+
+# (abbreviation)
+Item = wc.StateItem
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+exp_noop_up_out = svntest.actions.expected_noop_update_output
+
+from svntest.main import SVN_PROP_MERGEINFO
+from svntest.main import server_has_mergeinfo
+from svntest.mergetrees import set_up_branch
+from svntest.mergetrees import expected_merge_output
+
+#----------------------------------------------------------------------
+def run_reintegrate(src_url, tgt_path):
+ """Run 'svn merge --reintegrate SRC_URL TGT_PATH'. Raise an error if
+ there is nothing on stdout, anything on stderr, or a non-zero exit
+ code.
+ """
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'merge', '--reintegrate',
+ src_url, tgt_path)
+
+def run_reintegrate_expect_error(src_url, tgt_path,
+ expected_stdout, expected_stderr):
+ """Run 'svn merge --reintegrate SRC_URL TGT_PATH'. Raise an error
+ unless stdout and stderr both match and the exit code is non-zero.
+ Every line of stderr must match the regex EXPECTED_STDERR.
+ """
+ expected_stderr += "|" + svntest.main.stack_trace_regexp
+
+ # The actions.run_and_verify_* methods are happy if one line of the error
+ # matches the regex, but we want to check that every line matches.
+ # So we will pass the stderr to svntest.verify.verify_outputs()
+ # ourselves, but as the 'actual_stdout' argument, that way each line of
+ # error must match the regex.
+ exit_code, out, err = svntest.actions.run_and_verify_svn(
+ expected_stdout, svntest.verify.AnyOutput,
+ 'merge', '--reintegrate',
+ src_url, tgt_path)
+ assert exit_code
+ svntest.verify.verify_outputs(
+ "Reintegrate failed but not in the way expected",
+ err, None,
+ expected_stderr, None,
+ True) # Match *all* lines
+
+def run_and_verify_reintegrate(tgt_dir, src_url,
+ output_tree,
+ mergeinfo_output_tree,
+ elision_output_tree,
+ disk_tree, status_tree, skip_tree,
+ expected_stderr = [],
+ check_props = True,
+ dry_run = True):
+ """Run 'svn merge --reintegrate SRC_URL TGT_DIR'. Raise an error if
+ there is nothing on stdout, anything on stderr, or a non-zero exit
+ code, or if the expected ERROR_RE_STRING or any of the given expected
+ trees don't match.
+ """
+ svntest.actions.run_and_verify_merge(
+ tgt_dir, None, None, src_url, None,
+ output_tree, mergeinfo_output_tree, elision_output_tree,
+ disk_tree, status_tree, skip_tree,
+ expected_stderr, check_props, dry_run,
+ '--reintegrate', tgt_dir)
+
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+@Issue(3640)
+def basic_reintegrate(sbox):
+ "basic merge --reintegrate support"
+
+ # Also includes test for issue #3640 'moved target breaks reintegrate merge'
+
+ # Make A_COPY branch in r2, and do a few more commits to A in r3-6.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ expected_disk, expected_status = set_up_branch(sbox)
+
+ # Make a change on the branch, to A/mu. Commit in r7.
+ svntest.main.file_write(sbox.ospath('A_COPY/mu'),
+ "Changed on the branch.")
+ expected_output = wc.State(wc_dir, {'A_COPY/mu' : Item(verb='Sending')})
+ expected_status.tweak('A_COPY/mu', wc_rev=7)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ expected_disk.tweak('A_COPY/mu', contents='Changed on the branch.')
+
+ # Update the wcs.
+ expected_output = wc.State(wc_dir, {})
+ expected_status.tweak(wc_rev='7')
+ svntest.actions.run_and_verify_update(wc_dir, expected_output,
+ expected_disk, expected_status,
+ check_props=True)
+
+ # Merge from trunk to branch (ie, r3-6), using normal cherry-harvest.
+ A_COPY_path = sbox.ospath('A_COPY')
+ expected_output = wc.State(A_COPY_path, {
+ 'D/H/psi' : Item(status='U '),
+ 'D/G/rho' : Item(status='U '),
+ 'B/E/beta' : Item(status='U '),
+ 'D/H/omega' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ k_expected_status = wc.State(A_COPY_path, {
+ "B" : Item(status=' ', wc_rev=7),
+ "B/lambda" : Item(status=' ', wc_rev=7),
+ "B/E" : Item(status=' ', wc_rev=7),
+ "B/E/alpha" : Item(status=' ', wc_rev=7),
+ "B/E/beta" : Item(status='M ', wc_rev=7),
+ "B/F" : Item(status=' ', wc_rev=7),
+ "mu" : Item(status=' ', wc_rev=7),
+ "C" : Item(status=' ', wc_rev=7),
+ "D" : Item(status=' ', wc_rev=7),
+ "D/gamma" : Item(status=' ', wc_rev=7),
+ "D/G" : Item(status=' ', wc_rev=7),
+ "D/G/pi" : Item(status=' ', wc_rev=7),
+ "D/G/rho" : Item(status='M ', wc_rev=7),
+ "D/G/tau" : Item(status=' ', wc_rev=7),
+ "D/H" : Item(status=' ', wc_rev=7),
+ "D/H/chi" : Item(status=' ', wc_rev=7),
+ "D/H/omega" : Item(status='M ', wc_rev=7),
+ "D/H/psi" : Item(status='M ', wc_rev=7),
+ "" : Item(status=' M', wc_rev=7),
+ })
+ k_expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:2-7'}),
+ 'B' : Item(),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/F' : Item(),
+ 'mu' : Item("Changed on the branch."),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/omega' : Item("New content"),
+ 'D/H/psi' : Item("New content"),
+ })
+ expected_skip = wc.State(A_COPY_path, {})
+ svntest.actions.run_and_verify_merge(A_COPY_path, None, None,
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ k_expected_disk,
+ k_expected_status,
+ expected_skip,
+ [], True)
+ expected_disk.tweak('A_COPY', props={SVN_PROP_MERGEINFO: '/A:2-7'})
+ expected_disk.tweak('A_COPY/B/E/beta', contents="New content")
+ expected_disk.tweak('A_COPY/D/G/rho', contents="New content")
+ expected_disk.tweak('A_COPY/D/H/omega', contents="New content")
+ expected_disk.tweak('A_COPY/D/H/psi', contents="New content")
+
+ # Commit the merge to branch (r8).
+ expected_output = wc.State(wc_dir, {
+ 'A_COPY/D/H/psi' : Item(verb='Sending'),
+ 'A_COPY/D/G/rho' : Item(verb='Sending'),
+ 'A_COPY/B/E/beta' : Item(verb='Sending'),
+ 'A_COPY/D/H/omega' : Item(verb='Sending'),
+ 'A_COPY' : Item(verb='Sending'),
+ })
+ expected_status.tweak('A_COPY', 'A_COPY/D/H/psi', 'A_COPY/D/G/rho',
+ 'A_COPY/B/E/beta', 'A_COPY/D/H/omega', wc_rev=8)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Update the wcs again.
+ expected_output = wc.State(wc_dir, {})
+ expected_status.tweak(wc_rev='8')
+ svntest.actions.run_and_verify_update(wc_dir, expected_output,
+ expected_disk, expected_status,
+ check_props=True)
+
+
+ # *finally*, actually run merge --reintegrate in trunk with the
+ # branch URL. This should bring in the mu change and the tauprime
+ # change.
+ A_path = sbox.ospath('A')
+ expected_output = wc.State(A_path, {
+ 'mu' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_path, {
+ })
+ k_expected_status = wc.State(A_path, {
+ "B" : Item(status=' ', wc_rev=8),
+ "B/lambda" : Item(status=' ', wc_rev=8),
+ "B/E" : Item(status=' ', wc_rev=8),
+ "B/E/alpha" : Item(status=' ', wc_rev=8),
+ "B/E/beta" : Item(status=' ', wc_rev=8),
+ "B/F" : Item(status=' ', wc_rev=8),
+ "mu" : Item(status='M ', wc_rev=8),
+ "C" : Item(status=' ', wc_rev=8),
+ "D" : Item(status=' ', wc_rev=8),
+ "D/gamma" : Item(status=' ', wc_rev=8),
+ "D/G" : Item(status=' ', wc_rev=8),
+ "D/G/pi" : Item(status=' ', wc_rev=8),
+ "D/G/rho" : Item(status=' ', wc_rev=8),
+ "D/G/tau" : Item(status=' ', wc_rev=8),
+ "D/H" : Item(status=' ', wc_rev=8),
+ "D/H/chi" : Item(status=' ', wc_rev=8),
+ "D/H/omega" : Item(status=' ', wc_rev=8),
+ "D/H/psi" : Item(status=' ', wc_rev=8),
+ "" : Item(status=' M', wc_rev=8),
+ })
+ k_expected_disk.tweak('', props={SVN_PROP_MERGEINFO : '/A_COPY:2-8'})
+ expected_skip = wc.State(A_path, {})
+ run_and_verify_reintegrate(A_path,
+ sbox.repo_url + '/A_COPY',
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ k_expected_disk,
+ k_expected_status,
+ expected_skip,
+ [], True, True)
+
+ # Test issue #3640:
+ #
+ # Revert the merge then move A to A_MOVED in r9. Repeat the merge, but
+ # targeting A_MOVED this time. This should work with almost the same
+ # results. The only differences being the inclusion of r9 in the
+ # mergeinfo and the A-->A_MOVED path difference.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 9.\n'],
+ [], 'move',
+ sbox.repo_url + '/A',
+ sbox.repo_url + '/A_MOVED',
+ '-m', 'Copy A to A_MOVED')
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ A_MOVED_path = sbox.ospath('A_MOVED')
+ expected_output = wc.State(A_MOVED_path, {
+ 'mu' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_MOVED_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_MOVED_path, {
+ })
+ expected_status = wc.State(A_MOVED_path, {
+ "B" : Item(status=' '),
+ "B/lambda" : Item(status=' '),
+ "B/E" : Item(status=' '),
+ "B/E/alpha" : Item(status=' '),
+ "B/E/beta" : Item(status=' '),
+ "B/F" : Item(status=' '),
+ "mu" : Item(status='M '),
+ "C" : Item(status=' '),
+ "D" : Item(status=' '),
+ "D/gamma" : Item(status=' '),
+ "D/G" : Item(status=' '),
+ "D/G/pi" : Item(status=' '),
+ "D/G/rho" : Item(status=' '),
+ "D/G/tau" : Item(status=' '),
+ "D/H" : Item(status=' '),
+ "D/H/chi" : Item(status=' '),
+ "D/H/omega" : Item(status=' '),
+ "D/H/psi" : Item(status=' '),
+ "" : Item(status=' M'),
+ })
+ expected_status.tweak(wc_rev=9)
+ k_expected_disk.tweak('', props={SVN_PROP_MERGEINFO : '/A_COPY:2-9'})
+ expected_skip = wc.State(A_MOVED_path, {})
+ run_and_verify_reintegrate(A_MOVED_path,
+ sbox.repo_url + '/A_COPY',
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ k_expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def reintegrate_with_rename(sbox):
+ "merge --reintegrate with renamed file on branch"
+
+ # Make A_COPY branch in r2, and do a few more commits to A in r3-6.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ expected_disk, expected_status = set_up_branch(sbox)
+
+ # Make a change on the branch, to A/mu. Commit in r7.
+ svntest.main.file_write(sbox.ospath('A_COPY/mu'),
+ "Changed on the branch.")
+ expected_output = wc.State(wc_dir, {'A_COPY/mu' : Item(verb='Sending')})
+ expected_status.tweak('A_COPY/mu', wc_rev=7)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ expected_disk.tweak('A_COPY/mu', contents='Changed on the branch.')
+
+ # Update the wcs.
+ expected_output = wc.State(wc_dir, {})
+ expected_status.tweak(wc_rev='7')
+ svntest.actions.run_and_verify_update(wc_dir, expected_output,
+ expected_disk, expected_status,
+ check_props=True)
+
+ # Merge from trunk to branch (ie, r3-6), using normal cherry-harvest.
+ A_COPY_path = sbox.ospath('A_COPY')
+ expected_output = wc.State(A_COPY_path, {
+ 'D/H/psi' : Item(status='U '),
+ 'D/G/rho' : Item(status='U '),
+ 'B/E/beta' : Item(status='U '),
+ 'D/H/omega' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ k_expected_status = wc.State(A_COPY_path, {
+ "B" : Item(status=' ', wc_rev=7),
+ "B/lambda" : Item(status=' ', wc_rev=7),
+ "B/E" : Item(status=' ', wc_rev=7),
+ "B/E/alpha" : Item(status=' ', wc_rev=7),
+ "B/E/beta" : Item(status='M ', wc_rev=7),
+ "B/F" : Item(status=' ', wc_rev=7),
+ "mu" : Item(status=' ', wc_rev=7),
+ "C" : Item(status=' ', wc_rev=7),
+ "D" : Item(status=' ', wc_rev=7),
+ "D/gamma" : Item(status=' ', wc_rev=7),
+ "D/G" : Item(status=' ', wc_rev=7),
+ "D/G/pi" : Item(status=' ', wc_rev=7),
+ "D/G/rho" : Item(status='M ', wc_rev=7),
+ "D/G/tau" : Item(status=' ', wc_rev=7),
+ "D/H" : Item(status=' ', wc_rev=7),
+ "D/H/chi" : Item(status=' ', wc_rev=7),
+ "D/H/omega" : Item(status='M ', wc_rev=7),
+ "D/H/psi" : Item(status='M ', wc_rev=7),
+ "" : Item(status=' M', wc_rev=7),
+ })
+ k_expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:2-7'}),
+ 'B' : Item(),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/F' : Item(),
+ 'mu' : Item("Changed on the branch."),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/omega' : Item("New content"),
+ 'D/H/psi' : Item("New content"),
+ })
+ expected_skip = wc.State(A_COPY_path, {})
+ svntest.actions.run_and_verify_merge(A_COPY_path, None, None,
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ k_expected_disk,
+ k_expected_status,
+ expected_skip,
+ [], True)
+ expected_disk.tweak('A_COPY', props={SVN_PROP_MERGEINFO: '/A:2-7'})
+ expected_disk.tweak('A_COPY/B/E/beta', contents="New content")
+ expected_disk.tweak('A_COPY/D/G/rho', contents="New content")
+ expected_disk.tweak('A_COPY/D/H/omega', contents="New content")
+ expected_disk.tweak('A_COPY/D/H/psi', contents="New content")
+
+ # Commit the merge to branch (r8).
+ expected_output = wc.State(wc_dir, {
+ 'A_COPY/D/H/psi' : Item(verb='Sending'),
+ 'A_COPY/D/G/rho' : Item(verb='Sending'),
+ 'A_COPY/B/E/beta' : Item(verb='Sending'),
+ 'A_COPY/D/H/omega' : Item(verb='Sending'),
+ 'A_COPY' : Item(verb='Sending'),
+ })
+ expected_status.tweak('A_COPY', 'A_COPY/D/H/psi', 'A_COPY/D/G/rho',
+ 'A_COPY/B/E/beta', 'A_COPY/D/H/omega', wc_rev=8)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+
+ # Update the wcs again.
+ #
+ # Note: this update had to be added because of r869016 (which was
+ # merged into the reintegrate branch in r869021). Without this
+ # update, the mergeinfo will not be inherited properly as part of
+ # the 'svn cp tau tauprime' step, and later (during the post-commit
+ # update, with the new expected_disk) we'll get an error like this:
+ #
+ # =============================================================
+ # Expected 'tauprime' and actual 'tauprime' in disk tree are different!
+ # =============================================================
+ # EXPECTED NODE TO BE:
+ # =============================================================
+ # * Node name: tauprime
+ # Path: A_COPY/D/G/tauprime
+ # Contents: This is the file 'tau'.
+ #
+ # Properties: {'svn:mergeinfo': '/A/D/G/tau:2-7'}
+ # Attributes: {}
+ # Children: N/A (node is a file)
+ # =============================================================
+ # ACTUAL NODE FOUND:
+ # =============================================================
+ # * Node name: tauprime
+ # Path: G/tauprime
+ # Contents: This is the file 'tau'.
+ #
+ # Properties: {'svn:mergeinfo': ''}
+ # Attributes: {}
+ # Children: N/A (node is a file)
+ #
+ expected_output = wc.State(wc_dir, {})
+ expected_status.tweak(wc_rev='8')
+ svntest.actions.run_and_verify_update(wc_dir, expected_output,
+ expected_disk, expected_status,
+ check_props=True)
+
+ # Make another change on the branch: copy tau to tauprime. Commit
+ # in r9.
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ sbox.repo_url + '/A_COPY/D/G/tau',
+ sbox.repo_url + '/A_COPY/D/G/tauprime',
+ '-m',
+ 'Repos to repos copy of tau to tauprime')
+
+ # Update the trunk (well, the whole wc) to get the copy above and since
+ # reintegrate really wants a clean wc.
+ expected_output = wc.State(wc_dir, {
+ 'A_COPY/D/G/tauprime' : Item(verb='Adding')
+ })
+ expected_output = wc.State(A_COPY_path, {
+ 'D/G/tauprime' : Item(status='A '),
+ })
+ expected_status.add({'A_COPY/D/G/tauprime': Item(status=' ', wc_rev=9)})
+ expected_disk.add({
+ 'A_COPY/D/G/tauprime' : Item(props={SVN_PROP_MERGEINFO: '/A/D/G/tau:2-7'},
+ contents="This is the file 'tau'.\n")
+ })
+ expected_status.tweak(wc_rev='9')
+ svntest.actions.run_and_verify_update(wc_dir, expected_output,
+ expected_disk, expected_status,
+ check_props=True)
+
+ # *finally*, actually run merge --reintegrate in trunk with the
+ # branch URL. This should bring in the mu change and the tauprime
+ # change.
+ A_path = sbox.ospath('A')
+ expected_output = wc.State(A_path, {
+ 'mu' : Item(status='U '),
+ 'D/G/tauprime' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(A_path, {
+ '' : Item(status=' U'),
+ 'D/G/tauprime' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_path, {
+ })
+ k_expected_status = wc.State(A_path, {
+ "B" : Item(status=' ', wc_rev=9),
+ "B/lambda" : Item(status=' ', wc_rev=9),
+ "B/E" : Item(status=' ', wc_rev=9),
+ "B/E/alpha" : Item(status=' ', wc_rev=9),
+ "B/E/beta" : Item(status=' ', wc_rev=9),
+ "B/F" : Item(status=' ', wc_rev=9),
+ "mu" : Item(status='M ', wc_rev=9),
+ "C" : Item(status=' ', wc_rev=9),
+ "D" : Item(status=' ', wc_rev=9),
+ "D/gamma" : Item(status=' ', wc_rev=9),
+ "D/G" : Item(status=' ', wc_rev=9),
+ "D/G/pi" : Item(status=' ', wc_rev=9),
+ "D/G/rho" : Item(status=' ', wc_rev=9),
+ "D/G/tau" : Item(status=' ', wc_rev=9),
+ "D/G/tauprime" : Item(status='A ', wc_rev='-', copied='+'),
+ "D/H" : Item(status=' ', wc_rev=9),
+ "D/H/chi" : Item(status=' ', wc_rev=9),
+ "D/H/omega" : Item(status=' ', wc_rev=9),
+ "D/H/psi" : Item(status=' ', wc_rev=9),
+ "" : Item(status=' M', wc_rev=9),
+ })
+ k_expected_disk.tweak('', props={SVN_PROP_MERGEINFO : '/A_COPY:2-9'})
+ k_expected_disk.add({
+ 'D/G/tauprime' : Item(props={SVN_PROP_MERGEINFO :
+ '/A/D/G/tau:2-7\n/A_COPY/D/G/tauprime:9'},
+ contents="This is the file 'tau'.\n")
+ })
+ expected_skip = wc.State(A_path, {})
+ run_and_verify_reintegrate(A_path,
+ sbox.repo_url + '/A_COPY',
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ k_expected_disk,
+ k_expected_status,
+ expected_skip,
+ [], True, True)
+
+ # Finally, commit the result of the merge (r10).
+ expected_output = wc.State(wc_dir, {
+ 'A/D/G/tauprime' : Item(verb='Adding'),
+ 'A/mu' : Item(verb='Sending'),
+ 'A' : Item(verb='Sending'),
+ })
+ expected_status.add({
+ 'A/D/G/tauprime' : Item(status=' ', wc_rev=10),
+ })
+ expected_status.tweak('A', 'A/mu', wc_rev=10)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def reintegrate_branch_never_merged_to(sbox):
+ "merge --reintegrate on a never-updated branch"
+
+ # Make A_COPY branch in r2, and do a few more commits to A in r3-6.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ expected_disk, expected_status = set_up_branch(sbox)
+
+ # Make a change on the branch, to A_COPY/mu. Commit in r7.
+ svntest.main.file_write(sbox.ospath('A_COPY/mu'),
+ "Changed on the branch.")
+ expected_output = wc.State(wc_dir, {'A_COPY/mu' : Item(verb='Sending')})
+ expected_status.tweak('A_COPY/mu', wc_rev=7)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ expected_disk.tweak('A_COPY/mu', contents='Changed on the branch.')
+
+ # Update the wcs.
+ expected_output = wc.State(wc_dir, {})
+ expected_status.tweak(wc_rev='7')
+ svntest.actions.run_and_verify_update(wc_dir, expected_output,
+ expected_disk, expected_status,
+ check_props=True)
+
+ # Make another change on the branch: copy tau to tauprime. Commit
+ # in r8.
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ os.path.join(wc_dir, 'A_COPY', 'D', 'G',
+ 'tau'),
+ os.path.join(wc_dir, 'A_COPY', 'D', 'G',
+ 'tauprime'))
+ expected_output = wc.State(wc_dir, {
+ 'A_COPY/D/G/tauprime' : Item(verb='Adding')
+ })
+ expected_status.add({'A_COPY/D/G/tauprime': Item(status=' ', wc_rev=8)})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ expected_disk.add({
+ 'A_COPY/D/G/tauprime' : Item(contents="This is the file 'tau'.\n")
+ })
+
+ # Update the trunk (well, the whole wc) (since reintegrate really
+ # wants a clean wc).
+ expected_output = wc.State(wc_dir, {})
+ expected_status.tweak(wc_rev='8')
+ svntest.actions.run_and_verify_update(wc_dir, expected_output,
+ expected_disk, expected_status,
+ check_props=True)
+
+ # *finally*, actually run merge --reintegrate in trunk with the
+ # branch URL. This should bring in the mu change and the tauprime
+ # change.
+ A_path = sbox.ospath('A')
+ expected_output = wc.State(A_path, {
+ 'mu' : Item(status='U '),
+ 'D/G/tauprime' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(A_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_path, {
+ })
+ k_expected_status = wc.State(A_path, {
+ "B" : Item(status=' ', wc_rev=8),
+ "B/lambda" : Item(status=' ', wc_rev=8),
+ "B/E" : Item(status=' ', wc_rev=8),
+ "B/E/alpha" : Item(status=' ', wc_rev=8),
+ "B/E/beta" : Item(status=' ', wc_rev=8),
+ "B/F" : Item(status=' ', wc_rev=8),
+ "mu" : Item(status='M ', wc_rev=8),
+ "C" : Item(status=' ', wc_rev=8),
+ "D" : Item(status=' ', wc_rev=8),
+ "D/gamma" : Item(status=' ', wc_rev=8),
+ "D/G" : Item(status=' ', wc_rev=8),
+ "D/G/pi" : Item(status=' ', wc_rev=8),
+ "D/G/rho" : Item(status=' ', wc_rev=8),
+ "D/G/tau" : Item(status=' ', wc_rev=8),
+ "D/G/tauprime" : Item(status='A ', wc_rev='-', copied='+'),
+ "D/H" : Item(status=' ', wc_rev=8),
+ "D/H/chi" : Item(status=' ', wc_rev=8),
+ "D/H/omega" : Item(status=' ', wc_rev=8),
+ "D/H/psi" : Item(status=' ', wc_rev=8),
+ "" : Item(status=' M', wc_rev=8),
+ })
+ k_expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A_COPY:2-8'}),
+ 'B' : Item(),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/F' : Item(),
+ 'mu' : Item("Changed on the branch."),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/G/tauprime' : Item("This is the file 'tau'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/omega' : Item("New content"),
+ 'D/H/psi' : Item("New content"),
+ })
+ expected_skip = wc.State(A_path, {})
+ run_and_verify_reintegrate(A_path,
+ sbox.repo_url + '/A_COPY',
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ k_expected_disk,
+ k_expected_status,
+ expected_skip,
+ [], True, True)
+
+ # Finally, commit the result of the merge (r9).
+ expected_output = wc.State(wc_dir, {
+ 'A/D/G/tauprime' : Item(verb='Adding'),
+ 'A/mu' : Item(verb='Sending'),
+ 'A' : Item(verb='Sending'),
+ })
+ expected_status.add({
+ 'A/D/G/tauprime' : Item(status=' ', wc_rev=9),
+ })
+ expected_status.tweak('A', 'A/mu', wc_rev=9)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def reintegrate_fail_on_modified_wc(sbox):
+ "merge --reintegrate should fail in modified wc"
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ A_path = sbox.ospath('A')
+ A_COPY_path = sbox.ospath('A_COPY')
+ mu_path = os.path.join(A_path, "mu")
+ ignored_expected_disk, ignored_expected_status = set_up_branch(sbox)
+
+ # Do a 'sync' merge first so that the following merge really needs to be a
+ # reintegrate, so that an equivalent automatic merge would behave the same.
+ svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A', A_COPY_path)
+ sbox.simple_commit()
+
+ svntest.main.file_write(mu_path, "Changed on 'trunk' (the merge target).")
+ expected_skip = wc.State(wc_dir, {})
+ sbox.simple_update() # avoid mixed-revision error
+ run_and_verify_reintegrate(
+ A_path, sbox.repo_url + '/A_COPY', None, None, None,
+ None, None, expected_skip,
+ ".*Cannot merge into a working copy that has local modifications.*",
+ True, False)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def reintegrate_fail_on_mixed_rev_wc(sbox):
+ "merge --reintegrate should fail in mixed-rev wc"
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ A_path = sbox.ospath('A')
+ mu_path = os.path.join(A_path, "mu")
+ ignored_expected_disk, expected_status = set_up_branch(sbox)
+ # Make and commit a change, in order to get a mixed-rev wc.
+ svntest.main.file_write(mu_path, "Changed on 'trunk' (the merge target).")
+ expected_output = wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+ expected_status.tweak('A/mu', wc_rev=7)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ expected_skip = wc.State(wc_dir, {})
+ # Try merging into that same wc, expecting failure.
+ run_and_verify_reintegrate(
+ A_path, sbox.repo_url + '/A_COPY', None, None, None,
+ None, None, expected_skip,
+ ".*Cannot merge into mixed-revision working copy.*",
+ True, False)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def reintegrate_fail_on_switched_wc(sbox):
+ "merge --reintegrate should fail in switched wc"
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ A_path = sbox.ospath('A')
+ A_COPY_path = sbox.ospath('A_COPY')
+ G_path = os.path.join(A_path, "D", "G")
+ switch_url = sbox.repo_url + "/A/D/H"
+ expected_disk, expected_status = set_up_branch(sbox)
+
+ # Do a 'sync' merge first so that the following merge really needs to be a
+ # reintegrate, so that an equivalent automatic merge would behave the same.
+ expected_disk.tweak(
+ 'A_COPY/D/H/psi',
+ 'A_COPY/D/G/rho',
+ 'A_COPY/B/E/beta',
+ 'A_COPY/D/H/omega',
+ contents="New content")
+ expected_status.tweak(
+ 'A_COPY/D/H/psi',
+ 'A_COPY/D/G/rho',
+ 'A_COPY/B/E/beta',
+ 'A_COPY/D/H/omega',
+ 'A_COPY',
+ wc_rev=7)
+ svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A', A_COPY_path)
+ sbox.simple_commit()
+
+ # Switch a subdir of the target.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/pi' : Item(status='D '),
+ 'A/D/G/rho' : Item(status='D '),
+ 'A/D/G/tau' : Item(status='D '),
+ 'A/D/G/chi' : Item(status='A '),
+ 'A/D/G/psi' : Item(status='A '),
+ 'A/D/G/omega' : Item(status='A '),
+ })
+ expected_disk.remove('A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau')
+ expected_disk.add({
+ 'A/D/G/chi' : Item(contents="This is the file 'chi'.\n"),
+ 'A/D/G/psi' : Item(contents="New content"),
+ 'A/D/G/omega' : Item(contents="New content"),
+ })
+ expected_status.remove('A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau')
+ expected_status.add({
+ 'A/D/G' : Item(status=' ', wc_rev=7, switched='S'),
+ 'A/D/G/chi' : Item(status=' ', wc_rev=7),
+ 'A/D/G/psi' : Item(status=' ', wc_rev=7),
+ 'A/D/G/omega' : Item(status=' ', wc_rev=7),
+ })
+ svntest.actions.run_and_verify_switch(wc_dir,
+ G_path,
+ switch_url,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [],
+ False, '--ignore-ancestry')
+ sbox.simple_update() # avoid mixed-revision error
+ expected_skip = wc.State(wc_dir, {})
+ run_and_verify_reintegrate(
+ A_path, sbox.repo_url + '/A_COPY', None, None, None,
+ None, None, expected_skip,
+ ".*Cannot merge into a working copy with a switched subtree.*",
+ True, False)
+
+#----------------------------------------------------------------------
+# Test for issue #3603 'allow reintegrate merges into WCs with
+# missing subtrees'.
+@SkipUnless(server_has_mergeinfo)
+@Issue(3603)
+def reintegrate_on_shallow_wc(sbox):
+ "merge --reintegrate in shallow wc"
+
+ # Create a standard greek tree, branch A to A_COPY in r2.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ expected_disk, expected_status = set_up_branch(sbox, branch_only = True)
+
+ # Some paths we'll care about
+ A_path = sbox.ospath('A')
+ A_D_path = sbox.ospath('A/D')
+ mu_COPY_path = sbox.ospath('A_COPY/mu')
+ psi_COPY_path = sbox.ospath('A_COPY/D/H/psi')
+ A_COPY_path = sbox.ospath('A_COPY')
+
+ # r3 - Make a change on the A_COPY branch that will be
+ # reintegrated back to A.
+ svntest.main.file_write(mu_COPY_path, "branch work")
+ svntest.main.run_svn(None, 'commit', '-m',
+ 'Some work on the A_COPY branch', wc_dir)
+
+ # First try a reintegrate where the target WC has a shallow subtree
+ # that is not affected by the reintegrate. In this case we set the
+ # depth of A/D to empty. Since the only change made on the branch
+ # since the branch point is to A_COPY/mu, the reintegrate should
+ # simply work and update A/mu with the branch's contents.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'up',
+ '--set-depth', 'empty', A_D_path)
+ expected_output = wc.State(A_path, {
+ 'mu' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_path, {
+ })
+ expected_A_status = wc.State(A_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status='M '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '), # Don't expect anything under D,
+ # its depth is empty!
+ })
+ expected_A_status.tweak(wc_rev=3)
+ expected_A_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A_COPY:2-3'}),
+ 'B' : Item(),
+ 'mu' : Item("branch work"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(), # Don't expect anything under D, its depth is empty!
+ })
+ expected_A_skip = wc.State(A_path, {})
+ run_and_verify_reintegrate(A_path,
+ sbox.repo_url + '/A_COPY',
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_A_disk,
+ expected_A_status,
+ expected_A_skip,
+ [], 1, 1)
+
+ # Now revert the reintegrate and make a second change on the
+ # branch in r4, but this time change a subtree that corresponds
+ # to the missing (shallow) portion of the source. The reintegrate
+ # should still succeed.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+ svntest.main.file_write(psi_COPY_path, "more branch work")
+ svntest.main.run_svn(None, 'commit', '-m',
+ 'Some more work on the A_COPY branch', wc_dir)
+ # Reuse the same expectations as the prior merge, except for the mergeinfo
+ # on the target root that now includes the latest rev on the branch.
+ expected_mergeinfo_output.add({
+ 'D' : Item(status=' U')
+ })
+ expected_A_status.tweak('D', status=' M')
+ expected_A_disk.tweak('D', props={SVN_PROP_MERGEINFO : '/A_COPY/D:2-4*'})
+ # ... a depth-restricted item is skipped ...
+ expected_A_skip.add({
+ 'D/H' : Item(verb='Skipped missing target')
+ })
+ expected_output.add({
+ # Below the skip
+ 'D/H/psi' : Item(status=' ', treeconflict='U'),
+ })
+ # Currently this fails due to r1424469. For a full explanation see
+ # http://svn.haxx.se/dev/archive-2012-12/0472.shtml
+ # and http://svn.haxx.se/dev/archive-2012-12/0475.shtml
+ expected_A_disk.tweak('', props={SVN_PROP_MERGEINFO : '/A_COPY:2-4'})
+ run_and_verify_reintegrate(A_path,
+ sbox.repo_url + '/A_COPY',
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_A_disk,
+ expected_A_status,
+ expected_A_skip,
+ [], 1, 1)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def reintegrate_fail_on_stale_source(sbox):
+ "merge --reintegrate should fail on stale source"
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ expected_disk, expected_status = set_up_branch(sbox)
+ A_path = sbox.ospath('A')
+ mu_path = os.path.join(A_path, "mu")
+ svntest.main.file_append(mu_path, 'some text appended to mu\n')
+ svntest.actions.run_and_verify_svn(None, [], 'commit',
+ '-m', 'a change to mu', mu_path)
+ # Unmix the revisions in the working copy.
+ svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir)
+ # The merge --reintegrate succeeds but since there were no changes
+ # on A_COPY after it was branched the only result is updated mergeinfo
+ # on the reintegrate target.
+ expected_output = wc.State(A_path, {})
+ expected_mergeinfo_output = wc.State(A_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_path, {
+ })
+ expected_status = wc.State(A_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status=' '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=7)
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A_COPY:2-7'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\nsome text appended to mu\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State(A_path, { })
+ run_and_verify_reintegrate(A_path,
+ sbox.repo_url + '/A_COPY',
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def merge_file_with_space_in_its_path(sbox):
+ "merge a file with space in its path"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ some_dir = sbox.ospath('some dir')
+ file1 = os.path.join(some_dir, "file1")
+ file2 = os.path.join(some_dir, "file2")
+
+ # Make r2.
+ os.mkdir(some_dir)
+ svntest.main.file_append(file1, "Initial text in the file.\n")
+ svntest.main.run_svn(None, "add", some_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ "ci", "-m", "r2", wc_dir)
+
+ # Make r3.
+ svntest.main.run_svn(None, "copy", file1, file2)
+ svntest.actions.run_and_verify_svn(None, [],
+ "ci", "-m", "r3", wc_dir)
+
+ # Make r4.
+ svntest.main.file_append(file2, "Next line of text in the file.\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ "ci", "-m", "r4", wc_dir)
+
+ target_url = sbox.repo_url + '/some%20dir/file2'
+ run_reintegrate(target_url, file1)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def reintegrate_with_subtree_mergeinfo(sbox):
+ "merge --reintegrate with subtree mergeinfo"
+
+ # Create a standard greek tree, branch A to A_COPY in r2, A to A_COPY_2 in
+ # r3, A to A_COPY_3 in r4, and then make some changes under A in r5-8.
+ #
+ # A_COPY_3 4---------
+ # /
+ # A -1--------5-6-7-8-
+ # \ \
+ # A_COPY 2-\-----------
+ # \
+ # A_COPY_2 3---------
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ expected_disk, expected_status = set_up_branch(sbox, False, 3)
+
+ # Some paths we'll care about
+ gamma_COPY_3_path = sbox.ospath('A_COPY_3/D/gamma')
+ D_path = sbox.ospath('A/D')
+ gamma_path = sbox.ospath('A/D/gamma')
+ mu_COPY_2_path = sbox.ospath('A_COPY_2/mu')
+ mu_path = sbox.ospath('A/mu')
+ mu_COPY_path = sbox.ospath('A_COPY/mu')
+ A_COPY_path = sbox.ospath('A_COPY')
+ D_COPY_path = sbox.ospath('A_COPY')
+ beta_COPY_path = sbox.ospath('A_COPY/B/E/beta')
+ gamma_COPY_path = sbox.ospath('A_COPY/D/gamma')
+ gamma_moved_COPY_path = sbox.ospath('A_COPY/D/gamma_moved')
+ gamma_moved_path = sbox.ospath('A/D/gamma_moved')
+ rho_COPY_path = sbox.ospath('A_COPY/D/G/rho')
+ omega_COPY_path = sbox.ospath('A_COPY/D/H/omega')
+ psi_COPY_path = sbox.ospath('A_COPY/D/H/psi')
+ D_COPY_path = sbox.ospath('A_COPY/D')
+ alpha_COPY_path = sbox.ospath('A_COPY/B/E/alpha')
+ A_path = sbox.ospath('A')
+
+ # Now set up a situation where we try to reintegrate A_COPY back to A but
+ # both of these paths have subtree mergeinfo. Iff the mergeinfo on A_COPY
+ # reflects that the same revisions have been applied across all of A_COPY,
+ # then the reintegrate merge should succeed. We'll try that case first.
+ #
+ # A_COPY_3 4-------[9]--
+ # / \
+ # / \
+ # A -1--------5-6-7-8---10-------------------WC--
+ # \ \ (D) \ /reint.
+ # \ \ (mu) \ /
+ # A_COPY 2-\--------------------12---13--14------
+ # \ /
+ # \ /
+ # A_COPY_2 3-------------[11]--
+ #
+ # Key: [#] = cherry-picked revision; (foo) = merge of subtree 'foo'
+ # Note: These diagrams show an overview and do not capture every detail.
+
+ # r9 - Make a text change to A_COPY_3/D/gamma
+ svntest.main.file_write(gamma_COPY_3_path, "New content")
+ expected_output = wc.State(wc_dir, {'A_COPY_3/D/gamma' : Item(verb='Sending')})
+ expected_status.tweak('A_COPY_3/D/gamma', wc_rev=9)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # r10 - Merge r9 from A_COPY_3/D to A/D, creating explicit subtree
+ # mergeinfo under A. For this and every subsequent merge we update the WC
+ # first to allow full inheritance and elision.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [], 'up',
+ wc_dir)
+ expected_status.tweak(wc_rev=9)
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[9]],
+ ['U ' + gamma_path + '\n',
+ ' U ' + D_path + '\n',]),
+ [], 'merge', '-c9', sbox.repo_url + '/A_COPY_3/D', D_path)
+ expected_output = wc.State(wc_dir,
+ {'A/D' : Item(verb='Sending'),
+ 'A/D/gamma' : Item(verb='Sending')})
+ expected_status.tweak('A/D', 'A/D/gamma', wc_rev=10)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # r11 - Make a text change to A_COPY_2/mu
+ svntest.main.file_write(mu_COPY_2_path, "New content")
+ expected_output = wc.State(wc_dir, {'A_COPY_2/mu' : Item(verb='Sending')})
+ expected_status.tweak('A_COPY_2/mu', wc_rev=11)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # r12 - Merge r11 from A_COPY_2/mu to A_COPY/mu
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(11), [], 'up',
+ wc_dir)
+ expected_status.tweak(wc_rev=11)
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[11]],
+ ['U ' + mu_COPY_path + '\n',
+ ' U ' + mu_COPY_path + '\n',]),
+ [], 'merge', '-c11', sbox.repo_url + '/A_COPY_2/mu', mu_COPY_path)
+ expected_output = wc.State(wc_dir,
+ {'A_COPY/mu' : Item(verb='Sending')})
+ expected_status.tweak('A_COPY/mu', wc_rev=12)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # r13 - Do a 'synch' cherry harvest merge of all available revisions
+ # from A to A_COPY
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(12), [], 'up',
+ wc_dir)
+ expected_status.tweak(wc_rev=12)
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[2,12]],
+ ['U ' + beta_COPY_path + '\n',
+ 'U ' + gamma_COPY_path + '\n',
+ 'U ' + rho_COPY_path + '\n',
+ 'U ' + omega_COPY_path + '\n',
+ 'U ' + psi_COPY_path + '\n',
+ ' U ' + A_COPY_path + '\n',
+ ' U ' + D_COPY_path + '\n',
+ ' G ' + D_COPY_path + '\n',]),
+ [], 'merge', sbox.repo_url + '/A', A_COPY_path)
+ expected_output = wc.State(wc_dir,
+ {'A_COPY' : Item(verb='Sending'),
+ #'A_COPY/mu' : Item(verb='Sending'),
+ 'A_COPY/B/E/beta' : Item(verb='Sending'),
+ 'A_COPY/D' : Item(verb='Sending'),
+ 'A_COPY/D/G/rho' : Item(verb='Sending'),
+ 'A_COPY/D/H/omega' : Item(verb='Sending'),
+ 'A_COPY/D/H/psi' : Item(verb='Sending'),
+ 'A_COPY/D/gamma' : Item(verb='Sending')})
+ expected_status.tweak('A_COPY',
+ #'A_COPY/mu',
+ 'A_COPY/B/E/beta',
+ 'A_COPY/D',
+ 'A_COPY/D/G/rho',
+ 'A_COPY/D/H/omega',
+ 'A_COPY/D/H/psi',
+ 'A_COPY/D/gamma',
+ wc_rev=13)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # r14 - Make a text change on A_COPY/B/E/alpha
+ svntest.main.file_write(alpha_COPY_path, "New content")
+ expected_output = wc.State(wc_dir, {'A_COPY/B/E/alpha' : Item(verb='Sending')})
+ expected_status.tweak('A_COPY/B/E/alpha', wc_rev=14)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Now, reintegrate A_COPY to A. This should succeed.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(14), [], 'up',
+ wc_dir)
+ expected_status.tweak(wc_rev=14)
+ expected_output = wc.State(A_path, {
+ 'B/E/alpha' : Item(status='U '),
+ 'mu' : Item(status='UU'),
+ 'D' : Item(status=' U'),
+ })
+ expected_mergeinfo_output = wc.State(A_path, {
+ '' : Item(status=' U'),
+ 'mu' : Item(status=' G'),
+ 'D' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_path, {
+ })
+ expected_A_status = wc.State(A_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status='MM'),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status='M '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' M'),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ })
+ expected_A_status.tweak(wc_rev=14)
+ expected_A_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A_COPY:2-14'}),
+ 'B' : Item(),
+ 'mu' : Item("New content",
+ props={SVN_PROP_MERGEINFO :
+ '/A_COPY/mu:2-14\n/A_COPY_2/mu:11'}),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("New content"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(props=
+ {SVN_PROP_MERGEINFO : '/A_COPY/D:2-14\n/A_COPY_3/D:9'}),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("New content"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_A_skip = wc.State(A_COPY_path, {})
+ run_and_verify_reintegrate(A_path,
+ sbox.repo_url + '/A_COPY',
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_A_disk,
+ expected_A_status,
+ expected_A_skip,
+ [], 1, 1)
+
+ # Make some more changes to A_COPY so that the same revisions have *not*
+ # been uniformly applied from A to A_COPY. In this case the reintegrate
+ # merge should fail, but should provide a helpful message as to where the
+ # problems are.
+ #
+ # A_COPY_3 4-------[9]--
+ # / \
+ # / \ [-8]___
+ # A -1---------5-6-7-8---10----------------\-------WC--
+ # \ \ (D) \ \ /reint.
+ # \ \ (mu) \ \ /
+ # A_COPY 2-\--------------------12---13--14--15--------
+ # \ / (D)
+ # \ /
+ # A_COPY_2 3-------------[11]--
+
+ # First revert the previous reintegrate merge
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '-R', wc_dir)
+
+ # r15 - Reverse Merge r8 from A/D to A_COPY/D.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[-8]],
+ ['U ' + omega_COPY_path + '\n',
+ ' U ' + D_COPY_path + '\n',]),
+ [], 'merge', '-c-8', sbox.repo_url + '/A/D', D_COPY_path)
+ expected_output = wc.State(wc_dir,
+ {'A_COPY/D' : Item(verb='Sending'),
+ 'A_COPY/D/H/omega' : Item(verb='Sending')})
+ expected_status.tweak('A_COPY/D', 'A_COPY/D/H/omega', wc_rev=15)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Now reintegrate A_COPY back to A. Since A_COPY/D no longer has r8 merged
+ # to it from A, the merge should fail. Further we expect an error message
+ # that highlights the fact that A_COPY/D is the offending subtree.
+ #
+ # We want to know that the error provides specific information about the
+ # paths that are stopping --reintegrate from working.
+ run_reintegrate_expect_error(sbox.repo_url + '/A_COPY', A_path,
+ [],
+ "(svn: E195016: Reintegrate can only be used if "
+ "revisions 2 through 15 were previously "
+ "merged from .*/A to the reintegrate source, "
+ "but this is not the case:\n)"
+ "|( A_COPY/D\n)"
+ "|( Missing ranges: /A/D:8\n)"
+ "|( A_COPY/mu\n)"
+ "|( Missing ranges: /A/mu:2-12\n)"
+ "|(\n)")
+
+ # Test another common situation that can break reintegrate as a result
+ # of copies and moves:
+ #
+ # A) On our 'trunk' rename a subtree in such a way as the new
+ # subtree has explicit mergeinfo. Commit this rename as rev N.
+ #
+ # B) Synch merge the rename in A) to our 'branch' in rev N+1. The
+ # renamed subtree now has the same explicit mergeinfo on both
+ # the branch and trunk.
+ #
+ # C) Make some more changes on the renamed subtree in 'trunk' and
+ # commit in rev N+2.
+ #
+ # D) Synch merge the changes in C) from 'trunk' to 'branch' and commit in
+ # rev N+3. The renamed subtree on 'branch' now has additional explicit
+ # mergeinfo describing the synch merge from trunk@N+1 to trunk@N+2.
+ #
+ # E) Reintegrate 'branch' to 'trunk'.
+ #
+ # Step: A B C D E
+ # A_COPY_3 ---[9]--
+ # / \ (D/g.->
+ # / \ [-8]___ D/g.m.) (D/g.m.)
+ # A ------------10----------------\------16-------18--------WC
+ # \\ (D) \ \ \ \ /reint.
+ # \\ (mu) \ \ \ \ /
+ # A_COPY -\--------------12---13--14--15-------17-------19------
+ # \ / (D)
+ # \ /
+ # A_COPY_2 --------[11]--
+
+ # r16 - A) REPOS-to-REPOS rename of A/D/gamma to A/D/gamma_moved. Since
+ # r874258 WC-to-WC moves won't create mergeinfo on the dest if the source
+ # doesn't have any. So do a repos-to-repos move so explicit mergeinfo
+ # *is* created on the destination.
+ svntest.actions.run_and_verify_svn(None,[], 'move',
+ sbox.repo_url + '/A/D/gamma',
+ sbox.repo_url + '/A/D/gamma_moved',
+ '-m', 'REPOS-to-REPOS move'
+ )
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_status.tweak(wc_rev=16)
+ expected_status.remove('A/D/gamma')
+ expected_status.add({'A/D/gamma_moved' : Item(status=' ', wc_rev=16)})
+
+ # Why is gamma_moved notified as ' G' rather than ' U'? It was
+ # added by the merge and there is only a single editor drive, so
+ # how can any prop changes be merged to it? The answer is that
+ # the merge code does some quiet housekeeping, merging gamma_moved's
+ # inherited mergeinfo into its incoming mergeinfo, see
+ # http://subversion.tigris.org/issues/show_bug.cgi?id=4309
+ # This test is not covering issue #4309 so we let the current
+ # behavior pass.
+ # r17 - B) Synch merge from A to A_COPY
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[8], [13,16], [2,16]],
+ ['U ' + omega_COPY_path + '\n',
+ 'A ' + gamma_moved_COPY_path + '\n',
+ 'D ' + gamma_COPY_path + '\n',
+ ' U ' + A_COPY_path + '\n',
+ ' U ' + D_COPY_path + '\n',
+ ' G ' + gamma_moved_COPY_path + '\n']),
+ [], 'merge', sbox.repo_url + '/A', A_COPY_path)
+ expected_output = wc.State(
+ wc_dir,
+ {'A_COPY' : Item(verb='Sending'), # Mergeinfo update
+ 'A_COPY/D' : Item(verb='Sending'), # Mergeinfo update
+ 'A_COPY/D/gamma' : Item(verb='Deleting'),
+ 'A_COPY/D/gamma_moved' : Item(verb='Adding'),
+ 'A_COPY/D/H/omega' : Item(verb='Sending'), # Redoing r15's
+ # reverse merge of r8.
+ })
+ expected_status.remove('A_COPY/D/gamma')
+
+ expected_status.tweak('A_COPY',
+ 'A_COPY/D',
+ 'A_COPY/D/H/omega',
+ wc_rev=17)
+ expected_status.add({'A_COPY/D/gamma_moved' : Item(status=' ', wc_rev=17)})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # r18 - C) Text mod to A/D/gamma_moved
+ svntest.main.file_write(gamma_moved_path, "Even newer content")
+ expected_output = wc.State(wc_dir, {'A/D/gamma_moved' : Item(verb='Sending')})
+ expected_status.tweak('A/D/gamma_moved', wc_rev=18)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # r19 - D) Synch merge from A to A_COPY
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[17,18], [2,18]],
+ ['U ' + gamma_moved_COPY_path + '\n',
+ ' U ' + A_COPY_path + '\n',
+ ' U ' + D_COPY_path + '\n',
+ ' U ' + gamma_moved_COPY_path + '\n']),
+ [], 'merge', '--allow-mixed-revisions', sbox.repo_url + '/A', A_COPY_path)
+ expected_output = wc.State(
+ wc_dir,
+ {'A_COPY' : Item(verb='Sending'), # Mergeinfo update
+ 'A_COPY/D' : Item(verb='Sending'), # Mergeinfo update
+ 'A_COPY/D/gamma_moved' : Item(verb='Sending'), # Text change
+ })
+ expected_status.tweak('A_COPY',
+ 'A_COPY/D',
+ 'A_COPY/D/gamma_moved',
+ wc_rev=19)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Reintegrate A_COPY to A, this should work since
+ # A_COPY/D/gamma_moved's natural history,
+ #
+ # /A/D/gamma:1-15
+ # /A/D/gamma_moved:16
+ # /A_COPY/D/gamma_moved:17-19
+ #
+ # shows that it is fully synched up with trunk.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(19), [], 'up',
+ wc_dir)
+ expected_output = wc.State(A_path, {
+ 'B/E/alpha' : Item(status='U '),
+ 'mu' : Item(status='UU'),
+ 'D' : Item(status=' U'),
+ 'D/gamma_moved' : Item(status=' U'),
+ })
+ expected_mergeinfo_output = wc.State(A_path, {
+ '' : Item(status=' U'),
+ 'mu' : Item(status=' G'),
+ 'D' : Item(status=' U'),
+ 'D/gamma_moved' : Item(status=' G'), # More issue #4309 (see above)
+ })
+ expected_elision_output = wc.State(A_path, {
+ })
+ expected_A_status = wc.State(A_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status='MM'),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status='M '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' M'),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma_moved' : Item(status=' M'),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ })
+ expected_A_status.tweak(wc_rev=19)
+ expected_A_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A_COPY:2-19'}),
+ 'B' : Item(),
+ 'mu' : Item("New content",
+ props={SVN_PROP_MERGEINFO :
+ '/A_COPY/mu:2-19\n/A_COPY_2/mu:11'}),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("New content"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(props={SVN_PROP_MERGEINFO :
+ '/A_COPY/D:2-19\n/A_COPY_3/D:9'}),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ # What's with all this mergeinfo?
+ #
+ # '/A/D/gamma_moved:2-7,9-12' - Incoming from the merge source. Yes,
+ # this mergeinfo describes non-existent path-revs, this is the effect
+ # of issue #3669 'inheritance can result in mergeinfo describing
+ # nonexistent sources', but there is already a test for that issue so
+ # we tolerate it here.
+ #
+ # '/A_COPY/D/gamma_moved:17-19' - Describes the merge performed.
+ #
+ # '/A_COPY_3/D/gamma:9' - Explicit prior to the merge.
+ #
+ #'/A_COPY_3/D/gamma_moved:9' - Incoming from the merge source.
+ # For the curious, this was originally created in r17 when we merged
+ # ^/A to A_COPY. This merge added A_COPY/D/gamma_moved, which had
+ # explicit mergeinfo and due to issue #4309 'wrong notification and
+ # bogus mergeinfo during merge which adds subtree with mergeinfo'
+ # this file inherited this bogus mergeinfo from A_COPY/D. Yes, this
+  # is all quite ugly as the intersection of multiple known issues
+ # is likely to be. However, given that none of this mergeinfo is
+ # particularly harmful and that this test is *not* about issues #3669
+  # or #4309, we are tolerating it.
+ 'D/gamma_moved' : Item(
+ "Even newer content", props={SVN_PROP_MERGEINFO :
+ '/A/D/gamma_moved:2-7,9-12\n'
+ '/A_COPY/D/gamma_moved:17-19\n'
+ '/A_COPY_3/D/gamma:9\n'
+ '/A_COPY_3/D/gamma_moved:9'}),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_A_skip = wc.State(A_COPY_path, {})
+ run_and_verify_reintegrate(A_path,
+ sbox.repo_url + '/A_COPY',
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_A_disk,
+ expected_A_status,
+ expected_A_skip,
+ [], 1, 1)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def multiple_reintegrates_from_the_same_branch(sbox):
+ "multiple reintegrates create self-referential"
+
+ # Make A_COPY branch in r2, and do a few more commits to A in r3-6.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ expected_disk, expected_status = set_up_branch(sbox)
+
+ # Some paths we'll care about
+ A_path = sbox.ospath('A')
+ mu_path = sbox.ospath('A/mu')
+ A_COPY_path = sbox.ospath('A_COPY')
+ psi_COPY_path = sbox.ospath('A_COPY/D/H/psi')
+ Feature_branch_path = sbox.ospath('A_FEATURE_BRANCH')
+ Feature_beta_path = os.path.join(wc_dir, "A_FEATURE_BRANCH", "B", "E",
+ "beta")
+
+ # Create a feature branch and do multiple reintegrates from the branch
+ # without deleting and recreating it. We don't recommend doing this,
+ # but regardless, it shouldn't create self-referential mergeinfo on
+ # the reintegrate target.
+ #
+ # r7 - Create the feature branch.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', A_path, Feature_branch_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'Make a feature branch',
+ wc_dir)
+
+ # r8 - Make a change under 'A'.
+ svntest.main.file_write(mu_path, "New trunk content.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ "A text change under 'A'",
+ wc_dir)
+
+ # r9 - Make a change on the feature branch.
+ svntest.main.file_write(Feature_beta_path, "New branch content.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ "A text change on the feature branch",
+ wc_dir)
+
+ # r10 - Sync merge all changes from 'A' to the feature branch.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ sbox.repo_url + '/A',
+ Feature_branch_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ "Sync merge 'A' to feature branch",
+ wc_dir)
+
+ # r11 - Reintegrate the feature branch back to 'A'.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ run_reintegrate(sbox.repo_url + '/A_FEATURE_BRANCH', A_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ "Reintegrate feature branch back to 'A'",
+ wc_dir)
+
+ # r12 - Do a --record-only merge from 'A' to the feature branch so we
+ # don't try to merge r11 from trunk during the next sync merge.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'merge', '-c11',
+ '--record-only',
+ sbox.repo_url + '/A',
+ Feature_branch_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ "Sync merge 'A' to feature branch",
+ wc_dir)
+
+ # r13 - Make another change on the feature branch.
+ svntest.main.file_write(Feature_beta_path, "Even newer branch content.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ "Different text on the feature branch",
+ wc_dir)
+
+ # r14 - Sync merge all changes from 'A' to the feature branch in
+ # preparation for a second reintegrate from this branch.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ sbox.repo_url + '/A',
+ Feature_branch_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ "2nd Sync merge 'A' to feature branch",
+ wc_dir)
+
+ # r15 - Reintegrate the feature branch back to 'A' a second time.
+ # No self-referential mergeinfo should be applied on 'A'.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_output = wc.State(A_path, {
+ #'' : Item(status=' U'), #<-- no self-referential mergeinfo applied!
+ 'B/E/beta' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_path, {
+ })
+ expected_status = wc.State(A_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status=' '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status='M '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=14)
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO :
+ # Prior to r????? we'd get this
+ # self-referential mergeinfo:
+ #'/A:2-6\n/A_FEATURE_BRANCH:7-14'}),
+ '/A_FEATURE_BRANCH:7-14'}),
+ 'B' : Item(),
+ 'mu' : Item("New trunk content.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("Even newer branch content.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State(A_path, { })
+ run_and_verify_reintegrate(A_path,
+ sbox.repo_url + '/A_FEATURE_BRANCH',
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], 1, 1)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ "2nd Reintegrate feature branch back to 'A'",
+ wc_dir)
+
+ # Demonstrate the danger of any self-referential mergeinfo on trunk.
+ #
+ # Merge all available revisions except r3 from 'A' to 'A_COPY'.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'merge', '-r3:HEAD',
+ sbox.repo_url + '/A',
+ A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ "Merge -r3:HEAD from 'A' to 'A_COPY'",
+ wc_dir)
+ # No self-referential mergeinfo should have been carried on 'A_COPY' from
+ # 'A' that would prevent the following merge from being operative.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[2,3],[2,16]],
+ ['U ' + psi_COPY_path + '\n',
+ ' U ' + A_COPY_path + '\n',]),
+ [], 'merge', '--allow-mixed-revisions', sbox.repo_url + '/A', A_COPY_path)
+
+#----------------------------------------------------------------------
+# Test for a reintegrate bug which can occur when the merge source
+# has mergeinfo that explicitly describes common history with the reintegrate
+# target, see http://svn.haxx.se/dev/archive-2009-12/0338.shtml
+#
+# Also tests Issue #3591 'reintegrate merges update subtree mergeinfo
+# unconditionally'.
+@SkipUnless(server_has_mergeinfo)
+@Issue(3591)
+def reintegrate_with_self_referential_mergeinfo(sbox):
+ "source has target's history as explicit mergeinfo"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make some changes under 'A' in r2-5.
+ wc_disk, wc_status = set_up_branch(sbox, nbr_of_branches=0)
+
+ # Some paths we'll care about
+ A_path = sbox.ospath('A')
+ A2_path = sbox.ospath('A2')
+ A2_B_path = sbox.ospath('A2/B')
+ A2_1_path = sbox.ospath('A2.1')
+ A2_1_mu_path = sbox.ospath('A2.1/mu')
+
+ # r6 Copy A to A2 and then manually set some self-referential mergeinfo on
+ # A2/B and A2.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(5), [],
+ 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', A_path, A2_path)
+  # /A:3 describes A2's natural history, a.k.a. its implicit mergeinfo, so
+ # it is self-referential. Same for /A/B:4 and A2/B. Normally this is
+ # redundant but not harmful.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'svn:mergeinfo', '/A:3', A2_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'svn:mergeinfo', '/A/B:4', A2_B_path)
+ svntest.actions.run_and_verify_svn(
+ None, [], 'ci', '-m',
+ 'copy A to A2 and set some self-referential mergeinfo on the latter.',
+ wc_dir)
+
+ # r7 Copy A2 to A2.1
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', A2_path, A2_1_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci',
+ '-m', 'copy A2to A2.1.', wc_dir)
+
+ # r8 Make a change on A2.1/mu
+ svntest.main.file_write(A2_1_mu_path, 'New A2.1 stuff')
+ svntest.actions.run_and_verify_svn(None, [], 'ci',
+ '-m', 'Work done on the A2.1 branch.',
+ wc_dir)
+
+ # Update to uniform revision and reintegrate A2.1 back to A2.
+ # Note that the mergeinfo on A2/B is not changed by the reintegration
+  # and so is not expected to be updated to describe the merge.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [],
+ 'up', wc_dir)
+ expected_output = wc.State(A2_path, {
+ 'mu' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A2_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A2_path, {
+ })
+ expected_status = wc.State(A2_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status='M '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=8)
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:3\n/A2.1:7-8'}),
+ 'B' : Item(props={SVN_PROP_MERGEINFO : '/A/B:4'}),
+ 'mu' : Item("New A2.1 stuff"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State(A2_path, { })
+ # Previously failed with this error:
+ #
+ # svn merge ^/A2.1" A2 --reintegrate
+ # ..\..\..\subversion\svn\merge-cmd.c:349: (apr_err=160013)
+ # ..\..\..\subversion\libsvn_client\merge.c:9219: (apr_err=160013)
+ # ..\..\..\subversion\libsvn_client\ra.c:728: (apr_err=160013)
+ # ..\..\..\subversion\libsvn_client\mergeinfo.c:733: (apr_err=160013)
+ # ..\..\..\subversion\libsvn_client\ra.c:526: (apr_err=160013)
+ # ..\..\..\subversion\libsvn_repos\rev_hunt.c:908: (apr_err=160013)
+ # ..\..\..\subversion\libsvn_repos\rev_hunt.c:607: (apr_err=160013)
+ # ..\..\..\subversion\libsvn_fs_fs\tree.c:2886: (apr_err=160013)
+ # ..\..\..\subversion\libsvn_fs_fs\tree.c:669: (apr_err=160013)
+ # svn: File not found: revision 4, path '/A2'
+ run_and_verify_reintegrate(A2_path,
+ sbox.repo_url + '/A2.1',
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], 1, 0)
+
+#----------------------------------------------------------------------
+# Test for issue #3577 '1.7 subtree mergeinfo recording breaks reintegrate'
+# and issue #4329 'automatic merge uses reintegrate type merge if source is
+# fully synced'.
+@Issue(3577,4329)
+@SkipUnless(server_has_mergeinfo)
+def reintegrate_with_subtree_merges(sbox):
+ "reintegrate with prior subtree merges to source"
+
+ # Create a standard greek tree, branch A to A_COPY in r2, and make
+ # some changes under A in r3-6.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ expected_disk, expected_status = set_up_branch(sbox)
+
+ # Some paths we'll care about
+ A_path = sbox.ospath('A')
+ psi_path = sbox.ospath('A/D/H/psi')
+ mu_COPY_path = sbox.ospath('A_COPY/mu')
+ A_COPY_path = sbox.ospath('A_COPY')
+ B_COPY_path = sbox.ospath('A_COPY/B')
+ rho_COPY_path = sbox.ospath('A_COPY/D/G/rho')
+ H_COPY_path = sbox.ospath('A_COPY/D/H')
+
+ # r7 - Make a change on the A_COPY branch that will be
+ # reintegrated back to A.
+ svntest.main.file_write(mu_COPY_path, "branch work")
+ svntest.main.run_svn(None, 'commit', '-m',
+ 'Some work on the A_COPY branch', wc_dir)
+
+ # Update the WC to a uniform revision, then merge all of the changes
+ # from A to A_COPY, but do it via subtree merges so the mergeinfo
+  # record of the merges isn't neatly reflected in the root of the
+ # branch. Commit the merge as r8.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'merge', '-c5',
+ sbox.repo_url + '/A/B',
+ B_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'merge', '-c4',
+ sbox.repo_url + '/A/D/G/rho',
+ rho_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'merge', '-c3',
+ sbox.repo_url + '/A/D/H',
+ H_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'merge', '-c6',
+ sbox.repo_url + '/A',
+ A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'commit', '-m',
+ 'Merge everything from A to A_COPY',
+ wc_dir)
+
+ # Now update the WC and try to reintegrate. Since we really have merged
+ # everything from A to A_COPY, even though it was done via subtree merges,
+ # the reintegrate should succeed. Previously it failed because the naive
+ # interpretation of the mergeinfo on A_COPY didn't reflect that it was
+ # fully synced with A, resulting in this error:
+ #
+ # svn merge ^/A_COPY A --reintegrate
+ # ..\..\..\subversion\svn\merge-cmd.c:358: (apr_err=195016)
+ # ..\..\..\subversion\libsvn_client\merge.c:9318: (apr_err=195016)
+ # svn: Reintegrate can only be used if revisions 2 through 7 were
+ # previously merged from file:///C%3A/SVN/src-trunk-2/Debug/subversion
+ # /tests/cmdline/svn-test-work/repositories/merge_tests-142/A to the
+ # reintegrate source, but this is not the case:
+ # A_COPY
+ # Missing ranges: /A:2-5
+ # A_COPY/B
+ # Missing ranges: /A/B:2-4,6
+ # A_COPY/D/G/rho
+ # Missing ranges: /A/D/G/rho:2-3,5-6
+ # A_COPY/D/H
+ # Missing ranges: /A/D/H:2,4-5
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_output = wc.State(A_path, {
+ 'mu' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_path, {
+ })
+ expected_A_status = wc.State(A_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status='M '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ })
+ expected_A_status.tweak(wc_rev=8)
+ expected_A_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A_COPY:2-8'}),
+ 'B' : Item(),
+ 'mu' : Item("branch work"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_A_skip = wc.State(A_COPY_path, {})
+ run_and_verify_reintegrate(A_path,
+ sbox.repo_url + '/A_COPY',
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_A_disk,
+ expected_A_status,
+ expected_A_skip,
+ [], 1, 1)
+
+ # Test issue #4329. Revert previous merge and commit a new edit to
+ # A/D/H/psi. Attempt the same merge without the --reintegrate option.
+ # It should succeed because the automatic merge code should detect that
+ # a reintegrate-style merge is required, that merge should succeed and
+  # there should be no conflict on A/D/H/psi.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+ svntest.main.file_write(psi_path, "Non-conflicting trunk edit.\n")
+ svntest.main.run_svn(None, 'commit', '-m',
+ 'An edit on trunk prior to reintegrate.', wc_dir)
+ sbox.simple_update()
+ expected_A_status.tweak(wc_rev=9)
+ expected_A_disk.tweak('', props={SVN_PROP_MERGEINFO: '/A_COPY:2-9'})
+ expected_A_disk.tweak('D/H/psi', contents='Non-conflicting trunk edit.\n')
+ svntest.actions.run_and_verify_merge(A_path, None, None,
+ sbox.repo_url + '/A_COPY', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_A_disk, expected_A_status,
+ expected_A_skip,
+ [], True, False, A_path)
+
+#----------------------------------------------------------------------
+# Test for issue #3654 'added subtrees with mergeinfo break reintegrate'.
+@SkipUnless(server_has_mergeinfo)
+@Issue(3654)
+def added_subtrees_with_mergeinfo_break_reintegrate(sbox):
+ "added subtrees with mergeinfo break reintegrate"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ A_path = sbox.ospath('A')
+ nu_path = sbox.ospath('A/C/nu')
+ mu_path = sbox.ospath('A/mu')
+ A_COPY_path = sbox.ospath('A_COPY')
+ lambda_COPY_path = sbox.ospath('A_COPY/B/lambda')
+ A_COPY_2_path = sbox.ospath('A_COPY_2')
+ nu_COPY_2_path = sbox.ospath('A_COPY_2/C/nu')
+
+ # Branch A@1 to A_COPY and A_COPY_2 in r2 and r3 respectively.
+ # Make some changes under 'A' in r4-7.
+ wc_disk, wc_status = set_up_branch(sbox, nbr_of_branches=2)
+
+ # r8 - Add a new file A_COPY_2/C/nu.
+ svntest.main.file_write(nu_COPY_2_path, "This is the file 'nu'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'add', nu_COPY_2_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci',
+ '-m', 'Add new file in A_COPY_2 branch',
+ wc_dir)
+
+
+ # r9 - Cyclic cherry pick merge r8 from A_COPY_2 back to A.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'merge', '-c', '8',
+ sbox.repo_url + '/A_COPY_2',
+ A_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci',
+ '-m', 'Merge r8 from A_COPY_2 to A.',
+ wc_dir)
+
+ # r10 - Make an edit to A_COPY_2/C/nu.
+ svntest.main.file_write(nu_COPY_2_path, "A_COPY_2 edit to file 'nu'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'ci',
+ '-m', 'Edit new file on A_COPY_2 branch',
+ wc_dir)
+
+ # r11 - Cyclic subtree cherry pick merge r10 from A_COPY_2/C/nu
+ # back to A/C/nu.
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'merge', '-c', '10',
+ sbox.repo_url + '/A_COPY_2/C/nu',
+ nu_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Merge r8 from A_COPY_2/C/nu to A/C/nu.',
+ wc_dir)
+
+ # r12 - Edit under A_COPY.
+ svntest.main.file_write(mu_path, "mu edits on A_COPY.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'ci',
+ '-m', 'Work on A_COPY branch.',
+ wc_dir)
+
+ # r13 - Sync merge A to A_COPY in preparation for reintegrate.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'merge', sbox.repo_url + '/A', A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Prep for reintegrate: Sync A to A_COPY.',
+ wc_dir)
+
+ # r14 - Reintegrate A_COPY to A.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ run_reintegrate(sbox.repo_url + '/A_COPY', A_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Reintegrate A_COPY to A.',
+ wc_dir)
+
+ # r15 - Delete A_COPY.
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'delete', A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Delete A_COPY branch', wc_dir)
+
+ # r16 - Create new A_COPY from A@HEAD=15.
+ #
+ # Update so we copy HEAD:
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'copy', A_path, A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Create new A_COPY branch from A', wc_dir)
+
+ # r17 - Unrelated edits under both A and A_COPY.
+ svntest.main.file_write(nu_path, "Trunk work on nu.\n")
+ svntest.main.file_write(lambda_COPY_path, "lambda edit on A_COPY.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Unrelated edits on A and A_COPY branch.',
+ wc_dir)
+
+ # r18 - Sync A to A_COPY in preparation for another reintegrate.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'merge', sbox.repo_url + '/A', A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Prep for reintegrate: Sync A to A_COPY.',
+ wc_dir)
+
+ # Reintegrate A_COPY back to A. We just synced A_COPY with A, so this
+ # should work. The only text change should be the change made to
+ # A_COPY/B/lambda in r17 after the new A_COPY was created.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_output = wc.State(A_path, {
+ '' : Item(status=' U'),
+ 'B/lambda' : Item(status='U '),
+ 'C/nu' : Item(status=' U'),
+ })
+ expected_mergeinfo_output = wc.State(A_path, {
+ '' : Item(status=' U'),
+ 'C/nu' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_path, {
+ })
+ expected_status = wc.State(A_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status=' '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/lambda' : Item(status='M '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'C/nu' : Item(status=' M'),
+ 'D' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=18)
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO :
+ '/A_COPY:2-13,16-18\n'
+ # ^ ^
+ # | |
+ # from _| |
+ # 1st |
+ # reintegrate |
+ # |
+ # from this reintegrate
+ #
+ '/A_COPY_2:8'}), # <-- From cyclic merge in r9
+ 'B' : Item(),
+ 'mu' : Item("mu edits on A_COPY.\n"), # From earlier reintegrate.
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("lambda edit on A_COPY.\n"), # From this reintegrate.
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'C/nu' : Item("Trunk work on nu.\n",
+ props={SVN_PROP_MERGEINFO :
+ '/A_COPY/C/nu:13,16-18\n'
+ '/A_COPY_2/C/nu:10'}), # <-- From cyclic
+ # merge in r11
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State(A_COPY_path, {})
+ run_and_verify_reintegrate(A_path,
+ sbox.repo_url + '/A_COPY',
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], 1, 1)
+
+#----------------------------------------------------------------------
+# Test for issue #3648 '2-URL merges incorrectly reverse-merge mergeinfo
+# for merge target'.
+@SkipUnless(server_has_mergeinfo)
+@Issue(3648)
+def two_URL_merge_removes_valid_mergeinfo_from_target(sbox):
+ "2-URL merge removes valid mergeinfo from target"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ lambda_COPY_path = sbox.ospath('A_COPY/B/lambda')
+ mu_path = sbox.ospath('A/mu')
+ A_COPY_path = sbox.ospath('A_COPY')
+ A_COPY_2_path = sbox.ospath('A_COPY_2')
+
+ # Branch A@1 to A_COPY r2
+ # Branch A@1 to A_COPY_2 in r3.
+ # Make some changes under 'A' in r4-7.
+ wc_disk, wc_status = set_up_branch(sbox, nbr_of_branches=2)
+
+ # r8 - A simple text edit on the A_COPY branch.
+ svntest.main.file_write(lambda_COPY_path, "Edit on 'branch 1'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'ci',
+ '-m', "Work on 'branch 1'.",
+ wc_dir)
+
+  # r9 - Sync the A_COPY branch with A up to HEAD (r8). Now A_COPY
+ # differs from A only by the change made in r8 and by the mergeinfo
+ # '/A:2-8' on A_COPY which was set to describe the merge.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'merge', sbox.repo_url + '/A', A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci',
+ '-m', 'Sync A to A_COPY.',
+ wc_dir)
+
+ # r10 - A simple text edit on our "trunk" A.
+ svntest.main.file_write(mu_path, "Edit on 'trunk'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'ci',
+ '-m', "Work on 'trunk'",
+ wc_dir)
+
+ # r11 - Sync the A_COPY_2 branch with A up to HEAD (r10).
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'merge', sbox.repo_url + '/A',
+ A_COPY_2_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci',
+ '-m', 'Sync A to A_COPY_2.',
+ wc_dir)
+
+ # Confirm that the mergeinfo on each branch is what we expect.
+ svntest.actions.run_and_verify_svn([A_COPY_path + ' - /A:2-8\n'],
+ [], 'pg', SVN_PROP_MERGEINFO,
+ '-R', A_COPY_path)
+ svntest.actions.run_and_verify_svn([A_COPY_2_path + ' - /A:3-10\n'],
+ [], 'pg', SVN_PROP_MERGEINFO,
+ '-R', A_COPY_2_path)
+
+ # Now say we want to apply the changes made on the first branch (A_COPY)
+ # to the second branch (A_COPY_2). One way to do this is a 2-URL merge
+ # between A at the revision last synced to A_COPY and A_COPY_2 at HEAD (r11),
+ # i.e.:
+ #
+ # svn merge ^/A@8 ^/A_COPY@11 A_COPY_2_WC
+ #
+ # Recall from the note on r9 that this diff is simply the one text change
+ # made on branch 1 and some mergeinfo:
+ #
+ # >svn diff ^/A@8 ^/A_COPY@11
+ # Index: B/lambda
+ # ===================================================================
+ # --- B/lambda (.../A) (revision 8)
+ # +++ B/lambda (.../A_COPY) (revision 11)
+ # @@ -1 +1 @@
+ # -This is the file 'lambda'.
+ # +Edit on 'branch 1'.
+ #
+ # Property changes on: .
+ # ___________________________________________________________________
+ # Added: svn:mergeinfo
+ # Merged /A:r2-8
+ #
+ # The mergeinfo diff is already represented in A_COPY_2's mergeinfo, so the
+ # result of the merge should be the text change to lambda and the addition
+ # of mergeinfo showing that the history of A_COPY is now part of A_COPY_2,
+ # i.e. '/A_COPY:2-11'
+ #
+ # Before issue #3648 was fixed this test failed because the valid mergeinfo
+ # '/A:r3-8' on A_COPY_2 was removed by the merge.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_output = wc.State(A_COPY_2_path, {
+ '' : Item(status=' G'),
+ 'B/lambda' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_2_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_2_path, {
+ })
+ expected_status = wc.State(A_COPY_2_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status=' '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/lambda' : Item(status='M '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=11)
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO :
+ '/A:3-10\n/A_COPY:2-11'}),
+ 'B' : Item(),
+ 'mu' : Item("Edit on 'trunk'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("Edit on 'branch 1'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State(A_COPY_path, {})
+ svntest.actions.run_and_verify_merge(A_COPY_2_path, 8, 11,
+ sbox.repo_url + '/A',
+ sbox.repo_url + '/A_COPY',
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+
+#----------------------------------------------------------------------
+# Test for issue #3867 'reintegrate merges create mergeinfo for
+# non-existent paths'.
+@SkipUnless(server_has_mergeinfo)
+@Issue(3867)
+def reintegrate_creates_bogus_mergeinfo(sbox):
+ "reintegrate creates bogus mergeinfo"
+
+ sbox.build()
+ wc_dir=sbox.wc_dir
+
+ mu_path = sbox.ospath('A/mu')
+ lambda_path = sbox.ospath('A/B/lambda')
+ alpha_path = sbox.ospath('A/B/E/alpha')
+ beta_path = sbox.ospath('A/B/E/beta')
+ A_path = sbox.ospath('A')
+ A_path_1 = sbox.ospath('A@1')
+ A_COPY_path = sbox.ospath('A_COPY')
+ A_COPY_psi_path = sbox.ospath('A_COPY/D/H/psi')
+ A_COPY_url = sbox.repo_url + "/A_COPY"
+
+ # Make 2 commits under /A pushing the repo to rev3
+
+ svntest.main.file_write(mu_path, "New content.\n")
+ svntest.main.run_svn(None, "ci", "-m", "simple text edit", wc_dir)
+ svntest.main.file_write(lambda_path, "New content.\n")
+ svntest.main.run_svn(None, "ci", "-m", "simple text edit", wc_dir)
+
+ # Branch A@1 as A_COPY in revision 4
+
+ svntest.main.run_svn(None, "cp", A_path_1, A_COPY_path)
+ svntest.main.run_svn(None, "ci", "-m", "create a branch", wc_dir)
+
+ # Make a text edit on the branch pushing the repo to r5
+ svntest.main.file_write(A_COPY_psi_path, "Branch edit.\n")
+ svntest.main.run_svn(None, "ci", "-m", "branch edit", wc_dir)
+
+ # Sync the A_COPY with A in preparation for reintegrate and commit as r6.
+ svntest.main.run_svn(None, "up", wc_dir)
+ svntest.main.run_svn(None, "merge", sbox.repo_url + "/A", A_COPY_path)
+ svntest.main.run_svn(None, "ci", "-m", "sync A_COPY with A", wc_dir)
+
+ # Update the working copy to allow the merge
+ svntest.main.run_svn(None, "up", wc_dir)
+
+ # Reintegrate A_COPY to A. The resulting merginfo on A should be
+ # /A_COPY:4-6
+ expected_output = wc.State(A_path, {
+ 'D/H/psi' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_path, {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO :
+ '/A_COPY:4-6'}),
+ 'B' : Item(),
+ 'mu' : Item("New content.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("New content.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("Branch edit.\n"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_path, {})
+
+ run_and_verify_reintegrate(A_path,
+ A_COPY_url,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk, None, expected_skip,
+ [],
+ 1, 1)
+
+
+#----------------------------------------------------------------------
+# Test for regression on 1.6.x branch, merge fails when source without
+# subtree mergeinfo is reintegrated into a target with subtree
+# mergeinfo. Deliberately written in a style that works with the 1.6
+# testsuite.
+@SkipUnless(server_has_mergeinfo)
+@Issue(3957)
+def no_source_subtree_mergeinfo(sbox):
+ "source without subtree mergeinfo"
+
+ sbox.build()
+ wc_dir=sbox.wc_dir
+
+ svntest.main.file_write(sbox.ospath('A/B/E/alpha'),
+ 'AAA\n' +
+ 'X\n' +
+ 'BBB\n' +
+ 'Y\n' +
+ 'CCC\n')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Create branch-1
+ svntest.main.run_svn(None, 'copy',
+ sbox.ospath('A/B'),
+ sbox.ospath('A/B1'))
+ sbox.simple_commit()
+
+ # Create branch-1
+ svntest.main.run_svn(None, 'copy',
+ sbox.ospath('A/B'),
+ sbox.ospath('A/B2'))
+ sbox.simple_commit()
+
+ # Change on trunk
+ svntest.main.file_write(sbox.ospath('A/B/E/alpha'),
+ 'AAAxx\n' +
+ 'X\n' +
+ 'BBB\n' +
+ 'Y\n' +
+ 'CCC\n')
+ sbox.simple_commit()
+
+ # Change on branch-1
+ svntest.main.file_write(sbox.ospath('A/B1/E/alpha'),
+ 'AAA\n' +
+ 'X\n' +
+ 'BBBxx\n' +
+ 'Y\n' +
+ 'CCC\n')
+ sbox.simple_commit()
+
+ # Change on branch-2
+ svntest.main.file_write(sbox.ospath('A/B2/E/alpha'),
+ 'AAA\n' +
+ 'X\n' +
+ 'BBB\n' +
+ 'Y\n' +
+ 'CCCxx\n')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Merge trunk to branch-1
+ # svntest.main.run_svn(None, 'merge', '^/A/B', sbox.ospath('A/B1'))
+ A_B1 = sbox.ospath('A/B1')
+ expected_output = wc.State(A_B1, {
+ 'E/alpha' : Item(status='U '),
+ })
+ expected_skip = wc.State(A_B1, { })
+ svntest.actions.run_and_verify_merge(A_B1, None, None, '^/A/B', None,
+ expected_output, None, None, None, None,
+ expected_skip, [])
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Reintegrate branch-1 subtree to trunk subtree
+ run_reintegrate('^/A/B1/E', sbox.ospath('A/B/E'))
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Merge trunk to branch-2
+ #svntest.main.run_svn(None, 'merge', '^/A/B', sbox.ospath('A/B2'))
+ A_B2 = sbox.ospath('A/B2')
+ expected_output = wc.State(A_B2, {
+ 'E' : Item(status=' U'),
+ 'E/alpha' : Item(status='U '),
+ })
+ expected_skip = wc.State(A_B1, { })
+ svntest.actions.run_and_verify_merge(A_B2, None, None, '^/A/B', None,
+ expected_output, None, None, None, None,
+ expected_skip, [])
+ sbox.simple_commit()
+ svntest.main.run_svn(None, 'update', wc_dir)
+
+ # Reverse merge branch-1 subtree to branch-2 subtree, this removes
+ # the subtree mergeinfo from branch 2
+ #svntest.main.run_svn(None, 'merge', '-r8:2',
+ # '^/A/B1/E', sbox.ospath('A/B2/E'))
+ A_B2_E = sbox.ospath('A/B2/E')
+ expected_output = wc.State(A_B2_E, {
+ 'alpha' : Item(status='U '),
+ })
+ expected_skip = wc.State(A_B2_E, { })
+ svntest.actions.run_and_verify_merge(A_B2_E, 8, 2, '^/A/B1/E', None,
+ expected_output, None, None, None, None,
+ expected_skip, [])
+ sbox.simple_commit()
+ svntest.main.run_svn(None, 'update', wc_dir)
+
+ # Verify that merge results in no subtree mergeinfo
+ expected_stderr = '.*W200017: Property.*not found'
+ svntest.actions.run_and_verify_svn([], expected_stderr,
+ 'propget', 'svn:mergeinfo',
+ sbox.repo_url + '/A/B2/E')
+
+ # Merge trunk to branch-2
+ svntest.main.run_svn(None, 'merge', '^/A/B', sbox.ospath('A/B2'))
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Verify that there is still no subtree mergeinfo
+ svntest.actions.run_and_verify_svn([], expected_stderr,
+ 'propget', 'svn:mergeinfo',
+ sbox.repo_url + '/A/B2/E')
+
+ # Reintegrate branch-2 to trunk, this fails in 1.6.x from 1.6.13.
+ # The error message states revisions /A/B/E:3-11 are missing from
+ # /A/B2/E and yet the mergeinfo on /A/B2 is /A/B:3-11 and /A/B2/E
+ # has no mergeinfo.
+ expected_output = wc.State(sbox.ospath('A/B'), {
+ 'E' : Item(status=' U'),
+ 'E/alpha' : Item(status='U '),
+ })
+ expected_mergeinfo = wc.State(sbox.ospath('A/B'), {
+ '' : Item(status=' U'),
+ })
+ expected_elision = wc.State(sbox.ospath('A/B'), {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B2:4-12'}),
+ 'E' : Item(),
+ 'E/alpha' : Item("AAA\n" +
+ "X\n" +
+ "BBB\n" +
+ "Y\n" +
+ "CCCxx\n"),
+ 'E/beta' : Item("This is the file 'beta'.\n"),
+ 'F' : Item(),
+ 'lambda' : Item("This is the file 'lambda'.\n"),
+ })
+ expected_skip = wc.State(sbox.ospath('A/B'), {
+ })
+ run_and_verify_reintegrate(sbox.ospath('A/B'),
+ '^/A/B2',
+ expected_output, expected_mergeinfo,
+ expected_elision, expected_disk,
+ None, expected_skip,
+ [],
+ 1, 1)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+@Issue(3961)
+def reintegrate_replaced_source(sbox):
+ "reintegrate a replaced source branch"
+
+ # Make A_COPY branch in r2, and do a few more commits to A in r3-6.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ expected_disk, expected_status = set_up_branch(sbox)
+
+ A_path = sbox.ospath('A')
+ A_COPY_path = sbox.ospath('A_COPY')
+ beta_COPY_path = sbox.ospath('A_COPY/B/E/beta')
+ mu_COPY_path = sbox.ospath('A_COPY/mu')
+
+ # Using cherrypick merges, simulate a series of sync merges from A to
+ # A_COPY with a replace of A_COPY along the way.
+ #
+ # r7 - Merge r3 from A to A_COPY
+ svntest.main.run_svn(None, 'up', wc_dir)
+ svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A', A_COPY_path,
+ '-c3')
+ sbox.simple_commit(message='Merge r3 from A to A_COPY')
+
+ # r8 - Merge r4 from A to A_COPY
+ svntest.main.run_svn(None, 'up', wc_dir)
+ svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A', A_COPY_path,
+ '-c4')
+ sbox.simple_commit(message='Merge r4 from A to A_COPY')
+
+ # r9 - Merge r5 from A to A_COPY. Make an additional edit to
+ # A_COPY/B/E/beta.
+ svntest.main.run_svn(None, 'up', wc_dir)
+ svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A', A_COPY_path,
+ '-c5')
+ svntest.main.file_write(beta_COPY_path, "Branch edit mistake.\n")
+ sbox.simple_commit(message='Merge r5 from A to A_COPY')
+
+ # r10 - Delete A_COPY and replace it with A_COPY@8. This removes the edit
+ # we made above in r9 to A_COPY/B/E/beta.
+ svntest.main.run_svn(None, 'up', wc_dir)
+ svntest.main.run_svn(None, 'delete', A_COPY_path)
+ svntest.main.run_svn(None, 'copy', sbox.repo_url + '/A_COPY@8',
+ A_COPY_path)
+ sbox.simple_commit(message='Replace A_COPY with A_COPY@8')
+
+ # r11 - Make an edit on A_COPY/mu.
+ svntest.main.file_write(mu_COPY_path, "Branch edit.\n")
+ sbox.simple_commit(message='Branch edit')
+
+ # r12 - Do a final sync merge of A to A_COPY in preparation for
+ # reintegration.
+ svntest.main.run_svn(None, 'up', wc_dir)
+ svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A', A_COPY_path)
+ sbox.simple_commit(message='Sync A_COPY with A')
+
+ # Reintegrate A_COPY to A. The resulting mergeinfo should be
+ # '/A_COPY:2-8,10-12' because of the replacement which removed /A_COPY:9
+ # from the reintegrate source's history.
+ svntest.main.run_svn(None, 'up', wc_dir)
+ expected_output = wc.State(A_path, {
+ 'mu' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_path, {
+ })
+ expected_status = wc.State(A_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status='M '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=12)
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A_COPY:2-8,10-12'}),
+ 'B' : Item(),
+ 'mu' : Item("Branch edit.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State(A_path, { })
+ run_and_verify_reintegrate(A_path,
+ sbox.repo_url + '/A_COPY',
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+
+#----------------------------------------------------------------------
+@SkipUnless(svntest.main.is_posix_os)
+@SkipUnless(server_has_mergeinfo)
+@Issue(4052)
+def reintegrate_symlink_deletion(sbox):
+ "reintegrate symlink deletion"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ ## path vars
+ A_path = sbox.ospath('A')
+ A_omicron_path = sbox.ospath('A/omicron')
+ mu_path = sbox.ospath('A/mu')
+ A_COPY_path = sbox.ospath('A_COPY')
+ A_COPY_omicron_path = sbox.ospath('A_COPY/omicron')
+ A_url = sbox.repo_url + "/A"
+ A_COPY_url = sbox.repo_url + "/A_COPY"
+
+ ## add symlink
+ os.symlink(mu_path, A_omicron_path)
+ sbox.simple_add('A/omicron')
+ sbox.simple_commit(message='add symlink')
+
+ ## branch
+ sbox.simple_repo_copy('A', 'A_COPY')
+ sbox.simple_update()
+
+ ## branch rm
+ sbox.simple_rm('A_COPY/omicron')
+ sbox.simple_commit(message='remove symlink on branch')
+
+ ## Note: running update at this point avoids the bug.
+
+ ## reintegrate
+ # ### TODO: verify something here
+ run_reintegrate(A_COPY_url, A_path)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def no_op_reintegrate(sbox):
+ """no-op reintegrate"""
+
+ # Make A_COPY branch in r2, and do a few more commits to A in r3-6.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ A_path = sbox.ospath('A')
+ A_COPY_path = sbox.ospath('A_COPY')
+ expected_disk, expected_status = set_up_branch(sbox)
+
+ # Sync merge from trunk to branch
+ svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A', A_COPY_path)
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Reintegrate; there are no relevant changes on the branch.
+ # ### TODO: Check the result more carefully than merely that it completed.
+ run_reintegrate(sbox.repo_url + '/A_COPY', A_path)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def renamed_branch_reintegrate(sbox):
+ """reintegrate a branch that has been renamed"""
+
+ # The idea of this test is to ensure that the reintegrate merge is able to
+ # cope when one or both of the branches have been renamed.
+ #
+ # A -1-----3-4-5-6----------------------9--------
+ # \ \ / reintegrate
+ # A_COPY 2--------------7-------- /
+ # sync \ /
+ # RENAMED rename 8----------------
+
+ # TODO: Make some changes between the sync/rename/reintegrate steps so
+ # the reintegrate merge actually has to do something.
+ # TODO: Rename the other branch as well.
+
+ # Make A_COPY branch in r2, and do a few more commits to A in r3-6.
+ sbox.build()
+
+ wc_dir = sbox.wc_dir
+ A_path = sbox.ospath('A')
+ A_COPY_path = sbox.ospath('A_COPY')
+ expected_disk, expected_status = set_up_branch(sbox)
+
+ # Sync merge from trunk to branch
+ svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A', A_COPY_path)
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Rename the branch
+ sbox.simple_move('A_COPY', 'RENAMED')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Reintegrate; there are no relevant changes on the branch.
+ # ### TODO: Check the result more carefully than merely that it completed.
+ run_reintegrate(sbox.repo_url + '/RENAMED@8', A_path)
+
+@SkipUnless(server_has_mergeinfo)
+def reintegrate_noop_branch_into_renamed_branch(sbox):
+ """reintegrate no-op branch into renamed branch"""
+ # In this test, the branch has no unique changes but contains a
+ # revision cherry-picked from trunk. Reintegrating such a branch
+ # should work, but used to error out when this test was written.
+
+ # Make A_COPY branch in r2, and do a few more commits to A in r3-6.
+ sbox.build()
+
+ wc_dir = sbox.wc_dir
+ A_path = sbox.ospath('A')
+ A_COPY_path = sbox.ospath('A_COPY')
+ expected_disk, expected_status = set_up_branch(sbox)
+
+ # Cherry-pick merge from trunk to branch
+ youngest_rev = sbox.youngest()
+ svntest.main.run_svn(None, 'merge', '-c', youngest_rev,
+ sbox.repo_url + '/A', A_COPY_path)
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Rename the trunk
+ sbox.simple_move('A', 'A_RENAMED')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Try to reintegrate the branch. This should work but used to fail with:
+ # svn: E160013: File not found: revision 5, path '/A_RENAMED'
+ run_reintegrate(sbox.repo_url + '/A_COPY', sbox.ospath('A_RENAMED'))
+
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ basic_reintegrate,
+ reintegrate_with_rename,
+ reintegrate_branch_never_merged_to,
+ reintegrate_fail_on_modified_wc,
+ reintegrate_fail_on_mixed_rev_wc,
+ reintegrate_fail_on_switched_wc,
+ reintegrate_on_shallow_wc,
+ reintegrate_fail_on_stale_source,
+ merge_file_with_space_in_its_path,
+ reintegrate_with_subtree_mergeinfo,
+ multiple_reintegrates_from_the_same_branch,
+ reintegrate_with_self_referential_mergeinfo,
+ reintegrate_with_subtree_merges,
+ added_subtrees_with_mergeinfo_break_reintegrate,
+ two_URL_merge_removes_valid_mergeinfo_from_target,
+ reintegrate_creates_bogus_mergeinfo,
+ no_source_subtree_mergeinfo,
+ reintegrate_replaced_source,
+ reintegrate_symlink_deletion,
+ no_op_reintegrate,
+ renamed_branch_reintegrate,
+ reintegrate_noop_branch_into_renamed_branch,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/merge_tests.py b/subversion/tests/cmdline/merge_tests.py
new file mode 100755
index 0000000..a67dada
--- /dev/null
+++ b/subversion/tests/cmdline/merge_tests.py
@@ -0,0 +1,18687 @@
+#!/usr/bin/env python
+#
+# merge_tests.py: testing merge
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import shutil, sys, re, os
+import time
+
+# Our testing module
+import svntest
+from svntest import main, wc, verify, actions
+
+from prop_tests import binary_mime_type_on_text_file_warning
+
+# (abbreviation)
+Item = wc.StateItem
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+exp_noop_up_out = svntest.actions.expected_noop_update_output
+
+from svntest.main import SVN_PROP_MERGEINFO
+from svntest.main import server_has_mergeinfo
+from svntest.actions import fill_file_with_lines
+from svntest.actions import make_conflict_marker_text
+from svntest.actions import inject_conflict_into_expected_state
+from svntest.verify import RegexListOutput
+
+from svntest.mergetrees import expected_merge_output, \
+ check_mergeinfo_recursively, \
+ set_up_dir_replace, \
+ set_up_branch, \
+ local_path, \
+ svn_mkfile, \
+ svn_modfile, \
+ svn_copy, \
+ svn_merge, \
+ noninheritable_mergeinfo_test_set_up
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def textual_merges_galore(sbox):
+ "performing a merge, with mixed results"
+
+ ## The Plan:
+ ##
+ ## The goal is to test that "svn merge" does the right thing in the
+ ## following cases:
+ ##
+ ## 1 : _ : Received changes already present in unmodified local file
+ ## 2 : U : No local mods, received changes folded in without trouble
+ ## 3 : G : Received changes already exist as local mods
+ ## 4 : G : Received changes do not conflict with local mods
+ ## 5 : C : Received changes conflict with local mods
+ ##
+ ## So first modify these files and commit:
+ ##
+ ## Revision 2:
+ ## -----------
+ ## A/mu ............... add ten or so lines
+ ## A/D/G/rho .......... add ten or so lines
+ ##
+ ## Now check out an "other" working copy, from revision 2.
+ ##
+ ## Next further modify and commit some files from the original
+ ## working copy:
+ ##
+ ## Revision 3:
+ ## -----------
+ ## A/B/lambda ......... add ten or so lines
+ ## A/D/G/pi ........... add ten or so lines
+ ## A/D/G/tau .......... add ten or so lines
+ ## A/D/G/rho .......... add an additional ten or so lines
+ ##
+ ## In the other working copy (which is at rev 2), update rho back
+ ## to revision 1, while giving other files local mods. This sets
+ ## things up so that "svn merge -r 1:3" will test all of the above
+ ## cases except case 4:
+ ##
+ ## case 1: A/mu .......... do nothing, the only change was in rev 2
+ ## case 2: A/B/lambda .... do nothing, so we accept the merge easily
+ ## case 3: A/D/G/pi ...... add same ten lines as committed in rev 3
+ ## case 5: A/D/G/tau ..... add ten or so lines at the end
+ ## [none]: A/D/G/rho ..... ignore what happens to this file for now
+ ##
+ ## Now run
+ ##
+ ## $ cd wc.other
+ ## $ svn merge -r 1:3 url-to-repo
+ ##
+ ## ...and expect the right output.
+ ##
+ ## Now revert rho, then update it to revision 2, then *prepend* a
+ ## bunch of lines, which will be separated by enough distance from
+ ## the changes about to be received that the merge will be clean.
+ ##
+ ## $ cd wc.other/A/D/G
+ ## $ svn merge -r 2:3 url-to-repo/A/D/G
+ ##
+ ## Which tests case 4. (Ignore the changes to the other files,
+ ## we're only interested in rho here.)
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ # url = os.path.join(svntest.main.test_area_url, sbox.repo_dir)
+
+ # Change mu and rho for revision 2
+ mu_path = sbox.ospath('A/mu')
+ rho_path = sbox.ospath('A/D/G/rho')
+ mu_text = fill_file_with_lines(mu_path, 2)
+ rho_text = fill_file_with_lines(rho_path, 2)
+
+ # Create expected output tree for initial commit
+ expected_output = wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ 'A/D/G/rho' : Item(verb='Sending'),
+ })
+
+ # Create expected status tree; all local revisions should be at 1,
+ # but mu and rho should be at revision 2.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', 'A/D/G/rho', wc_rev=2)
+
+ # Initial commit.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Make the "other" working copy
+ other_wc = sbox.add_wc_path('other')
+ svntest.actions.duplicate_dir(wc_dir, other_wc)
+
+ # Now commit some more mods from the original working copy, to
+ # produce revision 3.
+ lambda_path = sbox.ospath('A/B/lambda')
+ pi_path = sbox.ospath('A/D/G/pi')
+ tau_path = sbox.ospath('A/D/G/tau')
+
+ lambda_text = fill_file_with_lines(lambda_path, 2)
+ pi_text = fill_file_with_lines(pi_path, 2)
+ tau_text = fill_file_with_lines(tau_path, 2)
+ additional_rho_text = fill_file_with_lines(rho_path, 2)
+
+ # Created expected output tree for 'svn ci'
+ expected_output = wc.State(wc_dir, {
+ 'A/B/lambda' : Item(verb='Sending'),
+ 'A/D/G/pi' : Item(verb='Sending'),
+ 'A/D/G/tau' : Item(verb='Sending'),
+ 'A/D/G/rho' : Item(verb='Sending'),
+ })
+
+ # Create expected status tree.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=2)
+ expected_status.tweak('A/B/lambda', 'A/D/G/pi', 'A/D/G/tau', 'A/D/G/rho',
+ wc_rev=3)
+
+ # Commit revision 3.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Make local mods in wc.other
+ other_pi_path = os.path.join(other_wc, 'A', 'D', 'G', 'pi')
+ other_rho_path = os.path.join(other_wc, 'A', 'D', 'G', 'rho')
+ other_tau_path = os.path.join(other_wc, 'A', 'D', 'G', 'tau')
+
+ # For A/mu and A/B/lambda, we do nothing. For A/D/G/pi, we add the
+ # same ten lines as were already committed in revision 3.
+ # (Remember, wc.other is only at revision 2, so it doesn't have
+ # these changes.)
+ svntest.main.file_append(other_pi_path, pi_text)
+
+ # We skip A/D/G/rho in this merge; it will be tested with a separate
+ # merge command. Temporarily put it back to revision 1, so this
+ # merge succeeds cleanly.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '-r', '1', other_rho_path)
+
+ # For A/D/G/tau, we append few different lines, to conflict with the
+ # few lines appended in revision 3.
+ other_tau_text = fill_file_with_lines(other_tau_path, 2,
+ line_descrip="Conflicting line")
+
+ # Do the first merge, revs 1:3. This tests all the cases except
+ # case 4, which we'll handle in a second pass.
+ expected_output = wc.State(other_wc, {'A/B/lambda' : Item(status='U '),
+ 'A/D/G/rho' : Item(status='U '),
+ 'A/D/G/tau' : Item(status='C '),
+ })
+ expected_mergeinfo_output = wc.State(other_wc, {'' : Item(status=' U')})
+ expected_elision_output = wc.State(other_wc, {})
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu',
+ contents=expected_disk.desc['A/mu'].contents
+ + mu_text)
+ expected_disk.tweak('A/B/lambda',
+ contents=expected_disk.desc['A/B/lambda'].contents
+ + lambda_text)
+ expected_disk.tweak('A/D/G/rho',
+ contents=expected_disk.desc['A/D/G/rho'].contents
+ + rho_text + additional_rho_text)
+ expected_disk.tweak('A/D/G/pi',
+ contents=expected_disk.desc['A/D/G/pi'].contents
+ + pi_text)
+
+ expected_status = svntest.actions.get_virginal_state(other_wc, 1)
+ expected_status.tweak('', status=' M')
+ expected_status.tweak('A/mu', wc_rev=2)
+ expected_status.tweak('A/B/lambda', status='M ')
+ expected_status.tweak('A/D/G/pi', status='M ')
+ expected_status.tweak('A/D/G/rho', status='M ')
+
+ inject_conflict_into_expected_state('A/D/G/tau', expected_disk,
+ expected_status, other_tau_text, tau_text,
+ 1, 3)
+
+ expected_skip = wc.State('', { })
+
+ tau_conflict_support_files = ["tau\.working",
+ "tau\.merge-right\.r3",
+ "tau\.merge-left\.r1"]
+
+ svntest.actions.run_and_verify_merge(other_wc, '1', '3',
+ sbox.repo_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], False, True,
+ '--allow-mixed-revisions', other_wc,
+ extra_files=list(tau_conflict_support_files))
+
+ # Now reverse merge r3 into A/D/G/rho, give it non-conflicting local
+ # mods, then merge in the 2:3 change. ### Not bothering to do the
+ # whole expected_foo routine for these intermediate operations;
+ # they're not what we're here to test, after all, so it's enough to
+ # know that they worked. Is this a bad practice? ###
+ #
+ # run_and_verify_merge doesn't support merging to a file WCPATH
+ # so use run_and_verify_svn.
+ ### TODO: We can use run_and_verify_merge() here now.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[-3]],
+ ['G ' + other_rho_path + '\n',
+ ' G ' + other_rho_path + '\n',]),
+ [], 'merge', '-c-3',
+ sbox.repo_url + '/A/D/G/rho',
+ other_rho_path)
+
+ # Now *prepend* ten or so lines to A/D/G/rho. Since rho had ten
+ # lines appended in revision 2, and then another ten in revision 3,
+ # these new local mods will be separated from the rev 3 changes by
+ # enough distance that they won't conflict, so the merge should be
+ # clean.
+ other_rho_text = ""
+ for x in range(1,10):
+ other_rho_text = other_rho_text + 'Unobtrusive line ' + repr(x) + ' in rho\n'
+ current_other_rho_text = open(other_rho_path).read()
+ svntest.main.file_write(other_rho_path,
+ other_rho_text + current_other_rho_text)
+
+ # We expect no merge attempt for pi and tau because they inherit
+ # mergeinfo from the WC root. There is explicit mergeinfo on rho
+ # ('/A/D/G/rho:2') so expect it to be merged (cleanly).
+ G_path = os.path.join(other_wc, 'A', 'D', 'G')
+ expected_output = wc.State(os.path.join(other_wc, 'A', 'D', 'G'),
+ {'rho' : Item(status='G ')})
+ expected_mergeinfo_output = wc.State(G_path, {
+ '' : Item(status=' G'),
+ 'rho' : Item(status=' G')
+ })
+ expected_elision_output = wc.State(G_path, {
+ '' : Item(status=' U'),
+ 'rho' : Item(status=' U')
+ })
+ expected_disk = wc.State("", {
+ 'pi' : Item("This is the file 'pi'.\n"),
+ 'rho' : Item("This is the file 'rho'.\n"),
+ 'tau' : Item("This is the file 'tau'.\n"),
+ })
+ expected_disk.tweak('rho',
+ contents=other_rho_text
+ + expected_disk.desc['rho'].contents
+ + rho_text
+ + additional_rho_text)
+ expected_disk.tweak('pi',
+ contents=expected_disk.desc['pi'].contents
+ + pi_text)
+
+ expected_status = wc.State(os.path.join(other_wc, 'A', 'D', 'G'),
+ { '' : Item(wc_rev=1, status=' '),
+ 'rho' : Item(wc_rev=1, status='M '),
+ 'pi' : Item(wc_rev=1, status='M '),
+ 'tau' : Item(wc_rev=1, status='C '),
+ })
+
+ inject_conflict_into_expected_state('tau', expected_disk, expected_status,
+ other_tau_text, tau_text, 1, 3)
+
+ # Do the merge, but check svn:mergeinfo props separately since
+ # run_and_verify_merge would attempt to proplist tau's conflict
+ # files if we asked it to check props.
+ svntest.actions.run_and_verify_merge(
+ os.path.join(other_wc, 'A', 'D', 'G'),
+ '2', '3',
+ sbox.repo_url + '/A/D/G', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ extra_files=list(tau_conflict_support_files))
+
+
+ svntest.actions.run_and_verify_svn([], '.*W200017: Property.*not found',
+ 'propget', SVN_PROP_MERGEINFO,
+ os.path.join(other_wc,
+ "A", "D", "G", "rho"))
+
+
+#----------------------------------------------------------------------
+# Merge should copy-with-history when adding files or directories
+@SkipUnless(server_has_mergeinfo)
+def add_with_history(sbox):
+ "merge and add new files/dirs with history"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ C_path = sbox.ospath('A/C')
+ F_path = sbox.ospath('A/B/F')
+ F_url = sbox.repo_url + '/A/B/F'
+
+ Q_path = os.path.join(F_path, 'Q')
+ Q2_path = os.path.join(F_path, 'Q2')
+ foo_path = os.path.join(F_path, 'foo')
+ foo2_path = os.path.join(F_path, 'foo2')
+ bar_path = os.path.join(F_path, 'Q', 'bar')
+ bar2_path = os.path.join(F_path, 'Q', 'bar2')
+
+ svntest.main.run_svn(None, 'mkdir', Q_path)
+ svntest.main.run_svn(None, 'mkdir', Q2_path)
+ svntest.main.file_append(foo_path, "foo")
+ svntest.main.file_append(foo2_path, "foo2")
+ svntest.main.file_append(bar_path, "bar")
+ svntest.main.file_append(bar2_path, "bar2")
+ svntest.main.run_svn(None, 'add', foo_path, foo2_path, bar_path, bar2_path)
+ svntest.main.run_svn(None, 'propset', 'x', 'x', Q2_path)
+ svntest.main.run_svn(None, 'propset', 'y', 'y', foo2_path)
+ svntest.main.run_svn(None, 'propset', 'z', 'z', bar2_path)
+
+ expected_output = wc.State(wc_dir, {
+ 'A/B/F/Q' : Item(verb='Adding'),
+ 'A/B/F/Q2' : Item(verb='Adding'),
+ 'A/B/F/Q/bar' : Item(verb='Adding'),
+ 'A/B/F/Q/bar2': Item(verb='Adding'),
+ 'A/B/F/foo' : Item(verb='Adding'),
+ 'A/B/F/foo2' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B/F/Q' : Item(status=' ', wc_rev=2),
+ 'A/B/F/Q2' : Item(status=' ', wc_rev=2),
+ 'A/B/F/Q/bar' : Item(status=' ', wc_rev=2),
+ 'A/B/F/Q/bar2': Item(status=' ', wc_rev=2),
+ 'A/B/F/foo' : Item(status=' ', wc_rev=2),
+ 'A/B/F/foo2' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ expected_output = wc.State(C_path, {
+ 'Q' : Item(status='A '),
+ 'Q2' : Item(status='A '),
+ 'Q/bar' : Item(status='A '),
+ 'Q/bar2' : Item(status='A '),
+ 'foo' : Item(status='A '),
+ 'foo2' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(C_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(C_path, {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B/F:2'}),
+ 'Q' : Item(),
+ 'Q2' : Item(props={'x' : 'x'}),
+ 'Q/bar' : Item("bar"),
+ 'Q/bar2' : Item("bar2", props={'z' : 'z'}),
+ 'foo' : Item("foo"),
+ 'foo2' : Item("foo2", props={'y' : 'y'}),
+ })
+ expected_status = wc.State(C_path, {
+ '' : Item(status=' M', wc_rev=1),
+ 'Q' : Item(status='A ', wc_rev='-', copied='+'),
+ 'Q2' : Item(status='A ', wc_rev='-', copied='+'),
+ 'Q/bar' : Item(status=' ', wc_rev='-', copied='+'),
+ 'Q/bar2' : Item(status=' ', wc_rev='-', copied='+'),
+ 'foo' : Item(status='A ', wc_rev='-', copied='+'),
+ 'foo2' : Item(status='A ', wc_rev='-', copied='+'),
+ })
+
+ expected_skip = wc.State(C_path, { })
+
+ # Add some unversioned directory obstructions to the incoming
+ # additions. This should be tolerated and *not* result in any
+ # difference between the --dry-run and actual merge.
+ # See http://svn.haxx.se/dev/archive-2012-11/0696.shtml
+ os.mkdir(sbox.ospath('A/C/Q'))
+ os.mkdir(sbox.ospath('A/C/Q2'))
+
+ svntest.actions.run_and_verify_merge(C_path, '1', '2', F_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/C' : Item(verb='Sending'),
+ 'A/C/Q' : Item(verb='Adding'),
+ 'A/C/Q2' : Item(verb='Adding'),
+ 'A/C/foo' : Item(verb='Adding'),
+ 'A/C/foo2' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/C' : Item(status=' ', wc_rev=3),
+ 'A/B/F/Q' : Item(status=' ', wc_rev=2),
+ 'A/B/F/Q2' : Item(status=' ', wc_rev=2),
+ 'A/B/F/Q/bar' : Item(status=' ', wc_rev=2),
+ 'A/B/F/Q/bar2': Item(status=' ', wc_rev=2),
+ 'A/B/F/foo' : Item(status=' ', wc_rev=2),
+ 'A/B/F/foo2' : Item(status=' ', wc_rev=2),
+ 'A/C/Q' : Item(status=' ', wc_rev=3),
+ 'A/C/Q2' : Item(status=' ', wc_rev=3),
+ 'A/C/Q/bar' : Item(status=' ', wc_rev=3),
+ 'A/C/Q/bar2' : Item(status=' ', wc_rev=3),
+ 'A/C/foo' : Item(status=' ', wc_rev=3),
+ 'A/C/foo2' : Item(status=' ', wc_rev=3),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+# Issue 953
+@SkipUnless(server_has_mergeinfo)
+@Issue(953)
+def simple_property_merges(sbox):
+  "some simple property merges"
+
+  # Scenario: set props on A/B/E, alpha and beta (r2); copy B to B2 (r3);
+  # modify/add props on the originals (r4).  Then verify, in order:
+  #   1. forward prop merge of B r3:4 into B2,
+  #   2. reverse merge r2:1 that elides B2's mergeinfo,
+  #   3. a conflicting re-merge of r3:4 producing .prej files,
+  #   4. a no-op single-file merge suppressed by mergeinfo inherited
+  #      through a repository copy (issue #1109).
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Add a property to a file and a directory
+  alpha_path = sbox.ospath('A/B/E/alpha')
+  beta_path = sbox.ospath('A/B/E/beta')
+  E_path = sbox.ospath('A/B/E')
+
+  svntest.actions.set_prop('foo', 'foo_val', alpha_path)
+  # A binary, non-UTF8 property value
+  svntest.actions.set_prop('foo', b'foo\201val', beta_path)
+  svntest.actions.set_prop('foo', 'foo_val', E_path)
+
+  # Commit change as rev 2
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/B/E'       : Item(verb='Sending'),
+    'A/B/E/alpha' : Item(verb='Sending'),
+    'A/B/E/beta'  : Item(verb='Sending'),
+    })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/B/E', 'A/B/E/alpha', 'A/B/E/beta',
+                        wc_rev=2, status='  ')
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output, expected_status)
+  svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+  # Copy B to B2 as rev 3
+  B_url = sbox.repo_url + '/A/B'
+  B2_url = sbox.repo_url + '/A/B2'
+
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'copy', '-m', 'copy B to B2',
+                                     B_url, B2_url)
+  svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+  # Modify a property and add a property for the file and directory
+  svntest.actions.set_prop('foo', 'mod_foo', alpha_path)
+  svntest.actions.set_prop('bar', 'bar_val', alpha_path)
+  svntest.actions.set_prop('foo', b'mod\201foo', beta_path)
+  svntest.actions.set_prop('bar', b'bar\201val', beta_path)
+  svntest.actions.set_prop('foo', 'mod_foo', E_path)
+  svntest.actions.set_prop('bar', 'bar_val', E_path)
+
+  # Commit change as rev 4
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+  expected_status.tweak('A/B/E', 'A/B/E/alpha', 'A/B/E/beta',
+                        wc_rev=4, status='  ')
+  expected_status.add({
+    'A/B2'         : Item(status='  ', wc_rev=3),
+    'A/B2/E'       : Item(status='  ', wc_rev=3),
+    'A/B2/E/alpha' : Item(status='  ', wc_rev=3),
+    'A/B2/E/beta'  : Item(status='  ', wc_rev=3),
+    'A/B2/F'       : Item(status='  ', wc_rev=3),
+    'A/B2/lambda'  : Item(status='  ', wc_rev=3),
+    })
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output, expected_status)
+  svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+  # pristine_status aliases (not copies) expected_status; later tweaks to
+  # expected_status happen on fresh State objects, so this stays pristine.
+  pristine_status = expected_status
+  pristine_status.tweak(wc_rev=4)
+
+  # Merge B 3:4 into B2
+  B2_path = sbox.ospath('A/B2')
+  expected_output = wc.State(B2_path, {
+    'E'        : Item(status=' U'),
+    'E/alpha'  : Item(status=' U'),
+    'E/beta'   : Item(status=' U'),
+    })
+  expected_mergeinfo_output = wc.State(B2_path, {
+    '' : Item(status=' U'),
+    })
+  expected_elision_output = wc.State(B2_path, {
+    })
+  expected_disk = wc.State('', {
+    ''         : Item(props={SVN_PROP_MERGEINFO : '/A/B:4'}),
+    'E'        : Item(),
+    'E/alpha'  : Item("This is the file 'alpha'.\n"),
+    'E/beta'   : Item("This is the file 'beta'.\n"),
+    'F'        : Item(),
+    'lambda'   : Item("This is the file 'lambda'.\n"),
+    })
+  expected_disk.tweak('E', 'E/alpha',
+                      props={'foo' : 'mod_foo', 'bar' : 'bar_val'})
+  expected_disk.tweak('E/beta',
+                      props={'foo' : b'mod\201foo', 'bar' : b'bar\201val'})
+  expected_status = wc.State(B2_path, {
+    ''        : Item(status=' M'),
+    'E'       : Item(status=' M'),
+    'E/alpha' : Item(status=' M'),
+    'E/beta'  : Item(status=' M'),
+    'F'       : Item(status='  '),
+    'lambda'  : Item(status='  '),
+    })
+  expected_status.tweak(wc_rev=4)
+  expected_skip = wc.State('', { })
+  svntest.actions.run_and_verify_merge(B2_path, '3', '4', B_url, None,
+                                       expected_output,
+                                       expected_mergeinfo_output,
+                                       expected_elision_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       check_props=True)
+
+  # Revert merge
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'revert', '--recursive', wc_dir)
+  svntest.actions.run_and_verify_status(wc_dir, pristine_status)
+
+  # Merge B 2:1 into B2 (B2's mergeinfo should get elided away)
+  expected_status.tweak('', status='  ')
+  expected_disk.remove('')
+  expected_disk.tweak('E', 'E/alpha', 'E/beta', props={})
+  expected_elision_output = wc.State(B2_path, {
+    '' : Item(status=' U'),
+    })
+  svntest.actions.run_and_verify_merge(B2_path, '2', '1', B_url, None,
+                                       expected_output,
+                                       expected_mergeinfo_output,
+                                       expected_elision_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       check_props=True)
+
+  # Builds the expected contents of a .prej property-conflict file for a
+  # property that was locally deleted while an incoming change tried to
+  # modify it from OLD_VALUE to NEW_VALUE.
+  def error_message(property, old_value, new_value):
+    return "Trying to change property '%s'\n" \
+           "but the property has been locally deleted.\n" \
+           "<<<<<<< (local property value)\n" \
+           "||||||| (incoming 'changed from' value)\n" \
+           "%s=======\n" \
+           "%s>>>>>>> (incoming 'changed to' value)\n" % (property, old_value, new_value)
+
+  # Merge B 3:4 into B2 now causes a conflict
+  expected_disk.add({
+    '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:4'}),
+    'E/dir_conflicts.prej'
+    : Item(error_message('foo', 'foo_val', 'mod_foo')),
+    'E/alpha.prej'
+    : Item(error_message('foo', 'foo_val', 'mod_foo')),
+    'E/beta.prej'
+    : Item(error_message('foo', 'foo?\\81val', 'mod?\\81foo')),
+    })
+  expected_disk.tweak('E', 'E/alpha', props={'bar' : 'bar_val'})
+  expected_disk.tweak('E/beta', props={'bar' : b'bar\201val'})
+  expected_status.tweak('', status=' M')
+  expected_status.tweak('E', 'E/alpha', 'E/beta', status=' C')
+  expected_output.tweak('E', 'E/alpha', 'E/beta', status=' C')
+  expected_elision_output = wc.State(B2_path, {
+    })
+  svntest.actions.run_and_verify_merge(B2_path, '3', '4', B_url, None,
+                                       expected_output,
+                                       expected_mergeinfo_output,
+                                       expected_elision_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       check_props=True)
+
+  # issue 1109 : single file property merge.  This test performs a merge
+  # that should be a no-op (adding properties that are already present).
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'revert', '--recursive', wc_dir)
+  svntest.actions.run_and_verify_status(wc_dir, pristine_status)
+
+  # Copy A at rev 4 to A2 to make revision 5.
+  A_url = sbox.repo_url + '/A'
+  A2_url = sbox.repo_url + '/A2'
+  svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+                                      'Committed revision 5.\n'], [],
+                                     'copy', '-m', 'copy A to A2',
+                                     A_url, A2_url)
+
+  # Re-root the WC at A2.
+  svntest.main.safe_rmtree(wc_dir)
+  svntest.actions.run_and_verify_svn(None, [], 'checkout',
+                                     A2_url, wc_dir)
+
+  # Attempt to re-merge rev 4 of the original A's alpha.  Mergeinfo
+  # inherited from A2 (created by its copy from A) allows us to avoid
+  # a repeated merge.
+  alpha_url = sbox.repo_url + '/A/B/E/alpha'
+  alpha_path = sbox.ospath('B/E/alpha')
+
+  # Cannot use run_and_verify_merge with a file target
+  # (expected empty stdout: the merge must be a no-op).
+  svntest.actions.run_and_verify_svn([], [], 'merge', '-r', '3:4',
+                                     alpha_url, alpha_path)
+
+  exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+                                                              'pl', alpha_path)
+
+  # Scan 'svn proplist' output: both 'foo' and 'bar' must still be present.
+  saw_foo = 0
+  saw_bar = 0
+  for line in output:
+    if re.match("\\s*foo\\s*$", line):
+      saw_foo = 1
+    if re.match("\\s*bar\\s*$", line):
+      saw_bar = 1
+
+  if not saw_foo or not saw_bar:
+    raise svntest.Failure("Expected properties not found")
+
+#----------------------------------------------------------------------
+# This is a regression for issue #1176.
+@Issue(1176)
+def merge_similar_unrelated_trees(sbox):
+  "merging similar trees ancestrally unrelated"
+
+  ## See http://subversion.tigris.org/issues/show_bug.cgi?id=1249. ##
+
+  # Builds three identical unversioned trees, commits them, mutates the
+  # second (file edit + rename), then merges base1->base2 into 'apply'
+  # with --ignore-ancestry and checks the resulting WC status.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Simple test.  Make three directories with the same content.
+  # Modify some stuff in the second one.  Now merge
+  # (firstdir:seconddir->thirddir).
+
+  base1_path = sbox.ospath('base1')
+  base2_path = sbox.ospath('base2')
+  apply_path = sbox.ospath('apply')
+
+  # NOTE(review): os.path.join() with a single argument is a no-op here;
+  # these are plain URL concatenations.  Harmless, but misleading.
+  base1_url = os.path.join(sbox.repo_url + '/base1')
+  base2_url = os.path.join(sbox.repo_url + '/base2')
+
+  # Make a tree of stuff ...
+  os.mkdir(base1_path)
+  svntest.main.file_append(os.path.join(base1_path, 'iota'),
+                           "This is the file iota\n")
+  os.mkdir(os.path.join(base1_path, 'A'))
+  svntest.main.file_append(os.path.join(base1_path, 'A', 'mu'),
+                           "This is the file mu\n")
+  os.mkdir(os.path.join(base1_path, 'A', 'B'))
+  svntest.main.file_append(os.path.join(base1_path, 'A', 'B', 'alpha'),
+                           "This is the file alpha\n")
+  svntest.main.file_append(os.path.join(base1_path, 'A', 'B', 'beta'),
+                           "This is the file beta\n")
+
+  # ... Copy it twice ...
+  shutil.copytree(base1_path, base2_path)
+  shutil.copytree(base1_path, apply_path)
+
+  # ... Gonna see if merge is naughty or nice!
+  svntest.main.file_append(os.path.join(base2_path, 'A', 'mu'),
+                           "A new line in mu.\n")
+  os.rename(os.path.join(base2_path, 'A', 'B', 'beta'),
+            os.path.join(base2_path, 'A', 'B', 'zeta'))
+
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'add', base1_path, base2_path, apply_path)
+
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'ci', '-m', 'rev 2', wc_dir)
+
+  expected_output = wc.State(apply_path, {
+    'A/mu'     : Item(status='U '),
+    'A/B/zeta' : Item(status='A '),
+    'A/B/beta' : Item(status='D '),
+    })
+
+  # run_and_verify_merge doesn't support 'svn merge URL URL path'
+  ### TODO: We can use run_and_verify_merge() here now.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'merge',
+                                     '--ignore-ancestry',
+                                     base1_url, base2_url,
+                                     apply_path)
+
+  expected_status = wc.State(apply_path, {
+    ''            : Item(status='  '),
+    'A'           : Item(status='  '),
+    'A/mu'        : Item(status='M '),
+    'A/B'         : Item(status='  '),
+    'A/B/zeta'    : Item(status='A ', copied='+'),
+    'A/B/alpha'   : Item(status='  '),
+    'A/B/beta'    : Item(status='D '),
+    'iota'        : Item(status='  '),
+    })
+  expected_status.tweak(wc_rev=2)
+  # The copied-in zeta has no committed revision yet, hence wc_rev='-'.
+  expected_status.tweak('A/B/zeta', wc_rev='-')
+  svntest.actions.run_and_verify_status(apply_path, expected_status)
+
+#----------------------------------------------------------------------
+def merge_one_file_helper(sbox, arg_flav, record_only = 0):
+  """ARG_FLAV is one of 'r' (revision range) or 'c' (single change) or
+  '*' (no revision specified).
+
+  Shared driver for the merge_one_file_* tests: commits a change to
+  A/D/G/rho (r2), backdates rho to r1, then merges r2 back in two ways —
+  first with an explicit file target, then from inside the parent dir
+  with no target.  If RECORD_ONLY is truthy, the second merge uses
+  --record-only and only mergeinfo (not text) is expected to change."""
+
+  if arg_flav not in ('r', 'c', '*'):
+    raise svntest.Failure("Unrecognized flavor of merge argument")
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  rho_rel_path = os.path.join('A', 'D', 'G', 'rho')
+  rho_path = os.path.join(wc_dir, rho_rel_path)
+  G_path = sbox.ospath('A/D/G')
+  rho_url = sbox.repo_url + '/A/D/G/rho'
+
+  # Change rho for revision 2
+  svntest.main.file_append(rho_path, 'A new line in rho.\n')
+
+  expected_output = wc.State(wc_dir, { rho_rel_path : Item(verb='Sending'), })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/D/G/rho', wc_rev=2)
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status)
+
+  # Backdate rho to revision 1, so we can merge in the rev 2 changes.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'up', '-r', '1', rho_path)
+
+  # Try one merge with an explicit target; it should succeed.
+  ### Yes, it would be nice to use run_and_verify_merge(), but it
+  # appears to be impossible to get the expected_foo trees working
+  # right.  I think something is still assuming a directory target.
+  if arg_flav == 'r':
+    svntest.actions.run_and_verify_svn(
+      expected_merge_output([[2]],
+                            ['U    ' + rho_path + '\n',
+                            ' U   ' + rho_path + '\n']),
+      [], 'merge', '-r', '1:2', rho_url, rho_path)
+  elif arg_flav == 'c':
+    svntest.actions.run_and_verify_svn(
+      expected_merge_output([[2]],
+                            ['U    ' + rho_path + '\n',
+                            ' U   ' + rho_path + '\n']),
+      [], 'merge', '-c', '2', rho_url, rho_path)
+  elif arg_flav == '*':
+    svntest.actions.run_and_verify_svn(
+      expected_merge_output([[2]],
+                            ['U    ' + rho_path + '\n',
+                            ' U   ' + rho_path + '\n']),
+      [], 'merge', rho_url, rho_path)
+
+  # 'MM' = text modified + props modified (the merge adds mergeinfo).
+  expected_status.tweak(wc_rev=1)
+  expected_status.tweak('A/D/G/rho', status='MM')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Inspect rho, make sure it's right.
+  rho_text = svntest.tree.get_text(rho_path)
+  if rho_text != "This is the file 'rho'.\nA new line in rho.\n":
+    raise svntest.Failure("Unexpected text in merged '" + rho_path + "'")
+
+  # Restore rho to pristine revision 1, for another merge.
+  svntest.actions.run_and_verify_svn(None, [], 'revert', rho_path)
+  expected_status.tweak('A/D/G/rho', status='  ')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Cd into the directory and run merge with no targets.
+  # It should still merge into rho.
+  saved_cwd = os.getcwd()
+  os.chdir(G_path)
+
+  # Cannot use run_and_verify_merge with a file target
+  merge_cmd = ['merge']
+  if arg_flav == 'r':
+    merge_cmd += ['-r', '1:2']
+  elif arg_flav == 'c':
+    merge_cmd += ['-c', '2']
+
+  if record_only:
+    expected_output = expected_merge_output([[2]],
+                                            [' U   rho\n'])
+    merge_cmd.append('--record-only')
+    rho_expected_status = ' M'
+  else:
+    expected_output = expected_merge_output([[2]],
+                                            ['U    rho\n',
+                                             ' U   rho\n'])
+    rho_expected_status = 'MM'
+  merge_cmd.append(rho_url)
+
+  svntest.actions.run_and_verify_svn(expected_output, [], *merge_cmd)
+
+  # Inspect rho, make sure it's right.
+  rho_text = svntest.tree.get_text('rho')
+  if record_only:
+    expected_text = "This is the file 'rho'.\n"
+  else:
+    expected_text = "This is the file 'rho'.\nA new line in rho.\n"
+  if rho_text != expected_text:
+    print("")
+    raise svntest.Failure("Unexpected text merged to 'rho' in '" +
+                          G_path + "'")
+  os.chdir(saved_cwd)
+
+  expected_status.tweak('A/D/G/rho', status=rho_expected_status)
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+@Issue(1150)
+def merge_one_file_using_r(sbox):
+  "merge one file using the -r option"
+  # Thin wrapper: exercise the helper's '-r N:M' revision-range flavor.
+  merge_one_file_helper(sbox, 'r')
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+@Issue(1150)
+def merge_one_file_using_c(sbox):
+  "merge one file using the -c option"
+  # Thin wrapper: exercise the helper's '-c N' single-change flavor.
+  merge_one_file_helper(sbox, 'c')
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def merge_one_file_using_implicit_revs(sbox):
+  "merge one file without explicit revisions"
+  # Thin wrapper: no -r/-c, letting merge pick the revision range itself.
+  merge_one_file_helper(sbox, '*')
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def merge_record_only(sbox):
+  "mark a revision range as merged"
+  # Thin wrapper: record_only=1 makes the second merge use --record-only.
+  merge_one_file_helper(sbox, 'r', 1)
+
+#----------------------------------------------------------------------
+# This is a regression test for the enhancement added in issue #785 "add
+# friendly enhancement to 'svn merge'", which is about inferring that
+# the default target of "svn merge [-r...] FILE" should not be "." but
+# rather should be "FILE".
+def merge_with_implicit_target_helper(sbox, arg_flav):
+  """ARG_FLAV is one of 'r' (revision range) or 'c' (single change) or
+  '*' (no revision specified).
+
+  Shared driver for the merge_with_implicit_target_* tests (issue #785):
+  'svn merge [-r...] FILE' should target FILE, not '.'.  Commits a change
+  to A/mu (r2), then in a second r1 working copy merges r2 into 'mu' by
+  bare filename, and finally reverse-merges it again by URL."""
+
+  if arg_flav not in ('r', 'c', '*'):
+    raise svntest.Failure("Unrecognized flavor of merge argument")
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Change mu for revision 2
+  mu_path = sbox.ospath('A/mu')
+  orig_mu_text = svntest.tree.get_text(mu_path)
+  added_mu_text = ""
+  for x in range(2,11):
+    added_mu_text = added_mu_text + 'This is line ' + repr(x) + ' in mu\n'
+  svntest.main.file_append(mu_path, added_mu_text)
+
+  # Create expected output tree for initial commit
+  expected_output = wc.State(wc_dir, {
+    'A/mu' : Item(verb='Sending'),
+    })
+
+  # Create expected status tree; all local revisions should be at 1,
+  # but mu should be at revision 2.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/mu', wc_rev=2)
+
+  # Initial commit.
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status)
+
+  # Make the "other" working copy, at r1
+  other_wc = sbox.add_wc_path('other')
+  svntest.actions.duplicate_dir(wc_dir, other_wc)
+  svntest.main.run_svn(None, 'up', '-r', 1, other_wc)
+
+  # Try the merge without an explicit target; it should succeed.
+  # Can't use run_and_verify_merge cuz it expects a directory argument.
+  mu_url = sbox.repo_url + '/A/mu'
+
+  # NOTE(review): chdir with no restoring chdir afterwards; the test
+  # framework presumably resets cwd between tests — confirm.
+  os.chdir(os.path.join(other_wc, 'A'))
+
+  # merge using filename for sourcepath
+  # Cannot use run_and_verify_merge with a file target
+  if arg_flav == 'r':
+    svntest.actions.run_and_verify_svn(expected_merge_output([[2]],
+                                       ['U    mu\n',
+                                        ' U   mu\n']),
+                                       [],
+                                       'merge', '-r', '1:2', 'mu')
+  elif arg_flav == 'c':
+    svntest.actions.run_and_verify_svn(expected_merge_output([[2]],
+                                       ['U    mu\n',
+                                        ' U   mu\n']),
+                                       [],
+                                       'merge', '-c', '2', 'mu')
+
+  elif arg_flav == '*':
+    # Without a peg revision, the default merge range of BASE:1 (which
+    # is a no-op) will be chosen.  Let's do it both ways (no-op first,
+    # of course).
+    svntest.actions.run_and_verify_svn(None, [], 'merge', 'mu')
+    svntest.actions.run_and_verify_svn(expected_merge_output([[2]],
+                                       ['U    mu\n',
+                                        ' U   mu\n']),
+                                       [],
+                                       'merge', 'mu@2')
+
+  # sanity-check resulting file
+  if svntest.tree.get_text('mu') != orig_mu_text + added_mu_text:
+    raise svntest.Failure("Unexpected text in 'mu'")
+
+  # merge using URL for sourcepath
+  if arg_flav == 'r':
+    svntest.actions.run_and_verify_svn(expected_merge_output([[-2]],
+                                       ['G    mu\n',
+                                        ' U   mu\n',
+                                        ' G   mu\n',],
+                                       elides=True),
+                                       [],
+                                       'merge', '-r', '2:1', mu_url)
+  elif arg_flav == 'c':
+    svntest.actions.run_and_verify_svn(expected_merge_output([[-2]],
+                                       ['G    mu\n',
+                                        ' U   mu\n',
+                                        ' G   mu\n'],
+                                       elides=True),
+                                       [],
+                                       'merge', '-c', '-2', mu_url)
+  elif arg_flav == '*':
+    # Implicit merge source URL and revision range detection is for
+    # forward merges only (e.g. non-reverts).  Undo application of
+    # r2 to enable continuation of the test case.
+    svntest.actions.run_and_verify_svn(expected_merge_output([[-2]],
+                                       ['G    mu\n',
+                                        ' U   mu\n',
+                                        ' G   mu\n'],
+                                       elides=True),
+                                       [],
+                                       'merge', '-c', '-2', mu_url)
+
+  # sanity-check resulting file
+  if svntest.tree.get_text('mu') != orig_mu_text:
+    raise svntest.Failure("Unexpected text '%s' in 'mu', expected '%s'" %
+                          (svntest.tree.get_text('mu'), orig_mu_text))
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+@Issue(785)
+def merge_with_implicit_target_using_r(sbox):
+  "merging a file w/no explicit target path using -r"
+  # Thin wrapper: implicit-target merge via '-r N:M'.
+  merge_with_implicit_target_helper(sbox, 'r')
+
+#----------------------------------------------------------------------
+@Issue(785)
+def merge_with_implicit_target_using_c(sbox):
+  "merging a file w/no explicit target path using -c"
+  # Thin wrapper: implicit-target merge via '-c N'.
+  merge_with_implicit_target_helper(sbox, 'c')
+
+#----------------------------------------------------------------------
+@Issue(785)
+def merge_with_implicit_target_and_revs(sbox):
+  "merging a file w/no explicit target path or revs"
+  # Thin wrapper: implicit target AND implicit revision range.
+  merge_with_implicit_target_helper(sbox, '*')
+
+#----------------------------------------------------------------------
+def merge_with_prev(sbox):
+  "merge operations using PREV revision"
+
+  # Commits changes to A/mu and a new A/zot (r2), then verifies that
+  # reverse merges expressed with the symbolic revisions HEAD:PREV (on a
+  # file) and COMMITTED:PREV (on a directory) undo those changes.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Change mu for revision 2
+  mu_path = sbox.ospath('A/mu')
+  orig_mu_text = svntest.tree.get_text(mu_path)
+  added_mu_text = ""
+  for x in range(2,11):
+    added_mu_text = added_mu_text + '\nThis is line ' + repr(x) + ' in mu'
+  added_mu_text += "\n"
+  svntest.main.file_append(mu_path, added_mu_text)
+
+  zot_path = sbox.ospath('A/zot')
+
+  svntest.main.file_append(zot_path, "bar")
+  svntest.main.run_svn(None, 'add', zot_path)
+
+  # Create expected output tree for initial commit
+  expected_output = wc.State(wc_dir, {
+    'A/mu' : Item(verb='Sending'),
+    'A/zot' : Item(verb='Adding'),
+    })
+
+  # Create expected status tree; all local revisions should be at 1,
+  # but mu should be at revision 2.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/mu', wc_rev=2)
+  expected_status.add({'A/zot' : Item(status='  ', wc_rev=2)})
+
+  # Initial commit.
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status)
+
+  # Make some other working copies
+  other_wc = sbox.add_wc_path('other')
+  svntest.actions.duplicate_dir(wc_dir, other_wc)
+
+  another_wc = sbox.add_wc_path('another')
+  svntest.actions.duplicate_dir(wc_dir, another_wc)
+
+  was_cwd = os.getcwd()
+
+  os.chdir(os.path.join(other_wc, 'A'))
+
+  # Try to revert the last change to mu via svn merge
+  # Cannot use run_and_verify_merge with a file target
+  svntest.actions.run_and_verify_svn(expected_merge_output([[-2]],
+                                     ['U    mu\n',
+                                      ' U   mu\n'],
+                                     elides=True),
+                                     [],
+                                     'merge', '-r', 'HEAD:PREV', 'mu')
+
+  # sanity-check resulting file
+  if svntest.tree.get_text('mu') != orig_mu_text:
+    raise svntest.Failure("Unexpected text in 'mu'")
+
+  os.chdir(was_cwd)
+
+  # other_status aliases expected_status and retargets it at other_wc.
+  other_status = expected_status
+  other_status.wc_dir = other_wc
+  other_status.tweak('A/mu', status='M ', wc_rev=2)
+  other_status.tweak('A/zot', wc_rev=2)
+  svntest.actions.run_and_verify_status(other_wc, other_status)
+
+  os.chdir(another_wc)
+
+  # ensure 'A' will be at revision 2
+  svntest.actions.run_and_verify_svn(None, [], 'up')
+
+  # now try a revert on a directory, and verify that it removed the zot
+  # file we had added previously
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'merge', '-r', 'COMMITTED:PREV',
+                                     'A', 'A')
+
+  if svntest.tree.get_text('A/zot') != None:
+    raise svntest.Failure("Unexpected text in 'A/zot'")
+
+  os.chdir(was_cwd)
+
+  # another_status also aliases the same State object; retarget and
+  # re-tweak it for the second working copy's expected state.
+  another_status = expected_status
+  another_status.wc_dir = another_wc
+  another_status.tweak(wc_rev=2)
+  another_status.tweak('A/mu', status='M ')
+  another_status.tweak('A/zot', status='D ')
+  svntest.actions.run_and_verify_status(another_wc, another_status)
+
+#----------------------------------------------------------------------
+# Regression test for issue #1319: 'svn merge' should *not* 'C' when
+# merging a change into a binary file, unless it has local mods, or has
+# different contents from the left side of the merge.
+@SkipUnless(server_has_mergeinfo)
+@Issue(1319)
+def merge_binary_file(sbox):
+  "merge change into unchanged binary file"
+
+  # Issue #1319: merging a change into an unmodified binary file must
+  # produce 'U' (then local status 'M '), not a conflict.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Add a binary file to the project
+  # (theta.bin is a PNG fixture shipped next to the test scripts)
+  theta_contents = open(os.path.join(sys.path[0], "theta.bin"), 'rb').read()
+  # Write PNG file data into 'A/theta'.
+  theta_path = sbox.ospath('A/theta')
+  svntest.main.file_write(theta_path, theta_contents, 'wb')
+
+  svntest.main.run_svn(None, 'add', theta_path)
+
+  # Commit the new binary file, creating revision 2.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/theta' : Item(verb='Adding  (bin)'),
+    })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'A/theta' : Item(status='  ', wc_rev=2),
+    })
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  # Make the "other" working copy
+  other_wc = sbox.add_wc_path('other')
+  svntest.actions.duplicate_dir(wc_dir, other_wc)
+
+  # Change the binary file in first working copy, commit revision 3.
+  svntest.main.file_append(theta_path, "some extra junk")
+  expected_output = wc.State(wc_dir, {
+    'A/theta' : Item(verb='Sending'),
+    })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'A/theta' : Item(status='  ', wc_rev=3),
+    })
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  # In second working copy, attempt to 'svn merge -r 2:3'.
+  # We should *not* see a conflict during the update, but a 'U'.
+  # And after the merge, the status should be 'M'.
+  expected_output = wc.State(other_wc, {
+    'A/theta' : Item(status='U '),
+    })
+  expected_mergeinfo_output = wc.State(other_wc, {
+    '' : Item(status=' U'),
+    })
+  expected_elision_output = wc.State(other_wc, {
+    })
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({
+    ''        : Item(props={SVN_PROP_MERGEINFO : '/:3'}),
+    'A/theta' : Item(theta_contents + b"some extra junk",
+                     props={'svn:mime-type' : 'application/octet-stream'}),
+    })
+  expected_status = svntest.actions.get_virginal_state(other_wc, 1)
+  expected_status.add({
+    ''        : Item(status=' M', wc_rev=1),
+    'A/theta' : Item(status='M ', wc_rev=2),
+    })
+  expected_skip = wc.State('', { })
+
+  # Mixed-revision WC (theta at r2, rest at r1) requires the
+  # --allow-mixed-revisions override.
+  svntest.actions.run_and_verify_merge(other_wc, '2', '3',
+                                       sbox.repo_url, None,
+                                       expected_output,
+                                       expected_mergeinfo_output,
+                                       expected_elision_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       [],
+                                       True, True, '--allow-mixed-revisions',
+                                       other_wc)
+
+#----------------------------------------------------------------------
+# Regression test for Issue #1297:
+# A merge that creates a new file followed by an immediate diff
+# The diff should succeed.
+@SkipUnless(server_has_mergeinfo)
+@Issue(1297)
+def merge_in_new_file_and_diff(sbox):
+  "diff after merge that creates a new file"
+
+  # Issue #1297: branch A/B/E, add a file on trunk, merge it to the
+  # branch, then make sure 'svn diff' on the branch succeeds and shows
+  # both the added file and the added mergeinfo property.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  trunk_url = sbox.repo_url + '/A/B/E'
+
+  # Create a branch
+  svntest.actions.run_and_verify_svn(None, [], 'cp',
+                                     trunk_url,
+                                     sbox.repo_url + '/branch',
+                                     '-m', "Creating the Branch")
+
+  # Update to revision 2.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'update', wc_dir)
+
+  new_file_path = sbox.ospath('A/B/E/newfile')
+  svntest.main.file_write(new_file_path, "newfile\n")
+
+  # Add the new file, and commit revision 3.
+  svntest.actions.run_and_verify_svn(None, [], "add", new_file_path)
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'ci', '-m',
+                                     "Changing the trunk.", wc_dir)
+
+  branch_path = sbox.ospath('branch')
+  # Diff headers always print forward slashes, even on Windows.
+  url_branch_path = branch_path.replace(os.path.sep, '/')
+
+  # Merge our addition into the branch.
+  expected_output = svntest.wc.State(branch_path, {
+    'newfile' : Item(status='A '),
+    })
+  expected_mergeinfo_output = svntest.wc.State(branch_path, {
+    '' : Item(status=' U'),
+    })
+  expected_elision_output = wc.State(branch_path, {
+    })
+  expected_disk = wc.State('', {
+    'alpha'   : Item("This is the file 'alpha'.\n"),
+    'beta'    : Item("This is the file 'beta'.\n"),
+    'newfile' : Item("newfile\n"),
+    })
+  expected_status = wc.State(branch_path, {
+    ''        : Item(status=' M', wc_rev=2),
+    'alpha'   : Item(status='  ', wc_rev=2),
+    'beta'    : Item(status='  ', wc_rev=2),
+    'newfile' : Item(status='A ', wc_rev='-', copied='+')
+    })
+  expected_skip = wc.State('', { })
+
+  svntest.actions.run_and_verify_merge(branch_path,
+                                       '1', 'HEAD', trunk_url, None,
+                                       expected_output,
+                                       expected_mergeinfo_output,
+                                       expected_elision_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip)
+
+  # Finally, run diff.
+  expected_output = [
+    "Index: " + url_branch_path + "/newfile\n",
+    "===================================================================\n",
+    "--- "+ url_branch_path + "/newfile	(nonexistent)\n",
+    "+++ "+ url_branch_path + "/newfile	(working copy)\n",
+    "@@ -0,0 +1 @@\n",
+    "+newfile\n",
+
+    "Index: " + url_branch_path + "\n",
+    "===================================================================\n",
+    "--- "+ url_branch_path + "	(revision 2)\n",
+    "+++ "+ url_branch_path + "	(working copy)\n",
+    "\n",
+    "Property changes on: " + url_branch_path + "\n",
+    "___________________________________________________________________\n",
+    "Added: " + SVN_PROP_MERGEINFO + "\n",
+    "## -0,0 +0,1 ##\n",
+    "   Merged /A/B/E:r2-3\n",
+  ]
+  svntest.actions.run_and_verify_svn(expected_output, [], 'diff',
+                                     '--show-copies-as-adds', branch_path)
+
+
+#----------------------------------------------------------------------
+# Issue #1425: 'svn merge' should skip over any unversioned obstructions.
+# This test involves tree conflicts. - but attempting to test for
+# pre-tree-conflict behaviour
+@SkipUnless(server_has_mergeinfo)
+@Issues(1425, 2898)
+def merge_skips_obstructions(sbox):
+ "merge should skip over unversioned obstructions"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ C_path = sbox.ospath('A/C')
+ F_path = sbox.ospath('A/B/F')
+ F_url = sbox.repo_url + '/A/B/F'
+
+ Q_path = os.path.join(F_path, 'Q')
+ foo_path = os.path.join(F_path, 'foo')
+ bar_path = os.path.join(F_path, 'Q', 'bar')
+
+ svntest.main.run_svn(None, 'mkdir', Q_path)
+ svntest.main.file_append(foo_path, "foo")
+ svntest.main.file_append(bar_path, "bar")
+ svntest.main.run_svn(None, 'add', foo_path, bar_path)
+
+ expected_output = wc.State(wc_dir, {
+ 'A/B/F/Q' : Item(verb='Adding'),
+ 'A/B/F/Q/bar' : Item(verb='Adding'),
+ 'A/B/F/foo' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B/F/Q' : Item(status=' ', wc_rev=2),
+ 'A/B/F/Q/bar' : Item(status=' ', wc_rev=2),
+ 'A/B/F/foo' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ pre_merge_status = expected_status
+
+ # Revision 2 now has A/B/F/foo, A/B/F/Q, A/B/F/Q/bar. Let's merge
+ # those 'F' changes into empty dir 'C'. But first, create an
+ # unversioned 'foo' within C, and make sure 'svn merge' doesn't
+ # error when the addition of foo is obstructed.
+
+ expected_output = wc.State(C_path, {
+ 'Q' : Item(status='A '),
+ 'Q/bar' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(C_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(C_path, {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B/F:2'}),
+ 'Q' : Item(),
+ 'Q/bar' : Item("bar"),
+ 'foo' : Item("foo"),
+ })
+ expected_status = wc.State(C_path, {
+ '' : Item(status=' M', wc_rev=1),
+ 'Q' : Item(status='A ', wc_rev='-', copied='+'),
+ 'Q/bar' : Item(status=' ', wc_rev='-', copied='+'),
+ })
+ expected_skip = wc.State(C_path, {
+ 'foo' : Item(verb='Skipped'),
+ })
+ # Unversioned:
+ svntest.main.file_append(os.path.join(C_path, "foo"), "foo")
+
+ svntest.actions.run_and_verify_merge(C_path, '1', '2', F_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True)
+
+ # Revert the local mods, and this time make "Q" obstructed. An
+ # unversioned file called "Q" will obstruct the adding of the
+ # directory of the same name.
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '-R', wc_dir)
+ os.unlink(os.path.join(C_path, "foo"))
+ svntest.main.safe_rmtree(os.path.join(C_path, "Q"))
+ svntest.main.file_append(os.path.join(C_path, "Q"), "foo") # unversioned
+ svntest.actions.run_and_verify_status(wc_dir, pre_merge_status)
+
+ expected_output = wc.State(C_path, {
+ 'foo' : Item(status='A '),
+ 'Q/bar' : Item(status=' ', treeconflict='A'), # Skipped
+ })
+ expected_mergeinfo_output = wc.State(C_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(C_path, {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B/F:2'}),
+ 'Q' : Item("foo"),
+ 'foo' : Item("foo"),
+ })
+ expected_status = wc.State(C_path, {
+ '' : Item(status=' M', wc_rev=1),
+ 'foo' : Item(status='A ', wc_rev='-', copied='+'),
+ })
+ expected_skip = wc.State(C_path, {
+ 'Q' : Item(verb='Skipped'),
+ })
+
+ svntest.actions.run_and_verify_merge(C_path, '1', '2', F_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True)
+
+ # Revert the local mods, and commit the deletion of iota and A/D/G. (r3)
+ os.unlink(os.path.join(C_path, "foo"))
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+ svntest.actions.run_and_verify_status(wc_dir, pre_merge_status)
+
+ iota_path = sbox.ospath('iota')
+ G_path = sbox.ospath('A/D/G')
+ svntest.actions.run_and_verify_svn(None, [], 'rm', iota_path, G_path)
+
+ expected_output = wc.State(wc_dir, {
+ 'A/D/G' : Item(verb='Deleting'),
+ 'iota' : Item(verb='Deleting'),
+ })
+ expected_status = pre_merge_status
+ expected_status.remove('iota', 'A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Now create unversioned iota and A/D/G, try running a merge -r2:3.
+ # The merge process should skip over these targets, since they're
+ # unversioned.
+
+ svntest.main.file_append(iota_path, "foo") # unversioned
+ os.mkdir(G_path) # unversioned
+
+ expected_output = wc.State(wc_dir, {
+ })
+ expected_mergeinfo_output = wc.State(wc_dir, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(wc_dir, {
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau')
+ expected_disk.add({
+ '' : Item(props={SVN_PROP_MERGEINFO : '/:3'}),
+ 'A/B/F/Q' : Item(),
+ 'A/B/F/Q/bar' : Item("bar"),
+ 'A/B/F/foo' : Item("foo"),
+ 'A/C/Q' : Item("foo"),
+ })
+ expected_disk.tweak('iota', contents="foo")
+ # No-op merge still sets mergeinfo
+ expected_status.tweak('', status=' M')
+ expected_skip = wc.State(wc_dir, {
+ 'iota' : Item(verb='Skipped'),
+ 'A/D/G' : Item(verb='Skipped'),
+ })
+ svntest.actions.run_and_verify_merge(wc_dir, '2', '3',
+ sbox.repo_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status.copy(wc_dir),
+ expected_skip,
+ [],
+ True, False, '--allow-mixed-revisions',
+ wc_dir)
+
+ # Revert the local mods, and commit a change to A/B/lambda (r4), and then
+ # commit the deletion of the same file. (r5)
+ svntest.main.safe_rmtree(G_path)
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+ expected_status.tweak('', status=' ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ lambda_path = sbox.ospath('A/B/lambda')
+ svntest.main.file_append(lambda_path, "more text")
+ expected_output = wc.State(wc_dir, {
+ 'A/B/lambda' : Item(verb='Sending'),
+ })
+ expected_status.tweak('A/B/lambda', wc_rev=4)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ svntest.actions.run_and_verify_svn(None, [], 'rm', lambda_path)
+
+ expected_output = wc.State(wc_dir, {
+ 'A/B/lambda' : Item(verb='Deleting'),
+ })
+ expected_status.remove('A/B/lambda')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # lambda is gone, so create an unversioned lambda in its place.
+ # Then attempt to merge -r3:4, which is a change to lambda. The merge
+ # should simply skip the unversioned file.
+
+ svntest.main.file_append(lambda_path, "foo") # unversioned
+
+ expected_output = wc.State(wc_dir, { })
+ expected_mergeinfo_output = wc.State(wc_dir, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(wc_dir, {
+ })
+ expected_disk.add({
+ 'A/B/lambda' : Item("foo"),
+ })
+ expected_disk.remove('A/D/G')
+ expected_disk.tweak('', props={SVN_PROP_MERGEINFO : '/:4'})
+ expected_skip = wc.State(wc_dir, {
+ 'A/B/lambda' : Item(verb='Skipped'),
+ })
+ # No-op merge still sets mergeinfo.
+ expected_status_short = expected_status.copy(wc_dir)
+ expected_status_short.tweak('', status=' M')
+
+ svntest.actions.run_and_verify_merge(wc_dir, '3', '4',
+ sbox.repo_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status_short,
+ expected_skip,
+ [],
+ True, False, '--allow-mixed-revisions',
+ wc_dir)
+
+ # OK, so let's commit the new lambda (r6), and then delete the
+ # working file. Then re-run the -r3:4 merge, and see how svn deals
+ # with a file being under version control, but missing.
+
+ svntest.actions.run_and_verify_svn(None, [], 'add', lambda_path)
+
+ # Mergeinfo prop changed so update to avoid out of date error.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ expected_output = wc.State(wc_dir, {
+ '' : Item(verb='Sending'),
+ 'A/B/lambda' : Item(verb='Adding'),
+ })
+ expected_mergeinfo_output = wc.State(wc_dir, {})
+ expected_elision_output = wc.State(wc_dir, {})
+ expected_status.tweak(wc_rev=5)
+ expected_status.add({
+ 'A/B/lambda' : Item(wc_rev=6, status=' '),
+ })
+ expected_status.tweak('', status=' ', wc_rev=6)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+ os.unlink(lambda_path)
+
+ expected_output = wc.State(wc_dir, { })
+ expected_disk.remove('A/B/lambda')
+ expected_status.tweak('A/B/lambda', status='! ')
+ expected_status.tweak('', status=' ')
+ expected_skip = wc.State(wc_dir, {
+ 'A/B/lambda' : Item(verb='Skipped missing target'),
+ })
+ # Why do we need to --ignore-ancestry? Because the previous merge of r4,
+ # despite being inoperative, set mergeinfo for r4 on the WC. With the
+ # advent of merge tracking this repeat merge attempt would not be attempted.
+ # By using --ignore-ancestry we disregard the mergeinfo and *really* try to
+ # merge into a missing path. This is another facet of issue #2898.
+ svntest.actions.run_and_verify_merge(wc_dir, '3', '4',
+ sbox.repo_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status.copy(wc_dir),
+ expected_skip,
+ [],
+ 1, 0, '--ignore-ancestry',
+ '--allow-mixed-revisions', wc_dir)
+
+#----------------------------------------------------------------------
+# At one time, a merge that added items with the same name as missing
+# items would attempt to add the items and fail, leaving the working
+# copy locked and broken.
+
+# This test involves tree conflicts.
+@SkipUnless(server_has_mergeinfo)
+def merge_into_missing(sbox):
+ "merge into missing must not break working copy"
+
+ # Scenario: commit F/Q (dir) and F/foo (file) in r2, then deeper
+ # Q/R, Q/R/bar and Q/baz in r3; OS-delete foo and the whole Q tree
+ # so they are versioned-but-missing ('!'); then verify that merges
+ # into those missing targets are skipped and the WC is left unlocked.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ F_path = sbox.ospath('A/B/F')
+ F_url = sbox.repo_url + '/A/B/F'
+ Q_path = os.path.join(F_path, 'Q')
+ foo_path = os.path.join(F_path, 'foo')
+
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', Q_path)
+ svntest.main.file_append(foo_path, "foo")
+ svntest.actions.run_and_verify_svn(None, [], 'add', foo_path)
+
+ expected_output = wc.State(wc_dir, {
+ 'A/B/F/Q' : Item(verb='Adding'),
+ 'A/B/F/foo' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B/F/Q' : Item(status=' ', wc_rev=2),
+ 'A/B/F/foo' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ R_path = os.path.join(Q_path, 'R')
+ bar_path = os.path.join(R_path, 'bar')
+ baz_path = os.path.join(Q_path, 'baz')
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', R_path)
+ svntest.main.file_append(bar_path, "bar")
+ svntest.actions.run_and_verify_svn(None, [], 'add', bar_path)
+ svntest.main.file_append(baz_path, "baz")
+ svntest.actions.run_and_verify_svn(None, [], 'add', baz_path)
+
+ expected_output = wc.State(wc_dir, {
+ 'A/B/F/Q/R' : Item(verb='Adding'),
+ 'A/B/F/Q/R/bar' : Item(verb='Adding'),
+ 'A/B/F/Q/baz' : Item(verb='Adding'),
+ })
+ expected_status.add({
+ 'A/B/F/Q/R' : Item(status=' ', wc_rev=3),
+ 'A/B/F/Q/R/bar' : Item(status=' ', wc_rev=3),
+ 'A/B/F/Q/baz' : Item(status=' ', wc_rev=3),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Simulate OS-level loss: foo and the Q tree stay versioned but
+ # disappear from disk.
+ os.unlink(foo_path)
+ svntest.main.safe_rmtree(Q_path)
+
+ expected_output = wc.State(F_path, {
+ })
+ expected_mergeinfo_output = wc.State(F_path, {
+ })
+ expected_elision_output = wc.State(F_path, {
+ })
+ expected_disk = wc.State('', {
+ })
+ expected_status = wc.State(F_path, {
+ '' : Item(status=' ', wc_rev=1),
+ 'foo' : Item(status='! ', wc_rev=2),
+ 'Q' : Item(status='! ', wc_rev=2),
+ # Missing data still available
+ 'Q/R' : Item(status='! ', wc_rev=3),
+ 'Q/R/bar' : Item(status='! ', wc_rev=3),
+ 'Q/baz' : Item(status='! ', wc_rev=3),
+ })
+ expected_skip = wc.State(F_path, {
+ 'Q' : Item(verb='Skipped missing target'),
+ 'foo' : Item(verb='Skipped missing target'),
+ })
+ # Use --ignore-ancestry because merge tracking aware merges raise an
+ # error when the merge target is missing subtrees due to OS-level
+ # deletes.
+
+ ### Need to run real and dry-run separately since real merge notifies Q
+ ### twice!
+ svntest.actions.run_and_verify_merge(F_path, '1', '2', F_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], False, False,
+ '--dry-run',
+ '--ignore-ancestry',
+ '--allow-mixed-revisions',
+ F_path)
+
+ expected_status = wc.State(F_path, {
+ '' : Item(status=' ', wc_rev=1),
+ 'foo' : Item(status='! ', wc_rev=2),
+ 'Q' : Item(status='! ', wc_rev='2'),
+ # Revision is known and we can record mergeinfo
+ 'Q/R' : Item(status='! ', wc_rev='3'),
+ 'Q/R/bar' : Item(status='! ', wc_rev='3'),
+ 'Q/baz' : Item(status='! ', wc_rev='3'),
+ })
+ expected_mergeinfo_output = wc.State(F_path, {
+ })
+
+ svntest.actions.run_and_verify_merge(F_path, '1', '2', F_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], False, False,
+ '--ignore-ancestry',
+ '--allow-mixed-revisions',
+ F_path)
+
+ # This merge fails when it attempts to descend into the missing
+ # directory. That's OK, there is no real need to support merge into
+ # an incomplete working copy, so long as when it fails it doesn't
+ # break the working copy.
+ svntest.main.run_svn('Working copy not locked',
+ 'merge', '-r1:3', '--dry-run', F_url, F_path)
+
+ svntest.main.run_svn('Working copy not locked',
+ 'merge', '-r1:3', F_url, F_path)
+
+ # Check working copy is not locked.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B/F' : Item(status=' ', wc_rev=1),
+ 'A/B/F/foo' : Item(status='! ', wc_rev=2),
+ 'A/B/F/Q' : Item(status='! ', wc_rev=2),
+ 'A/B/F/Q/baz' : Item(status='! ', wc_rev='3'),
+ 'A/B/F/Q/R' : Item(status='! ', wc_rev='3'),
+ 'A/B/F/Q/R/bar' : Item(status='! ', wc_rev='3'),
+ })
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+# A test for issue 1738
+@Issue(1738)
+@SkipUnless(server_has_mergeinfo)
+def dry_run_adds_file_with_prop(sbox):
+ "merge --dry-run adding a new file with props"
+
+ # Issue #1738: a merge that adds a new file carrying a property must
+ # behave the same in dry-run and real mode.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Commit a new file which has a property.
+ zig_path = sbox.ospath('A/B/E/zig')
+ svntest.main.file_append(zig_path, "zig contents")
+ svntest.actions.run_and_verify_svn(None, [], 'add', zig_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'foo_val',
+ zig_path)
+
+ expected_output = wc.State(wc_dir, {
+ 'A/B/E/zig' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B/E/zig' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Do a regular merge of that change into a different dir.
+ F_path = sbox.ospath('A/B/F')
+ E_url = sbox.repo_url + '/A/B/E'
+
+ expected_output = wc.State(F_path, {
+ 'zig' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(F_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(F_path, {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B/E:2'}),
+ 'zig' : Item("zig contents", {'foo':'foo_val'}),
+ })
+ expected_skip = wc.State('', { })
+ expected_status = None # status is optional
+
+ # Trailing True, True presumably select check-props and dry-run modes;
+ # cf. the annotated call in merge_funny_chars_on_path.
+ svntest.actions.run_and_verify_merge(F_path, '1', '2', E_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+
+#----------------------------------------------------------------------
+# Regression test for issue #1673
+# Merge a binary file from two URL with a common ancestry
+@Issue(1673)
+def merge_binary_with_common_ancestry(sbox):
+ "merge binary files with common ancestry"
+
+ # Issue #1673 regression: repository layout built below is
+ #  r2: I/ with binary file theta (the common ancestry)
+ #  r3: J/ = copy of I (first branch)
+ #  r4: K/ = fresh dir (the merge target)
+ #  r5: K/theta = copy of I/theta
+ #  r6: I/theta modified
+ #  r7: L/ = copy of modified I (second branch)
+ # Finally J/theta and L/theta are merged (two-URL merge) into K/theta.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Create the common ancestry path
+ I_path = sbox.ospath('I')
+ svntest.main.run_svn(None, 'mkdir', I_path)
+
+ # Add a binary file to the common ancestry path
+ theta_contents = open(os.path.join(sys.path[0], "theta.bin"), 'rb').read()
+ theta_I_path = os.path.join(I_path, 'theta')
+ svntest.main.file_write(theta_I_path, theta_contents, mode='wb')
+ svntest.main.run_svn(None, 'add', theta_I_path)
+ svntest.main.run_svn(None, 'propset', 'svn:mime-type',
+ 'application/octet-stream', theta_I_path)
+
+ # Commit the ancestry
+ expected_output = wc.State(wc_dir, {
+ 'I' : Item(verb='Adding'),
+ 'I/theta' : Item(verb='Adding (bin)'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'I' : Item(status=' ', wc_rev=2),
+ 'I/theta' : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+
+ # Create the first branch
+ J_path = sbox.ospath('J')
+ svntest.main.run_svn(None, 'copy', I_path, J_path)
+
+ # Commit the first branch
+ expected_output = wc.State(wc_dir, {
+ 'J' : Item(verb='Adding'),
+ })
+
+ expected_status.add({
+ 'J' : Item(status=' ', wc_rev=3),
+ 'J/theta' : Item(status=' ', wc_rev=3),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+
+ # Create the path where the files will be merged
+ K_path = sbox.ospath('K')
+ svntest.main.run_svn(None, 'mkdir', K_path)
+
+ # Commit the new path
+ expected_output = wc.State(wc_dir, {
+ 'K' : Item(verb='Adding'),
+ })
+
+ expected_status.add({
+ 'K' : Item(status=' ', wc_rev=4),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+
+ # Copy 'I/theta' to 'K/'. This file will be merged later.
+ theta_K_path = os.path.join(K_path, 'theta')
+ svntest.main.run_svn(None, 'copy', theta_I_path, theta_K_path)
+
+ # Commit the new file
+ expected_output = wc.State(wc_dir, {
+ 'K/theta' : Item(verb='Adding (bin)'),
+ })
+
+ expected_status.add({
+ 'K/theta' : Item(status=' ', wc_rev=5),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+
+ # Modify the original ancestry 'I/theta'
+ svntest.main.file_append(theta_I_path, "some extra junk")
+
+ # Commit the modification
+ expected_output = wc.State(wc_dir, {
+ 'I/theta' : Item(verb='Sending'),
+ })
+
+ expected_status.tweak('I/theta', wc_rev=6)
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+
+ # Create the second branch from the modified ancestry
+ L_path = sbox.ospath('L')
+ svntest.main.run_svn(None, 'copy', I_path, L_path)
+
+ # Commit the second branch
+ expected_output = wc.State(wc_dir, {
+ 'L' : Item(verb='Adding'),
+ 'L/theta' : Item(verb='Replacing'),
+ })
+
+ expected_status.add({
+ 'L' : Item(status=' ', wc_rev=7),
+ 'L/theta' : Item(status=' ', wc_rev=7),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+
+ # Now merge first ('J/') and second ('L/') branches into 'K/'
+ saved_cwd = os.getcwd()
+
+ os.chdir(K_path)
+ theta_J_url = sbox.repo_url + '/J/theta'
+ theta_L_url = sbox.repo_url + '/L/theta'
+ svntest.actions.run_and_verify_svn(expected_merge_output(None,
+ ['U theta\n',
+ ' U theta\n',
+ ' G theta\n',],
+ two_url=True),
+ [],
+ 'merge', theta_J_url, theta_L_url)
+ os.chdir(saved_cwd)
+
+ # 'MM': both text and props of K/theta were modified by the merge.
+ expected_status.tweak('K/theta', status='MM')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+# A test for issue 1905
+@Issue(1905)
+@SkipUnless(server_has_mergeinfo)
+def merge_funny_chars_on_path(sbox):
+ "merge with funny characters"
+
+ # Issue #1905: names containing '%' and spaces must survive a merge.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # In following lists: 'd' stands for directory, 'f' for file
+ # targets to be added by recursive add
+ # Each tuple is (kind, parent name, optional child file name).
+ add_by_add = [
+ ('d', 'dir_10', 'F%lename'),
+ ('d', 'dir%20', 'F lename'),
+ ('d', 'dir 30', 'Filename'),
+ ('d', 'dir 40', None),
+ ('f', 'F lename', None),
+ ]
+
+ # targets to be added by 'svn mkdir' + add
+ add_by_mkdir = [
+ ('d', 'dir_11', 'F%lename'),
+ ('d', 'dir%21', 'Filename'),
+ ('d', 'dir 31', 'F lename'),
+ ('d', 'dir 41', None),
+ ]
+
+ for target in add_by_add:
+ if target[0] == 'd':
+ target_dir = os.path.join(wc_dir, 'A', 'B', 'E', target[1])
+ os.mkdir(target_dir)
+ if target[2]:
+ target_path = os.path.join(wc_dir, 'A', 'B', 'E', '%s' % target[1],
+ target[2])
+ svntest.main.file_append(target_path, "%s/%s" % (target[1], target[2]))
+ svntest.actions.run_and_verify_svn(None, [], 'add', target_dir)
+ elif target[0] == 'f':
+ target_path = os.path.join(wc_dir, 'A', 'B', 'E', '%s' % target[1])
+ svntest.main.file_append(target_path, "%s" % target[1])
+ svntest.actions.run_and_verify_svn(None, [], 'add', target_path)
+ else:
+ raise svntest.Failure
+
+
+ for target in add_by_mkdir:
+ if target[0] == 'd':
+ target_dir = os.path.join(wc_dir, 'A', 'B', 'E', target[1])
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', target_dir)
+ if target[2]:
+ target_path = os.path.join(wc_dir, 'A', 'B', 'E', '%s' % target[1],
+ target[2])
+ svntest.main.file_append(target_path, "%s/%s" % (target[1], target[2]))
+ svntest.actions.run_and_verify_svn(None, [], 'add', target_path)
+
+ # Build the expected commit output/status for every target at once.
+ expected_output_dic = {}
+ expected_status_dic = {}
+
+ for targets in add_by_add,add_by_mkdir:
+ for target in targets:
+ key = 'A/B/E/%s' % target[1]
+ expected_output_dic[key] = Item(verb='Adding')
+ expected_status_dic[key] = Item(status=' ', wc_rev=2)
+
+ if target[2]:
+ key = 'A/B/E/%s/%s' % (target[1], target[2])
+ expected_output_dic[key] = Item(verb='Adding')
+ expected_status_dic[key] = Item(status=' ', wc_rev=2)
+
+
+ expected_output = wc.State(wc_dir, expected_output_dic)
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add(expected_status_dic)
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Do a regular merge of that change into a different dir.
+ F_path = sbox.ospath('A/B/F')
+ E_url = sbox.repo_url + '/A/B/E'
+
+ expected_output_dic = {}
+ expected_disk_dic = {}
+
+ for targets in add_by_add,add_by_mkdir:
+ for target in targets:
+ key = '%s' % target[1]
+ expected_output_dic[key] = Item(status='A ')
+ if target[0] == 'd':
+ expected_disk_dic[key] = Item(None, {})
+ elif target[0] == 'f':
+ expected_disk_dic[key] = Item("%s" % target[1], {})
+ else:
+ raise svntest.Failure
+ if target[2]:
+ key = '%s/%s' % (target[1], target[2])
+ expected_output_dic[key] = Item(status='A ')
+ expected_disk_dic[key] = Item('%s/%s' % (target[1], target[2]), {})
+
+ expected_output = wc.State(F_path, expected_output_dic)
+ expected_mergeinfo_output = wc.State(F_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(F_path, {
+ })
+ expected_disk = wc.State('', expected_disk_dic)
+ expected_skip = wc.State('', { })
+ expected_status = None # status is optional
+
+ svntest.actions.run_and_verify_merge(F_path, '1', '2', E_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [],
+ False, # don't check props
+ True) # but do a dry-run
+
+ # Commit the merged result; every parent target shows 'Adding'.
+ expected_output_dic = {}
+
+ for targets in add_by_add,add_by_mkdir:
+ for target in targets:
+ key = '%s' % target[1]
+ expected_output_dic[key] = Item(verb='Adding')
+
+ expected_output = wc.State(F_path, expected_output_dic)
+ expected_output.add({
+ '' : Item(verb='Sending'),
+ })
+
+ svntest.actions.run_and_verify_commit(F_path,
+ expected_output,
+ None)
+
+#-----------------------------------------------------------------------
+# Regression test for issue #2064
+@Issue(2064)
+def merge_keyword_expansions(sbox):
+ "merge changes to keyword expansion property"
+
+ # Issue #2064: a merge that adds a file with svn:keywords set must
+ # produce the expanded keyword text in the merge target.
+ sbox.build()
+
+ wcpath = sbox.wc_dir
+ tpath = os.path.join(wcpath, "t")
+ bpath = os.path.join(wcpath, "b")
+ t_fpath = os.path.join(tpath, 'f')
+ b_fpath = os.path.join(bpath, 'f')
+
+ os.mkdir(tpath)
+ svntest.main.run_svn(None, "add", tpath)
+ # Commit r2.
+ svntest.actions.run_and_verify_svn(None, [],
+ "ci", "-m", "r2", wcpath)
+
+ # Copy t to b.
+ svntest.main.run_svn(None, "cp", tpath, bpath)
+ # Commit r3
+ svntest.actions.run_and_verify_svn(None, [],
+ "ci", "-m", "r3", wcpath)
+
+ # Add a file to t.
+ svntest.main.file_append(t_fpath, "$Revision$")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'add', t_fpath)
+ # Ask for keyword expansion in the file.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'svn:keywords', 'Revision',
+ t_fpath)
+ # Commit r4
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'r4', wcpath)
+
+ # Update the wc before the merge.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'update', wcpath)
+
+ expected_status = svntest.actions.get_virginal_state(wcpath, 4)
+ expected_status.add({
+ 't' : Item(status=' ', wc_rev=4),
+ 't/f' : Item(status=' ', wc_rev=4),
+ 'b' : Item(status=' ', wc_rev=4),
+ })
+ svntest.actions.run_and_verify_status(wcpath, expected_status)
+
+ # Do the merge.
+
+ expected_output = wc.State(bpath, {
+ 'f' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(bpath, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(bpath, {
+ })
+ # The $Revision$ keyword must arrive already expanded to r4.
+ expected_disk = wc.State('', {
+ 'f' : Item("$Revision: 4 $"),
+ })
+ expected_status = wc.State(bpath, {
+ '' : Item(status=' M', wc_rev=4),
+ 'f' : Item(status='A ', wc_rev='-', copied='+'),
+ })
+ expected_skip = wc.State(bpath, { })
+
+ svntest.actions.run_and_verify_merge(bpath, '2', 'HEAD',
+ sbox.repo_url + '/t', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip)
+
+#----------------------------------------------------------------------
+@Issue(2132)
+def merge_prop_change_to_deleted_target(sbox):
+ "merge prop change into deleted target"
+ # For issue #2132.
+ # A property change merged onto a path that has since been deleted
+ # must not crash the client.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add a property to alpha.
+ alpha_path = sbox.ospath('A/B/E/alpha')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'foo_val',
+ alpha_path)
+
+ # Commit the property add as r2.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E/alpha' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/E/alpha', wc_rev=2, status=' ')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', wc_dir)
+
+ # Remove alpha entirely.
+ svntest.actions.run_and_verify_svn(None, [], 'rm', alpha_path)
+ expected_output = wc.State(wc_dir, {
+ 'A/B/E/alpha' : Item(verb='Deleting'),
+ })
+ expected_status.tweak(wc_rev=2)
+ expected_status.remove('A/B/E/alpha')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status,
+ [], alpha_path)
+
+ # Try merging the original propset, which applies to a target that
+ # no longer exists. The bug would only reproduce when run from
+ # inside the wc, so we cd in there. We have to use
+ # --ignore-ancestry here because our merge logic will otherwise
+ # prevent a merge of changes we already have.
+ os.chdir(wc_dir)
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'merge',
+ '-r1:2', '--ignore-ancestry', '.')
+
+#----------------------------------------------------------------------
+# A merge that replaces a directory
+# Tests for Issue #2144 and Issue #2607
+@SkipUnless(server_has_mergeinfo)
+@Issue(2144,2607)
+def merge_dir_replace(sbox):
+ "merge a replacement of a directory"
+
+ # set_up_dir_replace() (defined elsewhere in this file) prepares the
+ # repository through r4 — TODO confirm its exact history. Here
+ # A/B/F/foo is re-created with new children and committed as r5, then
+ # the replacement range r2:5 is merged onto A/C, which expects 'R'
+ # (replace) status for foo.
+ set_up_dir_replace(sbox)
+ wc_dir = sbox.wc_dir
+
+ C_path = sbox.ospath('A/C')
+ F_path = sbox.ospath('A/B/F')
+ F_url = sbox.repo_url + '/A/B/F'
+ foo_path = os.path.join(F_path, 'foo')
+ new_file2 = os.path.join(foo_path, "new file 2")
+
+ # Recreate foo in F and add a new folder and two files
+ bar_path = os.path.join(foo_path, 'bar')
+ foo_file = os.path.join(foo_path, "file foo")
+ new_file3 = os.path.join(bar_path, "new file 3")
+
+ # Make a couple of directories, and add some files within them.
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', foo_path)
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', bar_path)
+ svntest.main.file_append(new_file3, "Initial text in new file 3.\n")
+ svntest.main.run_svn(None, "add", new_file3)
+ svntest.main.file_append(foo_file, "Initial text in file foo.\n")
+ svntest.main.run_svn(None, "add", foo_file)
+
+ # Commit the new content, creating r5.
+ expected_output = wc.State(wc_dir, {
+ 'A/B/F/foo' : Item(verb='Adding'),
+ 'A/B/F/foo/file foo' : Item(verb='Adding'),
+ 'A/B/F/foo/bar' : Item(verb='Adding'),
+ 'A/B/F/foo/bar/new file 3' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B/F/foo' : Item(status=' ', wc_rev=5),
+ 'A/B/F/foo/file foo' : Item(status=' ', wc_rev=5),
+ 'A/B/F/foo/bar' : Item(status=' ', wc_rev=5),
+ 'A/B/F/foo/bar/new file 3' : Item(status=' ', wc_rev=5),
+ 'A/C' : Item(status=' ', wc_rev=3),
+ 'A/C/foo' : Item(status=' ', wc_rev=3),
+ 'A/C/foo/new file' : Item(status=' ', wc_rev=3),
+ 'A/C/foo/new file 2' : Item(status=' ', wc_rev=3),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+ # Merge replacement of foo onto C
+ expected_output = wc.State(C_path, {
+ 'foo' : Item(status='R '),
+ 'foo/file foo' : Item(status='A '),
+ 'foo/bar' : Item(status='A '),
+ 'foo/bar/new file 3' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(C_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(C_path, {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B/F:2-5'}),
+ 'foo' : Item(),
+ 'foo/file foo' : Item("Initial text in file foo.\n"),
+ 'foo/bar' : Item(),
+ 'foo/bar/new file 3' : Item("Initial text in new file 3.\n"),
+ })
+ expected_status = wc.State(C_path, {
+ '' : Item(status=' M', wc_rev=3),
+ 'foo' : Item(status='R ', wc_rev='-', copied='+'),
+ 'foo/new file 2' : Item(status='D ', wc_rev='3'),
+ 'foo/file foo' : Item(status=' ', wc_rev='-', copied='+'),
+ 'foo/bar' : Item(status=' ', wc_rev='-', copied='+'),
+ 'foo/bar/new file 3' : Item(status=' ', wc_rev='-', copied='+'),
+ 'foo/new file' : Item(status='D ', wc_rev='3'),
+ })
+ expected_skip = wc.State(C_path, { })
+ svntest.actions.run_and_verify_merge(C_path, '2', '5', F_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True,
+ False) # don't do a dry-run
+ # the output differs
+
+ # Commit merge of foo onto C
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/C' : Item(verb='Sending'),
+ 'A/C/foo' : Item(verb='Replacing'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B/F/foo' : Item(status=' ', wc_rev=5),
+ 'A/B/F/foo/file foo' : Item(status=' ', wc_rev=5),
+ 'A/B/F/foo/bar' : Item(status=' ', wc_rev=5),
+ 'A/B/F/foo/bar/new file 3' : Item(status=' ', wc_rev=5),
+ 'A/C' : Item(status=' ', wc_rev=6),
+ 'A/C/foo' : Item(status=' ', wc_rev=6),
+ 'A/C/foo/file foo' : Item(status=' ', wc_rev=6),
+ 'A/C/foo/bar' : Item(status=' ', wc_rev=6),
+ 'A/C/foo/bar/new file 3' : Item(status=' ', wc_rev=6),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+# A merge that replaces a directory and one of its children
+# Tests for Issue #2690
+@Issue(2690)
+def merge_dir_and_file_replace(sbox):
+ "replace both dir and one of its children"
+
+ # Like merge_dir_replace, but 'new file 2' is also re-created inside
+ # the replaced foo, so the merge replaces both the directory and one
+ # of its children (issue #2690).
+ set_up_dir_replace(sbox)
+ wc_dir = sbox.wc_dir
+
+ C_path = sbox.ospath('A/C')
+ F_path = sbox.ospath('A/B/F')
+ F_url = sbox.repo_url + '/A/B/F'
+ foo_path = os.path.join(F_path, 'foo')
+ new_file2 = os.path.join(foo_path, "new file 2")
+
+ # Recreate foo and 'new file 2' in F and add a new folder with a file
+ bar_path = os.path.join(foo_path, 'bar')
+ new_file3 = os.path.join(bar_path, "new file 3")
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', foo_path)
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', bar_path)
+ svntest.main.file_append(new_file3, "Initial text in new file 3.\n")
+ svntest.main.run_svn(None, "add", new_file3)
+ svntest.main.file_append(new_file2, "New text in new file 2.\n")
+ svntest.main.run_svn(None, "add", new_file2)
+
+ # Commit the recreated tree as r5.
+ expected_output = wc.State(wc_dir, {
+ 'A/B/F/foo' : Item(verb='Adding'),
+ 'A/B/F/foo/new file 2' : Item(verb='Adding'),
+ 'A/B/F/foo/bar' : Item(verb='Adding'),
+ 'A/B/F/foo/bar/new file 3' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B/F/foo' : Item(status=' ', wc_rev=5),
+ 'A/B/F/foo/new file 2' : Item(status=' ', wc_rev=5),
+ 'A/B/F/foo/bar' : Item(status=' ', wc_rev=5),
+ 'A/B/F/foo/bar/new file 3' : Item(status=' ', wc_rev=5),
+ 'A/C/foo' : Item(status=' ', wc_rev=3),
+ 'A/C/foo/new file' : Item(status=' ', wc_rev=3),
+ 'A/C/foo/new file 2' : Item(status=' ', wc_rev=3),
+ })
+ expected_status.tweak('A/C', wc_rev=3) # From mergeinfo
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+ # Merge replacement of foo onto C
+ expected_output = wc.State(C_path, {
+ 'foo' : Item(status='R '),
+ 'foo/new file 2' : Item(status='A '),
+ 'foo/bar' : Item(status='A '),
+ 'foo/bar/new file 3' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(C_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(C_path, {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B/F:2-5'}),
+ 'foo' : Item(),
+ 'foo/new file 2' : Item("New text in new file 2.\n"),
+ 'foo/bar' : Item(),
+ 'foo/bar/new file 3' : Item("Initial text in new file 3.\n"),
+ })
+ expected_status = wc.State(C_path, {
+ '' : Item(status=' M', wc_rev=3),
+ 'foo' : Item(status='R ', wc_rev='-', copied='+'),
+ 'foo/new file 2' : Item(status=' ', wc_rev='-', copied='+'),
+ 'foo/bar' : Item(status=' ', wc_rev='-', copied='+'),
+ 'foo/bar/new file 3' : Item(status=' ', wc_rev='-', copied='+'),
+ 'foo/new file' : Item(status='D ', wc_rev=3),
+ })
+ expected_skip = wc.State(C_path, { })
+ svntest.actions.run_and_verify_merge(C_path, '2', '5', F_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [],
+ True,
+ False) # don't do a dry-run
+ # the output differs
+
+ # Commit merge of foo onto C
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/C' : Item(verb='Sending'),
+ 'A/C/foo' : Item(verb='Replacing'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B/F/foo' : Item(status=' ', wc_rev=5),
+ 'A/B/F/foo/new file 2' : Item(status=' ', wc_rev=5),
+ 'A/B/F/foo/bar' : Item(status=' ', wc_rev=5),
+ 'A/B/F/foo/bar/new file 3' : Item(status=' ', wc_rev=5),
+ 'A/C' : Item(status=' ', wc_rev=6),
+ 'A/C/foo' : Item(status=' ', wc_rev=6),
+ 'A/C/foo/new file 2' : Item(status=' ', wc_rev=6),
+ 'A/C/foo/bar' : Item(status=' ', wc_rev=6),
+ 'A/C/foo/bar/new file 3' : Item(status=' ', wc_rev=6),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Confirm the files are present in the repository.
+ new_file_2_url = sbox.repo_url + '/A/C/foo/new file 2'
+ svntest.actions.run_and_verify_svn(["New text in new file 2.\n"],
+ [], 'cat',
+ new_file_2_url)
+ new_file_3_url = sbox.repo_url + '/A/C/foo/bar/new file 3'
+ svntest.actions.run_and_verify_svn(["Initial text in new file 3.\n"],
+ [], 'cat',
+ new_file_3_url)
+
+#----------------------------------------------------------------------
+@Issue(2144)
+def merge_file_with_space_in_its_name(sbox):
+ "merge a file whose name contains a space"
+ # For issue #2144
+ # The space must be URL-encoded (%20) in the merge source URL.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ new_file = sbox.ospath('new file')
+
+ # Make r2.
+ svntest.main.file_append(new_file, "Initial text in the file.\n")
+ svntest.main.run_svn(None, "add", new_file)
+ svntest.actions.run_and_verify_svn(None, [],
+ "ci", "-m", "r2", wc_dir)
+
+ # Make r3.
+ svntest.main.file_append(new_file, "Next line of text in the file.\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ "ci", "-m", "r3", wc_dir)
+
+ # Try to reverse merge.
+ #
+ # The reproduction recipe requires that no explicit merge target be
+ # passed, so we run merge from inside the wc dir where the target
+ # file (i.e., the URL basename) lives.
+ os.chdir(wc_dir)
+ target_url = sbox.repo_url + '/new%20file'
+ svntest.actions.run_and_verify_svn(None, [],
+ "merge", "-r3:2", target_url)
+
+#----------------------------------------------------------------------
+# A merge between two branches, using no revision number, where the
+# directory to be created by the merge already exists as an unversioned
+# directory.  Tests for Issue #2222.
+@Issue(2222)
+def merge_dir_branches(sbox):
+ "merge between branches"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_uuid = svntest.actions.get_wc_uuid(wc_dir)
+
+ F_path = sbox.ospath('A/B/F')
+ F_url = sbox.repo_url + '/A/B/F'
+ C_url = sbox.repo_url + '/A/C'
+
+ # Create foo in F
+ foo_path = os.path.join(F_path, 'foo')
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', foo_path)
+
+ expected_output = wc.State(wc_dir, {
+ 'A/B/F/foo' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B/F/foo' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Create an unversioned foo
+ foo_path = sbox.ospath('foo')
+ os.mkdir(foo_path)
+
+ # Merge from C to F onto the wc_dir
+ # We can't use run_and_verify_merge because it doesn't support this
+ # syntax of the merge command.
+ ### TODO: We can use run_and_verify_merge() here now.
+ expected_output = expected_merge_output(None, "A " + foo_path + "\n")
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'merge', '--allow-mixed-revisions',
+ C_url, F_url, wc_dir)
+
+ # Run info to check the copied rev to make sure it's right
+ expected_info = {"Path" : re.escape(foo_path), # escape backslashes
+ "URL" : sbox.repo_url + "/foo",
+ "Repository Root" : sbox.repo_url,
+ "Repository UUID" : wc_uuid,
+ "Revision" : "2",
+ "Node Kind" : "directory",
+ "Schedule" : "add",
+ "Copied From URL" : F_url + "/foo",
+ "Copied From Rev" : "2",
+ }
+ svntest.actions.run_and_verify_info([expected_info], foo_path)
+
+
+#----------------------------------------------------------------------
+def safe_property_merge(sbox):
+ "property merges don't overwrite existing prop-mods"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add a property to two files and a directory, commit as r2.
+ alpha_path = sbox.ospath('A/B/E/alpha')
+ beta_path = sbox.ospath('A/B/E/beta')
+ E_path = sbox.ospath('A/B/E')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'foo_val',
+ alpha_path, beta_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'foo_val',
+ E_path)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(verb='Sending'),
+ 'A/B/E/alpha' : Item(verb='Sending'),
+ 'A/B/E/beta' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/E', 'A/B/E/alpha', 'A/B/E/beta',
+ wc_rev=2, status=' ')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Copy B to B2 as rev 3 (making a branch)
+ B_url = sbox.repo_url + '/A/B'
+ B2_url = sbox.repo_url + '/A/B2'
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', '-m', 'copy B to B2',
+ B_url, B2_url)
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Change the properties underneath B again, and commit as r4
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'foo_val2',
+ alpha_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propdel', 'foo',
+ beta_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'foo_val2',
+ E_path)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(verb='Sending'),
+ 'A/B/E/alpha' : Item(verb='Sending'),
+ 'A/B/E/beta' : Item(verb='Sending'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, None)
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Make local propchanges to E, alpha and beta in the branch.
+ alpha_path2 = sbox.ospath('A/B2/E/alpha')
+ beta_path2 = sbox.ospath('A/B2/E/beta')
+ E_path2 = sbox.ospath('A/B2/E')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'branchval',
+ alpha_path2, beta_path2)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'branchval',
+ E_path2)
+
+ # Now merge the recent B change to the branch. Because we already
+ # have local propmods, we should get property conflicts.
+ B2_path = sbox.ospath('A/B2')
+
+ expected_output = wc.State(B2_path, {
+ 'E' : Item(status=' C'),
+ 'E/alpha' : Item(status=' C'),
+ 'E/beta' : Item(status=' C'),
+ })
+ expected_mergeinfo_output = wc.State(B2_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(B2_path, {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : "/A/B:4"}),
+ 'E' : Item(),
+ 'E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'E/beta' : Item("This is the file 'beta'.\n"),
+ 'F' : Item(),
+ 'lambda' : Item("This is the file 'lambda'.\n"),
+ })
+ expected_disk.tweak('E', 'E/alpha', 'E/beta',
+ props={'foo' : 'branchval'}) # local mods still present
+
+ expected_status = wc.State(B2_path, {
+ '' : Item(status=' M'),
+ 'E' : Item(status=' C'),
+ 'E/alpha' : Item(status=' C'),
+ 'E/beta' : Item(status=' C'),
+ 'F' : Item(status=' '),
+ 'lambda' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=4)
+
+ expected_skip = wc.State('', { })
+
+ # should have 3 'prej' files left behind, describing prop conflicts:
+ extra_files = ['alpha.*\.prej', 'beta.*\.prej', 'dir_conflicts.*\.prej']
+
+ svntest.actions.run_and_verify_merge(B2_path, '3', '4', B_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False,
+ extra_files=extra_files)
+
+#----------------------------------------------------------------------
+# Test for issue 2035, whereby 'svn merge' wouldn't always mark
+# property conflicts when it should.
+@Issue(2035)
+@SkipUnless(server_has_mergeinfo)
+def property_merge_from_branch(sbox):
+ "property merge conflict even without local mods"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add a property to a file and a directory, commit as r2.
+ alpha_path = sbox.ospath('A/B/E/alpha')
+ E_path = sbox.ospath('A/B/E')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'foo_val',
+ alpha_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'foo_val',
+ E_path)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(verb='Sending'),
+ 'A/B/E/alpha' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/E', 'A/B/E/alpha', wc_rev=2, status=' ')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Copy B to B2 as rev 3 (making a branch)
+ B_url = sbox.repo_url + '/A/B'
+ B2_url = sbox.repo_url + '/A/B2'
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', '-m', 'copy B to B2',
+ B_url, B2_url)
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Change the properties underneath B again, and commit as r4
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'foo_val2',
+ alpha_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'foo_val2',
+ E_path)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(verb='Sending'),
+ 'A/B/E/alpha' : Item(verb='Sending'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, None)
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+  # Make different propchanges to the B2 branch and commit as r5.
+ alpha_path2 = sbox.ospath('A/B2/E/alpha')
+ E_path2 = sbox.ospath('A/B2/E')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'branchval',
+ alpha_path2)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'branchval',
+ E_path2)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B2/E' : Item(verb='Sending'),
+ 'A/B2/E/alpha' : Item(verb='Sending'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, None)
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Now merge the recent B change to the branch. There are no local
+ # mods anywhere, but we should still get property conflicts anyway!
+ B2_path = sbox.ospath('A/B2')
+
+ expected_output = wc.State(B2_path, {
+ 'E' : Item(status=' C'),
+ 'E/alpha' : Item(status=' C'),
+ })
+ expected_mergeinfo_output = wc.State(B2_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(B2_path, {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:4'}),
+ 'E' : Item(),
+ 'E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'E/beta' : Item("This is the file 'beta'.\n"),
+ 'F' : Item(),
+ 'lambda' : Item("This is the file 'lambda'.\n"),
+ })
+ expected_disk.tweak('E', 'E/alpha',
+ props={'foo' : 'branchval'})
+
+ expected_status = wc.State(B2_path, {
+ '' : Item(status=' M'),
+ 'E' : Item(status=' C'),
+ 'E/alpha' : Item(status=' C'),
+ 'E/beta' : Item(status=' '),
+ 'F' : Item(status=' '),
+ 'lambda' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=5)
+
+ expected_skip = wc.State('', { })
+
+ # should have 2 'prej' files left behind, describing prop conflicts:
+ extra_files = ['alpha.*\.prej', 'dir_conflicts.*\.prej']
+
+ svntest.actions.run_and_verify_merge(B2_path, '3', '4', B_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False,
+ extra_files=extra_files)
+
+#----------------------------------------------------------------------
+# Another test for issue 2035, whereby sometimes 'svn merge' marked
+# property conflicts when it shouldn't!
+@Issue(2035)
+def property_merge_undo_redo(sbox):
+ "undo, then redo a property merge"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add a property to a file, commit as r2.
+ alpha_path = sbox.ospath('A/B/E/alpha')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'foo_val',
+ alpha_path)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E/alpha' : Item(verb='Sending'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/E/alpha', wc_rev=2, status=' ')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Use 'svn merge' to undo the commit. ('svn merge -r2:1')
+ # Result should be a single local-prop-mod.
+ expected_output = wc.State(wc_dir, {'A/B/E/alpha' : Item(status=' U'), })
+ expected_mergeinfo_output = wc.State(wc_dir, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(wc_dir, {
+ '' : Item(status=' U'),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('A/B/E/alpha', status=' M')
+
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_merge(wc_dir, '2', '1',
+ sbox.repo_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False)
+
+  # Change mind, re-apply the change ('svn merge -r1:2').
+  # Because the undoing merge could not be recorded, this re-merge raises
+  # a property conflict rather than merging cleanly; see expectations below.
+ expected_output = wc.State(wc_dir, {'A/B/E/alpha' : Item(status=' C'), })
+ expected_mergeinfo_output = wc.State(wc_dir, {})
+ expected_elision_output = wc.State(wc_dir, {})
+ expected_elision_output = wc.State(wc_dir, {})
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({'A/B/E/alpha.prej'
+ : Item("Trying to add new property 'foo'\n"
+ + "but the property has been locally deleted.\n"
+ + "Incoming property value:\nfoo_val\n")})
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('A/B/E/alpha', status=' C')
+
+ expected_skip = wc.State('', { })
+
+ # Re-merge r1. We have to use --ignore-ancestry here. Otherwise
+ # the merge logic will claim we already have this change (because it
+ # was unable to record the previous undoing merge).
+ svntest.actions.run_and_verify_merge(wc_dir, '1', '2',
+ sbox.repo_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False,
+ '--ignore-ancestry', wc_dir)
+
+
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def cherry_pick_text_conflict(sbox):
+ "cherry-pick a dependent change, get conflict"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A_path = sbox.ospath('A')
+ A_url = sbox.repo_url + '/A'
+ mu_path = os.path.join(A_path, 'mu')
+ branch_A_url = sbox.repo_url + '/copy-of-A'
+ branch_mu_path = sbox.ospath('copy-of-A/mu')
+
+ # Create a branch of A.
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ A_url, branch_A_url,
+ '-m', "Creating copy-of-A")
+
+ # Update to get the branch.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'update', wc_dir)
+
+ # Change mu's text on the branch, producing r3 through r6.
+ for rev in range(3, 7):
+ svntest.main.file_append(branch_mu_path, ("r%d\n" % rev) * 3)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m',
+ 'Add lines to mu in r%d.' % rev, wc_dir)
+
+ # Mark r5 as merged into trunk, to create disparate revision ranges
+ # which need to be merged.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[5]],
+ [' U ' + A_path + '\n']),
+ [], 'merge', '-c5', '--record-only',
+ branch_A_url, A_path)
+
+
+ # Try to merge r4:6 into trunk, without r3. It should fail.
+ expected_output = wc.State(A_path, {
+ 'mu' : Item(status='C '),
+ })
+ expected_mergeinfo_output = wc.State(A_path, {
+ '' : Item(status=' G')
+ })
+ expected_elision_output = wc.State(A_path, {
+ })
+ expected_disk = wc.State('', {
+ 'mu' : Item("This is the file 'mu'.\n"
+ + make_conflict_marker_text('', "r3\n" * 3 + "r4\n" * 3, 3, 4,
+ old_text='r3\n' * 3)),
+ 'B' : Item(),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ })
+ expected_status = wc.State(A_path, {
+ '' : Item(status=' M'),
+ 'mu' : Item(status='C '),
+ 'B' : Item(status=' '),
+ 'B/lambda' : Item(status=' '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=2)
+ expected_skip = wc.State('', { })
+ expected_error = ".*conflicts were produced while merging r3:4.*"
+ svntest.actions.run_and_verify_merge(A_path, '3', '6', branch_A_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ expected_error,
+ extra_files=
+ ["mu\.working",
+ "mu\.merge-right\.r4",
+ "mu\.merge-left\.r3"])
+
+#----------------------------------------------------------------------
+# Test for issue 2135
+@Issue(2135)
+def merge_file_replace(sbox):
+ "merge a replacement of a file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # File scheduled for deletion
+ rho_path = sbox.ospath('A/D/G/rho')
+ svntest.actions.run_and_verify_svn(None, [], 'rm', rho_path)
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/G/rho', status='D ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/rho': Item(verb='Deleting'),
+ })
+
+ expected_status.remove('A/D/G/rho')
+
+ # Commit rev 2
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+ # Create and add a new file.
+ svntest.main.file_write(rho_path, "new rho\n")
+ svntest.actions.run_and_verify_svn(None, [], 'add', rho_path)
+
+  # Commit revision 3
+ expected_status.add({
+ 'A/D/G/rho' : Item(status='A ', wc_rev='0')
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/rho': Item(verb='Adding'),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ None)
+
+ # Update working copy
+ expected_output = svntest.wc.State(wc_dir, {})
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/D/G/rho', contents='new rho\n' )
+ expected_status.tweak(wc_rev='3')
+ expected_status.tweak('A/D/G/rho', status=' ')
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+ # merge changes from r3:1
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/rho': Item(status='R ')
+ })
+ expected_mergeinfo_output = svntest.wc.State(wc_dir, {
+ '' : Item(status=' U')
+ })
+ expected_elision_output = wc.State(wc_dir, {
+ '' : Item(status=' U')
+ })
+ expected_status.tweak('A/D/G/rho', status='R ', copied='+', wc_rev='-')
+ expected_skip = wc.State(wc_dir, { })
+ expected_disk.tweak('A/D/G/rho', contents="This is the file 'rho'.\n")
+ svntest.actions.run_and_verify_merge(wc_dir, '3', '1',
+ sbox.repo_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip)
+
+ # Now commit merged wc
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/rho': Item(verb='Replacing'),
+ })
+ expected_status.tweak('A/D/G/rho', status=' ', copied=None, wc_rev='4')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+# Test for issue 2522
+# Same as merge_file_replace, but without update before merge.
+@Issue(2522)
+def merge_file_replace_to_mixed_rev_wc(sbox):
+ "merge a replacement of a file to mixed rev wc"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # File scheduled for deletion
+ rho_path = sbox.ospath('A/D/G/rho')
+ svntest.actions.run_and_verify_svn(None, [], 'rm', rho_path)
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/G/rho', status='D ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/rho': Item(verb='Deleting'),
+ })
+
+ expected_status.remove('A/D/G/rho')
+
+ # Commit rev 2
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Update working copy
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/D/G/rho' )
+ expected_output = svntest.wc.State(wc_dir, {})
+ expected_status.tweak(wc_rev='2')
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+ # Create and add a new file.
+ svntest.main.file_write(rho_path, "new rho\n")
+ svntest.actions.run_and_verify_svn(None, [], 'add', rho_path)
+
+  # Commit revision 3
+ expected_status.add({
+ 'A/D/G/rho' : Item(status='A ', wc_rev='0')
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/rho': Item(verb='Adding'),
+ })
+
+ expected_disk.add({'A/D/G/rho' : Item(contents='new rho\n')} )
+ expected_status.tweak(wc_rev='2')
+ expected_status.tweak('A/D/G/rho', status=' ', wc_rev='3')
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # merge changes from r3:1...
+ #
+ # ...but first:
+ #
+ # Since "." is at revision 2, r3 is not part of "."'s implicit mergeinfo.
+ # Merge tracking permits only reverse merges from explicit or implicit
+ # mergeinfo, so only r2 would be reverse merged if we left the WC as is.
+ # Normally we'd simply update the whole working copy, but since that would
+ # defeat the purpose of this test (see the comment below), instead we'll
+ # update only "." using --depth empty. This preserves the intent of the
+ # original mixed-rev test for this issue, but allows the merge tracking
+ # logic to consider r3 as valid for reverse merging.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '--depth', 'empty', wc_dir)
+ expected_status.tweak('', wc_rev=3)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/rho': Item(status='R ')
+ })
+ expected_mergeinfo_output = svntest.wc.State(wc_dir, {
+ '' : Item(status=' U')
+ })
+ expected_elision_output = wc.State(wc_dir, {
+ '' : Item(status=' U')
+ })
+ expected_status.tweak('A/D/G/rho', status='R ', copied='+', wc_rev='-')
+ expected_skip = wc.State(wc_dir, { })
+ expected_disk.tweak('A/D/G/rho', contents="This is the file 'rho'.\n")
+ svntest.actions.run_and_verify_merge(wc_dir, '3', '1',
+ sbox.repo_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [],
+ True, False, '--allow-mixed-revisions',
+ wc_dir)
+
+ # When issue #2522 was filed, svn used to break the WC if we didn't
+ # update here. But nowadays, this no longer happens, so the separate
+ # update step which was done here originally has been removed.
+
+ # Now commit merged wc
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/rho': Item(verb='Replacing'),
+ })
+ expected_status.tweak('A/D/G/rho', status=' ', copied=None, wc_rev='4')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+# use -x -w option for ignoring whitespace during merge
+@SkipUnless(server_has_mergeinfo)
+def merge_ignore_whitespace(sbox):
+ "ignore whitespace when merging"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # commit base version of iota
+ file_name = "iota"
+ file_path = os.path.join(wc_dir, file_name)
+ file_url = sbox.repo_url + '/iota'
+
+ svntest.main.file_write(file_path,
+ "Aa\n"
+ "Bb\n"
+ "Cc\n")
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, None)
+
+ # change the file, mostly whitespace changes + an extra line
+ svntest.main.file_write(file_path, "A a\nBb \n Cc\nNew line in iota\n")
+ expected_output = wc.State(wc_dir, { file_name : Item(verb='Sending'), })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak(file_name, wc_rev=3)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Backdate iota to revision 2, so we can merge in the rev 3 changes.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '-r', '2', file_path)
+ # Make some local whitespace changes, these should not conflict
+ # with the remote whitespace changes as both will be ignored.
+ svntest.main.file_write(file_path, " Aa\nB b\nC c\n")
+
+ # Lines changed only by whitespace - both in local or remote -
+ # should be ignored
+ expected_output = wc.State(sbox.wc_dir, { file_name : Item(status='G ') })
+ expected_mergeinfo_output = wc.State(sbox.wc_dir, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(sbox.wc_dir, {
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak(file_name,
+ contents=" Aa\n"
+ "B b\n"
+ "C c\n"
+ "New line in iota\n")
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ expected_status.tweak('', status=' M', wc_rev=1)
+ expected_status.tweak(file_name, status='M ', wc_rev=2)
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_merge(sbox.wc_dir, '2', '3',
+ sbox.repo_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], False, False,
+ '--allow-mixed-revisions',
+ '-x', '-w', wc_dir)
+
+#----------------------------------------------------------------------
+# use -x --ignore-eol-style option for ignoring eolstyle during merge
+@SkipUnless(server_has_mergeinfo)
+def merge_ignore_eolstyle(sbox):
+ "ignore eolstyle when merging"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # commit base version of iota
+ file_name = "iota"
+ file_path = os.path.join(wc_dir, file_name)
+ file_url = sbox.repo_url + '/iota'
+
+ svntest.main.file_write(file_path,
+ "Aa\r\n"
+ "Bb\r\n"
+ "Cc\r\n",
+ "wb")
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, None)
+
+ # change the file, mostly eol changes + an extra line
+ svntest.main.file_write(file_path,
+ "Aa\r"
+ "Bb\n"
+ "Cc\r"
+ "New line in iota\n",
+ "wb")
+ expected_output = wc.State(wc_dir, { file_name : Item(verb='Sending'), })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak(file_name, wc_rev=3)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Backdate iota to revision 2, so we can merge in the rev 3 changes.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '-r', '2', file_path)
+ # Make some local eol changes, these should not conflict
+ # with the remote eol changes as both will be ignored.
+ svntest.main.file_write(file_path,
+ "Aa\n"
+ "Bb\r"
+ "Cc\n",
+ "wb")
+
+ # Lines changed only by eolstyle - both in local or remote -
+ # should be ignored
+ expected_output = wc.State(sbox.wc_dir, { file_name : Item(status='G ') })
+ expected_mergeinfo_output = wc.State(sbox.wc_dir, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(sbox.wc_dir, {
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak(file_name,
+ contents="Aa\n"
+ "Bb\r"
+ "Cc\n"
+ "New line in iota\n")
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ expected_status.tweak('', status=' M')
+ expected_status.tweak(file_name, status='M ', wc_rev=2)
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_merge2(sbox.wc_dir, '2', '3',
+ sbox.repo_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], False, False, True,
+ '--allow-mixed-revisions',
+ '-x', '--ignore-eol-style', wc_dir)
+
+#----------------------------------------------------------------------
+# eol-style handling during merge with conflicts, scenario 1:
+# when a merge creates a conflict on a file, make sure the file and files
+# r<left>, r<right> and .mine are in the eol-style defined for that file.
+#
+# This test for 'svn update' can be found in update_tests.py as
+# conflict_markers_matching_eol.
+@SkipUnless(server_has_mergeinfo)
+def merge_conflict_markers_matching_eol(sbox):
+ "conflict markers should match the file's eol style"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ filecount = 1
+
+ mu_path = sbox.ospath('A/mu')
+
+ # CRLF is a string that will match a CRLF sequence read from a text file.
+ # ### On Windows, we assume CRLF will be read as LF, so it's a poor test.
+ if os.name == 'nt':
+ crlf = '\n'
+ else:
+ crlf = '\r\n'
+
+ # Strict EOL style matching breaks Windows tests at least with Python 2
+ keep_eol_style = not svntest.main.is_os_windows()
+
+ # Checkout a second working copy
+ wc_backup = sbox.add_wc_path('backup')
+ svntest.actions.run_and_verify_svn(None, [], 'checkout',
+ sbox.repo_url, wc_backup)
+
+ # set starting revision
+ cur_rev = 1
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_dir, cur_rev)
+ expected_backup_status = svntest.actions.get_virginal_state(wc_backup,
+ cur_rev)
+
+ path_backup = os.path.join(wc_backup, 'A', 'mu')
+
+ # do the test for each eol-style
+ for eol, eolchar in zip(['CRLF', 'CR', 'native', 'LF'],
+ [crlf, '\015', '\n', '\012']):
+ # rewrite file mu and set the eol-style property.
+ svntest.main.file_write(mu_path, "This is the file 'mu'."+ eolchar, 'wb')
+ svntest.main.run_svn(None, 'propset', 'svn:eol-style', eol, mu_path)
+
+ expected_disk.add({
+ 'A/mu' : Item("This is the file 'mu'." + eolchar)
+ })
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+ expected_status.tweak(wc_rev = cur_rev)
+ expected_status.add({
+ 'A/mu' : Item(status=' ', wc_rev = cur_rev + 1),
+ })
+
+ # Commit the original change and note the 'base' revision number
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ cur_rev = cur_rev + 1
+ base_rev = cur_rev
+
+ svntest.main.run_svn(None, 'update', wc_backup)
+
+ # Make a local mod to mu
+ svntest.main.file_append(mu_path,
+ 'Original appended text for mu' + eolchar)
+
+    # Commit the appended text and note the 'theirs' revision number
+ svntest.main.run_svn(None, 'commit', '-m', 'test log', wc_dir)
+ cur_rev = cur_rev + 1
+ theirs_rev = cur_rev
+
+ # Make a local mod to mu, will conflict with the previous change
+ svntest.main.file_append(path_backup,
+ 'Conflicting appended text for mu' + eolchar)
+
+ # Create expected output tree for an update of the wc_backup.
+ expected_backup_output = svntest.wc.State(wc_backup, {
+ 'A/mu' : Item(status='C '),
+ })
+
+ # Create expected disk tree for the update.
+ expected_backup_disk = expected_disk.copy()
+
+ # verify content of resulting conflicted file
+ expected_backup_disk.add({
+ 'A/mu' : Item(contents= "This is the file 'mu'." + eolchar +
+ "<<<<<<< .working" + eolchar +
+ "Conflicting appended text for mu" + eolchar +
+ "||||||| .merge-left.r" + str(cur_rev - 1) + eolchar +
+ "=======" + eolchar +
+ "Original appended text for mu" + eolchar +
+ ">>>>>>> .merge-right.r" + str(cur_rev) + eolchar),
+ })
+ # verify content of base(left) file
+ expected_backup_disk.add({
+ 'A/mu.merge-left.r' + str(base_rev) :
+ Item(contents= "This is the file 'mu'." + eolchar)
+ })
+ # verify content of theirs(right) file
+ expected_backup_disk.add({
+ 'A/mu.merge-right.r' + str(theirs_rev) :
+ Item(contents= "This is the file 'mu'." + eolchar +
+ "Original appended text for mu" + eolchar)
+ })
+ # verify content of mine file
+ expected_backup_disk.add({
+ 'A/mu.working' : Item(contents= "This is the file 'mu'." +
+ eolchar +
+ "Conflicting appended text for mu" + eolchar)
+ })
+
+ # Create expected status tree for the update.
+ expected_backup_status.add({
+ 'A/mu' : Item(status=' ', wc_rev=cur_rev),
+ })
+ expected_backup_status.tweak('A/mu', status='C ')
+ expected_backup_status.tweak(wc_rev = cur_rev - 1)
+ expected_backup_status.tweak('', status= ' M')
+ expected_mergeinfo_output = wc.State(wc_backup, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(wc_backup, {
+ })
+ expected_backup_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_merge2(wc_backup, cur_rev - 1, cur_rev,
+ sbox.repo_url, None,
+ expected_backup_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_backup_disk,
+ expected_backup_status,
+ expected_backup_skip,
+ keep_eol_style=keep_eol_style)
+
+ # cleanup for next run
+ svntest.main.run_svn(None, 'revert', '-R', wc_backup)
+ svntest.main.run_svn(None, 'update', wc_dir)
+
+#----------------------------------------------------------------------
+# eol-style handling during merge, scenario 2:
+# if part of that merge is a propchange (add, change, delete) of
+# svn:eol-style, make sure the correct eol-style is applied before
+# calculating the merge (and conflicts if any)
+#
+# This test for 'svn update' can be found in update_tests.py as
+# update_eolstyle_handling.
+@SkipUnless(server_has_mergeinfo)
+def merge_eolstyle_handling(sbox):
+ "handle eol-style propchange during merge"
+
+ # Runs three merges into a backup WC while svn:eol-style on A/mu is
+ # added (CRLF), changed (CR), and then deleted; none of them may
+ # produce a conflict.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ mu_path = sbox.ospath('A/mu')
+
+ # CRLF is a string that will match a CRLF sequence read from a text file.
+ # ### On Windows, we assume CRLF will be read as LF, so it's a poor test.
+ if os.name == 'nt':
+ crlf = '\n'
+ else:
+ crlf = '\r\n'
+
+ # Strict EOL style matching breaks Windows tests at least with Python 2
+ keep_eol_style = not svntest.main.is_os_windows()
+
+ # Checkout a second working copy
+ wc_backup = sbox.add_wc_path('backup')
+ svntest.actions.run_and_verify_svn(None, [], 'checkout',
+ sbox.repo_url, wc_backup)
+ path_backup = os.path.join(wc_backup, 'A', 'mu')
+
+ # Test 1: add the eol-style property and commit, change mu in the second
+ # working copy and merge the last revision; there should be no conflict!
+ svntest.main.run_svn(None, 'propset', 'svn:eol-style', "CRLF", mu_path)
+ svntest.main.run_svn(None,
+ 'commit', '-m', 'set eol-style property', wc_dir)
+
+ # '\012' is an octal escape for a single LF byte.
+ svntest.main.file_append_binary(path_backup, 'Added new line of text.\012')
+
+ expected_backup_disk = svntest.main.greek_state.copy()
+ expected_backup_disk.tweak(
+ 'A/mu', contents= "This is the file 'mu'." + crlf +
+ "Added new line of text." + crlf)
+ expected_backup_output = svntest.wc.State(wc_backup, {
+ 'A/mu' : Item(status='GU'),
+ })
+ expected_mergeinfo_output = svntest.wc.State(wc_backup, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(wc_backup, {
+ })
+ expected_backup_status = svntest.actions.get_virginal_state(wc_backup, 1)
+ expected_backup_status.tweak('', status=' M')
+ expected_backup_status.tweak('A/mu', status='MM')
+
+ expected_backup_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_merge2(wc_backup, '1', '2', sbox.repo_url,
+ None,
+ expected_backup_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_backup_disk,
+ expected_backup_status,
+ expected_backup_skip,
+ keep_eol_style=keep_eol_style)
+
+ # Test 2: now change the eol-style property to another value and commit,
+ # merge this revision in the still changed mu in the second working copy;
+ # there should be no conflict!
+ svntest.main.run_svn(None, 'propset', 'svn:eol-style', "CR", mu_path)
+ svntest.main.run_svn(None,
+ 'commit', '-m', 'set eol-style property', wc_dir)
+
+ expected_backup_disk = svntest.main.greek_state.copy()
+ expected_backup_disk.add({
+ # '\015' is an octal escape for a single CR byte; eol-style is now CR.
+ 'A/mu' : Item(contents= "This is the file 'mu'.\015" +
+ "Added new line of text.\015")
+ })
+ expected_backup_output = svntest.wc.State(wc_backup, {
+ 'A/mu' : Item(status='GU'),
+ })
+ expected_mergeinfo_output = svntest.wc.State(wc_backup, {
+ '' : Item(status=' G'),
+ })
+ expected_backup_status = svntest.actions.get_virginal_state(wc_backup, 1)
+ expected_backup_status.tweak('', status=' M')
+ expected_backup_status.tweak('A/mu', status='MM')
+ # expected_elision_output and expected_backup_skip are reused unchanged
+ # from Test 1 above.
+ svntest.actions.run_and_verify_merge2(wc_backup, '2', '3', sbox.repo_url,
+ None,
+ expected_backup_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_backup_disk,
+ expected_backup_status,
+ expected_backup_skip,
+ keep_eol_style=keep_eol_style)
+
+ # Test 3: now delete the eol-style property and commit, merge this revision
+ # in the still changed mu in the second working copy; there should be no
+ # conflict!
+ # EOL of mu should be unchanged (=CRLF).
+ svntest.main.run_svn(None, 'propdel', 'svn:eol-style', mu_path)
+ svntest.main.run_svn(None,
+ 'commit', '-m', 'del eol-style property', wc_dir)
+
+ expected_backup_disk = svntest.main.greek_state.copy()
+ expected_backup_disk.add({
+ 'A/mu' : Item(contents= "This is the file 'mu'.\015" +
+ "Added new line of text.\015")
+ })
+ expected_backup_output = svntest.wc.State(wc_backup, {
+ 'A/mu' : Item(status=' G'),
+ })
+ expected_backup_status = svntest.actions.get_virginal_state(wc_backup, 1)
+ expected_backup_status.tweak('', status=' M')
+ expected_backup_status.tweak('A/mu', status='M ')
+ # expected_mergeinfo_output is reused from Test 2; expected_elision_output
+ # and expected_backup_skip are still those built for Test 1.
+ svntest.actions.run_and_verify_merge2(wc_backup, '3', '4', sbox.repo_url,
+ None,
+ expected_backup_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_backup_disk,
+ expected_backup_status,
+ expected_backup_skip,
+ keep_eol_style=keep_eol_style)
+
+#----------------------------------------------------------------------
+def create_deep_trees(wc_dir):
+ """Create A/B/F/E by moving A/B/E to A/B/F/E.
+ Copy A/B/F/E to A/B/F/E1.
+ Copy A/B to A/copy-of-B, and return the expected status.
+ At the end of this function WC would be at r4"""
+
+ # Revisions created here: r2 = move E under F, r3 = copy E to E1,
+ # r4 = copy A/B to A/copy-of-B. The WC is updated after each step.
+ A_path = os.path.join(wc_dir, 'A')
+ A_B_path = os.path.join(A_path, 'B')
+ A_B_E_path = os.path.join(A_B_path, 'E')
+ A_B_F_path = os.path.join(A_B_path, 'F')
+ A_B_F_E_path = os.path.join(A_B_F_path, 'E')
+ A_B_F_E1_path = os.path.join(A_B_F_path, 'E1')
+
+ # Deepen the directory structure we're working with by moving E to
+ # underneath F and committing, creating revision 2.
+ svntest.main.run_svn(None, 'mv', A_B_E_path, A_B_F_path)
+
+ expected_output = wc.State(wc_dir, {
+ 'A/B/E' : Item(verb='Deleting'),
+ 'A/B/F/E' : Item(verb='Adding')
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta')
+ expected_status.add({
+ 'A/B/F/E' : Item(status=' ', wc_rev=2),
+ 'A/B/F/E/alpha' : Item(status=' ', wc_rev=2),
+ 'A/B/F/E/beta' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Copy E to E1 and commit, creating revision 3.
+ svntest.main.run_svn(None, 'cp', A_B_F_E_path, A_B_F_E1_path)
+
+
+ expected_output = wc.State(wc_dir, {
+ 'A/B/F/E1' : Item(verb='Adding')
+ })
+ expected_status.add({
+ 'A/B/F/E1' : Item(status=' ', wc_rev=3),
+ 'A/B/F/E1/alpha' : Item(status=' ', wc_rev=3),
+ 'A/B/F/E1/beta' : Item(status=' ', wc_rev=3),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Bring the entire WC up to date with rev 3.
+ svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir)
+ expected_status.tweak(wc_rev=3)
+
+ # Copy B and commit, creating revision 4.
+ copy_of_B_path = os.path.join(A_path, 'copy-of-B')
+ svntest.main.run_svn(None, "cp", A_B_path, copy_of_B_path)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/copy-of-B' : Item(verb='Adding'),
+ })
+ expected_status.add({
+ 'A/copy-of-B' : Item(status=' ', wc_rev=4),
+ 'A/copy-of-B/F' : Item(status=' ', wc_rev=4),
+ 'A/copy-of-B/F/E' : Item(status=' ', wc_rev=4),
+ 'A/copy-of-B/F/E/alpha' : Item(status=' ', wc_rev=4),
+ 'A/copy-of-B/F/E/beta' : Item(status=' ', wc_rev=4),
+ 'A/copy-of-B/F/E1' : Item(status=' ', wc_rev=4),
+ 'A/copy-of-B/F/E1/alpha' : Item(status=' ', wc_rev=4),
+ 'A/copy-of-B/F/E1/beta' : Item(status=' ', wc_rev=4),
+ 'A/copy-of-B/lambda' : Item(status=' ', wc_rev=4),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta')
+ expected_disk.add({
+ 'A/B/F/E' : Item(),
+ 'A/B/F/E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'A/B/F/E/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'A/B/F/E1' : Item(),
+ 'A/B/F/E1/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'A/B/F/E1/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'A/copy-of-B' : Item(),
+ 'A/copy-of-B/F' : Item(props={}),
+ 'A/copy-of-B/F/E' : Item(),
+ 'A/copy-of-B/F/E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'A/copy-of-B/F/E/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'A/copy-of-B/F/E1' : Item(),
+ 'A/copy-of-B/F/E1/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'A/copy-of-B/F/E1/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'A/copy-of-B/lambda' : Item(contents="This is the file 'lambda'.\n"),
+ })
+ svntest.actions.verify_disk(wc_dir, expected_disk, True)
+
+ # Bring the entire WC up to date with rev 4.
+ svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir)
+
+ svntest.actions.verify_disk(wc_dir, expected_disk, True)
+
+ expected_status.tweak(wc_rev=4)
+ # NOTE(review): setting status= on a *disk* state item is unusual; it is
+ # done only after the last verify_disk call -- confirm callers that use
+ # this disk tree actually rely on the attribute.
+ expected_disk.tweak('A/copy-of-B/F/E', 'A/copy-of-B/F/E1', status=' M')
+ return expected_status
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def avoid_repeated_merge_using_inherited_merge_info(sbox):
+ "use inherited mergeinfo to avoid repeated merge"
+
+ # r5 edits A/B/F/E/alpha; merging r5 into A/copy-of-B records the
+ # mergeinfo '/A/B:5' there (committed as r6). A later merge of -r4:5
+ # into the grandchild A/copy-of-B/F/E must then inherit that mergeinfo
+ # and not re-apply the text change (expected status below shows no
+ # local text modifications after the re-merge).
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A_path = sbox.ospath('A')
+ A_B_path = os.path.join(A_path, 'B')
+ A_B_E_path = os.path.join(A_B_path, 'E')
+ A_B_F_path = os.path.join(A_B_path, 'F')
+ copy_of_B_path = os.path.join(A_path, 'copy-of-B')
+
+ # Create a deeper directory structure.
+ expected_status = create_deep_trees(wc_dir)
+
+ # Edit alpha and commit it, creating revision 5.
+ alpha_path = os.path.join(A_B_F_path, 'E', 'alpha')
+ new_content_for_alpha = 'new content to alpha\n'
+ svntest.main.file_write(alpha_path, new_content_for_alpha)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/F/E/alpha' : Item(verb='Sending'),
+ })
+ expected_status.tweak('A/B/F/E/alpha', wc_rev=5)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Bring the entire WC up to date with rev 5.
+ svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir)
+
+ # Merge changes from rev 5 of B (to alpha) into copy_of_B.
+ expected_output = wc.State(copy_of_B_path, {
+ 'F/E/alpha' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(copy_of_B_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(copy_of_B_path, {
+ })
+ expected_status = wc.State(copy_of_B_path, {
+ '' : Item(status=' M', wc_rev=5),
+ 'F/E' : Item(status=' ', wc_rev=5),
+ 'F/E/alpha' : Item(status='M ', wc_rev=5),
+ 'F/E/beta' : Item(status=' ', wc_rev=5),
+ 'F/E1' : Item(status=' ', wc_rev=5),
+ 'F/E1/alpha' : Item(status=' ', wc_rev=5),
+ 'F/E1/beta' : Item(status=' ', wc_rev=5),
+ 'lambda' : Item(status=' ', wc_rev=5),
+ 'F' : Item(status=' ', wc_rev=5),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:5'}),
+ 'F/E' : Item(),
+ 'F/E/alpha' : Item(new_content_for_alpha),
+ 'F/E/beta' : Item("This is the file 'beta'.\n"),
+ 'F/E1' : Item(),
+ 'F/E1/alpha' : Item("This is the file 'alpha'.\n"),
+ 'F/E1/beta' : Item("This is the file 'beta'.\n"),
+ 'F' : Item(),
+ 'lambda' : Item("This is the file 'lambda'.\n")
+ })
+ expected_skip = wc.State(copy_of_B_path, { })
+
+ svntest.actions.run_and_verify_merge(copy_of_B_path, '4', '5',
+ sbox.repo_url + '/A/B', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Commit the result of the merge, creating revision 6.
+ expected_output = svntest.wc.State(copy_of_B_path, {
+ '' : Item(verb='Sending'),
+ 'F/E/alpha' : Item(verb='Sending'),
+ })
+ svntest.actions.run_and_verify_commit(copy_of_B_path, expected_output,
+ None)
+
+ # Update the WC to bring /A/copy_of_B/F from rev 4 to rev 6.
+ # Without this update, a subsequent merge will not find any merge
+ # info for /A/copy_of_B/F -- nor its parent dir in the repos -- at
+ # rev 4. Mergeinfo wasn't introduced until rev 6.
+ copy_of_B_F_E_path = os.path.join(copy_of_B_path, 'F', 'E')
+ svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir)
+
+ # Attempt to re-merge changes to alpha from rev 4. Use the merge
+ # info inherited from the grandparent (copy-of-B) of our merge
+ # target (/A/copy-of-B/F/E) to avoid a repeated merge.
+ expected_status = wc.State(copy_of_B_F_E_path, {
+ '' : Item(status=' ', wc_rev=6),
+ 'alpha' : Item(status=' ', wc_rev=6),
+ 'beta' : Item(status=' ', wc_rev=6),
+ })
+ # Only mergeinfo notifications (' U' then ' G') are expected, and the
+ # resulting mergeinfo elides (elides=True) -- no text change re-applied.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[5]],
+ [' U ' + copy_of_B_F_E_path + '\n',
+ ' G ' + copy_of_B_F_E_path + '\n'],
+ elides=True),
+ [], 'merge', '-r4:5',
+ sbox.repo_url + '/A/B/F/E',
+ copy_of_B_F_E_path)
+ svntest.actions.run_and_verify_status(copy_of_B_F_E_path,
+ expected_status)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+@Issue(2821)
+def avoid_repeated_merge_on_subtree_with_merge_info(sbox):
+ "use subtree's mergeinfo to avoid repeated merge"
+ # Create deep trees A/B/F/E and A/B/F/E1 and copy A/B to A/copy-of-B
+ # with the help of 'create_deep_trees'
+ # As /A/copy-of-B/F/E1 is not a child of /A/copy-of-B/F/E,
+ # set_path should not be called on /A/copy-of-B/F/E1 while
+ # doing an implicit subtree merge on /A/copy-of-B/F/E.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A_path = sbox.ospath('A')
+ A_B_path = os.path.join(A_path, 'B')
+ A_B_E_path = os.path.join(A_B_path, 'E')
+ A_B_F_path = os.path.join(A_B_path, 'F')
+ A_B_F_E_path = os.path.join(A_B_F_path, 'E')
+ copy_of_B_path = os.path.join(A_path, 'copy-of-B')
+ copy_of_B_F_path = os.path.join(A_path, 'copy-of-B', 'F')
+ A_copy_of_B_F_E_alpha_path = os.path.join(A_path, 'copy-of-B', 'F',
+ 'E', 'alpha')
+
+ # Create a deeper directory structure.
+ expected_status = create_deep_trees(wc_dir)
+
+ # Edit alpha and commit it, creating revision 5.
+ alpha_path = os.path.join(A_B_F_E_path, 'alpha')
+ new_content_for_alpha1 = 'new content to alpha\n'
+ svntest.main.file_write(alpha_path, new_content_for_alpha1)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/F/E/alpha' : Item(verb='Sending'),
+ })
+ expected_status.tweak('A/B/F/E/alpha', wc_rev=5)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Merge r5 separately into both copied subtrees E and E1; each gets
+ # explicit mergeinfo '/A/B/F/E:5' and each result is committed.
+ for path_and_mergeinfo in (('E', '/A/B/F/E:5'),
+ ('E1', '/A/B/F/E:5')):
+ path_name = os.path.join(copy_of_B_path, 'F', path_and_mergeinfo[0])
+
+ # Merge r5 to path_name.
+ expected_output = wc.State(path_name, {
+ 'alpha' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(path_name, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(path_name, {})
+ expected_status = wc.State(path_name, {
+ '' : Item(status=' M', wc_rev=4),
+ 'alpha' : Item(status='M ', wc_rev=4),
+ 'beta' : Item(status=' ', wc_rev=4),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : path_and_mergeinfo[1]}),
+ 'alpha' : Item(new_content_for_alpha1),
+ 'beta' : Item("This is the file 'beta'.\n"),
+ })
+ expected_skip = wc.State(path_name, { })
+
+ svntest.actions.run_and_verify_merge(path_name, '4', '5',
+ sbox.repo_url + '/A/B/F/E', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Commit the result of the merge, creating new revision.
+ expected_output = svntest.wc.State(path_name, {
+ '' : Item(verb='Sending'),
+ 'alpha' : Item(verb='Sending'),
+ })
+ svntest.actions.run_and_verify_commit(path_name,
+ expected_output, None, [], wc_dir)
+
+ # Edit A/B/F/E/alpha and commit it, creating revision 8.
+ new_content_for_alpha = 'new content to alpha\none more line\n'
+ svntest.main.file_write(alpha_path, new_content_for_alpha)
+
+ expected_output = svntest.wc.State(A_B_F_E_path, {
+ 'alpha' : Item(verb='Sending'),
+ })
+ expected_status = wc.State(A_B_F_E_path, {
+ '' : Item(status=' ', wc_rev=4),
+ 'alpha' : Item(status=' ', wc_rev=8),
+ 'beta' : Item(status=' ', wc_rev=4),
+ })
+ svntest.actions.run_and_verify_commit(A_B_F_E_path, expected_output,
+ expected_status, [], wc_dir)
+
+ # Update the WC to bring /A/copy_of_B to rev 8.
+ # Without this update expected_status tree would be cumbersome to
+ # understand.
+ svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir)
+
+ # Merge changes from rev 4:8 of A/B into A/copy_of_B. A/copy_of_B/F/E1
+ # has explicit mergeinfo and exists at r4 in the merge source, so it
+ # should be treated as a subtree with intersecting mergeinfo and its
+ # mergeinfo updated.
+ expected_output = wc.State(copy_of_B_path, {
+ 'F/E/alpha' : Item(status='U ')
+ })
+ expected_mergeinfo_output = wc.State(copy_of_B_path, {
+ '' : Item(status=' U'),
+ 'F/E' : Item(status=' U')
+ })
+ expected_elision_output = wc.State(copy_of_B_path, {
+ 'F/E' : Item(status=' U')
+ })
+ expected_status = wc.State(copy_of_B_path, {
+ # The subtree mergeinfo on F/E1 is not updated because
+ # this merge does not affect that subtree.
+ '' : Item(status=' M', wc_rev=8),
+ 'F/E' : Item(status=' M', wc_rev=8),
+ 'F/E/alpha' : Item(status='M ', wc_rev=8),
+ 'F/E/beta' : Item(status=' ', wc_rev=8),
+ 'F/E1' : Item(status=' ', wc_rev=8),
+ 'F/E1/alpha' : Item(status=' ', wc_rev=8),
+ 'F/E1/beta' : Item(status=' ', wc_rev=8),
+ 'lambda' : Item(status=' ', wc_rev=8),
+ 'F' : Item(status=' ', wc_rev=8)
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:5-8'}),
+ 'F/E' : Item(props={}), # elision!
+ 'F/E/alpha' : Item(new_content_for_alpha),
+ 'F/E/beta' : Item("This is the file 'beta'.\n"),
+ 'F' : Item(),
+ 'F/E1' : Item(props={SVN_PROP_MERGEINFO :
+ '/A/B/F/E:5'}),
+ 'F/E1/alpha' : Item(new_content_for_alpha1),
+ 'F/E1/beta' : Item("This is the file 'beta'.\n"),
+ 'lambda' : Item("This is the file 'lambda'.\n")
+ })
+ expected_skip = wc.State(copy_of_B_path, { })
+ svntest.actions.run_and_verify_merge(copy_of_B_path, '4', '8',
+ sbox.repo_url + '/A/B', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Test for part of Issue #2821, see
+ # http://subversion.tigris.org/issues/show_bug.cgi?id=2821#desc22
+ #
+ # Revert all local changes.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+
+ # Make a text mod to A/copy-of-B/F/E/alpha
+ newer_content_for_alpha = "Conflicting content"
+ svntest.main.file_write(A_copy_of_B_F_E_alpha_path,
+ newer_content_for_alpha)
+
+ # Re-merge r5 to A/copy-of-B/F, this *should* be a no-op as the mergeinfo
+ # on A/copy-of-B/F/E should prevent any attempt to merge r5 into that
+ # subtree. The merge will leave a few local changes as mergeinfo is set
+ # on A/copy-of-B/F, the mergeinfo on A/copy-of-B/F/E elides to it. The
+ # mergeinfo on A/copy-of-B/F/E1 remains unchanged as that subtree was
+ # untouched by the merge.
+ expected_output = wc.State(copy_of_B_F_path, {})
+ expected_mergeinfo_output = wc.State(copy_of_B_F_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(copy_of_B_F_path, {
+ 'E' : Item(status=' U')
+ })
+ expected_status = wc.State(copy_of_B_F_path, {
+ '' : Item(status=' M', wc_rev=8),
+ 'E' : Item(status=' M', wc_rev=8),
+ 'E/alpha' : Item(status='M ', wc_rev=8),
+ 'E/beta' : Item(status=' ', wc_rev=8),
+ 'E1' : Item(status=' ', wc_rev=8),
+ 'E1/alpha' : Item(status=' ', wc_rev=8),
+ 'E1/beta' : Item(status=' ', wc_rev=8),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B/F:5'}),
+ 'E' : Item(props={}),
+ 'E/alpha' : Item(newer_content_for_alpha),
+ 'E/beta' : Item("This is the file 'beta'.\n"),
+ 'E1' : Item(props={SVN_PROP_MERGEINFO :
+ '/A/B/F/E:5'}),
+ 'E1/alpha' : Item(new_content_for_alpha1),
+ 'E1/beta' : Item("This is the file 'beta'.\n")
+ })
+ expected_skip = wc.State(copy_of_B_F_path, { })
+ svntest.actions.run_and_verify_merge(copy_of_B_F_path, '4', '5',
+ sbox.repo_url + '/A/B/F', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+def tweak_src_then_merge_to_dest(sbox, src_path, dst_path,
+ canon_src_path, contents, cur_rev):
+ """Edit src and commit it. This results in new_rev.
+ Merge new_rev to dst_path. Return new_rev."""
+
+ # canon_src_path is the repository-relative path of src_path; it is used
+ # below to build the merge source URL. contents overwrites src_path.
+ wc_dir = sbox.wc_dir
+ new_rev = cur_rev + 1
+ svntest.main.file_write(src_path, contents)
+
+ expected_output = svntest.wc.State(src_path, {
+ '': Item(verb='Sending'),
+ })
+
+ expected_status = wc.State(src_path,
+ { '': Item(wc_rev=new_rev, status=' ')})
+
+ svntest.actions.run_and_verify_commit(src_path, expected_output,
+ expected_status)
+
+ # Update the WC to new_rev so that it would be easier to expect everyone
+ # to be at new_rev.
+ svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir)
+
+ # Merge new_rev of src_path to dst_path.
+
+ # 'MM' = text mod from the merge plus the mergeinfo property mod.
+ expected_status = wc.State(dst_path,
+ { '': Item(wc_rev=new_rev, status='MM')})
+
+ merge_url = sbox.repo_url + '/' + canon_src_path
+ if sys.platform == 'win32':
+ # os.path.join produced backslashes; URLs need forward slashes.
+ merge_url = merge_url.replace('\\', '/')
+
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[new_rev]],
+ ['U ' + dst_path + '\n',
+ ' U ' + dst_path + '\n']),
+ [], 'merge', '-c', str(new_rev), merge_url, dst_path)
+
+ svntest.actions.run_and_verify_status(dst_path, expected_status)
+
+ return new_rev
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def obey_reporter_api_semantics_while_doing_subtree_merges(sbox):
+ "drive reporter api in depth first order"
+
+ # Copy /A/D to /A/copy-of-D it results in rONE.
+ # Create children at different hierarchies having some merge-info
+ # to test the set_path calls on a reporter in a depth-first order.
+ # On all 'file' descendants of /A/copy-of-D/ we run merges.
+ # We create /A/D/umlaut directly over URL it results in rev rTWO.
+ # When we merge rONE+1:TWO of /A/D on /A/copy-of-D it should merge smoothly.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A_path = sbox.ospath('A')
+ A_D_path = sbox.ospath('A/D')
+ copy_of_A_D_path = sbox.ospath('A/copy-of-D')
+
+ # Copy A/D to A/copy-of-D and commit, creating revision 2.
+ svntest.main.run_svn(None, "cp", A_D_path, copy_of_A_D_path)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/copy-of-D' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/copy-of-D' : Item(status=' ', wc_rev=2),
+ 'A/copy-of-D/G' : Item(status=' ', wc_rev=2),
+ 'A/copy-of-D/G/pi' : Item(status=' ', wc_rev=2),
+ 'A/copy-of-D/G/rho' : Item(status=' ', wc_rev=2),
+ 'A/copy-of-D/G/tau' : Item(status=' ', wc_rev=2),
+ 'A/copy-of-D/H' : Item(status=' ', wc_rev=2),
+ 'A/copy-of-D/H/chi' : Item(status=' ', wc_rev=2),
+ 'A/copy-of-D/H/omega' : Item(status=' ', wc_rev=2),
+ 'A/copy-of-D/H/psi' : Item(status=' ', wc_rev=2),
+ 'A/copy-of-D/gamma' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+
+ # For each file under A/D: edit the original, commit, and merge that
+ # revision to the corresponding file under A/copy-of-D (one revision
+ # per file, r3..r9).
+ cur_rev = 2
+ for path in (["A", "D", "G", "pi"],
+ ["A", "D", "G", "rho"],
+ ["A", "D", "G", "tau"],
+ ["A", "D", "H", "chi"],
+ ["A", "D", "H", "omega"],
+ ["A", "D", "H", "psi"],
+ ["A", "D", "gamma"]):
+ path_name = os.path.join(wc_dir, *path)
+ canon_path_name = os.path.join(*path)
+ path[1] = "copy-of-D"
+ copy_of_path_name = os.path.join(wc_dir, *path)
+ # Stash the contents in a module-level variable named
+ # new_content_for_<file>; expected_disk below reads these back.
+ var_name = 'new_content_for_' + path[len(path) - 1]
+ file_contents = "new content to " + path[len(path) - 1] + "\n"
+ globals()[var_name] = file_contents
+ cur_rev = tweak_src_then_merge_to_dest(sbox, path_name,
+ copy_of_path_name, canon_path_name,
+ file_contents, cur_rev)
+
+ copy_of_A_D_wc_rev = cur_rev
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision ' + str(cur_rev+1) +
+ '.\n'],
+ [],
+ 'mkdir', sbox.repo_url + '/A/D/umlaut',
+ '-m', "log msg")
+ rev_to_merge_to_copy_of_D = cur_rev + 1
+
+ # All the file descendants of /A/copy-of-D/ have already been merged
+ # so the only notification we expect is for the added 'umlaut'.
+ expected_output = wc.State(copy_of_A_D_path, {
+ 'umlaut' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(copy_of_A_D_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(copy_of_A_D_path, {
+ })
+ # No subtree with explicit mergeinfo is affected by this merge, so they
+ # all remain unchanged from before the merge. The only mergeinfo updated
+ # is that on the target 'A/copy-of-D.
+ expected_status = wc.State(copy_of_A_D_path, {
+ '' : Item(status=' M', wc_rev=copy_of_A_D_wc_rev),
+ 'G' : Item(status=' ', wc_rev=copy_of_A_D_wc_rev),
+ 'G/pi' : Item(status='MM', wc_rev=copy_of_A_D_wc_rev),
+ 'G/rho' : Item(status='MM', wc_rev=copy_of_A_D_wc_rev),
+ 'G/tau' : Item(status='MM', wc_rev=copy_of_A_D_wc_rev),
+ 'H' : Item(status=' ', wc_rev=copy_of_A_D_wc_rev),
+ 'H/chi' : Item(status='MM', wc_rev=copy_of_A_D_wc_rev),
+ 'H/omega' : Item(status='MM', wc_rev=copy_of_A_D_wc_rev),
+ 'H/psi' : Item(status='MM', wc_rev=copy_of_A_D_wc_rev),
+ 'gamma' : Item(status='MM', wc_rev=copy_of_A_D_wc_rev),
+ 'umlaut' : Item(status='A ', copied='+', wc_rev='-'),
+ })
+
+ merged_rangelist = "3-%d" % rev_to_merge_to_copy_of_D
+
+ # The new_content_for_* names below were injected via globals() above.
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D:' + merged_rangelist}),
+ 'G' : Item(),
+ 'G/pi' : Item(new_content_for_pi,
+ props={SVN_PROP_MERGEINFO : '/A/D/G/pi:3'}),
+ 'G/rho' : Item(new_content_for_rho,
+ props={SVN_PROP_MERGEINFO : '/A/D/G/rho:4'}),
+ 'G/tau' : Item(new_content_for_tau,
+ props={SVN_PROP_MERGEINFO : '/A/D/G/tau:5'}),
+ 'H' : Item(),
+ 'H/chi' : Item(new_content_for_chi,
+ props={SVN_PROP_MERGEINFO : '/A/D/H/chi:6'}),
+ 'H/omega' : Item(new_content_for_omega,
+ props={SVN_PROP_MERGEINFO : '/A/D/H/omega:7'}),
+ 'H/psi' : Item(new_content_for_psi,
+ props={SVN_PROP_MERGEINFO : '/A/D/H/psi:8'}),
+ 'gamma' : Item(new_content_for_gamma,
+ props={SVN_PROP_MERGEINFO : '/A/D/gamma:9'}),
+ 'umlaut' : Item(),
+ })
+ expected_skip = wc.State(copy_of_A_D_path, { })
+ svntest.actions.run_and_verify_merge(copy_of_A_D_path,
+ 2,
+ str(rev_to_merge_to_copy_of_D),
+ sbox.repo_url + '/A/D', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+@Issues(2733,2734)
+def mergeinfo_inheritance(sbox):
+ "target inherits mergeinfo from nearest ancestor"
+
+ # Test for Issues #2733 and #2734.
+ #
+ # When the target of a merge has no explicit mergeinfo and the merge
+ # would result in mergeinfo being added to the target which...
+ #
+ # ...is a subset of the *local* mergeinfo on one of the target's
+ # ancestors (its nearest ancestor takes precedence), then the merge is
+ # not repeated and no mergeinfo should be set on the target (Issue #2734).
+ #
+ # OR
+ #
+ # ...is not a subset of its nearest ancestor's, the target should inherit
+ # the non-intersecting mergeinfo (local or committed, the former takes
+ # precedence) from its nearest ancestor (Issue #2733).
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ # Some paths we'll care about
+ A_COPY_path = sbox.ospath('A_COPY')
+ B_COPY_path = sbox.ospath('A_COPY/B')
+ beta_COPY_path = sbox.ospath('A_COPY/B/E/beta')
+ E_COPY_path = sbox.ospath('A_COPY/B/E')
+ omega_COPY_path = sbox.ospath('A_COPY/D/H/omega')
+ D_COPY_path = sbox.ospath('A_COPY/D')
+ G_COPY_path = sbox.ospath('A_COPY/D/G')
+
+ # Now start merging...
+
+ # Merge r4 into A_COPY/D/
+ expected_output = wc.State(D_COPY_path, {
+ 'G/rho' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(D_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(D_COPY_path, {
+ })
+ expected_status = wc.State(D_COPY_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'G' : Item(status=' ', wc_rev=2),
+ 'G/pi' : Item(status=' ', wc_rev=2),
+ 'G/rho' : Item(status='M ', wc_rev=2),
+ 'G/tau' : Item(status=' ', wc_rev=2),
+ 'H' : Item(status=' ', wc_rev=2),
+ 'H/chi' : Item(status=' ', wc_rev=2),
+ 'H/psi' : Item(status=' ', wc_rev=2),
+ 'H/omega' : Item(status=' ', wc_rev=2),
+ 'gamma' : Item(status=' ', wc_rev=2),
+ })
+ # We test issue #2733 here (with a directory as the merge target).
+ # r1 should be inherited from 'A_COPY'.
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D:4'}),
+ 'G' : Item(),
+ 'G/pi' : Item("This is the file 'pi'.\n"),
+ 'G/rho' : Item("New content"),
+ 'G/tau' : Item("This is the file 'tau'.\n"),
+ 'H' : Item(),
+ 'H/chi' : Item("This is the file 'chi'.\n"),
+ 'H/psi' : Item("This is the file 'psi'.\n"),
+ 'H/omega' : Item("This is the file 'omega'.\n"),
+ 'gamma' : Item("This is the file 'gamma'.\n")
+ })
+ expected_skip = wc.State(D_COPY_path, { })
+ svntest.actions.run_and_verify_merge(D_COPY_path, '3', '4',
+ sbox.repo_url + '/A/D', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Merge r4 again, this time into A_COPY/D/G. An ancestor directory
+ # (A_COPY/D) exists with identical local mergeinfo, so the merge
+ # should not be repeated. We test issue #2734 here with (with a
+ # directory as the merge target).
+ expected_output = wc.State(G_COPY_path, { })
+ # A_COPY/D/G gets mergeinfo set, but it immediately elides to A_COPY/D.
+ expected_mergeinfo_output = wc.State(G_COPY_path, {
+ '' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(G_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_status = wc.State(G_COPY_path, {
+ '' : Item(status=' ', wc_rev=2),
+ 'pi' : Item(status=' ', wc_rev=2),
+ 'rho' : Item(status='M ', wc_rev=2),
+ 'tau' : Item(status=' ', wc_rev=2),
+ })
+ expected_disk = wc.State('', {
+ 'pi' : Item("This is the file 'pi'.\n"),
+ 'rho' : Item("New content"),
+ 'tau' : Item("This is the file 'tau'.\n"),
+ })
+ expected_skip = wc.State(G_COPY_path, { })
+ svntest.actions.run_and_verify_merge(G_COPY_path, '3', '4',
+ sbox.repo_url + '/A/D/G', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Merge r5 into A_COPY/B. Again, r1 should be inherited from
+ # A_COPY (Issue #2733)
+ expected_output = wc.State(B_COPY_path, {
+ 'E/beta' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(B_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(B_COPY_path, {
+ })
+ expected_status = wc.State(B_COPY_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'E' : Item(status=' ', wc_rev=2),
+ 'E/alpha' : Item(status=' ', wc_rev=2),
+ 'E/beta' : Item(status='M ', wc_rev=2),
+ 'lambda' : Item(status=' ', wc_rev=2),
+ 'F' : Item(status=' ', wc_rev=2),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:5'}),
+ 'E' : Item(),
+ 'E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'E/beta' : Item("New content"),
+ 'F' : Item(),
+ 'lambda' : Item("This is the file 'lambda'.\n")
+ })
+ expected_skip = wc.State(B_COPY_path, { })
+
+ svntest.actions.run_and_verify_merge(B_COPY_path, '4', '5',
+ sbox.repo_url + '/A/B', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Merge r5 again, this time into A_COPY/B/E/beta. An ancestor
+ # directory (A_COPY/B) exists with identical local mergeinfo, so
+ # the merge should not be repeated (Issue #2734 with a file as the
+ # merge target).
+ expected_skip = wc.State(beta_COPY_path, { })
+
+ # run_and_verify_merge doesn't support merging to a file WCPATH
+ # so use run_and_verify_svn.
+ ### TODO: We can use run_and_verify_merge() here now.
+ svntest.actions.run_and_verify_svn([], [], 'merge', '-c5',
+ sbox.repo_url + '/A/B/E/beta',
+ beta_COPY_path)
+
+ # The merge wasn't repeated so beta shouldn't have any mergeinfo.
+ # We are implicitly testing that without looking at the prop value
+ # itself, just beta's prop modification status.
+ expected_status = wc.State(beta_COPY_path, {
+ '' : Item(status='M ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_status(beta_COPY_path, expected_status)
+
+ # Merge r3 into A_COPY. A_COPY's has two subtrees with mergeinfo,
+ # A_COPY/B/E/beta and A_COPY/D. Only the latter is effected by this
+ # merge so only its mergeinfo is updated to include r3.
+ expected_output = wc.State(A_COPY_path, {
+ 'D/H/psi' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'D' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'B' : Item(status=' M', wc_rev=2),
+ 'mu' : Item(status=' ', wc_rev=2),
+ 'B/E' : Item(status=' ', wc_rev=2),
+ 'B/E/alpha' : Item(status=' ', wc_rev=2),
+ 'B/E/beta' : Item(status='M ', wc_rev=2),
+ 'B/lambda' : Item(status=' ', wc_rev=2),
+ 'B/F' : Item(status=' ', wc_rev=2),
+ 'C' : Item(status=' ', wc_rev=2),
+ 'D' : Item(status=' M', wc_rev=2),
+ 'D/G' : Item(status=' ', wc_rev=2),
+ 'D/G/pi' : Item(status=' ', wc_rev=2),
+ 'D/G/rho' : Item(status='M ', wc_rev=2),
+ 'D/G/tau' : Item(status=' ', wc_rev=2),
+ 'D/gamma' : Item(status=' ', wc_rev=2),
+ 'D/H' : Item(status=' ', wc_rev=2),
+ 'D/H/chi' : Item(status=' ', wc_rev=2),
+ 'D/H/psi' : Item(status='M ', wc_rev=2),
+ 'D/H/omega' : Item(status=' ', wc_rev=2),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:3'}),
+ 'B' : Item(props={SVN_PROP_MERGEINFO : '/A/B:5'}),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(props={SVN_PROP_MERGEINFO : '/A/D:3-4'}),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, '2', '3',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Merge r6 into A_COPY/D/H/omega, it should inherit it's nearest
+ # ancestor's (A_COPY/D) mergeinfo (Issue #2733 with a file as the
+ # merge target).
+ expected_skip = wc.State(omega_COPY_path, { })
+ # run_and_verify_merge doesn't support merging to a file WCPATH
+ # so use run_and_verify_svn.
+ ### TODO: We can use run_and_verify_merge() here now.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[6]],
+ ['U ' + omega_COPY_path + '\n',
+ ' G ' + omega_COPY_path + '\n']),
+ [], 'merge', '-c6',
+ sbox.repo_url + '/A/D/H/omega',
+ omega_COPY_path)
+
+ # Check that mergeinfo was properly set on A_COPY/D/H/omega
+ svntest.actions.run_and_verify_svn(["/A/D/H/omega:3-4,6\n"],
+ [],
+ 'propget', SVN_PROP_MERGEINFO,
+ omega_COPY_path)
+
+ # Given a merge target *without* any of the following:
+ #
+ # 1) Explicit mergeinfo set on itself in the WC
+ # 2) Any WC ancestor to inherit mergeinfo from
+ # 3) Any mergeinfo for the target in the repository
+ #
+ # Check that the target still inherits mergeinfo from it's nearest
+ # repository ancestor.
+ #
+ # Commit all the merges thus far
+ expected_output = wc.State(wc_dir, {
+ 'A_COPY' : Item(verb='Sending'),
+ 'A_COPY/B' : Item(verb='Sending'),
+ 'A_COPY/B/E/beta' : Item(verb='Sending'),
+ 'A_COPY/D' : Item(verb='Sending'),
+ 'A_COPY/D/G/rho' : Item(verb='Sending'),
+ 'A_COPY/D/H/omega' : Item(verb='Sending'),
+ 'A_COPY/D/H/psi' : Item(verb='Sending'),
+ })
+ wc_status.tweak('A_COPY', 'A_COPY/B', 'A_COPY/B/E/beta', 'A_COPY/D',
+ 'A_COPY/D/G/rho', 'A_COPY/D/H/omega', 'A_COPY/D/H/psi',
+ wc_rev=7)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ wc_status)
+
+ # In single-db mode you can't create a disconnected working copy by just
+ # copying a subdir
+
+ ## Copy the subtree A_COPY/B/E from the working copy, making the
+ ## disconnected WC E_only.
+ #other_wc = sbox.add_wc_path('E_only')
+ #svntest.actions.duplicate_dir(E_COPY_path, other_wc)
+ #
+ ## Update the disconnected WC it so it will get the most recent mergeinfo
+ ## from the repos when merging.
+ #svntest.actions.run_and_verify_svn(exp_noop_up_out(7), [], 'up',
+ # other_wc)
+ #
+ ## Merge r5:4 into the root of the disconnected WC.
+ ## E_only has no explicit mergeinfo and since it's the root of the WC
+ ## cannot inherit any mergeinfo from a working copy ancestor path. Nor
+ ## does it have any mergeinfo explicitly set on it in the repository.
+ ## An ancestor path on the repository side, A_COPY/B does have the merge
+ ## info '/A/B:5' however and E_only should inherit this, resulting in
+ ## empty mergeinfo after the removal of r5 (A_COPY has mergeinfo of
+ ## '/A:3' so this empty mergeinfo is needed to override that.
+ #expected_output = wc.State(other_wc,
+ # {'beta' : Item(status='U ')})
+ #expected_mergeinfo_output = wc.State(other_wc, {
+ # '' : Item(status=' G')
+ # })
+ #expected_elision_output = wc.State(other_wc, {
+ # })
+ #expected_status = wc.State(other_wc, {
+ # '' : Item(status=' M', wc_rev=7),
+ # 'alpha' : Item(status=' ', wc_rev=7),
+ # 'beta' : Item(status='M ', wc_rev=7),
+ # })
+ #expected_disk = wc.State('', {
+ # '' : Item(props={SVN_PROP_MERGEINFO : ''}),
+ # 'alpha' : Item("This is the file 'alpha'.\n"),
+ # 'beta' : Item("This is the file 'beta'.\n"),
+ # })
+ #expected_skip = wc.State(other_wc, { })
+ #
+ #svntest.actions.run_and_verify_merge(other_wc, '5', '4',
+ # sbox.repo_url + '/A/B/E', None,
+ # expected_output,
+ # expected_mergeinfo_output,
+ # expected_elision_output,
+ # expected_disk,
+ # expected_status,
+ # expected_skip,
+ # check_props=True)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def mergeinfo_elision(sbox):
+ "mergeinfo elides to ancestor with identical info"
+
+ # When a merge would result in mergeinfo on a target which is identical
+ # to mergeinfo (local or committed) on one of the node's ancestors (the
+ # nearest ancestor takes precedence), then the mergeinfo elides from the
+ # target to the nearest ancestor (e.g. no mergeinfo is set on the target
+ # or committed mergeinfo is removed).
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ # Some paths we'll care about
+ A_COPY_path = sbox.ospath('A_COPY')
+ beta_COPY_path = sbox.ospath('A_COPY/B/E/beta')
+ G_COPY_path = sbox.ospath('A_COPY/D/G')
+
+ # Now start merging...
+
+ # Merge r5 into A_COPY/B/E/beta.
+ expected_skip = wc.State(beta_COPY_path, { })
+
+ # run_and_verify_merge doesn't support merging to a file WCPATH
+ # so use run_and_verify_svn.
+ ### TODO: We can use run_and_verify_merge() here now.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[5]],
+ ['U ' + beta_COPY_path + '\n',
+ ' U ' + beta_COPY_path + '\n']),
+ [], 'merge', '-c5',
+ sbox.repo_url + '/A/B/E/beta',
+ beta_COPY_path)
+
+ # Check beta's status and props.
+ expected_status = wc.State(beta_COPY_path, {
+ '' : Item(status='MM', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_status(beta_COPY_path, expected_status)
+
+ svntest.actions.run_and_verify_svn(["/A/B/E/beta:5\n"], [],
+ 'propget', SVN_PROP_MERGEINFO,
+ beta_COPY_path)
+
+ # Commit the merge
+ expected_output = wc.State(wc_dir, {
+ 'A_COPY/B/E/beta' : Item(verb='Sending'),
+ })
+ wc_status.tweak('A_COPY/B/E/beta', wc_rev=7)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ wc_status)
+
+ # Update A_COPY to get all paths to the same working revision.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(7), [],
+ 'up', wc_dir)
+ wc_status.tweak(wc_rev=7)
+
+ # Merge r4 into A_COPY/D/G.
+ expected_output = wc.State(G_COPY_path, {
+ 'rho' : Item(status='U ')
+ })
+ expected_mergeinfo_output = wc.State(G_COPY_path, {
+ '' : Item(status=' U')
+ })
+ expected_elision_output = wc.State(G_COPY_path, {
+ })
+ expected_status = wc.State(G_COPY_path, {
+ '' : Item(status=' M', wc_rev=7),
+ 'pi' : Item(status=' ', wc_rev=7),
+ 'rho' : Item(status='M ', wc_rev=7),
+ 'tau' : Item(status=' ', wc_rev=7),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/G:4'}),
+ 'pi' : Item("This is the file 'pi'.\n"),
+ 'rho' : Item("New content"),
+ 'tau' : Item("This is the file 'tau'.\n"),
+ })
+ expected_skip = wc.State(G_COPY_path, { })
+
+ svntest.actions.run_and_verify_merge(G_COPY_path, '3', '4',
+ sbox.repo_url + '/A/D/G', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Merge r3:6 into A_COPY. The merge doesn't touch either of A_COPY's
+ # subtrees with explicit mergeinfo, so those are left alone.
+ expected_output = wc.State(A_COPY_path, {
+ 'D/H/omega' : Item(status='U ')
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U')
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=7),
+ 'B' : Item(status=' ', wc_rev=7),
+ 'mu' : Item(status=' ', wc_rev=7),
+ 'B/E' : Item(status=' ', wc_rev=7),
+ 'B/E/alpha' : Item(status=' ', wc_rev=7),
+ 'B/E/beta' : Item(status=' ', wc_rev=7),
+ 'B/lambda' : Item(status=' ', wc_rev=7),
+ 'B/F' : Item(status=' ', wc_rev=7),
+ 'C' : Item(status=' ', wc_rev=7),
+ 'D' : Item(status=' ', wc_rev=7),
+ 'D/G' : Item(status=' M', wc_rev=7),
+ 'D/G/pi' : Item(status=' ', wc_rev=7),
+ 'D/G/rho' : Item(status='M ', wc_rev=7),
+ 'D/G/tau' : Item(status=' ', wc_rev=7),
+ 'D/gamma' : Item(status=' ', wc_rev=7),
+ 'D/H' : Item(status=' ', wc_rev=7),
+ 'D/H/chi' : Item(status=' ', wc_rev=7),
+ 'D/H/psi' : Item(status=' ', wc_rev=7),
+ 'D/H/omega' : Item(status='M ', wc_rev=7),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:4-6'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content",
+ props={SVN_PROP_MERGEINFO : '/A/B/E/beta:5'}),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(props={SVN_PROP_MERGEINFO : '/A/D/G:4'}),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, '3', '6',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+ # New repeat the above merge but with the --record-only option.
+ # This would result in identical mergeinfo
+ # (r4-6) on A_COPY and two of its descendants, A_COPY/D/G and
+ # A_COPY/B/E/beta, so the mergeinfo on the latter two should elide
+ # to A_COPY. In the case of A_COPY/D/G this means its wholly uncommitted
+ # mergeinfo is removed leaving no prop mods. In the case of
+ # A_COPY/B/E/beta its committed mergeinfo prop is removed leaving a prop
+ # change.
+
+ # to A_COPY.
+ expected_output = wc.State(A_COPY_path, {})
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' G'),
+ 'D/G' : Item(status=' G'),
+ 'B/E/beta' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ 'B/E/beta' : Item(status=' U'),
+ 'D/G' : Item(status=' U'),
+ })
+ expected_status.tweak('B/E/beta', status=' M')
+ expected_status.tweak('D/G', status=' ')
+ expected_disk.tweak('B/E/beta', 'D/G', props={})
+ svntest.actions.run_and_verify_merge(A_COPY_path, '3', '6',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True,
+ '--record-only',
+ A_COPY_path)
+
+ # Reverse merge r5 out of A_COPY/B/E/beta. The mergeinfo on
+ # A_COPY/B/E/beta which previously elided will now return,
+ # minus r5 of course.
+ expected_skip = wc.State(beta_COPY_path, { })
+
+ # run_and_verify_merge doesn't support merging to a file WCPATH
+ # so use run_and_verify_svn.
+ ### TODO: We can use run_and_verify_merge() here now.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[-5]],
+ ['U ' + beta_COPY_path + '\n',
+ ' G ' + beta_COPY_path + '\n']),
+ [], 'merge', '-c-5',
+ sbox.repo_url + '/A/B/E/beta',
+ beta_COPY_path)
+
+ # Check beta's status and props.
+ expected_status = wc.State(beta_COPY_path, {
+ '' : Item(status='MM', wc_rev=7),
+ })
+ svntest.actions.run_and_verify_status(beta_COPY_path, expected_status)
+
+ svntest.actions.run_and_verify_svn(["/A/B/E/beta:4,6\n"], [],
+ 'propget', SVN_PROP_MERGEINFO,
+ beta_COPY_path)
+
+ # Merge r5 back into A_COPY/B/E/beta. Now the mergeinfo on the merge
+ # target (A_COPY/B/E/beta) is identical to it's nearest ancestor with
+ # mergeinfo (A_COPY) and so the former should elide.
+ # run_and_verify_merge doesn't support merging to a file WCPATH
+ # so use run_and_verify_svn.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[5]],
+ ['G ' + beta_COPY_path + '\n',
+ ' G ' + beta_COPY_path + '\n', # Update mergeinfo
+ ' U ' + beta_COPY_path + '\n',], # Elide mereginfo,
+ elides=True),
+ [], 'merge', '-c5',
+ sbox.repo_url + '/A/B/E/beta',
+ beta_COPY_path)
+
+ # Check beta's status and props.
+ expected_status = wc.State(beta_COPY_path, {
+ '' : Item(status=' M', wc_rev=7),
+ })
+ svntest.actions.run_and_verify_status(beta_COPY_path, expected_status)
+
+ # Once again A_COPY/B/E/beta has no mergeinfo.
+ svntest.actions.run_and_verify_svn([], '.*W200017: Property.*not found',
+ 'propget', SVN_PROP_MERGEINFO,
+ beta_COPY_path)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def mergeinfo_inheritance_and_discontinuous_ranges(sbox):
+  "discontinuous merges produce correct mergeinfo"
+
+  # When a merge target has no explicit mergeinfo and is subject
+  # to multiple merges, the resulting mergeinfo on the target
+  # should reflect the combination of the inherited mergeinfo
+  # with each merge performed.
+  #
+  # Also tests implied merge source and target when only a revision
+  # range is specified.
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Some paths we'll care about
+  A_url = sbox.repo_url + '/A'
+  A_COPY_path = sbox.ospath('A_COPY')
+  D_COPY_path = sbox.ospath('A_COPY/D')
+  A_COPY_rho_path = sbox.ospath('A_COPY/D/G/rho')
+
+  expected_disk, expected_status = set_up_branch(sbox)
+
+  # Merge r4 into A_COPY.  Running from inside A_COPY exercises the
+  # implied merge target (only '-c4' and the source URL are given).
+  saved_cwd = os.getcwd()
+
+  os.chdir(A_COPY_path)
+  svntest.actions.run_and_verify_svn(
+    expected_merge_output([[4]],
+                          ['U ' + os.path.join("D", "G", "rho") + '\n',
+                           ' U .\n']),
+    [], 'merge', '-c4', A_url)
+  os.chdir(saved_cwd)
+
+  # Check the results of the merge.
+  expected_status.tweak("A_COPY", status=' M')
+  expected_status.tweak("A_COPY/D/G/rho", status='M ')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+  svntest.actions.run_and_verify_svn(["/A:4\n"], [],
+                                     'propget', SVN_PROP_MERGEINFO,
+                                     A_COPY_path)
+
+  # Merge r2:6 into A_COPY/D
+  #
+  # A_COPY/D should inherit the mergeinfo '/A:4' from A_COPY
+  # combine it with the discontinuous merges performed directly on
+  # it (A/D 2:3 and A/D 4:6) resulting in '/A/D:3-6'.
+  expected_output = wc.State(D_COPY_path, {
+    'H/psi'   : Item(status='U '),
+    'H/omega' : Item(status='U '),
+    })
+  expected_mergeinfo_output = wc.State(D_COPY_path, {
+    '' : Item(status=' G'),
+    })
+  expected_elision_output = wc.State(D_COPY_path, {
+    })
+  expected_status = wc.State(D_COPY_path, {
+    ''        : Item(status=' M', wc_rev=2),
+    'G'       : Item(status='  ', wc_rev=2),
+    'G/pi'    : Item(status='  ', wc_rev=2),
+    'G/rho'   : Item(status='M ', wc_rev=2),
+    'G/tau'   : Item(status='  ', wc_rev=2),
+    'H'       : Item(status='  ', wc_rev=2),
+    'H/chi'   : Item(status='  ', wc_rev=2),
+    'H/psi'   : Item(status='M ', wc_rev=2),
+    'H/omega' : Item(status='M ', wc_rev=2),
+    'gamma'   : Item(status='  ', wc_rev=2),
+    })
+  expected_disk = wc.State('', {
+    ''        : Item(props={SVN_PROP_MERGEINFO : '/A/D:3-6'}),
+    'G'       : Item(),
+    'G/pi'    : Item("This is the file 'pi'.\n"),
+    'G/rho'   : Item("New content"),
+    'G/tau'   : Item("This is the file 'tau'.\n"),
+    'H'       : Item(),
+    'H/chi'   : Item("This is the file 'chi'.\n"),
+    'H/psi'   : Item("New content"),
+    'H/omega' : Item("New content"),
+    'gamma'   : Item("This is the file 'gamma'.\n")
+    })
+  expected_skip = wc.State(D_COPY_path, { })
+
+  svntest.actions.run_and_verify_merge(D_COPY_path, '2', '6',
+                                       sbox.repo_url + '/A/D', None,
+                                       expected_output,
+                                       expected_mergeinfo_output,
+                                       expected_elision_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       check_props=True)
+
+  # Wipe the memory of a portion of the previous merge...
+  ### It'd be nice to use 'merge --record-only' here, but we can't (yet)
+  ### wipe all ranges for a file due to the bug pointed out in r864719.
+  mu_copy_path = os.path.join(A_COPY_path, 'mu')
+  svntest.actions.run_and_verify_svn(["property '" + SVN_PROP_MERGEINFO
+                                      + "' set on '" +
+                                      mu_copy_path + "'\n"], [], 'propset',
+                                     SVN_PROP_MERGEINFO, '', mu_copy_path)
+  # ...and confirm that we can commit the wiped mergeinfo...
+  expected_output = wc.State(wc_dir, {
+    'A_COPY/mu' : Item(verb='Sending'),
+    })
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        None,
+                                        [],
+                                        mu_copy_path)
+  # ...and that the presence of the property is retained, even when
+  # the value has been wiped.
+  svntest.actions.run_and_verify_svn(['\n'], [], 'propget',
+                                     SVN_PROP_MERGEINFO, mu_copy_path)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+@Issue(2754)
+def merge_to_target_with_copied_children(sbox):
+  "merge works when target has copied children"
+
+  # Test for Issue #2754 Can't merge to target with copied/moved children
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  expected_disk, expected_status = set_up_branch(sbox)
+
+  # Some paths we'll care about
+  D_COPY_path = sbox.ospath('A_COPY/D')
+  G_COPY_path = sbox.ospath('A_COPY/D/G')
+  rho_COPY_COPY_path = sbox.ospath('A_COPY/D/G/rho_copy')
+
+  # URL to URL copy A_COPY/D/G/rho to A_COPY/D/G/rho_copy
+  # (r7, committed directly in the repository).
+  svntest.actions.run_and_verify_svn(None, [], 'copy',
+                                     sbox.repo_url + '/A_COPY/D/G/rho',
+                                     sbox.repo_url + '/A_COPY/D/G/rho_copy',
+                                     '-m', 'copy')
+
+  # Update WC.
+  expected_output = wc.State(wc_dir,
+                             {'A_COPY/D/G/rho_copy' : Item(status='A ')})
+  expected_disk.add({
+    'A_COPY/D/G/rho_copy' : Item("This is the file 'rho'.\n", props={})
+    })
+  expected_status.tweak(wc_rev=7)
+  expected_status.add({'A_COPY/D/G/rho_copy' : Item(status='  ', wc_rev=7)})
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        check_props=True)
+
+  # Merge r4 into A_COPY/D/G/rho_copy.  This gives the copied child its
+  # own explicit mergeinfo before the parent-directory merge below.
+  svntest.actions.run_and_verify_svn(
+    expected_merge_output([[4]],
+                          ['U ' + rho_COPY_COPY_path + '\n',
+                           ' U ' + rho_COPY_COPY_path + '\n']),
+    [], 'merge', '-c4',
+    sbox.repo_url + '/A/D/G/rho',
+    rho_COPY_COPY_path)
+
+  # Merge r3:5 into A_COPY/D/G.  The copied child rho_copy keeps its own
+  # mergeinfo ('MM' status: text and prop mods) rather than breaking the
+  # merge (the Issue #2754 regression).
+  expected_output = wc.State(G_COPY_path, {
+    'rho' : Item(status='U ')
+    })
+  expected_mergeinfo_output = wc.State(G_COPY_path, {
+    '' : Item(status=' U'),
+    })
+  expected_elision_output = wc.State(G_COPY_path, {
+    })
+  expected_status = wc.State(G_COPY_path, {
+    ''         : Item(status=' M', wc_rev=7),
+    'pi'       : Item(status='  ', wc_rev=7),
+    'rho'      : Item(status='M ', wc_rev=7),
+    'rho_copy' : Item(status='MM', wc_rev=7),
+    'tau'      : Item(status='  ', wc_rev=7),
+    })
+  expected_disk = wc.State('', {
+    ''         : Item(props={SVN_PROP_MERGEINFO : '/A/D/G:4-5'}),
+    'pi'       : Item("This is the file 'pi'.\n"),
+    'rho'      : Item("New content"),
+    'rho_copy' : Item("New content",
+                      props={SVN_PROP_MERGEINFO : '/A/D/G/rho:4'}),
+    'tau'      : Item("This is the file 'tau'.\n"),
+    })
+  expected_skip = wc.State(G_COPY_path, { })
+  svntest.actions.run_and_verify_merge(G_COPY_path, '3', '5',
+                                       sbox.repo_url + '/A/D/G', None,
+                                       expected_output,
+                                       expected_mergeinfo_output,
+                                       expected_elision_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       check_props=True)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+@Issue(3188)
+def merge_to_switched_path(sbox):
+  "merge to switched path does not inherit or elide"
+
+  # When the target of a merge is a switched path we don't inherit WC
+  # mergeinfo from above the target or attempt to elide the mergeinfo
+  # set on the target as a result of the merge.
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  wc_disk, wc_status = set_up_branch(sbox)
+
+  # Some paths we'll care about
+  A_COPY_path = sbox.ospath('A_COPY')
+  A_COPY_D_path = sbox.ospath('A_COPY/D')
+  G_COPY_path = sbox.ospath('A/D/G_COPY')
+  A_COPY_D_G_path = sbox.ospath('A_COPY/D/G')
+  A_COPY_D_G_rho_path = sbox.ospath('A_COPY/D/G/rho')
+
+  expected = svntest.verify.UnorderedOutput(
+         ["A " + os.path.join(G_COPY_path, "pi") + "\n",
+          "A " + os.path.join(G_COPY_path, "rho") + "\n",
+          "A " + os.path.join(G_COPY_path, "tau") + "\n",
+          "Checked out revision 6.\n",
+          "A         " + G_COPY_path + "\n"])
+
+  # r7 - Copy A/D/G to A/D/G_COPY and commit.
+  svntest.actions.run_and_verify_svn(expected, [], 'copy',
+                                     sbox.repo_url + "/A/D/G",
+                                     G_COPY_path)
+
+  expected_output = wc.State(wc_dir, {'A/D/G_COPY' : Item(verb='Adding')})
+  wc_status.add({
+    "A/D/G_COPY"     : Item(status='  ', wc_rev=7),
+    "A/D/G_COPY/pi"  : Item(status='  ', wc_rev=7),
+    "A/D/G_COPY/rho" : Item(status='  ', wc_rev=7),
+    "A/D/G_COPY/tau" : Item(status='  ', wc_rev=7),
+    })
+
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+
+  # r8 - modify and commit A/D/G_COPY/rho
+  svntest.main.file_write(sbox.ospath('A/D/G_COPY/rho'),
+                          "New *and* improved rho content")
+  expected_output = wc.State(wc_dir, {'A/D/G_COPY/rho' : Item(verb='Sending')})
+  wc_status.tweak('A/D/G_COPY/rho', wc_rev=8)
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+
+  # Switch A_COPY/D/G to A/D/G.
+  wc_disk.add({
+    "A"              : Item(),
+    "A/D/G_COPY"     : Item(),
+    "A/D/G_COPY/pi"  : Item("This is the file 'pi'.\n"),
+    "A/D/G_COPY/rho" : Item("New *and* improved rho content"),
+    "A/D/G_COPY/tau" : Item("This is the file 'tau'.\n"),
+    })
+  wc_disk.tweak('A_COPY/D/G/rho',contents="New content")
+  wc_status.tweak("A_COPY/D/G", wc_rev=8, switched='S')
+  wc_status.tweak("A_COPY/D/G/pi", wc_rev=8)
+  wc_status.tweak("A_COPY/D/G/rho", wc_rev=8)
+  wc_status.tweak("A_COPY/D/G/tau", wc_rev=8)
+  expected_output = svntest.wc.State(sbox.wc_dir, {
+    "A_COPY/D/G/rho"         : Item(status='U '),
+    })
+  svntest.actions.run_and_verify_switch(sbox.wc_dir, A_COPY_D_G_path,
+                                        sbox.repo_url + "/A/D/G",
+                                        expected_output, wc_disk, wc_status,
+                                        [], 1)
+
+  # Update working copy to allow elision (if any).
+  svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [],
+                                     'up', wc_dir)
+
+  # Set some mergeinfo on a working copy parent of our switched subtree
+  # A_COPY/D/G.  Because the subtree is switched it should *not* inherit
+  # this mergeinfo.
+  svntest.actions.run_and_verify_svn(["property '" + SVN_PROP_MERGEINFO +
+                                      "' set on '" + A_COPY_path + "'" +
+                                      "\n"], [], 'ps', SVN_PROP_MERGEINFO,
+                                     '/A:4', A_COPY_path)
+
+  # Merge r8 from A/D/G_COPY into our switched target A_COPY/D/G.
+  # A_COPY/D/G should get mergeinfo for r8 as a result of the merge,
+  # but because it's switched should not inherit the mergeinfo from
+  # its nearest WC ancestor with mergeinfo (A_COPY: svn:mergeinfo : /A:4)
+  expected_output = wc.State(A_COPY_D_G_path, {
+    'rho' : Item(status='U ')
+    })
+  expected_mergeinfo_output = wc.State(A_COPY_D_G_path, {
+    '' : Item(status=' U')
+    })
+  expected_elision_output = wc.State(A_COPY_D_G_path, {
+    })
+  # Note: A_COPY/D/G won't show as switched.
+  expected_status = wc.State(A_COPY_D_G_path, {
+    ''    : Item(status=' M', wc_rev=8),
+    'pi'  : Item(status='  ', wc_rev=8),
+    'rho' : Item(status='M ', wc_rev=8),
+    'tau' : Item(status='  ', wc_rev=8),
+    })
+  expected_status.tweak('', switched='S')
+  expected_disk = wc.State('', {
+    ''    : Item(props={SVN_PROP_MERGEINFO : '/A/D/G_COPY:8'}),
+    'pi'  : Item("This is the file 'pi'.\n"),
+    'rho' : Item("New *and* improved rho content"),
+    'tau' : Item("This is the file 'tau'.\n"),
+    })
+  expected_skip = wc.State(A_COPY_D_G_path, { })
+
+  svntest.actions.run_and_verify_merge(A_COPY_D_G_path, '7', '8',
+                                       sbox.repo_url + '/A/D/G_COPY', None,
+                                       expected_output,
+                                       expected_mergeinfo_output,
+                                       expected_elision_output,
+                                       expected_disk,
+                                       expected_status, expected_skip,
+                                       check_props=True)
+
+  # Check that the mergeinfo set on a switched target can elide to the
+  # repository.
+  #
+  # Specifically this is testing the "switched target" portions of
+  # issue #3188 'Mergeinfo on switched targets/subtrees should
+  # elide to repos'.
+  #
+  # Revert the previous merge and manually set 'svn:mergeinfo : /A/D:4'
+  # on 'merge_tests-1\A_COPY\D'.  Now merge -c-4 from /A/D/G_COPY into
+  # A_COPY/D/G.
+  # This should produce no mergeinfo on A_COPY/D/G'.  If the A_COPY/D/G was
+  # unswitched this merge would normally set empty mergeinfo on A_COPY/D/G,
+  # but as it is switched this empty mergeinfo just elides to the
+  # repository (empty mergeinfo on a path can elide if that path doesn't
+  # inherit *any* mergeinfo).
+  svntest.actions.run_and_verify_svn(["Reverted '" + A_COPY_path+ "'\n",
+                                      "Reverted '" + A_COPY_D_G_path+ "'\n",
+                                      "Reverted '" + A_COPY_D_G_rho_path +
+                                      "'\n"],
+                                     [], 'revert', '-R', wc_dir)
+  svntest.actions.run_and_verify_svn(["property '" + SVN_PROP_MERGEINFO +
+                                      "' set on '" + A_COPY_D_path+ "'" +
+                                      "\n"], [], 'ps', SVN_PROP_MERGEINFO,
+                                     '/A/D:4', A_COPY_D_path)
+  svntest.actions.run_and_verify_svn(
+    expected_merge_output([[-4]],
+                          ['U ' + A_COPY_D_G_rho_path + '\n',
+                           ' U ' + A_COPY_D_G_path + '\n'],
+                          elides=True),
+    [], 'merge', '-c-4',
+    sbox.repo_url + '/A/D/G_COPY',
+    A_COPY_D_G_path)
+  wc_status.tweak("A_COPY/D", status=' M')
+  wc_status.tweak("A_COPY/D/G/rho", status='M ')
+  wc_status.tweak(wc_rev=8)
+  svntest.actions.run_and_verify_status(wc_dir, wc_status)
+  check_mergeinfo_recursively(A_COPY_D_path,
+                              { A_COPY_D_path : '/A/D:4' })
+
+#----------------------------------------------------------------------
+# Test for issues
+#
+# 2823: Account for mergeinfo differences for switched
+# directories when gathering mergeinfo
+#
+# 2839: Support non-inheritable mergeinfo revision ranges
+#
+# 3187: Reverse merges don't work properly with
+# non-inheritable ranges.
+#
+# 3188: Mergeinfo on switched targets/subtrees should
+# elide to repos
+@SkipUnless(server_has_mergeinfo)
+@Issue(2823,2839,3187,3188,4056)
+def merge_to_path_with_switched_children(sbox):
+ "merge to path with switched children"
+
+ # Merging to a target with switched children requires special handling
+ # to keep mergeinfo correct:
+ #
+ # 1) If the target of a merge has switched children without explicit
+ # mergeinfo, the switched children should get mergeinfo set on
+ # them as a result of the merge. This mergeinfo includes the
+ # mergeinfo resulting from the merge *and* any mergeinfo inherited
+ # from the repos for the switched path.
+ #
+ # 2) Mergeinfo on switched children should never elide.
+ #
+ # 3) The path the switched child overrides cannot be modified by the
+ # merge (it isn't present in the WC) so should not inherit any
+ # mergeinfo added as a result of the merge. To prevent this, the
+ # immediate parent of any switched child should have non-inheritable
+ # mergeinfo added/modified for the merge performed.
+ #
+ # 4) Because of 3, siblings of switched children will not inherit the
+ # mergeinfo resulting from the merge, so must get their own, full set
+ # of mergeinfo.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox, False, 3)
+
+ # Some paths we'll care about
+ D_path = sbox.ospath('A/D')
+ A_COPY_path = sbox.ospath('A_COPY')
+ A_COPY_beta_path = sbox.ospath('A_COPY/B/E/beta')
+ A_COPY_chi_path = sbox.ospath('A_COPY/D/H/chi')
+ A_COPY_omega_path = sbox.ospath('A_COPY/D/H/omega')
+ A_COPY_psi_path = sbox.ospath('A_COPY/D/H/psi')
+ A_COPY_G_path = sbox.ospath('A_COPY/D/G')
+ A_COPY_rho_path = sbox.ospath('A_COPY/D/G/rho')
+ A_COPY_H_path = sbox.ospath('A_COPY/D/H')
+ A_COPY_D_path = sbox.ospath('A_COPY/D')
+ A_COPY_gamma_path = sbox.ospath('A_COPY/D/gamma')
+ H_COPY_2_path = sbox.ospath('A_COPY_2/D/H')
+
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [], 'up',
+ wc_dir)
+ wc_status.tweak(wc_rev=8)
+
+ # Switch a file and dir path in the branch:
+
+ # Switch A_COPY/D/G to A_COPY_2/D/G.
+ wc_status.tweak("A_COPY/D/G", switched='S')
+ expected_output = svntest.wc.State(sbox.wc_dir, {})
+ svntest.actions.run_and_verify_switch(sbox.wc_dir, A_COPY_G_path,
+ sbox.repo_url + "/A_COPY_2/D/G",
+ expected_output, wc_disk, wc_status,
+ [], 1)
+
+ # Switch A_COPY/D/G/rho to A_COPY_3/D/G/rho.
+ wc_status.tweak("A_COPY/D/G/rho", switched='S')
+ expected_output = svntest.wc.State(sbox.wc_dir, {})
+ svntest.actions.run_and_verify_switch(sbox.wc_dir, A_COPY_rho_path,
+ sbox.repo_url + "/A_COPY_3/D/G/rho",
+ expected_output, wc_disk, wc_status,
+ [], 1)
+
+ # Switch A_COPY/D/H/psi to A_COPY_2/D/H/psi.
+ wc_status.tweak("A_COPY/D/H/psi", switched='S')
+ expected_output = svntest.wc.State(sbox.wc_dir, {})
+ svntest.actions.run_and_verify_switch(sbox.wc_dir, A_COPY_psi_path,
+ sbox.repo_url + "/A_COPY_2/D/H/psi",
+ expected_output, wc_disk, wc_status,
+ [], 1)
+
+ # Target with switched file child:
+ #
+ # Merge r8 from A/D/H into A_COPY/D/H. The switched child of
+ # A_COPY/D/H, file A_COPY/D/H/psi (which has no mergeinfo prior
+ # to the merge), is unaffected by the merge so does not get it's
+ # own explicit mergeinfo.
+ #
+ # A_COPY/D/H/psi's parent A_COPY/D/H has no pre-exiting explicit
+ # mergeinfo so should get its own mergeinfo, the non-inheritable
+ # r8 resulting from the merge.
+ #
+ # A_COPY/D/H/psi's unswitched sibling, A_COPY/D/H/omega is affected
+ # by the merge but won't inherit r8 from A_COPY/D/H, so it needs its
+ # own mergeinfo.
+ expected_output = wc.State(A_COPY_H_path, {
+ 'omega' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_H_path, {
+ '' : Item(status=' U'),
+ 'omega' : Item(status=' U')
+ })
+ expected_elision_output = wc.State(A_COPY_H_path, {
+ 'omega' : Item(status=' U')
+ })
+ expected_status = wc.State(A_COPY_H_path, {
+ '' : Item(status=' M', wc_rev=8),
+ 'psi' : Item(status=' ', wc_rev=8, switched='S'),
+ 'omega' : Item(status='M ', wc_rev=8),
+ 'chi' : Item(status=' ', wc_rev=8),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:8'}),
+ 'psi' : Item("This is the file 'psi'.\n"),
+ 'omega' : Item("New content"),
+ 'chi' : Item("This is the file 'chi'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_H_path, { })
+
+ svntest.actions.run_and_verify_merge(A_COPY_H_path, '7', '8',
+ sbox.repo_url + '/A/D/H', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip,
+ check_props=True)
+
+ # Target with switched dir child:
+ #
+ # Merge r6 from A/D into A_COPY/D. The only subtrees with explicit
+ # mergeinfo (or switched) that are affected by the merge are A_COPY/D/G
+ # and A_COPY/D/G/rho. Only these two subtrees, and the target itself,
+ # should receive mergeinfo updates.
+ expected_output = wc.State(A_COPY_D_path, {
+ 'G/rho' : Item(status='U ')
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_D_path, {
+ '' : Item(status=' U'),
+ 'G' : Item(status=' U'),
+ 'G/rho' : Item(status=' U')
+ })
+ expected_elision_output = wc.State(A_COPY_D_path, {
+ })
+ expected_status_D = wc.State(A_COPY_D_path, {
+ '' : Item(status=' M', wc_rev=8),
+ 'H' : Item(status=' M', wc_rev=8),
+ 'H/chi' : Item(status=' ', wc_rev=8),
+ 'H/omega' : Item(status='M ', wc_rev=8),
+ 'H/psi' : Item(status=' ', wc_rev=8, switched='S'),
+ 'G' : Item(status=' M', wc_rev=8, switched='S'),
+ 'G/pi' : Item(status=' ', wc_rev=8),
+ 'G/rho' : Item(status='MM', wc_rev=8, switched='S'),
+ 'G/tau' : Item(status=' ', wc_rev=8),
+ 'gamma' : Item(status=' ', wc_rev=8),
+ })
+ expected_disk_D = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D:6*'}),
+ 'H' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:8'}),
+ 'H/chi' : Item("This is the file 'chi'.\n"),
+ 'H/omega' : Item("New content"),
+ 'H/psi' : Item("This is the file 'psi'.\n",),
+ 'G' : Item(props={SVN_PROP_MERGEINFO : '/A/D/G:6*'}),
+ 'G/pi' : Item("This is the file 'pi'.\n"),
+ 'G/rho' : Item("New content",
+ props={SVN_PROP_MERGEINFO : '/A/D/G/rho:6'}),
+ 'G/tau' : Item("This is the file 'tau'.\n"),
+ 'gamma' : Item("This is the file 'gamma'.\n"),
+ })
+ expected_skip_D = wc.State(A_COPY_D_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_D_path, '5', '6',
+ sbox.repo_url + '/A/D', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk_D,
+ expected_status_D, expected_skip_D,
+ check_props=True)
+
+
+ # Merge r5 from A/D into A_COPY/D. This updates the mergeinfo on the
+ # target A_COPY/D because the target is always updated. It also updates
+ # the mergeinfo on A_COPY/D/H because that path has explicit mergeinfo
+ # and has a subtree affected by the merge. Lastly, mergeinfo on
+ # A_COPY/D/H/psi is added because that path is switched.
+ expected_output = wc.State(A_COPY_D_path, {
+ 'H/psi' : Item(status='U ')})
+ expected_mergeinfo_output = wc.State(A_COPY_D_path, {
+ '' : Item(status=' G'),
+ 'H' : Item(status=' G'),
+ 'H/psi' : Item(status=' U')
+ })
+ expected_elision_output = wc.State(A_COPY_D_path, {
+ })
+ expected_disk_D.tweak('', props={SVN_PROP_MERGEINFO : '/A/D:5,6*'})
+ expected_disk_D.tweak('H', props={SVN_PROP_MERGEINFO : '/A/D/H:5*,8'})
+ expected_disk_D.tweak('H/psi', contents="New content",
+ props={SVN_PROP_MERGEINFO :'/A/D/H/psi:5'})
+ expected_status_D.tweak('H/psi', status='MM')
+ svntest.actions.run_and_verify_merge(A_COPY_D_path, '4', '5',
+ sbox.repo_url + '/A/D', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk_D,
+ expected_status_D, expected_skip_D,
+ check_props=True)
+
+ # Finally, merge r4:8 into A_COPY. A_COPY gets mergeinfo for r5-8 added but
+ # since none of A_COPY's subtrees with mergeinfo are affected, none of them
+ # get any mergeinfo changes.
+ expected_output = wc.State(A_COPY_path, {
+ 'B/E/beta' : Item(status='U ')
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U')
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=8),
+ 'B' : Item(status=' ', wc_rev=8),
+ 'mu' : Item(status=' ', wc_rev=8),
+ 'B/E' : Item(status=' ', wc_rev=8),
+ 'B/E/alpha' : Item(status=' ', wc_rev=8),
+ 'B/E/beta' : Item(status='M ', wc_rev=8),
+ 'B/lambda' : Item(status=' ', wc_rev=8),
+ 'B/F' : Item(status=' ', wc_rev=8),
+ 'C' : Item(status=' ', wc_rev=8),
+ 'D' : Item(status=' M', wc_rev=8),
+ 'D/G' : Item(status=' M', wc_rev=8, switched='S'),
+ 'D/G/pi' : Item(status=' ', wc_rev=8),
+ 'D/G/rho' : Item(status='MM', wc_rev=8, switched='S'),
+ 'D/G/tau' : Item(status=' ', wc_rev=8),
+ 'D/gamma' : Item(status=' ', wc_rev=8),
+ 'D/H' : Item(status=' M', wc_rev=8),
+ 'D/H/chi' : Item(status=' ', wc_rev=8),
+ 'D/H/psi' : Item(status='MM', wc_rev=8, switched='S'),
+ 'D/H/omega' : Item(status='M ', wc_rev=8),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:5-8'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(props={SVN_PROP_MERGEINFO : '/A/D:5,6*'}),
+ 'D/G' : Item(props={SVN_PROP_MERGEINFO : '/A/D/G:6*'}),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content",
+ props={SVN_PROP_MERGEINFO : '/A/D/G/rho:6'}),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:5*,8'}),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content",
+ props={SVN_PROP_MERGEINFO : '/A/D/H/psi:5'}),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, '4', '8',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip,
+ check_props=True)
+ # Commit changes thus far.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A_COPY' : Item(verb='Sending'),
+ 'A_COPY/B/E/beta' : Item(verb='Sending'),
+ 'A_COPY/D' : Item(verb='Sending'),
+ 'A_COPY/D/G' : Item(verb='Sending'),
+ 'A_COPY/D/G/rho' : Item(verb='Sending'),
+ 'A_COPY/D/H' : Item(verb='Sending'),
+ 'A_COPY/D/H/omega' : Item(verb='Sending'),
+ 'A_COPY/D/H/psi' : Item(verb='Sending'),
+ })
+ wc_status.tweak('A_COPY', 'A_COPY/B/E/beta', 'A_COPY/D', 'A_COPY/D/G',
+ 'A_COPY/D/G/rho', 'A_COPY/D/H', 'A_COPY/D/H/omega',
+ 'A_COPY/D/H/psi', wc_rev=9)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+
+ # Unswitch A_COPY/D/H/psi.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A_COPY/D/H/psi' : Item(status='UU')})
+ wc_status.tweak("A_COPY/D/H/psi", switched=None, wc_rev=9)
+ wc_disk.tweak("A_COPY",
+ props={SVN_PROP_MERGEINFO : '/A:5-8'})
+ wc_disk.tweak("A_COPY/B/E/beta",
+ contents="New content")
+ wc_disk.tweak("A_COPY/D",
+ props={SVN_PROP_MERGEINFO : '/A/D:5,6*'})
+ wc_disk.tweak("A_COPY/D/G",
+ props={SVN_PROP_MERGEINFO : '/A/D/G:6*'})
+ wc_disk.tweak("A_COPY/D/G/rho",
+ contents="New content",
+ props={SVN_PROP_MERGEINFO : '/A/D/G/rho:6'})
+ wc_disk.tweak("A_COPY/D/H",
+ props={SVN_PROP_MERGEINFO : '/A/D/H:5*,8'})
+ wc_disk.tweak("A_COPY/D/H/omega",
+ contents="New content")
+ wc_disk.tweak("A_COPY_2", props={})
+ svntest.actions.run_and_verify_switch(sbox.wc_dir, A_COPY_psi_path,
+ sbox.repo_url + "/A_COPY/D/H/psi",
+ expected_output, wc_disk, wc_status,
+ [], 1)
+
+ # Non-inheritable mergeinfo ranges on a target don't prevent repeat
+ # merges of that range on the target's children.
+ #
+ # Non-inheritable mergeinfo ranges on a target are removed if the target
+ # no longer has any switched children and a repeat merge is performed.
+ #
+ # Merge r4:8 from A/D/H into A_COPY/D/H. A_COPY/D/H already has mergeinfo
+ # for r5 and r8 but it is marked as uninheritable so the repeat merge is
+ # allowed on its children, notably the now unswitched A_COPY/D/H/psi.
+ # Since A_COPY/D/H no longer has any switched children and the merge of
+ # r4:8 has been repeated the previously uninheritable ranges 5* and 8* on
+ # A_COPY/D/H are made inheritable and combined with r6-7. A_COPY/D/H/omega
+ # has explicit mergeinfo, but is not touched by the merge, so is left as-is.
+ expected_output = wc.State(A_COPY_H_path, {
+ 'psi' : Item(status='U ')
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_H_path, {
+ '' : Item(status=' U'),
+ 'psi' : Item(status=' G')
+ })
+ expected_elision_output = wc.State(A_COPY_H_path, {
+ 'psi' : Item(status=' U')
+ })
+ expected_status = wc.State(A_COPY_H_path, {
+ '' : Item(status=' M', wc_rev=9),
+ 'psi' : Item(status='M ', wc_rev=9),
+ 'omega' : Item(status=' ', wc_rev=9),
+ 'chi' : Item(status=' ', wc_rev=8),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:5-8'}),
+ 'psi' : Item("New content"),
+ 'omega' : Item("New content"),
+ 'chi' : Item("This is the file 'chi'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_H_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_H_path, '4', '8',
+ sbox.repo_url + '/A/D/H', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip,
+ [],
+ True, False, '--allow-mixed-revisions',
+ A_COPY_H_path)
+
+ # Non-inheritable mergeinfo ranges on a target do prevent repeat
+ # merges on the target itself.
+ #
+ # Add a prop A/D and commit it as r10. Merge r10 into A_COPY/D. Since
+ # A_COPY/D has a switched child it gets r10 added as a non-inheritable
+ # range. Repeat the same merge checking that no repeat merge is
+ # attempted on A_COPY/D.
+ svntest.actions.run_and_verify_svn(["property 'prop:name' set on '" +
+ D_path + "'\n"], [], 'ps',
+ 'prop:name', 'propval', D_path)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D' : Item(verb='Sending'),
+ 'A_COPY/D/H' : Item(verb='Sending'),
+ 'A_COPY/D/H/psi' : Item(verb='Sending'),
+ })
+ wc_status.tweak('A_COPY/D', wc_rev=9)
+ wc_status.tweak('A/D', 'A_COPY/D/H', 'A_COPY/D/H/psi', wc_rev=10)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+ expected_output = wc.State(A_COPY_D_path, {
+ '' : Item(status=' U')
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_D_path, {
+ '' : Item(status=' U')
+ })
+ expected_elision_output = wc.State(A_COPY_D_path, {
+ })
+ # Reuse expected status and disk from last merge to A_COPY/D
+ expected_status_D.tweak(status=' ')
+ expected_status_D.tweak('', status=' M', wc_rev=9)
+ expected_status_D.tweak('H', wc_rev=10)
+ expected_status_D.tweak('H/psi', wc_rev=10, switched=None)
+ expected_status_D.tweak('H/omega', wc_rev=9)
+ expected_status_D.tweak('G', 'G/rho', switched='S', wc_rev=9)
+ expected_disk_D.tweak('', props={SVN_PROP_MERGEINFO : '/A/D:5,6*,10',
+ "prop:name" : "propval"})
+ expected_disk_D.tweak('G/rho',
+ props={SVN_PROP_MERGEINFO : '/A/D/G/rho:6'})
+ expected_disk_D.tweak('H', props={SVN_PROP_MERGEINFO : '/A/D/H:5-8'})
+ expected_disk_D.tweak('H/psi', contents="New content", props={})
+ svntest.actions.run_and_verify_merge(A_COPY_D_path, '9', '10',
+ sbox.repo_url + '/A/D', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk_D,
+ expected_status_D, expected_skip_D,
+ [],
+ True, False, '--allow-mixed-revisions',
+ A_COPY_D_path)
+ # Repeated merge is a no-op, though we still see the notification reporting
+ # that the mergeinfo describing the merge has been recorded; this time it
+ # is a ' G' notification because there is a local mergeinfo change.
+ expected_output = wc.State(A_COPY_D_path, {})
+ expected_mergeinfo_output = wc.State(A_COPY_D_path, {
+ '' : Item(status=' G')
+ })
+ svntest.actions.run_and_verify_merge(A_COPY_D_path, '9', '10',
+ sbox.repo_url + '/A/D', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk_D,
+ expected_status_D, expected_skip_D,
+ [],
+ True, False, '--allow-mixed-revisions',
+ A_COPY_D_path)
+
+ # Test issue #3187 'Reverse merges don't work properly with
+ # non-inheritable ranges'.
+ #
+ # Test the "switched subtrees" portion of issue #3188 'Mergeinfo on
+ # switched targets/subtrees should elide to repos'.
+ #
+ # Reverse merge r5-8, this should revert all the subtree merges done to
+ # A_COPY thus far and remove all mergeinfo.
+
+ # Revert all local changes. This leaves just the mergeinfo for r5-8
+ # on A_COPY and its various subtrees.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+
+ # Update merge target so working revisions are uniform and all
+ # possible elision occurs.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(10), [],
+ 'up', A_COPY_path)
+
+ # Do the reverse merge.
+ expected_output = wc.State(A_COPY_path, {
+ 'B/E/beta' : Item(status='U '),
+ 'D/G/rho' : Item(status='U '),
+ 'D/H/omega' : Item(status='U '),
+ 'D/H/psi' : Item(status='U ')
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'D' : Item(status=' U'),
+ 'D/G' : Item(status=' U'),
+ 'D/G/rho' : Item(status=' U'),
+ 'D/H' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'D' : Item(status=' U'),
+ 'D/G' : Item(status=' U'),
+ 'D/G/rho' : Item(status=' U'),
+ 'D/H' : Item(status=' U'),
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=10),
+ 'B' : Item(status=' ', wc_rev=10),
+ 'mu' : Item(status=' ', wc_rev=10),
+ 'B/E' : Item(status=' ', wc_rev=10),
+ 'B/E/alpha' : Item(status=' ', wc_rev=10),
+ 'B/E/beta' : Item(status='M ', wc_rev=10),
+ 'B/lambda' : Item(status=' ', wc_rev=10),
+ 'B/F' : Item(status=' ', wc_rev=10),
+ 'C' : Item(status=' ', wc_rev=10),
+ 'D' : Item(status=' M', wc_rev=10),
+ 'D/G' : Item(status=' M', wc_rev=10, switched='S'),
+ 'D/G/pi' : Item(status=' ', wc_rev=10),
+ 'D/G/rho' : Item(status='MM', wc_rev=10, switched='S'),
+ 'D/G/tau' : Item(status=' ', wc_rev=10),
+ 'D/gamma' : Item(status=' ', wc_rev=10),
+ 'D/H' : Item(status=' M', wc_rev=10),
+ 'D/H/chi' : Item(status=' ', wc_rev=10),
+ 'D/H/psi' : Item(status='M ', wc_rev=10),
+ 'D/H/omega' : Item(status='M ', wc_rev=10),
+ })
+ expected_disk = wc.State('', {
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, '8', '4',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+# Test for issue 2047: Merge from parent dir fails while it succeeds from
+# the direct dir
+@Issue(2047)
+def merge_with_implicit_target_file(sbox):
+ "merge a change to a file, using relative path"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make a change to A/mu, then revert it using 'svn merge -r 2:1 A/mu'
+
+ # change A/mu and commit
+ A_path = sbox.ospath('A')
+ mu_path = os.path.join(A_path, 'mu')
+
+ svntest.main.file_append(mu_path, "A whole new line.\n")
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Update to revision 2.
+ svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir)
+
+ # Revert the change committed in r2.
+ # NOTE(review): this chdir is not undone within the test; presumably the
+ # test harness restores the CWD between tests -- confirm.
+ os.chdir(wc_dir)
+
+ # run_and_verify_merge doesn't accept file paths.
+ # The reverse merge -r 2:1 targets 'A/mu' relative to the new CWD, which
+ # is exactly the issue #2047 scenario (merge addressed via relative path).
+ svntest.actions.run_and_verify_svn(None, [], 'merge', '-r', '2:1',
+ 'A/mu')
+
+#----------------------------------------------------------------------
+# Test practical application of issue #2769 fix, empty rev range elision,
+# and elision to the repos.
+@Issue(2769)
+@SkipUnless(server_has_mergeinfo)
+def empty_mergeinfo(sbox):
+ "mergeinfo can explicitly be empty"
+
+ # A bit o' history: The fix for issue #2769 originally permitted mergeinfo
+ # with empty range lists and as a result we permitted partial elision and
+ # had a whole slew of tests here for that. But the fix of issue #3029 now
+ # prevents svn ps or svn merge from creating mergeinfo with paths mapped to
+ # empty ranges, only empty mergeinfo is allowed. As a result this test now
+ # covers the following areas:
+ #
+ # A) Merging a set of revisions into a path, then reverse merging the
+ # same set out of a subtree of path results in empty mergeinfo
+ # (i.e. "") on the subtree.
+ #
+ # B) Empty mergeinfo elides to empty mergeinfo.
+ #
+ # C) If a merge sets empty mergeinfo on its target and that target has
+ # no ancestor in either the WC or the repository with explicit
+ # mergeinfo, then the target's mergeinfo is removed (a.k.a. elides
+ # to nothing).
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ # Some paths we'll care about
+ A_COPY_path = sbox.ospath('A_COPY')
+ H_COPY_path = sbox.ospath('A_COPY/D/H')
+ psi_COPY_path = sbox.ospath('A_COPY/D/H/psi')
+ rho_COPY_path = sbox.ospath('A_COPY/D/G/rho')
+
+ # Test area A -- Merge r2:4 into A_COPY then reverse merge 4:2 to
+ # A_COPY/D/G. A_COPY/D/G should end up with empty mergeinfo to
+ # override that of A_COPY.
+ expected_output = wc.State(A_COPY_path, {
+ 'D/H/psi' : Item(status='U '),
+ 'D/G/rho' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'B' : Item(status=' ', wc_rev=2),
+ 'mu' : Item(status=' ', wc_rev=2),
+ 'B/E' : Item(status=' ', wc_rev=2),
+ 'B/E/alpha' : Item(status=' ', wc_rev=2),
+ 'B/E/beta' : Item(status=' ', wc_rev=2),
+ 'B/lambda' : Item(status=' ', wc_rev=2),
+ 'B/F' : Item(status=' ', wc_rev=2),
+ 'C' : Item(status=' ', wc_rev=2),
+ 'D' : Item(status=' ', wc_rev=2),
+ 'D/G' : Item(status=' ', wc_rev=2),
+ 'D/G/pi' : Item(status=' ', wc_rev=2),
+ 'D/G/rho' : Item(status='M ', wc_rev=2),
+ 'D/G/tau' : Item(status=' ', wc_rev=2),
+ 'D/gamma' : Item(status=' ', wc_rev=2),
+ 'D/H' : Item(status=' ', wc_rev=2),
+ 'D/H/chi' : Item(status=' ', wc_rev=2),
+ 'D/H/psi' : Item(status='M ', wc_rev=2),
+ 'D/H/omega' : Item(status=' ', wc_rev=2),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:3-4'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, '2', '4',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+ # Now do the reverse merge into the subtree.
+ # 'G ' here: the reverse-merged content change merges with the local
+ # modification left on psi by the forward merge above.
+ expected_output = wc.State(H_COPY_path, {
+ 'psi' : Item(status='G '),
+ })
+ expected_mergeinfo_output = wc.State(H_COPY_path, {
+ '' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(H_COPY_path, {
+ })
+ expected_status = wc.State(H_COPY_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'chi' : Item(status=' ', wc_rev=2),
+ 'psi' : Item(status=' ', wc_rev=2),
+ 'omega' : Item(status=' ', wc_rev=2),
+ })
+ # Test area A: the subtree ends up with explicit *empty* mergeinfo,
+ # overriding the inheritable mergeinfo on A_COPY.
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : ''}),
+ 'chi' : Item("This is the file 'chi'.\n"),
+ 'psi' : Item("This is the file 'psi'.\n"),
+ 'omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_skip = wc.State(H_COPY_path, { })
+ svntest.actions.run_and_verify_merge(H_COPY_path, '4', '2',
+ sbox.repo_url + '/A/D/H', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Test areas B and C -- Reverse merge r3 into A_COPY, this would result in
+ # empty mergeinfo on A_COPY and A_COPY/D/H, but the empty mergeinfo on the
+ # latter elides to the former. And then the empty mergeinfo on A_COPY,
+ # which has no parent with explicit mergeinfo to override (in either the WC
+ # or the repos) itself elides. This leaves the WC in the same unmodified
+ # state as after the call to set_up_branch().
+ expected_output = expected_merge_output(
+ [[4,3]], ['G ' + rho_COPY_path + '\n',
+ ' G ' + A_COPY_path + '\n',
+ ' U ' + H_COPY_path + '\n',
+ ' U ' + A_COPY_path + '\n',],
+ elides=True)
+ svntest.actions.run_and_verify_svn(expected_output,
+ [], 'merge', '-r4:2',
+ sbox.repo_url + '/A',
+ A_COPY_path)
+ svntest.actions.run_and_verify_status(wc_dir, wc_status)
+ # Check that A_COPY's mergeinfo is gone. 'pg' on a path lacking the
+ # property warns on stderr (W200017), which is what we expect here.
+ svntest.actions.run_and_verify_svn([], '.*W200017: Property.*not found',
+ 'pg', 'svn:mergeinfo',
+ A_COPY_path)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+@Issue(2781)
+def prop_add_to_child_with_mergeinfo(sbox):
+ "merge adding prop to child of merge target works"
+
+ # Test for Issue #2781 Prop add to child of merge target corrupts WC if
+ # child has mergeinfo.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ expected_disk, expected_status = set_up_branch(sbox)
+
+ # Some paths we'll care about
+ beta_path = sbox.ospath('A/B/E/beta')
+ beta_COPY_path = sbox.ospath('A_COPY/B/E/beta')
+ B_COPY_path = sbox.ospath('A_COPY/B')
+
+ # Set a non-mergeinfo prop on a file.
+ svntest.actions.run_and_verify_svn(["property 'prop:name' set on '" +
+ beta_path + "'\n"], [], 'ps',
+ 'prop:name', 'propval', beta_path)
+ expected_disk.tweak('A/B/E/beta', props={'prop:name' : 'propval'})
+ expected_status.tweak('A/B/E/beta', wc_rev=7)
+ expected_output = wc.State(wc_dir,
+ {'A/B/E/beta' : Item(verb='Sending')})
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Merge r4:5 from A/B/E/beta into A_COPY/B/E/beta.
+ # Two notifications for the same path: 'U ' for the content change and
+ # ' U' for the mergeinfo property set on beta itself.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[5]],
+ ['U ' + beta_COPY_path +'\n',
+ ' U ' + beta_COPY_path +'\n',]),
+ [], 'merge', '-c5',
+ sbox.repo_url + '/A/B/E/beta',
+ beta_COPY_path)
+
+ # Merge r6:7 into A_COPY/B. In issue #2781 this adds a bogus
+ # and incomplete entry in A_COPY/B/.svn/entries for 'beta'.
+ expected_output = wc.State(B_COPY_path, {
+ 'E/beta' : Item(status=' U'),
+ })
+ expected_mergeinfo_output = wc.State(B_COPY_path, {
+ '' : Item(status=' U'),
+ 'E/beta' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(B_COPY_path, {
+ })
+ expected_status = wc.State(B_COPY_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'E' : Item(status=' ', wc_rev=2),
+ 'E/alpha' : Item(status=' ', wc_rev=2),
+ 'E/beta' : Item(status='MM', wc_rev=2),
+ 'lambda' : Item(status=' ', wc_rev=2),
+ 'F' : Item(status=' ', wc_rev=2),
+ })
+ # beta shows 'MM': text modified by the -c5 merge above, props modified
+ # by both that merge (mergeinfo) and this one (prop:name from r7).
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:7'}),
+ 'E' : Item(),
+ 'E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'E/beta' : Item(contents="New content",
+ props={SVN_PROP_MERGEINFO : '/A/B/E/beta:5,7',
+ 'prop:name' : 'propval'}),
+ 'F' : Item(),
+ 'lambda' : Item("This is the file 'lambda'.\n")
+ })
+ expected_skip = wc.State(B_COPY_path, { })
+ svntest.actions.run_and_verify_merge(B_COPY_path, '6', '7',
+ sbox.repo_url + '/A/B', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+@Issue(2788,3383)
+def foreign_repos_does_not_update_mergeinfo(sbox):
+ "set no mergeinfo when merging from foreign repos"
+
+ # Test for issue #2788 and issue #3383.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ expected_disk, expected_status = set_up_branch(sbox)
+
+ # Set up for test of issue #2788.
+
+ # Create a second repository with the same greek tree
+ repo_dir = sbox.repo_dir
+ other_repo_dir, other_repo_url = sbox.add_repo_path("other")
+ other_wc_dir = sbox.add_wc_path("other")
+ # Duplicate the repository's history; the copy is a distinct repository
+ # (presumably with a different UUID -- confirm), so merges from it are
+ # treated as "foreign" and must not record svn:mergeinfo.
+ svntest.main.copy_repos(repo_dir, other_repo_dir, 6, 1)
+
+ # Merge r3:4 (using implied peg revisions) from 'other' repos into
+ # A_COPY/D/G. Merge should succeed, but no mergeinfo should be set.
+ G_COPY_path = sbox.ospath('A_COPY/D/G')
+ svntest.actions.run_and_verify_svn(expected_merge_output([[4]],
+ 'U ' +
+ os.path.join(G_COPY_path,
+ "rho") + '\n', True),
+ [], 'merge', '-c4',
+ other_repo_url + '/A/D/G',
+ G_COPY_path)
+
+ # Merge r4:5 (using explicit peg revisions) from 'other' repos into
+ # A_COPY/B/E. Merge should succeed, but no mergeinfo should be set.
+ E_COPY_path = sbox.ospath('A_COPY/B/E')
+ svntest.actions.run_and_verify_svn(expected_merge_output([[5]],
+ 'U ' +
+ os.path.join(E_COPY_path,
+ "beta") +'\n', True),
+ [], 'merge',
+ other_repo_url + '/A/B/E@4',
+ other_repo_url + '/A/B/E@5',
+ E_COPY_path)
+
+ expected_status.tweak('A_COPY/D/G/rho', 'A_COPY/B/E/beta', status='M ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Set up for test of issue #3383.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+
+ # Get a working copy for the foreign repos.
+ svntest.actions.run_and_verify_svn(None, [], 'co', other_repo_url,
+ other_wc_dir)
+
+ # Create mergeinfo on the foreign repos on an existing directory and
+ # file and an added directory and file. Commit as r7. And no, we aren't
+ # checking these intermediate steps very thoroughly, but we test these
+ # simple merges to *death* elsewhere.
+
+ # Create mergeinfo on an existing directory.
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ other_repo_url + '/A',
+ os.path.join(other_wc_dir, 'A_COPY'),
+ '-c5')
+
+ # Create mergeinfo on an existing file.
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ other_repo_url + '/A/D/H/psi',
+ os.path.join(other_wc_dir, 'A_COPY',
+ 'D', 'H', 'psi'),
+ '-c3')
+
+ # Add a new directory with mergeinfo in the foreign repos.
+ new_dir = os.path.join(other_wc_dir, 'A_COPY', 'N')
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', new_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'ps',
+ SVN_PROP_MERGEINFO, '', new_dir)
+
+ # Add a new file with mergeinfo in the foreign repos.
+ new_file = os.path.join(other_wc_dir, 'A_COPY', 'nu')
+ svntest.main.file_write(new_file, "This is the file 'nu'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'add', new_file)
+ svntest.actions.run_and_verify_svn(None, [], 'ps',
+ SVN_PROP_MERGEINFO, '', new_file)
+
+ expected_output = wc.State(other_wc_dir,{
+ 'A_COPY' : Item(verb='Sending'), # Mergeinfo created
+ 'A_COPY/B/E/beta' : Item(verb='Sending'),
+ 'A_COPY/D/H/psi' : Item(verb='Sending'), # Mergeinfo created
+ 'A_COPY/N' : Item(verb='Adding'), # Has empty mergeinfo
+ 'A_COPY/nu' : Item(verb='Adding'), # Has empty mergeinfo
+ })
+ svntest.actions.run_and_verify_commit(other_wc_dir, expected_output,
+ None, [], other_wc_dir,
+ '-m',
+ 'create mergeinfo on foreign repos')
+ # Now merge a diff from the foreign repos that contains the mergeinfo
+ # addition in r7 to A_COPY. The mergeinfo diff should *not* be applied
+ # to A_COPY since it refers to a foreign repository...
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ other_repo_url + '/A@1',
+ other_repo_url + '/A_COPY@7',
+ sbox.ospath('A_COPY'))
+ #...which means there should be no mergeinfo anywhere in WC_DIR, since
+ # this test never created any.
+ svntest.actions.run_and_verify_svn([], [], 'pg',
+ SVN_PROP_MERGEINFO, '-vR',
+ wc_dir)
+
+#----------------------------------------------------------------------
+# This test involves tree conflicts.
+@XFail()
+@Issue(2897)
+def avoid_reflected_revs(sbox):
+ "avoid repeated merges for cyclic merging"
+
+ # See <http://subversion.tigris.org/issues/show_bug.cgi?id=2897>.
+ #
+ # This test cherry-picks some changes (all of them, in fact) from the
+ # parent branch 'A' to the child branch 'A_COPY', and then tries to
+ # reintegrate 'A_COPY' to 'A' (explicitly specifying a revision range
+ # on the source branch). It expects the changes that are unique to the
+ # branch 'A_COPY' to be merged to 'A'.
+ #
+ # A --1----[3]---[5]----------?
+ # \ \_____\___ /
+ # \ \ \ /
+ # A_COPY 2-[---4-----6--7--8]-
+
+ # Create a WC with a single branch
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox, True, 1)
+
+ # Some paths we'll care about
+ A_path = sbox.ospath('A')
+ A_COPY_path = sbox.ospath('A_COPY')
+ tfile1_path = sbox.ospath('A/tfile1')
+ tfile2_path = sbox.ospath('A/tfile2')
+ bfile1_path = os.path.join(A_COPY_path, 'bfile1')
+ bfile2_path = os.path.join(A_COPY_path, 'bfile2')
+
+ # Contents to be added to files
+ tfile1_content = "This is tfile1\n"
+ tfile2_content = "This is tfile2\n"
+ bfile1_content = "This is bfile1\n"
+ bfile2_content = "This is bfile2\n"
+
+ # We'll consider A as the trunk and A_COPY as the feature branch
+ # r3 - Create a tfile1 in A
+ svntest.main.file_write(tfile1_path, tfile1_content)
+ svntest.actions.run_and_verify_svn(None, [], 'add', tfile1_path)
+ expected_output = wc.State(wc_dir, {'A/tfile1' : Item(verb='Adding')})
+ wc_status.add({'A/tfile1' : Item(status=' ', wc_rev=3)})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+
+ # r4 - Create a bfile1 in A_COPY
+ svntest.main.file_write(bfile1_path, bfile1_content)
+ svntest.actions.run_and_verify_svn(None, [], 'add', bfile1_path)
+ expected_output = wc.State(wc_dir, {'A_COPY/bfile1' : Item(verb='Adding')})
+ wc_status.add({'A_COPY/bfile1' : Item(status=' ', wc_rev=4)})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+
+ # r5 - Create one more file in A
+ svntest.main.file_write(tfile2_path, tfile2_content)
+ svntest.actions.run_and_verify_svn(None, [], 'add', tfile2_path)
+ expected_output = wc.State(wc_dir, {'A/tfile2' : Item(verb='Adding')})
+ wc_status.add({'A/tfile2' : Item(status=' ', wc_rev=5)})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+
+ # Merge r5 from /A to /A_COPY, creating r6
+ expected_output = wc.State(A_COPY_path, {
+ 'tfile2' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'tfile2' : Item(status='A ', wc_rev='-', copied='+'),
+ 'bfile1' : Item(status=' ', wc_rev=4),
+ 'mu' : Item(status=' ', wc_rev=2),
+ 'C' : Item(status=' ', wc_rev=2),
+ 'D' : Item(status=' ', wc_rev=2),
+ 'B' : Item(status=' ', wc_rev=2),
+ 'B/lambda' : Item(status=' ', wc_rev=2),
+ 'B/E' : Item(status=' ', wc_rev=2),
+ 'B/E/alpha': Item(status=' ', wc_rev=2),
+ 'B/E/beta' : Item(status=' ', wc_rev=2),
+ 'B/F' : Item(status=' ', wc_rev=2),
+ 'D/gamma' : Item(status=' ', wc_rev=2),
+ 'D/G' : Item(status=' ', wc_rev=2),
+ 'D/G/pi' : Item(status=' ', wc_rev=2),
+ 'D/G/rho' : Item(status=' ', wc_rev=2),
+ 'D/G/tau' : Item(status=' ', wc_rev=2),
+ 'D/H' : Item(status=' ', wc_rev=2),
+ 'D/H/chi' : Item(status=' ', wc_rev=2),
+ 'D/H/omega': Item(status=' ', wc_rev=2),
+ 'D/H/psi' : Item(status=' ', wc_rev=2),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:5'}),
+ 'tfile2' : Item(tfile2_content),
+ 'bfile1' : Item(bfile1_content),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'B' : Item(),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha': Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/F' : Item(),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/omega': Item("This is the file 'omega'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_path, {})
+
+ svntest.actions.run_and_verify_merge(A_COPY_path, '4', '5',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False,
+ A_COPY_path,
+ '--allow-mixed-revisions')
+
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_output = wc.State(wc_dir, {
+ 'A_COPY' : Item(verb='Sending'),
+ 'A_COPY/tfile2' : Item(verb='Adding'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ None)
+
+ # Merge r3 from /A to /A_COPY, creating r7
+ expected_output = wc.State(A_COPY_path, {
+ 'tfile1' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status.tweak(wc_rev=5)
+ expected_status.tweak('', wc_rev=6)
+ expected_status.tweak('tfile2', status=' ', copied=None, wc_rev=6)
+ expected_status.add({
+ 'tfile1' : Item(status='A ', wc_rev='-', copied='+'),
+ })
+ expected_disk.tweak('', props={SVN_PROP_MERGEINFO : '/A:3,5'})
+ expected_disk.add({
+ 'tfile1' : Item(tfile1_content),
+ })
+
+ svntest.actions.run_and_verify_merge(A_COPY_path, '2', '3',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False,
+ A_COPY_path,
+ '--allow-mixed-revisions')
+
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_output = wc.State(wc_dir, {
+ 'A_COPY' : Item(verb='Sending'),
+ 'A_COPY/tfile1' : Item(verb='Adding'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ None)
+
+ # r8 - Add bfile2 to A_COPY
+ svntest.main.file_write(bfile2_path, bfile2_content)
+ svntest.actions.run_and_verify_svn(None, [], 'add', bfile2_path)
+ expected_output = wc.State(wc_dir, {'A_COPY/bfile2' : Item(verb='Adding')})
+ wc_status.tweak(wc_rev=6)
+ wc_status.add({
+ 'A_COPY/bfile2' : Item(status=' ', wc_rev=8),
+ 'A_COPY' : Item(status=' ', wc_rev=7),
+ 'A_COPY/tfile2' : Item(status=' ', wc_rev=6),
+ 'A_COPY/tfile1' : Item(status=' ', wc_rev=7),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+
+ # Merge 2:8 from A_COPY(feature branch) to A(trunk).
+ expected_output = wc.State(A_path, {
+ 'bfile2' : Item(status='A '),
+ 'bfile1' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(A_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_path, {
+ })
+ expected_status = wc.State(A_path, {
+ '' : Item(status=' M', wc_rev=6),
+ 'bfile2' : Item(status='A ', wc_rev='-', copied='+'),
+ 'bfile1' : Item(status='A ', wc_rev='-', copied='+'),
+ 'tfile2' : Item(status=' ', wc_rev=6),
+ 'tfile1' : Item(status=' ', wc_rev=6),
+ 'mu' : Item(status=' ', wc_rev=6),
+ 'C' : Item(status=' ', wc_rev=6),
+ 'D' : Item(status=' ', wc_rev=6),
+ 'B' : Item(status=' ', wc_rev=6),
+ 'B/lambda' : Item(status=' ', wc_rev=6),
+ 'B/E' : Item(status=' ', wc_rev=6),
+ 'B/E/alpha' : Item(status=' ', wc_rev=6),
+ 'B/E/beta' : Item(status=' ', wc_rev=6),
+ 'B/F' : Item(status=' ', wc_rev=6),
+ 'D/gamma' : Item(status=' ', wc_rev=6),
+ 'D/G' : Item(status=' ', wc_rev=6),
+ 'D/G/pi' : Item(status=' ', wc_rev=6),
+ 'D/G/rho' : Item(status=' ', wc_rev=6),
+ 'D/G/tau' : Item(status=' ', wc_rev=6),
+ 'D/H' : Item(status=' ', wc_rev=6),
+ 'D/H/chi' : Item(status=' ', wc_rev=6),
+ 'D/H/omega' : Item(status=' ', wc_rev=6),
+ 'D/H/psi' : Item(status=' ', wc_rev=6),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A_COPY:3-8'}),
+ 'bfile2' : Item(bfile2_content),
+ 'bfile1' : Item(bfile1_content),
+ 'tfile2' : Item(tfile2_content),
+ 'tfile1' : Item(tfile1_content),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'B' : Item(),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/F' : Item(),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ })
+
+ expected_skip = wc.State(A_path, {})
+
+ svntest.actions.run_and_verify_merge(A_path, '2', '8',
+ sbox.repo_url + '/A_COPY', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def update_loses_mergeinfo(sbox):
+ "update does not merge mergeinfo"
+
+ """
+ When a working copy path receives a fresh svn:mergeinfo property due to
+ an update, and the path has local mergeinfo changes, then the local
+ mergeinfo should be merged with the incoming mergeinfo.
+ """
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ A_C_wc_dir = sbox.ospath('A/C')
+ A_B_url = sbox.repo_url + '/A/B'
+ A_B_J_url = sbox.repo_url + '/A/B/J'
+ A_B_K_url = sbox.repo_url + '/A/B/K'
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 2.\n'],
+ [],
+ 'mkdir', '-m', 'rev 2', A_B_J_url)
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 3.\n'],
+ [],
+ 'mkdir', '-m', 'rev 3', A_B_K_url)
+
+ other_wc = sbox.add_wc_path('other')
+ svntest.actions.duplicate_dir(wc_dir, other_wc)
+
+ expected_output = wc.State(A_C_wc_dir, {'J' : Item(status='A ')})
+ expected_mergeinfo_output = wc.State(A_C_wc_dir, {
+ '' : Item(status=' U')
+ })
+ expected_elision_output = wc.State(A_C_wc_dir, {
+ })
+ expected_disk = wc.State('', {
+ 'J' : Item(),
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:2'}),
+ })
+ expected_status = wc.State(A_C_wc_dir,
+ { '' : Item(wc_rev=1, status=' M'),
+ 'J' : Item(status='A ',
+ wc_rev='-', copied='+')
+ }
+ )
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_merge(A_C_wc_dir, '1', '2',
+ A_B_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=1)
+ expected_output = wc.State(A_C_wc_dir, {
+ '' : Item(verb='Sending'),
+ 'J' : Item(verb='Adding')
+ })
+ expected_status = wc.State(A_C_wc_dir,
+ { '' : Item(status=' ', wc_rev=4),
+ 'J' : Item(status=' ', wc_rev=4)
+ }
+ )
+ svntest.actions.run_and_verify_commit(A_C_wc_dir,
+ expected_output,
+ expected_status)
+
+ other_A_C_wc_dir = os.path.join(other_wc, 'A', 'C')
+ expected_output = wc.State(other_A_C_wc_dir, {'K' : Item(status='A ')})
+ expected_mergeinfo_output = wc.State(other_A_C_wc_dir, {
+ '' : Item(status=' U')
+ })
+ expected_elision_output = wc.State(other_A_C_wc_dir, {
+ })
+ expected_disk = wc.State('', {
+ 'K' : Item(),
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:3'}),
+ })
+ expected_status = wc.State(other_A_C_wc_dir,
+ { '' : Item(wc_rev=1, status=' M'),
+ 'K' : Item(status='A ',
+ wc_rev='-', copied='+')
+ }
+ )
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_merge(other_A_C_wc_dir, '2', '3',
+ A_B_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=1)
+ expected_output = wc.State(other_A_C_wc_dir,
+ {'J' : Item(status='A '),
+ '' : Item(status=' G')
+ }
+ )
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:2-3'}),
+ 'J' : Item(),
+ 'K' : Item(),
+ })
+ expected_status = wc.State(other_A_C_wc_dir,
+ { '' : Item(wc_rev=4, status=' M'),
+ 'J' : Item(status=' ', wc_rev='4'),
+ 'K' : Item(status='A ',
+ wc_rev='-', copied='+')
+ }
+ )
+ svntest.actions.run_and_verify_update(other_A_C_wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ check_props=True)
+
+#----------------------------------------------------------------------
+# Tests part of issue# 2829.
+@Issue(2829)
+@SkipUnless(server_has_mergeinfo)
+def merge_loses_mergeinfo(sbox):
+ "merge should merge mergeinfo"
+
+ """
+  When a working copy has no mergeinfo (due to a local full revert of all merges),
+  and a merge is attempted for some other revision rX, the new mergeinfo should be
+ /merge/src: rX not all the reverted ones reappearing along with rX.
+ """
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ A_C_wc_dir = sbox.ospath('A/C')
+ A_B_url = sbox.repo_url + '/A/B'
+ A_B_J_url = sbox.repo_url + '/A/B/J'
+ A_B_K_url = sbox.repo_url + '/A/B/K'
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 2.\n'],
+ [],
+ 'mkdir', '-m', 'rev 2', A_B_J_url)
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 3.\n'],
+ [],
+ 'mkdir', '-m', 'rev 3', A_B_K_url)
+
+ expected_output = wc.State(A_C_wc_dir, {'J' : Item(status='A ')})
+ expected_mergeinfo_output = wc.State(A_C_wc_dir, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_C_wc_dir, {
+ })
+ expected_disk = wc.State('', {
+ 'J' : Item(),
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:2'}),
+ })
+ expected_status = wc.State(A_C_wc_dir,
+ { '' : Item(wc_rev=1, status=' M'),
+ 'J' : Item(status='A ',
+ wc_rev='-', copied='+')
+ }
+ )
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_merge(A_C_wc_dir, '1', '2',
+ A_B_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=1)
+ expected_output = wc.State(A_C_wc_dir, {
+ '' : Item(verb='Sending'),
+ 'J' : Item(verb='Adding')
+ })
+ expected_status = wc.State(A_C_wc_dir,
+ { '' : Item(status=' ', wc_rev=4),
+ 'J' : Item(status=' ', wc_rev=4)
+ }
+ )
+ svntest.actions.run_and_verify_commit(A_C_wc_dir,
+ expected_output,
+ expected_status)
+ expected_output = wc.State(A_C_wc_dir, {'J' : Item(status='D ')})
+ expected_elision_output = wc.State(A_C_wc_dir, {
+ '' : Item(status=' U'),
+ })
+ expected_disk = wc.State('', {})
+ expected_status = wc.State(A_C_wc_dir,
+ { '' : Item(wc_rev=4, status=' M'),
+ 'J' : Item(wc_rev=4, status='D ')
+ }
+ )
+ svntest.actions.run_and_verify_merge(A_C_wc_dir, '2', '1',
+ A_B_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=1)
+
+ expected_output = wc.State(A_C_wc_dir, {'K' : Item(status='A ')})
+ expected_disk = wc.State('', {
+ 'K' : Item(),
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:3'}),
+ })
+ expected_status = wc.State(A_C_wc_dir,
+ { '' : Item(wc_rev=4, status=' M'),
+ 'K' : Item(status='A ',
+ wc_rev='-', copied='+'),
+ 'J' : Item(wc_rev=4, status='D ')
+ }
+ )
+ expected_mergeinfo_output = wc.State(A_C_wc_dir, {
+ '' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(A_C_wc_dir, {
+ })
+ svntest.actions.run_and_verify_merge(A_C_wc_dir, '2', '3',
+ A_B_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=1)
+
+#----------------------------------------------------------------------
+@Issue(2853)
+def single_file_replace_style_merge_capability(sbox):
+ "replace-style merge capability for a single file"
+
+ # Test for issue #2853, do_single_file_merge() lacks "Replace-style
+ # merge" capability
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ iota_path = sbox.ospath('iota')
+ mu_path = sbox.ospath('A/mu')
+
+ # delete mu and replace it with a copy of iota
+ svntest.main.run_svn(None, 'rm', mu_path)
+ svntest.main.run_svn(None, 'mv', iota_path, mu_path)
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status=' ', wc_rev=2)
+ expected_status.remove('iota')
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota': Item(verb='Deleting'),
+ 'A/mu': Item(verb='Replacing'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Merge the file mu alone to rev1
+ svntest.actions.run_and_verify_svn(expected_merge_output(None,
+ ['R ' + mu_path + '\n']),
+ [],
+ 'merge',
+ mu_path + '@2',
+ mu_path + '@1',
+ mu_path)
+
+#----------------------------------------------------------------------
+# Test for issue 2786 fix.
+@Issue(2786)
+@SkipUnless(server_has_mergeinfo)
+def merge_to_out_of_date_target(sbox):
+ "merge to ood path can lead to inaccurate mergeinfo"
+
+ # Create a WC with a branch.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox, False, 1)
+
+ # Make second working copy
+ other_wc = sbox.add_wc_path('other')
+ svntest.actions.duplicate_dir(wc_dir, other_wc)
+
+ # Some paths we'll care about
+ A_COPY_H_path = sbox.ospath('A_COPY/D/H')
+ other_A_COPY_H_path = os.path.join(other_wc, "A_COPY", "D", "H")
+
+ # Merge -c3 into A_COPY/D/H of first WC.
+ expected_output = wc.State(A_COPY_H_path, {
+ 'psi' : Item(status='U ')
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_H_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_H_path, {
+ })
+ expected_status = wc.State(A_COPY_H_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'psi' : Item(status='M ', wc_rev=2),
+ 'omega' : Item(status=' ', wc_rev=2),
+ 'chi' : Item(status=' ', wc_rev=2),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:3'}),
+ 'psi' : Item("New content"),
+ 'omega' : Item("This is the file 'omega'.\n"),
+ 'chi' : Item("This is the file 'chi'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_H_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_H_path, '2', '3',
+ sbox.repo_url + '/A/D/H', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip,
+ check_props=True)
+
+ # Commit merge to first WC.
+ wc_status.tweak('A_COPY/D/H/psi', 'A_COPY/D/H', wc_rev=7)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A_COPY/D/H' : Item(verb='Sending'),
+ 'A_COPY/D/H/psi': Item(verb='Sending'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ wc_status)
+
+ # Merge -c6 into A_COPY/D/H of other WC.
+ expected_output = wc.State(other_A_COPY_H_path, {
+ 'omega' : Item(status='U ')
+ })
+ expected_mergeinfo_output = wc.State(other_A_COPY_H_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(other_A_COPY_H_path, {
+ })
+ expected_status = wc.State(other_A_COPY_H_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'psi' : Item(status=' ', wc_rev=2),
+ 'omega' : Item(status='M ', wc_rev=2),
+ 'chi' : Item(status=' ', wc_rev=2),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:6'}),
+ 'psi' : Item("This is the file 'psi'.\n"),
+ 'omega' : Item("New content"),
+ 'chi' : Item("This is the file 'chi'.\n"),
+ })
+ expected_skip = wc.State(other_A_COPY_H_path, { })
+ svntest.actions.run_and_verify_merge(other_A_COPY_H_path, '5', '6',
+ sbox.repo_url + '/A/D/H', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip,
+ check_props=1)
+
+ # Update A_COPY/D/H in other WC. Local mergeinfo for r6 on A_COPY/D/H
+ # should be *merged* with r3 from first WC.
+ expected_output = svntest.wc.State(other_A_COPY_H_path, {
+ '' : Item(status=' G'),
+ 'psi' : Item(status='U ')
+ })
+ other_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:3,6'}),
+ 'psi' : Item(contents="New content"),
+ 'chi' : Item("This is the file 'chi'.\n"),
+ 'omega' : Item(contents="New content"),
+ })
+ other_status = wc.State(other_A_COPY_H_path,{
+ '' : Item(wc_rev=7, status=' M'),
+ 'chi' : Item(wc_rev=7, status=' '),
+ 'psi' : Item(wc_rev=7, status=' '),
+ 'omega' : Item(wc_rev=7, status='M ')
+ })
+ svntest.actions.run_and_verify_update(other_A_COPY_H_path,
+ expected_output,
+ other_disk,
+ other_status,
+ check_props=True)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def merge_with_depth_files(sbox):
+ "merge test for --depth files"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ mu_path = sbox.ospath('A/mu')
+ gamma_path = sbox.ospath('A/D/gamma')
+ Acopy_path = sbox.ospath('A_copy')
+ Acopy_mu_path = sbox.ospath('A_copy/mu')
+ A_url = sbox.repo_url + '/A'
+ Acopy_url = sbox.repo_url + '/A_copy'
+
+ # Copy A_url to A_copy_url
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ A_url, Acopy_url,
+ '-m', 'create a new copy of A')
+
+ svntest.main.file_write(mu_path, "this is file 'mu' modified.\n")
+ svntest.main.file_write(gamma_path, "this is file 'gamma' modified.\n")
+
+ # Create expected output tree for commit
+ expected_output = wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ 'A/D/gamma' : Item(verb='Sending'),
+ })
+
+ # Create expected status tree for commit
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/mu' : Item(status=' ', wc_rev=3),
+ 'A/D/gamma' : Item(status=' ', wc_rev=3),
+ })
+
+ # Commit the modified contents
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Update working copy
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', Acopy_path)
+
+ # Merge r1:3 into A_copy with --depth files. The merge only affects
+ # 'A_copy' and its one file child 'mu', so 'A_copy' gets non-inheritable
+ # mergeinfo for -r1:3 and 'mu' gets its own complete set of mergeinfo:
+ # r1 from its parent, and r1:3 from the merge itself.
+ expected_output = wc.State(Acopy_path, {
+ 'mu' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(Acopy_path, {
+ '' : Item(status=' U'),
+ 'mu' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(Acopy_path, {
+ })
+ expected_status = wc.State(Acopy_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status='MM'),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=3)
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:2-3*'}),
+ 'B' : Item(),
+ 'mu' : Item("this is file 'mu' modified.\n",
+ props={SVN_PROP_MERGEINFO : '/A/mu:2-3'}),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_skip = wc.State(Acopy_path, { })
+ svntest.actions.run_and_verify_merge(Acopy_path, '1', '3',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--depth', 'files', Acopy_path)
+
+#----------------------------------------------------------------------
+# Test for issue #2976 Subtrees can lose non-inheritable ranges.
+#
+# Also test for a bug with paths added as the immediate child of the
+# merge target when the merge target has non-inheritable mergeinfo
+# and is also the current working directory, see
+# http://svn.haxx.se/dev/archive-2008-12/0133.shtml.
+#
+# Test for issue #3392 'Parsing error with reverse merges and
+# non-inheritable mergeinfo'.
+#
+# Test issue #3407 'Shallow merges incorrectly set mergeinfo on children'.
+@SkipUnless(server_has_mergeinfo)
+@Issues(2976,3392,3407,4057)
+def merge_away_subtrees_noninheritable_ranges(sbox):
+ "subtrees can lose non-inheritable ranges"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox, nbr_of_branches=2)
+
+ # Some paths we'll care about
+ H_path = sbox.ospath('A/D/H')
+ D_COPY_path = sbox.ospath('A_COPY/D')
+ A_COPY_path = sbox.ospath('A_COPY')
+ nu_path = sbox.ospath('A/nu')
+ mu_path = sbox.ospath('A/mu')
+ mu_2_path = sbox.ospath('A_COPY_2/mu')
+ D_COPY_2_path = sbox.ospath('A_COPY_2/D')
+ H_COPY_2_path = sbox.ospath('A_COPY_2/D/H')
+ mu_COPY_path = sbox.ospath('A_COPY/mu')
+ nu_COPY_path = sbox.ospath('A_COPY/nu')
+
+ # Make a change to directory A/D/H and commit as r8.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(7), [],
+ 'update', wc_dir)
+
+ svntest.actions.run_and_verify_svn(
+ ["property 'prop:name' set on '" + H_path + "'\n"], [],
+ 'ps', 'prop:name', 'propval', H_path)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/H' : Item(verb='Sending'),})
+ wc_status.tweak(wc_rev=7)
+ wc_status.tweak('A/D/H', wc_rev=8)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+
+ # Merge r6:8 --depth immediates to A_COPY/D. This should merge the
+ # prop change from r8 to A_COPY/H but not the change to A_COPY/D/H/omega
+ # from r7 since that is below the depth we are merging to. Instead,
+ # non-inheritable mergeinfo should be set on the immediate directory
+ # child of A_COPY/D that is affected by the merge: A_COPY/D/H.
+ expected_output = wc.State(D_COPY_path, {
+ 'H' : Item(status=' U'),
+ })
+ expected_mergeinfo_output = wc.State(D_COPY_path, {
+ '' : Item(status=' U'),
+ 'H' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(D_COPY_path, {
+ })
+ expected_status = wc.State(D_COPY_path, {
+ '' : Item(status=' M', wc_rev=7),
+ 'H' : Item(status=' M', wc_rev=7),
+ 'H/chi' : Item(status=' ', wc_rev=7),
+ 'H/omega' : Item(status=' ', wc_rev=7),
+ 'H/psi' : Item(status=' ', wc_rev=7),
+ 'G' : Item(status=' ', wc_rev=7),
+ 'G/pi' : Item(status=' ', wc_rev=7),
+ 'G/rho' : Item(status=' ', wc_rev=7),
+ 'G/tau' : Item(status=' ', wc_rev=7),
+ 'gamma' : Item(status=' ', wc_rev=7),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D:7-8'}),
+ 'H' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:7-8*',
+ 'prop:name' : 'propval'}),
+ 'H/chi' : Item("This is the file 'chi'.\n"),
+ 'H/omega' : Item("This is the file 'omega'.\n"),
+ 'H/psi' : Item("This is the file 'psi'.\n"),
+ 'G' : Item(),
+ 'G/pi' : Item("This is the file 'pi'.\n"),
+ 'G/rho' : Item("This is the file 'rho'.\n"),
+ 'G/tau' : Item("This is the file 'tau'.\n"),
+ 'gamma' : Item("This is the file 'gamma'.\n"),
+ })
+ expected_skip = wc.State(D_COPY_path, { })
+ svntest.actions.run_and_verify_merge(D_COPY_path, '6', '8',
+ sbox.repo_url + '/A/D', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--depth', 'immediates', D_COPY_path)
+
+ # Repeat the previous merge but at default depth of infinity. The change
+ # to A_COPY/D/H/omega should now happen and the non-inheritable ranges on
+ # A_COPY/D/G and A_COPY/D/H be changed to inheritable and then elide to
+ # A_COPY/D.
+ expected_output = wc.State(D_COPY_path, {
+ 'H/omega' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(D_COPY_path, {
+ '' : Item(status=' G'),
+ 'H' : Item(status=' G'),
+ 'H/omega' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(D_COPY_path, {
+ 'H' : Item(status=' U'),
+ 'H/omega' : Item(status=' U'),
+ })
+ expected_disk.tweak('', props={SVN_PROP_MERGEINFO : '/A/D:7-8'})
+ expected_disk.tweak('H', props={'prop:name' : 'propval'})
+ expected_disk.tweak('G', props={})
+ expected_disk.tweak('H/omega', contents="New content")
+ expected_status.tweak('G', status=' ')
+ expected_status.tweak('H/omega', status='M ')
+ svntest.actions.run_and_verify_merge(D_COPY_path, '6', '8',
+ sbox.repo_url + '/A/D', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # Now test the problem described in
+ # http://svn.haxx.se/dev/archive-2008-12/0133.shtml.
+ #
+ # First revert all local mods.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+
+ # r9: Merge all available revisions from A to A_COPY at a depth of empty
+ # this will create non-inheritable mergeinfo on A_COPY.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ wc_status.tweak(wc_rev=8)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'merge', '--depth', 'empty',
+ sbox.repo_url + '/A', A_COPY_path)
+ wc_status.tweak('A_COPY', wc_rev=9)
+ expected_output = wc.State(wc_dir, {'A_COPY' : Item(verb='Sending')})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+
+ # r10: Add the file A/nu.
+ svntest.main.file_write(nu_path, "This is the file 'nu'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'add', nu_path)
+ expected_output = wc.State(wc_dir, {'A/nu' : Item(verb='Adding')})
+ wc_status.add({'A/nu' : Item(status=' ', wc_rev=10)})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+
+ # Now merge -c10 from A to A_COPY.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_output = wc.State('', {
+ 'nu': Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State('', {
+ '' : Item(status=' U'),
+ 'nu' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State('', {
+ })
+ expected_status = wc.State('', {
+ '' : Item(status=' M'),
+ 'nu' : Item(status='A ', copied='+'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status=' '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=10)
+ expected_status.tweak('nu', wc_rev='-')
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:2-8*,10'}),
+ 'nu' : Item("This is the file 'nu'.\n",
+ props={SVN_PROP_MERGEINFO : '/A/nu:10'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_skip = wc.State('.', { })
+ saved_cwd = os.getcwd()
+ os.chdir(A_COPY_path)
+ svntest.actions.run_and_verify_merge('', '9', '10',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+ os.chdir(saved_cwd)
+
+ # If a merge target has inheritable and non-inheritable ranges and has a
+ # child with no explicit mergeinfo, test that a merge which brings
+ # mergeinfo changes to that child (i.e. as part of the diff) properly
+ # records mergeinfo on the child that includes both the incoming mergeinfo
+  # *and* the mergeinfo inherited from its parent.
+ #
+ # First revert all local changes and remove A_COPY/C/nu from disk.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+
+ # Make a text change to A_COPY_2/mu in r11 and then merge that
+ # change to A/mu in r12. This will create mergeinfo of '/A_COPY_2/mu:11'
+ # on A/mu.
+ svntest.main.file_write(mu_2_path, 'new content')
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg',
+ wc_dir)
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[11]],
+ ['U ' + mu_path + '\n',
+ ' U ' + mu_path + '\n']),
+ [], 'merge', '-c11', sbox.repo_url + '/A_COPY_2/mu', mu_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg',
+ wc_dir)
+
+ # Now merge r12 from A to A_COPY. A_COPY/mu should get the mergeinfo from
+ # r12, '/A_COPY_2/mu:11' as well as mergeinfo describing the merge itself,
+ # '/A/mu:12'.
+ expected_output = wc.State('.', {
+ 'mu': Item(status='UG'),
+ })
+ expected_mergeinfo_output = wc.State('.', {
+ '' : Item(status=' U'),
+ 'mu' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State('.', {
+ })
+ expected_status = wc.State('', {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status='MM'),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=10)
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:2-8*,12'}),
+ 'B' : Item(),
+ 'mu' : Item("new content",
+ props={SVN_PROP_MERGEINFO : '/A/mu:12\n/A_COPY_2/mu:11'}),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_skip = wc.State('.', { })
+ saved_cwd = os.getcwd()
+ os.chdir(A_COPY_path)
+ # Don't do a dry-run, because it will differ due to the way merge
+ # sets override mergeinfo on the children of paths with non-inheritable
+ # ranges.
+ svntest.actions.run_and_verify_merge('.', '11', '12',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False)
+ os.chdir(saved_cwd)
+
+ # Test for issue #3392
+ #
+ # Revert local changes and update.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Merge r8 from A/D/H to A_COPY_D/H at depth empty. Since r8 affects only
+ # A_COPY/D/H itself, the resulting mergeinfo is inheritable. Commit this
+ # merge as r13.
+ expected_output = wc.State(H_COPY_2_path, {
+ '' : Item(status=' U'),
+ })
+ expected_mergeinfo_output = wc.State(H_COPY_2_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(H_COPY_2_path, {
+ })
+ expected_status = wc.State(H_COPY_2_path, {
+ '' : Item(status=' M', wc_rev=12),
+ 'psi' : Item(status=' ', wc_rev=12),
+ 'omega' : Item(status=' ', wc_rev=12),
+ 'chi' : Item(status=' ', wc_rev=12),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:8',
+ "prop:name" : "propval"}),
+ 'psi' : Item("This is the file 'psi'.\n"),
+ 'omega' : Item("This is the file 'omega'.\n"),
+ 'chi' : Item("This is the file 'chi'.\n"),
+ })
+ expected_skip = wc.State(H_COPY_2_path, {})
+ svntest.actions.run_and_verify_merge(H_COPY_2_path, '7', '8',
+ sbox.repo_url + '/A/D/H', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--depth', 'empty', H_COPY_2_path)
+ svntest.actions.run_and_verify_svn(None, [], 'commit', '-m',
+ 'log msg', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ # Now reverse the prior merge. Issue #3392 manifests itself here with
+ # a mergeinfo parsing error:
+ # >svn merge %url%/A/D/H merge_tests-62\A_COPY_2\D\H -c-8
+ # --- Reverse-merging r8 into 'merge_tests-62\A_COPY_2\D\H':
+ # U merge_tests-62\A_COPY_2\D\H
+ # ..\..\..\subversion\libsvn_subr\mergeinfo.c:590: (apr_err=200020)
+ # svn: Could not parse mergeinfo string '-8'
+ # ..\..\..\subversion\libsvn_subr\kitchensink.c:52: (apr_err=200022)
+ # svn: Negative revision number found parsing '-8'
+ #
+ # Status is identical but for the working revision.
+ expected_status.tweak(wc_rev=13)
+ # The mergeinfo and prop:name props should disappear.
+ expected_disk.remove('')
+ expected_elision_output = wc.State(H_COPY_2_path, {
+ '' : Item(status=' U'),
+ })
+ svntest.actions.run_and_verify_merge(H_COPY_2_path, '8', '7',
+ sbox.repo_url + '/A/D/H', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip,
+ check_props=True)
+
+ # Test issue #3407 'Shallow merges incorrectly set mergeinfo on children'.
+ #
+ # Revert all local mods.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+
+ # Merge all available changes from A to A_COPY at --depth empty. Only the
+ # mergeinfo on A_COPY should be affected.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[9,13]],
+ [' U ' + A_COPY_path + '\n']),
+ [], 'merge', '--depth', 'empty',
+ sbox.repo_url + '/A', A_COPY_path)
+ svntest.actions.run_and_verify_svn([A_COPY_path + ' - /A:2-13*\n'],
+ [], 'pg', SVN_PROP_MERGEINFO,
+ '-R', A_COPY_path)
+
+ # Merge all available changes from A to A_COPY at --depth files. Only the
+ # mergeinfo on A_COPY and its file children should be affected.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+ # Revisions 2-13 are already merged to A_COPY and now they will be merged
+ # to A_COPY's file children. Due to the way we drive the merge editor
+ # r2-3, which are inoperative on A_COPY's file children, do not show up
+ # in the merge notifications, although those revs are included in the
+ # recorded mergeinfo.
+ expected_output = expected_merge_output([[4,13], # Merge notification
+ [9,13], # Merge notification
+ [2,13]], # Mergeinfo notification
+ ['UU %s\n' % (mu_COPY_path),
+ 'A %s\n' % (nu_COPY_path),
+ ' U %s\n' % (A_COPY_path),
+ ' G %s\n' % (mu_COPY_path),
+ ' U %s\n' % (nu_COPY_path),])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'merge', '--depth', 'files',
+ sbox.repo_url + '/A', A_COPY_path)
+ expected_output = svntest.verify.UnorderedOutput(
+ [A_COPY_path + ' - /A:2-13*\n',
+ mu_COPY_path + ' - /A/mu:2-13\n',
+ nu_COPY_path + ' - /A/nu:10-13\n',])
+ svntest.actions.run_and_verify_svn(expected_output,
+ [], 'pg', SVN_PROP_MERGEINFO,
+ '-R', A_COPY_path)
+
+#----------------------------------------------------------------------
+# Test for issue #2827
+# Handle merge info for sparsely-populated directories
+@Issue(2827)
+@SkipUnless(server_has_mergeinfo)
+def merge_to_sparse_directories(sbox):
+ "merge to sparse directories"
+
+ # Merges into sparse working copies should set non-inheritable mergeinfo
+ # on the deepest directories present in the WC.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox, False, 1)
+
+ # Some paths we'll care about
+ A_path = sbox.ospath('A')
+ D_path = sbox.ospath('A/D')
+ I_path = sbox.ospath('A/C/I')
+ G_path = sbox.ospath('A/D/G')
+ A_COPY_path = sbox.ospath('A_COPY')
+
+ # Make a few more changes to the merge source...
+
+ # r7 - modify and commit A/mu
+ svntest.main.file_write(sbox.ospath('A/mu'),
+ "New content")
+ expected_output = wc.State(wc_dir, {'A/mu' : Item(verb='Sending')})
+ wc_status.tweak('A/mu', wc_rev=7)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+ wc_disk.tweak('A/mu', contents="New content")
+
+ # r8 - Add a prop to A/D and commit.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(7), [],
+ 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(["property 'prop:name' set on '" +
+ D_path + "'\n"], [], 'ps',
+ 'prop:name', 'propval', D_path)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D' : Item(verb='Sending'),
+ })
+ wc_status.tweak(wc_rev=7)
+ wc_status.tweak('A/D', wc_rev=8)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+
+ # r9 - Add a prop to A and commit.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [],
+ 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(["property 'prop:name' set on '" +
+ A_path + "'\n"], [], 'ps',
+ 'prop:name', 'propval', A_path)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(verb='Sending'),
+ })
+ wc_status.tweak(wc_rev=8)
+ wc_status.tweak('A', wc_rev=9)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+
+ # Do an --immediates checkout of A_COPY
+ immediates_dir = sbox.add_wc_path('immediates')
+ expected_output = wc.State(immediates_dir, {
+ 'B' : Item(status='A '),
+ 'mu' : Item(status='A '),
+ 'C' : Item(status='A '),
+ 'D' : Item(status='A '),
+ })
+ expected_disk = wc.State('', {
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'C' : Item(),
+ 'D' : Item(),
+ })
+ svntest.actions.run_and_verify_checkout(sbox.repo_url + "/A_COPY",
+ immediates_dir,
+ expected_output, expected_disk,
+ [],
+ "--depth", "immediates")
+
+ # Merge r4:9 into the immediates WC.
+ # The root of the immediates WC should get inheritable r4:9 as should
+ # the one file present 'mu'. The three directory children present, 'B',
+ # 'C', and 'D' are checked out at depth empty; the two of these affected
+ # by the merge, 'B' and 'D', get non-inheritable mergeinfo for r4:9.
+ # The root and 'D' should also get the changes
+ # that affect them directly (the prop adds from r8 and r9).
+ #
+ # Currently this fails due to r1424469. For a full explanation see
+ # http://svn.haxx.se/dev/archive-2012-12/0472.shtml
+ # and http://svn.haxx.se/dev/archive-2012-12/0475.shtml
+ expected_output = wc.State(immediates_dir, {
+ 'D' : Item(status=' U'),
+ 'mu' : Item(status='U '),
+ '' : Item(status=' U'),
+ # Shadowed below skips
+ 'D/H/omega' : Item(status=' ', treeconflict='U'),
+ 'B/E/beta' : Item(status=' ', treeconflict='U'),
+ })
+ expected_mergeinfo_output = wc.State(immediates_dir, {
+ '' : Item(status=' U'),
+ 'B' : Item(status=' U'),
+ 'D' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(immediates_dir, {
+ })
+ expected_status = wc.State(immediates_dir, {
+ '' : Item(status=' M', wc_rev=9),
+ 'B' : Item(status=' M', wc_rev=9),
+ 'mu' : Item(status='M ', wc_rev=9),
+ 'C' : Item(status=' ', wc_rev=9),
+ 'D' : Item(status=' M', wc_rev=9),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:5-9',
+ "prop:name" : "propval"}),
+ 'B' : Item(props={SVN_PROP_MERGEINFO : '/A/B:5-9*'}),
+ 'mu' : Item("New content"),
+ 'C' : Item(),
+ 'D' : Item(props={SVN_PROP_MERGEINFO : '/A/D:5-9*',
+ "prop:name" : "propval"}),
+ })
+ expected_skip = svntest.wc.State(immediates_dir, {
+ 'D/H' : Item(verb='Skipped missing target'),
+ 'B/E' : Item(verb='Skipped missing target'),
+ })
+ svntest.actions.run_and_verify_merge(immediates_dir, '4', '9',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Do a --files checkout of A_COPY
+ files_dir = sbox.add_wc_path('files')
+ expected_output = wc.State(files_dir, {
+ 'mu' : Item(status='A '),
+ })
+ expected_disk = wc.State('', {
+ 'mu' : Item("This is the file 'mu'.\n"),
+ })
+ svntest.actions.run_and_verify_checkout(sbox.repo_url + "/A_COPY",
+ files_dir,
+ expected_output, expected_disk,
+ [],
+ "--depth", "files")
+
+ # Merge r4:9 into the files WC.
+ # The root of the files WC should get non-inheritable r4:9 and its one
+ # present child 'mu' should get the same but inheritable. The root
+ # should also get the change that affects it directly (the prop add
+ # from r9).
+ expected_output = wc.State(files_dir, {
+ 'mu' : Item(status='U '),
+ '' : Item(status=' U'),
+ # Below the skips
+ 'D/H/omega' : Item(status=' ', treeconflict='U'),
+ 'B/E/beta' : Item(status=' ', treeconflict='U'),
+ })
+ expected_mergeinfo_output = wc.State(files_dir, {
+ '' : Item(status=' U'),
+ 'mu' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(files_dir, {
+ })
+ expected_status = wc.State(files_dir, {
+ '' : Item(status=' M', wc_rev=9),
+ 'mu' : Item(status='MM', wc_rev=9),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:5-9*',
+ "prop:name" : "propval"}),
+ 'mu' : Item("New content",
+ props={SVN_PROP_MERGEINFO : '/A/mu:5-9'}),
+ })
+ expected_skip = svntest.wc.State(files_dir, {
+ 'D' : Item(verb='Skipped missing target'),
+ 'B' : Item(verb='Skipped missing target'),
+ })
+ svntest.actions.run_and_verify_merge(files_dir, '4', '9',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Do an --empty checkout of A_COPY
+ empty_dir = sbox.add_wc_path('empty')
+ expected_output = wc.State(empty_dir, {})
+ expected_disk = wc.State('', {})
+ svntest.actions.run_and_verify_checkout(sbox.repo_url + "/A_COPY",
+ empty_dir,
+ expected_output, expected_disk,
+ [],
+ "--depth", "empty")
+
+ # Merge r4:9 into the empty WC.
+ # The root of the empty WC should get non-inheritable r4:9 and also get
+ # the one change that affects it directly (the prop add from r9).
+ expected_output = wc.State(empty_dir, {
+ '' : Item(status=' U'),
+ # Below the skips
+ 'B/E/beta' : Item(status=' ', treeconflict='U'),
+ 'D/H/omega' : Item(status=' ', treeconflict='U'),
+ })
+ expected_mergeinfo_output = wc.State(empty_dir, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(empty_dir, {
+ })
+ expected_status = wc.State(empty_dir, {
+ '' : Item(status=' M', wc_rev=9),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:5-9*',
+ "prop:name" : "propval"}),
+ })
+ expected_skip = svntest.wc.State(empty_dir, {
+ 'mu' : Item(verb='Skipped missing target'),
+ 'D' : Item(verb='Skipped missing target'),
+ 'B' : Item(verb='Skipped missing target'),
+ })
+ svntest.actions.run_and_verify_merge(empty_dir, '4', '9',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Check that default depth for merge is infinity.
+ #
+ # Revert the previous changes to the immediates WC and update one
+ # child in that WC to depth infinity.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R',
+ immediates_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth',
+ 'infinity',
+ os.path.join(immediates_dir, 'D'))
+ # Now merge r6 into the immediates WC; even though the root of the
+ # WC is at depth immediates, the subtree rooted at child 'D' is fully
+ # present, so a merge of r6 should affect 'D/H/omega'.
+ expected_output = wc.State(immediates_dir, {
+ 'D/H/omega' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(immediates_dir, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(immediates_dir, {
+ })
+ expected_status = wc.State(immediates_dir, {
+ '' : Item(status=' M', wc_rev=9),
+ 'B' : Item(status=' ', wc_rev=9),
+ 'mu' : Item(status=' ', wc_rev=9),
+ 'C' : Item(status=' ', wc_rev=9),
+ 'D' : Item(status=' ', wc_rev=9),
+ 'D/gamma' : Item(status=' ', wc_rev=9),
+ 'D/G' : Item(status=' ', wc_rev=9),
+ 'D/G/pi' : Item(status=' ', wc_rev=9),
+ 'D/G/rho' : Item(status=' ', wc_rev=9),
+ 'D/G/tau' : Item(status=' ', wc_rev=9),
+ 'D/H' : Item(status=' ', wc_rev=9),
+ 'D/H/chi' : Item(status=' ', wc_rev=9),
+ 'D/H/omega' : Item(status='M ', wc_rev=9),
+ 'D/H/psi' : Item(status=' ', wc_rev=9),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:6'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State(immediates_dir, {})
+ svntest.actions.run_and_verify_merge(immediates_dir, '5', '6',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def merge_old_and_new_revs_from_renamed_dir(sbox):
+ "merge -rold(before rename):head renamed dir"
+
+ # See the email on dev@ from Paul Burba, 2007-09-27, "RE: svn commit:
+ # r26803 - [...]", <http://svn.haxx.se/dev/archive-2007-09/0706.shtml> or
+ # <http://subversion.tigris.org/ds/viewMessage.do?dsForumId=462&dsMessageId=927127>.
+
+ # Create a WC with a single branch
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox, True, 1)
+
+ # Some paths we'll care about
+ A_url = sbox.repo_url + '/A'
+ A_MOVED_url = sbox.repo_url + '/A_MOVED'
+ A_COPY_path = sbox.ospath('A_COPY')
+ mu_path = sbox.ospath('A/mu')
+ A_MOVED_mu_path = sbox.ospath('A_MOVED/mu')
+
+ # Make a modification to A/mu
+ svntest.main.file_write(mu_path, "This is the file 'mu' modified.\n")
+ expected_output = wc.State(wc_dir, {'A/mu' : Item(verb='Sending')})
+ wc_status.add({'A/mu' : Item(status=' ', wc_rev=3)})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+
+ # Move A to A_MOVED
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 4.\n'],
+ [], 'mv', '-m', 'mv A to A_MOVED',
+ A_url, A_MOVED_url)
+
+ # Update the working copy to get A_MOVED
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Make a modification to A_MOVED/mu
+ svntest.main.file_write(A_MOVED_mu_path, "This is 'mu' in A_MOVED.\n")
+ expected_output = wc.State(wc_dir, {'A_MOVED/mu' : Item(verb='Sending')})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 4)
+ expected_status.remove('A', 'A/mu', 'A/C', 'A/D', 'A/B', 'A/B/lambda',
+ 'A/B/E', 'A/B/E/alpha', 'A/B/E/beta', 'A/B/F',
+ 'A/D/gamma', 'A/D/G', 'A/D/G/pi', 'A/D/G/rho',
+ 'A/D/G/tau', 'A/D/H', 'A/D/H/chi', 'A/D/H/omega',
+ 'A/D/H/psi')
+ expected_status.add({
+ '' : Item(status=' ', wc_rev=4),
+ 'iota' : Item(status=' ', wc_rev=4),
+ 'A_MOVED' : Item(status=' ', wc_rev=4),
+ 'A_MOVED/mu' : Item(status=' ', wc_rev=5),
+ 'A_MOVED/C' : Item(status=' ', wc_rev=4),
+ 'A_MOVED/D' : Item(status=' ', wc_rev=4),
+ 'A_MOVED/B' : Item(status=' ', wc_rev=4),
+ 'A_MOVED/B/lambda' : Item(status=' ', wc_rev=4),
+ 'A_MOVED/B/E' : Item(status=' ', wc_rev=4),
+ 'A_MOVED/B/E/alpha': Item(status=' ', wc_rev=4),
+ 'A_MOVED/B/E/beta' : Item(status=' ', wc_rev=4),
+ 'A_MOVED/B/F' : Item(status=' ', wc_rev=4),
+ 'A_MOVED/D/gamma' : Item(status=' ', wc_rev=4),
+ 'A_MOVED/D/G' : Item(status=' ', wc_rev=4),
+ 'A_MOVED/D/G/pi' : Item(status=' ', wc_rev=4),
+ 'A_MOVED/D/G/rho' : Item(status=' ', wc_rev=4),
+ 'A_MOVED/D/G/tau' : Item(status=' ', wc_rev=4),
+ 'A_MOVED/D/H' : Item(status=' ', wc_rev=4),
+ 'A_MOVED/D/H/chi' : Item(status=' ', wc_rev=4),
+ 'A_MOVED/D/H/omega': Item(status=' ', wc_rev=4),
+ 'A_MOVED/D/H/psi' : Item(status=' ', wc_rev=4),
+ 'A_COPY' : Item(status=' ', wc_rev=4),
+ 'A_COPY/mu' : Item(status=' ', wc_rev=4),
+ 'A_COPY/C' : Item(status=' ', wc_rev=4),
+ 'A_COPY/D' : Item(status=' ', wc_rev=4),
+ 'A_COPY/B' : Item(status=' ', wc_rev=4),
+ 'A_COPY/B/lambda' : Item(status=' ', wc_rev=4),
+ 'A_COPY/B/E' : Item(status=' ', wc_rev=4),
+ 'A_COPY/B/E/alpha' : Item(status=' ', wc_rev=4),
+ 'A_COPY/B/E/beta' : Item(status=' ', wc_rev=4),
+ 'A_COPY/B/F' : Item(status=' ', wc_rev=4),
+ 'A_COPY/D/gamma' : Item(status=' ', wc_rev=4),
+ 'A_COPY/D/G' : Item(status=' ', wc_rev=4),
+ 'A_COPY/D/G/pi' : Item(status=' ', wc_rev=4),
+ 'A_COPY/D/G/rho' : Item(status=' ', wc_rev=4),
+ 'A_COPY/D/G/tau' : Item(status=' ', wc_rev=4),
+ 'A_COPY/D/H' : Item(status=' ', wc_rev=4),
+ 'A_COPY/D/H/chi' : Item(status=' ', wc_rev=4),
+ 'A_COPY/D/H/omega' : Item(status=' ', wc_rev=4),
+ 'A_COPY/D/H/psi' : Item(status=' ', wc_rev=4),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Merge /A_MOVED to /A_COPY - this happens in multiple passes
+ # because /A_MOVED has renames in its history between the boundaries
+ # of the requested merge range.
+ expected_output = wc.State(A_COPY_path, {
+ 'mu' : Item(status='G ', prev_status='U '), # mu gets touched twice
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' G', prev_status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=4),
+ 'mu' : Item(status='M ', wc_rev=4),
+ 'C' : Item(status=' ', wc_rev=4),
+ 'D' : Item(status=' ', wc_rev=4),
+ 'B' : Item(status=' ', wc_rev=4),
+ 'B/lambda' : Item(status=' ', wc_rev=4),
+ 'B/E' : Item(status=' ', wc_rev=4),
+ 'B/E/alpha': Item(status=' ', wc_rev=4),
+ 'B/E/beta' : Item(status=' ', wc_rev=4),
+ 'B/F' : Item(status=' ', wc_rev=4),
+ 'D/gamma' : Item(status=' ', wc_rev=4),
+ 'D/G' : Item(status=' ', wc_rev=4),
+ 'D/G/pi' : Item(status=' ', wc_rev=4),
+ 'D/G/rho' : Item(status=' ', wc_rev=4),
+ 'D/G/tau' : Item(status=' ', wc_rev=4),
+ 'D/H' : Item(status=' ', wc_rev=4),
+ 'D/H/chi' : Item(status=' ', wc_rev=4),
+ 'D/H/omega': Item(status=' ', wc_rev=4),
+ 'D/H/psi' : Item(status=' ', wc_rev=4),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:3\n/A_MOVED:4-5'}),
+ 'mu' : Item("This is 'mu' in A_MOVED.\n"),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'B' : Item(),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha': Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/F' : Item(),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/omega': Item("This is the file 'omega'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_path, {})
+
+ ### Disabling dry_run mode because currently it can't handle the way
+ ### 'mu' gets textually modified in multiple passes.
+ svntest.actions.run_and_verify_merge(A_COPY_path, '2', '5',
+ A_MOVED_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def merge_with_child_having_different_rev_ranges_to_merge(sbox):
+ "child having different rev ranges to merge"
+ #Modify A/mu to 30 lines with a content 'line1'...'line30' commit it at r2.
+ #Create a branch A_COPY from A, commit it at r3.
+ #Modify A/mu line number 7 to 'LINE7' modify and commit at r4.
+ #Modify A/mu line number 17 to 'LINE17' modify, set prop 'prop1' on 'A'
+ #with a value 'val1' and commit at r5.
+ #Modify A/mu line number 27 to 'LINE27' modify and commit at r6.
+ #Merge r5 to 'A/mu' as a single file merge explicitly to 'A_COPY/mu'.
+ #Merge r3:6 from 'A' to 'A_COPY'
+ #This should merge r4 and then r5 through r6.
+ #Revert r5 and r6 via single file merge on A_COPY/mu.
+ #Revert r6 through r4 on A_COPY this should get back us the pristine copy.
+ #Merge r3:6 from 'A' to 'A_COPY'
+ #Revert r5 on A_COPY/mu
+ #Modify line number 17 with 'some other line17' of A_COPY/mu
+ #Merge r6:3 from 'A' to 'A_COPY'; this should leave line number 17
+ #undisturbed in A_COPY/mu, rest should be reverted.
+
+ # Create a WC
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ A_path = sbox.ospath('A')
+ mu_path = sbox.ospath('A/mu')
+ A_url = sbox.repo_url + '/A'
+ A_mu_url = sbox.repo_url + '/A/mu'
+ A_COPY_url = sbox.repo_url + '/A_COPY'
+ A_COPY_path = sbox.ospath('A_COPY')
+ A_COPY_mu_path = sbox.ospath('A_COPY/mu')
+ thirty_line_dummy_text = 'line1\n'
+ for i in range(2, 31):
+ thirty_line_dummy_text += 'line' + str(i) + '\n'
+
+ svntest.main.file_write(mu_path, thirty_line_dummy_text)
+ expected_output = wc.State(wc_dir, {'A/mu' : Item(verb='Sending')})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', A_url, A_COPY_url, '-m', 'rev 3')
+ # Update the working copy to get A_COPY
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_status.add({'A_COPY' : Item(status=' '),
+ 'A_COPY/mu' : Item(status=' '),
+ 'A_COPY/C' : Item(status=' '),
+ 'A_COPY/D' : Item(status=' '),
+ 'A_COPY/B' : Item(status=' '),
+ 'A_COPY/B/lambda' : Item(status=' '),
+ 'A_COPY/B/E' : Item(status=' '),
+ 'A_COPY/B/E/alpha' : Item(status=' '),
+ 'A_COPY/B/E/beta' : Item(status=' '),
+ 'A_COPY/B/F' : Item(status=' '),
+ 'A_COPY/D/gamma' : Item(status=' '),
+ 'A_COPY/D/G' : Item(status=' '),
+ 'A_COPY/D/G/pi' : Item(status=' '),
+ 'A_COPY/D/G/rho' : Item(status=' '),
+ 'A_COPY/D/G/tau' : Item(status=' '),
+ 'A_COPY/D/H' : Item(status=' '),
+ 'A_COPY/D/H/chi' : Item(status=' '),
+ 'A_COPY/D/H/omega' : Item(status=' '),
+ 'A_COPY/D/H/psi' : Item(status=' ')})
+ expected_status.tweak(wc_rev=3)
+ tweaked_7th_line = thirty_line_dummy_text.replace('line7', 'LINE 7')
+ svntest.main.file_write(mu_path, tweaked_7th_line)
+ expected_status.tweak('A/mu', wc_rev=4)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_status.tweak(wc_rev=4)
+ tweaked_17th_line = tweaked_7th_line.replace('line17', 'LINE 17')
+ svntest.main.file_write(mu_path, tweaked_17th_line)
+ svntest.main.run_svn(None, 'propset', 'prop1', 'val1', A_path)
+ expected_output = wc.State(wc_dir,
+ {
+ 'A' : Item(verb='Sending'),
+ 'A/mu' : Item(verb='Sending')
+ }
+ )
+ expected_status.tweak('A', wc_rev=5)
+ expected_status.tweak('A/mu', wc_rev=5)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ tweaked_27th_line = tweaked_17th_line.replace('line27', 'LINE 27')
+ svntest.main.file_write(mu_path, tweaked_27th_line)
+ expected_status.tweak('A/mu', wc_rev=6)
+ expected_output = wc.State(wc_dir, {'A/mu' : Item(verb='Sending')})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ # Merge r5 to A_COPY/mu
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[5]],
+ ['U ' + A_COPY_mu_path + '\n',
+ ' U ' + A_COPY_mu_path + '\n']),
+ [], 'merge', '-r4:5', A_mu_url, A_COPY_mu_path)
+
+ expected_skip = wc.State(A_COPY_path, {})
+ expected_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'mu' : Item(status='G ', prev_status='G '), # Updated twice
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'mu' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ 'mu' : Item(status=' U'),
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=4),
+ 'mu' : Item(status='M ', wc_rev=4),
+ 'C' : Item(status=' ', wc_rev=4),
+ 'D' : Item(status=' ', wc_rev=4),
+ 'B' : Item(status=' ', wc_rev=4),
+ 'B/lambda' : Item(status=' ', wc_rev=4),
+ 'B/E' : Item(status=' ', wc_rev=4),
+ 'B/E/alpha': Item(status=' ', wc_rev=4),
+ 'B/E/beta' : Item(status=' ', wc_rev=4),
+ 'B/F' : Item(status=' ', wc_rev=4),
+ 'D/gamma' : Item(status=' ', wc_rev=4),
+ 'D/G' : Item(status=' ', wc_rev=4),
+ 'D/G/pi' : Item(status=' ', wc_rev=4),
+ 'D/G/rho' : Item(status=' ', wc_rev=4),
+ 'D/G/tau' : Item(status=' ', wc_rev=4),
+ 'D/H' : Item(status=' ', wc_rev=4),
+ 'D/H/chi' : Item(status=' ', wc_rev=4),
+ 'D/H/omega': Item(status=' ', wc_rev=4),
+ 'D/H/psi' : Item(status=' ', wc_rev=4),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:4-6',
+ 'prop1' : 'val1'}),
+ 'mu' : Item(tweaked_27th_line),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'B' : Item(),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha': Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/F' : Item(),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/omega': Item("This is the file 'omega'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ })
+ svntest.actions.run_and_verify_merge(A_COPY_path, '3', '6',
+ A_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+ # Revert r5 and r6 on A_COPY/mu
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[6,5]],
+ ['G ' + A_COPY_mu_path + '\n',
+ ' G ' + A_COPY_mu_path + '\n']),
+ [], 'merge', '-r6:4', A_mu_url, A_COPY_mu_path)
+
+ expected_output = wc.State(A_COPY_path, {
+ '' : Item(status=' G'), # merged removal of prop1 property
+ 'mu' : Item(status='G '), # merged reversion of text changes
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' G'),
+ 'mu' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'mu' : Item(status=' U'),
+ })
+ expected_status.tweak('', status=' ')
+ expected_status.tweak('mu', status=' ')
+ expected_disk.tweak('', props={})
+ expected_disk.remove('')
+ expected_disk.tweak('mu', contents=thirty_line_dummy_text)
+ svntest.actions.run_and_verify_merge(A_COPY_path, '6', '3',
+ A_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ expected_disk.add({'' : Item(props={SVN_PROP_MERGEINFO : '/A:4-6',
+ 'prop1' : 'val1'})})
+ expected_disk.tweak('mu', contents=tweaked_27th_line)
+ expected_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'), # new mergeinfo and prop1 property
+ 'mu' : Item(status='U '), # text changes
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status.tweak('', status=' M')
+ expected_status.tweak('mu', status='M ')
+ svntest.actions.run_and_verify_merge(A_COPY_path, '3', '6',
+ A_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+ #Revert r5 on A_COPY/mu
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[-5]],
+ ['G ' + A_COPY_mu_path + '\n',
+ ' G ' + A_COPY_mu_path + '\n']),
+ [], 'merge', '-r5:4', A_mu_url, A_COPY_mu_path)
+ tweaked_17th_line_1 = tweaked_27th_line.replace('LINE 17',
+ 'some other line17')
+ tweaked_17th_line_2 = thirty_line_dummy_text.replace('line17',
+ 'some other line17')
+ svntest.main.file_write(A_COPY_mu_path, tweaked_17th_line_1)
+ expected_output = wc.State(A_COPY_path, {
+ '' : Item(status=' G'),
+ 'mu' : Item(status='G ', prev_status='G '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' G'),
+ 'mu' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'mu' : Item(status=' U'),
+ })
+ expected_status.tweak('', status=' ')
+ expected_status.tweak('mu', status='M ')
+ expected_disk.remove('')
+ expected_disk.tweak('mu', contents=tweaked_17th_line_2)
+ svntest.actions.run_and_verify_merge(A_COPY_path, '6', '3',
+ A_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def merge_old_and_new_revs_from_renamed_file(sbox):
+ "merge -rold(before rename):head renamed file"
+
+ ## See http://svn.haxx.se/dev/archive-2007-09/0706.shtml ##
+
+ # Create a WC
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ mu_url = sbox.repo_url + '/A/mu'
+ mu_MOVED_url = sbox.repo_url + '/A/mu_MOVED'
+ mu_COPY_url = sbox.repo_url + '/A/mu_COPY'
+ mu_COPY_path = sbox.ospath('A/mu_COPY')
+ mu_path = sbox.ospath('A/mu')
+ mu_MOVED_path = sbox.ospath('A/mu_MOVED')
+
+ # Copy mu to mu_COPY
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 2.\n'],
+ [], 'cp', '-m', 'cp mu to mu_COPY',
+ mu_url, mu_COPY_url)
+
+ # Make a modification to A/mu
+ svntest.main.file_write(mu_path, "This is the file 'mu' modified.\n")
+ expected_output = wc.State(wc_dir, {'A/mu' : Item(verb='Sending')})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=3)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Move mu to mu_MOVED
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 4.\n'],
+ [], 'mv', '-m', 'mv mu to mu_MOVED',
+ mu_url, mu_MOVED_url)
+
+ # Update the working copy to get mu_MOVED
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Make a modification to mu_MOVED
+ svntest.main.file_write(mu_MOVED_path, "This is 'mu' in mu_MOVED.\n")
+ expected_output = wc.State(wc_dir, {'A/mu_MOVED' : Item(verb='Sending')})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 4)
+ expected_status.remove('A/mu')
+ expected_status.add({
+ 'A/mu_MOVED' : Item(status=' ', wc_rev=5),
+ 'A/mu_COPY' : Item(status=' ', wc_rev=4),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Merge A/mu_MOVED to A/mu_COPY - this happens in multiple passes
+ # because A/mu_MOVED has renames in its history between the
+ # boundaries of the requested merge range.
+ expected_output = expected_merge_output([[2,3],[4,5]],
+ ['U %s\n' % (mu_COPY_path),
+ ' U %s\n' % (mu_COPY_path),
+ 'G %s\n' % (mu_COPY_path),
+ ' G %s\n' % (mu_COPY_path),])
+ svntest.actions.run_and_verify_svn(expected_output,
+ [], 'merge', '-r', '1:5',
+ mu_MOVED_url,
+ mu_COPY_path)
+ svntest.actions.run_and_verify_svn(['/A/mu:2-3\n',
+ '/A/mu_MOVED:4-5\n'],
+ [], 'propget', SVN_PROP_MERGEINFO,
+ mu_COPY_path)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def merge_with_auto_rev_range_detection(sbox):
+ "merge with auto detection of revision ranges"
+
+ ## See http://svn.haxx.se/dev/archive-2007-09/0735.shtml ##
+
+ # Create a WC
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ A_url = sbox.repo_url + '/A'
+ A_COPY_url = sbox.repo_url + '/A_COPY'
+ B1_path = sbox.ospath('A/B1')
+ B1_mu_path = sbox.ospath('A/B1/mu')
+ A_COPY_path = sbox.ospath('A_COPY')
+
+ # Create B1 inside A
+ svntest.actions.run_and_verify_svn(["A " + B1_path + "\n"],
+ [], 'mkdir',
+ B1_path)
+
+ # Add a file mu inside B1
+ svntest.main.file_write(B1_mu_path, "This is the file 'mu'.\n")
+ svntest.actions.run_and_verify_svn(["A " + B1_mu_path + "\n"],
+ [], 'add', B1_mu_path)
+
+ # Commit B1 and B1/mu
+ expected_output = wc.State(wc_dir, {
+ 'A/B1' : Item(verb='Adding'),
+ 'A/B1/mu' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B1' : Item(status=' ', wc_rev=2),
+ 'A/B1/mu' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Copy A to A_COPY
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 3.\n'],
+ [], 'cp', '-m', 'cp A to A_COPY',
+ A_url, A_COPY_url)
+
+ # Make a modification to A/B1/mu
+ svntest.main.file_write(B1_mu_path, "This is the file 'mu' modified.\n")
+ expected_output = wc.State(wc_dir, {'A/B1/mu' : Item(verb='Sending')})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B1' : Item(status=' ', wc_rev=2),
+ 'A/B1/mu' : Item(status=' ', wc_rev=4),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Update the working copy to get A_COPY
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Merge /A to /A_COPY
+ expected_output = wc.State(A_COPY_path, {
+ 'B1/mu' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=4),
+ 'mu' : Item(status=' ', wc_rev=4),
+ 'C' : Item(status=' ', wc_rev=4),
+ 'D' : Item(status=' ', wc_rev=4),
+ 'B' : Item(status=' ', wc_rev=4),
+ 'B/lambda' : Item(status=' ', wc_rev=4),
+ 'B/E' : Item(status=' ', wc_rev=4),
+ 'B/E/alpha': Item(status=' ', wc_rev=4),
+ 'B/E/beta' : Item(status=' ', wc_rev=4),
+ 'B/F' : Item(status=' ', wc_rev=4),
+ 'B1' : Item(status=' ', wc_rev=4),
+ 'B1/mu' : Item(status='M ', wc_rev=4),
+ 'D/gamma' : Item(status=' ', wc_rev=4),
+ 'D/G' : Item(status=' ', wc_rev=4),
+ 'D/G/pi' : Item(status=' ', wc_rev=4),
+ 'D/G/rho' : Item(status=' ', wc_rev=4),
+ 'D/G/tau' : Item(status=' ', wc_rev=4),
+ 'D/H' : Item(status=' ', wc_rev=4),
+ 'D/H/chi' : Item(status=' ', wc_rev=4),
+ 'D/H/omega': Item(status=' ', wc_rev=4),
+ 'D/H/psi' : Item(status=' ', wc_rev=4),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:3-4'}),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'B' : Item(),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha': Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/F' : Item(),
+ 'B1' : Item(),
+ 'B1/mu' : Item("This is the file 'mu' modified.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/omega': Item("This is the file 'omega'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_path, {})
+ svntest.actions.run_and_verify_merge(A_COPY_path, None, None,
+ A_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+
+#----------------------------------------------------------------------
+# Test for issue 2818: Provide a 'merge' API which allows for merging of
+# arbitrary revision ranges (e.g. '-c 3,5,7')
+@Issue(2818)
+@SkipUnless(server_has_mergeinfo)
+def cherry_picking(sbox):
+ "command line supports cherry picked merge ranges"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ # Some paths we'll care about
+ H_path = sbox.ospath('A/D/H')
+ G_path = sbox.ospath('A/D/G')
+ A_COPY_path = sbox.ospath('A_COPY')
+ D_COPY_path = sbox.ospath('A_COPY/D')
+ G_COPY_path = sbox.ospath('A_COPY/D/G')
+ H_COPY_path = sbox.ospath('A_COPY/D/H')
+ rho_COPY_path = sbox.ospath('A_COPY/D/G/rho')
+ omega_COPY_path = sbox.ospath('A_COPY/D/H/omega')
+ psi_COPY_path = sbox.ospath('A_COPY/D/H/psi')
+
+ # Update working copy
+ expected_output = svntest.wc.State(wc_dir, {})
+ wc_status.tweak(wc_rev='6')
+ svntest.actions.run_and_verify_update(wc_dir, expected_output,
+ wc_disk, wc_status,
+ check_props=True)
+
+ # Make some prop changes to some dirs.
+ svntest.actions.run_and_verify_svn(["property 'prop:name' set on '" +
+ G_path + "'\n"], [], 'ps',
+ 'prop:name', 'propval', G_path)
+ expected_output = svntest.wc.State(wc_dir, {'A/D/G': Item(verb='Sending'),})
+ wc_status.tweak('A/D/G', wc_rev=7)
+ wc_disk.tweak('A/D/G', props={'prop:name' : 'propval'})
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+ svntest.actions.run_and_verify_svn(["property 'prop:name' set on '" +
+ H_path + "'\n"], [], 'ps',
+ 'prop:name', 'propval', H_path)
+ expected_output = svntest.wc.State(wc_dir, {'A/D/H': Item(verb='Sending'),})
+ wc_status.tweak('A/D/H', wc_rev=8)
+ wc_disk.tweak('A/D/H', props={'prop:name' : 'propval'})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+
+ # Do multiple additive merges to a file"
+ # Merge -r2:4 -c6 into A_COPY/D/G/rho.
+ expected_skip = wc.State(rho_COPY_path, { })
+ # run_and_verify_merge doesn't support merging to a file WCPATH
+ # so use run_and_verify_svn.
+ ### TODO: We can use run_and_verify_merge() here now.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[3,4],[6]],
+ ['U ' + rho_COPY_path + '\n',
+ ' U ' + rho_COPY_path + '\n',
+ ' G ' + rho_COPY_path + '\n',]),
+ [], 'merge', '-r2:4', '-c6',
+ sbox.repo_url + '/A/D/G/rho', rho_COPY_path)
+
+ # Check rho's status and props.
+ expected_status = wc.State(rho_COPY_path,
+ {'' : Item(status='MM', wc_rev=6)})
+ svntest.actions.run_and_verify_status(rho_COPY_path, expected_status)
+ svntest.actions.run_and_verify_svn(["/A/D/G/rho:3-4,6\n"], [],
+ 'propget', SVN_PROP_MERGEINFO,
+ rho_COPY_path)
+
+ #Do multiple additive merges to a directory:
+ # Merge -c6 -c8 into A_COPY/D/H
+ expected_output = expected_merge_output(
+ [[6],[8]],
+ ['U ' + omega_COPY_path + '\n',
+ ' U ' + H_COPY_path + '\n',
+ ' G ' + H_COPY_path + '\n',])
+ svntest.actions.run_and_verify_svn(expected_output,
+ [], 'merge', '-c6', '-c8',
+ sbox.repo_url + '/A/D/H',
+ H_COPY_path)
+
+ # Check A_COPY/D/H's status and props.
+ expected_status = wc.State(H_COPY_path,
+ {'' : Item(status=' M', wc_rev=6),
+ 'psi' : Item(status=' ', wc_rev=6),
+ 'chi' : Item(status=' ', wc_rev=6),
+ 'omega': Item(status='M ', wc_rev=6),})
+ svntest.actions.run_and_verify_status(H_COPY_path, expected_status)
+ svntest.actions.run_and_verify_svn([H_COPY_path + " - /A/D/H:6,8\n"],
+ [], 'propget', '-R', SVN_PROP_MERGEINFO,
+ H_COPY_path)
+
+ # Do multiple reverse merges to a directory:
+ # Merge -c-6 -c-3 into A_COPY
+ expected_output = expected_merge_output(
+ [[-3],[-6]],
+ ['G ' + omega_COPY_path + '\n',
+ ' U ' + A_COPY_path + '\n',
+ ' U ' + H_COPY_path + '\n',
+ ' G ' + A_COPY_path + '\n',
+ ' G ' + H_COPY_path + '\n',],
+ elides=True)
+ svntest.actions.run_and_verify_svn(expected_output,
+ [], 'merge', '-c-3', '-c-6',
+ sbox.repo_url + '/A',
+ A_COPY_path)
+ expected_status = wc.State(A_COPY_path,
+ {'' : Item(status=' ', wc_rev=6),
+ 'B' : Item(status=' ', wc_rev=6),
+ 'B/lambda' : Item(status=' ', wc_rev=6),
+ 'B/E' : Item(status=' ', wc_rev=6),
+ 'B/E/alpha' : Item(status=' ', wc_rev=6),
+ 'B/E/beta' : Item(status=' ', wc_rev=6),
+ 'B/F' : Item(status=' ', wc_rev=6),
+ 'mu' : Item(status=' ', wc_rev=6),
+ 'C' : Item(status=' ', wc_rev=6),
+ 'D' : Item(status=' ', wc_rev=6),
+ 'D/gamma' : Item(status=' ', wc_rev=6),
+ 'D/G' : Item(status=' ', wc_rev=6),
+ 'D/G/pi' : Item(status=' ', wc_rev=6),
+ 'D/G/rho' : Item(status='MM', wc_rev=6),
+ 'D/G/tau' : Item(status=' ', wc_rev=6),
+ 'D/H' : Item(status=' M', wc_rev=6),
+ 'D/H/chi' : Item(status=' ', wc_rev=6),
+ 'D/H/psi' : Item(status=' ', wc_rev=6),
+ 'D/H/omega' : Item(status=' ', wc_rev=6),})
+ svntest.actions.run_and_verify_status(A_COPY_path, expected_status)
+ # A_COPY/D/G/rho is untouched by the merge so its mergeinfo
+ # remains unchanged.
+ expected_out = H_COPY_path + " - /A/D/H:8\n|" + \
+ rho_COPY_path + " - /A/D/G/rho:3-4,6\n"
+ # Construct proper regex for '\' infested Windows paths.
+ if sys.platform == 'win32':
+ expected_out = expected_out.replace("\\", "\\\\")
+ svntest.actions.run_and_verify_svn(expected_out, [],
+ 'propget', '-R', SVN_PROP_MERGEINFO,
+ A_COPY_path)
+
+ # Do both additive and reverse merges to a directory:
+ # Merge -r2:3 -c-4 -r4:7 to A_COPY/D
+ expected_output = expected_merge_output(
+ [[3], [-4], [6,7], [5,7]],
+ [' U ' + G_COPY_path + '\n',
+ 'U ' + omega_COPY_path + '\n',
+ 'U ' + psi_COPY_path + '\n',
+ ' U ' + D_COPY_path + '\n',
+ ' G ' + D_COPY_path + '\n',
+ ' U ' + H_COPY_path + '\n',
+ ' G ' + H_COPY_path + '\n',
+ 'G ' + rho_COPY_path + '\n',
+ ' U ' + rho_COPY_path + '\n',
+ ' G ' + rho_COPY_path + '\n'],
+ elides=True)
+ svntest.actions.run_and_verify_svn(expected_output, [], 'merge',
+ '-r2:3', '-c-4', '-r4:7',
+ sbox.repo_url + '/A/D',
+ D_COPY_path)
+ expected_status = wc.State(D_COPY_path,
+ {'' : Item(status=' M', wc_rev=6),
+ 'gamma' : Item(status=' ', wc_rev=6),
+ 'G' : Item(status=' M', wc_rev=6),
+ 'G/pi' : Item(status=' ', wc_rev=6),
+ 'G/rho' : Item(status=' ', wc_rev=6),
+ 'G/tau' : Item(status=' ', wc_rev=6),
+ 'H' : Item(status=' M', wc_rev=6),
+ 'H/chi' : Item(status=' ', wc_rev=6),
+ 'H/psi' : Item(status='M ', wc_rev=6),
+ 'H/omega' : Item(status='M ', wc_rev=6),})
+ svntest.actions.run_and_verify_status(D_COPY_path, expected_status)
+ expected_out = D_COPY_path + " - /A/D:3,5-7\n|" + \
+ H_COPY_path + " - /A/D/H:3,5-8\n|" + \
+ rho_COPY_path + " - /A/D/G/rho:3-4,6\n"
+ # Construct proper regex for '\' infested Windows paths.
+ if sys.platform == 'win32':
+ expected_out = expected_out.replace("\\", "\\\\")
+ svntest.actions.run_and_verify_svn(expected_out, [],
+ 'propget', '-R', SVN_PROP_MERGEINFO,
+ D_COPY_path)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+@Issue(2969)
+def propchange_of_subdir_raises_conflict(sbox):
+ "merge of propchange on subdir raises conflict"
+
+ ## See http://subversion.tigris.org/issues/show_bug.cgi?id=2969. ##
+
+ # Create a WC with a single branch
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox, True, 1)
+
+ # Some paths we'll care about
+ B_url = sbox.repo_url + '/A/B'
+ E_path = sbox.ospath('A/B/E')
+ lambda_path = sbox.ospath('A/B/lambda')
+ A_COPY_B_path = sbox.ospath('A_COPY/B')
+ A_COPY_B_E_path = sbox.ospath('A_COPY/B/E')
+ A_COPY_lambda_path = sbox.ospath('A_COPY/B/E/lambda')
+
+ # Set a property on A/B/E and Make a modification to A/B/lambda
+ svntest.main.run_svn(None, 'propset', 'x', 'x', E_path)
+
+ svntest.main.file_write(lambda_path, "This is the file 'lambda' modified.\n")
+ expected_output = wc.State(wc_dir, {
+ 'A/B/lambda' : Item(verb='Sending'),
+ 'A/B/E' : Item(verb='Sending'),
+ })
+ wc_status.add({
+ 'A/B/lambda' : Item(status=' ', wc_rev=3),
+ 'A/B/E' : Item(status=' ', wc_rev=3),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+
+ # Merge /A/B to /A_COPY/B ie., r1 to r3 with depth files
+ expected_output = wc.State(A_COPY_B_path, {
+ 'lambda' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_B_path, {
+ '' : Item(status=' U'),
+ 'lambda' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_B_path, {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:2-3*'}),
+ 'lambda' : Item(contents="This is the file 'lambda' modified.\n",
+ props={SVN_PROP_MERGEINFO : '/A/B/lambda:2-3'}),
+ 'F' : Item(),
+ 'E' : Item(),
+ 'E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'E/beta' : Item(contents="This is the file 'beta'.\n"),
+ })
+ expected_status = wc.State(A_COPY_B_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'lambda' : Item(status='MM', wc_rev=2),
+ 'F' : Item(status=' ', wc_rev=2),
+ 'E' : Item(status=' ', wc_rev=2),
+ 'E/alpha' : Item(status=' ', wc_rev=2),
+ 'E/beta' : Item(status=' ', wc_rev=2),
+ })
+ expected_skip = wc.State(A_COPY_B_path, {})
+
+ svntest.actions.run_and_verify_merge(A_COPY_B_path, None, None,
+ B_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True,
+ '--depth', 'files',
+ A_COPY_B_path)
+
+ # Merge /A/B to /A_COPY/B ie., r1 to r3 with infinite depth
+ expected_output = wc.State(A_COPY_B_path, {
+ 'E' : Item(status=' U'),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_B_path, {
+ '' : Item(status=' G'),
+ 'E' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(A_COPY_B_path, {
+ 'E' : Item(status=' U'),
+ 'lambda' : Item(status=' U'),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:2-3'}),
+ 'lambda' : Item(contents="This is the file 'lambda' modified.\n"),
+ 'F' : Item(),
+ 'E' : Item(props={'x': 'x'}),
+ 'E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'E/beta' : Item(contents="This is the file 'beta'.\n"),
+ })
+ expected_status = wc.State(A_COPY_B_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'lambda' : Item(status='M ', wc_rev=2),
+ 'F' : Item(status=' ', wc_rev=2),
+ 'E' : Item(status=' M', wc_rev=2),
+ 'E/alpha' : Item(status=' ', wc_rev=2),
+ 'E/beta' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_merge(A_COPY_B_path, None, None,
+ B_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], 1, 1)
+
+#----------------------------------------------------------------------
+# Test for issue #2971: Reverse merge of prop add segfaults if
+# merging to parent of first merge
+@Issue(2971)
+@SkipUnless(server_has_mergeinfo)
+def reverse_merge_prop_add_on_child(sbox):
+ "reverse merge of prop add on child"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox, True, 1)
+
+ # Some paths we'll care about
+ G_path = sbox.ospath('A/D/G')
+ D_COPY_path = sbox.ospath('A_COPY/D')
+ G_COPY_path = sbox.ospath('A_COPY/D/G')
+
+ # Make some prop changes to some dirs.
+ svntest.actions.run_and_verify_svn(["property 'prop:name' set on '" +
+ G_path + "'\n"], [], 'ps',
+ 'prop:name', 'propval', G_path)
+ expected_output = svntest.wc.State(wc_dir, {'A/D/G': Item(verb='Sending'),})
+ wc_status.tweak('A/D/G', wc_rev=3)
+ wc_disk.tweak('A/D/G', props={'prop:name' : 'propval'})
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+
+ # Merge -c3's prop add to A_COPY/D/G
+ expected_output = wc.State(G_COPY_path, {
+ '' : Item(status=' U')
+ })
+ expected_mergeinfo_output = wc.State(G_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(G_COPY_path, {
+ })
+ expected_status = wc.State(G_COPY_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'pi' : Item(status=' ', wc_rev=2),
+ 'rho' : Item(status=' ', wc_rev=2),
+ 'tau' : Item(status=' ', wc_rev=2),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/G:3',
+ 'prop:name' : 'propval'}),
+ 'pi' : Item("This is the file 'pi'.\n"),
+ 'rho' : Item("This is the file 'rho'.\n"),
+ 'tau' : Item("This is the file 'tau'.\n"),
+ })
+ expected_skip = wc.State(G_COPY_path, { })
+ svntest.actions.run_and_verify_merge(G_COPY_path, '2', '3',
+ sbox.repo_url + '/A/D/G', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Now merge -c-3 but target the previous target's parent instead.
+ expected_output = wc.State(D_COPY_path, {
+ 'G' : Item(status=' G'),
+ })
+ expected_mergeinfo_output = wc.State(D_COPY_path, {
+ '' : Item(status=' U'),
+ 'G' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(D_COPY_path, {
+ '' : Item(status=' U'),
+ 'G' : Item(status=' U'),
+ })
+ expected_status = wc.State(D_COPY_path, {
+ '' : Item(status=' ', wc_rev=2),
+ 'G' : Item(status=' ', wc_rev=2),
+ 'G/pi' : Item(status=' ', wc_rev=2),
+ 'G/rho' : Item(status=' ', wc_rev=2),
+ 'G/tau' : Item(status=' ', wc_rev=2),
+ 'H' : Item(status=' ', wc_rev=2),
+ 'H/chi' : Item(status=' ', wc_rev=2),
+ 'H/psi' : Item(status=' ', wc_rev=2),
+ 'H/omega' : Item(status=' ', wc_rev=2),
+ 'gamma' : Item(status=' ', wc_rev=2),
+ })
+ expected_disk = wc.State('', {
+ 'G' : Item(),
+ 'G/pi' : Item("This is the file 'pi'.\n"),
+ 'G/rho' : Item("This is the file 'rho'.\n"),
+ 'G/tau' : Item("This is the file 'tau'.\n"),
+ 'H' : Item(),
+ 'H/chi' : Item("This is the file 'chi'.\n"),
+ 'H/psi' : Item("This is the file 'psi'.\n"),
+ 'H/omega' : Item("This is the file 'omega'.\n"),
+ 'gamma' : Item("This is the file 'gamma'.\n")
+ })
+ expected_skip = wc.State(D_COPY_path, { })
+ svntest.actions.run_and_verify_merge(D_COPY_path, '3', '2',
+ sbox.repo_url + '/A/D', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+@XFail()
+@Issues(2970,3642)
+def merge_target_with_non_inheritable_mergeinfo(sbox):
+ "merge target with non inheritable mergeinfo"
+
+ ## See http://subversion.tigris.org/issues/show_bug.cgi?id=2970. ##
+
+ # Create a WC with a single branch
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox, True, 1)
+
+ # Some paths we'll care about
+ B_url = sbox.repo_url + '/A/B'
+ lambda_path = sbox.ospath('A/B/lambda')
+ newfile_path = sbox.ospath('A/B/E/newfile')
+ A_COPY_B_path = sbox.ospath('A_COPY/B')
+
+ # Make a modifications to A/B/lambda and add A/B/E/newfile
+ svntest.main.file_write(lambda_path, "This is the file 'lambda' modified.\n")
+ svntest.main.file_write(newfile_path, "This is the file 'newfile'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'add', newfile_path)
+ expected_output = wc.State(wc_dir, {
+ 'A/B/lambda' : Item(verb='Sending'),
+ 'A/B/E/newfile' : Item(verb='Adding'),
+ })
+ wc_status.add({
+ 'A/B/lambda' : Item(status=' ', wc_rev=3),
+ 'A/B/E/newfile' : Item(status=' ', wc_rev=3),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+
+ # Merge /A/B to /A_COPY/B ie., r1 to r3 with depth immediates
+ expected_output = wc.State(A_COPY_B_path, {
+ 'lambda' : Item(status='U '),
+ })
+ # Issue #3642 http://subversion.tigris.org/issues/show_bug.cgi?id=3642
+ #
+ # We don't expect A_COPY/B/F to have mergeinfo recorded on it because
+ # not only is it unaffected by the merge at depth immediates, it could
+ # never be affected by the merge, regardless of depth.
+ expected_mergeinfo_output = wc.State(A_COPY_B_path, {
+ '' : Item(status=' U'),
+ 'E' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_B_path, {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:2-3'}),
+ 'lambda' : Item(contents="This is the file 'lambda' modified.\n"),
+ 'F' : Item(), # No mergeinfo!
+ 'E' : Item(props={SVN_PROP_MERGEINFO : '/A/B/E:2-3*'}),
+ 'E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'E/beta' : Item(contents="This is the file 'beta'.\n"),
+ })
+ expected_status = wc.State(A_COPY_B_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'lambda' : Item(status='M ', wc_rev=2),
+ 'F' : Item(status=' ', wc_rev=2),
+ 'E' : Item(status=' M', wc_rev=2),
+ 'E/alpha' : Item(status=' ', wc_rev=2),
+ 'E/beta' : Item(status=' ', wc_rev=2),
+ })
+ expected_skip = wc.State(A_COPY_B_path, {})
+
+ svntest.actions.run_and_verify_merge(A_COPY_B_path, None, None,
+ B_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True,
+ '--depth', 'immediates',
+ A_COPY_B_path)
+
+ # Merge /A/B to /A_COPY/B ie., r1 to r3 with infinite depth
+ expected_output = wc.State(A_COPY_B_path, {
+ 'E/newfile' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_B_path, {
+ '' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(A_COPY_B_path, {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:2-3'}),
+ 'lambda' : Item(contents="This is the file 'lambda' modified.\n"),
+ 'F' : Item(),
+ 'E' : Item(),
+ 'E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'E/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'E/newfile' : Item(contents="This is the file 'newfile'.\n"),
+ })
+ expected_status = wc.State(A_COPY_B_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'lambda' : Item(status='M ', wc_rev=2),
+ 'F' : Item(status=' ', wc_rev=2),
+ 'E' : Item(status=' ', wc_rev=2),
+ 'E/alpha' : Item(status=' ', wc_rev=2),
+ 'E/beta' : Item(status=' ', wc_rev=2),
+ 'E/newfile' : Item(status='A ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_merge(A_COPY_B_path, None, None,
+ B_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def self_reverse_merge(sbox):
+ "revert a commit on a target"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make changes to the working copy
+ mu_path = sbox.ospath('A/mu')
+ svntest.main.file_append(mu_path, 'appended mu text')
+
+ # Created expected output tree for 'svn ci'
+ expected_output = wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+
+ # Create expected status tree; all local revisions should be at 1,
+ # but mu should be at revision 2.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=2)
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # update to HEAD so that the to-be-undone revision is found in the
+ # implicit mergeinfo (the natural history) of the target.
+ svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir)
+
+ expected_output = wc.State(wc_dir, {
+ 'A/mu' : Item(status='U ')
+ })
+ expected_mergeinfo_output = wc.State(wc_dir, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(wc_dir, {
+ '' : Item(status=' U'),
+ })
+ expected_skip = wc.State(wc_dir, { })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('A/mu', status='M ')
+ svntest.actions.run_and_verify_merge(wc_dir, '2', '1', sbox.repo_url,
+ None, expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+
+ # record dummy self mergeinfo to test the fact that self-reversal should work
+ # irrespective of mergeinfo.
+ svntest.actions.run_and_verify_svn(None, [], 'ps', SVN_PROP_MERGEINFO,
+ '/:1', wc_dir)
+
+ # Bad svntest.main.greek_state does not have '', so adding it explicitly.
+ expected_disk.add({'' : Item(props={SVN_PROP_MERGEINFO : '/:1'})})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('', status = ' M')
+ expected_status.tweak('A/mu', status = 'M ')
+ expected_mergeinfo_output = wc.State(wc_dir, {
+ '' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(wc_dir, {
+ })
+ svntest.actions.run_and_verify_merge(wc_dir, '2', '1', sbox.repo_url,
+ None, expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def ignore_ancestry_and_mergeinfo(sbox):
+ "--ignore-ancestry also ignores mergeinfo"
+
+ # Create a WC with a single branch
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox, True, 1)
+
+ # Some paths we'll care about
+ A_B_url = sbox.repo_url + '/A/B'
+ A_COPY_B_path = sbox.ospath('A_COPY/B')
+ lambda_path = sbox.ospath('A/B/lambda')
+ A_COPY_lambda_path = sbox.ospath('A_COPY/B/lambda')
+
+ # Make modifications to A/B/lambda
+ svntest.main.file_write(lambda_path, "This is the file 'lambda' modified.\n")
+ expected_output = wc.State(wc_dir, {
+ 'A/B/lambda' : Item(verb='Sending'),
+ })
+ wc_status.add({
+ 'A/B/lambda' : Item(status=' ', wc_rev=3),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Merge /A/B to /A_COPY/B ie., r1 to r3 with depth immediates
+ expected_output = wc.State(A_COPY_B_path, {
+ 'lambda' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_B_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_B_path, {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:2-3'}),
+ 'lambda' : Item(contents="This is the file 'lambda' modified.\n"),
+ 'F' : Item(props={}),
+ 'E' : Item(props={}),
+ 'E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'E/beta' : Item(contents="This is the file 'beta'.\n"),
+ })
+ expected_status = wc.State(A_COPY_B_path, {
+ '' : Item(status=' M', wc_rev=3),
+ 'lambda' : Item(status='M ', wc_rev=3),
+ 'F' : Item(status=' ', wc_rev=3),
+ 'E' : Item(status=' ', wc_rev=3),
+ 'E/alpha' : Item(status=' ', wc_rev=3),
+ 'E/beta' : Item(status=' ', wc_rev=3),
+ })
+ expected_skip = wc.State(A_COPY_B_path, {})
+
+ svntest.actions.run_and_verify_merge(A_COPY_B_path, 1, 3,
+ A_B_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+
+ # Now, revert lambda and repeat the merge. Nothing should happen.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R',
+ A_COPY_lambda_path)
+ expected_output.remove('lambda')
+ expected_disk.tweak('lambda', contents="This is the file 'lambda'.\n")
+ expected_status.tweak('lambda', status=' ')
+ expected_mergeinfo_output = wc.State(A_COPY_B_path, {
+ '' : Item(status=' G'),
+ })
+ svntest.actions.run_and_verify_merge(A_COPY_B_path, 1, 3,
+ A_B_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+
+ # Now, try the merge again with --ignore-ancestry. We should get
+ # lambda re-modified. */
+ expected_output = wc.State(A_COPY_B_path, {
+ 'lambda' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_B_path, {})
+ expected_elision_output = wc.State(A_COPY_B_path, {
+ })
+ expected_disk.tweak('lambda',
+ contents="This is the file 'lambda' modified.\n")
+ expected_status.tweak('lambda', status='M ')
+ svntest.actions.run_and_verify_merge(A_COPY_B_path, 1, 3,
+ A_B_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True,
+ '--ignore-ancestry', A_COPY_B_path)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+@Issue(3032)
+def merge_from_renamed_branch_fails_while_avoiding_repeat_merge(sbox):
+ "merge from renamed branch"
+ #Copy A/C to A/COPY_C results in r2.
+ #Rename A/COPY_C to A/RENAMED_C results in r3.
+ #Add A/RENAMED_C/file1 and commit, results in r4.
+ #Change A/RENAMED_C/file1 and commit, results in r5.
+ #Merge r4 from A/RENAMED_C to A/C
+ #Merge r2:5 from A/RENAMED_C to A/C <-- This fails tracked via #3032.
+
+ ## See http://subversion.tigris.org/issues/show_bug.cgi?id=3032. ##
+
+ # Create a WC with a single branch
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ # Some paths we'll care about
+ A_C_url = sbox.repo_url + '/A/C'
+ A_COPY_C_url = sbox.repo_url + '/A/COPY_C'
+ A_RENAMED_C_url = sbox.repo_url + '/A/RENAMED_C'
+ A_C_path = sbox.ospath('A/C')
+ A_RENAMED_C_path = sbox.ospath('A/RENAMED_C')
+ A_RENAMED_C_file1_path = sbox.ospath('A/RENAMED_C/file1')
+
+ svntest.main.run_svn(None, 'cp', A_C_url, A_COPY_C_url, '-m', 'copy...')
+ svntest.main.run_svn(None, 'mv', A_COPY_C_url, A_RENAMED_C_url, '-m',
+ 'rename...')
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ svntest.main.file_write(A_RENAMED_C_file1_path, "This is the file1.\n")
+ svntest.main.run_svn(None, 'add', A_RENAMED_C_file1_path)
+ expected_output = wc.State(A_RENAMED_C_path, {
+ 'file1' : Item(verb='Adding'),
+ })
+ expected_status = wc.State(A_RENAMED_C_path, {
+ '' : Item(status=' ', wc_rev=3),
+ 'file1' : Item(status=' ', wc_rev=4),
+ })
+ svntest.actions.run_and_verify_commit(A_RENAMED_C_path, expected_output,
+ expected_status)
+ svntest.main.file_write(A_RENAMED_C_file1_path,
+ "This is the file1 modified.\n")
+ expected_output = wc.State(A_RENAMED_C_path, {
+ 'file1' : Item(verb='Sending'),
+ })
+ expected_status.tweak('file1', wc_rev=5)
+ svntest.actions.run_and_verify_commit(A_RENAMED_C_path, expected_output,
+ expected_status)
+
+ expected_skip = wc.State(A_C_path, {})
+ expected_output = wc.State(A_C_path, {
+ 'file1' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(A_C_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_C_path, {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/RENAMED_C:4'}),
+ 'file1' : Item("This is the file1.\n"),
+ })
+ expected_status = wc.State(A_C_path, {
+ '' : Item(status=' M', wc_rev=3),
+ 'file1' : Item(status='A ', wc_rev='-', copied='+'),
+ })
+ svntest.actions.run_and_verify_merge(A_C_path, 3, 4,
+ A_RENAMED_C_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+
+ expected_output = wc.State(A_C_path, {
+ 'file1' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_C_path, {
+ '' : Item(status=' G'),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/RENAMED_C:3-5'}),
+ 'file1' : Item("This is the file1 modified.\n"),
+ })
+ expected_status = wc.State(A_C_path, {
+ '' : Item(status=' M', wc_rev=3),
+ 'file1' : Item(status='A ', wc_rev='-', copied='+'),
+ })
+ svntest.actions.run_and_verify_merge(A_C_path, 2, 5,
+ A_RENAMED_C_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+
+#----------------------------------------------------------------------
+# Test for part of issue #2877: 'do subtree merge only if subtree has
+# explicit mergeinfo set and exists in the merge source'
+@SkipUnless(server_has_mergeinfo)
+@Issue(2877)
+def merge_source_normalization_and_subtree_merges(sbox):
+ "normalized mergeinfo is recorded on subtrees"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ D_COPY_path = sbox.ospath('A_COPY/D')
+ G_COPY_path = sbox.ospath('A_COPY/D/G')
+
+ # Use our helper to copy 'A' to 'A_COPY' then make some changes under 'A'
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ # r7 - Move A to A_MOVED
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 7.\n'],
+ [], 'mv', '-m', 'mv A to A_MOVED',
+ sbox.repo_url + '/A',
+ sbox.repo_url + '/A_MOVED')
+ wc_status.add({
+ 'A_MOVED/B' : Item(),
+ 'A_MOVED/B/lambda' : Item(),
+ 'A_MOVED/B/E' : Item(),
+ 'A_MOVED/B/E/alpha' : Item(),
+ 'A_MOVED/B/E/beta' : Item(),
+ 'A_MOVED/B/F' : Item(),
+ 'A_MOVED/mu' : Item(),
+ 'A_MOVED/C' : Item(),
+ 'A_MOVED/D' : Item(),
+ 'A_MOVED/D/gamma' : Item(),
+ 'A_MOVED/D/G' : Item(),
+ 'A_MOVED/D/G/pi' : Item(),
+ 'A_MOVED/D/G/rho' : Item(),
+ 'A_MOVED/D/G/tau' : Item(),
+ 'A_MOVED/D/H' : Item(),
+ 'A_MOVED/D/H/chi' : Item(),
+ 'A_MOVED/D/H/omega' : Item(),
+ 'A_MOVED/D/H/psi' : Item(),
+ 'A_MOVED' : Item()})
+ wc_status.remove('A', 'A/B', 'A/B/lambda', 'A/B/E', 'A/B/E/alpha',
+ 'A/B/E/beta', 'A/B/F', 'A/mu', 'A/C', 'A/D',
+ 'A/D/gamma', 'A/D/G', 'A/D/G/pi', 'A/D/G/rho',
+ 'A/D/G/tau' , 'A/D/H', 'A/D/H/chi', 'A/D/H/omega',
+ 'A/D/H/psi')
+ wc_status.tweak(status=' ', wc_rev=7)
+
+ # Update the WC
+ svntest.actions.run_and_verify_svn(None, [],
+ 'update', wc_dir)
+
+ # r8 - Make a text mod to 'A_MOVED/D/G/tau'
+ svntest.main.file_write(sbox.ospath('A_MOVED/D/G/tau'),
+ "New content")
+ expected_output = wc.State(wc_dir,
+ {'A_MOVED/D/G/tau' : Item(verb='Sending')})
+ wc_status.tweak('A_MOVED/D/G/tau', status=' ', wc_rev=8)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+
+ # Merge -c4 URL/A_MOVED/D/G A_COPY/D/G.
+ #
+ # A_MOVED/D/G doesn't exist at r3:4, it's still A/D/G,
+ # so the merge source normalization logic should set
+ # mergeinfo of '/A/D/G:4' on A_COPY/D/G, *not* 'A_MOVED/D/G:4',
+ # see issue #2953.
+ expected_output = wc.State(G_COPY_path, {
+ 'rho' : Item(status='U ')
+ })
+ expected_mergeinfo_output = wc.State(G_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(G_COPY_path, {
+ })
+ expected_status = wc.State(G_COPY_path, {
+ '' : Item(status=' M', wc_rev=7),
+ 'pi' : Item(status=' ', wc_rev=7),
+ 'rho' : Item(status='M ', wc_rev=7),
+ 'tau' : Item(status=' ', wc_rev=7),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/G:4'}),
+ 'pi' : Item("This is the file 'pi'.\n"),
+ 'rho' : Item("New content"),
+ 'tau' : Item("This is the file 'tau'.\n"),
+ })
+ expected_skip = wc.State(G_COPY_path, { })
+ svntest.actions.run_and_verify_merge(G_COPY_path, '3', '4',
+ sbox.repo_url + '/A_MOVED/D/G',
+ None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Merge -c8 URL/A_MOVED/D A_COPY/D.
+ #
+ # The merge target A_COPY/D and the subtree at A_COPY/D/G
+ # should both have their mergeinfo updated with r8
+ # from A_MOVED/D, see reopened issue #2877.
+ #
+ # Note that A_COPY/D/G keeps its normalized '/A/D/G:4' mergeinfo
+ # from the previous merge in addition to the new '/A_MOVED/D/G:8'.
+ expected_output = wc.State(D_COPY_path, {
+ 'G/tau' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(D_COPY_path, {
+ '' : Item(status=' U'),
+ 'G' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(D_COPY_path, {
+ })
+ expected_status = wc.State(D_COPY_path, {
+ '' : Item(status=' M', wc_rev=7),
+ 'G' : Item(status=' M', wc_rev=7),
+ 'G/pi' : Item(status=' ', wc_rev=7),
+ 'G/rho' : Item(status='M ', wc_rev=7),
+ 'G/tau' : Item(status='M ', wc_rev=7),
+ 'H' : Item(status=' ', wc_rev=7),
+ 'H/chi' : Item(status=' ', wc_rev=7),
+ 'H/psi' : Item(status=' ', wc_rev=7),
+ 'H/omega' : Item(status=' ', wc_rev=7),
+ 'gamma' : Item(status=' ', wc_rev=7),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A_MOVED/D:8'}),
+ 'G' : Item(props={SVN_PROP_MERGEINFO :
+ '/A/D/G:4\n/A_MOVED/D/G:8'}),
+ 'G/pi' : Item("This is the file 'pi'.\n"),
+ 'G/rho' : Item("New content"),
+ 'G/tau' : Item("New content"),
+ 'H' : Item(),
+ 'H/chi' : Item("This is the file 'chi'.\n"),
+ 'H/psi' : Item("This is the file 'psi'.\n"),
+ 'H/omega' : Item("This is the file 'omega'.\n"),
+ 'gamma' : Item("This is the file 'gamma'.\n")
+ })
+ expected_skip = wc.State(D_COPY_path, { })
+ svntest.actions.run_and_verify_merge(D_COPY_path, '7', '8',
+ sbox.repo_url + '/A_MOVED/D',
+ None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+# Tests for issue #3067: 'subtrees with intersecting mergeinfo, that don't
+# exist at the start of a merge range shouldn't break the merge'
+@SkipUnless(server_has_mergeinfo)
+@Issue(3067)
+def new_subtrees_should_not_break_merge(sbox):
+ "subtrees added after start of merge range are ok"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ # Some paths we'll care about
+ A_COPY_path = sbox.ospath('A_COPY')
+ D_COPY_path = sbox.ospath('A_COPY/D')
+ nu_path = sbox.ospath('A/D/H/nu')
+ nu_COPY_path = sbox.ospath('A_COPY/D/H/nu')
+ rho_COPY_path = sbox.ospath('A_COPY/D/G/rho')
+ H_COPY_path = sbox.ospath('A_COPY/D/H')
+
+ # Create 'A/D/H/nu', commit it as r7, make a text mod to it in r8.
+ svntest.main.file_write(nu_path, "This is the file 'nu'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'add', nu_path)
+ expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Adding')})
+ wc_status.add({'A/D/H/nu' : Item(status=' ', wc_rev=7)})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+ svntest.main.file_write(nu_path, "New content")
+ expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Sending')})
+ wc_status.tweak('A/D/H/nu', wc_rev=8)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+
+ # Merge r7 to A_COPY/D/H so it has its own explicit mergeinfo, then
+ # merge r8 to A_COPY/D/H/nu so it too has explicit mergeinfo.
+ expected_output = wc.State(H_COPY_path, {
+ 'nu' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(H_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(H_COPY_path, {
+ })
+ expected_status = wc.State(H_COPY_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'psi' : Item(status=' ', wc_rev=2),
+ 'omega' : Item(status=' ', wc_rev=2),
+ 'chi' : Item(status=' ', wc_rev=2),
+ 'nu' : Item(status='A ', copied='+', wc_rev='-'),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:7'}),
+ 'psi' : Item("This is the file 'psi'.\n"),
+ 'omega' : Item("This is the file 'omega'.\n"),
+ 'chi' : Item("This is the file 'chi'.\n"),
+ 'nu' : Item("This is the file 'nu'.\n"),
+ })
+ expected_skip = wc.State(H_COPY_path, {})
+ svntest.actions.run_and_verify_merge(H_COPY_path, '6', '7',
+ sbox.repo_url + '/A/D/H', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip,
+ check_props=True)
+ # run_and_verify_merge doesn't support merging to a file WCPATH
+ # so use run_and_verify_svn.
+ ### TODO: We can use run_and_verify_merge() here now.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[8]],
+ ['U ' + nu_COPY_path + '\n',
+ ' G ' + nu_COPY_path + '\n']),
+ [], 'merge', '-c8', '--allow-mixed-revisions',
+ sbox.repo_url + '/A/D/H/nu', nu_COPY_path)
+
+ # Merge -r4:6 to A_COPY, then reverse merge r6 from A_COPY/D.
+ expected_output = wc.State(A_COPY_path, {
+ 'B/E/beta' : Item(status='U '),
+ 'D/H/omega': Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'D/H' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'B' : Item(status=' ', wc_rev=2),
+ 'mu' : Item(status=' ', wc_rev=2),
+ 'B/E' : Item(status=' ', wc_rev=2),
+ 'B/E/alpha' : Item(status=' ', wc_rev=2),
+ 'B/E/beta' : Item(status='M ', wc_rev=2),
+ 'B/lambda' : Item(status=' ', wc_rev=2),
+ 'B/F' : Item(status=' ', wc_rev=2),
+ 'C' : Item(status=' ', wc_rev=2),
+ 'D' : Item(status=' ', wc_rev=2),
+ 'D/G' : Item(status=' ', wc_rev=2),
+ 'D/G/pi' : Item(status=' ', wc_rev=2),
+ 'D/G/rho' : Item(status=' ', wc_rev=2),
+ 'D/G/tau' : Item(status=' ', wc_rev=2),
+ 'D/gamma' : Item(status=' ', wc_rev=2),
+ 'D/H' : Item(status=' M', wc_rev=2),
+ 'D/H/chi' : Item(status=' ', wc_rev=2),
+ 'D/H/psi' : Item(status=' ', wc_rev=2),
+ 'D/H/omega' : Item(status='M ', wc_rev=2),
+ 'D/H/nu' : Item(status='A ', copied='+', wc_rev='-'),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:5-6'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:5-7'}),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("New content"),
+ 'D/H/nu' : Item("New content",
+ props={SVN_PROP_MERGEINFO : '/A/D/H/nu:7-8'}),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, '4', '6',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+ expected_output = wc.State(D_COPY_path, {
+ 'H/omega': Item(status='G '),
+ })
+ expected_mergeinfo_output = wc.State(D_COPY_path, {
+ '' : Item(status=' G'),
+ 'H' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(D_COPY_path, {
+ })
+ expected_status = wc.State(D_COPY_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'G' : Item(status=' ', wc_rev=2),
+ 'G/pi' : Item(status=' ', wc_rev=2),
+ 'G/rho' : Item(status=' ', wc_rev=2),
+ 'G/tau' : Item(status=' ', wc_rev=2),
+ 'gamma' : Item(status=' ', wc_rev=2),
+ 'H' : Item(status=' M', wc_rev=2),
+ 'H/chi' : Item(status=' ', wc_rev=2),
+ 'H/psi' : Item(status=' ', wc_rev=2),
+ 'H/omega' : Item(status=' ', wc_rev=2),
+ 'H/nu' : Item(status='A ', copied='+', wc_rev='-'),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D:5'}),
+ 'G/pi' : Item("This is the file 'pi'.\n"),
+ 'G/rho' : Item("This is the file 'rho'.\n"),
+ 'G/tau' : Item("This is the file 'tau'.\n"),
+ 'gamma' : Item("This is the file 'gamma'.\n"),
+ 'H' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:5,7'}),
+ 'H/chi' : Item("This is the file 'chi'.\n"),
+ 'H/psi' : Item("This is the file 'psi'.\n"),
+ 'H/omega' : Item("This is the file 'omega'.\n"),
+ 'H/nu' : Item("New content",
+ props={SVN_PROP_MERGEINFO : '/A/D/H/nu:7-8'}),
+ })
+ expected_skip = wc.State(D_COPY_path, { })
+ svntest.actions.run_and_verify_merge(D_COPY_path, '6', '5',
+ sbox.repo_url + '/A/D', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+ # Now once again merge r6 to A_COPY. A_COPY already has r6 in its mergeinfo
+ # so we expect only subtree merges on A_COPY/D, A_COPY/D/H, and
+ # A_COPY/D/H/nu. The fact that A/D/H/nu doesn't exist at r6 should not cause
+ # the merge to fail -- see
+ # http://subversion.tigris.org/issues/show_bug.cgi?id=3067#desc7.
+ expected_output = wc.State(A_COPY_path, {
+ 'D/H/omega': Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' G'),
+ 'D' : Item(status=' G'),
+ 'D/H' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ 'D' : Item(status=' U'),
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'B' : Item(status=' ', wc_rev=2),
+ 'mu' : Item(status=' ', wc_rev=2),
+ 'B/E' : Item(status=' ', wc_rev=2),
+ 'B/E/alpha' : Item(status=' ', wc_rev=2),
+ 'B/E/beta' : Item(status='M ', wc_rev=2),
+ 'B/lambda' : Item(status=' ', wc_rev=2),
+ 'B/F' : Item(status=' ', wc_rev=2),
+ 'C' : Item(status=' ', wc_rev=2),
+ 'D' : Item(status=' ', wc_rev=2),
+ 'D/G' : Item(status=' ', wc_rev=2),
+ 'D/G/pi' : Item(status=' ', wc_rev=2),
+ 'D/G/rho' : Item(status=' ', wc_rev=2),
+ 'D/G/tau' : Item(status=' ', wc_rev=2),
+ 'D/gamma' : Item(status=' ', wc_rev=2),
+ 'D/H' : Item(status=' M', wc_rev=2),
+ 'D/H/chi' : Item(status=' ', wc_rev=2),
+ 'D/H/psi' : Item(status=' ', wc_rev=2),
+ 'D/H/omega' : Item(status='M ', wc_rev=2),
+ 'D/H/nu' : Item(status='A ', copied='+', wc_rev='-'),
+ })
+ # Kept under a distinct name: this disk state is reused verbatim by the
+ # final merge at the end of this test.
+ expected_disk_1 = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:5-6'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(), # Mergeinfo elides to 'A_COPY'
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:5-7'}),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("New content"),
+ 'D/H/nu' : Item("New content",
+ props={SVN_PROP_MERGEINFO : '/A/D/H/nu:7-8'}),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, '5', '6',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk_1,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Commit this merge as r9.
+ #
+ # Update the wc first to make setting the expected status a bit easier.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [],
+ 'up', wc_dir)
+ wc_status.tweak(wc_rev=8)
+ expected_output = wc.State(wc_dir, {
+ 'A_COPY' : Item(verb='Sending'),
+ 'A_COPY/B/E/beta' : Item(verb='Sending'),
+ 'A_COPY/D/H' : Item(verb='Sending'),
+ 'A_COPY/D/H/nu' : Item(verb='Adding'),
+ 'A_COPY/D/H/omega' : Item(verb='Sending'),
+ })
+ wc_status.tweak('A_COPY',
+ 'A_COPY/B/E/beta',
+ 'A_COPY/D/H',
+ 'A_COPY/D/H/omega',
+ wc_rev=9)
+ wc_status.add({'A_COPY/D/H/nu' : Item(status=' ', wc_rev=9)})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+ # Update the WC.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [],
+ 'up', wc_dir)
+ wc_status.tweak(wc_rev=9)
+
+ # Yet another test for issue #3067. Merge -rX:Y, where X>Y (reverse merge)
+ # and the merge target has a subtree that came into existence at some rev
+ # N where X < N < Y. This merge should simply delete the subtree.
+ #
+ # For this test merge -r9:2 to A_COPY. This should revert all the merges
+ # done thus far, leaving the tree rooted at A_COPY with no explicit
+ # mergeinfo.
+ expected_output = wc.State(A_COPY_path, {
+ 'B/E/beta' : Item(status='U '),
+ 'D/H/omega': Item(status='U '),
+ 'D/H/nu' : Item(status='D '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'D/H': Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'D/H': Item(status=' U'),
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=9),
+ 'B' : Item(status=' ', wc_rev=9),
+ 'mu' : Item(status=' ', wc_rev=9),
+ 'B/E' : Item(status=' ', wc_rev=9),
+ 'B/E/alpha' : Item(status=' ', wc_rev=9),
+ 'B/E/beta' : Item(status='M ', wc_rev=9),
+ 'B/lambda' : Item(status=' ', wc_rev=9),
+ 'B/F' : Item(status=' ', wc_rev=9),
+ 'C' : Item(status=' ', wc_rev=9),
+ 'D' : Item(status=' ', wc_rev=9),
+ 'D/G' : Item(status=' ', wc_rev=9),
+ 'D/G/pi' : Item(status=' ', wc_rev=9),
+ 'D/G/rho' : Item(status=' ', wc_rev=9),
+ 'D/G/tau' : Item(status=' ', wc_rev=9),
+ 'D/gamma' : Item(status=' ', wc_rev=9),
+ 'D/H' : Item(status=' M', wc_rev=9),
+ 'D/H/chi' : Item(status=' ', wc_rev=9),
+ 'D/H/psi' : Item(status=' ', wc_rev=9),
+ 'D/H/omega' : Item(status='M ', wc_rev=9),
+ 'D/H/nu' : Item(status='D ', wc_rev=9),
+ })
+ expected_disk = wc.State('', {
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, '9', '2',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Revert the previous merge, then merge r4 to A_COPY/D/G/rho. Commit
+ # this merge as r10.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[4]],
+ ['U ' + rho_COPY_path + '\n',
+ ' G ' + rho_COPY_path + '\n']),
+ [], 'merge', '-c4', sbox.repo_url + '/A/D/G/rho', rho_COPY_path)
+ expected_output = wc.State(wc_dir, {
+ 'A_COPY/D/G/rho' : Item(verb='Sending'),})
+ wc_status.tweak('A_COPY/D/G/rho', wc_rev=10)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(10), [],
+ 'up', wc_dir)
+ wc_status.tweak(wc_rev=10)
+
+ # Yet another test for issue #3067. Merge -rX:Y, where X>Y (reverse merge)
+ # and the merge target has a subtree that doesn't exist in the merge source
+ # between X and Y. This merge should have no effect on that subtree.
+ #
+ # Specifically, merge -c4 to A_COPY. This should revert the previous merge
+ # of r4 directly to A_COPY/D/G/rho. The subtree A_COPY/D/H/nu, whose merge
+ # source A/D/H/nu doesn't exist in r4:3, shouldn't be affected nor should it
+ # break the merge editor.
+ expected_output = wc.State(A_COPY_path, {
+ 'D/G/rho': Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'D/G/rho' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ 'D/G/rho' : Item(status=' U'),
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' ', wc_rev=10),
+ 'B' : Item(status=' ', wc_rev=10),
+ 'mu' : Item(status=' ', wc_rev=10),
+ 'B/E' : Item(status=' ', wc_rev=10),
+ 'B/E/alpha' : Item(status=' ', wc_rev=10),
+ 'B/E/beta' : Item(status=' ', wc_rev=10),
+ 'B/lambda' : Item(status=' ', wc_rev=10),
+ 'B/F' : Item(status=' ', wc_rev=10),
+ 'C' : Item(status=' ', wc_rev=10),
+ 'D' : Item(status=' ', wc_rev=10),
+ 'D/G' : Item(status=' ', wc_rev=10),
+ 'D/G/pi' : Item(status=' ', wc_rev=10),
+ 'D/G/rho' : Item(status='MM', wc_rev=10),
+ 'D/G/tau' : Item(status=' ', wc_rev=10),
+ 'D/gamma' : Item(status=' ', wc_rev=10),
+ 'D/H' : Item(status=' ', wc_rev=10),
+ 'D/H/chi' : Item(status=' ', wc_rev=10),
+ 'D/H/psi' : Item(status=' ', wc_rev=10),
+ 'D/H/omega' : Item(status=' ', wc_rev=10),
+ 'D/H/nu' : Item(status=' ', wc_rev=10),
+ })
+ # Use expected_disk_1 from above since we should be
+ # returning to that state.
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, '4', '3',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk_1,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def dont_add_mergeinfo_from_own_history(sbox):
+ "cyclic merges don't add mergeinfo from own history"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ # Some paths we'll care about
+ A_path = sbox.ospath('A')
+ A_MOVED_path = sbox.ospath('A_MOVED')
+ mu_path = sbox.ospath('A/mu')
+ mu_MOVED_path = sbox.ospath('A_MOVED/mu')
+ A_COPY_path = sbox.ospath('A_COPY')
+ mu_COPY_path = sbox.ospath('A_COPY/mu')
+
+ # Merge r3 from 'A' to 'A_COPY', make a text mod to 'A_COPY/mu' and
+ # commit both as r7. This results in mergeinfo of '/A:3' on 'A_COPY'.
+ # Then merge r7 from 'A_COPY' to 'A'. This attempts to add the mergeinfo
+ # '/A:3' to 'A', but that is self-referential and should be filtered out,
+ # leaving only the mergeinfo '/A_COPY:7' on 'A'.
+ expected_output = wc.State(A_COPY_path, {
+ 'D/H/psi' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_A_COPY_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'B' : Item(status=' ', wc_rev=2),
+ 'mu' : Item(status=' ', wc_rev=2),
+ 'B/E' : Item(status=' ', wc_rev=2),
+ 'B/E/alpha' : Item(status=' ', wc_rev=2),
+ 'B/E/beta' : Item(status=' ', wc_rev=2),
+ 'B/lambda' : Item(status=' ', wc_rev=2),
+ 'B/F' : Item(status=' ', wc_rev=2),
+ 'C' : Item(status=' ', wc_rev=2),
+ 'D' : Item(status=' ', wc_rev=2),
+ 'D/G' : Item(status=' ', wc_rev=2),
+ 'D/G/pi' : Item(status=' ', wc_rev=2),
+ 'D/G/rho' : Item(status=' ', wc_rev=2),
+ 'D/G/tau' : Item(status=' ', wc_rev=2),
+ 'D/gamma' : Item(status=' ', wc_rev=2),
+ 'D/H' : Item(status=' ', wc_rev=2),
+ 'D/H/chi' : Item(status=' ', wc_rev=2),
+ 'D/H/psi' : Item(status='M ', wc_rev=2),
+ 'D/H/omega' : Item(status=' ', wc_rev=2),
+ })
+ expected_A_COPY_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:3'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_A_COPY_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, '2', '3',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_A_COPY_disk,
+ expected_A_COPY_status,
+ expected_A_COPY_skip,
+ check_props=True)
+
+ # Change 'A_COPY/mu'
+ svntest.main.file_write(mu_COPY_path, "New content")
+
+ # Commit r7
+ expected_output = wc.State(wc_dir, {
+ 'A_COPY' : Item(verb='Sending'),
+ 'A_COPY/D/H/psi' : Item(verb='Sending'),
+ 'A_COPY/mu' : Item(verb='Sending'),
+ })
+ wc_status.tweak('A_COPY', 'A_COPY/D/H/psi', 'A_COPY/mu', wc_rev=7)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ wc_status)
+
+ # Merge r7 back to the 'A'
+ expected_output = wc.State(A_path, {
+ 'mu' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_path, {
+ })
+ expected_A_status = wc.State(A_path, {
+ '' : Item(status=' M', wc_rev=1),
+ 'B' : Item(status=' ', wc_rev=1),
+ 'mu' : Item(status='M ', wc_rev=1),
+ 'B/E' : Item(status=' ', wc_rev=1),
+ 'B/E/alpha' : Item(status=' ', wc_rev=1),
+ 'B/E/beta' : Item(status=' ', wc_rev=5),
+ 'B/lambda' : Item(status=' ', wc_rev=1),
+ 'B/F' : Item(status=' ', wc_rev=1),
+ 'C' : Item(status=' ', wc_rev=1),
+ 'D' : Item(status=' ', wc_rev=1),
+ 'D/G' : Item(status=' ', wc_rev=1),
+ 'D/G/pi' : Item(status=' ', wc_rev=1),
+ 'D/G/rho' : Item(status=' ', wc_rev=4),
+ 'D/G/tau' : Item(status=' ', wc_rev=1),
+ 'D/gamma' : Item(status=' ', wc_rev=1),
+ 'D/H' : Item(status=' ', wc_rev=1),
+ 'D/H/chi' : Item(status=' ', wc_rev=1),
+ 'D/H/psi' : Item(status=' ', wc_rev=3),
+ 'D/H/omega' : Item(status=' ', wc_rev=6),
+ })
+ expected_A_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A_COPY:7'}),
+ 'B' : Item(),
+ 'mu' : Item("New content"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_A_skip = wc.State(A_path, {})
+ svntest.actions.run_and_verify_merge(A_path, '6', '7',
+ sbox.repo_url + '/A_COPY', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_A_disk,
+ expected_A_status,
+ expected_A_skip,
+ [], True, False,
+ '--allow-mixed-revisions', A_path)
+
+ # Revert all local mods
+ svntest.actions.run_and_verify_svn(["Reverted '" + A_path + "'\n",
+ "Reverted '" + mu_path + "'\n"],
+ [], 'revert', '-R', wc_dir)
+
+ # Move 'A' to 'A_MOVED' and once again merge r7 from 'A_COPY', this time
+ # to 'A_MOVED'. This attempts to add the mergeinfo '/A:3' to
+ # 'A_MOVED', but 'A_MOVED@3' is 'A', so again this mergeinfo is filtered
+ # out, leaving only the mergeinfo created from the merge itself:
+ # '/A_COPY:7'.
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 8.\n'],
+ [], 'move',
+ sbox.repo_url + '/A',
+ sbox.repo_url + '/A_MOVED',
+ '-m', 'Copy A to A_MOVED')
+ wc_status.remove('A', 'A/B', 'A/B/lambda', 'A/B/E', 'A/B/E/alpha',
+ 'A/B/E/beta', 'A/B/F', 'A/mu', 'A/C', 'A/D', 'A/D/gamma', 'A/D/G',
+ 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau', 'A/D/H', 'A/D/H/chi',
+ 'A/D/H/omega', 'A/D/H/psi')
+ wc_status.add({
+ 'A_MOVED' : Item(),
+ 'A_MOVED/B' : Item(),
+ 'A_MOVED/B/lambda' : Item(),
+ 'A_MOVED/B/E' : Item(),
+ 'A_MOVED/B/E/alpha' : Item(),
+ 'A_MOVED/B/E/beta' : Item(),
+ 'A_MOVED/B/F' : Item(),
+ 'A_MOVED/mu' : Item(),
+ 'A_MOVED/C' : Item(),
+ 'A_MOVED/D' : Item(),
+ 'A_MOVED/D/gamma' : Item(),
+ 'A_MOVED/D/G' : Item(),
+ 'A_MOVED/D/G/pi' : Item(),
+ 'A_MOVED/D/G/rho' : Item(),
+ 'A_MOVED/D/G/tau' : Item(),
+ 'A_MOVED/D/H' : Item(),
+ 'A_MOVED/D/H/chi' : Item(),
+ 'A_MOVED/D/H/omega' : Item(),
+ 'A_MOVED/D/H/psi' : Item(),
+ })
+ wc_status.tweak(wc_rev=8, status=' ')
+ wc_disk.remove('A', 'A/B', 'A/B/lambda', 'A/B/E', 'A/B/E/alpha',
+ 'A/B/E/beta', 'A/B/F', 'A/mu', 'A/C', 'A/D', 'A/D/gamma',
+ 'A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau', 'A/D/H',
+ 'A/D/H/chi', 'A/D/H/omega', 'A/D/H/psi' )
+ wc_disk.add({
+ 'A_MOVED' : Item(),
+ 'A_MOVED/B' : Item(),
+ 'A_MOVED/B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'A_MOVED/B/E' : Item(),
+ 'A_MOVED/B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'A_MOVED/B/E/beta' : Item("New content"),
+ 'A_MOVED/B/F' : Item(),
+ 'A_MOVED/mu' : Item("This is the file 'mu'.\n"),
+ 'A_MOVED/C' : Item(),
+ 'A_MOVED/D' : Item(),
+ 'A_MOVED/D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'A_MOVED/D/G' : Item(),
+ 'A_MOVED/D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'A_MOVED/D/G/rho' : Item("New content"),
+ 'A_MOVED/D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'A_MOVED/D/H' : Item(),
+ 'A_MOVED/D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'A_MOVED/D/H/omega' : Item("New content"),
+ 'A_MOVED/D/H/psi' : Item("New content"),
+ })
+ wc_disk.tweak('A_COPY/D/H/psi', 'A_COPY/mu', contents='New content')
+ wc_disk.tweak('A_COPY', props={SVN_PROP_MERGEINFO : '/A:3'})
+ expected_output = wc.State(wc_dir, {
+ 'A' : Item(status='D '),
+ 'A_MOVED' : Item(status='A '),
+ 'A_MOVED/B' : Item(status='A '),
+ 'A_MOVED/B/lambda' : Item(status='A '),
+ 'A_MOVED/B/E' : Item(status='A '),
+ 'A_MOVED/B/E/alpha' : Item(status='A '),
+ 'A_MOVED/B/E/beta' : Item(status='A '),
+ 'A_MOVED/B/F' : Item(status='A '),
+ 'A_MOVED/mu' : Item(status='A '),
+ 'A_MOVED/C' : Item(status='A '),
+ 'A_MOVED/D' : Item(status='A '),
+ 'A_MOVED/D/gamma' : Item(status='A '),
+ 'A_MOVED/D/G' : Item(status='A '),
+ 'A_MOVED/D/G/pi' : Item(status='A '),
+ 'A_MOVED/D/G/rho' : Item(status='A '),
+ 'A_MOVED/D/G/tau' : Item(status='A '),
+ 'A_MOVED/D/H' : Item(status='A '),
+ 'A_MOVED/D/H/chi' : Item(status='A '),
+ 'A_MOVED/D/H/omega' : Item(status='A '),
+ 'A_MOVED/D/H/psi' : Item(status='A ')
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ wc_disk,
+ wc_status,
+ check_props=True)
+
+ expected_output = wc.State(A_MOVED_path, {
+ 'mu' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_MOVED_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_MOVED_path, {
+ })
+ expected_A_status = wc.State(A_MOVED_path, {
+ '' : Item(status=' M', wc_rev=8),
+ 'B' : Item(status=' ', wc_rev=8),
+ 'mu' : Item(status='M ', wc_rev=8),
+ 'B/E' : Item(status=' ', wc_rev=8),
+ 'B/E/alpha' : Item(status=' ', wc_rev=8),
+ 'B/E/beta' : Item(status=' ', wc_rev=8),
+ 'B/lambda' : Item(status=' ', wc_rev=8),
+ 'B/F' : Item(status=' ', wc_rev=8),
+ 'C' : Item(status=' ', wc_rev=8),
+ 'D' : Item(status=' ', wc_rev=8),
+ 'D/G' : Item(status=' ', wc_rev=8),
+ 'D/G/pi' : Item(status=' ', wc_rev=8),
+ 'D/G/rho' : Item(status=' ', wc_rev=8),
+ 'D/G/tau' : Item(status=' ', wc_rev=8),
+ 'D/gamma' : Item(status=' ', wc_rev=8),
+ 'D/H' : Item(status=' ', wc_rev=8),
+ 'D/H/chi' : Item(status=' ', wc_rev=8),
+ 'D/H/psi' : Item(status=' ', wc_rev=8),
+ 'D/H/omega' : Item(status=' ', wc_rev=8),
+ })
+ # We can reuse expected_A_disk from above without change.
+ svntest.actions.run_and_verify_merge(A_MOVED_path, '6', '7',
+ sbox.repo_url + '/A_COPY', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_A_disk,
+ expected_A_status,
+ expected_A_skip,
+ check_props=True)
+
+ # Revert all local mods
+ svntest.actions.run_and_verify_svn(["Reverted '" + A_MOVED_path + "'\n",
+ "Reverted '" + mu_MOVED_path + "'\n"],
+ [], 'revert', '-R', wc_dir)
+
+ # Create a new 'A' unrelated to the old 'A' which was moved. Then merge
+ # r7 from 'A_COPY' to this new 'A'. Since the new 'A' shares no history
+ # with the mergeinfo 'A@3', the mergeinfo '/A:3' is added and when combined
+ # with the mergeinfo created from the merge should result in
+ # '/A:3\n/A_COPY:7'
+ #
+ # Create the new 'A' by exporting the old 'A@1'.
+ expected_output = svntest.verify.UnorderedOutput(
+ ["A " + sbox.ospath('A') + "\n",
+ "A " + sbox.ospath('A/B') + "\n",
+ "A " + sbox.ospath('A/B/lambda') + "\n",
+ "A " + sbox.ospath('A/B/E') + "\n",
+ "A " + sbox.ospath('A/B/E/alpha') + "\n",
+ "A " + sbox.ospath('A/B/E/beta') + "\n",
+ "A " + sbox.ospath('A/B/F') + "\n",
+ "A " + sbox.ospath('A/mu') + "\n",
+ "A " + sbox.ospath('A/C') + "\n",
+ "A " + sbox.ospath('A/D') + "\n",
+ "A " + sbox.ospath('A/D/gamma') + "\n",
+ "A " + sbox.ospath('A/D/G') + "\n",
+ "A " + sbox.ospath('A/D/G/pi') + "\n",
+ "A " + sbox.ospath('A/D/G/rho') + "\n",
+ "A " + sbox.ospath('A/D/G/tau') + "\n",
+ "A " + sbox.ospath('A/D/H') + "\n",
+ "A " + sbox.ospath('A/D/H/chi') + "\n",
+ "A " + sbox.ospath('A/D/H/omega') + "\n",
+ "A " + sbox.ospath('A/D/H/psi') + "\n",
+ "Exported revision 1.\n",]
+ )
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'export', sbox.repo_url + '/A@1',
+ A_path)
+ expected_output = svntest.verify.UnorderedOutput(
+ ["A " + sbox.ospath('A') + "\n",
+ "A " + sbox.ospath('A/B') + "\n",
+ "A " + sbox.ospath('A/B/lambda') + "\n",
+ "A " + sbox.ospath('A/B/E') + "\n",
+ "A " + sbox.ospath('A/B/E/alpha') + "\n",
+ "A " + sbox.ospath('A/B/E/beta') + "\n",
+ "A " + sbox.ospath('A/B/F') + "\n",
+ "A " + sbox.ospath('A/mu') + "\n",
+ "A " + sbox.ospath('A/C') + "\n",
+ "A " + sbox.ospath('A/D') + "\n",
+ "A " + sbox.ospath('A/D/gamma') + "\n",
+ "A " + sbox.ospath('A/D/G') + "\n",
+ "A " + sbox.ospath('A/D/G/pi') + "\n",
+ "A " + sbox.ospath('A/D/G/rho') + "\n",
+ "A " + sbox.ospath('A/D/G/tau') + "\n",
+ "A " + sbox.ospath('A/D/H') + "\n",
+ "A " + sbox.ospath('A/D/H/chi') + "\n",
+ "A " + sbox.ospath('A/D/H/omega') + "\n",
+ "A " + sbox.ospath('A/D/H/psi') + "\n",]
+ )
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'add', A_path)
+ # Commit the new 'A' as r9
+ expected_output = wc.State(wc_dir, {
+ 'A' : Item(verb='Adding'),
+ 'A/B' : Item(verb='Adding'),
+ 'A/mu' : Item(verb='Adding'),
+ 'A/B/E' : Item(verb='Adding'),
+ 'A/B/E/alpha' : Item(verb='Adding'),
+ 'A/B/E/beta' : Item(verb='Adding'),
+ 'A/B/lambda' : Item(verb='Adding'),
+ 'A/B/F' : Item(verb='Adding'),
+ 'A/C' : Item(verb='Adding'),
+ 'A/D' : Item(verb='Adding'),
+ 'A/D/G' : Item(verb='Adding'),
+ 'A/D/G/pi' : Item(verb='Adding'),
+ 'A/D/G/rho' : Item(verb='Adding'),
+ 'A/D/G/tau' : Item(verb='Adding'),
+ 'A/D/gamma' : Item(verb='Adding'),
+ 'A/D/H' : Item(verb='Adding'),
+ 'A/D/H/chi' : Item(verb='Adding'),
+ 'A/D/H/psi' : Item(verb='Adding'),
+ 'A/D/H/omega' : Item(verb='Adding'),
+ })
+ wc_status.tweak(wc_rev=8)
+ wc_status.add({
+ 'A' : Item(wc_rev=9),
+ 'A/B' : Item(wc_rev=9),
+ 'A/B/lambda' : Item(wc_rev=9),
+ 'A/B/E' : Item(wc_rev=9),
+ 'A/B/E/alpha' : Item(wc_rev=9),
+ 'A/B/E/beta' : Item(wc_rev=9),
+ 'A/B/F' : Item(wc_rev=9),
+ 'A/mu' : Item(wc_rev=9),
+ 'A/C' : Item(wc_rev=9),
+ 'A/D' : Item(wc_rev=9),
+ 'A/D/gamma' : Item(wc_rev=9),
+ 'A/D/G' : Item(wc_rev=9),
+ 'A/D/G/pi' : Item(wc_rev=9),
+ 'A/D/G/rho' : Item(wc_rev=9),
+ 'A/D/G/tau' : Item(wc_rev=9),
+ 'A/D/H' : Item(wc_rev=9),
+ 'A/D/H/chi' : Item(wc_rev=9),
+ 'A/D/H/omega' : Item(wc_rev=9),
+ 'A/D/H/psi' : Item(wc_rev=9),
+ })
+ wc_status.tweak(status=' ')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ wc_status)
+
+ expected_output = wc.State(A_path, {
+ 'mu' : Item(status='U '),
+ 'D/H/psi' : Item(status='U '),
+ '' : Item(status=' U'),
+ })
+ expected_mergeinfo_output = wc.State(A_path, {
+ '' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(A_path, {
+ })
+ expected_A_status = wc.State(A_path, {
+ '' : Item(status=' M', wc_rev=9),
+ 'B' : Item(status=' ', wc_rev=9),
+ 'mu' : Item(status='M ', wc_rev=9),
+ 'B/E' : Item(status=' ', wc_rev=9),
+ 'B/E/alpha' : Item(status=' ', wc_rev=9),
+ 'B/E/beta' : Item(status=' ', wc_rev=9),
+ 'B/lambda' : Item(status=' ', wc_rev=9),
+ 'B/F' : Item(status=' ', wc_rev=9),
+ 'C' : Item(status=' ', wc_rev=9),
+ 'D' : Item(status=' ', wc_rev=9),
+ 'D/G' : Item(status=' ', wc_rev=9),
+ 'D/G/pi' : Item(status=' ', wc_rev=9),
+ 'D/G/rho' : Item(status=' ', wc_rev=9),
+ 'D/G/tau' : Item(status=' ', wc_rev=9),
+ 'D/gamma' : Item(status=' ', wc_rev=9),
+ 'D/H' : Item(status=' ', wc_rev=9),
+ 'D/H/chi' : Item(status=' ', wc_rev=9),
+ 'D/H/psi' : Item(status='M ', wc_rev=9),
+ 'D/H/omega' : Item(status=' ', wc_rev=9),
+ })
+ expected_A_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:3\n/A_COPY:7'}),
+ 'B' : Item(),
+ 'mu' : Item("New content"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_A_skip = wc.State(A_path, {})
+ svntest.actions.run_and_verify_merge(A_path, '6', '7',
+ sbox.repo_url + '/A_COPY', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_A_disk,
+ expected_A_status,
+ expected_A_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+@Issue(3094)
+def merge_range_predates_history(sbox):
+  "merge range predates history"
+
+ # Issue #3094: a merge whose revision range begins before the branch's
+ # copy source existed must not fail; only the change that post-dates
+ # the branch point (the file added on trunk in r4) should be applied.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ iota_path = sbox.ospath('iota')
+ trunk_file_path = sbox.ospath('trunk/file')
+ trunk_url = sbox.repo_url + "/trunk"
+ branches_url = sbox.repo_url + "/branches"
+ branch_path = sbox.ospath('branches/branch')
+ branch_file_path = sbox.ospath('branches/branch/file')
+ branch_url = sbox.repo_url + "/branches/branch"
+
+ # Tweak a file and commit. (r2)
+ svntest.main.file_append(iota_path, "More data.\n")
+ sbox.simple_commit(message='tweak iota')
+
+ # Create our trunk and branches directory, and update working copy. (r3)
+ svntest.main.run_svn(None, 'mkdir', trunk_url, branches_url,
+ '-m', 'add trunk and branches dirs')
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+ # Add a file to the trunk and commit. (r4)
+ svntest.main.file_append(trunk_file_path, "This is the file 'file'.\n")
+ svntest.main.run_svn(None, 'add', trunk_file_path)
+ sbox.simple_commit(message='add trunk file')
+
+ # Branch trunk from r3, and update working copy. (r5)
+ # NOTE(review): the log message says "trunk@2" but the copy uses -r3;
+ # the message appears stale -- confirm against the test's intent.
+ svntest.main.run_svn(None, 'cp', trunk_url, branch_url, '-r3',
+ '-m', 'branch trunk@2')
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+ # Now, try to merge trunk into the branch. There should be one
+ # outstanding change -- the addition of the file.
+ # The merge notification range [[4,5]] covers the revisions after the
+ # branch point that are eligible for merging.
+ expected_output = expected_merge_output([[4,5]],
+ ['A ' + branch_file_path + '\n',
+ ' U ' + branch_path + '\n'])
+ svntest.actions.run_and_verify_svn(expected_output, [], 'merge',
+ trunk_url, branch_path)
+
+#----------------------------------------------------------------------
+@Issue(3623)
+def foreign_repos(sbox):
+ "merge from a foreign repository"
+
+ # A "foreign" merge is one whose source repository has a different UUID
+ # than the target working copy's repository. This test makes a rich set
+ # of changes (adds, prop-sets, text mods, deletes) in r2 of the first
+ # repository, then merges -c2 into a working copy of a cloned (distinct
+ # UUID) repository and verifies the merged + committed results on disk.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make a copy of this repository and associated working copy. Both
+ # should have nothing but a Greek tree in them, and the two
+ # repository UUIDs should differ.
+ sbox2 = sbox.clone_dependent(True)
+ sbox2.build()
+ wc_dir2 = sbox2.wc_dir
+
+ # Convenience variables for working copy paths.
+ Z_path = sbox.ospath('A/D/G/Z')
+ B_path = sbox.ospath('A/B')
+ Q_path = sbox.ospath('Q')
+ H_path = sbox.ospath('A/D/H')
+ iota_path = sbox.ospath('iota')
+ beta_path = sbox.ospath('A/B/E/beta')
+ alpha_path = sbox.ospath('A/B/E/alpha')
+ zeta_path = sbox.ospath('A/D/G/Z/zeta')
+ fred_path = sbox.ospath('A/C/fred')
+
+ # Add new directories, with and without properties.
+ svntest.main.run_svn(None, 'mkdir', Q_path, Z_path)
+ svntest.main.run_svn(None, 'pset', 'foo', 'bar', Z_path)
+
+ # Add new files, with contents, with and without properties.
+ zeta_contents = "This is the file 'zeta'.\n"
+ fred_contents = "This is the file 'fred'.\n"
+ svntest.main.file_append(zeta_path, zeta_contents)
+ svntest.main.file_append(fred_path, fred_contents)
+ svntest.main.run_svn(None, 'add', zeta_path, fred_path)
+ svntest.main.run_svn(None, 'pset', 'foo', 'bar', fred_path)
+
+ # Modify existing files and directories.
+ added_contents = "This is another line of text.\n"
+ svntest.main.file_append(iota_path, added_contents)
+ svntest.main.file_append(beta_path, added_contents)
+ svntest.main.run_svn(None, 'pset', 'foo', 'bar', iota_path, B_path)
+
+ # Delete some stuff
+ svntest.main.run_svn(None, 'delete', alpha_path, H_path)
+
+ # Commit up these changes.
+ expected_output = wc.State(wc_dir, {
+ 'Q' : Item(verb='Adding'),
+ 'A/D/G/Z' : Item(verb='Adding'),
+ 'A/D/G/Z/zeta' : Item(verb='Adding'),
+ 'A/C/fred' : Item(verb='Adding'),
+ 'iota' : Item(verb='Sending'),
+ 'A/B' : Item(verb='Sending'),
+ 'A/B/E/beta' : Item(verb='Sending'),
+ 'A/B/E/alpha' : Item(verb='Deleting'),
+ 'A/D/H' : Item(verb='Deleting'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'Q' : Item(status=' ', wc_rev=2),
+ 'A/D/G/Z' : Item(status=' ', wc_rev=2),
+ 'A/D/G/Z/zeta' : Item(status=' ', wc_rev=2),
+ 'A/C/fred' : Item(status=' ', wc_rev=2),
+ })
+ expected_status.tweak('iota', 'A/B/E/beta', 'A/B', wc_rev=2)
+ expected_status.remove('A/B/E/alpha', 'A/D/H', 'A/D/H/chi',
+ 'A/D/H/psi', 'A/D/H/omega')
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'Q' : Item(),
+ 'A/D/G/Z' : Item(props={'foo':'bar'}),
+ 'A/D/G/Z/zeta' : Item(contents=zeta_contents),
+ 'A/C/fred' : Item(contents=fred_contents,props={'foo':'bar'}),
+ })
+ expected_disk.remove('A/B/E/alpha', 'A/D/H', 'A/D/H/chi',
+ 'A/D/H/psi', 'A/D/H/omega')
+ expected_disk.tweak('iota',
+ contents=expected_disk.desc['iota'].contents
+ + added_contents,
+ props={'foo':'bar'})
+ expected_disk.tweak('A/B', props={'foo':'bar'})
+ expected_disk.tweak('A/B/E/beta',
+ contents=expected_disk.desc['A/B/E/beta'].contents
+ + added_contents)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+ svntest.actions.verify_disk(wc_dir, expected_disk, True)
+
+ # Now, merge our committed revision into a working copy of another
+ # repository. Not only should the merge succeed, but the results on
+ # disk should match those in our first working copy.
+
+ ### TODO: Use run_and_verify_merge() ###
+ svntest.main.run_svn(None, 'merge', '-c2', sbox.repo_url, wc_dir2)
+ sbox2.simple_commit(message='Merge from foreign repo')
+ svntest.actions.verify_disk(wc_dir2, expected_disk, True)
+
+ # Now, let's make a third checkout -- our second from the original
+ # repository -- and make sure that all the data there is correct.
+ # It should look just like the original EXPECTED_DISK.
+ # This is a regression test for issue #3623 in which wc_dir2 had the
+ # correct state but the committed state was wrong.
+ wc_dir3 = sbox.add_wc_path('wc3')
+ svntest.actions.run_and_verify_svn(None, [], 'checkout',
+ sbox2.repo_url, wc_dir3)
+ svntest.actions.verify_disk(wc_dir3, expected_disk, True)
+
+#----------------------------------------------------------------------
+def foreign_repos_uuid(sbox):
+ "verify uuid of items added via foreign repo merge"
+
+ # After merging r2 from a repository with a different UUID, items added
+ # by the merge (a file and a directory) must report the *destination*
+ # repository's UUID via 'svn info', checked below with wc2_uuid.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ # NOTE(review): wc_uuid is assigned but never used in this function.
+ wc_uuid = svntest.actions.get_wc_uuid(wc_dir)
+
+ # Make a copy of this repository and associated working copy. Both
+ # should have nothing but a Greek tree in them, and the two
+ # repository UUIDs should differ.
+ sbox2 = sbox.clone_dependent(True)
+ sbox2.build()
+ wc_dir2 = sbox2.wc_dir
+ wc2_uuid = svntest.actions.get_wc_uuid(wc_dir2)
+
+ # Convenience variables for working copy paths.
+ zeta_path = sbox.ospath('A/D/G/zeta')
+ Z_path = sbox.ospath('A/Z')
+
+ # Add new file and directory.
+ zeta_contents = "This is the file 'zeta'.\n"
+ svntest.main.file_append(zeta_path, zeta_contents)
+ os.mkdir(Z_path)
+ svntest.main.run_svn(None, 'add', zeta_path, Z_path)
+
+ # Commit up these changes.
+ expected_output = wc.State(wc_dir, {
+ 'A/D/G/zeta' : Item(verb='Adding'),
+ 'A/Z' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/D/G/zeta' : Item(status=' ', wc_rev=2),
+ 'A/Z' : Item(status=' ', wc_rev=2),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/D/G/zeta' : Item(contents=zeta_contents),
+ 'A/Z' : Item(),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+ svntest.actions.verify_disk(wc_dir, expected_disk, True)
+
+ # Merge the r2 additions from the foreign repository and commit them.
+ svntest.main.run_svn(None, 'merge', '-c2', sbox.repo_url, wc_dir2)
+ sbox2.simple_commit(message='Merge from foreign repos')
+
+ # Run info to check the copied rev to make sure it's right
+ zeta2_path = os.path.join(wc_dir2, 'A', 'D', 'G', 'zeta')
+ expected_info = {"Path" : re.escape(zeta2_path), # escape backslashes
+ "URL" : sbox2.repo_url + "/A/D/G/zeta",
+ "Repository Root" : sbox2.repo_url,
+ "Repository UUID" : wc2_uuid,
+ "Revision" : "2",
+ "Node Kind" : "file",
+ "Schedule" : "normal",
+ }
+ svntest.actions.run_and_verify_info([expected_info], zeta2_path)
+
+ # Run info to check the copied rev to make sure it's right
+ Z2_path = os.path.join(wc_dir2, 'A', 'Z')
+ expected_info = {"Path" : re.escape(Z2_path), # escape backslashes
+ "URL" : sbox2.repo_url + "/A/Z",
+ "Repository Root" : sbox2.repo_url,
+ "Repository UUID" : wc2_uuid,
+ "Revision" : "2",
+ "Node Kind" : "directory",
+ "Schedule" : "normal",
+ }
+ svntest.actions.run_and_verify_info([expected_info], Z2_path)
+
+#----------------------------------------------------------------------
+def foreign_repos_2_url(sbox):
+ "2-url merge from a foreign repository"
+
+ # Like foreign_repos(), but exercises the two-URL merge form: the diff
+ # between two "tags" (URL copies of /A taken before and after a batch of
+ # changes) in the foreign repository is merged into /A of a working copy
+ # belonging to a different repository, then committed and verified.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make a copy of this repository and associated working copy. Both
+ # should have nothing but a Greek tree in them, and the two
+ # repository UUIDs should differ.
+ sbox2 = sbox.clone_dependent(True)
+ sbox2.build()
+ wc_dir2 = sbox2.wc_dir
+
+ # Convenience variables for working copy paths.
+ Z_path = sbox.ospath('A/D/G/Z')
+ Q_path = sbox.ospath('A/Q')
+ H_path = sbox.ospath('A/D/H')
+ beta_path = sbox.ospath('A/B/E/beta')
+ alpha_path = sbox.ospath('A/B/E/alpha')
+ zeta_path = sbox.ospath('A/D/G/Z/zeta')
+ fred_path = sbox.ospath('A/C/fred')
+
+ # First, "tag" the current state of the repository.
+ svntest.main.run_svn(None, 'copy', sbox.repo_url + '/A',
+ sbox.repo_url + '/A-tag1', '-m', 'tag1')
+
+ # Add new directories
+ svntest.main.run_svn(None, 'mkdir', Q_path, Z_path)
+
+ # Add new files
+ zeta_contents = "This is the file 'zeta'.\n"
+ fred_contents = "This is the file 'fred'.\n"
+ svntest.main.file_append(zeta_path, zeta_contents)
+ svntest.main.file_append(fred_path, fred_contents)
+ svntest.main.run_svn(None, 'add', zeta_path, fred_path)
+
+ # Modify existing files
+ added_contents = "This is another line of text.\n"
+ svntest.main.file_append(beta_path, added_contents)
+
+ # Delete some stuff
+ svntest.main.run_svn(None, 'delete', alpha_path, H_path)
+
+ # Commit up these changes.
+ expected_output = wc.State(wc_dir, {
+ 'A/Q' : Item(verb='Adding'),
+ 'A/D/G/Z' : Item(verb='Adding'),
+ 'A/D/G/Z/zeta' : Item(verb='Adding'),
+ 'A/C/fred' : Item(verb='Adding'),
+ 'A/B/E/beta' : Item(verb='Sending'),
+ 'A/B/E/alpha' : Item(verb='Deleting'),
+ 'A/D/H' : Item(verb='Deleting'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/Q' : Item(status=' ', wc_rev=3),
+ 'A/D/G/Z' : Item(status=' ', wc_rev=3),
+ 'A/D/G/Z/zeta' : Item(status=' ', wc_rev=3),
+ 'A/C/fred' : Item(status=' ', wc_rev=3),
+ })
+ expected_status.tweak('A/B/E/beta', wc_rev=3)
+ expected_status.remove('A/B/E/alpha', 'A/D/H', 'A/D/H/chi',
+ 'A/D/H/psi', 'A/D/H/omega')
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/Q' : Item(),
+ 'A/D/G/Z' : Item(),
+ 'A/D/G/Z/zeta' : Item(contents=zeta_contents),
+ 'A/C/fred' : Item(contents=fred_contents),
+ })
+ expected_disk.remove('A/B/E/alpha', 'A/D/H', 'A/D/H/chi',
+ 'A/D/H/psi', 'A/D/H/omega')
+ expected_disk.tweak('A/B/E/beta',
+ contents=expected_disk.desc['A/B/E/beta'].contents
+ + added_contents)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+ svntest.actions.verify_disk(wc_dir, expected_disk, True)
+
+ # Now, "tag" the new state of the repository.
+ svntest.main.run_svn(None, 'copy', sbox.repo_url + '/A',
+ sbox.repo_url + '/A-tag2', '-m', 'tag2')
+
+ # Now, merge across our "tags" (copies of /A) into the /A of a
+ # working copy of another repository. Not only should the merge
+ # succeed, but the results on disk should match those in our first
+ # working copy.
+
+ ### TODO: Use run_and_verify_merge() ###
+ svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A-tag1',
+ sbox.repo_url + '/A-tag2',
+ os.path.join(wc_dir2, 'A'))
+ sbox2.simple_commit(message='Merge from foreign repos')
+ svntest.actions.verify_disk(wc_dir2, expected_disk, True)
+
+#----------------------------------------------------------------------
+@Issue(1962)
+def merge_added_subtree(sbox):
+ "merge added subtree"
+
+ # The result of a subtree added by copying
+ # or merging an added subtree, should be the same on disk
+ ### with the exception of mergeinfo?!
+
+ # test for issue 1962
+ # Strategy: build identical expected disk/status trees, first verify a
+ # plain REPO->WC copy produces them, revert, then verify the same trees
+ # (plus merge-tracking tweaks) result from merging r2:3 of A_COPY.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ url = sbox.repo_url
+
+ # make a branch of A
+ # svn cp A A_COPY
+ A_url = url + "/A"
+ A_COPY_url = url + "/A_COPY"
+ A_path = sbox.ospath('A')
+
+ svntest.actions.run_and_verify_svn(["Committing transaction...\n",
+ "Committed revision 2.\n"], [],
+ "cp", "-m", "", A_url, A_COPY_url)
+ svntest.actions.run_and_verify_svn(["Committing transaction...\n",
+ "Committed revision 3.\n"], [],
+ "cp", "-m", "",
+ A_COPY_url + '/D',
+ A_COPY_url + '/D2')
+ expected_output = wc.State(A_path, {
+ 'D2' : Item(status='A '),
+ 'D2/gamma' : Item(status='A '),
+ 'D2/H' : Item(status='A '),
+ 'D2/H/chi' : Item(status='A '),
+ 'D2/H/psi' : Item(status='A '),
+ 'D2/H/omega': Item(status='A '),
+ 'D2/G' : Item(status='A '),
+ 'D2/G/pi' : Item(status='A '),
+ 'D2/G/rho' : Item(status='A '),
+ 'D2/G/tau' : Item(status='A ')
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/D2' : Item(status='A ', copied='+', wc_rev='-'),
+ 'A/D2/gamma' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D2/H' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D2/H/chi' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D2/H/psi' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D2/H/omega': Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D2/G' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D2/G/pi' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D2/G/rho' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D2/G/tau' : Item(status=' ', copied='+', wc_rev='-')
+ })
+ expected_status.remove('', 'iota')
+
+ expected_skip = wc.State('', {})
+ expected_disk = svntest.main.greek_state.subtree("A")
+ dest_name = ''
+ expected_disk.add({
+ dest_name + 'D2' : Item(),
+ dest_name + 'D2/gamma' : Item("This is the file 'gamma'.\n"),
+ dest_name + 'D2/G' : Item(),
+ dest_name + 'D2/G/pi' : Item("This is the file 'pi'.\n"),
+ dest_name + 'D2/G/rho' : Item("This is the file 'rho'.\n"),
+ dest_name + 'D2/G/tau' : Item("This is the file 'tau'.\n"),
+ dest_name + 'D2/H' : Item(),
+ dest_name + 'D2/H/chi' : Item("This is the file 'chi'.\n"),
+ dest_name + 'D2/H/omega' : Item("This is the file 'omega'.\n"),
+ dest_name + 'D2/H/psi' : Item("This is the file 'psi'.\n")
+ })
+
+ # Using the above information, verify a REPO->WC copy
+ svntest.actions.run_and_verify_svn(None, [],
+ "cp", A_COPY_url + '/D2',
+ os.path.join(A_path, "D2"))
+ svntest.actions.verify_disk(A_path, expected_disk)
+ svntest.actions.run_and_verify_status(A_path, expected_status)
+
+ # Remove the copy artifacts
+ svntest.actions.run_and_verify_svn(None, [],
+ "revert", "-R", A_path)
+ svntest.main.safe_rmtree(os.path.join(A_path, "D2"))
+
+ # Add merge-tracking differences between copying and merging
+ # Verify a merge using the otherwise unchanged disk and status trees
+ # (only 'A' itself differs: its mergeinfo property is modified).
+ expected_status.tweak('A',status=' M')
+ expected_mergeinfo_output = wc.State(A_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_path, {
+ })
+ svntest.actions.run_and_verify_merge(A_path, 2, 3, A_COPY_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip)
+
+#----------------------------------------------------------------------
+# Issue #3138
+@SkipUnless(server_has_mergeinfo)
+@Issue(3138)
+def merge_unknown_url(sbox):
+ "merging an unknown url should return error"
+
+ # Delete iota in r2, then attempt to merge from its (now nonexistent at
+ # HEAD) URL; the merge must fail with a "not found" error rather than
+ # succeed or crash.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # remove a path from the repo and commit.
+ iota_path = sbox.ospath('iota')
+ svntest.actions.run_and_verify_svn(None, [], 'rm', iota_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ "ci", wc_dir, "-m", "log message")
+
+
+ url = sbox.repo_url + "/iota"
+ # Either error wording is acceptable; it varies by RA layer.
+ expected_err = ".*File not found.*iota.*|.*iota.*path not found.*"
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ "merge", url, wc_dir)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def reverse_merge_away_all_mergeinfo(sbox):
+ "merges that remove all mergeinfo work"
+
+ # Forward-merge r2:6 of A/D/H into A_COPY/D/H (creating mergeinfo),
+ # commit as r7, then reverse-merge r7:6 from A_COPY/D/H itself. The
+ # reverse merge must remove *all* mergeinfo (elision output ' U' on
+ # the target root) and restore the pristine file contents.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ # Some paths we'll care about
+ A_COPY_H_path = sbox.ospath('A_COPY/D/H')
+
+ # Merge r4:8 from A/D/H into A_COPY/D/H.
+ expected_output = wc.State(A_COPY_H_path, {
+ 'omega' : Item(status='U '),
+ 'psi' : Item(status='U ')
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_H_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_H_path, {
+ })
+ expected_status = wc.State(A_COPY_H_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'psi' : Item(status='M ', wc_rev=2),
+ 'omega' : Item(status='M ', wc_rev=2),
+ 'chi' : Item(status=' ', wc_rev=2),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:3-6'}),
+ 'psi' : Item("New content"),
+ 'omega' : Item("New content"),
+ 'chi' : Item("This is the file 'chi'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_H_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_H_path, '2', '6',
+ sbox.repo_url + '/A/D/H', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip,
+ check_props=True)
+
+ # Commit the merge as r7
+ expected_output = wc.State(wc_dir, {
+ 'A_COPY/D/H' : Item(verb='Sending'),
+ 'A_COPY/D/H/omega' : Item(verb='Sending'),
+ 'A_COPY/D/H/psi' : Item(verb='Sending'),
+ })
+ wc_status.tweak('A_COPY/D/H', 'A_COPY/D/H/omega', 'A_COPY/D/H/psi',
+ wc_rev=7)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ wc_status)
+
+ # Now reverse merge r7 from itself, all mergeinfo should be removed.
+ expected_output = wc.State(A_COPY_H_path, {
+ '' : Item(status=' U'),
+ 'omega' : Item(status='U '),
+ 'psi' : Item(status='U ')
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_H_path, {
+ '' : Item(status=' G'),
+ })
+ # ' U' here records the complete elision (removal) of mergeinfo.
+ expected_elision_output = wc.State(A_COPY_H_path, {
+ '' : Item(status=' U'),
+ })
+ expected_status = wc.State(A_COPY_H_path, {
+ '' : Item(status=' M', wc_rev=7),
+ 'psi' : Item(status='M ', wc_rev=7),
+ 'omega' : Item(status='M ', wc_rev=7),
+ 'chi' : Item(status=' ', wc_rev=2),
+ })
+ # No mergeinfo prop on '' -- it has been merged away entirely.
+ expected_disk = wc.State('', {
+ 'psi' : Item("This is the file 'psi'.\n"),
+ 'omega' : Item("This is the file 'omega'.\n"),
+ 'chi' : Item("This is the file 'chi'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_H_path, { })
+ # --allow-mixed-revisions is needed because chi is still at r2 while
+ # the rest of the target is at r7.
+ svntest.actions.run_and_verify_merge(A_COPY_H_path, '7', '6',
+ sbox.repo_url + '/A_COPY/D/H', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip,
+ [],
+ True, False, '--allow-mixed-revisions',
+ A_COPY_H_path)
+
+#----------------------------------------------------------------------
+# Issue #3138
+# Another test for issue #3067: 'subtrees with intersecting mergeinfo,
+# that don't exist at the start of a merge range shouldn't break the
+# merge'. Specifically see
+# http://subversion.tigris.org/issues/show_bug.cgi?id=3067#desc5
+@SkipUnless(server_has_mergeinfo)
+@Issues(3138,3067,4217)
+def dont_merge_revs_into_subtree_that_predate_it(sbox):
+ "dont merge revs into a subtree that predate it"
+
+ # +-> merge -c7 A/D/H/nu@7 H_COPY/nu
+ # | +-> merge -c2 A/D/H H_COPY
+ # | | +-> merge A/D/H H_COPY
+ # | | |
+ # A/D/H A----------------------
+ # +-psi +-M-------------M------
+ # +-nu A-D C---M-D
+ # H_COPY C---------G-G
+ # +-psi +---------+-.
+ # +-nu +-------G---.
+ # 1 2 3 4 5 6 7 8 9 w w w
+
+ # Create our good 'ole greek tree.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ psi_path = sbox.ospath('A/D/H/psi')
+ nu_path = sbox.ospath('A/D/H/nu')
+ H_COPY_path = sbox.ospath('H_COPY')
+ nu_COPY_path = sbox.ospath('H_COPY/nu')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_disk = svntest.main.greek_state.copy()
+
+ # Make a text mod to 'A/D/H/psi' and commit it as r2
+ svntest.main.file_write(psi_path, "New content")
+ expected_output = wc.State(wc_dir, {'A/D/H/psi' : Item(verb='Sending')})
+ expected_status.tweak('A/D/H/psi', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ expected_disk.tweak('A/D/H/psi', contents="New content")
+
+ # Create 'A/D/H/nu' and commit it as r3.
+ svntest.main.file_write(nu_path, "This is the file 'nu'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'add', nu_path)
+ expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Adding')})
+ expected_status.add({'A/D/H/nu' : Item(status=' ', wc_rev=3)})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Delete 'A/D/H/nu' and commit it as r4.
+ svntest.actions.run_and_verify_svn(None, [], 'rm', nu_path)
+ expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Deleting')})
+ expected_status.remove('A/D/H/nu')
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Copy 'A/D/H/nu' from r3 and commit it as r5.
+ # This resurrection breaks the straight-line history of nu.
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ sbox.repo_url + '/A/D/H/nu@3', nu_path)
+ expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Adding')})
+ expected_status.add({'A/D/H/nu' : Item(status=' ', wc_rev=5)})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Copy 'A/D/H' to 'H_COPY' in r6.
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 6.\n'],
+ [], 'copy',
+ sbox.repo_url + "/A/D/H",
+ sbox.repo_url + "/H_COPY",
+ "-m", "Copy A/D/H to H_COPY")
+ expected_status.add({
+ "H_COPY" : Item(),
+ "H_COPY/chi" : Item(),
+ "H_COPY/omega" : Item(),
+ "H_COPY/psi" : Item(),
+ "H_COPY/nu" : Item()})
+
+ # Update to pull the previous copy into the WC
+ svntest.main.run_svn(None, 'up', wc_dir)
+ expected_status.tweak(status=' ', wc_rev=6)
+
+ # Make a text mod to 'A/D/H/nu' and commit it as r7.
+ svntest.main.file_write(nu_path, "New content")
+ expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Sending')})
+ expected_status.tweak('A/D/H/nu', wc_rev=7)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Remove A/D/H/nu and commit it as r8.
+ # We do this deletion so that following cherry harvest has a *tough*
+ # time to identify the line of history of /A/D/H/nu@HEAD.
+ svntest.main.run_svn(None, 'rm', nu_path)
+ expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Deleting')})
+ expected_status.remove('A/D/H/nu')
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Make another text mod to 'A/D/H/psi' that can be merged to 'H_COPY'
+ # during a cherry harvest and commit it as r9.
+ svntest.main.file_write(psi_path, "Even *newer* content")
+ expected_output = wc.State(wc_dir, {'A/D/H/psi' : Item(verb='Sending')})
+ expected_status.tweak('A/D/H/psi', wc_rev=9)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ expected_disk.tweak('A/D/H/psi', contents="Even *newer* content")
+
+ # Update WC so elision occurs smoothly.
+ svntest.main.run_svn(None, 'up', wc_dir)
+ expected_status.tweak(status=' ', wc_rev=9)
+
+ # Merge r7 from 'A/D/H/nu' to 'H_COPY/nu'.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[7]],
+ ['U ' + nu_COPY_path + '\n',
+ ' U ' + nu_COPY_path + '\n']),
+ [], 'merge', '-c7', sbox.repo_url + '/A/D/H/nu@7', nu_COPY_path)
+
+ # Cherry harvest all eligible revisions from 'A/D/H' to 'H_COPY'.
+ #
+ # This is where we see the problem described in
+ # http://subversion.tigris.org/issues/show_bug.cgi?id=3067#desc5.
+ #
+ # Use run_and_verify_svn() because run_and_verify_merge*() require
+ # explicit revision ranges.
+
+ expected_skip = wc.State(H_COPY_path, { })
+ #Cherry pick r2 prior to cherry harvest.
+ svntest.actions.run_and_verify_svn([], [], 'merge', '-c2',
+ sbox.repo_url + '/A/D/H',
+ H_COPY_path)
+
+ # H_COPY needs r6-9 applied while H_COPY/nu needs only 6,8-9.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output(
+ [[7,9], # Merge notification
+ [6,9]], # Mergeinfo notification
+ ['U ' + os.path.join(H_COPY_path, "psi") + '\n',
+ 'D ' + os.path.join(H_COPY_path, "nu") + '\n',
+ ' U ' + H_COPY_path + '\n',]),
+ [], 'merge', sbox.repo_url + '/A/D/H', H_COPY_path, '--force')
+
+ # Check the status after the merge.
+ expected_status.tweak('H_COPY', status=' M')
+ expected_status.tweak('H_COPY/psi', status='M ')
+ expected_status.tweak('H_COPY/nu', status='D ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+ check_mergeinfo_recursively(wc_dir,
+ { H_COPY_path: '/A/D/H:6-9' })
+
+#----------------------------------------------------------------------
+# Helper for merge_chokes_on_renamed_subtrees and
+# subtrees_with_empty_mergeinfo.
+def set_up_renamed_subtree(sbox):
+ '''Starting with the standard greek tree, make a text mod to A/D/H/psi
+ as r2. Tweak A/D/H/omega and commit it as r3 (we do this to create a
+ broken segment of the history of A/D/H).
+ *DO NOT SVN UPDATE*.
+ Move A/D/H/psi to A/D/H/psi_moved as r4. Copy A/D/H to H_COPY
+ as r5. Make a text mod to A/D/H/psi_moved and commit it as r6.
+ Update the working copy and return (wc_dir, expected_disk,
+ expected_status) describing the resulting state.'''
+
+ # Create our good 'ole greek tree.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ psi_path = sbox.ospath('A/D/H/psi')
+ omega_path = sbox.ospath('A/D/H/omega')
+ psi_moved_path = sbox.ospath('A/D/H/psi_moved')
+ # NOTE(review): psi_COPY_moved_path is unused within this helper.
+ psi_COPY_moved_path = sbox.ospath('H_COPY/psi_moved')
+ H_COPY_path = sbox.ospath('H_COPY')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_disk = svntest.main.greek_state.copy()
+
+ # Make a text mod to 'A/D/H/psi' and commit it as r2
+ svntest.main.file_write(psi_path, "New content")
+ expected_output = wc.State(wc_dir, {'A/D/H/psi' : Item(verb='Sending')})
+ expected_status.tweak('A/D/H/psi', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ expected_disk.tweak('A/D/H/psi', contents="New content")
+
+ # Make a text mod to 'A/D/H/omega' and commit it as r3
+ svntest.main.file_write(omega_path, "New omega")
+ expected_output = wc.State(wc_dir, {'A/D/H/omega' : Item(verb='Sending')})
+ expected_status.tweak('A/D/H/omega', wc_rev=3)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ expected_disk.tweak('A/D/H/omega', contents="New omega")
+
+ # Move 'A/D/H/psi' to 'A/D/H/psi_moved' and commit it as r4.
+ svntest.actions.run_and_verify_svn(None, [], 'move',
+ psi_path, psi_moved_path)
+ expected_output = wc.State(wc_dir, {
+ 'A/D/H/psi' : Item(verb='Deleting'),
+ 'A/D/H/psi_moved' : Item(verb='Adding')
+ })
+ expected_status.add({'A/D/H/psi_moved' : Item(status=' ', wc_rev=4)})
+ expected_status.remove('A/D/H/psi')
+
+ # Replicate old WC-to-WC move behavior where empty mergeinfo was set on
+ # the move destination. Pre 1.6 repositories might have mergeinfo like
+ # this so we still want to test that the issue #3067 fixes tested by
+ # merge_chokes_on_renamed_subtrees and subtrees_with_empty_mergeinfo
+ # still work.
+ svntest.actions.run_and_verify_svn(None, [], 'ps', SVN_PROP_MERGEINFO,
+ "", psi_moved_path)
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Copy 'A/D/H' to 'H_COPY' in r5.
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 5.\n'],
+ [], 'copy',
+ sbox.repo_url + "/A/D/H",
+ sbox.repo_url + "/H_COPY",
+ "-m", "Copy A/D/H to H_COPY")
+ expected_status.add({
+ "H_COPY" : Item(),
+ "H_COPY/chi" : Item(),
+ "H_COPY/omega" : Item(),
+ "H_COPY/psi_moved" : Item()})
+
+ # Update to pull the previous copy into the WC
+ svntest.main.run_svn(None, 'up', wc_dir)
+ expected_status.tweak(status=' ', wc_rev=5)
+
+ # Make a text mod to 'A/D/H/psi_moved' and commit it as r6
+ svntest.main.file_write(psi_moved_path, "Even *Newer* content")
+ expected_output = wc.State(wc_dir,
+ {'A/D/H/psi_moved' : Item(verb='Sending')})
+ expected_status.tweak('A/D/H/psi_moved', wc_rev=6)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ expected_disk.remove('A/D/H/psi')
+ expected_disk.add({
+ 'A/D/H/psi_moved' : Item("Even *Newer* content"),
+ })
+
+ # Update for a uniform working copy before merging.
+ svntest.main.run_svn(None, 'up', wc_dir)
+ expected_status.tweak(status=' ', wc_rev=6)
+
+ return wc_dir, expected_disk, expected_status
+
+#----------------------------------------------------------------------
+# Test for issue #3174: 'Merge algorithm chokes on subtrees needing
+# special attention that have been renamed'
+@SkipUnless(server_has_mergeinfo)
+@Issue(3174)
+def merge_chokes_on_renamed_subtrees(sbox):
+ "merge fails with renamed subtrees with mergeinfo"
+
+ # Use helper to setup a renamed subtree.
+ # The helper leaves the WC at a uniform revision with 'A/D/H/psi' moved
+ # to 'A/D/H/psi_moved' (carrying empty mergeinfo) and 'A/D/H' copied to
+ # 'H_COPY'; see set_up_renamed_subtree() for the exact revision layout.
+ wc_dir, expected_disk, expected_status = set_up_renamed_subtree(sbox)
+
+ # Some paths we'll care about
+ psi_COPY_moved_path = sbox.ospath('H_COPY/psi_moved')
+
+
+ # Cherry harvest all available revisions from 'A/D/H/psi_moved' to
+ # 'H_COPY/psi_moved'.
+ #
+ # Here is where issue #3174 appears, the merge fails with:
+ # svn: svn: File not found: revision 3, path '/A/D/H/psi'
+ # The expected notification covers two merged ranges (the move target
+ # only exists from r4 onward, so its history is applied piecewise).
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[5,6],[3,6]],
+ ['U ' + psi_COPY_moved_path + '\n',
+ ' U ' + psi_COPY_moved_path + '\n',
+ ' G ' + psi_COPY_moved_path + '\n',],
+ elides=True),
+ [], 'merge', sbox.repo_url + '/A/D/H/psi_moved',
+ psi_COPY_moved_path)
+
+ # 'MM' = locally modified text and props (the merge edited the file
+ # contents and recorded mergeinfo on it).
+ expected_status.tweak('H_COPY/psi_moved', status='MM')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+#----------------------------------------------------------------------
+# Issue #3157
+@SkipUnless(server_has_mergeinfo)
+@Issue(3157)
+def dont_explicitly_record_implicit_mergeinfo(sbox):
+ "don't explicitly record implicit mergeinfo"
+
+ # Revisions that are part of a branch's own (implicit/natural) history
+ # must not be written into its explicit svn:mergeinfo when merging from
+ # a copy of that branch back to it.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A_path = sbox.ospath('A')
+ A_copy_path = sbox.ospath('A_copy')
+ A_copy2_path = sbox.ospath('A_copy2')
+ A_copy_mu_path = sbox.ospath('A_copy/mu')
+ A_copy2_mu_path = sbox.ospath('A_copy2/mu')
+ nu_path = sbox.ospath('A/D/H/nu')
+ nu_copy_path = sbox.ospath('A_copy/D/H/nu')
+
+ # Commit all scheduled changes as revision REV, then update the whole WC
+ # so subsequent steps start from a uniform revision.
+ def _commit_and_update(rev, action):
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'r%d - %s' % (rev, action),
+ sbox.wc_dir)
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+ # r2 - copy A to A_copy
+ svntest.main.run_svn(None, 'cp', A_path, A_copy_path)
+ _commit_and_update(2, "Copy A to A_copy.")
+
+ # r3 - tweak A_copy/mu
+ svntest.main.file_append(A_copy_mu_path, "r3\n")
+ _commit_and_update(3, "Edit A_copy/mu.")
+
+ # r4 - copy A_copy to A_copy2
+ svntest.main.run_svn(None, 'cp', A_copy_path, A_copy2_path)
+ _commit_and_update(4, "Copy A_copy to A_copy2.")
+
+ # r5 - tweak A_copy2/mu
+ svntest.main.file_append(A_copy2_mu_path, "r5\n")
+ _commit_and_update(5, "Edit A_copy2/mu.")
+
+ # Merge r5 from A_copy2/mu to A_copy/mu.
+ #
+ # run_and_verify_merge doesn't support merging to a file WCPATH
+ # so use run_and_verify_svn. Check the resulting mergeinfo with
+ # a propget.
+ ### TODO: We can use run_and_verify_merge() here now.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[5]], ['U ' + A_copy_mu_path + '\n',
+ ' U ' + A_copy_mu_path + '\n']),
+ [], 'merge', '-c5', sbox.repo_url + '/A_copy2/mu', A_copy_mu_path)
+ check_mergeinfo_recursively(A_copy_mu_path,
+ { A_copy_mu_path: '/A_copy2/mu:5' })
+
+ # Now, merge A_copy2 (in full) back to A_copy. This should result in
+ # mergeinfo of '/A_copy2:4-5' on A_copy and '/A_copy2/mu:4-5' on A_copy/mu
+ # and the latter should elide to the former. Any revisions < 4 are part of
+ # A_copy's natural history and should not be explicitly recorded.
+ expected_output = wc.State(A_copy_path, {})
+ expected_mergeinfo_output = wc.State(A_copy_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_copy_path, {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A_copy2:4-5'}),
+ 'mu' : Item("This is the file 'mu'.\nr3\nr5\n",
+ props={SVN_PROP_MERGEINFO : '/A_copy2/mu:5'}),
+ 'B' : Item(),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ })
+ expected_status = wc.State(A_copy_path, {
+ '' : Item(status=' M'),
+ 'mu' : Item(status='MM'),
+ 'B' : Item(status=' '),
+ 'B/lambda' : Item(status=' '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=5)
+ expected_skip = wc.State(A_copy_path, { })
+ svntest.actions.run_and_verify_merge(A_copy_path, None, None,
+ sbox.repo_url + '/A_copy2', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip,
+ check_props=True)
+
+ # Revert the previous merges and try a cherry harvest merge where
+ # the subtree's natural history is a proper subset of the merge.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+
+ # Rebuild the expected baseline status for the whole WC (both copies).
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ wc_status = svntest.actions.get_virginal_state(wc_dir, 5)
+ wc_status.add({
+ 'A_copy' : Item(),
+ 'A_copy/B' : Item(),
+ 'A_copy/B/lambda' : Item(),
+ 'A_copy/B/E' : Item(),
+ 'A_copy/B/E/alpha' : Item(),
+ 'A_copy/B/E/beta' : Item(),
+ 'A_copy/B/F' : Item(),
+ 'A_copy/mu' : Item(),
+ 'A_copy/C' : Item(),
+ 'A_copy/D' : Item(),
+ 'A_copy/D/gamma' : Item(),
+ 'A_copy/D/G' : Item(),
+ 'A_copy/D/G/pi' : Item(),
+ 'A_copy/D/G/rho' : Item(),
+ 'A_copy/D/G/tau' : Item(),
+ 'A_copy/D/H' : Item(),
+ 'A_copy/D/H/chi' : Item(),
+ 'A_copy/D/H/omega' : Item(),
+ 'A_copy/D/H/psi' : Item(),
+ 'A_copy2' : Item(),
+ 'A_copy2/B' : Item(),
+ 'A_copy2/B/lambda' : Item(),
+ 'A_copy2/B/E' : Item(),
+ 'A_copy2/B/E/alpha' : Item(),
+ 'A_copy2/B/E/beta' : Item(),
+ 'A_copy2/B/F' : Item(),
+ 'A_copy2/mu' : Item(),
+ 'A_copy2/C' : Item(),
+ 'A_copy2/D' : Item(),
+ 'A_copy2/D/gamma' : Item(),
+ 'A_copy2/D/G' : Item(),
+ 'A_copy2/D/G/pi' : Item(),
+ 'A_copy2/D/G/rho' : Item(),
+ 'A_copy2/D/G/tau' : Item(),
+ 'A_copy2/D/H' : Item(),
+ 'A_copy2/D/H/chi' : Item(),
+ 'A_copy2/D/H/omega' : Item(),
+ 'A_copy2/D/H/psi' : Item(),
+ })
+ wc_status.tweak(status=' ', wc_rev=5)
+
+ # r6 - Add the file 'A/D/H/nu'.
+ svntest.main.file_write(nu_path, "This is the file 'nu'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'add', nu_path)
+ expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Adding')})
+ wc_status.add({'A/D/H/nu' : Item(status=' ', wc_rev=6)})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+
+ # r7 - Make a change to 'A/D/H/nu'.
+ svntest.main.file_write(nu_path, "Nu content")
+ expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Sending')})
+ wc_status.tweak('A/D/H/nu', wc_rev=7)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+
+ # r8 - Merge r6 to 'A_copy'.
+ expected_output = wc.State(A_copy_path, {
+ 'D/H/nu' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(A_copy_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_copy_path, {
+ })
+ expected_A_copy_status = wc.State(A_copy_path, {
+ '' : Item(status=' M', wc_rev=5),
+ 'B' : Item(status=' ', wc_rev=5),
+ 'mu' : Item(status=' ', wc_rev=5),
+ 'B/E' : Item(status=' ', wc_rev=5),
+ 'B/E/alpha' : Item(status=' ', wc_rev=5),
+ 'B/E/beta' : Item(status=' ', wc_rev=5),
+ 'B/lambda' : Item(status=' ', wc_rev=5),
+ 'B/F' : Item(status=' ', wc_rev=5),
+ 'C' : Item(status=' ', wc_rev=5),
+ 'D' : Item(status=' ', wc_rev=5),
+ 'D/G' : Item(status=' ', wc_rev=5),
+ 'D/G/pi' : Item(status=' ', wc_rev=5),
+ 'D/G/rho' : Item(status=' ', wc_rev=5),
+ 'D/G/tau' : Item(status=' ', wc_rev=5),
+ 'D/gamma' : Item(status=' ', wc_rev=5),
+ 'D/H' : Item(status=' ', wc_rev=5),
+ 'D/H/chi' : Item(status=' ', wc_rev=5),
+ 'D/H/psi' : Item(status=' ', wc_rev=5),
+ 'D/H/omega' : Item(status=' ', wc_rev=5),
+ 'D/H/nu' : Item(status='A ', wc_rev='-', copied='+'),
+ })
+ expected_A_copy_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:6'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\nr3\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ 'D/H/nu' : Item("This is the file 'nu'.\n"),
+ })
+ expected_A_copy_skip = wc.State(A_copy_path, {})
+ svntest.actions.run_and_verify_merge(A_copy_path, '5', '6',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_A_copy_disk,
+ expected_A_copy_status,
+ expected_A_copy_skip,
+ check_props=True)
+ wc_status.add({'A_copy/D/H/nu' : Item(status=' ', wc_rev=8)})
+ wc_status.tweak('A_copy', wc_rev=8)
+ expected_output = wc.State(wc_dir, {
+ 'A_copy/D/H/nu' : Item(verb='Adding'),
+ 'A_copy' : Item(verb='Sending'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+
+ # r9 - Merge r7 to 'A_copy/D/H/nu'.
+ expected_skip = wc.State(nu_copy_path, { })
+ # run_and_verify_merge doesn't support merging to a file WCPATH
+ # so use run_and_verify_svn.
+ ### TODO: We can use run_and_verify_merge() here now.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[7]],
+ ['U ' + nu_copy_path + '\n',
+ ' G ' + nu_copy_path + '\n',]),
+ [], 'merge', '-c7', sbox.repo_url + '/A/D/H/nu', nu_copy_path)
+ expected_output = wc.State(wc_dir, {'A_copy/D/H/nu' : Item(verb='Sending')})
+ wc_status.tweak('A_copy/D/H/nu', wc_rev=9)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+
+ # Update WC
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ wc_status.tweak(wc_rev=9)
+
+ # r10 - Make another change to 'A/D/H/nu'.
+ svntest.main.file_write(nu_path, "Even nuer content")
+ expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Sending')})
+ wc_status.tweak('A/D/H/nu', wc_rev=10)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+
+ # Update WC
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ wc_status.tweak(wc_rev=10)
+
+ # Now do a cherry harvest merge to 'A_copy'.
+ # The key expectation: A_copy's root records '/A:2-10' while the subtree
+ # A_copy/D/H/nu records only '/A/D/H/nu:6-10' -- nu did not exist before
+ # r6, so no earlier (implicit) revisions may be recorded for it.
+ expected_output = wc.State(A_copy_path, {
+ 'D/H/nu' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_copy_path, {
+ '' : Item(status=' U'),
+ 'D/H/nu' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_copy_path, {
+ })
+ expected_A_copy_status = wc.State(A_copy_path, {
+ '' : Item(status=' M', wc_rev=10),
+ 'B' : Item(status=' ', wc_rev=10),
+ 'mu' : Item(status=' ', wc_rev=10),
+ 'B/E' : Item(status=' ', wc_rev=10),
+ 'B/E/alpha' : Item(status=' ', wc_rev=10),
+ 'B/E/beta' : Item(status=' ', wc_rev=10),
+ 'B/lambda' : Item(status=' ', wc_rev=10),
+ 'B/F' : Item(status=' ', wc_rev=10),
+ 'C' : Item(status=' ', wc_rev=10),
+ 'D' : Item(status=' ', wc_rev=10),
+ 'D/G' : Item(status=' ', wc_rev=10),
+ 'D/G/pi' : Item(status=' ', wc_rev=10),
+ 'D/G/rho' : Item(status=' ', wc_rev=10),
+ 'D/G/tau' : Item(status=' ', wc_rev=10),
+ 'D/gamma' : Item(status=' ', wc_rev=10),
+ 'D/H' : Item(status=' ', wc_rev=10),
+ 'D/H/chi' : Item(status=' ', wc_rev=10),
+ 'D/H/psi' : Item(status=' ', wc_rev=10),
+ 'D/H/omega' : Item(status=' ', wc_rev=10),
+ 'D/H/nu' : Item(status='MM', wc_rev=10),
+ })
+ expected_A_copy_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:2-10'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\nr3\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ 'D/H/nu' : Item("Even nuer content",
+ props={SVN_PROP_MERGEINFO : '/A/D/H/nu:6-10'}),
+ })
+ expected_A_copy_skip = wc.State(A_copy_path, {})
+ svntest.actions.run_and_verify_merge(A_copy_path, None, None,
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_A_copy_disk,
+ expected_A_copy_status,
+ expected_A_copy_skip,
+ check_props=True)
+
+
+#----------------------------------------------------------------------
+# Test for issue where merging a change to a broken link fails
+@SkipUnless(svntest.main.is_posix_os)
+def merge_broken_link(sbox):
+ "merge with broken symlinks in target"
+
+ # Create our good ol' greek tree.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ src_path = sbox.ospath('A/B/E')
+ copy_path = sbox.ospath('A/B/E_COPY')
+ link_path = os.path.join(src_path, 'beta_link')
+
+ # r2: add a symlink whose target ('beta_broken') does not exist.
+ os.symlink('beta_broken', link_path)
+ svntest.main.run_svn(None, 'add', link_path)
+ svntest.main.run_svn(None, 'commit', '-m', 'Create a broken link', link_path)
+ # r3: branch the tree while the link is still dangling.
+ svntest.main.run_svn(None, 'copy', src_path, copy_path)
+ svntest.main.run_svn(None, 'commit', '-m', 'Copy the tree with the broken link',
+ copy_path)
+ # r4: repoint the link at the existing 'beta' file.
+ os.unlink(link_path)
+ os.symlink('beta', link_path)
+ svntest.main.run_svn(None, 'commit', '-m', 'Fix a broken link', link_path)
+ # Merging r4 into the copy must update the (still broken) link there
+ # rather than failing on the dangling target.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[4]],
+ ['U ' + copy_path + '/beta_link\n',
+ ' U ' + copy_path + '\n']),
+ [], 'merge', '-c4', src_path, copy_path)
+
+
+#----------------------------------------------------------------------
+# Test for issue #3199 'Subtree merges broken when required ranges
+# don't intersect with merge target'
+@SkipUnless(server_has_mergeinfo)
+@Issue(3199)
+def subtree_merges_dont_intersect_with_targets(sbox):
+ "subtree ranges might not intersect with target"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make two branches to merge to.
+ wc_disk, wc_status = set_up_branch(sbox, False, 2)
+
+ # Some paths we'll care about.
+ # NOTE(review): psi_COPY_path is assigned twice below (identical value);
+ # the duplicate is harmless but redundant.
+ A_COPY_path = sbox.ospath('A_COPY')
+ A_COPY_2_path = sbox.ospath('A_COPY_2')
+ H_COPY_2_path = sbox.ospath('A_COPY_2/D/H')
+ gamma_path = sbox.ospath('A/D/gamma')
+ psi_path = sbox.ospath('A/D/H/psi')
+ psi_COPY_path = sbox.ospath('A_COPY/D/H/psi')
+ gamma_COPY_path = sbox.ospath('A_COPY/D/gamma')
+ psi_COPY_path = sbox.ospath('A_COPY/D/H/psi')
+ psi_COPY_2_path = sbox.ospath('A_COPY_2/D/H/psi')
+ rho_COPY_2_path = sbox.ospath('A_COPY_2/D/G/rho')
+
+ # Make a tweak to A/D/gamma and A/D/H/psi in r8.
+ svntest.main.file_write(gamma_path, "New content")
+ svntest.main.file_write(psi_path, "Even newer content")
+ expected_output = wc.State(wc_dir, {
+ 'A/D/gamma' : Item(verb='Sending'),
+ 'A/D/H/psi' : Item(verb='Sending'),
+ })
+ wc_status.tweak('A/D/gamma', 'A/D/H/psi', wc_rev=8)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+ wc_disk.tweak('A/D/gamma', contents="New content")
+ wc_disk.tweak('A/D/H/psi', contents="Even newer content")
+
+ # Update the WC.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [],
+ 'update', wc_dir)
+ wc_status.tweak(wc_rev=8)
+
+ # Run a bunch of merges to setup the 2 branches with explicit
+ # mergeinfo on each branch root and explicit mergeinfo on one subtree
+ # of each root. The mergeinfo should be such that:
+ #
+ # 1) On one branch: The mergeinfo on the root and the subtree do
+ # not intersect.
+ #
+ # 2) On the other branch: The mergeinfo on the root and subtree
+ # are each 'missing' and eligible ranges and these missing
+ # ranges do not intersect.
+ #
+ # Note: We just use run_and_verify_svn(...'merge'...) here rather than
+ # run_and_verify_merge() because these types of simple merges are
+ # tested to death elsewhere and this is just setup for the "real"
+ # test.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'merge', '-c4',
+ sbox.repo_url + '/A/D/H/psi',
+ psi_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'merge', '-c8',
+ sbox.repo_url + '/A',
+ A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'merge', '-c-8',
+ sbox.repo_url + '/A/D/H/psi',
+ psi_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'merge',
+ sbox.repo_url + '/A',
+ A_COPY_2_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'merge', '-c-5',
+ sbox.repo_url + '/A',
+ A_COPY_2_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'merge', '-c5', '-c-8',
+ sbox.repo_url + '/A/D/H',
+ H_COPY_2_path)
+
+ # Commit all the previous merges as r9.
+ expected_output = wc.State(wc_dir, {
+ 'A_COPY' : Item(verb='Sending'),
+ 'A_COPY/D/H/psi' : Item(verb='Sending'),
+ 'A_COPY/D/gamma' : Item(verb='Sending'),
+ 'A_COPY_2' : Item(verb='Sending'),
+ 'A_COPY_2/B/E/beta' : Item(verb='Sending'),
+ 'A_COPY_2/D/H' : Item(verb='Sending'),
+ 'A_COPY_2/D/H/omega' : Item(verb='Sending'),
+ 'A_COPY_2/D/H/psi' : Item(verb='Sending'),
+ 'A_COPY_2/D/gamma' : Item(verb='Sending'),
+ })
+ wc_status.tweak('A_COPY',
+ 'A_COPY/D/H/psi',
+ 'A_COPY/D/gamma',
+ 'A_COPY_2',
+ 'A_COPY_2/B/E/beta',
+ 'A_COPY_2/D/H',
+ 'A_COPY_2/D/H/omega',
+ 'A_COPY_2/D/H/psi',
+ 'A_COPY_2/D/gamma',
+ wc_rev=9)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ wc_status)
+
+ # Update the WC.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [],
+ 'update', wc_dir)
+
+ # Make sure we have mergeinfo that meets the two criteria set out above.
+ check_mergeinfo_recursively(wc_dir,
+ { # Criterion 1
+ A_COPY_path: '/A:8',
+ psi_COPY_path: '/A/D/H/psi:4',
+ # Criterion 2
+ A_COPY_2_path : '/A:3-4,6-8',
+ H_COPY_2_path : '/A/D/H:3-7' })
+
+ # Merging to the criterion 2 branch.
+ #
+ # Forward merge a range to a target with a subtree where the target
+ # and subtree need different, non-intersecting revision ranges applied:
+ # Merge r3:9 from A into A_COPY_2.
+ #
+ # The subtree A_COPY_2/D/H needs r8-9 applied (affecting A_COPY_2/D/H/psi)
+ # while the target needs r5 (affecting A_COPY_2/D/G/rho) applied. The
+ # resulting mergeinfo on A_COPY_2 and A_COPY_2/D/H should be equivalent
+ # and therefore elide to A_COPY_2.
+ expected_output = wc.State(A_COPY_2_path, {
+ 'D/G/rho' : Item(status='U '),
+ 'D/H/psi' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_2_path, {
+ '' : Item(status=' U'),
+ 'D/H' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_2_path, {
+ 'D/H' : Item(status=' U'),
+ })
+ expected_status = wc.State(A_COPY_2_path, {
+ '' : Item(status=' M', wc_rev=9),
+ 'B' : Item(status=' ', wc_rev=9),
+ 'mu' : Item(status=' ', wc_rev=9),
+ 'B/E' : Item(status=' ', wc_rev=9),
+ 'B/E/alpha' : Item(status=' ', wc_rev=9),
+ 'B/E/beta' : Item(status=' ', wc_rev=9),
+ 'B/lambda' : Item(status=' ', wc_rev=9),
+ 'B/F' : Item(status=' ', wc_rev=9),
+ 'C' : Item(status=' ', wc_rev=9),
+ 'D' : Item(status=' ', wc_rev=9),
+ 'D/G' : Item(status=' ', wc_rev=9),
+ 'D/G/pi' : Item(status=' ', wc_rev=9),
+ 'D/G/rho' : Item(status='M ', wc_rev=9),
+ 'D/G/tau' : Item(status=' ', wc_rev=9),
+ 'D/gamma' : Item(status=' ', wc_rev=9),
+ 'D/H' : Item(status=' M', wc_rev=9),
+ 'D/H/chi' : Item(status=' ', wc_rev=9),
+ 'D/H/psi' : Item(status='M ', wc_rev=9),
+ 'D/H/omega' : Item(status=' ', wc_rev=9),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:3-9'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("New content"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("Even newer content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State(A_COPY_2_path, {})
+ svntest.actions.run_and_verify_merge(A_COPY_2_path, '3', '9',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Merging to the criterion 1 branch.
+ #
+ # Reverse merge a range to a target with a subtree where the target
+ # and subtree need different, non-intersecting revision ranges
+ # reversed: Merge r9:3 from A into A_COPY.
+ #
+ # The subtree A_COPY_2/D/H/psi needs r4 reversed, while the target needs
+ # r8 (affecting A_COPY/D/gamma) reversed. Since this reverses all merges
+ # thus far to A_COPY, there should be *no* mergeinfo post merge.
+ expected_output = wc.State(A_COPY_path, {
+ 'D/gamma' : Item(status='U '),
+ 'D/H/psi' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'D/H/psi' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'D/H/psi' : Item(status=' U'),
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=9),
+ 'B' : Item(status=' ', wc_rev=9),
+ 'mu' : Item(status=' ', wc_rev=9),
+ 'B/E' : Item(status=' ', wc_rev=9),
+ 'B/E/alpha' : Item(status=' ', wc_rev=9),
+ 'B/E/beta' : Item(status=' ', wc_rev=9),
+ 'B/lambda' : Item(status=' ', wc_rev=9),
+ 'B/F' : Item(status=' ', wc_rev=9),
+ 'C' : Item(status=' ', wc_rev=9),
+ 'D' : Item(status=' ', wc_rev=9),
+ 'D/G' : Item(status=' ', wc_rev=9),
+ 'D/G/pi' : Item(status=' ', wc_rev=9),
+ 'D/G/rho' : Item(status=' ', wc_rev=9),
+ 'D/G/tau' : Item(status=' ', wc_rev=9),
+ 'D/gamma' : Item(status='M ', wc_rev=9),
+ 'D/H' : Item(status=' ', wc_rev=9),
+ 'D/H/chi' : Item(status=' ', wc_rev=9),
+ 'D/H/psi' : Item(status='MM', wc_rev=9),
+ 'D/H/omega' : Item(status=' ', wc_rev=9),
+ })
+ expected_disk = wc.State('', {
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_path, {})
+ svntest.actions.run_and_verify_merge(A_COPY_path, '9', '3',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Test the notification portion of issue #3199.
+ #
+ # run_and_verify_merge() doesn't check the notification headers
+ # so we need to repeat the previous two merges using
+ # run_and_verify_svn(...'merge'...) and expected_merge_output().
+ #
+ ### TODO: Things are fairly ugly when it comes to testing the
+ ### merge notification headers. run_and_verify_merge*()
+ ### just ignores the notifications and in the few places
+ ### we use expected_merge_output() the order of notifications
+ ### and paths are not considered. In a perfect world we'd
+ ### have run_and_verify_merge() that addressed these
+ ### shortcomings (and allowed merges to file targets).
+ #
+ # Revert the previous merges.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+
+ # Repeat the forward merge
+ expected_output = expected_merge_output(
+ [[5],[8],[5,9]],
+ ['U %s\n' % (rho_COPY_2_path),
+ 'U %s\n' % (psi_COPY_2_path),
+ ' U %s\n' % (H_COPY_2_path),
+ ' U %s\n' % (A_COPY_2_path),],
+ elides=True)
+ svntest.actions.run_and_verify_svn(expected_output,
+ [], 'merge', '-r', '3:9',
+ sbox.repo_url + '/A',
+ A_COPY_2_path)
+ # Repeat the reverse merge
+ expected_output = expected_merge_output(
+ [[-4],[-8],[8,4]],
+ ['U %s\n' % (gamma_COPY_path),
+ 'U %s\n' % (psi_COPY_path),
+ ' U %s\n' % (A_COPY_path),
+ ' U %s\n' % (psi_COPY_path)],
+ elides=True)
+ svntest.actions.run_and_verify_svn(expected_output,
+ [], 'merge', '-r', '9:3',
+ sbox.repo_url + '/A',
+ A_COPY_path)
+
+
+#----------------------------------------------------------------------
+# Some more tests for issue #3067 'subtrees that don't exist at the start
+# or end of a merge range shouldn't break the merge'
+@Issue(3067)
+@SkipUnless(server_has_mergeinfo)
+def subtree_source_missing_in_requested_range(sbox):
+ "subtree merge source might not exist"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make a branch to merge to.
+ wc_disk, wc_status = set_up_branch(sbox, False, 1)
+
+ # Some paths we'll care about.
+ psi_path = sbox.ospath('A/D/H/psi')
+ omega_path = sbox.ospath('A/D/H/omega')
+ A_COPY_path = sbox.ospath('A_COPY')
+ psi_COPY_path = sbox.ospath('A_COPY/D/H/psi')
+ omega_COPY_path = sbox.ospath('A_COPY/D/H/omega')
+
+ # r7 Delete A/D/H/psi.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'delete', psi_path)
+ sbox.simple_commit(message='delete psi')
+
+ # r8 - modify A/D/H/omega.
+ svntest.main.file_write(os.path.join(omega_path), "Even newer content")
+ sbox.simple_commit(message='modify omega')
+
+ # r9 - Merge r3 to A_COPY/D/H/psi
+ expected_output = expected_merge_output(
+ [[3]], ['U %s\n' % (psi_COPY_path),
+ ' U %s\n' % (psi_COPY_path),])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'merge', '-c', '3',
+ sbox.repo_url + '/A/D/H/psi@3',
+ psi_COPY_path)
+ sbox.simple_commit(message='merge r3 to A_COPY/D/H/psi')
+
+ # r10 - Merge r6 to A_COPY/D/H/omega.
+ expected_output = expected_merge_output(
+ [[6]], ['U %s\n' % (omega_COPY_path),
+ ' U %s\n' % (omega_COPY_path),])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'merge', '-c', '6',
+ sbox.repo_url + '/A/D/H/omega',
+ omega_COPY_path)
+ sbox.simple_commit(message='merge r6 to A_COPY')
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(10), [], 'up',
+ wc_dir)
+
+ # r11 - Merge r8 to A_COPY.
+ expected_output = expected_merge_output(
+ [[8]], ['U %s\n' % (omega_COPY_path),
+ ' U %s\n' % (omega_COPY_path),
+ ' U %s\n' % (A_COPY_path)])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'merge', '-c', '8',
+ sbox.repo_url + '/A',
+ A_COPY_path)
+ # Repeat the merge using the --record-only option so A_COPY/D/H/psi gets
+ # mergeinfo including 'A/D/H/psi:8', which doesn't exist. Why? Because
+ # we are trying to create mergeinfo that will provoke an invalid editor
+ # drive. In 1.5-1.6 merge updated all subtrees, regardless of whether the
+ # merge touched these subtrees. This --record-only merge duplicates that
+ # behavior, allowing us to test the relevant issue #3067 fixes.
+ expected_output = expected_merge_output(
+ [[8]], [' G %s\n' % (omega_COPY_path),
+ ' U %s\n' % (psi_COPY_path),
+ ' G %s\n' % (A_COPY_path)])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'merge', '-c', '8',
+ sbox.repo_url + '/A',
+ A_COPY_path, '--record-only')
+ sbox.simple_commit(message='merge r8 to A_COPY/D/H/omega')
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(11), [], 'up',
+ wc_dir)
+
+ # r12 - modify A/D/H/omega yet again.
+ svntest.main.file_write(os.path.join(omega_path),
+ "Now with fabulous new content!")
+ sbox.simple_commit(message='modify omega')
+
+ # r13 - Merge all available revs to A_COPY/D/H/omega.
+ expected_output = expected_merge_output(
+ [[9,12],[2,12]], ['U %s\n' % (omega_COPY_path),
+ ' U %s\n' % (omega_COPY_path)])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'merge',
+ sbox.repo_url + '/A/D/H/omega',
+ omega_COPY_path)
+ sbox.simple_commit(message='cherry harvest to A_COPY/D/H/omega')
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(13), [], 'up',
+ wc_dir)
+
+ # Check that svn:mergeinfo is as expected.
+ check_mergeinfo_recursively(wc_dir,
+ { A_COPY_path: '/A:8',
+ omega_COPY_path: '/A/D/H/omega:2-12',
+ psi_COPY_path : '/A/D/H/psi:3,8' })
+
+ # Now test a reverse merge where part of the requested range postdates
+ # a subtree's existence. Merge -r12:1 to A_COPY. This should revert
+ # all of the merges done thus far. The fact that A/D/H/psi no longer
+ # exists after r7 shouldn't break the subtree merge into A_COPY/D/H/psi.
+ # A_COPY/D/H/psi should simply have r3 reverse merged. No paths under
+ # in the tree rooted at A_COPY should have any explicit mergeinfo.
+ expected_output = wc.State(A_COPY_path, {
+ 'D/H/omega' : Item(status='U '),
+ 'D/H/psi' : Item(status='U '),
+ 'D/H/omega' : Item(status='G ', prev_status='G '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'D/H/psi' : Item(status=' U'),
+ 'D/H/omega' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'D/H/psi' : Item(status=' U'),
+ 'D/H/omega' : Item(status=' U'),
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=13),
+ 'B' : Item(status=' ', wc_rev=13),
+ 'mu' : Item(status=' ', wc_rev=13),
+ 'B/E' : Item(status=' ', wc_rev=13),
+ 'B/E/alpha' : Item(status=' ', wc_rev=13),
+ 'B/E/beta' : Item(status=' ', wc_rev=13),
+ 'B/lambda' : Item(status=' ', wc_rev=13),
+ 'B/F' : Item(status=' ', wc_rev=13),
+ 'C' : Item(status=' ', wc_rev=13),
+ 'D' : Item(status=' ', wc_rev=13),
+ 'D/G' : Item(status=' ', wc_rev=13),
+ 'D/G/pi' : Item(status=' ', wc_rev=13),
+ 'D/G/rho' : Item(status=' ', wc_rev=13),
+ 'D/G/tau' : Item(status=' ', wc_rev=13),
+ 'D/gamma' : Item(status=' ', wc_rev=13),
+ 'D/H' : Item(status=' ', wc_rev=13),
+ 'D/H/chi' : Item(status=' ', wc_rev=13),
+ 'D/H/psi' : Item(status='MM', wc_rev=13),
+ 'D/H/omega' : Item(status='MM', wc_rev=13),
+ })
+ expected_disk = wc.State('', {
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, '12', '1',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False)
+
+ # Revert the previous merge.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '-R', wc_dir)
+ # Merge r12 to A_COPY and commit as r14.
+ expected_output = wc.State(A_COPY_path, {})
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=13),
+ 'B' : Item(status=' ', wc_rev=13),
+ 'mu' : Item(status=' ', wc_rev=13),
+ 'B/E' : Item(status=' ', wc_rev=13),
+ 'B/E/alpha' : Item(status=' ', wc_rev=13),
+ 'B/E/beta' : Item(status=' ', wc_rev=13),
+ 'B/lambda' : Item(status=' ', wc_rev=13),
+ 'B/F' : Item(status=' ', wc_rev=13),
+ 'C' : Item(status=' ', wc_rev=13),
+ 'D' : Item(status=' ', wc_rev=13),
+ 'D/G' : Item(status=' ', wc_rev=13),
+ 'D/G/pi' : Item(status=' ', wc_rev=13),
+ 'D/G/rho' : Item(status=' ', wc_rev=13),
+ 'D/G/tau' : Item(status=' ', wc_rev=13),
+ 'D/gamma' : Item(status=' ', wc_rev=13),
+ 'D/H' : Item(status=' ', wc_rev=13),
+ 'D/H/chi' : Item(status=' ', wc_rev=13),
+ 'D/H/psi' : Item(status=' ', wc_rev=13),
+ 'D/H/omega' : Item(status=' ', wc_rev=13),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:8,12'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content",
+ props={SVN_PROP_MERGEINFO : '/A/D/H/psi:3,8'}),
+ 'D/H/omega' : Item("Now with fabulous new content!",
+ props={SVN_PROP_MERGEINFO : '/A/D/H/omega:2-12'}),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, '11', '12',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False)
+ # As we did earlier, repeat the merge with the --record-only option to
+ # preserve the old behavior of recording mergeinfo on every subtree, thus
+ # allowing this test to actually test the issue #3067 fixes.
+ expected_output = expected_merge_output(
+ [[12]], ['U %s\n' % (A_COPY_path),
+ ' G %s\n' % (A_COPY_path),
+ ' U %s\n' % (psi_COPY_path),
+ ' U %s\n' % (omega_COPY_path),])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'merge', '-c', '12',
+ sbox.repo_url + '/A',
+ A_COPY_path, '--record-only')
+ sbox.simple_commit(message='Merge r12 to A_COPY')
+
+  # Update A_COPY/D/H/rho back to r13 so its mergeinfo doesn't include
+ # r12. Then merge a range, -r6:12 which should delete a subtree
+ # (A_COPY/D/H/psi).
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(14), [], 'up',
+ wc_dir)
+ expected_output = wc.State(A_COPY_path, {
+ 'D/H/psi' : Item(status='D '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=14),
+ 'B' : Item(status=' ', wc_rev=14),
+ 'mu' : Item(status=' ', wc_rev=14),
+ 'B/E' : Item(status=' ', wc_rev=14),
+ 'B/E/alpha' : Item(status=' ', wc_rev=14),
+ 'B/E/beta' : Item(status=' ', wc_rev=14),
+ 'B/lambda' : Item(status=' ', wc_rev=14),
+ 'B/F' : Item(status=' ', wc_rev=14),
+ 'C' : Item(status=' ', wc_rev=14),
+ 'D' : Item(status=' ', wc_rev=14),
+ 'D/G' : Item(status=' ', wc_rev=14),
+ 'D/G/pi' : Item(status=' ', wc_rev=14),
+ 'D/G/rho' : Item(status=' ', wc_rev=14),
+ 'D/G/tau' : Item(status=' ', wc_rev=14),
+ 'D/gamma' : Item(status=' ', wc_rev=14),
+ 'D/H' : Item(status=' ', wc_rev=14),
+ 'D/H/chi' : Item(status=' ', wc_rev=14),
+ 'D/H/psi' : Item(status='D ', wc_rev=14),
+ 'D/H/omega' : Item(status=' ', wc_rev=14),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:7-12'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/omega' : Item("Now with fabulous new content!",
+ props={SVN_PROP_MERGEINFO : '/A/D/H/omega:2-12'}),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, '6', '12',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False)
+
+#----------------------------------------------------------------------
+# Another test for issue #3067: 'subtrees that don't exist at the start
+# or end of a merge range shouldn't break the merge'
+#
+# See http://subversion.tigris.org/issues/show_bug.cgi?id=3067#desc34
+@Issue(3067)
+@SkipUnless(server_has_mergeinfo)
+def subtrees_with_empty_mergeinfo(sbox):
+  "mergeinfo not set on subtree with empty mergeinfo"
+
+  # Variant of issue #3067: after a cherry-harvest merge, a subtree whose
+  # mergeinfo elides to the merge target must not keep explicit mergeinfo;
+  # only the target itself should carry it.
+
+  # Use helper to setup a renamed subtree.
+  wc_dir, expected_disk, expected_status = set_up_renamed_subtree(sbox)
+
+  # Some paths we'll care about
+  H_COPY_path = sbox.ospath('H_COPY')
+
+  # Cherry harvest all available revisions from 'A/D/H' to 'H_COPY'.
+  #
+  # This should merge r4:6 from 'A/D/H' setting mergeinfo for r5-6
+  # on both 'H_COPY' and 'H_COPY/psi_moved'. But since the working copy
+  # is at a uniform working revision, the latter's mergeinfo should
+  # elide, leaving explicit mergeinfo only on the merge target.
+  expected_output = wc.State(H_COPY_path, {
+    'psi_moved' : Item(status='U ')
+    })
+  expected_mergeinfo_output = wc.State(H_COPY_path, {
+    ''          : Item(status=' U'),
+    'psi_moved' : Item(status=' U'),
+    })
+  expected_elision_output = wc.State(H_COPY_path, {
+    'psi_moved' : Item(status=' U'),
+    })
+  expected_status = wc.State(H_COPY_path, {
+    ''          : Item(status=' M', wc_rev=6), # mergeinfo set on target
+    'psi_moved' : Item(status='MM', wc_rev=6), # mergeinfo elides
+    'omega'     : Item(status='  ', wc_rev=6),
+    'chi'       : Item(status='  ', wc_rev=6),
+    })
+  expected_disk = wc.State('', {
+    ''          : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:5-6'}),
+    'psi_moved' : Item("Even *Newer* content"), # mergeinfo elides
+    'omega'     : Item("New omega"),
+    'chi'       : Item("This is the file 'chi'.\n"),
+    })
+  expected_skip = wc.State(H_COPY_path, { })
+
+  svntest.actions.run_and_verify_merge(H_COPY_path, None, None,
+                                       sbox.repo_url + '/A/D/H', None,
+                                       expected_output,
+                                       expected_mergeinfo_output,
+                                       expected_elision_output,
+                                       expected_disk,
+                                       expected_status, expected_skip,
+                                       check_props=True)
+
+#----------------------------------------------------------------------
+# Test for issue #3240 'commits to subtrees added by merge
+# corrupt working copy and repos'.
+@SkipUnless(server_has_mergeinfo)
+@Issue(3240)
+def commit_to_subtree_added_by_merge(sbox):
+  "commits to subtrees added by merge wreak havoc"
+
+  # Issue #3240 scenario: a subtree (A/D/H/N) created in the merge source
+  # and brought into A_COPY/D/H by merge must remain committable without
+  # corrupting the working copy or the repository.
+
+  # Setup a standard greek tree in r1.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Some paths we'll care about
+  N_path = sbox.ospath('A/D/H/N')
+  nu_path = sbox.ospath('A/D/H/N/nu')
+  nu_COPY_path = sbox.ospath('A_COPY/D/H/N/nu')
+  H_COPY_path = sbox.ospath('A_COPY/D/H')
+
+  # Copy 'A' to 'A_COPY' in r2.
+  wc_disk, wc_status = set_up_branch(sbox, True)
+
+  # Create a 'A/D/H/N' and 'A/D/H/N/nu', and commit this new
+  # subtree as r3.
+  os.mkdir(N_path)
+  svntest.main.file_write(nu_path, "This is the file 'nu'.\n")
+  svntest.actions.run_and_verify_svn(None, [], 'add', N_path)
+  expected_output = wc.State(wc_dir,
+                             {'A/D/H/N' : Item(verb='Adding'),
+                              'A/D/H/N/nu' : Item(verb='Adding')})
+  wc_status.add({'A/D/H/N' : Item(status='  ', wc_rev=3),
+                 'A/D/H/N/nu' : Item(status='  ', wc_rev=3)})
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        wc_status)
+
+  # Merge r3 to 'A_COPY/D/H', creating A_COPY/D/H/N' and 'A_COPY/D/H/N/nu'.
+  # Commit the merge as r4.
+  expected_output = wc.State(H_COPY_path, {
+    'N'    : Item(status='A '),
+    'N/nu' : Item(status='A '),
+    })
+  expected_mergeinfo_output = wc.State(H_COPY_path, {
+    '' : Item(status=' U'),
+    })
+  expected_elision_output = wc.State(H_COPY_path, {
+    })
+  expected_status = wc.State(H_COPY_path, {
+    ''      : Item(status=' M', wc_rev=2),
+    'psi'   : Item(status='  ', wc_rev=2),
+    'omega' : Item(status='  ', wc_rev=2),
+    'chi'   : Item(status='  ', wc_rev=2),
+    'N'     : Item(status='A ', copied='+', wc_rev='-'),
+    'N/nu'  : Item(status='  ', copied='+', wc_rev='-'),
+    })
+  expected_disk = wc.State('', {
+    ''      : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:2-3'}),
+    'psi'   : Item("This is the file 'psi'.\n"),
+    'omega' : Item("This is the file 'omega'.\n"),
+    'chi'   : Item("This is the file 'chi'.\n"),
+    'N'     : Item(),
+    'N/nu'  : Item("This is the file 'nu'.\n"),
+    })
+  expected_skip = wc.State(H_COPY_path, {})
+  svntest.actions.run_and_verify_merge(H_COPY_path,
+                                       None, None,
+                                       sbox.repo_url + '/A/D/H', None,
+                                       expected_output,
+                                       expected_mergeinfo_output,
+                                       expected_elision_output,
+                                       expected_disk,
+                                       expected_status, expected_skip,
+                                       check_props=True)
+  expected_output = wc.State(wc_dir, {
+    'A_COPY/D/H'   : Item(verb='Sending'),
+    'A_COPY/D/H/N' : Item(verb='Adding'),
+    })
+  wc_status.add({'A_COPY/D/H/N'    : Item(status='  ', wc_rev=4),
+                 'A_COPY/D/H/N/nu' : Item(status='  ', wc_rev=4)})
+  wc_status.tweak('A_COPY/D/H', wc_rev=4)
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        wc_status)
+
+  # Make a text change to 'A_COPY/D/H/N/nu' and commit it as r5. This
+  # is the first place issue #3240 appears over DAV layers, and the
+  # commit fails with an error like this:
+  # trunk>svn ci -m "" merge_tests-100
+  # Sending merge_tests-100\A_COPY\D\H\N\nu
+  # Transmitting file data ...\..\..\subversion\libsvn_client\commit.c:919:
+  # (apr_err=20014)
+  # svn: Commit failed (details follow):
+  # ..\..\..\subversion\libsvn_ra_neon\merge.c:260: (apr_err=20014)
+  # svn: A MERGE response for '/svn-test-work/repositories/merge_tests-100/
+  # A/D/H/N/nu' is not a child of the destination
+  # ('/svn-test-work/repositories/merge_tests-100/A_COPY/D/H/N')
+  svntest.main.file_write(nu_COPY_path, "New content")
+  expected_output = wc.State(wc_dir,
+                             {'A_COPY/D/H/N/nu' : Item(verb='Sending')})
+  wc_status.tweak('A_COPY/D/H/N/nu', wc_rev=5)
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        wc_status)
+  # The second place issue #3240 shows up is in the fact that the commit
+  # *did* succeed, but the wrong path ('A/D/H/N/nu' rather than
+  # 'A_COPY/D/H/N/nu') is affected. We can see this by running an update;
+  # since we just committed there shouldn't be any incoming changes.
+  svntest.actions.run_and_verify_svn(exp_noop_up_out(5), [], 'up',
+                                     wc_dir)
+
+
+#----------------------------------------------------------------------
+# Tests for merging the deletion of a node, where the node to be deleted
+# is the same as or different from the node that was deleted.
+
+#----------------------------------------------------------------------
+def del_identical_file(sbox):
+  "merge tries to delete a file of identical content"
+
+  # Merging a deletion should remove the target-side file quietly (no
+  # tree conflict) when its content is identical to the deleted source
+  # file, whether that identity comes from the copy or from a local mod.
+  # NOTE(review): svn_modfile/svn_copy/svn_merge/local_path are helpers
+  # defined elsewhere in this module — confirm their contracts there.
+
+  # Set up a standard greek tree in r1.
+  sbox.build()
+
+  saved_cwd = os.getcwd()
+  os.chdir(sbox.wc_dir)
+  sbox.wc_dir = ''
+
+  # Set up a modification and deletion in the source branch.
+  source = 'A/D/G'
+  s_rev_orig = 1
+  svn_modfile(source+"/tau")
+  sbox.simple_commit(source)
+  s_rev_mod = 2
+  sbox.simple_rm(source+"/tau")
+  sbox.simple_commit(source)
+  s_rev_del = 3
+
+  # Make an identical copy, and merge a deletion to it.
+  target = 'A/D/G2'
+  svn_copy(s_rev_mod, source, target)
+  sbox.simple_commit(target)
+  # Should be deleted quietly.
+  svn_merge(s_rev_del, source, target,
+            ['D    %s\n' % local_path('A/D/G2/tau')])
+
+  # Make a differing copy, locally modify it so it's the same,
+  # and merge a deletion to it.
+  target = 'A/D/G3'
+  svn_copy(s_rev_orig, source, target)
+  sbox.simple_commit(target)
+  svn_modfile(target+"/tau")
+  # Should be deleted quietly.
+  svn_merge(s_rev_del, source, target,
+            ['D    %s\n' % local_path('A/D/G3/tau')])
+
+  os.chdir(saved_cwd)
+
+#----------------------------------------------------------------------
+def del_sched_add_hist_file(sbox):
+  "merge tries to delete identical sched-add file"
+
+  # A reverse merge of a file addition, applied while the file is still
+  # schedule-add-with-history from the forward merge (uncommitted), should
+  # delete the file quietly and let the subtree mergeinfo elide.
+
+  # Setup a standard greek tree in r1.
+  sbox.build()
+
+  saved_cwd = os.getcwd()
+  os.chdir(sbox.wc_dir)
+  sbox.wc_dir = ''
+
+  # Set up a creation in the source branch.
+  source = 'A/D/G'
+  s_rev_orig = 1
+  svn_mkfile(source+"/file")
+  sbox.simple_commit(source)
+  s_rev_add = 2
+
+  # Merge a creation, and delete by reverse-merging into uncommitted WC.
+  target = 'A/D/G2'
+  svn_copy(s_rev_orig, source, target)
+  sbox.simple_commit(target)
+  s_rev = 3
+  svn_merge(s_rev_add, source, target,
+            ['A    %s\n' % local_path('A/D/G2/file')])
+  # Should be deleted quietly.
+  svn_merge(-s_rev_add, source, target,
+            ['D    %s\n' % local_path('A/D/G2/file')], elides=['A/D/G2'])
+
+  os.chdir(saved_cwd)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def subtree_merges_dont_cause_spurious_conflicts(sbox):
+  "subtree merges dont cause spurious conflicts"
+
+  # Fix a merge bug where previous merges are incorrectly reversed leading
+  # to repeat merges and spurious conflicts. These can occur when a subtree
+  # needs a range M:N merged that is older than the ranges X:Y needed by the
+  # merge target *and* there are changes in the merge source between N:X that
+  # affect parts of the merge target other than the subtree. An actual case
+  # where our own repository encountered this problem is described here:
+  # http://subversion.tigris.org/servlets/ReadMsg?listName=dev&msgNo=141832
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Some paths we'll care about
+  rho_path = sbox.ospath('A/D/G/rho')
+  A_COPY_path = sbox.ospath('A_COPY')
+  psi_COPY_path = sbox.ospath('A_COPY/D/H/psi')
+
+  # Make a branch to merge to.
+  wc_disk, wc_status = set_up_branch(sbox, False, 1)
+
+  # r7 Make a text change to A/D/G/rho.
+  svntest.main.file_write(rho_path, "Newer content")
+  expected_output = wc.State(wc_dir, {'A/D/G/rho' : Item(verb='Sending')})
+  wc_status.tweak('A/D/G/rho', wc_rev=7)
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        wc_status)
+  wc_disk.tweak('A/D/G/rho', contents="Newer content")
+
+  # r8 Make another text change to A/D/G/rho.
+  svntest.main.file_write(rho_path, "Even *newer* content")
+  expected_output = wc.State(wc_dir, {'A/D/G/rho' : Item(verb='Sending')})
+  wc_status.tweak('A/D/G/rho', wc_rev=8)
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+  wc_disk.tweak('A/D/G/rho', contents="Even *newer* content")
+
+  # Update the WC to allow full mergeinfo inheritance and elision.
+  svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [], 'up',
+                                     wc_dir)
+  wc_status.tweak(wc_rev=8)
+
+  # r9 Merge r0:7 from A to A_COPY, then create a subtree with differing
+  # mergeinfo under A_COPY by reverse merging r3 from A_COPY/D/H/psi.
+  expected_output = wc.State(A_COPY_path, {
+    'B/E/beta'  : Item(status='U '),
+    'D/G/rho'   : Item(status='U '),
+    'D/H/omega' : Item(status='U '),
+    'D/H/psi'   : Item(status='U '),
+    })
+  expected_mergeinfo_output = wc.State(A_COPY_path, {
+    '' : Item(status=' U'),
+    })
+  expected_elision_output = wc.State(A_COPY_path, {
+    })
+  expected_status = wc.State(A_COPY_path, {
+    ''          : Item(status=' M', wc_rev=8),
+    'B'         : Item(status='  ', wc_rev=8),
+    'mu'        : Item(status='  ', wc_rev=8),
+    'B/E'       : Item(status='  ', wc_rev=8),
+    'B/E/alpha' : Item(status='  ', wc_rev=8),
+    'B/E/beta'  : Item(status='M ', wc_rev=8),
+    'B/lambda'  : Item(status='  ', wc_rev=8),
+    'B/F'       : Item(status='  ', wc_rev=8),
+    'C'         : Item(status='  ', wc_rev=8),
+    'D'         : Item(status='  ', wc_rev=8),
+    'D/G'       : Item(status='  ', wc_rev=8),
+    'D/G/pi'    : Item(status='  ', wc_rev=8),
+    'D/G/rho'   : Item(status='M ', wc_rev=8),
+    'D/G/tau'   : Item(status='  ', wc_rev=8),
+    'D/gamma'   : Item(status='  ', wc_rev=8),
+    'D/H'       : Item(status='  ', wc_rev=8),
+    'D/H/chi'   : Item(status='  ', wc_rev=8),
+    'D/H/psi'   : Item(status='M ', wc_rev=8),
+    'D/H/omega' : Item(status='M ', wc_rev=8),
+    })
+  expected_disk = wc.State('', {
+    ''          : Item(props={SVN_PROP_MERGEINFO : '/A:2-7'}),
+    'B'         : Item(),
+    'mu'        : Item("This is the file 'mu'.\n"),
+    'B/E'       : Item(),
+    'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+    'B/E/beta'  : Item("New content"),
+    'B/lambda'  : Item("This is the file 'lambda'.\n"),
+    'B/F'       : Item(),
+    'C'         : Item(),
+    'D'         : Item(),
+    'D/G'       : Item(),
+    'D/G/pi'    : Item("This is the file 'pi'.\n"),
+    'D/G/rho'   : Item("Newer content"),
+    'D/G/tau'   : Item("This is the file 'tau'.\n"),
+    'D/gamma'   : Item("This is the file 'gamma'.\n"),
+    'D/H'       : Item(),
+    'D/H/chi'   : Item("This is the file 'chi'.\n"),
+    'D/H/psi'   : Item("New content",),
+    'D/H/omega' : Item("New content"),
+    })
+  expected_skip = wc.State(A_COPY_path, { })
+  svntest.actions.run_and_verify_merge(A_COPY_path, '0', '7',
+                                       sbox.repo_url + '/A', None,
+                                       expected_output,
+                                       expected_mergeinfo_output,
+                                       expected_elision_output,
+                                       expected_disk,
+                                       expected_status, expected_skip,
+                                       check_props=True)
+  # run_and_verify_merge doesn't support merging to a file WCPATH
+  # so use run_and_verify_svn.
+  ### TODO: We can use run_and_verify_merge() here now.
+  svntest.actions.run_and_verify_svn(expected_merge_output([[-3]],
+                                       ['G    ' + psi_COPY_path + '\n',
+                                        ' G   ' + psi_COPY_path + '\n']),
+                                     [], 'merge', '-c-3',
+                                     sbox.repo_url + '/A/D/H/psi',
+                                     psi_COPY_path)
+  # Commit the two merges.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A_COPY' : Item(verb='Sending'),
+    'A_COPY/B/E/beta'  : Item(verb='Sending'),
+    'A_COPY/D/G/rho'   : Item(verb='Sending'),
+    'A_COPY/D/H/psi'   : Item(verb='Sending'),
+    'A_COPY/D/H/omega' : Item(verb='Sending'),
+    })
+  wc_status.tweak('A_COPY',
+                  'A_COPY/B/E/beta',
+                  'A_COPY/D/G/rho',
+                  'A_COPY/D/H/psi',
+                  'A_COPY/D/H/omega',
+                  wc_rev=9)
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        wc_status)
+
+  # Update the WC to allow full mergeinfo inheritance and elision.
+  svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [], 'up',
+                                     wc_dir)
+  wc_status.tweak(wc_rev=9)
+
+  # r9 Merge all available revisions from A to A_COPY.
+  #
+  # This is where the bug revealed itself, instead of cleanly merging
+  # just r3 and then r8-9, the first merge editor drive of r3 set A_COPY
+  # to the state it was in r7, effectively reverting the merge committed
+  # in r9. So we saw unexpected merges to omega, rho, and beta, as they
+  # are returned to their r7 state and then a conflict on rho as the editor
+  # attempted to merge r8:
+  #
+  # trunk>svn merge %url%/A merge_tests-104\A_COPY
+  # --- Merging r3 into 'merge_tests-104\A_COPY\D\H\psi':
+  # U merge_tests-104\A_COPY\D\H\psi
+  # --- Merging r8 through r9 into 'merge_tests-104\A_COPY':
+  # U merge_tests-104\A_COPY\D\H\omega
+  # U merge_tests-104\A_COPY\D\G\rho
+  # U merge_tests-104\A_COPY\B\E\beta
+  # Conflict discovered in 'merge_tests-104/A_COPY/D/G/rho'.
+  # Select: (p) postpone, (df) diff-full, (e) edit,
+  # (mc) mine-conflict, (tc) theirs-conflict,
+  # (s) show all options: p
+  # --- Merging r8 through r9 into 'merge_tests-104\A_COPY':
+  # C merge_tests-104\A_COPY\D\G\rho
+  expected_output = wc.State(A_COPY_path, {
+    'D/G/rho' : Item(status='U '),
+    'D/H/psi' : Item(status='U '),
+    })
+  expected_mergeinfo_output = wc.State(A_COPY_path, {
+    ''        : Item(status=' U'),
+    'D/H/psi' : Item(status=' U'),
+    })
+  expected_elision_output = wc.State(A_COPY_path, {
+    'D/H/psi' : Item(status=' U'),
+    })
+  expected_status = wc.State(A_COPY_path, {
+    ''          : Item(status=' M', wc_rev=9),
+    'B'         : Item(status='  ', wc_rev=9),
+    'mu'        : Item(status='  ', wc_rev=9),
+    'B/E'       : Item(status='  ', wc_rev=9),
+    'B/E/alpha' : Item(status='  ', wc_rev=9),
+    'B/E/beta'  : Item(status='  ', wc_rev=9),
+    'B/lambda'  : Item(status='  ', wc_rev=9),
+    'B/F'       : Item(status='  ', wc_rev=9),
+    'C'         : Item(status='  ', wc_rev=9),
+    'D'         : Item(status='  ', wc_rev=9),
+    'D/G'       : Item(status='  ', wc_rev=9),
+    'D/G/pi'    : Item(status='  ', wc_rev=9),
+    'D/G/rho'   : Item(status='M ', wc_rev=9),
+    'D/G/tau'   : Item(status='  ', wc_rev=9),
+    'D/gamma'   : Item(status='  ', wc_rev=9),
+    'D/H'       : Item(status='  ', wc_rev=9),
+    'D/H/chi'   : Item(status='  ', wc_rev=9),
+    'D/H/psi'   : Item(status='MM', wc_rev=9),
+    'D/H/omega' : Item(status='  ', wc_rev=9),
+    })
+  expected_disk = wc.State('', {
+    ''          : Item(props={SVN_PROP_MERGEINFO : '/A:2-9'}),
+    'B'         : Item(),
+    'mu'        : Item("This is the file 'mu'.\n"),
+    'B/E'       : Item(),
+    'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+    'B/E/beta'  : Item("New content"),
+    'B/lambda'  : Item("This is the file 'lambda'.\n"),
+    'B/F'       : Item(),
+    'C'         : Item(),
+    'D'         : Item(),
+    'D/G'       : Item(),
+    'D/G/pi'    : Item("This is the file 'pi'.\n"),
+    'D/G/rho'   : Item("Even *newer* content"),
+    'D/G/tau'   : Item("This is the file 'tau'.\n"),
+    'D/gamma'   : Item("This is the file 'gamma'.\n"),
+    'D/H'       : Item(),
+    'D/H/chi'   : Item("This is the file 'chi'.\n"),
+    'D/H/psi'   : Item("New content"), # Mergeinfo elides to A_COPY
+    'D/H/omega' : Item("New content"),
+    })
+  expected_skip = wc.State(A_COPY_path, { })
+  svntest.actions.run_and_verify_merge(A_COPY_path, None, None,
+                                       sbox.repo_url + '/A', None,
+                                       expected_output,
+                                       expected_mergeinfo_output,
+                                       expected_elision_output,
+                                       expected_disk,
+                                       expected_status, expected_skip,
+                                       check_props=True)
+
+#----------------------------------------------------------------------
+# Test for yet another variant of issue #3067.
+@Issue(3067)
+@SkipUnless(server_has_mergeinfo)
+def merge_target_and_subtrees_need_nonintersecting_ranges(sbox):
+  "target and subtrees need nonintersecting revs"
+
+  # Arrange mergeinfo so that a cherry-harvest merge must apply different,
+  # non-overlapping ranges to the target (A_COPY) and to two of its
+  # subtrees (D/G/nu and D/H/omega) — issue #3067 variant.
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Some paths we'll care about
+  nu_path = sbox.ospath('A/D/G/nu')
+  A_COPY_path = sbox.ospath('A_COPY')
+  nu_COPY_path = sbox.ospath('A_COPY/D/G/nu')
+  omega_COPY_path = sbox.ospath('A_COPY/D/H/omega')
+  beta_COPY_path = sbox.ospath('A_COPY/B/E/beta')
+  rho_COPY_path = sbox.ospath('A_COPY/D/G/rho')
+  psi_COPY_path = sbox.ospath('A_COPY/D/H/psi')
+
+  # Make a branch to merge to.
+  wc_disk, wc_status = set_up_branch(sbox, False, 1)
+
+  # Add file A/D/G/nu in r7.
+  svntest.main.file_write(nu_path, "This is the file 'nu'.\n")
+  svntest.actions.run_and_verify_svn(None, [], 'add', nu_path)
+  expected_output = wc.State(wc_dir, {'A/D/G/nu' : Item(verb='Adding')})
+  wc_status.add({'A/D/G/nu' : Item(status='  ', wc_rev=7)})
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        wc_status)
+
+  # Make a text mod to A/D/G/nu in r8.
+  svntest.main.file_write(nu_path, "New content")
+  expected_output = wc.State(wc_dir, {'A/D/G/nu' : Item(verb='Sending')})
+  wc_status.tweak('A/D/G/nu', wc_rev=8)
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        wc_status)
+
+  # Do several merges to setup a situation where the merge
+  # target and two of its subtrees need non-intersecting ranges
+  # merged when doing a synch (a.k.a. cherry harvest) merge.
+  #
+  # 1) Merge -r0:7 from A to A_COPY.
+  #
+  # 2) Merge -c8 from A/D/G/nu to A_COPY/D/G/nu.
+  #
+  # 3) Merge -c-6 from A/D/H/omega to A_COPY/D/H/omega.
+  #
+  # Commit this group of merges as r9. Since we already test these type
+  # of merges to death we don't use run_and_verify_merge() on these
+  # intermediate merges.
+  svntest.actions.run_and_verify_svn(
+    expected_merge_output([[2,7]],
+                          ['U    ' + beta_COPY_path + '\n',
+                           'A    ' + nu_COPY_path + '\n',
+                           'U    ' + rho_COPY_path + '\n',
+                           'U    ' + omega_COPY_path + '\n',
+                           'U    ' + psi_COPY_path + '\n',
+                           ' U   ' + A_COPY_path + '\n',]
+                          ),
+    [], 'merge', '-r0:7', sbox.repo_url + '/A', A_COPY_path)
+  svntest.actions.run_and_verify_svn(
+    expected_merge_output([[8]], ['U    ' + nu_COPY_path + '\n',
+                                  ' G   ' + nu_COPY_path + '\n']),
+    [], 'merge', '-c8', sbox.repo_url + '/A/D/G/nu', nu_COPY_path)
+
+  svntest.actions.run_and_verify_svn(
+    expected_merge_output([[-6]], ['G    ' + omega_COPY_path + '\n',
+                                   ' G   ' + omega_COPY_path + '\n']),
+    [], 'merge', '-c-6', sbox.repo_url + '/A/D/H/omega', omega_COPY_path)
+  wc_status.add({'A_COPY/D/G/nu' : Item(status='  ', wc_rev=9)})
+  wc_status.tweak('A_COPY',
+                  'A_COPY/B/E/beta',
+                  'A_COPY/D/G/rho',
+                  'A_COPY/D/H/omega',
+                  'A_COPY/D/H/psi',
+                  wc_rev=9)
+  expected_output = wc.State(wc_dir, {
+    'A_COPY'           : Item(verb='Sending'),
+    'A_COPY/B/E/beta'  : Item(verb='Sending'),
+    'A_COPY/D/G/rho'   : Item(verb='Sending'),
+    'A_COPY/D/G/nu'    : Item(verb='Adding'),
+    'A_COPY/D/H/omega' : Item(verb='Sending'),
+    'A_COPY/D/H/psi'   : Item(verb='Sending'),
+    })
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+
+  # Update the WC to allow full mergeinfo inheritance and elision.
+  svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [], 'up',
+                                     wc_dir)
+
+  # Merge all available revisions from A to A_COPY, the merge logic
+  # should handle this situation (no "svn: Working copy path 'D/G/nu'
+  # does not exist in repository" errors!). The mergeinfo on
+  # A_COPY/D/H/omega elides to the root, but the mergeinfo on
+  # A_COPY/D/G/nu, untouched by the merge, does not get updated so
+  # does not elide.
+  expected_output = wc.State(A_COPY_path, {
+    'D/H/omega': Item(status='U '),
+    })
+  expected_mergeinfo_output = wc.State(A_COPY_path, {
+    ''         : Item(status=' U'),
+    'D/H/omega': Item(status=' U'),
+    })
+  expected_elision_output = wc.State(A_COPY_path, {
+    'D/H/omega': Item(status=' U'),
+    })
+  expected_status = wc.State(A_COPY_path, {
+    ''          : Item(status=' M', wc_rev=9),
+    'B'         : Item(status='  ', wc_rev=9),
+    'mu'        : Item(status='  ', wc_rev=9),
+    'B/E'       : Item(status='  ', wc_rev=9),
+    'B/E/alpha' : Item(status='  ', wc_rev=9),
+    'B/E/beta'  : Item(status='  ', wc_rev=9),
+    'B/lambda'  : Item(status='  ', wc_rev=9),
+    'B/F'       : Item(status='  ', wc_rev=9),
+    'C'         : Item(status='  ', wc_rev=9),
+    'D'         : Item(status='  ', wc_rev=9),
+    'D/G'       : Item(status='  ', wc_rev=9),
+    'D/G/pi'    : Item(status='  ', wc_rev=9),
+    'D/G/rho'   : Item(status='  ', wc_rev=9),
+    'D/G/tau'   : Item(status='  ', wc_rev=9),
+    'D/G/nu'    : Item(status='  ', wc_rev=9),
+    'D/gamma'   : Item(status='  ', wc_rev=9),
+    'D/H'       : Item(status='  ', wc_rev=9),
+    'D/H/chi'   : Item(status='  ', wc_rev=9),
+    'D/H/psi'   : Item(status='  ', wc_rev=9),
+    'D/H/omega' : Item(status='MM', wc_rev=9),
+    })
+  expected_disk = wc.State('', {
+    ''          : Item(props={SVN_PROP_MERGEINFO : '/A:2-9'}),
+    'B'         : Item(),
+    'mu'        : Item("This is the file 'mu'.\n"),
+    'B/E'       : Item(),
+    'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+    'B/E/beta'  : Item("New content"),
+    'B/lambda'  : Item("This is the file 'lambda'.\n"),
+    'B/F'       : Item(),
+    'C'         : Item(),
+    'D'         : Item(),
+    'D/G'       : Item(),
+    'D/G/pi'    : Item("This is the file 'pi'.\n"),
+    'D/G/rho'   : Item("New content"),
+    'D/G/tau'   : Item("This is the file 'tau'.\n"),
+    'D/G/nu'    : Item("New content",
+                       props={SVN_PROP_MERGEINFO : '/A/D/G/nu:2-8'}),
+    'D/gamma'   : Item("This is the file 'gamma'.\n"),
+    'D/H'       : Item(),
+    'D/H/chi'   : Item("This is the file 'chi'.\n"),
+    'D/H/psi'   : Item("New content"),
+    'D/H/omega' : Item("New content"),
+    })
+  expected_skip = wc.State(A_COPY_path, { })
+  svntest.actions.run_and_verify_merge(A_COPY_path, None, None,
+                                       sbox.repo_url + '/A', None,
+                                       expected_output,
+                                       expected_mergeinfo_output,
+                                       expected_elision_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       check_props=True)
+
+#----------------------------------------------------------------------
+# Part of this test is a regression test for issue #3250 "Repeated merging
+# of conflicting properties fails".
+@Issue(3250)
+def merge_two_edits_to_same_prop(sbox):
+  "merge two successive edits to the same property"
+
+  # Revision numbering below is relative to initial_rev: the branch copy
+  # created by set_up_branch() is r6, so rev1..rev4 are r7..r10.
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Make a branch to merge to. (This is r6.)
+  wc_disk, wc_status = set_up_branch(sbox, False, 1)
+  initial_rev = 6
+
+  # Change into the WC dir for convenience
+  was_cwd = os.getcwd()
+  os.chdir(sbox.wc_dir)
+  sbox.wc_dir = ''
+  wc_disk.wc_dir = ''
+  wc_status.wc_dir = ''
+
+  # Some paths we'll care about
+  A_path = "A"
+  A_COPY_path = "A_COPY"
+  mu_path = os.path.join(A_path, "mu")
+  mu_COPY_path = os.path.join(A_COPY_path, "mu")
+
+  # In the source, make two successive changes to the same property
+  sbox.simple_propset('p', 'new-val-1', 'A/mu')
+  sbox.simple_commit('A/mu')
+  rev1 = initial_rev + 1
+  sbox.simple_propset('p', 'new-val-2', 'A/mu')
+  sbox.simple_commit('A/mu')
+  rev2 = initial_rev + 2
+
+  # Merge the first change, then the second, to a target branch.
+  svn_merge(rev1, A_path, A_COPY_path)
+  svn_merge(rev2, A_path, A_COPY_path)
+
+  # Both changes should merge automatically: the second one should not
+  # complain about the local mod which the first one caused. The starting
+  # value in the target ("mine") for the second merge is exactly equal to
+  # the merge-left source value.
+
+  # A merge-tracking version of this problem is when the merge-tracking
+  # algorithm breaks a single requested merge into two phases because of
+  # some other target within the same merge requiring only a part of the
+  # revision range.
+
+  # ====================================================================
+
+  # We test issue #3250 here: that is, test that we can make two successive
+  # conflicting changes to the same property on the same node (here a file;
+  # in #3250 it was on a dir).
+  #
+  # ### But we no longer support merging into a node that's already in
+  # conflict, and the 'rev3' merge here has been tweaked to resolve
+  # the conflict, so it no longer tests the original #3250 scenario.
+  #
+  # Revert changes to branch wc
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'revert', '--recursive', A_COPY_path)
+
+  # In the branch, make two successive changes to the same property
+  sbox.simple_propset('p', 'new-val-3', 'A_COPY/mu')
+  sbox.simple_commit('A_COPY/mu')
+  rev3 = initial_rev + 3
+  sbox.simple_propset('p', 'new-val-4', 'A_COPY/mu')
+  sbox.simple_commit('A_COPY/mu')
+  rev4 = initial_rev + 4
+
+  # Merge the two changes together to trunk.
+  svn_merge([rev3, rev4], A_COPY_path, A_path, [
+      " C %s\n" % mu_path,
+    ], prop_conflicts=1, args=['--allow-mixed-revisions'])
+
+  # Revert changes to trunk wc, to test next scenario of #3250
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'revert', '--recursive', A_path)
+
+  # Merge the first change, then the second, to trunk.
+  svn_merge(rev3, A_COPY_path, A_path, [
+      " C %s\n" % mu_path,
+      "Resolved .* '%s'\n" % mu_path,
+    ], prop_resolved=1,
+    args=['--allow-mixed-revisions',
+          '--accept=working'])
+  svn_merge(rev4, A_COPY_path, A_path, [
+      " C %s\n" % mu_path,
+    ], prop_conflicts=1, args=['--allow-mixed-revisions'])
+
+  os.chdir(was_cwd)
+
+#----------------------------------------------------------------------
+def merge_an_eol_unification_and_set_svn_eol_style(sbox):
+  "merge an EOL unification and set svn:eol-style"
+  # In svn 1.5.2, merging the two changes between these three states:
+  # r1. inconsistent EOLs and no svn:eol-style
+  # r2. consistent EOLs and no svn:eol-style
+  # r3. consistent EOLs and svn:eol-style=native
+  # fails if attempted as a single merge (e.g. "svn merge r1:3") though it
+  # succeeds if attempted in two phases (e.g. "svn merge -c2,3").
+  # (rN here are relative states; actual revisions are initial_rev + N.)
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Make a branch to merge to. (This will be r6.)
+  wc_disk, wc_status = set_up_branch(sbox, False, 1)
+  initial_rev = 6
+
+  # Change into the WC dir for convenience
+  was_cwd = os.getcwd()
+  os.chdir(sbox.wc_dir)
+  sbox.wc_dir = ''
+  wc_disk.wc_dir = ''
+  wc_status.wc_dir = ''
+
+  content1 = 'Line1\nLine2\r\n' # write as 'binary' to get these exact EOLs
+  content2 = 'Line1\nLine2\n' # write as 'text' to get native EOLs in file
+
+  # In the source branch, create initial state and two successive changes.
+  # Use binary mode to write the first file so no newline conversion occurs.
+  svntest.main.file_write('A/mu', content1, 'wb')
+  sbox.simple_commit('A/mu')
+  rev1 = initial_rev + 1
+  # Use text mode to write the second copy of the file to get native EOLs.
+  svntest.main.file_write('A/mu', content2, 'w')
+  sbox.simple_commit('A/mu')
+  rev2 = initial_rev + 2
+  sbox.simple_propset('svn:eol-style', 'native', 'A/mu')
+  sbox.simple_commit('A/mu')
+  rev3 = initial_rev + 3
+
+  # Merge the initial state (inconsistent EOLs) to the target branch.
+  svn_merge(rev1, 'A', 'A_COPY')
+  sbox.simple_commit('A_COPY')
+
+  # Merge the two changes together to the target branch.
+  svn_merge([rev2, rev3], 'A', 'A_COPY',
+            args=['--allow-mixed-revisions'])
+
+  # That merge should succeed.
+  # Surprise: setting svn:eol-style='LF' instead of 'native' doesn't fail.
+  # Surprise: if we don't merge the file's 'rev1' state first, it doesn't fail
+  # nor even raise a conflict.
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def merge_adds_mergeinfo_correctly(sbox):
+ "merge adds mergeinfo to subtrees correctly"
+
+ # A merge may add explicit mergeinfo to the subtree of a merge target
+ # as a result of changes in the merge source. These paths may have
+ # inherited mergeinfo prior to the merge, if so the subtree should end up
+ # with mergeinfo that reflects all of the following:
+ #
+ # A) The mergeinfo added from the merge source
+ #
+ # B) The mergeinfo the subtree inherited prior to the merge.
+ #
+ # C) Mergeinfo describing the merge performed.
+ #
+ # See http://subversion.tigris.org/servlets/ReadMsg?listName=dev&msgNo=142460
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Setup a 'trunk' and two branches.
+ wc_disk, wc_status = set_up_branch(sbox, False, 2)
+
+ # Some paths we'll care about
+ A_COPY_path = sbox.ospath('A_COPY')
+ D_COPY_path = sbox.ospath('A_COPY/D')
+ A_COPY_2_path = sbox.ospath('A_COPY_2')
+ D_COPY_2_path = sbox.ospath('A_COPY_2/D')
+
+ # Update working copy to allow full inheritance and elision.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(7), [],
+ 'up', wc_dir)
+ wc_status.tweak(wc_rev=7)
+
+ # Merge r5 from A to A_COPY and commit as r8.
+ # This creates explicit mergeinfo on A_COPY of '/A:5'.
+ expected_output = wc.State(A_COPY_path, {
+ 'D/G/rho': Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=7),
+ 'B' : Item(status=' ', wc_rev=7),
+ 'mu' : Item(status=' ', wc_rev=7),
+ 'B/E' : Item(status=' ', wc_rev=7),
+ 'B/E/alpha' : Item(status=' ', wc_rev=7),
+ 'B/E/beta' : Item(status=' ', wc_rev=7),
+ 'B/lambda' : Item(status=' ', wc_rev=7),
+ 'B/F' : Item(status=' ', wc_rev=7),
+ 'C' : Item(status=' ', wc_rev=7),
+ 'D' : Item(status=' ', wc_rev=7),
+ 'D/G' : Item(status=' ', wc_rev=7),
+ 'D/G/pi' : Item(status=' ', wc_rev=7),
+ 'D/G/rho' : Item(status='M ', wc_rev=7),
+ 'D/G/tau' : Item(status=' ', wc_rev=7),
+ 'D/gamma' : Item(status=' ', wc_rev=7),
+ 'D/H' : Item(status=' ', wc_rev=7),
+ 'D/H/chi' : Item(status=' ', wc_rev=7),
+ 'D/H/psi' : Item(status=' ', wc_rev=7),
+ 'D/H/omega' : Item(status=' ', wc_rev=7),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:5'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, '4', '5',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+ # Commit the merge as r8.
+ wc_status.tweak('A_COPY',
+ 'A_COPY/D/G/rho',
+ wc_rev=8)
+ expected_output = wc.State(wc_dir, {
+ 'A_COPY' : Item(verb='Sending'),
+ 'A_COPY/D/G/rho' : Item(verb='Sending'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+
+ # Merge r7 from A/D to A_COPY_2/D and commit as r9.
+ # This creates explicit mergeinfo on A_COPY_2/D of '/A/D:7'.
+ expected_output = wc.State(D_COPY_2_path, {
+ 'H/omega': Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(D_COPY_2_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(D_COPY_2_path, {
+ })
+ expected_status = wc.State(D_COPY_2_path, {
+ '' : Item(status=' M', wc_rev=7),
+ 'G' : Item(status=' ', wc_rev=7),
+ 'G/pi' : Item(status=' ', wc_rev=7),
+ 'G/rho' : Item(status=' ', wc_rev=7),
+ 'G/tau' : Item(status=' ', wc_rev=7),
+ 'gamma' : Item(status=' ', wc_rev=7),
+ 'H' : Item(status=' ', wc_rev=7),
+ 'H/chi' : Item(status=' ', wc_rev=7),
+ 'H/psi' : Item(status=' ', wc_rev=7),
+ 'H/omega' : Item(status='M ', wc_rev=7),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D:7'}),
+ 'G' : Item(),
+ 'G/pi' : Item("This is the file 'pi'.\n"),
+ 'G/rho' : Item("This is the file 'rho'.\n"),
+ 'G/tau' : Item("This is the file 'tau'.\n"),
+ 'gamma' : Item("This is the file 'gamma'.\n"),
+ 'H' : Item(),
+ 'H/chi' : Item("This is the file 'chi'.\n"),
+ 'H/psi' : Item("This is the file 'psi'.\n"),
+ 'H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(D_COPY_2_path, '6', '7',
+ sbox.repo_url + '/A/D', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+ # Commit the merge as r9.
+ wc_status.tweak('A_COPY_2/D',
+ 'A_COPY_2/D/H/omega',
+ wc_rev=9)
+ expected_output = wc.State(wc_dir, {
+ 'A_COPY_2/D' : Item(verb='Sending'),
+ 'A_COPY_2/D/H/omega' : Item(verb='Sending'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+
+ # Merge r9 from A_COPY_2 to A_COPY. A_COPY/D gets the explicit mergeinfo
+ # '/A/D/:7' added from r9. But it prior to the merge it inherited '/A/D:5'
+ # from A_COPY, so this should be present in its explicit mergeinfo. Lastly,
+ # the mergeinfo describing this merge '/A_COPY_2:9' should also be present
+ # in A_COPY's explicit mergeinfo.
+ # Update working copy to allow full inheritance and elision.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [],
+ 'up', wc_dir)
+ expected_output = wc.State(A_COPY_path, {
+ 'D' : Item(status=' U'),
+ 'D/H/omega': Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'D' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=9),
+ 'B' : Item(status=' ', wc_rev=9),
+ 'mu' : Item(status=' ', wc_rev=9),
+ 'B/E' : Item(status=' ', wc_rev=9),
+ 'B/E/alpha' : Item(status=' ', wc_rev=9),
+ 'B/E/beta' : Item(status=' ', wc_rev=9),
+ 'B/lambda' : Item(status=' ', wc_rev=9),
+ 'B/F' : Item(status=' ', wc_rev=9),
+ 'C' : Item(status=' ', wc_rev=9),
+ 'D' : Item(status=' M', wc_rev=9),
+ 'D/G' : Item(status=' ', wc_rev=9),
+ 'D/G/pi' : Item(status=' ', wc_rev=9),
+ 'D/G/rho' : Item(status=' ', wc_rev=9),
+ 'D/G/tau' : Item(status=' ', wc_rev=9),
+ 'D/gamma' : Item(status=' ', wc_rev=9),
+ 'D/H' : Item(status=' ', wc_rev=9),
+ 'D/H/chi' : Item(status=' ', wc_rev=9),
+ 'D/H/psi' : Item(status=' ', wc_rev=9),
+ 'D/H/omega' : Item(status='M ', wc_rev=9),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:5\n/A_COPY_2:9'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(props={SVN_PROP_MERGEINFO : '/A/D:5,7\n/A_COPY_2/D:9'}),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, '8', '9',
+ sbox.repo_url + '/A_COPY_2', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Revert and repeat the above merge, but this time create some
+ # uncommitted mergeinfo on A_COPY/D, this should not cause a write
+ # lock error as was seen in http://subversion.tigris.org/
+ # ds/viewMessage.do?dsForumId=462&dsMessageId=103945
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '-R', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', SVN_PROP_MERGEINFO, '',
+ D_COPY_path)
+ expected_output = wc.State(A_COPY_path, {
+ 'D' : Item(status=' G'), # Merged with local svn:mergeinfo
+ 'D/H/omega': Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'D' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ # The expected disk and status trees from the previous merge still apply.
+ svntest.actions.run_and_verify_merge(A_COPY_path, '8', '9',
+ sbox.repo_url + '/A_COPY_2', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def natural_history_filtering(sbox):
+ "natural history filtering permits valid mergeinfo"
+
+ # While filtering self-referential mergeinfo (e.g. natural history) that
+ # a merge tries to add to a target, we may encounter contiguous revision
+ # ranges that describe *both* natural history and valid mergeinfo. The
+ # former should be filtered, but the latter allowed and recorded on the
+ # target. See
+ # http://subversion.tigris.org/servlets/ReadMsg?listName=dev&msgNo=142777.
+ #
+ # To set up a situation where this can occur we'll do the following:
+ #
+ # trunk -1-----3-4-5-6-------8----------- A
+ # \ \ \
+ # branch1 2-----------\-------9-------- A_COPY
+ # \ \
+ # branch2 7--------10---- A_COPY_2
+ #
+ # 1) Create a 'trunk'.
+ #
+ # 2) Copy 'trunk' to 'branch1'.
+ #
+ # 3) Make some changes under 'trunk'.
+ #
+ # 4) Copy 'trunk' to 'branch2'.
+ #
+ # 5) Make some more changes under 'trunk'.
+ #
+ # 6) Merge all available revisions from 'trunk' to 'branch1' and commit.
+ #
+ # 7) Merge all available revisions from 'branch1' to 'branch2'.
+ # 'branch2' should have explicit merginfo for both 'branch1' *and* for
+ # the revisions on 'trunk' which occurred after 'branch2' was copied as
+ # these are not part of 'branch2's natural history.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ A_COPY_path = sbox.ospath('A_COPY')
+ A_COPY_2_path = sbox.ospath('A_COPY_2')
+ chi_path = sbox.ospath('A/D/H/chi')
+
+ # r1-r6: Setup a 'trunk' (A) and a 'branch' (A_COPY).
+ wc_disk, wc_status = set_up_branch(sbox, False, 1)
+
+ # r7: Make a second 'branch': Copy A to A_COPY_2
+ expected = svntest.verify.UnorderedOutput(
+ ["A " + os.path.join(A_COPY_2_path, "B") + "\n",
+ "A " + os.path.join(A_COPY_2_path, "B", "lambda") + "\n",
+ "A " + os.path.join(A_COPY_2_path, "B", "E") + "\n",
+ "A " + os.path.join(A_COPY_2_path, "B", "E", "alpha") + "\n",
+ "A " + os.path.join(A_COPY_2_path, "B", "E", "beta") + "\n",
+ "A " + os.path.join(A_COPY_2_path, "B", "F") + "\n",
+ "A " + os.path.join(A_COPY_2_path, "mu") + "\n",
+ "A " + os.path.join(A_COPY_2_path, "C") + "\n",
+ "A " + os.path.join(A_COPY_2_path, "D") + "\n",
+ "A " + os.path.join(A_COPY_2_path, "D", "gamma") + "\n",
+ "A " + os.path.join(A_COPY_2_path, "D", "G") + "\n",
+ "A " + os.path.join(A_COPY_2_path, "D", "G", "pi") + "\n",
+ "A " + os.path.join(A_COPY_2_path, "D", "G", "rho") + "\n",
+ "A " + os.path.join(A_COPY_2_path, "D", "G", "tau") + "\n",
+ "A " + os.path.join(A_COPY_2_path, "D", "H") + "\n",
+ "A " + os.path.join(A_COPY_2_path, "D", "H", "chi") + "\n",
+ "A " + os.path.join(A_COPY_2_path, "D", "H", "omega") + "\n",
+ "A " + os.path.join(A_COPY_2_path, "D", "H", "psi") + "\n",
+ "Checked out revision 6.\n",
+ "A " + A_COPY_2_path + "\n"])
+ wc_status.add({
+ "A_COPY_2" + "/B" : Item(status=' ', wc_rev=7),
+ "A_COPY_2" + "/B/lambda" : Item(status=' ', wc_rev=7),
+ "A_COPY_2" + "/B/E" : Item(status=' ', wc_rev=7),
+ "A_COPY_2" + "/B/E/alpha" : Item(status=' ', wc_rev=7),
+ "A_COPY_2" + "/B/E/beta" : Item(status=' ', wc_rev=7),
+ "A_COPY_2" + "/B/F" : Item(status=' ', wc_rev=7),
+ "A_COPY_2" + "/mu" : Item(status=' ', wc_rev=7),
+ "A_COPY_2" + "/C" : Item(status=' ', wc_rev=7),
+ "A_COPY_2" + "/D" : Item(status=' ', wc_rev=7),
+ "A_COPY_2" + "/D/gamma" : Item(status=' ', wc_rev=7),
+ "A_COPY_2" + "/D/G" : Item(status=' ', wc_rev=7),
+ "A_COPY_2" + "/D/G/pi" : Item(status=' ', wc_rev=7),
+ "A_COPY_2" + "/D/G/rho" : Item(status=' ', wc_rev=7),
+ "A_COPY_2" + "/D/G/tau" : Item(status=' ', wc_rev=7),
+ "A_COPY_2" + "/D/H" : Item(status=' ', wc_rev=7),
+ "A_COPY_2" + "/D/H/chi" : Item(status=' ', wc_rev=7),
+ "A_COPY_2" + "/D/H/omega" : Item(status=' ', wc_rev=7),
+ "A_COPY_2" + "/D/H/psi" : Item(status=' ', wc_rev=7),
+ "A_COPY_2" : Item(status=' ', wc_rev=7),
+ })
+ wc_disk.add({
+ "A_COPY_2" : Item(),
+ "A_COPY_2" + '/B' : Item(),
+ "A_COPY_2" + '/B/lambda' : Item("This is the file 'lambda'.\n"),
+ "A_COPY_2" + '/B/E' : Item(),
+ "A_COPY_2" + '/B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ "A_COPY_2" + '/B/E/beta' : Item("New content"),
+ "A_COPY_2" + '/B/F' : Item(),
+ "A_COPY_2" + '/mu' : Item("This is the file 'mu'.\n"),
+ "A_COPY_2" + '/C' : Item(),
+ "A_COPY_2" + '/D' : Item(),
+ "A_COPY_2" + '/D/gamma' : Item("This is the file 'gamma'.\n"),
+ "A_COPY_2" + '/D/G' : Item(),
+ "A_COPY_2" + '/D/G/pi' : Item("This is the file 'pi'.\n"),
+ "A_COPY_2" + '/D/G/rho' : Item("New content"),
+ "A_COPY_2" + '/D/G/tau' : Item("This is the file 'tau'.\n"),
+ "A_COPY_2" + '/D/H' : Item(),
+ "A_COPY_2" + '/D/H/chi' : Item("New content"),
+ "A_COPY_2" + '/D/H/omega' : Item("This is the file 'omega'.\n"),
+ "A_COPY_2" + '/D/H/psi' : Item("New content"),
+ })
+ svntest.actions.run_and_verify_svn(expected, [], 'copy',
+ sbox.repo_url + "/A",
+ A_COPY_2_path)
+ expected_output = wc.State(wc_dir, {"A_COPY_2" : Item(verb='Adding')})
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ wc_status)
+
+ # r8: Make a text change under A, to A/D/H/chi.
+ svntest.main.file_write(chi_path, "New content")
+ expected_output = wc.State(wc_dir, {'A/D/H/chi' : Item(verb='Sending')})
+ wc_status.tweak('A/D/H/chi', wc_rev=8)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+ wc_disk.tweak('A/D/H/psi', contents="New content")
+
+ # r9: Merge all available revisions from A to A_COPY. But first
+ # update working copy to allow full inheritance and elision.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [],
+ 'up', wc_dir)
+ wc_status.tweak(wc_rev=8)
+ expected_output = wc.State(A_COPY_path, {
+ 'B/E/beta' : Item(status='U '),
+ 'D/G/rho' : Item(status='U '),
+ 'D/H/chi' : Item(status='U '),
+ 'D/H/psi' : Item(status='U '),
+ 'D/H/omega': Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=8),
+ 'B' : Item(status=' ', wc_rev=8),
+ 'mu' : Item(status=' ', wc_rev=8),
+ 'B/E' : Item(status=' ', wc_rev=8),
+ 'B/E/alpha' : Item(status=' ', wc_rev=8),
+ 'B/E/beta' : Item(status='M ', wc_rev=8),
+ 'B/lambda' : Item(status=' ', wc_rev=8),
+ 'B/F' : Item(status=' ', wc_rev=8),
+ 'C' : Item(status=' ', wc_rev=8),
+ 'D' : Item(status=' ', wc_rev=8),
+ 'D/G' : Item(status=' ', wc_rev=8),
+ 'D/G/pi' : Item(status=' ', wc_rev=8),
+ 'D/G/rho' : Item(status='M ', wc_rev=8),
+ 'D/G/tau' : Item(status=' ', wc_rev=8),
+ 'D/gamma' : Item(status=' ', wc_rev=8),
+ 'D/H' : Item(status=' ', wc_rev=8),
+ 'D/H/chi' : Item(status='M ', wc_rev=8),
+ 'D/H/psi' : Item(status='M ', wc_rev=8),
+ 'D/H/omega' : Item(status='M ', wc_rev=8),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:2-8'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("New content"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, None, None,
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+ wc_status.tweak('A_COPY',
+ 'A_COPY/B/E/beta',
+ 'A_COPY/D/G/rho',
+ 'A_COPY/D/H/chi',
+ 'A_COPY/D/H/psi',
+ 'A_COPY/D/H/omega',
+ wc_rev=9)
+ expected_output = wc.State(wc_dir, {
+ 'A_COPY' : Item(verb='Sending'),
+ 'A_COPY/B/E/beta' : Item(verb='Sending'),
+ 'A_COPY/D/G/rho' : Item(verb='Sending'),
+ 'A_COPY/D/H/chi' : Item(verb='Sending'),
+ 'A_COPY/D/H/psi' : Item(verb='Sending'),
+ 'A_COPY/D/H/omega' : Item(verb='Sending'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+
+ # Again update the working copy to allow full inheritance and elision.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [],
+ 'up', wc_dir)
+ wc_status.tweak(wc_rev=9)
+
+ # Merge all available revisions from A_COPY to A_COPY_2. The mergeinfo on
+ # A_COPY_2 should reflect both the merge of revisions 2-9 from A_COPY *and*
+ # revisions 7-8 from A. Reivisions 2-6 from A should not be part of the
+ # explicit mergeinfo on A_COPY_2 as they are already part of its natural
+ # history.
+ expected_output = wc.State(A_COPY_2_path, {
+ '' : Item(status=' U'),
+ 'D/H/chi' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_2_path, {
+ '' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(A_COPY_2_path, {
+ })
+ expected_status = wc.State(A_COPY_2_path, {
+ '' : Item(status=' M', wc_rev=9),
+ 'B' : Item(status=' ', wc_rev=9),
+ 'mu' : Item(status=' ', wc_rev=9),
+ 'B/E' : Item(status=' ', wc_rev=9),
+ 'B/E/alpha' : Item(status=' ', wc_rev=9),
+ 'B/E/beta' : Item(status=' ', wc_rev=9),
+ 'B/lambda' : Item(status=' ', wc_rev=9),
+ 'B/F' : Item(status=' ', wc_rev=9),
+ 'C' : Item(status=' ', wc_rev=9),
+ 'D' : Item(status=' ', wc_rev=9),
+ 'D/G' : Item(status=' ', wc_rev=9),
+ 'D/G/pi' : Item(status=' ', wc_rev=9),
+ 'D/G/rho' : Item(status=' ', wc_rev=9),
+ 'D/G/tau' : Item(status=' ', wc_rev=9),
+ 'D/gamma' : Item(status=' ', wc_rev=9),
+ 'D/H' : Item(status=' ', wc_rev=9),
+ 'D/H/chi' : Item(status='M ', wc_rev=9),
+ 'D/H/psi' : Item(status=' ', wc_rev=9),
+ 'D/H/omega' : Item(status=' ', wc_rev=9),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:7-8\n/A_COPY:2-9'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("New content"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_2_path, None, None,
+ sbox.repo_url + '/A_COPY', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+@Issue(3067)
+def subtree_gets_changes_even_if_ultimately_deleted(sbox):
+ "subtree gets changes even if ultimately deleted"
+
+ # merge_tests.py 101 'merge tries to delete a file of identical content'
+ # demonstrates how a file can be deleted by a merge if the file is identical
+ # to the file deleted in the merge source. If the file differs then it
+ # should be 'skipped' as a tree-conflict. But suppose the file has
+ # mergeinfo such that the requested merge should bring the file into a state
+ # identical to the deleted source *before* attempting to delete it. Then the
+ # file should get those changes first and then be deleted rather than skipped.
+ #
+ # This problem, as discussed here,
+ # http://subversion.tigris.org/servlets/ReadMsg?listName=dev&msgNo=141533,
+ # is only nominally a tree conflict issue. More accurately this is yet
+ # another issue #3067 problem, in that the merge target has a subtree which
+ # doesn't exist in part of the requested merge range.
+
+ # r1: Create a greek tree.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ H_COPY_path = sbox.ospath('A_COPY/D/H')
+ psi_path = sbox.ospath('A/D/H/psi')
+ psi_COPY_path = sbox.ospath('A_COPY/D/H/psi')
+
+ # r2 - r6: Copy A to A_COPY and then make some text changes under A.
+ set_up_branch(sbox)
+
+ # r7: Make an additional text mod to A/D/H/psi.
+ svntest.main.file_write(psi_path, "Even newer content")
+ sbox.simple_commit(message='mod psi')
+
+ # r8: Delete A/D/H/psi.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'delete', psi_path)
+ sbox.simple_commit(message='delete psi')
+
+ # Update WC before merging so mergeinfo elision and inheritance
+ # occur smoothly.
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+ # r9: Merge r3,7 from A/D/H to A_COPY/D/H, then reverse merge r7 from
+ # A/D/H/psi to A_COPY/D/H/psi.
+ expected_output = wc.State(H_COPY_path, {
+ 'psi' : Item(status='G ', prev_status='U '), # Touched twice
+ })
+ expected_mergeinfo_output = wc.State(H_COPY_path, {
+ '' : Item(status=' G', prev_status=' U'),
+ })
+ expected_elision_output = wc.State(H_COPY_path, {
+ })
+ expected_status = wc.State(H_COPY_path, {
+ '' : Item(status=' M', wc_rev=8),
+ 'psi' : Item(status='M ', wc_rev=8),
+ 'omega' : Item(status=' ', wc_rev=8),
+ 'chi' : Item(status=' ', wc_rev=8),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:3,7'}),
+ 'psi' : Item("Even newer content"),
+ 'omega' : Item("This is the file 'omega'.\n"),
+ 'chi' : Item("This is the file 'chi'.\n"),
+ })
+ expected_skip = wc.State(H_COPY_path, { })
+
+ svntest.actions.run_and_verify_merge(H_COPY_path, None, None,
+ sbox.repo_url + '/A/D/H', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip,
+ [], True, False,
+ '-c3,7', H_COPY_path)
+ # Reverse merge r7 from A/D/H/psi to A_COPY/D/H/psi, leaving psi's
+ # subtree mergeinfo without r7.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[-7]],
+ ['G ' + psi_COPY_path + '\n',
+ ' G ' + psi_COPY_path + '\n',]),
+ [], 'merge', '-c-7', sbox.repo_url + '/A/D/H/psi@7', psi_COPY_path)
+ sbox.simple_commit(message='merge -c3,7 from A/D/H,' \
+ 'reverse merge -c-7 from A/D/H/psi')
+
+ # Merge all available revisions from A/D/H to A_COPY/D/H. This merge
+ # ultimately tries to delete A_COPY/D/H/psi, but first it should merge
+ # r7 to A_COPY/D/H/psi, since that is one of the available revisions.
+ # Then when merging the deletion of A_COPY/D/H/psi in r8 the file will
+ # be identical to the deleted source A/D/H/psi and the deletion will
+ # succeed.
+ #
+ # Update WC before merging so mergeinfo elision and inheritance
+ # occur smoothly.
+ svntest.main.run_svn(None, 'up', wc_dir)
+ expected_output = wc.State(H_COPY_path, {
+ 'omega' : Item(status='U '),
+ 'psi' : Item(status='D ', prev_status='U '),
+ })
+ expected_mergeinfo_output = wc.State(H_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(H_COPY_path, {
+ })
+ expected_status = wc.State(H_COPY_path, {
+ '' : Item(status=' M', wc_rev=9),
+ 'psi' : Item(status='D ', wc_rev=9),
+ 'omega' : Item(status='M ', wc_rev=9),
+ 'chi' : Item(status=' ', wc_rev=9),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:2-9'}),
+ 'omega' : Item("New content"),
+ 'chi' : Item("This is the file 'chi'.\n"),
+ })
+ expected_skip = wc.State(H_COPY_path, { })
+
+ svntest.actions.run_and_verify_merge(H_COPY_path, None, None,
+ sbox.repo_url + '/A/D/H', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status, expected_skip,
+ [], True, False)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def no_self_referential_filtering_on_added_path(sbox):
+ "no self referential filtering on added path"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ C_COPY_path = sbox.ospath('A_COPY/C')
+ A_path = sbox.ospath('A')
+ C_path = sbox.ospath('A/C')
+ A_COPY_2_path = sbox.ospath('A_COPY_2')
+
+ # r1-r7: Setup a 'trunk' and two 'branches'.
+ wc_disk, wc_status = set_up_branch(sbox, False, 2)
+
+ # r8: Make a prop change on A_COPY/C.
+ svntest.actions.run_and_verify_svn(["property 'propname' set on '" +
+ C_COPY_path + "'\n"], [],
+ 'ps', 'propname', 'propval',
+ C_COPY_path)
+ expected_output = svntest.wc.State(wc_dir,
+ {'A_COPY/C' : Item(verb='Sending')})
+ wc_status.tweak('A_COPY/C', wc_rev=8)
+ wc_disk.tweak("A_COPY/C",
+ props={'propname' : 'propval'})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+
+ # r9: Merge r8 from A_COPY to A.
+ #
+ # Update first to avoid an out of date error.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [], 'up',
+ wc_dir)
+ wc_status.tweak(wc_rev=8)
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[8]],
+ [' U ' + C_path + '\n',
+ ' U ' + A_path + '\n',]),
+ [], 'merge', '-c8', sbox.repo_url + '/A_COPY', A_path)
+ expected_output = svntest.wc.State(wc_dir,
+ {'A' : Item(verb='Sending'),
+ 'A/C' : Item(verb='Sending')})
+ wc_status.tweak('A', 'A/C', wc_rev=9)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+
+ # Keep the expected disk tree in sync with the committed prop changes.
+ wc_disk.tweak("A/C",
+ props={'propname' : 'propval'})
+ wc_disk.tweak("A",
+ props={SVN_PROP_MERGEINFO : '/A_COPY:8'})
+
+ # r10: Move A/C to A/C_MOVED.
+ # NOTE(review): the log message below says "Copy" although this is a
+ # move; harmless, since nothing verifies the log text, but misleading.
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 10.\n'],
+ [], 'move',
+ sbox.repo_url + '/A/C',
+ sbox.repo_url + '/A/C_MOVED',
+ '-m', 'Copy A/C to A/C_MOVED')
+ svntest.actions.run_and_verify_svn(None, [], 'up',
+ wc_dir)
+
+ # Now try to merge all available revisions from A to A_COPY_2.
+ # This should try to add the directory A_COPY_2/C_MOVED which has
+ # explicit mergeinfo. This should not break self-referential mergeinfo
+ # filtering logic...in fact there is no reason to even attempt such
+ # filtering since the file is *new*.
+
+ expected_output = wc.State(A_COPY_2_path, {
+ '' : Item(status=' U'),
+ 'B/E/beta' : Item(status='U '),
+ 'D/G/rho' : Item(status='U '),
+ 'D/H/psi' : Item(status='U '),
+ 'D/H/omega' : Item(status='U '),
+ 'C' : Item(status='D '),
+ 'C_MOVED' : Item(status='A '),
+ })
+ # Why is C_MOVED notified as ' G' rather than ' U'? C_MOVED was
+ # added by the merge and there is only a single editor drive, so
+ # how can any prop changes be merged to it? The answer is that
+ # the merge code does some quiet housekeeping, merging C_MOVED's
+ # inherited mergeinfo into its incoming mergeinfo, see
+ # http://subversion.tigris.org/issues/show_bug.cgi?id=4309
+ # This test is not covering issue #4309 so we let the current
+ # behavior pass.
+ expected_mergeinfo_output = wc.State(A_COPY_2_path, {
+ '' : Item(status=' G'),
+ 'C_MOVED' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(A_COPY_2_path, {
+ })
+ expected_A_COPY_2_status = wc.State(A_COPY_2_path, {
+ '' : Item(status=' M', wc_rev=10),
+ 'B' : Item(status=' ', wc_rev=10),
+ 'mu' : Item(status=' ', wc_rev=10),
+ 'B/E' : Item(status=' ', wc_rev=10),
+ 'B/E/alpha' : Item(status=' ', wc_rev=10),
+ 'B/E/beta' : Item(status='M ', wc_rev=10),
+ 'B/lambda' : Item(status=' ', wc_rev=10),
+ 'B/F' : Item(status=' ', wc_rev=10),
+ 'C' : Item(status='D ', wc_rev=10),
+ 'C_MOVED' : Item(status='A ', wc_rev='-', copied='+'),
+ 'D' : Item(status=' ', wc_rev=10),
+ 'D/G' : Item(status=' ', wc_rev=10),
+ 'D/G/pi' : Item(status=' ', wc_rev=10),
+ 'D/G/rho' : Item(status='M ', wc_rev=10),
+ 'D/G/tau' : Item(status=' ', wc_rev=10),
+ 'D/gamma' : Item(status=' ', wc_rev=10),
+ 'D/H' : Item(status=' ', wc_rev=10),
+ 'D/H/chi' : Item(status=' ', wc_rev=10),
+ 'D/H/psi' : Item(status='M ', wc_rev=10),
+ 'D/H/omega' : Item(status='M ', wc_rev=10),
+ })
+ expected_A_COPY_2_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:3-10\n/A_COPY:8'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ # What's up with the mergeinfo
+ 'C_MOVED' : Item(props={SVN_PROP_MERGEINFO : '/A/C_MOVED:10\n' +
+ '/A_COPY/C:8\n' +
+ '/A_COPY/C_MOVED:8',
+ 'propname' : 'propval'}),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_A_COPY_2_skip = wc.State(A_COPY_2_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_2_path, None, None,
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_A_COPY_2_disk,
+ expected_A_COPY_2_status,
+ expected_A_COPY_2_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+# Test for issue #3324
+# http://subversion.tigris.org/issues/show_bug.cgi?id=3324
+@Issue(3324)
+@SkipUnless(server_has_mergeinfo)
+def merge_range_prior_to_rename_source_existence(sbox):
+ "merge prior to rename src existence still dels src"
+
+ # Replicate a merge bug found while synching up a feature branch on the
+ # Subversion repository with trunk. See r874121 of
+ # http://svn.apache.org/repos/asf/subversion/branches/ignore-mergeinfo, in which
+ # a move was merged to the target, but the delete half of the move
+ # didn't occur.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ nu_path = sbox.ospath('A/D/H/nu')
+ nu_moved_path = sbox.ospath('A/D/H/nu_moved')
+ A_path = sbox.ospath('A')
+ alpha_path = sbox.ospath('A/B/E/alpha')
+ A_COPY_path = sbox.ospath('A_COPY')
+ A_COPY_2_path = sbox.ospath('A_COPY_2')
+ B_COPY_path = sbox.ospath('A_COPY/B')
+ B_COPY_2_path = sbox.ospath('A_COPY_2/B')
+ alpha_COPY_path = sbox.ospath('A_COPY/B/E/alpha')
+ beta_COPY_path = sbox.ospath('A_COPY/B/E/beta')
+ gamma_COPY_path = sbox.ospath('A_COPY/D/gamma')
+ rho_COPY_path = sbox.ospath('A_COPY/D/G/rho')
+ omega_COPY_path = sbox.ospath('A_COPY/D/H/omega')
+ psi_COPY_path = sbox.ospath('A_COPY/D/H/psi')
+ nu_COPY_path = sbox.ospath('A_COPY/D/H/nu')
+
+ # Setup our basic 'trunk' and 'branch':
+ # r2 - Copy A to A_COPY
+ # r3 - Copy A to A_COPY_2
+ # r4 - Text change to A/D/H/psi
+ # r5 - Text change to A/D/G/rho
+ # r6 - Text change to A/B/E/beta
+ # r7 - Text change to A/D/H/omega
+ wc_disk, wc_status = set_up_branch(sbox, False, 2)
+
+ # r8 - Text change to A/B/E/alpha
+ svntest.main.file_write(alpha_path, "New content")
+ wc_status.tweak('A/B/E/alpha', wc_rev=8)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Text change', wc_dir)
+
+ # r9 - Add the file A/D/H/nu and make another change to A/B/E/alpha.
+ svntest.main.file_write(alpha_path, "Even newer content")
+ svntest.main.file_write(nu_path, "This is the file 'nu'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'add', nu_path)
+ expected_output = wc.State(wc_dir,
+ {'A/D/H/nu' : Item(verb='Adding'),
+ 'A/B/E/alpha' : Item(verb='Sending')})
+ wc_status.add({'A/D/H/nu' : Item(status=' ', wc_rev=9)})
+ wc_status.tweak('A/B/E/alpha', wc_rev=9)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+
+ # r10 - Merge all available revisions (i.e. -r1:9) from A to A_COPY.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [], 'up',
+ wc_dir)
+ wc_status.tweak(wc_rev=9)
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[2,9]],
+ ['A ' + nu_COPY_path + '\n',
+ 'U ' + alpha_COPY_path + '\n',
+ 'U ' + beta_COPY_path + '\n',
+ 'U ' + rho_COPY_path + '\n',
+ 'U ' + omega_COPY_path + '\n',
+ 'U ' + psi_COPY_path + '\n',
+ ' U ' + A_COPY_path + '\n',]),
+ [], 'merge', sbox.repo_url + '/A', A_COPY_path)
+ expected_output = wc.State(wc_dir,
+ {'A_COPY' : Item(verb='Sending'),
+ 'A_COPY/D/H/nu' : Item(verb='Adding'),
+ 'A_COPY/B/E/alpha' : Item(verb='Sending'),
+ 'A_COPY/B/E/beta' : Item(verb='Sending'),
+ 'A_COPY/D/G/rho' : Item(verb='Sending'),
+ 'A_COPY/D/H/omega' : Item(verb='Sending'),
+ 'A_COPY/D/H/psi' : Item(verb='Sending')})
+ wc_status.tweak('A_COPY',
+ 'A_COPY/B/E/alpha',
+ 'A_COPY/B/E/beta',
+ 'A_COPY/D/G/rho',
+ 'A_COPY/D/H/omega',
+ 'A_COPY/D/H/psi',
+ wc_rev=10)
+ wc_status.add({'A_COPY/D/H/nu' : Item(status=' ', wc_rev=10)})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+
+ # r11 - Reverse merge -r9:1 from A/B to A_COPY/B
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(10), [], 'up',
+ wc_dir)
+ wc_status.tweak(wc_rev=10)
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[9,2]], ['U ' + alpha_COPY_path + '\n',
+ 'U ' + beta_COPY_path + '\n',
+ ' G ' + B_COPY_path + '\n',]),
+ [], 'merge', sbox.repo_url + '/A/B', B_COPY_path, '-r9:1')
+ expected_output = wc.State(wc_dir,
+ {'A_COPY/B' : Item(verb='Sending'),
+ 'A_COPY/B/E/alpha' : Item(verb='Sending'),
+ 'A_COPY/B/E/beta' : Item(verb='Sending')})
+ wc_status.tweak('A_COPY/B',
+ 'A_COPY/B/E/alpha',
+ 'A_COPY/B/E/beta',
+ wc_rev=11)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+
+ # r12 - Move A/D/H/nu to A/D/H/nu_moved
+ svntest.actions.run_and_verify_svn(["Committing transaction...\n",
+ "Committed revision 12.\n"], [],
+ 'move', sbox.repo_url + '/A/D/H/nu',
+ sbox.repo_url + '/A/D/H/nu_moved',
+ '-m', 'Move nu to nu_moved')
+ expected_output = svntest.verify.UnorderedOutput(
+ ["Updating '%s':\n" % (wc_dir),
+ "D " + nu_path + "\n",
+ "A " + nu_moved_path + "\n",
+ "Updated to revision 12.\n"],
+ )
+ svntest.actions.run_and_verify_svn(expected_output,
+ [], 'up', wc_dir)
+
+ # Now merge -r7:12 from A to A_COPY.
+ # A_COPY needs only -r10:12, which amounts to the rename of nu.
+ # The subtree A_COPY/B needs the entire range -r7:12 because of
+ # the reverse merge we performed in r11; the only operative change
+ # here is the text mod to alpha made in r9.
+ #
+ # This merge previously failed because the delete half of the A_COPY/D/H/nu
+ # to A_COPY/D/H/nu_moved move was reported in the notifications, but didn't
+ # actually happen.
+ expected_output = wc.State(A_COPY_path, {
+ 'B/E/alpha' : Item(status='U '),
+ 'D/H/nu' : Item(status='D '),
+ 'D/H/nu_moved' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'B' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=12),
+ 'B' : Item(status=' M', wc_rev=12),
+ 'mu' : Item(status=' ', wc_rev=12),
+ 'B/E' : Item(status=' ', wc_rev=12),
+ 'B/E/alpha' : Item(status='M ', wc_rev=12),
+ 'B/E/beta' : Item(status=' ', wc_rev=12),
+ 'B/lambda' : Item(status=' ', wc_rev=12),
+ 'B/F' : Item(status=' ', wc_rev=12),
+ 'C' : Item(status=' ', wc_rev=12),
+ 'D' : Item(status=' ', wc_rev=12),
+ 'D/G' : Item(status=' ', wc_rev=12),
+ 'D/G/pi' : Item(status=' ', wc_rev=12),
+ 'D/G/rho' : Item(status=' ', wc_rev=12),
+ 'D/G/tau' : Item(status=' ', wc_rev=12),
+ 'D/gamma' : Item(status=' ', wc_rev=12),
+ 'D/H' : Item(status=' ', wc_rev=12),
+ 'D/H/nu' : Item(status='D ', wc_rev=12),
+ 'D/H/nu_moved' : Item(status='A ', wc_rev='-', copied='+'),
+ 'D/H/chi' : Item(status=' ', wc_rev=12),
+ 'D/H/psi' : Item(status=' ', wc_rev=12),
+ 'D/H/omega' : Item(status=' ', wc_rev=12),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:2-12'}),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B' : Item(props={SVN_PROP_MERGEINFO : '/A/B:8-12'}),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("Even newer content"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/nu_moved' : Item("This is the file 'nu'.\n"),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State(A_COPY_path, {})
+ svntest.actions.run_and_verify_merge(A_COPY_path, 7, 12,
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Merge -r7:12 from A to A_COPY', wc_dir)
+
+ # Now run a similar scenario as above on the second branch, but with
+ # a reverse merge this time.
+ #
+ # r14 - Merge all available revisions from A/B to A_COPY_B and then merge
+ # -r2:9 from A to A_COPY_2. Among other things, this adds A_COPY_2/D/H/nu
+ # and leaves us with mergeinfo on the A_COPY_2 branch of:
+ #
+ # Properties on 'A_COPY_2':
+ # svn:mergeinfo
+ # /A:3-9
+ # Properties on 'A_COPY_2\B':
+ # svn:mergeinfo
+ # /A/B:3-13
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(13), [], 'up',
+ wc_dir)
+ svntest.actions.run_and_verify_svn(None, # Don't check stdout, we test this
+ # type of merge to death elsewhere.
+ [], 'merge', sbox.repo_url + '/A/B',
+ B_COPY_2_path)
+ svntest.actions.run_and_verify_svn(None,[], 'merge', '-r', '2:9',
+ sbox.repo_url + '/A', A_COPY_2_path)
+ svntest.actions.run_and_verify_svn(
+ None, [], 'ci', '-m',
+ 'Merge all from A/B to A_COPY_2/B\nMerge -r2:9 from A to A_COPY_2',
+ wc_dir)
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(14), [], 'up',
+ wc_dir)
+
+ # Now reverse merge -r13:7 from A to A_COPY_2.
+ #
+ # Recall:
+ #
+ # >svn log -r8:13 ^/A -v
+ # ------------------------------------------------------------------------
+ # r8 | jrandom | 2010-10-14 11:25:59 -0400 (Thu, 14 Oct 2010) | 1 line
+ # Changed paths:
+ # M /A/B/E/alpha
+ #
+ # Text change
+ # ------------------------------------------------------------------------
+ # r9 | jrandom | 2010-10-14 11:25:59 -0400 (Thu, 14 Oct 2010) | 1 line
+ # Changed paths:
+ # M /A/B/E/alpha
+ # A /A/D/H/nu
+ #
+ # log msg
+ # ------------------------------------------------------------------------
+ # r12 | jrandom | 2010-10-14 11:26:01 -0400 (Thu, 14 Oct 2010) | 1 line
+ # Changed paths:
+ # D /A/D/H/nu
+ # A /A/D/H/nu_moved (from /A/D/H/nu:11)
+ #
+ # Move nu to nu_moved
+ # ------------------------------------------------------------------------
+ #
+ # We can only reverse merge changes from the explicit mergeinfo or
+ # natural history of a target, but since all of these changes intersect with
+ # the target's explicit mergeinfo (including subtrees), all should be
+ # reverse merged, including the deletion of A_COPY/D/H/nu. Like the forward
+ # merge performed earlier, this test previously failed when A_COPY/D/H/nu
+ # was reported as deleted, but still remained as a versioned item in the WC.
+ expected_output = wc.State(A_COPY_2_path, {
+ 'B/E/alpha' : Item(status='U '),
+ 'D/H/nu' : Item(status='D '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_2_path, {
+ '' : Item(status=' U'),
+ 'B' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_2_path, {
+ 'B' : Item(status=' U'),
+ })
+ expected_status = wc.State(A_COPY_2_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' M'),
+ 'mu' : Item(status=' '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status='M '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' '),
+ 'D/H/nu' : Item(status='D '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=14)
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:3-7'}),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B' : Item(),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State(A_COPY_path, {})
+ svntest.actions.run_and_verify_merge(A_COPY_2_path, 13, 7,
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+
+#----------------------------------------------------------------------
+def set_up_natural_history_gap(sbox):
+ '''Starting with standard greek tree, do the following:
+ r2 - A/D/H/psi
+ r3 - A/D/G/rho
+ r4 - A/B/E/beta
+ r5 - A/D/H/omega
+ r6 - Delete A
+ r7 - "Resurrect" A, by copying A@2 to A
+ r8 - Copy A to A_COPY
+ r9 - Text mod to A/D/gamma
+ Lastly it updates the WC to r9.
+ All text mods set file contents to "New content".
+ Return (expected_disk, expected_status).'''
+
+ # r1: Create a standard greek tree.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # r2-5: Make some changes under 'A' (no branches yet).
+ wc_disk, wc_status = set_up_branch(sbox, False, 0)
+
+ # Some paths we'll care about.
+ A_COPY_path = sbox.ospath('A_COPY')
+ gamma_path = sbox.ospath('A/D/gamma')
+
+ # r6: Delete 'A'
+ exit_code, out, err = svntest.actions.run_and_verify_svn(
+ ["Committing transaction...\n",
+ "Committed revision 6.\n"], [],
+ 'delete', sbox.repo_url + '/A', '-m', 'Delete A')
+
+ # r7: Resurrect 'A' by copying 'A@2' to 'A'.
+ exit_code, out, err = svntest.actions.run_and_verify_svn(
+ ["Committing transaction...\n",
+ "Committed revision 7.\n"], [],
+ 'copy', sbox.repo_url + '/A@2', sbox.repo_url + '/A',
+ '-m', 'Resurrect A from A@2')
+
+ # r8: Branch the resurrected 'A' to 'A_COPY'.
+ exit_code, out, err = svntest.actions.run_and_verify_svn(
+ ["Committing transaction...\n",
+ "Committed revision 8.\n"], [],
+ 'copy', sbox.repo_url + '/A', sbox.repo_url + '/A_COPY',
+ '-m', 'Copy A to A_COPY')
+
+ # Update to bring all the repos side changes down.
+ exit_code, out, err = svntest.actions.run_and_verify_svn(None, [],
+ 'up', wc_dir)
+ wc_status.add({
+ "A_COPY/B" : Item(status=' '),
+ "A_COPY/B/lambda" : Item(status=' '),
+ "A_COPY/B/E" : Item(status=' '),
+ "A_COPY/B/E/alpha" : Item(status=' '),
+ "A_COPY/B/E/beta" : Item(status=' '),
+ "A_COPY/B/F" : Item(status=' '),
+ "A_COPY/mu" : Item(status=' '),
+ "A_COPY/C" : Item(status=' '),
+ "A_COPY/D" : Item(status=' '),
+ "A_COPY/D/gamma" : Item(status=' '),
+ "A_COPY/D/G" : Item(status=' '),
+ "A_COPY/D/G/pi" : Item(status=' '),
+ "A_COPY/D/G/rho" : Item(status=' '),
+ "A_COPY/D/G/tau" : Item(status=' '),
+ "A_COPY/D/H" : Item(status=' '),
+ "A_COPY/D/H/chi" : Item(status=' '),
+ "A_COPY/D/H/omega" : Item(status=' '),
+ "A_COPY/D/H/psi" : Item(status=' '),
+ "A_COPY" : Item(status=' ')})
+ wc_status.tweak(wc_rev=8)
+
+ # r9: Make a text change to 'A/D/gamma'.
+ svntest.main.file_write(gamma_path, "New content")
+ expected_output = wc.State(wc_dir, {'A/D/gamma' : Item(verb='Sending')})
+ wc_status.tweak('A/D/gamma', wc_rev=9)
+
+ # Update the WC to a uniform revision.
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [],
+ 'up', wc_dir)
+ return wc_disk, wc_status
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def dont_merge_gaps_in_history(sbox):
+ "mergeinfo aware merges ignore natural history gaps"
+
+ ## See http://svn.haxx.se/dev/archive-2008-11/0618.shtml ##
+
+ wc_dir = sbox.wc_dir
+
+ # Create a branch with gaps in its natural history.
+ set_up_natural_history_gap(sbox)
+
+ # Some paths we'll care about.
+ A_COPY_path = sbox.ospath('A_COPY')
+
+ # Now merge all available changes from 'A' to 'A_COPY'. The only
+ # available revisions are r8 and r9. Only r9 effects the source/target
+ # so this merge should change 'A/D/gamma' from r9. The fact that 'A_COPY'
+ # has 'broken' natural history, i.e.
+ #
+ # /A:2,7 <-- Recall 'A@7' was copied from 'A@2'.
+ # /A_COPY:8-9
+ #
+ # should have no impact, but currently this fact is causing a failure:
+ #
+ # >svn merge %url127%/A merge_tests-127\A_COPY
+ # ..\..\..\subversion\libsvn_repos\reporter.c:1162: (apr_err=160005)
+ # svn: Target path '/A' does not exist.
+ expected_output = wc.State(A_COPY_path, {
+ 'D/gamma' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status=' '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status='M '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=9)
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:8-9'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("New content"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, None, None,
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+# Test for issue #3432 'Merge can record mergeinfo from natural history
+# gaps'. See http://subversion.tigris.org/issues/show_bug.cgi?id=3432
+@Issue(3432)
+@SkipUnless(server_has_mergeinfo)
+def handle_gaps_in_implicit_mergeinfo(sbox):
+ "correctly consider natural history gaps"
+
+ wc_dir = sbox.wc_dir
+
+ # Create a branch with gaps in its natural history.
+ #
+ # r1--------r2--------r3--------r4--------r5--------r6
+ # Add 'A' edit edit edit edit Delete A
+ # psi rho beta omega
+ # |
+ # V
+ # r7--------r9----------------->
+ # Rez 'A' edit
+ # | gamma
+ # |
+ # V
+ # r8--------------------------->
+ # Copy 'A@7' to
+ # 'A_COPY'
+ #
+ expected_disk, expected_status = set_up_natural_history_gap(sbox)
+
+ # Some paths we'll care about.
+ A_COPY_path = sbox.ospath('A_COPY')
+
+ # Merge r4 to 'A_COPY' from A@4, which is *not* part of A_COPY's history.
+ expected_output = wc.State(A_COPY_path, {
+ 'B/E/beta' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status=' '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status='M '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=9)
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:4'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"), # From the merge of A@4
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"), # From A@2
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, 3, 4,
+ sbox.repo_url + '/A@4', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Now reverse merge -r9:2 from 'A@HEAD' to 'A_COPY'. This should be
+ # a no-op since the only operative change made on 'A@HEAD' between r2:9
+ # is the text mod to 'A/D/gamma' made in r9, but since that was after
+  # 'A_COPY' was copied from 'A' and that change was never merged, we don't
+ # try to reverse merge it.
+ #
+ # Also, the mergeinfo recorded by the previous merge, i.e. '/A:4', should
+ # *not* be removed! A@4 is not on the same line of history as 'A@9'.
+ expected_output = wc.State(A_COPY_path, {})
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' G'),
+ })
+ svntest.actions.run_and_verify_merge(A_COPY_path, 9, 2,
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Now merge all available revisions from 'A' to 'A_COPY'.
+ # The mergeinfo '/A:4' on 'A_COPY' should have no impact on this merge
+ # since it refers to another line of history. Since 'A_COPY' was copied
+ # from 'A@7' the only available revisions are r8 and r9.
+ expected_output = wc.State(A_COPY_path, {
+ 'D/gamma' : Item(status='U '),
+ })
+ expected_status.tweak('D/gamma', status='M ')
+ expected_disk.tweak('D/gamma', contents='New content')
+ expected_disk.tweak('', props={SVN_PROP_MERGEINFO : '/A:4,8-9'})
+ svntest.actions.run_and_verify_merge(A_COPY_path, None, None,
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+# Test for issue #3323 'Mergeinfo deleted by a merge should disappear'
+@Issue(3323)
+@SkipUnless(server_has_mergeinfo)
+def mergeinfo_deleted_by_a_merge_should_disappear(sbox):
+ "mergeinfo deleted by a merge should disappear"
+
+
+ # r1: Create a greek tree.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ D_COPY_path = sbox.ospath('A_COPY/D')
+ A_COPY_path = sbox.ospath('A_COPY')
+ A_COPY_2_path = sbox.ospath('A_COPY_2')
+
+ # r2 - r6: Copy A to A_COPY and then make some text changes under A.
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ # r7: Merge all available revisions from A/D to A_COPY/D, this creates
+ # mergeinfo on A_COPY/D.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, # Don't check stdout, we test this
+ # type of merge to death elsewhere.
+ [], 'merge', sbox.repo_url + '/A/D',
+ D_COPY_path)
+ svntest.actions.run_and_verify_svn(
+ None, [], 'ci', '-m',
+ 'Merge all available revisions from A/D to A_COPY/D', wc_dir)
+
+  # r8: Copy A_COPY to A_COPY_2, this carries the mergeinfo on A_COPY/D
+ # to A_COPY_2/D.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None,[],
+ 'copy', A_COPY_path, A_COPY_2_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Copy A_COPY to A_COPY_2', wc_dir)
+
+ # r9: Propdel the mergeinfo on A_COPY/D.
+ svntest.actions.run_and_verify_svn(None,[],
+ 'pd', SVN_PROP_MERGEINFO, D_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Propdel the mergeinfo on A_COPY/D',
+ wc_dir)
+
+ # r10: Merge r5 from A to A_COPY_2 so the latter gets some explicit
+ # mergeinfo.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'merge', '-c5',
+ sbox.repo_url + '/A', A_COPY_2_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Merge r5 from A to A_COPY_2', wc_dir)
+
+ # Now merge r9 from A_COPY to A_COPY_2. Since the merge itself cleanly
+ # removes all explicit mergeinfo from A_COPY_2/D, we should not set any
+ # mergeinfo on that subtree describing the merge.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_output = wc.State(A_COPY_2_path, {
+ 'D' : Item(status=' U'),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_2_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_2_path, {
+ })
+ expected_status = wc.State(A_COPY_2_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status=' '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' M'),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=10)
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:5\n/A_COPY:9'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_2_path, '8', '9',
+ sbox.repo_url + '/A_COPY', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+# File merge optimization caused segfault during noop file merge
+# when multiple ranges are eligible for merge, see
+# http://svn.haxx.se/dev/archive-2009-05/0363.shtml
+@SkipUnless(server_has_mergeinfo)
+def noop_file_merge(sbox):
+ "noop file merge does not segfault"
+
+ # r1: Create a greek tree.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ A_COPY_path = sbox.ospath('A_COPY')
+ beta_COPY_path = sbox.ospath('A_COPY/B/E/beta')
+ chi_COPY_path = sbox.ospath('A_COPY/D/H/chi')
+
+ # r2 - r6: Copy A to A_COPY and then make some text changes under A.
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ # Merge r5 from A to A_COPY and commit as r7. This will split the
+ # eligible ranges to be merged to A_COPY/D/H/chi into two discrete
+ # sets: r1-4 and r5-HEAD
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[5]],
+ ['U ' + beta_COPY_path + '\n',
+ ' U ' + A_COPY_path + '\n',]),
+ [], 'merge', '-c5', sbox.repo_url + '/A', A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'commit', '-m',
+ 'Merge r5 from A to A_COPY',
+ wc_dir)
+
+ # Update working copy to allow full inheritance and elision.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(7), [],
+ 'up', wc_dir)
+
+ # Merge all available revisions from A/D/H/chi to A_COPY/D/H/chi.
+ # There are no operative changes in the source, so this should
+ # not produce any output other than mergeinfo updates on
+ # A_COPY/D/H/chi. This is where the segfault occurred.
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ sbox.repo_url + '/A/D/H/chi',
+ chi_COPY_path)
+ svntest.actions.run_and_verify_svn([' M ' + chi_COPY_path + '\n'],
+ [], 'st', chi_COPY_path)
+ svntest.actions.run_and_verify_svn(['/A/D/H/chi:2-7\n'],
+ [], 'pg', SVN_PROP_MERGEINFO,
+ chi_COPY_path)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+@Issue(2690)
+def copy_then_replace_via_merge(sbox):
+ "copy then replace via merge"
+ # Testing issue #2690 with deleted/added/replaced files and subdirs.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ j = os.path.join
+
+ A = j(wc_dir, 'A')
+ AJ = j(wc_dir, 'A', 'J')
+ AJK = j(AJ, 'K')
+ AJL = j(AJ, 'L')
+ AJM = j(AJ, 'M')
+ AJ_sigma = j(AJ, 'sigma')
+ AJ_theta = j(AJ, 'theta')
+ AJ_omega = j(AJ, 'omega')
+ AJK_zeta = j(AJK, 'zeta')
+ AJL_zeta = j(AJL, 'zeta')
+ AJM_zeta = j(AJM, 'zeta')
+ branch = j(wc_dir, 'branch')
+ branch_J = j(wc_dir, 'branch', 'J')
+ url_A = sbox.repo_url + '/A'
+ url_branch = sbox.repo_url + '/branch'
+
+ # Create a branch.
+ main.run_svn(None, 'cp', url_A, url_branch, '-m', 'create branch') # r2
+
+ # Create a tree J in A.
+ os.makedirs(AJK)
+ os.makedirs(AJL)
+ main.file_append(AJ_sigma, 'new text')
+ main.file_append(AJ_theta, 'new text')
+ main.file_append(AJK_zeta, 'new text')
+ main.file_append(AJL_zeta, 'new text')
+ main.run_svn(None, 'add', AJ)
+ sbox.simple_commit(message='create tree J') # r3
+ main.run_svn(None, 'up', wc_dir)
+
+ # Copy J to the branch via merge
+ main.run_svn(None, 'merge', url_A, branch)
+ sbox.simple_commit(message='merge to branch') # r4
+ main.run_svn(None, 'up', wc_dir)
+
+ # In A, replace J with a slightly different tree
+ main.run_svn(None, 'rm', AJ)
+ sbox.simple_commit(message='rm AJ') # r5
+ main.run_svn(None, 'up', wc_dir)
+
+ os.makedirs(AJL)
+ os.makedirs(AJM)
+ main.file_append(AJ_theta, 'really new text')
+ main.file_append(AJ_omega, 'really new text')
+ main.file_append(AJL_zeta, 'really new text')
+ main.file_append(AJM_zeta, 'really new text')
+ main.run_svn(None, 'add', AJ)
+ sbox.simple_commit(message='create tree J again') # r6
+ main.run_svn(None, 'up', wc_dir)
+
+ # Run merge to replace /branch/J in one swell foop.
+ main.run_svn(None, 'merge', url_A, branch)
+
+ # Check status:
+ # sigma and K are deleted (not copied!)
+ # theta and L are replaced (deleted then copied-here)
+ # omega and M are copied-here
+ expected_status = wc.State(branch_J, {
+ '' : Item(status='R ', copied='+', wc_rev='-'),
+ 'sigma' : Item(status='D ', wc_rev=6),
+ 'K' : Item(status='D ', wc_rev=6),
+ 'K/zeta' : Item(status='D ', wc_rev=6),
+ 'theta' : Item(status=' ', copied='+', wc_rev='-'),
+ 'L' : Item(status=' ', copied='+', wc_rev='-'),
+ 'L/zeta' : Item(status=' ', copied='+', wc_rev='-'),
+ 'omega' : Item(status=' ', copied='+', wc_rev='-'),
+ 'M' : Item(status=' ', copied='+', wc_rev='-'),
+ 'M/zeta' : Item(status=' ', copied='+', wc_rev='-'),
+ })
+ actions.run_and_verify_status(branch_J, expected_status)
+
+ # Update and commit, just to make sure the WC isn't busted.
+ main.run_svn(None, 'up', branch_J)
+ expected_output = wc.State(branch_J, {
+ '' : Item(verb='Replacing'),
+ })
+ expected_status = wc.State(branch_J, {
+ '' : Item(status=' ', wc_rev=7),
+ 'theta' : Item(status=' ', wc_rev=7),
+ 'L' : Item(status=' ', wc_rev=7),
+ 'L/zeta' : Item(status=' ', wc_rev=7),
+ 'omega' : Item(status=' ', wc_rev=7),
+ 'M' : Item(status=' ', wc_rev=7),
+ 'M/zeta' : Item(status=' ', wc_rev=7),
+ })
+ actions.run_and_verify_commit(branch_J,
+ expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def record_only_merge(sbox):
+ "record only merge applies mergeinfo diffs"
+ # Scenario: set_up_branch() creates trunk 'A' and branch 'A_COPY' (r1-r6);
+ # r7 branches the branch to 'A2'; r8-r9 add and edit subtrees under 'A';
+ # r10-r11 merge those changes from 'A' to 'A_COPY'. Then a --record-only
+ # merge of r10-r11 from 'A_COPY' to 'A2' must apply *only* svn:mergeinfo
+ # property diffs, and only to paths that exist under 'A2' (see the long
+ # comment before the final run_and_verify_merge call below).
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ # Some paths we'll care about
+ nu_path = sbox.ospath('A/C/nu')
+ A_COPY_path = sbox.ospath('A_COPY')
+ A2_path = sbox.ospath('A2')
+ Z_path = sbox.ospath('A/B/Z')
+ Z_COPY_path = sbox.ospath('A_COPY/B/Z')
+ rho_COPY_path = sbox.ospath('A_COPY/D/G/rho')
+ omega_COPY_path = sbox.ospath('A_COPY/D/H/omega')
+ H_COPY_path = sbox.ospath('A_COPY/D/H')
+ nu_COPY_path = sbox.ospath('A_COPY/C/nu')
+
+ # r7 - Copy the branch A_COPY@2 to A2 and update the WC.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', A_COPY_path, A2_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'commit', '-m', 'Branch the branch',
+ wc_dir)
+ # r8 - Add A/C/nu and A/B/Z.
+ # Add a new file with mergeinfo in the foreign repos.
+ svntest.main.file_write(nu_path, "This is the file 'nu'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'add', nu_path)
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', Z_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'commit', '-m', 'Add subtrees',
+ wc_dir)
+
+ # r9 - Edit A/C/nu and add a random property on A/B/Z.
+ svntest.main.file_write(nu_path, "New content.\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'propname', 'propval', Z_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'commit', '-m', 'Subtree changes',
+ wc_dir)
+
+ # r10 - Merge r8 from A to A_COPY.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [], 'up',
+ wc_dir)
+ svntest.actions.run_and_verify_svn(expected_merge_output(
+ [[8]],
+ ['A ' + Z_COPY_path + '\n',
+ 'A ' + nu_COPY_path + '\n',
+ ' U ' + A_COPY_path + '\n',]),
+ [], 'merge', '-c8',
+ sbox.repo_url + '/A',
+ A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'commit', '-m', 'Root merge of r8',
+ wc_dir)
+
+ # r11 - Do several subtree merges:
+ #
+ # r4 from A/D/G/rho to A_COPY/D/G/rho
+ # r6 from A/D/H to A_COPY/D/H
+ # r9 from A/C/nu to A_COPY/C/nu
+ # r9 from A/B/Z to A_COPY/B/Z
+ svntest.actions.run_and_verify_svn(expected_merge_output(
+ [[4]],
+ ['U ' + rho_COPY_path + '\n',
+ ' U ' + rho_COPY_path + '\n',]),
+ [], 'merge', '-c4',
+ sbox.repo_url + '/A/D/G/rho',
+ rho_COPY_path)
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[6]],
+ ['U ' + omega_COPY_path + '\n',
+ ' U ' + H_COPY_path + '\n',]),
+ [], 'merge', '-c6', sbox.repo_url + '/A/D/H', H_COPY_path)
+ svntest.actions.run_and_verify_svn(expected_merge_output(
+ [[9]],
+ ['U ' + nu_COPY_path + '\n',
+ ' G ' + nu_COPY_path + '\n',]),
+ [], 'merge', '-c9',
+ sbox.repo_url + '/A/C/nu',
+ nu_COPY_path)
+ svntest.actions.run_and_verify_svn(expected_merge_output(
+ [[9]],
+ [' U ' + Z_COPY_path + '\n',
+ ' G ' + Z_COPY_path + '\n']),
+ [], 'merge', '-c9',
+ sbox.repo_url + '/A/B/Z',
+ Z_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'commit', '-m', 'Several subtree merges',
+ wc_dir)
+
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(11), [], 'up',
+ wc_dir)
+
+ # Now do a --record-only merge of r10 and r11 from A_COPY to A2.
+ #
+ # We only expect svn:mergeinfo changes to be applied to existing paths:
+ #
+ # From r10 the mergeinfo '/A:r8' is recorded on A_COPY.
+ #
+ # From r11 the mergeinfo of '/A/D/G/rho:r4' is recorded on A_COPY/D/G/rho
+ # and the mergeinfo of '/A/D/H:r6' is recorded on A_COPY/D/H. Rev 8 should
+ # also be recorded on both subtrees because explicit mergeinfo must be
+ # complete.
+ #
+ # The mergeinfo describing the merge source itself, '/A_COPY:10-11' should
+ # also be recorded on the root and the two subtrees.
+ #
+ # The mergeinfo changes from r10 to A_COPY/C/nu and A_COPY/B/Z cannot be
+ # applied because the corresponding paths don't exist under A2; this should
+ # not cause any problems.
+ expected_output = wc.State(A2_path, {
+ '' : Item(status=' U'),
+ 'D/G/rho' : Item(status=' U'),
+ 'D/H' : Item(status=' U'),
+ })
+ expected_mergeinfo_output = wc.State(A2_path, {
+ '' : Item(status=' G'),
+ 'D/H' : Item(status=' G'),
+ 'D/G/rho' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(A2_path, {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:8\n/A_COPY:10-11'}),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B' : Item(),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(props={SVN_PROP_MERGEINFO :
+ '/A/D/H:6,8\n/A_COPY/D/H:10-11'}),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n",
+ props={SVN_PROP_MERGEINFO :
+ '/A/D/G/rho:4,8\n/A_COPY/D/G/rho:10-11'}),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ })
+ expected_status = wc.State(A2_path, {
+ '' : Item(status=' M'),
+ 'mu' : Item(status=' '),
+ 'B' : Item(status=' '),
+ 'B/lambda' : Item(status=' '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' M'),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' M'),
+ 'D/G/tau' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=11)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_merge(A2_path, '9', '11',
+ sbox.repo_url + '/A_COPY', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False,
+ '--record-only', A2_path)
+
+#----------------------------------------------------------------------
+# Test for issue #3514 'svn merge --accept [ base | theirs-full ]
+# doesn't work'
+@Issue(3514)
+def merge_automatic_conflict_resolution(sbox):
+ "automatic conflict resolutions work with merge"
+ # Exercises 'svn merge --accept <option>' for every resolution option:
+ # postpone, mine-conflict, mine-full, theirs-conflict, theirs-full, base.
+ # NOTE: expected_disk and expected_status are shared across the sub-tests
+ # and mutated cumulatively via tweak(); a recursive revert restores the WC
+ # between each --accept variant.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox)
+
+
+ # Some paths we'll care about
+ A_COPY_path = sbox.ospath('A_COPY')
+ psi_COPY_path = sbox.ospath('A_COPY/D/H/psi')
+
+ # r7 - Make a change on A_COPY that will conflict with r3 on A
+ svntest.main.file_write(psi_COPY_path, "BASE.\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'commit', '-m', 'log msg', wc_dir)
+
+ # Set up our base expectations, we'll tweak accordingly for each option.
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'B' : Item(status=' ', wc_rev=2),
+ 'mu' : Item(status=' ', wc_rev=2),
+ 'B/E' : Item(status=' ', wc_rev=2),
+ 'B/E/alpha' : Item(status=' ', wc_rev=2),
+ 'B/E/beta' : Item(status=' ', wc_rev=2),
+ 'B/lambda' : Item(status=' ', wc_rev=2),
+ 'B/F' : Item(status=' ', wc_rev=2),
+ 'C' : Item(status=' ', wc_rev=2),
+ 'D' : Item(status=' ', wc_rev=2),
+ 'D/G' : Item(status=' ', wc_rev=2),
+ 'D/G/pi' : Item(status=' ', wc_rev=2),
+ 'D/G/rho' : Item(status=' ', wc_rev=2),
+ 'D/G/tau' : Item(status=' ', wc_rev=2),
+ 'D/gamma' : Item(status=' ', wc_rev=2),
+ 'D/H' : Item(status=' ', wc_rev=2),
+ 'D/H/chi' : Item(status=' ', wc_rev=2),
+ 'D/H/psi' : Item(status=' ', wc_rev=7),
+ 'D/H/omega' : Item(status=' ', wc_rev=2),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:3'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+
+ # Test --accept postpone
+ expected_output = wc.State(A_COPY_path, {'D/H/psi' : Item(status='C ')})
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_disk.tweak('D/H/psi', contents="<<<<<<< .working\n"
+ "BASE.\n"
+ "||||||| .merge-left.r2\n"
+ "This is the file 'psi'.\n"
+ "=======\n"
+ "New content>>>>>>> .merge-right.r3\n")
+ expected_status.tweak('D/H/psi', status='C ')
+ psi_conflict_support_files = ["psi\.working",
+ "psi\.merge-right\.r3",
+ "psi\.merge-left\.r2"]
+ svntest.actions.run_and_verify_merge(A_COPY_path, '2', '3',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True,
+ '--accept', 'postpone',
+ '--allow-mixed-revisions',
+ A_COPY_path,
+ extra_files=
+ list(psi_conflict_support_files))
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '--recursive', wc_dir)
+
+ # Test --accept mine-conflict and mine-full
+ ### TODO: Also test that the output has a 'Resolved' line for this path.
+ expected_output = wc.State(A_COPY_path, {'D/H/psi' : Item(status='C ')})
+ expected_disk.tweak('D/H/psi', contents="BASE.\n")
+ expected_status.tweak('D/H/psi', status=' ')
+ svntest.actions.run_and_verify_merge(A_COPY_path, '2', '3',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False,
+ '--accept', 'mine-conflict',
+ '--allow-mixed-revisions',
+ A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '--recursive', wc_dir)
+ svntest.actions.run_and_verify_merge(A_COPY_path, '2', '3',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False,
+ '--accept', 'mine-full',
+ '--allow-mixed-revisions',
+ A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '--recursive', wc_dir)
+
+ # Test --accept theirs-conflict and theirs-full
+ ### TODO: Also test that the output has a 'Resolved' line for this path.
+ expected_output = wc.State(A_COPY_path, {'D/H/psi' : Item(status='C ')})
+ expected_disk.tweak('D/H/psi', contents="New content")
+ expected_status.tweak('D/H/psi', status='M ')
+ svntest.actions.run_and_verify_merge(A_COPY_path, '2', '3',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False,
+ '--accept', 'theirs-conflict',
+ '--allow-mixed-revisions',
+ A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '--recursive', wc_dir)
+ svntest.actions.run_and_verify_merge(A_COPY_path, '2', '3',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False,
+ '--accept', 'theirs-full',
+ '--allow-mixed-revisions',
+ A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '--recursive', wc_dir)
+ # Test --accept base
+ ### TODO: Also test that the output has a 'Resolved' line for this path.
+ expected_output = wc.State(A_COPY_path, {'D/H/psi' : Item(status='C ')})
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_disk.tweak('D/H/psi', contents="This is the file 'psi'.\n")
+ expected_status.tweak('D/H/psi', status='M ')
+ svntest.actions.run_and_verify_merge(A_COPY_path, '2', '3',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False,
+ '--accept', 'base',
+ '--allow-mixed-revisions',
+ A_COPY_path)
+
+#----------------------------------------------------------------------
+# Test for issue #3440 'Skipped paths get incorrect override mergeinfo
+# during merge'.
+@SkipUnless(server_has_mergeinfo)
+@Issue(3440)
+def skipped_files_get_correct_mergeinfo(sbox):
+ "skipped files get correct mergeinfo set"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ A_COPY_path = sbox.ospath('A_COPY')
+ H_COPY_path = sbox.ospath('A_COPY/D/H')
+ psi_COPY_path = sbox.ospath('A_COPY/D/H/psi')
+ psi_path = sbox.ospath('A/D/H/psi')
+
+ # Setup our basic 'trunk' and 'branch':
+ # r2 - Copy A to A_COPY
+ # r3 - Text change to A/D/H/psi
+ # r4 - Text change to A/D/G/rho
+ # r5 - Text change to A/B/E/beta
+ # r6 - Text change to A/D/H/omega
+ wc_disk, wc_status = set_up_branch(sbox, False, 1)
+
+ # r7 Make another text change to A/D/H/psi
+ svntest.main.file_write(psi_path, "Even newer content")
+ # NOTE(review): expected_output assigned here is never checked -- the
+ # commit below goes through svntest.main.run_svn, which takes no
+ # expected-output argument; the variable is dead until reassigned later.
+ expected_output = wc.State(wc_dir, {'A/D/H/psi' : Item(verb='Sending')})
+ svntest.main.run_svn(None, 'commit', '-m', 'another change to A/D/H/psi',
+ wc_dir)
+
+ # Merge r3 from A to A_COPY, this will create explicit mergeinfo of
+ # '/A:3' on A_COPY. Commit this merge as r8.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[3]],
+ ['U ' + psi_COPY_path + '\n',
+ ' U ' + A_COPY_path + '\n',]),
+ [], 'merge', '-c3', sbox.repo_url + '/A', A_COPY_path)
+ svntest.main.run_svn(None, 'commit', '-m', 'initial merge', wc_dir)
+
+ # Update WC to uniform revision and then set the depth on A_COPY/D/H to
+ # empty. Then merge all available revisions from A to A_COPY.
+ # A_COPY/D/H/psi and A_COPY/D/H/omega are not present due to their
+ # parent's depth and should be reported as skipped. A_COPY/D/H should
+ # get explicit mergeinfo set on it reflecting what it previously inherited
+ # from A_COPY after the first merge, i.e. '/A/D/H:3', plus non-inheritable
+ # mergeinfo describing what was done during this merge,
+ # i.e. '/A/D/H:2*,4-8*'.
+ #
+ # Issue #3440 occurred when empty mergeinfo was set on A_COPY/D/H, making
+ # it appear that r3 was never merged.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [],
+ 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '--set-depth=empty', H_COPY_path)
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status=' '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status='M '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status='M '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' M'),
+ })
+ expected_status.tweak(wc_rev=8)
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:2-8'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:2*,3,4-8*'}),
+ })
+ expected_skip = wc.State(
+ A_COPY_path,
+ {'D/H/psi' : Item(verb='Skipped missing target'),
+ 'D/H/omega' : Item(verb='Skipped missing target')})
+ expected_output = wc.State(A_COPY_path,
+ {'B/E/beta' : Item(status='U '),
+ 'D/G/rho' : Item(status='U ')})
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'D/H' : Item(status=' G'), # ' G' because override mergeinfo gets set
+ # on this, the root of a 'missing' subtree.
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ svntest.actions.run_and_verify_merge(A_COPY_path, None, None,
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+
+#----------------------------------------------------------------------
+# Test for issue #3115 'Case only renames resulting from merges don't
+# work or break the WC on case-insensitive file systems'.
+@Issue(3115)
+def committed_case_only_move_and_revert(sbox):
+ "committed case only move causes revert to fail"
+ # Merges case-only renames (mu -> MU, C -> c) made directly in the
+ # repository into a branch WC, commits, and verifies a recursive revert
+ # still works. Per issue #3115 the interesting failure mode occurs on
+ # case-insensitive file systems -- presumably the test is still valid but
+ # uninteresting on case-sensitive ones (TODO confirm).
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox, True)
+
+ # Some paths we'll care about
+ A_COPY_path = sbox.ospath('A_COPY')
+
+ # r3: A case-only file rename on the server
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 3.\n'],
+ [], 'move',
+ sbox.repo_url + '/A/mu',
+ sbox.repo_url + '/A/MU',
+ '-m', 'Move A/mu to A/MU')
+
+ # Now merge that rename into the WC
+ expected_output = wc.State(A_COPY_path, {
+ 'mu' : Item(status='D '),
+ 'MU' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'B' : Item(status=' ', wc_rev=2),
+ 'mu' : Item(status='D ', wc_rev=2),
+ 'MU' : Item(status='A ', wc_rev='-', copied='+'),
+ 'B/E' : Item(status=' ', wc_rev=2),
+ 'B/E/alpha' : Item(status=' ', wc_rev=2),
+ 'B/E/beta' : Item(status=' ', wc_rev=2),
+ 'B/lambda' : Item(status=' ', wc_rev=2),
+ 'B/F' : Item(status=' ', wc_rev=2),
+ 'C' : Item(status=' ', wc_rev=2),
+ 'D' : Item(status=' ', wc_rev=2),
+ 'D/G' : Item(status=' ', wc_rev=2),
+ 'D/G/pi' : Item(status=' ', wc_rev=2),
+ 'D/G/rho' : Item(status=' ', wc_rev=2),
+ 'D/G/tau' : Item(status=' ', wc_rev=2),
+ 'D/gamma' : Item(status=' ', wc_rev=2),
+ 'D/H' : Item(status=' ', wc_rev=2),
+ 'D/H/chi' : Item(status=' ', wc_rev=2),
+ 'D/H/psi' : Item(status=' ', wc_rev=2),
+ 'D/H/omega' : Item(status=' ', wc_rev=2),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:3'}),
+ 'B' : Item(),
+ 'MU' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, '2', '3',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False)
+
+ # Commit the merge
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A_COPY' : Item(verb='Sending'),
+ 'A_COPY/mu' : Item(verb='Deleting'),
+ 'A_COPY/MU' : Item(verb='Adding'),
+ })
+ wc_status.tweak('A_COPY', wc_rev=4)
+ wc_status.remove('A_COPY/mu')
+ wc_status.add({'A_COPY/MU': Item(status=' ', wc_rev=4)})
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status)
+
+ # In issue #3115 the WC gets corrupted and any subsequent revert
+ # attempts fail with this error:
+ # svn.exe revert -R "svn-test-work\working_copies\merge_tests-139"
+ # ..\..\..\subversion\svn\revert-cmd.c:81: (apr_err=2)
+ # ..\..\..\subversion\libsvn_client\revert.c:167: (apr_err=2)
+ # ..\..\..\subversion\libsvn_client\revert.c:103: (apr_err=2)
+ # ..\..\..\subversion\libsvn_wc\adm_ops.c:2232: (apr_err=2)
+ # ..\..\..\subversion\libsvn_wc\adm_ops.c:2232: (apr_err=2)
+ # ..\..\..\subversion\libsvn_wc\adm_ops.c:2232: (apr_err=2)
+ # ..\..\..\subversion\libsvn_wc\adm_ops.c:2176: (apr_err=2)
+ # ..\..\..\subversion\libsvn_wc\adm_ops.c:2053: (apr_err=2)
+ # ..\..\..\subversion\libsvn_wc\adm_ops.c:1869: (apr_err=2)
+ # ..\..\..\subversion\libsvn_wc\workqueue.c:520: (apr_err=2)
+ # ..\..\..\subversion\libsvn_wc\workqueue.c:490: (apr_err=2)
+ # svn: Error restoring text for 'C:\SVN\src-trunk\Debug\subversion\tests
+ # \cmdline\svn-test-work\working_copies\merge_tests-139\A_COPY\MU'
+ svntest.actions.run_and_verify_svn([], [], 'revert', '-R', wc_dir)
+
+ # r5: A case-only directory rename on the server
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 5.\n'],
+ [], 'move',
+ sbox.repo_url + '/A/C',
+ sbox.repo_url + '/A/c',
+ '-m', 'Move A/C to A/c')
+ expected_output = wc.State(A_COPY_path, {
+ 'C' : Item(status='D '),
+ 'c' : Item(status='A '),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_disk.tweak('', props={SVN_PROP_MERGEINFO : '/A:3,5'})
+ expected_disk.add({'c' : Item()})
+ expected_disk.remove('C')
+ expected_status.tweak('MU', status=' ', wc_rev=4, copied=None)
+ expected_status.remove('mu')
+ expected_status.tweak('C', status='D ')
+ expected_status.tweak('', wc_rev=4)
+ expected_status.add({'c' : Item(status='A ', copied='+', wc_rev='-')})
+ # This merge succeeds. It used to leave a strange state, added with
+ # history but missing:
+ #
+ # M merge_tests-139\A_COPY
+ # ! + merge_tests-139\A_COPY\c
+ # R + merge_tests-139\A_COPY\C
+ svntest.actions.run_and_verify_merge(A_COPY_path, '4', '5',
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False,
+ '--allow-mixed-revisions', A_COPY_path)
+
+#----------------------------------------------------------------------
+# This is a test for issue #3221 'Unable to merge into working copy of
+# deleted branch'.
+@SkipUnless(server_has_mergeinfo)
+@Issue(3221)
+def merge_into_wc_for_deleted_branch(sbox):
+ "merge into WC of deleted branch should work"
+ # The branch A_COPY is deleted in the repository (r7) while a WC of it
+ # still exists; merging all available revisions from A into that WC must
+ # succeed rather than fail as described by issue #3221 (see the traceback
+ # quoted before the final merge below).
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Copy 'A' to 'A_COPY' then make some changes under 'A'
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ # Some paths we'll care about
+ A_COPY_path = sbox.ospath('A_COPY')
+ gamma_path = sbox.ospath('A/D/gamma')
+
+ # r7 - Delete the branch on the repository, obviously it still
+ # exists in our WC.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'delete', sbox.repo_url + '/A_COPY',
+ '-m', 'Delete A_COPY directly in repos')
+
+ # r8 - Make another change under 'A'.
+ svntest.main.file_write(gamma_path, "Content added after A_COPY deleted")
+ expected_output = wc.State(wc_dir, {'A/D/gamma' : Item(verb='Sending')})
+ svntest.main.run_svn(None, 'commit',
+ '-m', 'Change made on A after A_COPY was deleted',
+ wc_dir)
+
+ # Now merge all available revisions from A to A_COPY:
+ expected_output = wc.State(A_COPY_path, {
+ 'B/E/beta' : Item(status='U '),
+ 'D/G/rho' : Item(status='U '),
+ 'D/H/omega' : Item(status='U '),
+ 'D/H/psi' : Item(status='U '),
+ 'D/gamma' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status=' '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status='M '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status='M '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status='M '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status='M '),
+ 'D/H/omega' : Item(status='M '),
+ })
+ expected_status.tweak(wc_rev=2)
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:2-8'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("Content added after A_COPY deleted"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ # Issue #3221: Previously this merge failed with:
+ # ..\..\..\subversion\svn\util.c:900: (apr_err=160013)
+ # ..\..\..\subversion\libsvn_client\merge.c:9383: (apr_err=160013)
+ # ..\..\..\subversion\libsvn_client\merge.c:8029: (apr_err=160013)
+ # ..\..\..\subversion\libsvn_client\merge.c:7577: (apr_err=160013)
+ # ..\..\..\subversion\libsvn_client\merge.c:4132: (apr_err=160013)
+ # ..\..\..\subversion\libsvn_client\merge.c:3312: (apr_err=160013)
+ # ..\..\..\subversion\libsvn_client\ra.c:659: (apr_err=160013)
+ # ..\..\..\subversion\libsvn_repos\rev_hunt.c:696: (apr_err=160013)
+ # ..\..\..\subversion\libsvn_repos\rev_hunt.c:539: (apr_err=160013)
+ # ..\..\..\subversion\libsvn_fs_fs\tree.c:2818: (apr_err=160013)
+ # svn: File not found: revision 8, path '/A_COPY'
+ svntest.actions.run_and_verify_merge(A_COPY_path, None, None,
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+def foreign_repos_del_and_props(sbox):
+ "merge del and ps variants from a foreign repos"
+ # Creates a second, independent repository ('fgn') checked out at wc2_dir,
+ # then merges r0:1 and r1:2 of the *first* repository (sbox.repo_url) into
+ # wc2_dir -- i.e. foreign-repository merges. Verifies that deletes, copies
+ # and property sets all arrive as plain scheduled changes (adds/deletes
+ # without history) in the foreign WC, and that svn:eol-style props survive.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc2_dir = sbox.add_wc_path('wc2')
+
+ (r2_path, r2_url) = sbox.add_repo_path('fgn')
+ svntest.main.create_repos(r2_path)
+
+ svntest.actions.run_and_verify_svn(None, [], 'checkout',
+ r2_url, wc2_dir)
+
+ svntest.actions.run_and_verify_svn(None, [], 'propset',
+ 'svn:eol-style', 'native',
+ sbox.ospath('iota'))
+
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ sbox.ospath('A/D'),
+ sbox.ospath('D'))
+
+ svntest.actions.run_and_verify_svn(None, [], 'rm',
+ sbox.ospath('A/D'),
+ sbox.ospath('D/G'))
+
+ new_file = sbox.ospath('new-file')
+ svntest.main.file_write(new_file, 'new-file')
+ svntest.actions.run_and_verify_svn(None, [], 'add', new_file)
+
+ svntest.actions.run_and_verify_svn(None, [], 'propset',
+ 'svn:eol-style', 'native', new_file)
+
+ svntest.actions.run_and_verify_svn(None, [], 'commit', wc_dir,
+ '-m', 'changed')
+
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ sbox.repo_url, wc2_dir,
+ '-r', '0:1')
+
+ expected_status = svntest.actions.get_virginal_state(wc2_dir, 0)
+ expected_status.tweak(status='A ')
+ expected_status.add(
+ {
+ '' : Item(status=' ', wc_rev='0'),
+ })
+ svntest.actions.run_and_verify_status(wc2_dir, expected_status)
+
+ expected_status = svntest.actions.get_virginal_state(wc2_dir, 1)
+
+ svntest.actions.run_and_verify_svn(None, [], 'commit', wc2_dir,
+ '-m', 'Merged r1')
+
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ sbox.repo_url, wc2_dir,
+ '-r', '1:2', '--allow-mixed-revisions')
+
+ expected_status.tweak('A/D', 'A/D/G', 'A/D/G/rho', 'A/D/G/tau', 'A/D/G/pi',
+ 'A/D/gamma', 'A/D/H', 'A/D/H/psi', 'A/D/H/omega',
+ 'A/D/H/chi', status='D ')
+ expected_status.tweak(wc_rev='1')
+ expected_status.tweak('', wc_rev='0')
+ expected_status.tweak('iota', status=' M')
+
+ expected_status.add(
+ {
+ 'new-file' : Item(status='A ', wc_rev='0'),
+ 'D' : Item(status='A ', wc_rev='0'),
+ 'D/H' : Item(status='A ', wc_rev='0'),
+ 'D/H/omega' : Item(status='A ', wc_rev='0'),
+ 'D/H/psi' : Item(status='A ', wc_rev='0'),
+ 'D/H/chi' : Item(status='A ', wc_rev='0'),
+ 'D/gamma' : Item(status='A ', wc_rev='0'),
+ })
+
+ svntest.actions.run_and_verify_status(wc2_dir, expected_status)
+
+ expected_output = ["Properties on '%s':\n" % (os.path.join(wc2_dir, 'iota')),
+ " svn:eol-style\n",
+ "Properties on '%s':\n" % (os.path.join(wc2_dir, 'new-file')),
+ " svn:eol-style\n" ]
+ svntest.actions.run_and_verify_svn(expected_output, [], 'proplist',
+ os.path.join(wc2_dir, 'iota'),
+ os.path.join(wc2_dir, 'new-file'))
+
+#----------------------------------------------------------------------
+# Test for issue #3642 'immediate depth merges don't create proper subtree
+# mergeinfo'. See http://subversion.tigris.org/issues/show_bug.cgi?id=3642
+@Issue(3642)
+def immediate_depth_merge_creates_minimal_subtree_mergeinfo(sbox):
+ "no spurious mergeinfo from immediate depth merges"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ B_path = sbox.ospath('A/B')
+ B_COPY_path = sbox.ospath('A_COPY/B')
+
+
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Merge -c5 from A/B to A_COPY/B at --depth immediates.
+ # This should create only the minimum subtree mergeinfo
+ # required to describe the merge. This means that A_COPY/B/E gets
+ # non-inheritable mergeinfo for r5, because a full depth merge would
+ # affect that subtree. The other child of the merge target, A_COPY/B/F
+ # would never be affected by r5, so it doesn't need any explicit
+ # mergeinfo.
+ expected_output = wc.State(B_COPY_path, {})
+ expected_mergeinfo_output = wc.State(B_COPY_path, {
+ '' : Item(status=' U'),
+ 'E' : Item(status=' U'), # A_COPY/B/E would be affected by r5 if the
+ # merge was at infinite depth, so it needs
+ # non-inheritable override mergeinfo.
+ #'F' : Item(status=' U'), No override mergeinfo, r5 is
+ # inoperative on this child.
+ })
+ expected_elision_output = wc.State(B_COPY_path, {
+ })
+ expected_status = wc.State(B_COPY_path, {
+ '' : Item(status=' M'),
+ 'F' : Item(status=' '),
+ 'E' : Item(status=' M'),
+ 'E/alpha' : Item(status=' '),
+ 'E/beta' : Item(status=' '),
+ 'lambda' : Item(status=' '),
+
+ })
+ expected_status.tweak(wc_rev=6)
+ # Note the '*' marker in '/A/B/E:5*' below: non-inheritable mergeinfo,
+ # recorded only because the merge was depth-limited.
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:5'}),
+ 'E' : Item(props={SVN_PROP_MERGEINFO : '/A/B/E:5*'}),
+ 'E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'E/beta' : Item("This is the file 'beta'.\n"),
+ 'F' : Item(),
+ 'lambda' : Item("This is the file 'lambda'.\n")
+ })
+ expected_skip = wc.State(B_COPY_path, { })
+ svntest.actions.run_and_verify_merge(B_COPY_path, '4', '5',
+ sbox.repo_url + '/A/B', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True,
+ '--depth', 'immediates',
+ B_COPY_path)
+
+#----------------------------------------------------------------------
+# Test for issue #3646 'cyclic --record-only merges create self-referential
+# mergeinfo'
+@SkipUnless(server_has_mergeinfo)
+@Issue(3646)
+def record_only_merge_creates_self_referential_mergeinfo(sbox):
+ "merge creates self referential mergeinfo"
+
+ # Given a copy of trunk@M to branch, committed in r(M+1), if we
+ # --record-only merge the branch back to trunk with no revisions
+ # specified, then trunk gets self-referential mergeinfo recorded
+ # reflecting its entire natural history.
+
+ # Setup a standard greek tree in r1.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ mu_path = sbox.ospath('A/mu')
+ A_path = sbox.ospath('A')
+ A_branch_path = sbox.ospath('A-branch')
+
+ # Make a change to A/mu in r2.
+ svntest.main.file_write(mu_path, "Trunk edit\n")
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'trunk edit',
+ wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ # Copy A to A-branch in r3
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', A_path, A_branch_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci',
+ '-m', 'Branch A to A-branch', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Merge A-branch back to A. This should record the mergeinfo '/A-branch:3'
+ # on A.
+ # (Only the branch-creation revision r3 ends up in the recorded
+ # mergeinfo -- see expected_A_disk below -- not trunk's own history.)
+ expected_output = wc.State(A_path, {})
+ expected_mergeinfo_output = wc.State(A_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_path, {})
+ expected_A_status = wc.State(A_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status=' '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ })
+ expected_A_status.tweak(wc_rev=3)
+ expected_A_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A-branch:3'}),
+ 'B' : Item(),
+ 'mu' : Item("Trunk edit\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_A_skip = wc.State(A_path, {})
+ svntest.actions.run_and_verify_merge(A_path, None, None,
+ sbox.repo_url + '/A-branch', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_A_disk,
+ expected_A_status,
+ expected_A_skip,
+ [], True, True,
+ '--record-only', A_path)
+
+#----------------------------------------------------------------------
+# Test for issue #3657 'dav update report handler in skelta mode can cause
+# spurious conflicts'.
+@Issue(3657)
+def dav_skelta_mode_causes_spurious_conflicts(sbox):
+ "dav skelta mode can cause spurious conflicts"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ mu_path = sbox.ospath('A/mu')
+ A_path = sbox.ospath('A')
+ C_path = sbox.ospath('A/C')
+ A_branch_path = sbox.ospath('A-branch')
+ C_branch_path = sbox.ospath('A-branch/C')
+
+ # r2 - Set some initial properties:
+ #
+ # 'dir-prop'='value1' on A/C.
+ # 'svn:eol-style'='native' on A/mu.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'dir-prop', 'initial-val',
+ C_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'svn:eol-style', 'native',
+ mu_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'Set some properties',
+ wc_dir)
+
+ # r3 - Branch 'A' to 'A-branch':
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', A_path, A_branch_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'Create a branch of A',
+ wc_dir)
+
+ # r4 - Make a text mod to 'A/mu' and add new props to 'A/mu' and 'A/C':
+ svntest.main.file_write(mu_path, "The new mu!\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'prop-name', 'prop-val', mu_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'another-dir-prop', 'initial-val',
+ C_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Edit a file and make some prop changes',
+ wc_dir)
+
+ # r5 - Modify the sole property on 'A-branch/C':
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'dir-prop', 'branch-val',
+ C_branch_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'prop mod on branch', wc_dir)
+
+ # Now merge r4 from 'A' to 'A-branch'.
+ #
+ # Previously this failed over ra_neon and ra_serf on Windows:
+ #
+ # >svn merge ^^/A A-branch -c4
+ # Conflict discovered in 'C:/SVN/src-trunk/Debug/subversion/tests/cmdline
+ # /svn-test-work/working_copies/merge_tests-110/A-branch/mu'.
+ # Select: (p) postpone, (df) diff-full, (e) edit,
+ # (mc) mine-conflict, (tc) theirs-conflict,
+ # (s) show all options: p
+ # --- Merging r4 into 'A-branch':
+ # CU A-branch\mu
+ # Conflict for property 'another-dir-prop' discovered on 'C:/SVN/src-trunk
+ # /Debug/subversion/tests/cmdline/svn-test-work/working_copies/
+ # merge_tests-110/A-branch/C'.
+ # Select: (p) postpone,
+ # (mf) mine-full, (tf) theirs-full,
+ # (s) show all options: p
+ # C A-branch\C
+ # --- Recording mergeinfo for merge of r4 into 'A-branch':
+ # U A-branch
+ # Summary of conflicts:
+ # Text conflicts: 1
+ # Property conflicts: 1
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_output = wc.State(A_branch_path, {
+ 'mu' : Item(status='UU'),
+ 'C' : Item(status=' U'),
+ })
+ expected_mergeinfo_output = wc.State(A_branch_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_branch_path, {})
+ expected_status = wc.State(A_branch_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status='MM'),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' M'),
+ 'D' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=5)
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO :
+ '/A:4'}),
+ 'B' : Item(),
+ 'mu' : Item("The new mu!\n",
+ props={'prop-name' : 'prop-val',
+ 'svn:eol-style' : 'native'}),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(props={'dir-prop' : 'branch-val',
+ 'another-dir-prop' : 'initial-val'}),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ 'D/H/omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_skip = wc.State(A_branch_path, {})
+ svntest.actions.run_and_verify_merge(A_branch_path, 3, 4,
+ sbox.repo_url + '/A',
+ None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+
+
+#----------------------------------------------------------------------
+def merge_into_locally_added_file(sbox):
+ "merge into locally added file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ pi_path = sbox.ospath("A/D/G/pi")
+ new_path = sbox.ospath("A/D/G/new")
+
+ shutil.copy(pi_path, new_path)
+ svntest.main.file_append(pi_path, "foo\n")
+ sbox.simple_commit() # r2
+
+ sbox.simple_add('A/D/G/new')
+
+ expected_output = wc.State(wc_dir, {
+ 'A/D/G/new' : Item(status='G '),
+ })
+ expected_mergeinfo_output = wc.State(wc_dir, {
+ 'A/D/G/new' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(wc_dir, {})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({ 'A/D/G/new' : Item(status='A ', wc_rev=0)})
+ expected_status.tweak('A/D/G/pi', wc_rev=2)
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/D/G/pi', contents="This is the file 'pi'.\nfoo\n")
+ expected_disk.add({'A/D/G/new' : Item("This is the file 'pi'.\nfoo\n",
+ props={SVN_PROP_MERGEINFO : '/A/D/G/pi:2'})})
+ expected_skip = wc.State(wc_dir, {})
+
+ svntest.actions.run_and_verify_merge(wc_dir, '1', '2',
+ sbox.repo_url + '/A/D/G/pi', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True,
+ new_path)
+ sbox.simple_commit()
+
+#----------------------------------------------------------------------
+def merge_into_locally_added_directory(sbox):
+ "merge into locally added directory"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ G_path = sbox.ospath("A/D/G")
+ pi_path = sbox.ospath("A/D/G/pi")
+ new_dir_path = sbox.ospath("A/D/new_dir")
+
+ svntest.main.file_append_binary(pi_path, "foo\n")
+ sbox.simple_commit() # r2
+
+ os.mkdir(new_dir_path)
+ svntest.main.file_append_binary(os.path.join(new_dir_path, 'pi'),
+ "This is the file 'pi'.\n")
+ svntest.main.file_append_binary(os.path.join(new_dir_path, 'rho'),
+ "This is the file 'rho'.\n")
+ svntest.main.file_append_binary(os.path.join(new_dir_path, 'tau'),
+ "This is the file 'tau'.\n")
+ sbox.simple_add('A/D/new_dir')
+
+ expected_output = wc.State(wc_dir, {
+ 'A/D/new_dir/pi' : Item(status='G '),
+ })
+ expected_mergeinfo_output = wc.State(wc_dir, {
+ 'A/D/new_dir' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(wc_dir, {})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({ 'A/D/new_dir' : Item(status='A ', wc_rev=0)})
+ expected_status.add({ 'A/D/new_dir/pi' : Item(status='A ', wc_rev=0)})
+ expected_status.add({ 'A/D/new_dir/rho' : Item(status='A ', wc_rev=0)})
+ expected_status.add({ 'A/D/new_dir/tau' : Item(status='A ', wc_rev=0)})
+ expected_status.tweak('A/D/G/pi', wc_rev=2)
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/D/G/pi', contents="This is the file 'pi'.\nfoo\n")
+ expected_disk.add({'A/D/new_dir' :
+ Item(props={SVN_PROP_MERGEINFO : '/A/D/G:2'})})
+ expected_disk.add({'A/D/new_dir/pi' :
+ Item(contents="This is the file 'pi'.\nfoo\n")})
+ expected_disk.add({'A/D/new_dir/rho' :
+ Item(contents="This is the file 'rho'.\n")})
+ expected_disk.add({'A/D/new_dir/tau' :
+ Item(contents="This is the file 'tau'.\n")})
+ expected_skip = wc.State(wc_dir, {})
+
+ svntest.actions.run_and_verify_merge(wc_dir, '1', '2',
+ sbox.repo_url + '/A/D/G', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True,
+ new_dir_path)
+ sbox.simple_commit()
+
+#----------------------------------------------------------------------
+# Test for issue #2915 'Handle mergeinfo for subtrees missing due to removal
+# by non-svn command'
+@SkipUnless(server_has_mergeinfo)
+@Issue(2915)
+def merge_with_os_deleted_subtrees(sbox):
+ "merge tracking fails if target missing subtrees"
+
+ # r1: Create a greek tree.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # r2 - r6: Copy A to A_COPY and then make some text changes under A.
+ set_up_branch(sbox)
+
+ # Some paths we'll care about
+ A_COPY_path = sbox.ospath('A_COPY')
+ C_COPY_path = sbox.ospath('A_COPY/C')
+ psi_COPY_path = sbox.ospath('A_COPY/D/H/psi')
+ mu_COPY_path = sbox.ospath('A_COPY/mu')
+ G_COPY_path = sbox.ospath('A_COPY/D/G')
+
+ # Remove several subtrees from disk.
+ svntest.main.safe_rmtree(C_COPY_path)
+ svntest.main.safe_rmtree(G_COPY_path)
+ os.remove(psi_COPY_path)
+ os.remove(mu_COPY_path)
+
+ # Be sure the regex paths are properly escaped on Windows, see the
+ # note about "The Backslash Plague" in expected_merge_output().
+ if sys.platform == 'win32':
+ re_sep = '\\\\'
+ else:
+ re_sep = os.sep
+
+ # Common part of the expected error message for all cases we will test.
+ err_re = "svn: E195016: Merge tracking not allowed with missing subtrees; " + \
+ "try restoring these items first:" + \
+ "|(\n)" + \
+ "|" + svntest.main.stack_trace_regexp
+
+ # Case 1: Infinite depth merge into infinite depth WC target.
+ # Every missing subtree under the target should be reported as missing.
+ missing = "|(.*A_COPY" + re_sep + "mu\n)" + \
+ "|(.*A_COPY" + re_sep + "D" + re_sep + "G\n)" + \
+ "|(.*A_COPY" + re_sep + "C\n)" + \
+ "|(.*A_COPY" + re_sep + "D" + re_sep + "H" + re_sep + "psi\n)"
+ exit_code, out, err = svntest.actions.run_and_verify_svn(
+ [], svntest.verify.AnyOutput,
+ 'merge', sbox.repo_url + '/A', A_COPY_path)
+ svntest.verify.verify_outputs("Merge failed but not in the way expected",
+ err, None, err_re + missing, None,
+ True) # Match *all* lines of stderr
+
+ # Case 2: Immediates depth merge into infinite depth WC target.
+ # Only the two immediate children of the merge target should be reported
+ # as missing.
+ missing = "|(.*A_COPY" + re_sep + "mu\n)" + \
+ "|(.*A_COPY" + re_sep + "C\n)"
+ exit_code, out, err = svntest.actions.run_and_verify_svn(
+ [], svntest.verify.AnyOutput,
+ 'merge', sbox.repo_url + '/A', A_COPY_path, '--depth=immediates')
+ svntest.verify.verify_outputs("Merge failed but not in the way expected",
+ err, None, err_re + missing, None, True)
+
+ # Case 3: Files depth merge into infinite depth WC target.
+ # Only the single file child of the merge target should be reported
+ # as missing.
+ missing = "|(.*A_COPY" + re_sep + "mu\n)"
+ exit_code, out, err = svntest.actions.run_and_verify_svn(
+ [], svntest.verify.AnyOutput,
+ 'merge', sbox.repo_url + '/A', A_COPY_path, '--depth=files')
+ svntest.verify.verify_outputs("Merge failed but not in the way expected",
+ err, None, err_re + missing, None, True)
+
+ # Case 4: Empty depth merge into infinite depth WC target.
+ # Only the...oh, wait, the target is present and that is as deep
+ # as the merge goes, so this merge should succeed!
+ svntest.actions.run_and_verify_svn(
+ svntest.verify.AnyOutput, [], 'merge', sbox.repo_url + '/A',
+ A_COPY_path, '--depth=empty')
+
+#----------------------------------------------------------------------
+# Test for issue #3668 'inheritance can result in self-referential
+# mergeinfo' and issue #3669 'inheritance can result in mergeinfo
+# describing nonexistent sources'
+@Issue(3668,3669)
+@XFail()
+def no_self_referential_or_nonexistent_inherited_mergeinfo(sbox):
+ "don't inherit bogus mergeinfo"
+
+ # r1: Create a greek tree.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # r2 - r6: Copy A to A_COPY and then make some text changes under A.
+ set_up_branch(sbox, nbr_of_branches=1)
+
+ # Some paths we'll care about
+ nu_path = sbox.ospath('A/C/nu')
+ nu_COPY_path = sbox.ospath('A_COPY/C/nu')
+ J_path = sbox.ospath('A/D/J')
+ J_COPY_path = sbox.ospath('A_COPY/D/J')
+ zeta_path = sbox.ospath('A/D/J/zeta')
+ A_COPY_path = sbox.ospath('A_COPY')
+
+ # r7 - Add the file A/C/nu
+ svntest.main.file_write(nu_path, "This is the file 'nu'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'add', nu_path)
+ svntest.actions.run_and_verify_svn(None, [], 'commit',
+ '-m', 'Add file', wc_dir)
+
+ # r8 - Sync merge A to A_COPY
+ svntest.actions.run_and_verify_svn(
+ svntest.verify.AnyOutput, [], 'merge', sbox.repo_url + '/A',
+ A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'commit',
+ '-m', 'Sync A_COPY with A', wc_dir)
+
+ # r9 - Add the subtree A/D/J
+ # A/D/J/zeta
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', J_path)
+ svntest.main.file_write(zeta_path, "This is the file 'zeta'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'add', zeta_path)
+ svntest.actions.run_and_verify_svn(None, [], 'commit',
+ '-m', 'Add subtree', wc_dir)
+
+ # Update the WC in preparation for merges.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # r10 - Sync merge A to A_COPY
+ svntest.actions.run_and_verify_svn(
+ svntest.verify.AnyOutput, [], 'merge', sbox.repo_url + '/A',
+ A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'commit',
+ '-m', 'Sync A_COPY with A', wc_dir)
+
+ # r11 - Text changes to A/C/nu and A/D/J/zeta.
+ svntest.main.file_write(nu_path, "This is the EDITED file 'nu'.\n")
+ svntest.main.file_write(zeta_path, "This is the EDITED file 'zeta'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'commit',
+ '-m', 'Edit added files', wc_dir)
+
+ # Update the WC in preparation for merges.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # This test is marked as XFail because the following two merges
+ # create mergeinfo with both non-existent path-revs and self-referential
+ # mergeinfo.
+ #
+ # Merge all available revisions from A/C/nu to A_COPY/C/nu.
+ # The target has no explicit mergeinfo of its own but inherits mergeinfo
+ # from A_COPY. A_COPY has the mergeinfo '/A:2-9' so the naive mergeinfo
+ # A_COPY/C/nu inherits is '/A/C/nu:2-9'. However, '/A/C/nu:2-6' don't
+ # actually exist (issue #3669) and '/A/C/nu:7-8' is self-referential
+ # (issue #3668). Neither of these should be present in the resulting
+ # mergeinfo for A_COPY/C/nu, only '/A/C/nu:8-11'
+ expected_output = wc.State(nu_COPY_path, {
+ '' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(nu_COPY_path, {
+ '' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(nu_COPY_path, {
+ })
+ expected_status = wc.State(nu_COPY_path, {
+ '' : Item(status='MM', wc_rev=11),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/C/nu:8-11'}),
+ })
+ expected_skip = wc.State(nu_COPY_path, { })
+ svntest.actions.run_and_verify_merge(nu_COPY_path, None, None,
+ sbox.repo_url + '/A/C/nu', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Merge all available revisions from A/D/J to A_COPY/D/J. Like the
+ # previous merge, the target should not have any non-existent ('/A/D/J:2-8')
+ # or self-referential mergeinfo ('/A/D/J:9') recorded on it post-merge.
+ expected_output = wc.State(J_COPY_path, {
+ 'zeta' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(J_COPY_path, {
+ '' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(J_COPY_path, {
+ })
+ expected_status = wc.State(J_COPY_path, {
+ '' : Item(status=' M', wc_rev=11),
+ 'zeta' : Item(status='M ', wc_rev=11),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/J:10-11'}),
+ 'zeta' : Item("This is the EDITED file 'zeta'.\n")
+ })
+ expected_skip = wc.State(J_COPY_path, { })
+ svntest.actions.run_and_verify_merge(J_COPY_path, None, None,
+ sbox.repo_url + '/A/D/J', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+# Test for issue #3756 'subtree merge can inherit invalid working mergeinfo',
+# issue #3668 'inheritance can result in self-referential mergeinfo', and
+# issue #3669 'inheritance can result in mergeinfo describing nonexistent
+# sources'.
+@XFail()
+@Issue(3756,3668,3669)
+def subtree_merges_inherit_invalid_working_mergeinfo(sbox):
+ "don't inherit bogus working mergeinfo"
+
+ # r1: Create a greek tree.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # r2 - r6: Copy A to A_COPY and then make some text changes under A.
+ set_up_branch(sbox, nbr_of_branches=1)
+
+ # Some paths we'll care about
+ nu_path = sbox.ospath('A/C/nu')
+ nu_COPY_path = sbox.ospath('A_COPY/C/nu')
+ A_COPY_path = sbox.ospath('A_COPY')
+
+ # r7 - Add the file A/C/nu
+ svntest.main.file_write(nu_path, "This is the file 'nu'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'add', nu_path)
+ svntest.actions.run_and_verify_svn(None, [], 'commit',
+ '-m', 'Add file', wc_dir)
+
+ # r8 Merge c7 from A to A_COPY.
+ svntest.actions.run_and_verify_svn(
+ svntest.verify.AnyOutput, [], 'merge', sbox.repo_url + '/A',
+ A_COPY_path, '-c7')
+ svntest.actions.run_and_verify_svn(None, [], 'commit',
+ '-m', 'Merge subtree file addition',
+ wc_dir)
+
+ # r9 - A text change to A/C/nu.
+ svntest.main.file_write(nu_path, "This is the EDITED file 'nu'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'commit',
+ '-m', 'Edit added file', wc_dir)
+
+ # Update the WC in preparation for merges.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Now do two merges. The first, r3 to the root of the branch A_COPY.
+ # This creates working mergeinfo '/A:3,7' on A_COPY. Then do a subtree
+ # file merge of r9 from A/C/nu to A_COPY/C/nu. Since the target has no
+ # explicit mergeinfo, the mergeinfo set to record the merge of r9 should
+ # include the mergeinfo inherited from A_COPY. *But* that raw inherited
+ # mergeinfo, '/A/C/nu:3,7' is wholly invalid: '/A/C/nu:3' simply doesn't
+ # exist in the repository and '/A/C/nu:7' is self-referential. So the
+ # resulting mergeinfo on 'A_COPY/C/nu' should be only '/A/C/nu:9'.
+ #
+ # Currently this test is marked as XFail because the resulting mergeinfo is
+ # '/A/C/nu:3,7,9' and thus includes a non-existent path-rev.
+ svntest.actions.run_and_verify_svn(
+ svntest.verify.AnyOutput, [], 'merge', sbox.repo_url + '/A',
+ A_COPY_path, '-c3')
+ svntest.actions.run_and_verify_svn(
+ svntest.verify.AnyOutput, [], 'merge', sbox.repo_url + '/A/C/nu',
+ nu_COPY_path, '-c9')
+ svntest.actions.run_and_verify_svn(
+ '/A/C/nu:9', [], 'pg', SVN_PROP_MERGEINFO, nu_COPY_path)
+
+
+#----------------------------------------------------------------------
+# Test for issue #3686 'executable flag not correctly set on merge'
+# See http://subversion.tigris.org/issues/show_bug.cgi?id=3686
+@Issue(3686)
+@SkipUnless(server_has_mergeinfo)
+@SkipUnless(svntest.main.is_posix_os)
+def merge_change_to_file_with_executable(sbox):
+ "executable flag is maintained during binary merge"
+
+ # Scenario: When merging a change to a binary file with the 'svn:executable'
+ # property set, the file is not marked as 'executable'. After commit, the
+ # executable bit is set correctly.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ trunk_url = sbox.repo_url + '/A/B/E'
+
+ alpha_path = sbox.ospath('A/B/E/alpha')
+ beta_path = sbox.ospath('A/B/E/beta')
+
+ # Force one of the files to be a binary type
+ svntest.actions.run_and_verify_svn2(None,
+ binary_mime_type_on_text_file_warning, 0,
+ 'propset', 'svn:mime-type',
+ 'application/octet-stream',
+ alpha_path)
+
+ # Set the 'svn:executable' property on both files
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'svn:executable', 'ON',
+ beta_path)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'svn:executable', 'ON',
+ alpha_path)
+
+ # Verify the executable bit has been set before committing
+ if not os.access(alpha_path, os.X_OK):
+ raise svntest.Failure("alpha not marked as executable before commit")
+ if not os.access(beta_path, os.X_OK):
+ raise svntest.Failure("beta is not marked as executable before commit")
+
+ # Commit change (r2)
+ sbox.simple_commit()
+
+ # Verify the executable bit has remained after committing
+ if not os.access(alpha_path, os.X_OK):
+ raise svntest.Failure("alpha not marked as executable before commit")
+ if not os.access(beta_path, os.X_OK):
+ raise svntest.Failure("beta is not marked as executable before commit")
+
+ # Create the branch
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ trunk_url,
+ sbox.repo_url + '/branch',
+ '-m', "Creating the Branch")
+
+ # Modify the files + commit (r3)
+ svntest.main.file_append(alpha_path, 'appended alpha text')
+ svntest.main.file_append(beta_path, 'appended beta text')
+ sbox.simple_commit()
+
+ # Re-root the WC at the branch
+ svntest.main.safe_rmtree(wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'checkout',
+ sbox.repo_url + '/branch', wc_dir)
+
+ # Recalculate the paths
+ alpha_path = sbox.ospath('alpha')
+ beta_path = sbox.ospath('beta')
+
+ expected_output = wc.State(wc_dir, {
+ 'beta' : Item(status='U '),
+ 'alpha' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(wc_dir, {
+ '' : Item(status=' U')
+ })
+ expected_elision_output = wc.State(wc_dir, {
+ })
+ expected_disk = wc.State('', {
+ '.' : Item(props={'svn:mergeinfo':'/A/B/E:3-4'}),
+ 'alpha' : Item(contents="This is the file 'alpha'.\nappended alpha text",
+ props={'svn:executable':'*',
+ 'svn:mime-type':'application/octet-stream'}),
+ 'beta' : Item(contents="This is the file 'beta'.\nappended beta text",
+ props={"svn:executable" : '*'}),
+ })
+ expected_status = wc.State(wc_dir, {
+ '' : Item(status=' M', wc_rev='4'),
+ 'alpha' : Item(status='M ', wc_rev='4'),
+ 'beta' : Item(status='M ', wc_rev='4'),
+ })
+ expected_skip = wc.State(wc_dir, { })
+
+ # Merge the changes across
+ svntest.actions.run_and_verify_merge(wc_dir, None, None,
+ trunk_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+
+
+ # Verify the executable bit has been set
+ if not os.access(alpha_path, os.X_OK):
+ raise svntest.Failure("alpha is not marked as executable after merge")
+ if not os.access(beta_path, os.X_OK):
+ raise svntest.Failure("beta is not marked as executable after merge")
+
+ # Commit (r4)
+ sbox.simple_commit()
+
+ # Verify the executable bit has been set
+ if not os.access(alpha_path, os.X_OK):
+ raise svntest.Failure("alpha is not marked as executable after commit")
+ if not os.access(beta_path, os.X_OK):
+ raise svntest.Failure("beta is not marked as executable after commit")
+
+def dry_run_merge_conflicting_binary(sbox):
+ "dry run shouldn't resolve conflicts"
+
+ # This test-case is to showcase the regression caused by
+ # r1075802. Here is the link to the relevant discussion:
+ # http://svn.haxx.se/dev/archive-2011-03/0145.shtml
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ # Add a binary file to the project
+ theta_contents = open(os.path.join(sys.path[0], "theta.bin"), 'rb').read()
+ # Write PNG file data into 'A/theta'.
+ theta_path = sbox.ospath('A/theta')
+ svntest.main.file_write(theta_path, theta_contents, 'wb')
+
+ svntest.main.run_svn(None, 'add', theta_path)
+
+ # Commit the new binary file, creating revision 2.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/theta' : Item(verb='Adding (bin)'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/theta' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Make the "other" working copy
+ other_wc = sbox.add_wc_path('other')
+ svntest.actions.duplicate_dir(wc_dir, other_wc)
+
+ # Change the binary file in first working copy, commit revision 3.
+ svntest.main.file_append(theta_path, "some extra junk")
+ expected_output = wc.State(wc_dir, {
+ 'A/theta' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/theta' : Item(status=' ', wc_rev=3),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # In second working copy, append different content to the binary
+ # and attempt to 'svn merge -r 2:3'.
+ # We should see a conflict during the merge.
+ other_theta_path = os.path.join(other_wc, 'A', 'theta')
+ svntest.main.file_append(other_theta_path, "some other junk")
+ expected_output = wc.State(other_wc, {
+ 'A/theta' : Item(status='C '),
+ })
+ expected_mergeinfo_output = wc.State(other_wc, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(other_wc, {
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ '' : Item(props={SVN_PROP_MERGEINFO : '/:3'}),
+ 'A/theta' : Item(theta_contents + b"some other junk",
+ props={'svn:mime-type' : 'application/octet-stream'}),
+ })
+
+ # verify content of base(left) file
+ expected_disk.add({
+ 'A/theta.merge-left.r2' :
+ Item(contents = theta_contents )
+ })
+ # verify content of theirs(right) file
+ expected_disk.add({
+ 'A/theta.merge-right.r3' :
+ Item(contents= theta_contents + b"some extra junk")
+ })
+
+ expected_status = svntest.actions.get_virginal_state(other_wc, 1)
+ expected_status.add({
+ '' : Item(status=' M', wc_rev=1),
+ 'A/theta' : Item(status='C ', wc_rev=2),
+ })
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_merge(other_wc, '2', '3',
+ sbox.repo_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True,
+ '--allow-mixed-revisions',
+ other_wc)
+
+#----------------------------------------------------------------------
+@Issue(3857)
+def foreign_repos_prop_conflict(sbox):
+ "prop conflict from foreign repos merge"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Create a second repository and working copy with the original
+ # greek tree.
+ repo_dir = sbox.repo_dir
+ other_repo_dir, other_repo_url = sbox.add_repo_path("other")
+ other_wc_dir = sbox.add_wc_path("other")
+ svntest.main.copy_repos(repo_dir, other_repo_dir, 1, 1)
+ svntest.actions.run_and_verify_svn(None, [], 'co', other_repo_url,
+ other_wc_dir)
+
+ # Add properties in the first repos and commit.
+ sbox.simple_propset('red', 'rojo', 'A/D/G')
+ sbox.simple_propset('yellow', 'amarillo', 'A/D/G')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'spenglish', wc_dir)
+
+ # Tweak properties in the first repos and commit.
+ sbox.simple_propset('red', 'rosso', 'A/D/G')
+ sbox.simple_propset('yellow', 'giallo', 'A/D/G')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'engtalian', wc_dir)
+
+ # Now, merge the propchange to the *second* working copy.
+ expected_output = [' C %s\n' % (os.path.join(other_wc_dir,
+ "A", "D", "G"))]
+ expected_output = expected_merge_output([[3]], expected_output, True,
+ prop_conflicts=1)
+ svntest.actions.run_and_verify_svn(expected_output,
+ [], 'merge', '-c3',
+ sbox.repo_url,
+ other_wc_dir)
+
+#----------------------------------------------------------------------
+# Test for issue #3975 'adds with explicit mergeinfo don't get mergeinfo
+# describing merge which added them'
+@Issue(3975)
+@SkipUnless(server_has_mergeinfo)
+def merge_adds_subtree_with_mergeinfo(sbox):
+ "merge adds subtree with mergeinfo"
+
+ sbox.build()
+ os.chdir(sbox.wc_dir)
+ sbox.wc_dir = ''
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox, False, 2)
+
+ A_path = sbox.ospath('A')
+ nu_path = sbox.ospath('A/C/nu')
+ nu_COPY_path = sbox.ospath('A_COPY/C/nu')
+ A_COPY2_path = sbox.ospath('A_COPY_2')
+
+ # r8 - Add the file A_COPY/C/nu.
+ svntest.main.file_write(nu_COPY_path, "This is the file 'nu'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'add', nu_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Add a file on the A_COPY branch',
+ wc_dir)
+
+ # r9 - Cherry pick r8 from A_COPY to A.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ sbox.repo_url + '/A_COPY',
+ A_path, '-c8')
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Merge r8 from A_COPY to A', wc_dir)
+
+ # r10 - Make a modification to A_COPY/C/nu
+ svntest.main.file_append(nu_COPY_path,
+ "More work on the A_COPY branch.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Some work on the A_COPY branch', wc_dir)
+
+ # r9 - Cherry pick r10 from A_COPY/C/nu to A/C/nu. Make some
+ # changes to A/C/nu before committing the merge.
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ sbox.repo_url + '/A_COPY/C/nu',
+ nu_path, '-c10')
+ svntest.main.file_append(nu_path, "A faux conflict resolution.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Merge r8 from A_COPY to A', wc_dir)
+
+ # Sync merge A to A_COPY_2
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_output = wc.State(A_COPY2_path, {
+ 'B/E/beta' : Item(status='U '),
+ 'C/nu' : Item(status='A '),
+ 'D/G/rho' : Item(status='U '),
+ 'D/H/omega' : Item(status='U '),
+ 'D/H/psi' : Item(status='U '),
+ '' : Item(status=' U'),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY2_path, {
+ '' : Item(status=' G'),
+ 'C/nu' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY2_path, {
+ })
+ expected_status = wc.State(A_COPY2_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status=' '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status='M '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'C/nu' : Item(status='A ', copied='+'),
+ 'D' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status='M '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' '),
+ 'D/H/chi' : Item(status=' '),
+ 'D/H/psi' : Item(status='M '),
+ 'D/H/omega' : Item(status='M '),
+ })
+ expected_status.tweak(wc_rev=11)
+ expected_status.tweak('C/nu', wc_rev='-')
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:3-11\n/A_COPY:8'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ # C/nu will pick up the mergeinfo A_COPY/C/nu:8 which is self-referential.
+ # This is issue #3668 'inheritance can result in self-referential
+ # mergeinfo', but we'll allow it in this test since issue #3668 is
+ # tested elsewhere and is not the point of *this* test.
+ 'C/nu' : Item("This is the file 'nu'.\n" \
+ "More work on the A_COPY branch.\n" \
+ "A faux conflict resolution.\n",
+ props={SVN_PROP_MERGEINFO :
+ '/A/C/nu:9-11\n/A_COPY/C/nu:8,10'}),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State('.', { })
+ svntest.actions.run_and_verify_merge(A_COPY2_path, None, None,
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+# A test for issue #3978 'reverse merge which adds subtree fails'.
+@Issue(3978,4057)
+@SkipUnless(server_has_mergeinfo)
+def reverse_merge_adds_subtree(sbox):
+ "reverse merge adds subtree"
+
+ sbox.build()
+ os.chdir(sbox.wc_dir)
+ sbox.wc_dir = ''
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ A_path = sbox.ospath('A')
+ chi_path = sbox.ospath('A/D/H/chi')
+ A_COPY_path = sbox.ospath('A_COPY')
+ H_COPY_path = sbox.ospath('A_COPY/D/H')
+
+ # r7 - Delete A\D\H\chi
+ svntest.actions.run_and_verify_svn(None, [], 'delete', chi_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Delete a file', wc_dir)
+
+ # r8 - Merge r7 from A to A_COPY
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ sbox.repo_url + '/A',
+ A_COPY_path, '-c7')
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Cherry-pick r7 from A to A_COPY', wc_dir)
+
+ # r9 - File depth sync merge from A/D/H to A_COPY/D/H/
+ # This shallow merge does not create non-inheritable mergeinfo because of
+ # the issue #4057 fix; all subtrees affected by the diff are present, so
+ # non-inheritable mergeinfo is not required.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ sbox.repo_url + '/A/D/H',
+ H_COPY_path, '--depth', 'files')
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Cherry-pick r7 from A to A_COPY', wc_dir)
+
+ # Reverse merge r7 from A to A_COPY
+ #
+ # Prior to the issue #3978 fix this merge failed with an assertion:
+ #
+ # >svn merge ^/A A_COPY -c-7
+ # --- Reverse-merging r7 into 'A_COPY\D\H':
+ # A A_COPY\D\H\chi
+ # --- Recording mergeinfo for reverse merge of r7 into 'A_COPY':
+ # U A_COPY
+ # --- Recording mergeinfo for reverse merge of r7 into 'A_COPY\D\H':
+ # U A_COPY\D\H
+ # ..\..\..\subversion\svn\util.c:913: (apr_err=200020)
+ # ..\..\..\subversion\libsvn_client\merge.c:10990: (apr_err=200020)
+ # ..\..\..\subversion\libsvn_client\merge.c:10944: (apr_err=200020)
+ # ..\..\..\subversion\libsvn_client\merge.c:10944: (apr_err=200020)
+ # ..\..\..\subversion\libsvn_client\merge.c:10914: (apr_err=200020)
+ # ..\..\..\subversion\libsvn_client\merge.c:8928: (apr_err=200020)
+ # ..\..\..\subversion\libsvn_client\merge.c:7850: (apr_err=200020)
+ # ..\..\..\subversion\libsvn_client\mergeinfo.c:120: (apr_err=200020)
+ # ..\..\..\subversion\libsvn_wc\props.c:2472: (apr_err=200020)
+ # ..\..\..\subversion\libsvn_wc\props.c:2247: (apr_err=200020)
+ # ..\..\..\subversion\libsvn_wc\props.c:2576: (apr_err=200020)
+ # ..\..\..\subversion\libsvn_subr\mergeinfo.c:705: (apr_err=200020)
+ # svn: E200020: Could not parse mergeinfo string '-7'
+ # ..\..\..\subversion\libsvn_subr\mergeinfo.c:688: (apr_err=200022)
+ # ..\..\..\subversion\libsvn_subr\mergeinfo.c:607: (apr_err=200022)
+ # ..\..\..\subversion\libsvn_subr\mergeinfo.c:504: (apr_err=200022)
+ # ..\..\..\subversion\libsvn_subr\kitchensink.c:57: (apr_err=200022)
+ # svn: E200022: Negative revision number found parsing '-7'
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_output = wc.State(A_COPY_path, {
+ 'D/H/chi' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ 'D/H' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M'),
+ 'B' : Item(status=' '),
+ 'mu' : Item(status=' '),
+ 'B/E' : Item(status=' '),
+ 'B/E/alpha' : Item(status=' '),
+ 'B/E/beta' : Item(status=' '),
+ 'B/lambda' : Item(status=' '),
+ 'B/F' : Item(status=' '),
+ 'C' : Item(status=' '),
+ 'D' : Item(status=' '),
+ 'D/G' : Item(status=' '),
+ 'D/G/pi' : Item(status=' '),
+ 'D/G/rho' : Item(status=' '),
+ 'D/G/tau' : Item(status=' '),
+ 'D/gamma' : Item(status=' '),
+ 'D/H' : Item(status=' M'),
+ 'D/H/chi' : Item(status='A ', copied='+'),
+ 'D/H/psi' : Item(status=' '),
+ 'D/H/omega' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=9)
+ expected_status.tweak('D/H/chi', wc_rev='-')
+ expected_disk = wc.State('', {
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("This is the file 'beta'.\n"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:2-6,8'}),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State('.', { })
+ svntest.actions.run_and_verify_merge(A_COPY_path, 7, 6,
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+# A test for issue #3989 'merge which deletes file with native eol-style
+# raises spurious tree conflict'.
+@Issue(3989)
+@SkipUnless(server_has_mergeinfo)
+def merged_deletion_causes_tree_conflict(sbox):
+ "merged deletion causes spurious tree conflict"
+
+ sbox.build()
+ os.chdir(sbox.wc_dir)
+ sbox.wc_dir = ''
+ wc_dir = sbox.wc_dir
+
+ A_path = sbox.ospath('A')
+ psi_path = sbox.ospath('A/D/H/psi')
+ H_branch_path = sbox.ospath('branch/D/H')
+
+ # r2 - Set svn:eol-style native on A/D/H/psi
+ svntest.actions.run_and_verify_svn(None, [], 'ps', 'svn:eol-style',
+ 'native', psi_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Set eol-style native on a path',
+ wc_dir)
+
+ # r3 - Branch ^/A to ^/branch
+ svntest.actions.run_and_verify_svn(None, [], 'copy',
+ sbox.repo_url + '/A',
+ sbox.repo_url + '/branch',
+ '-m', 'Copy ^/A to ^/branch')
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # r4 - Delete A/D/H/psi
+ svntest.actions.run_and_verify_svn(None, [], 'delete', psi_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Delete a a path with native eol-style',
+ wc_dir)
+
+ # Sync merge ^/A/D/H to branch/D/H.
+ #
+ # branch/D/H/psi is, ignoring differences caused by svn:eol-style, identical
+ # to ^/A/D/H/psi when the latter was deleted, so the deletion should merge
+ # cleanly.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_output = wc.State(H_branch_path, {
+ 'psi' : Item(status='D '),
+ })
+ expected_mergeinfo_output = wc.State(H_branch_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(H_branch_path, {})
+ expected_status = wc.State(H_branch_path, {
+ '' : Item(status=' M'),
+ 'chi' : Item(status=' '),
+ 'psi' : Item(status='D '),
+ 'omega' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=4)
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/H:3-4'}),
+ 'chi' : Item("This is the file 'chi'.\n"),
+ 'omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_skip = wc.State('.', { })
+ svntest.actions.run_and_verify_merge(H_branch_path, None, None,
+ sbox.repo_url + '/A/D/H', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+# A test for issue #3976 'record-only merges which add new subtree mergeinfo
+# don't record mergeinfo describing merge'.
+@Issue(3976)
+@SkipUnless(server_has_mergeinfo)
+def record_only_merge_adds_new_subtree_mergeinfo(sbox):
+ "record only merge adds new subtree mergeinfo"
+
+ sbox.build()
+ os.chdir(sbox.wc_dir)
+ sbox.wc_dir = ''
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ psi_path = sbox.ospath('A/D/H/psi')
+ psi_COPY_path = sbox.ospath('A_COPY/D/H/psi')
+ H_COPY2_path = sbox.ospath('A_COPY_2/D/H')
+
+ # r7 - Copy ^/A_COPY to ^/A_COPY_2
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', '-m', 'copy A_COPY to A_COPY_2',
+ sbox.repo_url + '/A_COPY',
+ sbox.repo_url + '/A_COPY_2')
+
+ # r8 - Set a property on A/D/H/psi. It doesn't matter what property
+ # we use, just as long as we have a change that can be merged independently
+ # of the text change to A/D/H/psi in r3.
+ svntest.main.run_svn(None, 'propset', 'svn:eol-style', 'native', psi_path)
+ svntest.main.run_svn(None, 'commit', '-m', 'set svn:eol-style', wc_dir)
+
+ # r9 - Merge r3 from ^/A/D/H/psi to A_COPY/D/H/psi.
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ sbox.repo_url + '/A/D/H/psi',
+ psi_COPY_path, '-c3')
+ svntest.main.run_svn(None, 'commit', '-m', 'Subtree merge', wc_dir)
+
+ # r10 - Merge r8 from ^/A/D/H/psi to A_COPY/D/H/psi.
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ sbox.repo_url + '/A/D/H/psi',
+ psi_COPY_path, '-c8')
+ svntest.main.run_svn(None, 'commit', '-m', 'Subtree merge', wc_dir)
+
+ # Merge r10 from ^/A_COPY/D/H to A_COPY_2/D/H. This should leave
+ # A_COPY_2/D/H/psi with three new property additions:
+ #
+ # 1) The 'svn:eol-style=native' from r10 via r8.
+ #
+ # 2) The mergeinfo '/A/D/H/psi:8' from r10.
+ #
+ # 3) The mergeinfo '/A_COPY/D/H/psi:10' describing the merge itself.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_output = wc.State(H_COPY2_path, {
+ 'psi' : Item(status=' U'),
+ })
+ expected_mergeinfo_output = wc.State(H_COPY2_path, {
+ '' : Item(status=' U'),
+ 'psi' : Item(status=' G'),
+ })
+ expected_elision_output = wc.State(H_COPY2_path, {})
+ expected_status = wc.State(H_COPY2_path, {
+ '' : Item(status=' M'),
+ 'chi' : Item(status=' '),
+ 'psi' : Item(status=' M'),
+ 'omega' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev=10)
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A_COPY/D/H:10'}),
+ 'psi' : Item("This is the file 'psi'.\n",
+ props={SVN_PROP_MERGEINFO :
+ '/A/D/H/psi:8\n/A_COPY/D/H/psi:10',
+ 'svn:eol-style' : 'native'}),
+ 'chi' : Item("This is the file 'chi'.\n"),
+ 'omega' : Item("This is the file 'omega'.\n"),
+ })
+ expected_skip = wc.State('.', { })
+ svntest.actions.run_and_verify_merge(H_COPY2_path, 9, 10,
+ sbox.repo_url + '/A_COPY/D/H', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+
+#----------------------------------------------------------------------
+# Test for issue #4056 "don't record non-inheritable mergeinfo if missing
+# subtrees are not touched by the full-depth diff".
+@Issue(4056)
+@SkipUnless(server_has_mergeinfo)
+def unnecessary_noninheritable_mergeinfo_missing_subtrees(sbox):
+ "missing subtrees untouched by infinite depth merge"
+
+ B_branch_path = sbox.ospath('branch/B')
+
+  # Set up a simple branch to merge to.
+ expected_output, expected_mergeinfo_output, expected_elision_output, \
+ expected_status, expected_disk, expected_skip = \
+ noninheritable_mergeinfo_test_set_up(sbox)
+
+ # Create a shallow merge target; set depth of branch/B to files.
+ svntest.main.run_svn(None, 'up', '--set-depth=files', B_branch_path)
+ expected_status.remove('E', 'E/alpha', 'E/beta', 'F')
+ expected_disk.remove('E', 'E/alpha', 'E/beta', 'F')
+
+ # Merge r3 from ^/A/B to branch/B
+ #
+ # Merge is smart enough to realize that despite the shallow merge target,
+ # the diff can only affect branch/B/lambda, which is still present, so there
+ # is no need to record non-inheritable mergeinfo on the target
+ # or any subtree mergeinfo whatsoever:
+ #
+ # >svn pg svn:mergeinfo -vR
+ # Properties on 'branch\B':
+ # svn:mergeinfo
+ # /A/B:3 <-- Nothing was skipped, so doesn't need
+ # to be non-inheritable.
+ svntest.actions.run_and_verify_merge(B_branch_path,
+ '2', '3',
+ sbox.repo_url + '/A/B', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True,
+ B_branch_path)
+
+#----------------------------------------------------------------------
+# Test for issue #4057 "don't record non-inheritable mergeinfo in shallow
+# merge if entire diff is within requested depth".
+@Issue(4057)
+@SkipUnless(server_has_mergeinfo)
+def unnecessary_noninheritable_mergeinfo_shallow_merge(sbox):
+ "shallow merge reaches all necessary subtrees"
+
+ B_branch_path = sbox.ospath('branch/B')
+ E_path = sbox.ospath('A/B/E')
+
+  # Set up a simple branch to merge to.
+ expected_output, expected_mergeinfo_output, expected_elision_output, \
+ expected_status, expected_disk, expected_skip = \
+ noninheritable_mergeinfo_test_set_up(sbox)
+
+ # Merge r3 from ^/A/B to branch/B at operational depth=files
+ #
+ # Previously this failed because merge wasn't smart enough to
+ # realize that despite being a shallow merge, the diff can
+ # only affect branch/B/lambda, which is within the specified
+ # depth, so there is no need to record non-inheritable mergeinfo
+ # or subtree mergeinfo:
+ #
+ # >svn pg svn:mergeinfo -vR
+ # Properties on 'branch\B':
+ # svn:mergeinfo
+ # /A/B:3* <-- Should be inheritable
+ # Properties on 'branch\B\lambda':
+ # svn:mergeinfo
+ # /A/B/lambda:3 <-- Not necessary
+ expected_skip = wc.State(B_branch_path, {})
+ svntest.actions.run_and_verify_merge(B_branch_path, '2', '3',
+ sbox.repo_url + '/A/B', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True,
+ '--depth', 'files', B_branch_path)
+
+ # Revert the merge and then make a prop change to A/B/E in r4.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '--recursive', sbox.wc_dir)
+ svntest.actions.run_and_verify_svn(["property 'prop:name' set on '" +
+ E_path + "'\n"], [], 'ps',
+ 'prop:name', 'propval', E_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'A new property on a dir',
+ sbox.wc_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', sbox.wc_dir)
+
+ # Merge r4 from ^/A/B to branch/B at operational depth=immediates
+ #
+ # Previously this failed because the mergetracking logic didn't realize
+ # that despite being a shallow merge, the diff only affected branch/B/E,
+ # which was within the specified depth, so there was no need to record
+ # non-inheritable mergeinfo or subtree mergeinfo:
+ #
+ # >svn pg svn:mergeinfo -vR
+ # Properties on 'branch\B':
+ # svn:mergeinfo
+ # /A/B:4* <-- Should be inheritable
+ # Properties on 'branch\B\E':
+ # svn:mergeinfo
+ # /A/B/E:4 <-- Not necessary
+ expected_output = wc.State(B_branch_path, {
+ 'E' : Item(status=' U'),
+ })
+ expected_mergeinfo_output = wc.State(B_branch_path, {
+ '' : Item(status=' U'),
+ 'E' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(B_branch_path, {
+ 'E' : Item(status=' U'),
+ })
+ expected_status = wc.State(B_branch_path, {
+ '' : Item(status=' M'),
+ 'lambda' : Item(status=' '),
+ 'E' : Item(status=' M'),
+ 'E/alpha' : Item(status=' '),
+ 'E/beta' : Item(status=' '),
+ 'F' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev='4')
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:4'}),
+ 'lambda' : Item("This is the file 'lambda'.\n"),
+ 'E' : Item(props={'prop:name' : 'propval'}),
+ 'E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'E/beta' : Item("This is the file 'beta'.\n"),
+ 'F' : Item(),
+ })
+ svntest.actions.run_and_verify_merge(B_branch_path, '3', '4',
+ sbox.repo_url + '/A/B', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True,
+ '--depth', 'immediates', B_branch_path)
+
+#----------------------------------------------------------------------
+# Test for issue #4132, "merge of replaced source asserts".
+# The original use-case is the following merges, which both asserted:
+# svn merge -cr1295005 ^/subversion/trunk@1295000 ../src
+# svn merge -cr1295004 ^/subversion/trunk/@r1295004 ../src
+@Issue(4132)
+def svnmucc_abuse_1(sbox):
+ "svnmucc: merge a replacement"
+
+ sbox.build()
+ os.chdir(sbox.wc_dir)
+ sbox.wc_dir = ''
+ wc_dir = sbox.wc_dir
+
+ ## Using A/ as our trunk, since one cannot replace the root.
+
+ ## r2: open a branch
+ sbox.simple_repo_copy('A', 'A_COPY')
+
+ ## r3: padding (to make the revnums-mod-10 match)
+ sbox.simple_repo_copy('iota', 'padding')
+
+ ## r4: trunk: accidental change
+ sbox.simple_append('A/mu', 'accidental change')
+ sbox.simple_commit()
+
+ ## r5: fail to revert it
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-m', 'r5',
+ '-U', sbox.repo_url,
+ 'rm', 'A',
+ 'cp', 'HEAD', 'A', 'A')
+
+ ## r6: really revert it
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-m', 'r6',
+ '-U', sbox.repo_url,
+ 'rm', 'A',
+ 'cp', '3', 'A', 'A')
+
+ ## Attempt to merge that.
+ # This used to assert:
+ # --- Recording mergeinfo for merge of r5 into \
+ # 'svn-test-work/working_copies/merge_tests-125/A_COPY':
+ # subversion/libsvn_subr/mergeinfo.c:1172: (apr_err=235000)
+ # svn: E235000: In file 'subversion/libsvn_subr/mergeinfo.c' \
+ # line 1172: assertion failed (IS_VALID_FORWARD_RANGE(first))
+ #
+ # Then, prior to the fix asserted this way:
+ #
+ # >svn merge -c5 ^/A@r5 A_COPY
+ # subversion\libsvn_client\merge.c:4871: (apr_err=235000)
+ # svn: E235000: In file 'subversion\libsvn_client\merge.c'
+ # line 4871: assertion failed (*gap_start < *gap_end)
+ sbox.simple_update()
+ svntest.main.run_svn(None, 'merge', '-c', 'r5', '^/A@r5',
+ sbox.ospath('A_COPY'))
+
+#----------------------------------------------------------------------
+# Test for issue #4138 'replacement in merge source not notified correctly'.
+@SkipUnless(server_has_mergeinfo)
+@Issue(4138)
+def merge_source_with_replacement(sbox):
+ "replacement in merge source not notified correctly"
+
+ sbox.build()
+ os.chdir(sbox.wc_dir)
+ sbox.wc_dir = ''
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about.
+ A_path = sbox.ospath('A')
+ omega_path = sbox.ospath('A/D/H/omega')
+ A_COPY_path = sbox.ospath('A_COPY')
+ beta_COPY_path = sbox.ospath('A_COPY/B/E/beta')
+ psi_COPY_path = sbox.ospath('A_COPY/D/H/psi')
+ rho_COPY_path = sbox.ospath('A_COPY/D/G/rho')
+ omega_COPY_path = sbox.ospath('A_COPY/D/H/omega')
+
+ # branch A@1 to A_COPY in r2, then make a few edits under A in r3-6:
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ # r7 Delete A, replace it with A@5, effectively reverting the change
+ # made to A/D/H/omega in r6:
+ svntest.main.run_svn(None, 'up', wc_dir)
+ svntest.main.run_svn(None, 'del', A_path)
+ svntest.main.run_svn(None, 'copy', sbox.repo_url + '/A@5', A_path)
+ sbox.simple_commit(message='Replace A with older version of itself')
+
+ # r8: Make an edit to A/D/H/omega:
+ svntest.main.file_write(omega_path, "New content for 'omega'.\n")
+ sbox.simple_commit(message='file edit')
+
+ # Update and sync merge ^/A to A_COPY.
+ #
+ # text text text text text
+ # edit edit edit edit edit
+ # psi rho beta omega omega
+ # A@r1---r3----r4----r5----r6---X r7---r8--------->
+ # | | ^ |
+ # | v | |
+ # | +---replacement---+ |
+ # copy |
+ # | sync-merge
+ # | |
+ # v v
+ # r2---A_COPY----------------------------------------->
+ svntest.main.run_svn(None, 'up', wc_dir)
+ # This test previously failed because the merge notifications make it look
+ # like r6 from ^/A was merged and recorded:
+ #
+ # >svn merge ^^/A A_COPY
+ # --- Merging r2 through r5 into 'A_COPY':
+ # U A_COPY\B\E\beta
+ # U A_COPY\D\G\rho
+ # U A_COPY\D\H\psi
+ # --- Recording mergeinfo for merge of r2 through r5 into 'A_COPY':
+ # U A_COPY
+ # --- Merging r6 through r8 into 'A_COPY':
+ # U A_COPY\D\H\omega
+ # --- Recording mergeinfo for merge of r6 through r8 into 'A_COPY':
+ # G A_COPY
+ expected_output = expected_merge_output(
+ [[2,5],[7,8]],
+ ['U ' + beta_COPY_path + '\n',
+ 'U ' + rho_COPY_path + '\n',
+ 'U ' + omega_COPY_path + '\n',
+ 'U ' + psi_COPY_path + '\n',
+ ' U ' + A_COPY_path + '\n',
+ ' G ' + A_COPY_path + '\n',])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'merge', sbox.repo_url + '/A',
+ A_COPY_path)
+
+ # Misleading notifications are one thing, incorrect mergeinfo is quite
+ # another.
+ svntest.actions.run_and_verify_svn([A_COPY_path + ' - /A:2-5,7-8\n'],
+ [], 'pg', SVN_PROP_MERGEINFO,
+ '-R', A_COPY_path)
+
+ # Commit the above merge and then reverse merge it. Again r6 is not
+ # being merged and should not be part of the notifications.
+ sbox.simple_commit()
+ sbox.simple_update()
+ expected_output = expected_merge_output(
+ [[5,2],[8,7]],
+ ['U ' + beta_COPY_path + '\n',
+ 'U ' + rho_COPY_path + '\n',
+ 'U ' + omega_COPY_path + '\n',
+ 'U ' + psi_COPY_path + '\n',
+ ' U ' + A_COPY_path + '\n',
+ ' G ' + A_COPY_path + '\n',],
+ elides=True)
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'merge', sbox.repo_url + '/A',
+ A_COPY_path, '-r8:1')
+
+#----------------------------------------------------------------------
+# Test for issue #4144 'Reverse merge with replace in source applies
+# diffs in forward order'.
+@SkipUnless(server_has_mergeinfo)
+@Issue(4144)
+def reverse_merge_with_rename(sbox):
+ "reverse merge applies revs in reverse order"
+
+ sbox.build()
+ os.chdir(sbox.wc_dir)
+ sbox.wc_dir = ''
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about.
+ A_path = sbox.ospath('A')
+ omega_path = sbox.ospath('trunk/D/H/omega')
+ A_COPY_path = sbox.ospath('A_COPY')
+ beta_COPY_path = sbox.ospath('A_COPY/B/E/beta')
+ psi_COPY_path = sbox.ospath('A_COPY/D/H/psi')
+ rho_COPY_path = sbox.ospath('A_COPY/D/G/rho')
+ omega_COPY_path = sbox.ospath('A_COPY/D/H/omega')
+
+ # branch A@1 to A_COPY in r2, then make a few edits under A in r3-6:
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ # r7 - Rename ^/A to ^/trunk.
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 7.\n'],
+ [], 'move',
+ sbox.repo_url + '/A',
+ sbox.repo_url + '/trunk',
+ '-m', "Rename 'A' to 'trunk'")
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+  # r8 - Make an edit to trunk/D/H/omega (which was also edited in r6).
+ svntest.main.file_write(omega_path, "Edit 'omega' on trunk.\n")
+ sbox.simple_commit(message='Another omega edit')
+
+ # r9 - Sync merge ^/trunk to A_COPY.
+ svntest.actions.run_and_verify_svn(None, # Don't check stdout, we test this
+ # type of merge to death elsewhere.
+ [], 'merge', sbox.repo_url + '/trunk',
+ A_COPY_path)
+ sbox.simple_commit(message='Sync A_COPY with ^/trunk')
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Reverse merge -r9:1 from ^/trunk to A_COPY. This should return
+ # A_COPY to the same state it had prior to the sync merge in r2.
+ #
+ # This currently fails because the Subversion tries to reverse merge
+ # -r6:1 first, then -r8:6, causing a spurious conflict on omega:
+ #
+ # >svn merge ^/trunk A_COPY -r9:1 --accept=postpone
+ # --- Reverse-merging r6 through r2 into 'A_COPY':
+ # U A_COPY\B\E\beta
+ # U A_COPY\D\G\rho
+ # C A_COPY\D\H\omega
+ # U A_COPY\D\H\psi
+ # --- Recording mergeinfo for reverse merge of r6 through r2 into 'A_COPY':
+ # U A_COPY
+ # Summary of conflicts:
+ # Text conflicts: 1
+ # ..\..\..\subversion\svn\util.c:913: (apr_err=155015)
+ # ..\..\..\subversion\libsvn_client\merge.c:10848: (apr_err=155015)
+ # ..\..\..\subversion\libsvn_client\merge.c:10812: (apr_err=155015)
+ # ..\..\..\subversion\libsvn_client\merge.c:8984: (apr_err=155015)
+ # ..\..\..\subversion\libsvn_client\merge.c:4728: (apr_err=155015)
+ # svn: E155015: One or more conflicts were produced while merging r6:1
+ # into 'C:\SVN\src-trunk-4\Debug\subversion\tests\cmdline\svn-test-work
+ # \working_copies\merge_tests-127\A_COPY' -- resolve all conflicts and
+ # rerun the merge to apply the remaining unmerged revisions
+ expected_output = expected_merge_output(
+ [[8,7],[6,2]],
+ ['U ' + beta_COPY_path + '\n',
+ 'U ' + rho_COPY_path + '\n',
+ 'U ' + omega_COPY_path + '\n',
+ 'G ' + omega_COPY_path + '\n',
+ 'U ' + psi_COPY_path + '\n',
+ ' U ' + A_COPY_path + '\n',
+ ' G ' + A_COPY_path + '\n',], elides=True)
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'merge', sbox.repo_url + '/trunk',
+ A_COPY_path, '-r9:1')
+
+#----------------------------------------------------------------------
+# Test for issue #4166 'multiple merge editor drives which add then
+# delete a subtree fail'.
+@SkipUnless(server_has_mergeinfo)
+@Issue(4166)
+def merge_adds_then_deletes_subtree(sbox):
+ "merge adds then deletes subtree"
+
+ sbox.build()
+ os.chdir(sbox.wc_dir)
+ sbox.wc_dir = ''
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about.
+ A_path = sbox.ospath('A')
+ nu_path = sbox.ospath('A/C/nu')
+ C_branch_path = sbox.ospath('branch/C')
+ nu_branch_path = sbox.ospath('branch/C/nu')
+
+ # Make a branch.
+ svntest.actions.run_and_verify_svn(None, [], 'copy',
+ sbox.repo_url + '/A',
+ sbox.repo_url + '/branch',
+ '-m', 'Make a branch.')
+
+ # On the branch parent: Add a file in r3 and then delete it in r4.
+ svntest.main.file_write(nu_path, "This is the file 'nu'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'add', nu_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir,
+ '-m', 'Add a file')
+ svntest.actions.run_and_verify_svn(None, [], 'delete', nu_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir,
+ '-m', 'Delete a file')
+
+ # Merge r3 and r4 from ^/A/C to branch/C as part of one merge
+  # command, but as separate editor drives, i.e. -c3,4 vs. -r2:4.
+ # These should be equivalent but the former was failing with:
+ #
+ # >svn merge ^/A/C branch\C -c3,4
+ # --- Merging r3 into 'branch\C':
+ # A branch\C\nu
+ # --- Recording mergeinfo for merge of r3 into 'branch\C':
+ # U branch\C
+ # --- Merging r4 into 'branch\C':
+ # D branch\C\nu
+ # --- Recording mergeinfo for merge of r4 into 'branch\C':
+ # G branch\C
+ # ..\..\..\subversion\svn\util.c:913: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_client\merge.c:10873: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_client\merge.c:10837: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_client\merge.c:8994: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_client\merge.c:7923: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_client\mergeinfo.c:257: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_client\mergeinfo.c:97: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_wc\props.c:2003: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_wc\props.c:2024: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_wc\wc_db.c:11473: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_wc\wc_db.c:7247: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_wc\wc_db.c:7232: (apr_err=155010)
+ # svn: E155010: The node 'C:\SVN\src-trunk\Debug\subversion\tests
+ # \cmdline\svn-test-work\working_copies\merge_tests-128\branch\C\nu'
+ # was not found.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[3],[4]],
+ ['A ' + nu_branch_path + '\n',
+ 'D ' + nu_branch_path + '\n',
+ ' U ' + C_branch_path + '\n',
+ ' G ' + C_branch_path + '\n',]),
+ [], 'merge', '-c3,4', sbox.repo_url + '/A/C', C_branch_path)
+
+#----------------------------------------------------------------------
+# Test for issue #4169 'added subtrees with non-inheritable mergeinfo
+# cause spurious subtree mergeinfo'.
+@SkipUnless(server_has_mergeinfo)
+@Issue(4169)
+def merge_with_added_subtrees_with_mergeinfo(sbox):
+ "merge with added subtrees with mergeinfo"
+
+ sbox.build()
+ os.chdir(sbox.wc_dir)
+ sbox.wc_dir = ''
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about.
+ A_path = sbox.ospath('A')
+ Y_path = sbox.ospath('A/C/X/Y')
+ Z_path = sbox.ospath('A/C/X/Y/Z')
+ nu_path = sbox.ospath('A/C/X/Y/Z/nu')
+ A_COPY_path = sbox.ospath('A_COPY')
+ Y_COPY_path = sbox.ospath('A_COPY/C/X/Y')
+ W_COPY_path = sbox.ospath('A_COPY/C/X/Y/Z/W')
+ A_COPY2_path = sbox.ospath('A_COPY_2')
+
+ # Make two branches of ^/A and then make a few edits under A in r4-7:
+ wc_disk, wc_status = set_up_branch(sbox, nbr_of_branches=2)
+
+ # r8 - Add a subtree under A.
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', '--parents',
+ Z_path)
+ svntest.main.file_write(nu_path, "This is the file 'nu'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'add', nu_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir,
+ '-m', 'Add a subtree on our "trunk"')
+
+ # r9 - Sync ^/A to the first branch A_COPY.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ sbox.repo_url + '/A', A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir,
+ '-m', 'Sync ^/A to ^/A_COPY')
+
+ # r10 - Make some edits on the first branch.
+ svntest.actions.run_and_verify_svn(None, [], 'ps', 'branch-prop-foo',
+ 'bar', Y_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', W_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir,
+ '-m', 'Make some edits on "branch 1"')
+
+ # r11 - Cherry-pick r10 on the first branch back to A, but
+ # do so at depth=empty so non-inheritable mergeinfo is created.
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'merge', '-c10', '--depth=empty',
+ sbox.repo_url + '/A_COPY/C/X/Y', Y_path)
+ svntest.actions.run_and_verify_svn(
+ None, [], 'ci', wc_dir,
+ '-m', 'Depth empty subtree cherry pick from "branch 1" to "trunk"')
+
+ # Sync ^/A to the second branch A_COPY_2.
+ #
+ # Previously this failed because spurious mergeinfo was created on
+ # A_COPY_2/C/X/Y/Z:
+ #
+ # >svn merge ^^/A A_COPY_2
+ # --- Merging r3 through r11 into 'A_COPY_2':
+ # U A_COPY_2\B\E\beta
+ # A A_COPY_2\C\X
+ # A A_COPY_2\C\X\Y
+ # A A_COPY_2\C\X\Y\Z
+ # A A_COPY_2\C\X\Y\Z\nu
+ # U A_COPY_2\D\G\rho
+ # U A_COPY_2\D\H\omega
+ # U A_COPY_2\D\H\psi
+ # --- Recording mergeinfo for merge of r3 through r11 into 'A_COPY_2':
+ # U A_COPY_2
+ # --- Recording mergeinfo for merge of r3 through r11 into 'A_COPY_2\C\X\Y':
+ # G A_COPY_2\C\X\Y
+ # vvvvvvvvvvvvvvvvvvvv
+ # U A_COPY_2\C\X\Y\Z
+ # ^^^^^^^^^^^^^^^^^^^^
+ #
+ # >svn pl -vR A_COPY_2
+ # Properties on 'A_COPY_2':
+ # svn:mergeinfo
+ # /A:3-11
+ # Properties on 'A_COPY_2\C\X\Y':
+ # branch-prop-foo
+ # bar
+ # svn:mergeinfo
+ # /A/C/X/Y:8-11
+ # /A_COPY/C/X/Y:10*
+ # vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
+ # Properties on 'A_COPY_2\C\X\Y\Z':
+ # svn:mergeinfo
+ # /A/C/X/Y/Z:8-11
+ # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_output = wc.State(A_COPY2_path, {
+ 'B/E/beta' : Item(status='U '),
+ 'D/G/rho' : Item(status='U '),
+ 'D/H/omega' : Item(status='U '),
+ 'D/H/psi' : Item(status='U '),
+ 'C/X' : Item(status='A '),
+ 'C/X/Y' : Item(status='A '),
+ 'C/X/Y/Z' : Item(status='A '),
+ 'C/X/Y/Z/nu' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY2_path, {
+ '' : Item(status=' U'),
+ 'C/X/Y' : Item(status=' U'), # Added with explicit mergeinfo
+ })
+ expected_elision_output = wc.State(A_COPY2_path, {
+ })
+ expected_status = wc.State(A_COPY2_path, {
+ '' : Item(status=' M', wc_rev=11),
+ 'B' : Item(status=' ', wc_rev=11),
+ 'mu' : Item(status=' ', wc_rev=11),
+ 'B/E' : Item(status=' ', wc_rev=11),
+ 'B/E/alpha' : Item(status=' ', wc_rev=11),
+ 'B/E/beta' : Item(status='M ', wc_rev=11),
+ 'B/lambda' : Item(status=' ', wc_rev=11),
+ 'B/F' : Item(status=' ', wc_rev=11),
+ 'C' : Item(status=' ', wc_rev=11),
+ 'C/X' : Item(status='A ', wc_rev='-', copied='+'),
+ 'C/X/Y' : Item(status=' M', wc_rev='-', copied='+'),
+ 'C/X/Y/Z' : Item(status=' ', wc_rev='-', copied='+'),
+ 'C/X/Y/Z/nu' : Item(status=' ', wc_rev='-', copied='+'),
+ 'D' : Item(status=' ', wc_rev=11),
+ 'D/G' : Item(status=' ', wc_rev=11),
+ 'D/G/pi' : Item(status=' ', wc_rev=11),
+ 'D/G/rho' : Item(status='M ', wc_rev=11),
+ 'D/G/tau' : Item(status=' ', wc_rev=11),
+ 'D/gamma' : Item(status=' ', wc_rev=11),
+ 'D/H' : Item(status=' ', wc_rev=11),
+ 'D/H/chi' : Item(status=' ', wc_rev=11),
+ 'D/H/psi' : Item(status='M ', wc_rev=11),
+ 'D/H/omega' : Item(status='M ', wc_rev=11),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:3-11'}),
+ 'B' : Item(),
+ 'mu' : Item("This is the file 'mu'.\n"),
+ 'B/E' : Item(),
+ 'B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item("New content"),
+ 'B/lambda' : Item("This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'C' : Item(),
+ 'C/X' : Item(),
+ 'C/X/Y' : Item(props={
+ SVN_PROP_MERGEINFO : '/A/C/X/Y:8-11\n/A_COPY/C/X/Y:10*',
+ 'branch-prop-foo' : 'bar'}),
+ 'C/X/Y/Z' : Item(),
+ 'C/X/Y/Z/nu' : Item("This is the file 'nu'.\n"),
+ 'D' : Item(),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("New content"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/psi' : Item("New content"),
+ 'D/H/omega' : Item("New content"),
+ })
+ expected_skip = wc.State(A_COPY_path, { })
+ svntest.actions.run_and_verify_merge(A_COPY2_path, None, None,
+ sbox.repo_url + '/A', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
@SkipUnless(server_has_mergeinfo)
def merge_with_externals_with_mergeinfo(sbox):
  "merge with externals with mergeinfo"

  # Regression test: a reintegrate merge into a parent directory that
  # carries a file external with its own (bogus) svn:mergeinfo used to
  # segfault -- see the link near the reintegrate below.
  sbox.build()
  os.chdir(sbox.wc_dir)
  sbox.wc_dir = ''
  wc_dir = sbox.wc_dir

  # Some paths we'll care about.
  A_path = sbox.ospath('A')
  A_COPY_path = sbox.ospath('A_COPY')
  file_external_path = sbox.ospath('A/file-external')
  mu_COPY_path = sbox.ospath('A_COPY/mu')
  mu_path = sbox.ospath('A/mu')

  # Make a branch of ^/A and then make a few edits under A in r3-6:
  wc_disk, wc_status = set_up_branch(sbox)

  # An edit on the branch, so the reintegrate below has something to
  # carry back to ^/A.
  svntest.main.file_write(mu_COPY_path, "branch edit")
  svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
                                     'file edit on the branch', wc_dir)
  svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)

  # Create a file external under 'A' and set some bogus mergeinfo
  # on it (the fact that this mergeinfo is bogus has no bearing on
  # this test).
  svntest.actions.run_and_verify_svn(None, [], 'propset',
                                     'svn:externals',
                                     '^/iota file-external', A_path)
  svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
                                     'set file external', wc_dir)
  svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
  svntest.actions.run_and_verify_svn(None, [], 'ps', SVN_PROP_MERGEINFO,
                                     "/bogus-mergeinfo:5", file_external_path)
  svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
                                     'set mergeinfo on file external',
                                     file_external_path)

  # Sync merge ^/A to A_COPY and then reintegrate A_COPY back to A.
  svntest.actions.run_and_verify_svn(None, [], 'merge',
                                     sbox.repo_url + '/A', A_COPY_path)
  svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
                                     'sync merge', wc_dir)
  # This was segfaulting, see
  # http://svn.haxx.se/dev/archive-2012-10/0364.shtml
  svntest.actions.run_and_verify_svn(
    expected_merge_output(None,
                          ['U ' + mu_path + '\n',
                           ' U ' + A_path + '\n'],
                          two_url=True),
    [], 'merge', '--reintegrate', sbox.repo_url + '/A_COPY',
    A_path)
+
+#----------------------------------------------------------------------
+# Test merging 'binary' files with keyword expansion enabled.
+# Tests issue #4221 'Trivial merge of a binary file with svn:keywords
+# raises a conflict', among other cases.
@SkipUnless(server_has_mergeinfo)
@Issue(4221)
def merge_binary_file_with_keywords(sbox):
  "merge binary file with keywords"

  sbox.build()
  os.chdir(sbox.wc_dir)
  sbox.wc_dir = ''

  # Some binary files, and some binary files that will become text files.
  # 'mod_src' means a content change on the branch (the merge source);
  # 'mod_tgt' means a content change on the original (the merge target);
  # 'to_txt' means svn:mime-type removed on the branch (the merge source).
  file_mod_both = 'A/B/E/alpha'
  file_mod_src = 'A/D/G/pi'
  file_mod_tgt = 'A/D/G/rho'
  file_mod_none = 'A/D/G/tau'
  file_mod_both_to_txt = 'A/B/E/beta'
  file_mod_src_to_txt = 'A/D/H/chi'
  file_mod_tgt_to_txt = 'A/D/H/psi'
  file_mod_none_to_txt = 'A/D/H/omega'
  files_bin = [ file_mod_both, file_mod_src, file_mod_tgt, file_mod_none ]
  files_txt = [ file_mod_both_to_txt, file_mod_src_to_txt,
                file_mod_tgt_to_txt, file_mod_none_to_txt ]
  files = files_bin + files_txt

  # make some 'binary' files with keyword expansion enabled
  # (svn warns about the binary mime-type on text content; the warning
  # is expected, hence binary_mime_type_on_text_file_warning).
  for f in files:
    svntest.main.file_append(sbox.ospath(f), "With $Revision: $ keyword.\n")
    svntest.main.run_svn(binary_mime_type_on_text_file_warning,
                         'propset', 'svn:mime-type',
                         'application/octet-stream', sbox.ospath(f))
    sbox.simple_propset('svn:keywords', 'Revision', f)
  sbox.simple_commit()

  # branch the files
  sbox.simple_repo_copy('A', 'A2')
  sbox.simple_update()

  # Modify the branched (source) and/or original (target) versions. Remove
  # the svn:mime-type from the 'to_txt' files on the branch.
  # The original bug in issue #4221 gave a conflict if we modified either
  # version or neither (using a single-file merge test case).
  for f in [ file_mod_both, file_mod_both_to_txt,
             file_mod_src, file_mod_src_to_txt ]:
    f_branch = 'A2' + f[1:]
    svntest.main.file_append(sbox.ospath(f_branch), "Incoming mod.\n")
  for f in [ file_mod_both, file_mod_both_to_txt,
             file_mod_tgt, file_mod_tgt_to_txt ]:
    svntest.main.file_append(sbox.ospath(f), "Mod on merge target.\n")
  for f in files_txt:
    f_branch = 'A2' + f[1:]
    sbox.simple_propdel('svn:mime-type', f_branch)
  sbox.simple_commit()
  sbox.simple_update()

  # merge back: files modified on both sides conflict ('C'); the 'to_txt'
  # files additionally get the svn:mime-type property change (second
  # status column), and otherwise-unmodified 'to_txt' targets see only
  # the property change (' U').
  svntest.actions.run_and_verify_svn(
    expected_merge_output([[3,4]],
                          ['C ' + sbox.ospath(file_mod_both) + '\n',
                           'U ' + sbox.ospath(file_mod_src) + '\n',
                           #' ' + sbox.ospath(file_mod_tgt) + '\n',
                           #' ' + sbox.ospath(file_mod_none) + '\n',
                           'CU ' + sbox.ospath(file_mod_both_to_txt) + '\n',
                           'UU ' + sbox.ospath(file_mod_src_to_txt) + '\n',
                           ' U ' + sbox.ospath(file_mod_tgt_to_txt) + '\n',
                           ' U ' + sbox.ospath(file_mod_none_to_txt) + '\n',
                           ' U A\n'],
                          text_conflicts=2),
    [], 'merge', '^/A2', 'A')
+
+#----------------------------------------------------------------------
+# Test for issue #4155 'Merge conflict text of expanded keyword incorrect
+# when svn:keyword property value removed'. Failed in 1.7.0 through 1.7.8.
@SkipUnless(server_has_mergeinfo)
@Issue(4155)
def merge_conflict_when_keywords_removed(sbox):
  "merge conflict when keywords removed"

  sbox.build()
  os.chdir(sbox.wc_dir)
  sbox.wc_dir = ''

  # r2: make a file with keyword expansion enabled
  svntest.main.file_write('A/keyfile', "$Date$ $Revision$\n")
  sbox.simple_add('A/keyfile')
  sbox.simple_propset('svn:keywords', 'Date Revision', 'A/keyfile')
  sbox.simple_commit()
  sbox.simple_update()

  # r3: branch the file
  sbox.simple_repo_copy('A', 'A2')
  sbox.simple_update()

  # r4: edit the file on the original (trunk) side
  svntest.main.file_append('A/keyfile', " some changes\n")
  sbox.simple_commit()

  # r5: sync merge the r3-4 changes to the branch
  svntest.actions.run_and_verify_svn(
    expected_merge_output([[3,4]],
                          ['U '+ sbox.ospath('A2/keyfile') + '\n',
                           ' U A2\n']),
    [], 'merge', '^/A', 'A2')
  sbox.simple_commit()
  sbox.simple_update()

  # r6: modify the original version: disable those KW & enable 'Id'
  sbox.simple_propset('svn:keywords', 'Id', 'A/keyfile')
  svntest.main.file_append('A/keyfile', "$Id$\n")
  sbox.simple_commit()

  # sync merge again (r5-6); per issue #4155 this produced an incorrect
  # conflict text in 1.7.0-1.7.8 because the removed keywords were not
  # handled correctly when merging into the expanded target.
  svntest.actions.run_and_verify_svn(
    expected_merge_output([[5,6]],
                          ['UU ' + sbox.ospath('A2/keyfile') + '\n',
                           ' U A2\n']),
    [], 'merge', '--accept=postpone', '^/A', 'A2')
+
@SkipUnless(server_has_mergeinfo)
@Issue(4139, 3274, 3503)
def merge_target_selection(sbox):
  "merge target selection handling"

  # Check how 'svn merge' selects its default target, and what happens
  # when the named target's node kind does not match the merge source.
  sbox.build()
  os.chdir(sbox.wc_dir)
  sbox.wc_dir = ''

  # r2
  sbox.simple_mkdir('dir')
  sbox.simple_add_text('\1\2\3\4\5', 'dir/binary-file')
  sbox.simple_add_text('abcde', 'dir/text-file')
  sbox.simple_commit()

  # r3
  sbox.simple_copy('dir', 'branch')
  sbox.simple_commit()

  # r4 - modify the binary file on the original directory only
  svntest.main.file_write(sbox.ospath('dir/binary-file'),
                          '\9\8\7\6\5\4\3\2\1')
  sbox.simple_commit()

  sbox.simple_update()

  # All merges below run from inside the branch directory.
  os.chdir(sbox.ospath('branch'))

  # Merge the directory (no target): target defaults to '.'
  expected_output = [
    '--- Merging r4 into \'.\':\n',
    'U binary-file\n',
    '--- Recording mergeinfo for merge of r4 into \'.\':\n',
    ' U .\n',
  ]
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'merge', '^/dir', '-c', '4')

  svntest.main.run_svn(None, 'revert', '-R', '.')

  # Merge the file (no target): the like-named file in cwd is selected
  expected_output = [
    '--- Merging r4 into \'binary-file\':\n',
    'U binary-file\n',
    '--- Recording mergeinfo for merge of r4 into \'binary-file\':\n',
    ' U binary-file\n',
  ]
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'merge', '^/dir/binary-file', '-c', '4')

  svntest.main.run_svn(None, 'revert', '-R', '.')

  # Merge the directory (explicit target)
  expected_output = [
    '--- Merging r4 into \'.\':\n',
    'U binary-file\n',
    '--- Recording mergeinfo for merge of r4 into \'.\':\n',
    ' U .\n',
  ]
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'merge', '^/dir', '-c', '4', '.')

  svntest.main.run_svn(None, 'revert', '-R', '.')

  # Merge the file (explicit target)
  expected_output = [
    '--- Merging r4 into \'binary-file\':\n',
    'U binary-file\n',
    '--- Recording mergeinfo for merge of r4 into \'binary-file\':\n',
    ' U binary-file\n',
  ]
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'merge', '^/dir/binary-file', '-c', '4', 'binary-file')

  svntest.main.run_svn(None, 'revert', '-R', '.')

  # Merge the file (wrong target): a file source merged onto the
  # directory target '.' raises a tree conflict there.
  expected_output = [
    '--- Merging r4 into \'.\':\n',
    ' C .\n',
    '--- Recording mergeinfo for merge of r4 into \'.\':\n',
    ' U .\n',
  ] + svntest.main.summary_of_conflicts(tree_conflicts=1)
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'merge', '^/dir/binary-file',
                                     '-c', '4', '.', '--accept', 'postpone')

  svntest.main.run_svn(None, 'revert', '-R', '.')

  # Merge the dir (wrong target): a directory source merged onto the
  # file target raises a tree conflict on the file.
  expected_output = [
    '--- Merging r4 into \'binary-file\':\n',
    ' C %s\n' % os.path.join('binary-file'),
    '--- Recording mergeinfo for merge of r4 into \'binary-file\':\n',
    ' U binary-file\n',
  ] + svntest.main.summary_of_conflicts(tree_conflicts=1)
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'merge', '^/dir', '-c', '4', 'binary-file',
                                     '--accept', 'postpone')
+
@SkipUnless(server_has_mergeinfo)
@Issue(3405) # seems to be the wrong issue number
def merge_properties_on_adds(sbox):
  "merged directory properties are added"

  sbox.build()
  os.chdir(sbox.wc_dir)
  sbox.wc_dir = ''

  # Branch A/D/G to G; committed below together with the additions, as r2.
  sbox.simple_copy('A/D/G', 'G')

  # Add directories and files under the original A/D/G, each carrying a
  # 'key' property.
  sbox.simple_mkdir('A/D/G/M')
  sbox.simple_mkdir('A/D/G/M/N')
  sbox.simple_add_text('QQ', 'A/D/G/file', 'A/D/G/M/file')
  sbox.simple_propset('key', 'value',
                      'A/D/G/M', 'A/D/G/file', 'A/D/G/M/N', 'A/D/G/M/file')
  sbox.simple_commit()
  sbox.simple_update()

  # Merge the additions (with their properties) to the branch.
  svntest.actions.run_and_verify_svn(None, [],
                                     'merge', '^/A/D/G', sbox.ospath('G'))

  # In the working (ACTUAL) state, every merged-in node carries 'key',
  # and G itself gained svn:mergeinfo.
  expected_output = svntest.verify.UnorderedOutput([
    'Properties on \'%s\':\n' % sbox.ospath('G'),
    ' svn:mergeinfo\n',
    'Properties on \'%s\':\n' % sbox.ospath('G/M'),
    ' key\n',
    'Properties on \'%s\':\n' % sbox.ospath('G/file'),
    ' key\n',
    'Properties on \'%s\':\n' % sbox.ospath('G/M/N'),
    ' key\n',
    'Properties on \'%s\':\n' % sbox.ospath('G/M/file'),
    ' key\n',
  ])
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'proplist', '-R', sbox.ospath('G'))

  expected_output = svntest.verify.UnorderedOutput([
    'Properties on \'%s\':\n' % sbox.ospath('G/M'),
    ' key\n',
    'Properties on \'%s\':\n' % sbox.ospath('G/file'),
    ' key\n',
    'Properties on \'%s\':\n' % sbox.ospath('G/M/N'),
    ' key\n',
    'Properties on \'%s\':\n' % sbox.ospath('G/M/file'),
    ' key\n',
  ])

  # I merged the tree, which should include history but only the files have
  # the properties stored in PRISTINE. All directories have the properties
  # as local changes in ACTUAL.
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'proplist', '-R', sbox.ospath('G'),
                                     '-r', 'BASE')

  # Note that this is not a regression. This has been the case since 1.0.
  # ### We just made status, update and merge handle this without users
  # ### knowing about this limitation.

  # ### My guess is that the base merge support on svn_wc_merge_props()
  # ### was originally designed to resolve this problem, but I can't
  # ### find a released version where this was actually implemented.

  # For fun, also check the status: 'svn status' suppresses the M from AM.

  # G = sbox.ospath('G')
  #
  # expected_status = wc.State('G', {
  #   ''       : Item(status=' M', wc_rev='2'),
  #   'pi'     : Item(status='  ', wc_rev='2'),
  #   'tau'    : Item(status='  ', wc_rev='2'),
  #   'file'   : Item(status='A ', copied='+', wc_rev='-'), # Copied, no changes
  #   'M'      : Item(status='A ', copied='+', wc_rev='-'), # Copied, changes
  #   'M/file' : Item(status='  ', copied='+', wc_rev='-'), # Copied, no changes
  #   'M/N'    : Item(status=' M', copied='+', wc_rev='-'), # Local changes
  #   'rho'    : Item(status='  ', wc_rev='2'),
  # })
  # svntest.actions.run_and_verify_status(G, expected_status)
+
+
+# ======================================================================
+# Functions for parsing mergeinfo
+
def parse_changes_list(changes_string):
  """Parse a string containing a list of revision numbers in the form
  of the '--change' command-line argument (e.g. '1,3,-5,7-10').
  Return a list of elements of the form [[1], [3], [-5], [7,10]].
  """
  parsed = []
  for token in changes_string.split(','):
    # A '-' after the first character separates a two-revision range;
    # a '-' only at the front is just the sign of a reverse change.
    if '-' in token[1:]:
      start, end = token.split('-')
      parsed.append([int(start), int(end)])
    else:
      parsed.append([int(token)])
  return parsed
+
def parse_rev_args(arg_list):
  """Return a list of [rX:rY] or [rZ] elements representing ARG_LIST
  whose elements are strings in the form '-rX:Y' or '-cZ,X-Y,...'.

  A forward '-rX:Y' (X < Y) denotes the changes (X+1):Y; a reverse
  '-rX:Y' (X > Y) denotes X:(Y+1).  Raises ValueError for any argument
  that does not start with '-r' or '-c'.
  """
  rev_ranges = []
  for arg in arg_list:
    kind = arg[:2]
    val = arg[2:]
    if kind == '-r':
      if ':' in val:
        # Use a list comprehension, not map(): on Python 3 map() returns
        # an iterator, which cannot be indexed below.
        revs = [int(x) for x in val.split(':')]
        if revs[0] < revs[1]:
          rev_ranges.append([revs[0] + 1, revs[1]])
        else:
          rev_ranges.append([revs[0], revs[1] + 1])
      else:
        rev_ranges.append([int(val)])
    elif kind == '-c':
      rev_ranges.extend(parse_changes_list(val))
    else:
      # str(arg_list): ARG_LIST is a list, so concatenating it directly
      # onto a string would raise TypeError and mask this ValueError.
      raise ValueError("revision arg '" + arg + "' in '" + str(arg_list) +
                       "' does not start with -r or -c")
  return rev_ranges
+
class RangeList(list):
  """A list of revision ranges, stored as one- or two-element lists:
  [X] means "--revision (X-1):X" and [X,Y] means "--revision (X-1):Y".
  """
  def __init__(self, arg):
    """Initialize from ARG, which is either a mergeinfo-style string
    (parsed with parse_changes_list) or a list of '-r.../-c...'
    argument strings (parsed with parse_rev_args).  The unparsed
    argument is kept in self.as_given.
    """
    self.as_given = arg
    if isinstance(arg, str):
      parsed = parse_changes_list(arg)
    elif isinstance(arg, list):
      parsed = parse_rev_args(arg)
    else:
      raise ValueError("RangeList needs a string or a list, not '" + str(arg) + "'")
    list.__init__(self, parsed)
+
def expected_merge_output2(tgt_ospath,
                           recorded_ranges,
                           merged_ranges=None,
                           prop_conflicts=0,
                           prop_resolved=0):
  """Return an ExpectedOutput instance corresponding to the expected
  output of a merge into TGT_OSPATH, with one 'recording
  mergeinfo...' notification per specified revision range in
  RECORDED_RANGES and one 'merging...' notification per revision
  range in MERGED_RANGES.

  RECORDED_RANGES is a mergeinfo-string or a RangeList.

  MERGED_RANGES is a list of mergeinfo-strings or a list of
  RangeLists. If None, it means [[r] for r in RECORDED_RANGES].
  """
  # Convert RECORDED_RANGES to a RangeList.
  if isinstance(recorded_ranges, str):
    recorded_ranges = RangeList(recorded_ranges)
  # Convert MERGED_RANGES to a list of RangeLists.
  if merged_ranges is None:
    merged_ranges = [[r] for r in recorded_ranges]
  elif len(merged_ranges) > 0 and isinstance(merged_ranges[0], str):
    # List of mergeinfo-strings => list of rangelists
    merged_ranges = [RangeList(r) for r in merged_ranges]

  # Property status letter for the per-drive notification: if conflicts
  # or resolutions are expected, accept 'U' or 'C'.  (Pre-ternary
  # 'and/or' idiom; both string operands are non-empty, hence truthy.)
  status_letters_re = (prop_conflicts or prop_resolved) and ' [UC]' or ' U'
  status_letters_mi = ' [UG]'
  lines = []
  for i, rr in enumerate(recorded_ranges):
    # Merging ...
    for sr in merged_ranges[i]:
      revstart = sr[0]
      # A one-element range has no end revision ('and/or' idiom again).
      revend = len(sr) > 1 and sr[1] or None
      lines += [svntest.main.merge_notify_line(revstart, revend,
                                               target=tgt_ospath)]
      lines += [status_letters_re + ' ' + re.escape(tgt_ospath) + '\n']
    # Recording mergeinfo ...
    revstart = rr[0]
    revend = len(rr) > 1 and rr[1] or None
    lines += [svntest.main.mergeinfo_notify_line(revstart, revend,
                                                 target=tgt_ospath)]
    lines += [status_letters_mi + ' ' + re.escape(tgt_ospath) + '\n']

  # Summary of conflicts
  lines += svntest.main.summary_of_conflicts(prop_conflicts=prop_conflicts,
                                             prop_resolved=prop_resolved,
                                             as_regex=True)

  # The 'match_all=False' is because we also expect some
  # 'Resolved conflicted state of ...' lines.
  return RegexListOutput(lines, match_all=False)
+
def expected_out_and_err(tgt_ospath,
                         recorded_ranges,
                         merged_ranges=None,
                         prop_conflicts=0,
                         prop_resolved=0,
                         expect_error=True):
  """Return a tuple (expected_out, expected_err) giving the expected
  output and expected error output for a merge into TGT_OSPATH. See
  expected_merge_output2() for details of RECORDED_RANGES and
  MERGED_RANGES and PROP_CONFLICTS. EXPECT_ERROR should be true iff
  we expect the merge to abort with an error about conflicts being
  raised.
  """
  expected_out = expected_merge_output2(tgt_ospath, recorded_ranges,
                                        merged_ranges,
                                        prop_conflicts, prop_resolved)
  if expect_error:
    # The E155015 message svn emits when a merge stops early because
    # conflicts were raised before all revisions were merged.
    expected_err = RegexListOutput([
                     '^svn: E155015: .* conflicts were produced .* into$',
                     "^'.*" + re.escape(tgt_ospath) + "' --$",
                     '^resolve all conflicts .* remaining$',
                     '^unmerged revisions$'],
                     match_all=False)
  else:
    expected_err = []

  return expected_out, expected_err
+
def check_mergeinfo(expected_mergeinfo, tgt_ospath):
  """Verify that the svn:mergeinfo property on TGT_OSPATH matches
  EXPECTED_MERGEINFO (a list of expected output lines)."""
  svntest.actions.run_and_verify_svn(expected_mergeinfo, [],
                                     'pg', SVN_PROP_MERGEINFO, tgt_ospath)
+
def simple_merge(src_path, tgt_ospath, rev_args):
  """Merge from ^/SRC_PATH to TGT_OSPATH using revision arguments REV_ARGS
  (list of '-r...' or '-c...' strings); expect a single-target merge
  with no conflicts or errors.
  """
  rev_ranges = RangeList(rev_args)

  # Expect one content update and one mergeinfo-recording notification
  # ('U' or 'G' in the property column) per range, all on the target.
  expected_out = expected_merge_output(rev_ranges,
                                       [' U ' + tgt_ospath + '\n',
                                        ' [UG] ' + tgt_ospath + '\n'],
                                       target=tgt_ospath)
  src_url = '^/' + src_path
  svntest.actions.run_and_verify_svn(
    expected_out, [],
    'merge', src_url, tgt_ospath, '--accept', 'postpone', *rev_args)
+
@SkipUnless(server_has_mergeinfo)
@Issue(4306)
# Test for issue #4306 'multiple editor drive file merges record wrong
# mergeinfo during conflicts'
def conflict_aborted_mergeinfo_described_partial_merge(sbox):
  "conflicted split merge can be repeated"

  sbox.build()
  os.chdir(sbox.wc_dir)
  sbox.wc_dir = ''

  trunk = 'A'
  branch = 'A2'
  file = 'mu'
  dir = 'B'
  trunk_file = 'A/mu'
  trunk_dir = 'A/B'

  # r2: initial state
  for rev in range(4, 11):
    sbox.simple_propset('prop-' + str(rev), 'Old pval ' + str(rev),
                        trunk_file, trunk_dir)
  sbox.simple_commit()

  # r3: branch
  sbox.simple_copy(trunk, branch)
  sbox.simple_commit()

  # The branch-point revision: changes after this are merge-eligible.
  zero_rev = 3

  def edit_file_or_dir(path, rev, val):
    """Make a local edit to the file at PATH."""
    sbox.simple_propset('prop-' + str(rev), val + ' pval ' + str(rev), path)

  # r4 through r10: simple edits
  for rev in range(4, 11):
    edit_file_or_dir(trunk_file, rev, 'Edited')
    edit_file_or_dir(trunk_dir, rev, 'Edited')
    sbox.simple_commit()

  # r11: merge some changes to the branch so that later merges will be split
  # (note: the original comment said r14, but this is the 11th commit)
  svntest.actions.run_and_verify_svn(None, [], 'merge', '-c5,9',
                                     '^/' + trunk, sbox.ospath(branch),
                                     '--accept', 'theirs-conflict')
  sbox.simple_commit()
  sbox.simple_update()

  def revert_branch():
    """Discard all local modifications under the branch."""
    svntest.actions.run_and_verify_svn(None, [], 'revert', '-R',
                                       sbox.ospath(branch))

  def try_merge(relpath, conflict_rev, rev_args,
                expected_out_err, expected_mi):
    """Revert RELPATH in the branch; make a change that will conflict
    with CONFLICT_REV if not None; merge RELPATH in the trunk
    to RELPATH in the branch using revision arguments REV_ARGS (list of
    '-r...' or '-c...' strings).

    EXPECTED_OUT_ERR is a 2-tuple (expected_out, expected_err), both
    instances of ExpectedOutput.

    Expect to find mergeinfo EXPECTED_MI if not None. EXPECTED_MI is
    a single mergeinfo-string.
    """
    src_path = trunk + '/' + relpath
    tgt_path = branch + '/' + relpath
    tgt_ospath = sbox.ospath(tgt_path)

    expected_out, expected_err = expected_out_err

    revert_branch()

    # Arrange for the merge to conflict at CONFLICT_REV.
    if conflict_rev:
      edit_file_or_dir(tgt_path, conflict_rev, 'Conflict')

    src_url = '^/' + src_path
    svntest.actions.run_and_verify_svn(
      expected_out, expected_err,
      'merge', src_url, tgt_ospath, '--accept', 'postpone',
      *rev_args)

    if expected_mi is not None:
      expected_mergeinfo = ['/' + src_path + ':' + expected_mi + '\n']
      check_mergeinfo(expected_mergeinfo, tgt_ospath)

  # In a mergeinfo-aware merge, each specified revision range is split
  # internally into sub-ranges, to avoid any already-merged revisions.
  #
  # From white-box inspection, we see there are code paths that treat
  # the last specified range and the last sub-range specially. The
  # first specified range or sub-range is not treated specially in terms
  # of the code paths, although it might be in terms of data flow.
  #
  # We test merges that raise a conflict in the first and last sub-range
  # of the first and last specified range.

  for target in [file, dir]:

    tgt_ospath = sbox.ospath(branch + '/' + target)

    # First test: Merge "everything" to the branch.
    #
    # This merge is split into three sub-ranges: r3-4, r6-8, r10-head.
    # We have arranged that the merge will raise a conflict in the first
    # sub-range. Since we are postponing conflict resolution, the merge
    # should stop after the first sub-range, allowing us to resolve and
    # repeat the merge at which point the next sub-range(s) can be merged.
    # The mergeinfo on the target then should only reflect that the first
    # sub-range (r3-4) has been merged.
    #
    # Previously the merge failed after merging only r3-4 (as it should)
    # but mergeinfo for the whole range was recorded, preventing subsequent
    # repeat merges from applying the rest of the source changes.
    expect = expected_out_and_err(tgt_ospath,
                                  '3-4', ['3-4'],
                                  prop_conflicts=1)
    try_merge(target, 4, [], expect, '3-5,9')

    # Try a multiple-range merge that raises a conflict in the
    # first sub-range in the first specified range;
    expect = expected_out_and_err(tgt_ospath,
                                  '4', ['4'],
                                  prop_conflicts=1)
    try_merge(target, 4, ['-c4-6,8-10'], expect, '4-5,9')
    # last sub-range in the first specified range;
    expect = expected_out_and_err(tgt_ospath,
                                  '4-6', ['4,6'],
                                  prop_conflicts=1)
    try_merge(target, 6, ['-c4-6,8-10'], expect, '4-6,9')
    # first sub-range in the last specified range;
    expect = expected_out_and_err(tgt_ospath,
                                  '4-6,8', ['4,6', '8'],
                                  prop_conflicts=1)
    try_merge(target, 8, ['-c4-6,8-10'], expect, '4-6,8-9')
    # last sub-range in the last specified range.
    # (Expect no error, because 'svn merge' does not throw an error if
    # there is no more merging to do when a conflict occurs.)
    expect = expected_out_and_err(tgt_ospath,
                                  '4-6,8-10', ['4,6', '8,10'],
                                  prop_conflicts=1, expect_error=False)
    try_merge(target, 10, ['-c4-6,8-10'], expect, '4-6,8-10')

    # Try similar merges but involving ranges in reverse order.
    expect = expected_out_and_err(tgt_ospath,
                                  '8', ['8'],
                                  prop_conflicts=1)
    try_merge(target, 8, ['-c8-10,4-6'], expect, '5,8-9')
    expect = expected_out_and_err(tgt_ospath,
                                  '8-10', ['8,10'],
                                  prop_conflicts=1)
    try_merge(target, 10, ['-c8-10,4-6'], expect, '5,8-10')
    expect = expected_out_and_err(tgt_ospath,
                                  '8-10,4', ['8,10', '4'],
                                  prop_conflicts=1)
    try_merge(target, 4, ['-c8-10,4-6'], expect, '4-5,8-10')
    expect = expected_out_and_err(tgt_ospath,
                                  '8-10,4-6', ['8,10', '4,6'],
                                  prop_conflicts=1, expect_error=False)
    try_merge(target, 6, ['-c8-10,4-6'], expect, '4-6,8-10')

    # Try some reverse merges, with ranges in forward and reverse order.
    #
    # Reverse merges start with all source changes merged except 5 and 9.
    revert_branch()
    simple_merge(trunk + '/' + target, sbox.ospath(branch + '/' + target),
                 ['-c-5,-9,4,6-8,10'])
    sbox.simple_commit()
    sbox.simple_update()

    expect = expected_out_and_err(tgt_ospath,
                                  '6-4,10-8', ['-6,-4', '-10,-8'],
                                  expect_error=False)
    try_merge(target, None, ['-r6:3', '-r10:7'], expect, '7')
    expect = expected_out_and_err(tgt_ospath,
                                  '-6', ['-6'],
                                  prop_conflicts=1)
    try_merge(target, 6, ['-r6:3', '-r10:7'], expect, '4,7-8,10')
    expect = expected_out_and_err(tgt_ospath,
                                  '6-4', ['-6,-4'],
                                  prop_conflicts=1)
    try_merge(target, 4, ['-r6:3', '-r10:7'], expect, '7-8,10')
    expect = expected_out_and_err(tgt_ospath,
                                  '6-4,-10', ['-6,-4', '-10'],
                                  prop_conflicts=1)
    try_merge(target, 10, ['-r6:3', '-r10:7'], expect, '7-8')
    expect = expected_out_and_err(tgt_ospath,
                                  '6-4,10-8', ['-6,-4', '-10,-8'],
                                  prop_conflicts=1, expect_error=False)
    try_merge(target, 8, ['-r6:3', '-r10:7'], expect, '7')
+
@SkipUnless(server_has_mergeinfo)
@Issue(4310)
# Test for issue #4310 "each editor drive gets its own notification
# during 'svn merge'"
def multiple_editor_drive_merge_notifications(sbox):
  "each editor drive gets its own notification"

  sbox.build()
  os.chdir(sbox.wc_dir)
  sbox.wc_dir = ''

  iota_branch_path = sbox.ospath('iota-copy')
  C_branch_path = sbox.ospath('branch')

  # Branch a file and a directory:

  # r2
  sbox.simple_copy('iota', 'iota-copy')
  sbox.simple_commit()

  # r3
  sbox.simple_copy('A/C', 'branch')
  sbox.simple_commit()

  # r4-8 - Set five non-conflicting properties on the branch parents.
  for i in range(0,5):
    sbox.simple_propset('foo' + str(i) , 'bar', 'iota')
    sbox.simple_propset('foo' + str(i) , 'bar', 'A/C')
    sbox.simple_commit()

  # Cherry pick merge r5 and r7 to each branch and commit.
  # (This leaves gaps so the later catch-up merge needs three separate
  # editor drives: r2-4, r6, and r8-9.)
  svntest.actions.run_and_verify_svn(None, [], 'merge', '^/iota',
                                     '-c', '5,7', iota_branch_path)
  svntest.actions.run_and_verify_svn(None, [], 'merge', '^/A/C',
                                     '-c', '5,7', C_branch_path)
  sbox.simple_commit()

  # Now auto merge all eligible revisions to each branch.
  # First the directory target:
  #
  # TODO: We don't use run_and_verify_merge here because it has limitations
  # re checking the merge notification headers -- which need to be improved
  # at some point.
  svntest.actions.run_and_verify_svn(
    ["--- Merging r2 through r4 into '" + C_branch_path + "':\n",
     " U " + C_branch_path + "\n",
     "--- Merging r6 into '" + C_branch_path + "':\n",
     " U " + C_branch_path + "\n",
     "--- Merging r8 through r9 into '" + C_branch_path + "':\n",
     " U " + C_branch_path + "\n",
     "--- Recording mergeinfo for merge of r2 through r9 into '" +
     C_branch_path + "':\n",
     " U " + C_branch_path + "\n"],
    [], 'merge', sbox.repo_url + '/A/C', C_branch_path)

  # Then the file target:
  # Previously this failed because only the first range notification was
  # printed:
  #
  # >svn merge ^/iota iota-copy
  # --- Merging r2 through r4 into 'iota-copy':
  #  U   iota-copy
  #  U   iota-copy
  #  U   iota-copy
  # --- Recording mergeinfo for merge of r2 through r9 into 'iota-copy':
  #  U   iota-copy
  #
  # This is what we expect:
  #
  # --- Merging r2 through r4 into 'iota-copy':
  #  U   iota-copy
  # --- Merging r6 into 'iota-copy': <-- 2nd editor drive
  #  U   iota-copy
  # --- Merging r8 through r9 into 'iota-copy': <-- 3rd editor drive
  #  U   iota-copy
  # --- Recording mergeinfo for merge of r2 through r9 into 'iota-copy':
  #  U   iota-copy
  svntest.actions.run_and_verify_svn(
    ["--- Merging r2 through r4 into '" + iota_branch_path + "':\n",
     " U " + iota_branch_path + "\n",
     "--- Merging r6 into '" + iota_branch_path + "':\n",
     " U " + iota_branch_path + "\n",
     "--- Merging r8 through r9 into '" + iota_branch_path + "':\n",
     " U " + iota_branch_path + "\n",
     "--- Recording mergeinfo for merge of r2 through r9 into '" +
     iota_branch_path + "':\n",
     " U " + iota_branch_path + "\n"],
    [], 'merge', sbox.repo_url + '/iota', iota_branch_path)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+@Issue(4317)
+# Test for issue #4317 "redundant notifications in single editor drive merge".
+def single_editor_drive_merge_notifications(sbox):
+  "single editor drive merge notifications"
+  sbox.build()
+  # Run everything from inside the working copy so that the notification
+  # paths we match against are WC-relative.
+  os.chdir(sbox.wc_dir)
+  sbox.wc_dir = ''
+  wc_dir = sbox.wc_dir
+
+  A_copy_path = sbox.ospath('A_COPY')
+  D_copy_path = sbox.ospath('A_COPY/D')
+  psi_copy_path = sbox.ospath('A_COPY/D/H/psi')
+  omega_copy_path = sbox.ospath('A_COPY/D/H/omega')
+  beta_copy_path = sbox.ospath('A_COPY/B/E/beta')
+
+  # r2 - r6: Copy A to A_COPY and then make some text changes under A.
+  set_up_branch(sbox)
+
+  # r7 - Subtree merge: cherry-pick r4 into A_COPY/D only, so that the
+  # later merge of ^/A has to cope with differing subtree mergeinfo.
+  svntest.actions.run_and_verify_svn(None, [], 'merge', '^/A/D',
+                                     '-c4', D_copy_path)
+  sbox.simple_commit()
+  sbox.simple_update()
+
+  # Previously this failed because of redundant merge notifications
+  # for r4-7:
+  #
+  #   >svn merge ^/A A_COPY
+  #   --- Merging r2 through r3 into 'A_COPY\D':
+  #   U    A_COPY\D\H\psi
+  #   --- Merging r5 through r7 into 'A_COPY\D':
+  #   U    A_COPY\D\H\omega
+  #   --- Merging r4 through r7 into 'A_COPY':
+  #   U    A_COPY\B\E\beta
+  #   --- Recording mergeinfo for merge of r2 through r7 into 'A_COPY':
+  #    U   A_COPY
+  #   --- Recording mergeinfo for merge of r2 through r7 into 'A_COPY\D':
+  #    U   A_COPY\D
+  #   --- Eliding mergeinfo from 'A_COPY\D':
+  #    U   A_COPY\D
+  #
+  # The order of 'beta' and 'omega' can vary, so use UnorderedOutput.  This
+  # raises the possibility that the test could spuriously pass if the 'U'pdate
+  # notifications aren't grouped with the correct headers, but that's not what
+  # is being tested here.
+  expected_output = svntest.verify.UnorderedOutput(
+    ["--- Merging r2 through r3 into '" + A_copy_path + "':\n",
+     "U    " + psi_copy_path + "\n",
+     "--- Merging r4 through r7 into '" + A_copy_path + "':\n",
+     "U    " + omega_copy_path + "\n",
+     "U    " + beta_copy_path + "\n",
+     "--- Recording mergeinfo for merge of r2 through r7 into '" +
+     A_copy_path + "':\n",
+     " U   " + A_copy_path + "\n",
+     "--- Recording mergeinfo for merge of r2 through r7 into '" +
+     D_copy_path + "':\n",
+     " U   " + D_copy_path + "\n",
+     "--- Eliding mergeinfo from '" + D_copy_path + "':\n",
+     " U   " + D_copy_path + "\n"])
+  svntest.actions.run_and_verify_svn(expected_output, [], 'merge',
+                                     sbox.repo_url + '/A', A_copy_path)
+
+  # r8 and r9 - Commit and do reverse subtree merge.
+  sbox.simple_commit()
+  sbox.simple_update()
+  svntest.actions.run_and_verify_svn(None, [], 'merge', '^/A/D',
+                                     '-c-4', D_copy_path)
+  sbox.simple_commit()
+
+  # Now try a reverse merge.  There should only be one notification for
+  # r7-5:
+  sbox.simple_update()
+  expected_output = svntest.verify.UnorderedOutput(
+    ["--- Reverse-merging r7 through r5 into '" + A_copy_path + "':\n",
+     "U    " + beta_copy_path + "\n",
+     "U    " + omega_copy_path + "\n",
+     "--- Reverse-merging r4 through r3 into '" + A_copy_path + "':\n",
+     "U    " + psi_copy_path + "\n",
+     "--- Recording mergeinfo for reverse merge of r7 through r3 into '" +
+     A_copy_path + "':\n",
+     " U   " + A_copy_path + "\n",
+     "--- Recording mergeinfo for reverse merge of r7 through r3 into '" +
+     D_copy_path + "':\n",
+     " U   " + D_copy_path + "\n",
+     "--- Eliding mergeinfo from '" + D_copy_path + "':\n",
+     " U   " + D_copy_path + "\n"])
+  svntest.actions.run_and_verify_svn(expected_output, [], 'merge',
+                                     '-r9:2', sbox.repo_url + '/A',
+                                     A_copy_path)
+
+@SkipUnless(server_has_mergeinfo)
+@Issue(4316) # 'Merge errors out after resolving conflicts'
+# Very similar to conflict_aborted_mergeinfo_described_partial_merge()
+# (test number 135), except here we tell the merge to resolve the
+# conflicts that are generated part way through a multi-revision-range
+# merge, and we expect it to continue with the rest of the merge.
+def conflicted_split_merge_with_resolve(sbox):
+  "conflicted split merge with resolve"
+
+  sbox.build()
+  # Run from inside the WC so all expected paths are WC-relative.
+  os.chdir(sbox.wc_dir)
+  sbox.wc_dir = ''
+
+  trunk = 'A'
+  branch = 'A2'
+  file = 'mu'
+  dir = 'B'
+  trunk_file = 'A/mu'
+  trunk_dir = 'A/B'
+
+  # r2: initial state.  Create properties prop-4 ... prop-10 on the trunk
+  # file and dir; revisions r4-r10 will then edit one property each.
+  for rev in range(4, 11):
+    sbox.simple_propset('prop-' + str(rev), 'Old pval ' + str(rev),
+                        trunk_file, trunk_dir)
+  sbox.simple_commit()
+
+  # r3: branch
+  sbox.simple_update()
+  sbox.simple_copy(trunk, branch)
+  sbox.simple_commit()
+
+  # NOTE(review): zero_rev appears to be unused in this test; presumably
+  # kept for parity with test 135 — verify before removing.
+  zero_rev = 3
+
+  def edit_file_or_dir(path, rev, val):
+    """Make a local edit to the file at PATH."""
+    sbox.simple_propset('prop-' + str(rev), val + ' pval ' + str(rev), path)
+
+  # r4 through r10: simple edits
+  for rev in range(4, 11):
+    edit_file_or_dir(trunk_file, rev, 'Edited')
+    edit_file_or_dir(trunk_dir, rev, 'Edited')
+    sbox.simple_commit()
+
+  # r11: merge some changes to the branch so that later merges will be split.
+  # (Seven commits r4-r10 follow the r3 branch, so this commit is r11,
+  # matching the '@11' peg revision used in try_merge() below.)
+  svntest.actions.run_and_verify_svn(None, [], 'merge', '-c5,9',
+                                     '^/' + trunk, sbox.ospath(branch),
+                                     '--accept', 'theirs-conflict')
+  sbox.simple_commit()
+  sbox.simple_update()
+
+  def revert_branch():
+    """Discard all local modifications in the branch."""
+    svntest.actions.run_and_verify_svn(None, [], 'revert', '-R',
+                                       sbox.ospath(branch))
+
+  def try_merge(relpath, conflict_rev, rev_args,
+                expected_out_err, expected_mi):
+    """Revert RELPATH in the branch; make a change that will conflict
+    with CONFLICT_REV if not None; merge RELPATH in the trunk
+    to RELPATH in the branch using revision arguments REV_ARGS (list of
+    '-r...' or '-c...' strings).
+
+    EXPECTED_OUT_ERR is a tuple: (expected_out, expected_err), both
+    instances of ExpectedOutput.
+
+    Expect to find mergeinfo EXPECTED_MI if not None.  EXPECTED_MI is
+    a single mergeinfo-string.
+    """
+    src_path = trunk + '/' + relpath
+    tgt_path = branch + '/' + relpath
+    tgt_ospath = sbox.ospath(tgt_path)
+
+    expected_out, expected_err = expected_out_err
+
+    revert_branch()
+
+    # Arrange for the merge to conflict at CONFLICT_REV.
+    if conflict_rev:
+      edit_file_or_dir(tgt_path, conflict_rev, 'Conflict')
+
+    src_url = '^/' + src_path + '@11'
+    svntest.actions.run_and_verify_svn(
+      expected_out, expected_err,
+      'merge', src_url, tgt_ospath, '--accept', 'mine-full',
+      *rev_args)
+
+    if expected_mi is not None:
+      expected_mergeinfo = ['/' + src_path + ':' + expected_mi + '\n']
+      check_mergeinfo(expected_mergeinfo, tgt_ospath)
+
+  # In a mergeinfo-aware merge, each specified revision range is split
+  # internally into sub-ranges, to avoid any already-merged revisions.
+  #
+  # From white-box inspection, we see there are code paths that treat
+  # the last specified range and the last sub-range specially.  The
+  # first specified range or sub-range is not treated specially in terms
+  # of the code paths, although it might be in terms of data flow.
+  #
+  # We test merges that raise a conflict in the first and last sub-range
+  # of the first and last specified range.
+
+  for target in [file, dir]:
+
+    tgt_ospath = sbox.ospath(branch + '/' + target)
+
+    # First test: Merge "everything" to the branch.
+    #
+    # This merge is split into three sub-ranges: r3-4, r6-8, r10-head.
+    # We have arranged that the merge will raise a conflict in the first
+    # sub-range.  Since we tell the merge to resolve each conflict as it
+    # arises ('--accept mine-full'), it continues through the remaining
+    # sub-ranges, and the mergeinfo on the target afterwards reflects
+    # the whole requested range (r3-11).
+    expect = expected_out_and_err(tgt_ospath,
+                                  '3-4,6-11',
+                                  ['3-4', '6-8,10-11'],
+                                  prop_resolved=1, expect_error=False)
+    try_merge(target, 4, [], expect, '3-11')
+
+    # Try a multiple-range merge that raises a conflict in the
+    # first sub-range in the first specified range;
+    expect = expected_out_and_err(tgt_ospath,
+                                  '4,6,8-10',
+                                  ['4', '6', '8,10'],
+                                  prop_resolved=1, expect_error=False)
+    try_merge(target, 4, ['-c4-6,8-10'], expect, '4-6,8-10')
+    # last sub-range in the first specified range;
+    expect = expected_out_and_err(tgt_ospath,
+                                  '4-6,8-10', ['4,6', '8,10'],
+                                  prop_resolved=1, expect_error=False)
+    try_merge(target, 6, ['-c4-6,8-10'], expect, '4-6,8-10')
+    # first sub-range in the last specified range;
+    expect = expected_out_and_err(tgt_ospath,
+                                  '4-6,8,10',
+                                  ['4,6', '8', '10'],
+                                  prop_resolved=1, expect_error=False)
+    try_merge(target, 8, ['-c4-6,8-10'], expect, '4-6,8-10')
+    # last sub-range in the last specified range.
+    # (Expect no error, because 'svn merge' does not throw an error if
+    # there is no more merging to do when a conflict occurs.)
+    expect = expected_out_and_err(tgt_ospath,
+                                  '4-6,8-10', ['4,6', '8,10'],
+                                  prop_resolved=1, expect_error=False)
+    try_merge(target, 10, ['-c4-6,8-10'], expect, '4-6,8-10')
+
+    # Try similar merges but involving ranges in reverse order.
+    expect = expected_out_and_err(tgt_ospath,
+                                  '8', ['8'],
+                                  prop_resolved=1, expect_error=False)
+    try_merge(target, 8, ['-c8-10,4-6'], expect, '4-6,8-10')
+    expect = expected_out_and_err(tgt_ospath,
+                                  '8-10', ['8,10'],
+                                  prop_resolved=1, expect_error=False)
+    try_merge(target, 10, ['-c8-10,4-6'], expect, '4-6,8-10')
+    expect = expected_out_and_err(tgt_ospath,
+                                  '8-10,4', ['8,10', '4'],
+                                  prop_resolved=1, expect_error=False)
+    try_merge(target, 4, ['-c8-10,4-6'], expect, '4-6,8-10')
+    expect = expected_out_and_err(tgt_ospath,
+                                  '8-10,4-6', ['8,10', '4,6'],
+                                  prop_resolved=1, expect_error=False)
+    try_merge(target, 6, ['-c8-10,4-6'], expect, '4-6,8-10')
+
+    # Try some reverse merges, with ranges in forward and reverse order.
+    #
+    # Reverse merges start with all source changes merged except 5 and 9.
+    revert_branch()
+    simple_merge(trunk + '/' + target, sbox.ospath(branch + '/' + target),
+                 ['-c-5,-9,4,6-8,10'])
+    sbox.simple_commit()
+    sbox.simple_update()
+
+    expect = expected_out_and_err(tgt_ospath,
+                                  '6-4,10-8', ['-6,-4', '-10,-8'],
+                                  expect_error=False)
+    try_merge(target, None, ['-r6:3', '-r10:7'], expect, '7')
+    expect = expected_out_and_err(tgt_ospath,
+                                  '-6,-4,10-8',
+                                  ['-6', '-4', '-10,-8'],
+                                  prop_resolved=1, expect_error=False)
+    try_merge(target, 6, ['-r6:3', '-r10:7'], expect, '7')
+    expect = expected_out_and_err(tgt_ospath,
+                                  '6-4,10-8', ['-6,-4', '-10,-8'],
+                                  prop_resolved=1, expect_error=False)
+    try_merge(target, 4, ['-r6:3', '-r10:7'], expect, '7')
+    expect = expected_out_and_err(tgt_ospath,
+                                  '6-4,-10,-8',
+                                  ['-6,-4', '-10', '-8'],
+                                  prop_resolved=1, expect_error=False)
+    try_merge(target, 10, ['-r6:3', '-r10:7'], expect, '7')
+    expect = expected_out_and_err(tgt_ospath,
+                                  '6-4,10-8', ['-6,-4', '-10,-8'],
+                                  prop_resolved=1, expect_error=False)
+    try_merge(target, 8, ['-r6:3', '-r10:7'], expect, '7')
+
+#----------------------------------------------------------------------
+# Test for issue 4367 'merge to shallow WC, repeat merge to infinite
+# depth WC is broken'.
+@SkipUnless(server_has_mergeinfo)
+@Issues(4367)
+def merge_to_empty_target_merge_to_infinite_target(sbox):
+  "repeat merge to infinite depth WC conflicts"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  wc_disk, wc_status = set_up_branch(sbox, branch_only=True)
+  A_COPY_path = sbox.ospath('A_COPY')
+  C_COPY_path = sbox.ospath('A_COPY/C')
+  # NOTE(review): several of the following paths (E, J, K, nu1-nu3, L)
+  # are set up but never used by this test — verify before removing.
+  E_path = sbox.ospath('A/B/E')
+  J_path = sbox.ospath('A/C/J')
+  K_path = sbox.ospath('A/C/J/K')
+  nu1_path = sbox.ospath('A/C/J/nu1')
+  nu2_path = sbox.ospath('A/C/J/K/nu2')
+  L_path = sbox.ospath('A/B/L')
+  nu3_path = sbox.ospath('A/B/L/nu3')
+
+  B1_path = sbox.ospath('A/B/B1')
+  B1a_path = sbox.ospath('A/B/B1/B1a')
+  test1_path = sbox.ospath('A/B/B1/test.txt')
+  test2_path = sbox.ospath('A/B/B1/B1a/test.txt')
+
+  C1_path = sbox.ospath('A/C/C1')
+  test3_path = sbox.ospath('A/C/C1/test.txt')
+
+  # r3 - Add some subtrees:
+  #   A /A/B/B1
+  #   A /A/B/B1/B1a
+  #   A /A/B/B1/B1a/test.txt
+  #   A /A/B/B1/test.txt
+  svntest.main.run_svn(None, 'mkdir', B1_path)
+  svntest.main.run_svn(None, 'mkdir', B1a_path)
+  svntest.main.file_append(test1_path, "New file.\n")
+  svntest.main.file_append(test2_path, "New file.\n")
+  svntest.main.run_svn(None, 'add', test1_path, test2_path)
+  sbox.simple_commit()
+
+  # r4 - Add another subtree:
+  #   A /A/C/C1
+  #   A /A/C/C1/test.txt
+  svntest.main.run_svn(None, 'mkdir', C1_path)
+  svntest.main.file_append(test3_path, "New file.\n")
+  svntest.main.run_svn(None, 'add', test3_path)
+  sbox.simple_commit()
+
+  # r5 - Delete part of the subtree added in r3.
+  #   D /A/B/B1/B1a
+  svntest.main.run_svn(None, 'del', B1a_path)
+  sbox.simple_commit()
+
+  # r6 - Set depth of A_COPY to empty, merge all available revs from ^/A.
+  svntest.actions.run_and_verify_svn(None, [], 'up',
+                                     '--set-depth=empty', A_COPY_path)
+  svntest.actions.run_and_verify_svn(None, [], 'up',
+                                     '--set-depth=infinity', C_COPY_path)
+  svntest.actions.run_and_verify_svn(None, [], 'merge', '^/A',
+                                     A_COPY_path)
+  sbox.simple_commit()
+
+  # Update A_COPY back to depth infinity and retry the prior merge.
+  svntest.actions.run_and_verify_svn(None, [], 'up',
+                                     '--set-depth=infinity', A_COPY_path)
+
+  expected_output = wc.State(A_COPY_path, {
+    'B/B1'              : Item(status='A '),
+    'B/B1/test.txt'     : Item(status='A '),
+    'B/B1/B1a'          : Item(status='D ', prev_status='A '),
+    'B/B1/B1a/test.txt' : Item(status='A '),
+    })
+  expected_mergeinfo_output = wc.State(A_COPY_path, {
+    ''  : Item(status=' U'),
+    'B' : Item(status=' G'),
+    })
+  expected_elision_output = wc.State(A_COPY_path, {
+    'B' : Item(status=' U'),
+    })
+  expected_status = wc.State(A_COPY_path, {
+    ''                  : Item(status=' M'),
+    'B'                 : Item(status='  '),
+    'mu'                : Item(status='  '),
+    'B/B1'              : Item(status='A ', copied='+'),
+    'B/B1/test.txt'     : Item(status='  ', copied='+'),
+    'B/B1/B1a'          : Item(status='D ', copied='+'),
+    'B/B1/B1a/test.txt' : Item(status='D ', copied='+'),
+    'B/E'               : Item(status='  '),
+    'B/E/alpha'         : Item(status='  '),
+    'B/E/beta'          : Item(status='  '),
+    'B/lambda'          : Item(status='  '),
+    'B/F'               : Item(status='  '),
+    'C'                 : Item(status='  '),
+    'C/C1'              : Item(status='  '),
+    'C/C1/test.txt'     : Item(status='  '),
+    'D'                 : Item(status='  '),
+    'D/G'               : Item(status='  '),
+    'D/G/pi'            : Item(status='  '),
+    'D/G/rho'           : Item(status='  '),
+    'D/G/tau'           : Item(status='  '),
+    'D/gamma'           : Item(status='  '),
+    'D/H'               : Item(status='  '),
+    'D/H/chi'           : Item(status='  '),
+    'D/H/psi'           : Item(status='  '),
+    'D/H/omega'         : Item(status='  '),
+    })
+  expected_status.tweak(wc_rev=6)
+  expected_status.tweak('B/B1', 'B/B1/test.txt', 'B/B1/B1a',
+                        'B/B1/B1a/test.txt', wc_rev='-')
+  expected_disk = wc.State('', {
+    ''              : Item(props={SVN_PROP_MERGEINFO : '/A:2-6'}),
+    'B'             : Item(),
+    'mu'            : Item("This is the file 'mu'.\n"),
+    'B/B1'          : Item(),
+    'B/B1/test.txt' : Item("New file.\n"),
+    'B/E'           : Item(),
+    'B/E/alpha'     : Item("This is the file 'alpha'.\n"),
+    'B/E/beta'      : Item("This is the file 'beta'.\n"),
+    'B/lambda'      : Item("This is the file 'lambda'.\n"),
+    'B/F'           : Item(),
+    'C'             : Item(props={SVN_PROP_MERGEINFO : '/A/C:2-5'}),
+    'C/C1'          : Item(),
+    'C/C1/test.txt' : Item("New file.\n"),
+    'D'             : Item(),
+    'D/G'           : Item(),
+    'D/G/pi'        : Item("This is the file 'pi'.\n"),
+    'D/G/rho'       : Item("This is the file 'rho'.\n"),
+    'D/G/tau'       : Item("This is the file 'tau'.\n"),
+    'D/gamma'       : Item("This is the file 'gamma'.\n"),
+    'D/H'           : Item(),
+    'D/H/chi'       : Item("This is the file 'chi'.\n"),
+    'D/H/psi'       : Item("This is the file 'psi'.\n"),
+    'D/H/omega'     : Item("This is the file 'omega'.\n"),
+    })
+  expected_skip = wc.State(A_COPY_path, { })
+  svntest.actions.run_and_verify_merge(A_COPY_path, None, None,
+                                       sbox.repo_url + '/A', None,
+                                       expected_output,
+                                       expected_mergeinfo_output,
+                                       expected_elision_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       [], True, False)
+
+  # Commit the merge.
+  #sbox.simple_commit()
+
+def conflict_naming(sbox):
+  "verify conflict file naming"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  # r2: add file.txt with some initial content.
+  sbox.simple_append('file.txt', 'This is the initial content\n')
+  sbox.simple_add('file.txt')
+  sbox.simple_commit()
+
+  # r3: replace the content.
+  sbox.simple_append('file.txt', 'This is the new content\n', truncate=True)
+  sbox.simple_commit()
+
+  # Local mod that will conflict with the incoming change.
+  sbox.simple_append('file.txt', 'This is conflicting content\n', truncate=True)
+
+  # Update - no preserve ext
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+  expected_disk = svntest.main.greek_state.copy()
+  expected_output = svntest.wc.State(wc_dir, {
+    'file.txt' : Item(status='C ')
+  })
+  expected_status.add({
+    'file.txt' : Item(status='C ', wc_rev='2')
+  })
+
+  # Conflict artifacts use the bare .rN / .mine suffixes.
+  expected_disk.add({
+    'file.txt.r3'   : Item(contents="This is the new content\n"),
+    'file.txt.r2'   : Item(contents="This is the initial content\n"),
+    'file.txt'      : Item(contents="<<<<<<< .mine\n" \
+                           "This is conflicting content\n" \
+                           "||||||| .r3\n" \
+                           "This is the new content\n" \
+                           "=======\n" \
+                           "This is the initial content\n" \
+                           ">>>>>>> .r2\n"),
+    'file.txt.mine' : Item(contents="This is conflicting content\n"),
+  })
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output, expected_disk,
+                                        expected_status,
+                                        [], False,
+                                        wc_dir, '-r', '2')
+
+  sbox.simple_revert('file.txt')
+  sbox.simple_update('', revision=3)
+  sbox.simple_append('file.txt', 'This is conflicting content\n', truncate=True)
+
+  # Update - preserve ext
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+  expected_disk = svntest.main.greek_state.copy()
+  expected_output = svntest.wc.State(wc_dir, {
+    'file.txt' : Item(status='C ')
+  })
+  expected_status.add({
+    'file.txt' : Item(status='C ', wc_rev='2')
+  })
+
+  # With preserved-conflict-file-exts, the original '.txt' extension is
+  # re-appended after each conflict suffix.
+  expected_disk.add({
+    'file.txt.r3.txt'   : Item(contents="This is the new content\n"),
+    'file.txt.r2.txt'   : Item(contents="This is the initial content\n"),
+    'file.txt'          : Item(contents="<<<<<<< .mine.txt\n" \
+                               "This is conflicting content\n" \
+                               "||||||| .r3.txt\n" \
+                               "This is the new content\n" \
+                               "=======\n" \
+                               "This is the initial content\n" \
+                               ">>>>>>> .r2.txt\n"),
+    'file.txt.mine.txt' : Item(contents="This is conflicting content\n"),
+  })
+  svntest.actions.run_and_verify_update(
+    wc_dir,
+    expected_output, expected_disk, expected_status,
+    [], False,
+    wc_dir, '-r', '2',
+    '--config-option',
+    'config:miscellany:preserved-conflict-file-exts=' +
+    'c txt h')
+
+  sbox.simple_revert('file.txt')
+  sbox.simple_update('', revision=3)
+  sbox.simple_append('file.txt', 'This is conflicting content\n', truncate=True)
+
+  # Merge - no preserve ext
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+  expected_disk = svntest.main.greek_state.copy()
+  expected_status.add({
+    'file.txt' : Item(status='C ', wc_rev='3')
+  })
+  # Merge conflicts use .merge-left/.merge-right/.working suffixes.
+  expected_disk.add({
+    'file.txt.merge-left.r3' : Item(contents="This is the new content\n"),
+    'file.txt.merge-right.r2': Item(contents="This is the initial content\n"),
+    'file.txt'               : Item(contents="<<<<<<< .working\n" \
+                               "This is conflicting content\n" \
+                               "||||||| .merge-left.r3\n" \
+                               "This is the new content\n" \
+                               "=======\n" \
+                               "This is the initial content\n" \
+                               ">>>>>>> .merge-right.r2\n"),
+    'file.txt.working'       : Item(contents="This is conflicting content\n"),
+  })
+
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'merge', '-c-3', '^/', sbox.ospath(''))
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+  svntest.actions.verify_disk(wc_dir, expected_disk)
+
+  sbox.simple_revert('file.txt')
+  sbox.simple_append('file.txt', 'This is conflicting content\n', truncate=True)
+
+  # Merge - preserve ext
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+  expected_disk = svntest.main.greek_state.copy()
+  expected_status.add({
+    'file.txt' : Item(status='C ', wc_rev='3')
+  })
+  expected_disk.add({
+    'file.txt.merge-left.r3.txt' : Item(contents="This is the new content\n"),
+    'file.txt.merge-right.r2.txt': Item(contents="This is the initial content\n"),
+    'file.txt'                   : Item(contents="<<<<<<< .working.txt\n" \
+                                   "This is conflicting content\n" \
+                                   "||||||| .merge-left.r3.txt\n" \
+                                   "This is the new content\n" \
+                                   "=======\n" \
+                                   "This is the initial content\n" \
+                                   ">>>>>>> .merge-right.r2.txt\n"),
+    'file.txt.working.txt'       : Item(contents="This is conflicting content\n"),
+  })
+
+  svntest.actions.run_and_verify_svn(
+    None, [],
+    'merge', '-c-3', '^/', sbox.ospath(''),
+    '--config-option',
+    'config:miscellany:preserved-conflict-file-exts=' +
+    'c txt h')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+  svntest.actions.verify_disk(wc_dir, expected_disk)
+
+def merge_dir_delete_force(sbox):
+ "merge a directory delete with --force"
+
+ sbox.build()
+
+ sbox.simple_rm('A/D/G')
+ sbox.simple_commit() # r2
+
+ sbox.simple_update(revision=1)
+
+ # Just merging r2 on r1 succeeds
+ svntest.actions.run_and_verify_svn(None, [],
+ 'merge', '-c2', '^/', sbox.wc_dir,
+ '--ignore-ancestry')
+
+ # Bring working copy to r1 again
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '-R', sbox.wc_dir)
+
+ # But when using --force this same merge caused a segfault in 1.8.0-1.8.8
+ svntest.actions.run_and_verify_svn(None, [],
+ 'merge', '-c2', '^/', sbox.wc_dir,
+ '--ignore-ancestry', '--force')
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+# (the leading None is consumed by svntest.main.run_tests so that test
+# numbers start at 1; keep new tests appended at the end so existing
+# test numbers stay stable)
+test_list = [ None,
+              textual_merges_galore,
+              add_with_history,
+              simple_property_merges,
+              merge_with_implicit_target_using_r,
+              merge_with_implicit_target_using_c,
+              merge_with_implicit_target_and_revs,
+              merge_similar_unrelated_trees,
+              merge_with_prev,
+              merge_binary_file,
+              merge_one_file_using_r,
+              merge_one_file_using_c,
+              merge_one_file_using_implicit_revs,
+              merge_record_only,
+              merge_in_new_file_and_diff,
+              merge_skips_obstructions,
+              merge_into_missing,
+              dry_run_adds_file_with_prop,
+              merge_binary_with_common_ancestry,
+              merge_funny_chars_on_path,
+              merge_keyword_expansions,
+              merge_prop_change_to_deleted_target,
+              merge_file_with_space_in_its_name,
+              merge_dir_branches,
+              safe_property_merge,
+              property_merge_from_branch,
+              property_merge_undo_redo,
+              cherry_pick_text_conflict,
+              merge_file_replace,
+              merge_dir_replace,
+              merge_dir_and_file_replace,
+              merge_file_replace_to_mixed_rev_wc,
+              merge_ignore_whitespace,
+              merge_ignore_eolstyle,
+              merge_conflict_markers_matching_eol,
+              merge_eolstyle_handling,
+              avoid_repeated_merge_using_inherited_merge_info,
+              avoid_repeated_merge_on_subtree_with_merge_info,
+              obey_reporter_api_semantics_while_doing_subtree_merges,
+              mergeinfo_inheritance,
+              mergeinfo_elision,
+              mergeinfo_inheritance_and_discontinuous_ranges,
+              merge_to_target_with_copied_children,
+              merge_to_switched_path,
+              merge_to_path_with_switched_children,
+              merge_with_implicit_target_file,
+              empty_mergeinfo,
+              prop_add_to_child_with_mergeinfo,
+              foreign_repos_does_not_update_mergeinfo,
+              avoid_reflected_revs,
+              update_loses_mergeinfo,
+              merge_loses_mergeinfo,
+              single_file_replace_style_merge_capability,
+              merge_to_out_of_date_target,
+              merge_with_depth_files,
+              merge_away_subtrees_noninheritable_ranges,
+              merge_to_sparse_directories,
+              merge_old_and_new_revs_from_renamed_dir,
+              merge_with_child_having_different_rev_ranges_to_merge,
+              merge_old_and_new_revs_from_renamed_file,
+              merge_with_auto_rev_range_detection,
+              cherry_picking,
+              propchange_of_subdir_raises_conflict,
+              reverse_merge_prop_add_on_child,
+              merge_target_with_non_inheritable_mergeinfo,
+              self_reverse_merge,
+              ignore_ancestry_and_mergeinfo,
+              merge_from_renamed_branch_fails_while_avoiding_repeat_merge,
+              merge_source_normalization_and_subtree_merges,
+              new_subtrees_should_not_break_merge,
+              dont_add_mergeinfo_from_own_history,
+              merge_range_predates_history,
+              foreign_repos,
+              foreign_repos_uuid,
+              foreign_repos_2_url,
+              merge_added_subtree,
+              merge_unknown_url,
+              reverse_merge_away_all_mergeinfo,
+              dont_merge_revs_into_subtree_that_predate_it,
+              merge_chokes_on_renamed_subtrees,
+              dont_explicitly_record_implicit_mergeinfo,
+              merge_broken_link,
+              subtree_merges_dont_intersect_with_targets,
+              subtree_source_missing_in_requested_range,
+              subtrees_with_empty_mergeinfo,
+              commit_to_subtree_added_by_merge,
+              del_identical_file,
+              del_sched_add_hist_file,
+              subtree_merges_dont_cause_spurious_conflicts,
+              merge_target_and_subtrees_need_nonintersecting_ranges,
+              merge_two_edits_to_same_prop,
+              merge_an_eol_unification_and_set_svn_eol_style,
+              merge_adds_mergeinfo_correctly,
+              natural_history_filtering,
+              subtree_gets_changes_even_if_ultimately_deleted,
+              no_self_referential_filtering_on_added_path,
+              merge_range_prior_to_rename_source_existence,
+              dont_merge_gaps_in_history,
+              mergeinfo_deleted_by_a_merge_should_disappear,
+              noop_file_merge,
+              handle_gaps_in_implicit_mergeinfo,
+              copy_then_replace_via_merge,
+              record_only_merge,
+              merge_automatic_conflict_resolution,
+              skipped_files_get_correct_mergeinfo,
+              committed_case_only_move_and_revert,
+              merge_into_wc_for_deleted_branch,
+              foreign_repos_del_and_props,
+              immediate_depth_merge_creates_minimal_subtree_mergeinfo,
+              record_only_merge_creates_self_referential_mergeinfo,
+              dav_skelta_mode_causes_spurious_conflicts,
+              merge_into_locally_added_file,
+              merge_into_locally_added_directory,
+              merge_with_os_deleted_subtrees,
+              no_self_referential_or_nonexistent_inherited_mergeinfo,
+              subtree_merges_inherit_invalid_working_mergeinfo,
+              merge_change_to_file_with_executable,
+              dry_run_merge_conflicting_binary,
+              foreign_repos_prop_conflict,
+              merge_adds_subtree_with_mergeinfo,
+              reverse_merge_adds_subtree,
+              merged_deletion_causes_tree_conflict,
+              record_only_merge_adds_new_subtree_mergeinfo,
+              unnecessary_noninheritable_mergeinfo_missing_subtrees,
+              unnecessary_noninheritable_mergeinfo_shallow_merge,
+              svnmucc_abuse_1,
+              merge_source_with_replacement,
+              reverse_merge_with_rename,
+              merge_adds_then_deletes_subtree,
+              merge_with_added_subtrees_with_mergeinfo,
+              merge_with_externals_with_mergeinfo,
+              merge_binary_file_with_keywords,
+              merge_conflict_when_keywords_removed,
+              merge_target_selection,
+              merge_properties_on_adds,
+              conflict_aborted_mergeinfo_described_partial_merge,
+              multiple_editor_drive_merge_notifications,
+              single_editor_drive_merge_notifications,
+              conflicted_split_merge_with_resolve,
+              merge_to_empty_target_merge_to_infinite_target,
+              conflict_naming,
+              merge_dir_delete_force,
+             ]
+
+if __name__ == '__main__':
+  svntest.main.run_tests(test_list)
+  # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/merge_tree_conflict_tests.py b/subversion/tests/cmdline/merge_tree_conflict_tests.py
new file mode 100755
index 0000000..4f1805b
--- /dev/null
+++ b/subversion/tests/cmdline/merge_tree_conflict_tests.py
@@ -0,0 +1,2409 @@
+#!/usr/bin/env python
+#
+# merge_tree_conflict_tests.py: testing tree conflicts during merge
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import shutil, sys, re, os
+import time
+
+# Our testing module
+import svntest
+from svntest import main, wc, verify, actions, deeptrees
+
+# (abbreviation)
+Item = wc.StateItem
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+
+from svntest.main import SVN_PROP_MERGEINFO
+from svntest.main import server_has_mergeinfo
+from svntest.mergetrees import set_up_branch
+from svntest.mergetrees import svn_copy
+from svntest.mergetrees import svn_merge
+from svntest.mergetrees import expected_merge_output
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def delete_file_and_dir(sbox):
+ "merge that deletes items"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Rev 2 copy B to B2
+ B_path = os.path.join(wc_dir, 'A', 'B')
+ B2_path = os.path.join(wc_dir, 'A', 'B2')
+ B_url = sbox.repo_url + '/A/B'
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', B_path, B2_path)
+
+ expected_output = wc.State(wc_dir, {
+ 'A/B2' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B2' : Item(status=' ', wc_rev=2),
+ 'A/B2/E' : Item(status=' ', wc_rev=2),
+ 'A/B2/E/alpha' : Item(status=' ', wc_rev=2),
+ 'A/B2/E/beta' : Item(status=' ', wc_rev=2),
+ 'A/B2/F' : Item(status=' ', wc_rev=2),
+ 'A/B2/lambda' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Rev 3 delete E and lambda from B
+ E_path = os.path.join(B_path, 'E')
+ lambda_path = os.path.join(B_path, 'lambda')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'delete', E_path, lambda_path)
+
+ expected_output = wc.State(wc_dir, {
+ 'A/B/E' : Item(verb='Deleting'),
+ 'A/B/lambda' : Item(verb='Deleting'),
+ })
+ expected_status.remove('A/B/E',
+ 'A/B/E/alpha',
+ 'A/B/E/beta',
+ 'A/B/lambda')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ def modify_B2():
+ # Local mods in B2
+ B2_E_path = os.path.join(B2_path, 'E')
+ B2_lambda_path = os.path.join(B2_path, 'lambda')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'foo_val',
+ B2_E_path, B2_lambda_path)
+ expected_status.tweak(
+ 'A/B2/E', 'A/B2/lambda', status=' M'
+ )
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ modify_B2()
+
+ # Merge rev 3 into B2
+
+ # The local mods to the paths modified in r3 cause the paths to be
+ # tree-conflicted upon deletion, resulting in only the mergeinfo change
+ # to the target of the merge 'B2'.
+ expected_output = wc.State(B2_path, {
+ '' : Item(),
+ 'lambda' : Item(status=' ', treeconflict='C'),
+ 'E' : Item(status=' ', treeconflict='C'),
+ })
+ expected_mergeinfo_output = wc.State(B2_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(B2_path, {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:3'}),
+ 'E' : Item(props={'foo' : 'foo_val'}),
+ 'E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'E/beta' : Item("This is the file 'beta'.\n"),
+ 'F' : Item(),
+ 'lambda' : Item("This is the file 'lambda'.\n",
+ props={'foo' : 'foo_val'}),
+ })
+ expected_status2 = wc.State(B2_path, {
+ '' : Item(status=' M'),
+ 'E' : Item(status=' M', treeconflict='C'),
+ 'E/alpha' : Item(status=' '),
+ 'E/beta' : Item(status=' '),
+ 'F' : Item(status=' '),
+ 'lambda' : Item(status=' M', treeconflict='C'),
+ })
+ expected_status2.tweak(wc_rev=2)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_merge(B2_path, '2', '3', B_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status2,
+ expected_skip,
+ check_props=True)
+
+#----------------------------------------------------------------------
+# This is a regression test for issue #1176.
+@Issue(1176)
+@SkipUnless(server_has_mergeinfo)
+def merge_catches_nonexistent_target(sbox):
+ "merge should not die if a target file is absent"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Copy G to a new directory, Q. Create Q/newfile. Commit a change
+ # to Q/newfile. Now merge that change... into G. Merge should not
+ # error, rather, it should report the tree conflict and continue.
+
+ G_path = os.path.join(wc_dir, 'A', 'D', 'G')
+ Q_path = os.path.join(wc_dir, 'A', 'D', 'Q')
+ newfile_path = os.path.join(Q_path, 'newfile')
+ Q_url = sbox.repo_url + '/A/D/Q'
+
+ # Copy dir A/D/G to A/D/Q
+ svntest.actions.run_and_verify_svn(None, [], 'cp', G_path, Q_path)
+
+ svntest.main.file_append(newfile_path, 'This is newfile.\n')
+ svntest.actions.run_and_verify_svn(None, [], 'add', newfile_path)
+
+  # Add newfile to dir Q, creating r2.
+ expected_output = wc.State(wc_dir, {
+ 'A/D/Q' : Item(verb='Adding'),
+ 'A/D/Q/newfile' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/D/Q' : Item(status=' ', wc_rev=2),
+ 'A/D/Q/pi' : Item(status=' ', wc_rev=2),
+ 'A/D/Q/rho' : Item(status=' ', wc_rev=2),
+ 'A/D/Q/tau' : Item(status=' ', wc_rev=2),
+ 'A/D/Q/newfile' : Item(status=' ', wc_rev=2),
+ })
+ ### right now, we cannot denote that Q/newfile is a local-add rather than
+ ### a child of the A/D/Q copy. thus, it appears in the status output as a
+ ### (M)odified child.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Change newfile, creating r3.
+ svntest.main.file_append(newfile_path, 'A change to newfile.\n')
+ expected_output = wc.State(wc_dir, {
+ 'A/D/Q/newfile' : Item(verb='Sending'),
+ })
+ expected_status.tweak('A/D/Q/newfile', wc_rev=3)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Merge the change to newfile (from r3) into G, where newfile
+ # doesn't exist. This is a tree conflict (use case 4, see
+ # notes/tree-conflicts/detection.txt).
+ os.chdir(G_path)
+ expected_output = wc.State('', {
+ 'newfile' : Item(status=' ', treeconflict='C'),
+ })
+ expected_mergeinfo_output = wc.State('', {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State('', {
+ })
+ expected_status = wc.State('', {
+ '' : Item(status=' M' ),
+ 'pi' : Item(status=' ' ),
+ 'rho' : Item(status=' ' ),
+ 'tau' : Item(status=' ' ),
+ })
+ expected_status.tweak(wc_rev=1)
+
+ expected_status.add({
+ 'newfile': Item(status='! ', treeconflict='C' )
+ })
+
+ expected_status.tweak('', status=' M')
+
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/D/Q:3'}),
+ 'pi' : Item("This is the file 'pi'.\n"),
+ 'rho' : Item("This is the file 'rho'.\n"),
+ 'tau' : Item("This is the file 'tau'.\n"),
+ })
+ expected_skip = wc.State('', {
+ })
+ svntest.actions.run_and_verify_merge('', '2', '3', Q_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ expected_status.add({
+ 'newfile' : Item(status='! ', treeconflict='C'),
+ })
+ svntest.actions.run_and_verify_unquiet_status('', expected_status)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def merge_tree_deleted_in_target(sbox):
+ "merge on deleted directory in target"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Copy B to a new directory, I. Modify B/E/alpha, Remove I/E. Now
+ # merge that change... into I. Merge should report a tree conflict.
+
+ B_path = os.path.join(wc_dir, 'A', 'B')
+ I_path = os.path.join(wc_dir, 'A', 'I')
+ alpha_path = os.path.join(B_path, 'E', 'alpha')
+ B_url = sbox.repo_url + '/A/B'
+ I_url = sbox.repo_url + '/A/I'
+
+
+  # Copy B to I, creating r2.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', B_url, I_url, '-m', 'rev 2')
+
+  # Change some files, creating r3.
+ svntest.main.file_append(alpha_path, 'A change to alpha.\n')
+ svntest.main.file_append(os.path.join(B_path, 'lambda'), 'change lambda.\n')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'rev 3', B_path)
+
+  # Remove E, creating r4.
+ E_url = sbox.repo_url + '/A/I/E'
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', E_url, '-m', 'rev 4')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', os.path.join(wc_dir,'A'))
+
+ expected_output = wc.State(I_path, {
+ 'lambda' : Item(status='U '),
+ 'E' : Item(status=' ', treeconflict='C'),
+ })
+ expected_mergeinfo_output = wc.State(I_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(I_path, {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:3'}),
+ 'F' : Item(),
+ 'lambda' : Item("This is the file 'lambda'.\nchange lambda.\n"),
+ })
+ expected_status = wc.State(I_path, {
+ '' : Item(status=' M'),
+ 'F' : Item(status=' '),
+ 'lambda' : Item(status='M '),
+ })
+ expected_status.tweak(wc_rev=4)
+ expected_status.add({
+ 'E' : Item(status='! ', treeconflict='C' )
+ })
+ expected_skip = wc.State(I_path, {
+ })
+ svntest.actions.run_and_verify_merge(I_path, '2', '3', B_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+ expected_status.add({
+ 'E' : Item(status='! ', treeconflict='C'),
+ })
+ svntest.actions.run_and_verify_unquiet_status(I_path, expected_status)
+
+#----------------------------------------------------------------------
+# Regression test for issue #2403: Incorrect 3-way merge of "added"
+# binary file which already exists (unmodified) in the WC
+@SkipUnless(server_has_mergeinfo)
+@Issue(2403)
+def three_way_merge_add_of_existing_binary_file(sbox):
+ "3-way merge of 'file add' into existing binary"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Create a branch of A, creating revision 2.
+ A_url = sbox.repo_url + "/A"
+ branch_A_url = sbox.repo_url + "/copy-of-A"
+ svntest.actions.run_and_verify_svn(None, [],
+ "cp",
+ A_url, branch_A_url,
+ "-m", "Creating copy-of-A")
+
+ # Add a binary file to the WC.
+ theta_contents = open(os.path.join(sys.path[0], "theta.bin"), 'rb').read()
+ # Write PNG file data into 'A/theta'.
+ A_path = os.path.join(wc_dir, 'A')
+ theta_path = os.path.join(wc_dir, 'A', 'theta')
+ svntest.main.file_write(theta_path, theta_contents, 'wb')
+
+ svntest.main.run_svn(None, "add", theta_path)
+
+ # Commit the new binary file to the repos, creating revision 3.
+ expected_output = svntest.wc.State(wc_dir, {
+ "A/theta" : Item(verb="Adding (bin)"),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ "A/theta" : Item(status=" ", wc_rev=3),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # In the working copy, attempt to 'svn merge branch_A_url@2 A_url@3 A'.
+ # We should *not* see a conflict during the merge, but an 'A'.
+ # And after the merge, the status should not report any differences.
+
+ expected_output = wc.State(wc_dir, {
+ "A/theta" : Item(status=" ", treeconflict='C'),
+ })
+ expected_elision_output = wc.State(wc_dir, {
+ })
+
+ # As greek_state is rooted at / instead of /A (our merge target), we
+ # need a sub-tree of it rather than straight copy.
+ expected_disk = svntest.main.greek_state.subtree("A")
+ expected_disk.add({
+ "" : Item(props={SVN_PROP_MERGEINFO : '/A:2-3'}),
+ "theta" : Item(theta_contents,
+ props={"svn:mime-type" : "application/octet-stream"}),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ "A/theta" : Item(status=" ", wc_rev=3, treeconflict='C'),
+ })
+ expected_status.tweak("A", status=" M")
+ expected_status.remove("") # top-level of the WC
+ expected_status.remove("iota")
+ expected_skip = wc.State("", { })
+
+ # If we merge into wc_dir alone, theta appears at the WC root,
+ # which is in the wrong location -- append "/A" to stay on target.
+ #
+ # Note we don't bother checking expected mergeinfo output because
+ # three-way merges record mergeinfo multiple times on the same
+ # path, 'A' in this case. The first recording is reported as ' U'
+ # but the second is reported as ' G'. Our expected tree structures
+ # can't handle checking for multiple values for the same key.
+ svntest.actions.run_and_verify_merge(A_path, "2", "3",
+ branch_A_url, A_url,
+ expected_output,
+ None, # expected_mergeinfo_output
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, False,
+ '--allow-mixed-revisions', A_path)
+
+#----------------------------------------------------------------------
+# Issue #2515
+@Issue(2515)
+def merge_added_dir_to_deleted_in_target(sbox):
+ "merge an added dir on a deleted dir in target"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # copy B to a new directory, I.
+ # delete F in I.
+ # add J to B/F.
+ # merge add to I.
+
+ B_url = sbox.repo_url + '/A/B'
+ I_url = sbox.repo_url + '/A/I'
+ F_url = sbox.repo_url + '/A/I/F'
+ J_url = sbox.repo_url + '/A/B/F/J'
+ I_path = os.path.join(wc_dir, 'A', 'I')
+
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', B_url, I_url, '-m', 'rev 2')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', F_url, '-m', 'rev 3')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mkdir', '-m', 'rev 4', J_url)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', os.path.join(wc_dir,'A'))
+
+ expected_output = wc.State(I_path, {
+ 'F' : Item(status=' ', treeconflict='C'),
+ })
+ expected_mergeinfo_output = wc.State(I_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(I_path, {
+ })
+ expected_disk = wc.State('', {
+ 'E' : Item(),
+ 'E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'E/beta' : Item("This is the file 'beta'.\n"),
+ 'lambda' : Item("This is the file 'lambda'.\n"),
+ })
+ expected_skip = wc.State(I_path, {
+ })
+
+ svntest.actions.run_and_verify_merge(I_path, '2', '4', B_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ None,
+ expected_skip)
+
+#----------------------------------------------------------------------
+# Issue 2584
+@Issue(2584)
+@SkipUnless(server_has_mergeinfo)
+def merge_add_over_versioned_file_conflicts(sbox):
+ "conflict from merge of add over versioned file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ E_path = os.path.join(wc_dir, 'A', 'B', 'E')
+ alpha_path = os.path.join(E_path, 'alpha')
+ new_alpha_path = os.path.join(wc_dir, 'A', 'C', 'alpha')
+
+ # Create a new "alpha" file, with enough differences to cause a conflict.
+ svntest.main.file_write(new_alpha_path, 'new alpha content\n')
+
+ # Add and commit the new "alpha" file, creating revision 2.
+ svntest.main.run_svn(None, "add", new_alpha_path)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/C/alpha' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/C/alpha' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Merge r1:2 from A/C to A/B/E. This will attempt to add A/C/alpha,
+ # but since A/B/E/alpha already exists we get a tree conflict.
+ expected_output = wc.State(E_path, {
+ 'alpha' : Item(status=' ', treeconflict='C'),
+ })
+ expected_mergeinfo_output = wc.State(E_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(E_path, {
+ })
+ expected_disk = wc.State('', {
+ 'alpha' : Item("This is the file 'alpha'.\n"),
+ 'beta' : Item("This is the file 'beta'.\n"),
+ })
+ expected_status = wc.State(E_path, {
+ '' : Item(status=' M', wc_rev=1),
+ 'alpha' : Item(status=' ', wc_rev=1, treeconflict='C'),
+ 'beta' : Item(status=' ', wc_rev=1),
+ })
+ expected_skip = wc.State(E_path, { })
+ svntest.actions.run_and_verify_merge(E_path, '1', '2',
+ sbox.repo_url + '/A/C', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+@Issue(2829)
+def mergeinfo_recording_in_skipped_merge(sbox):
+ "mergeinfo recording in skipped merge"
+
+ ## See http://subversion.tigris.org/issues/show_bug.cgi?id=2829. ##
+
+ # Create a WC with a single branch
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox, True, 1)
+
+ # Some paths we'll care about
+ A_url = sbox.repo_url + '/A'
+ A_COPY_path = os.path.join(wc_dir, 'A_COPY')
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+ alpha_path = os.path.join(wc_dir, 'A', 'B', 'E', 'alpha')
+ A_COPY_B_E_path = os.path.join(wc_dir, 'A_COPY', 'B', 'E')
+ A_COPY_alpha_path = os.path.join(wc_dir, 'A_COPY', 'B', 'E', 'alpha')
+ A_COPY_beta_path = os.path.join(wc_dir, 'A_COPY', 'B', 'E', 'beta')
+
+ # Make a modification to A/mu
+ svntest.main.file_write(mu_path, "This is the file 'mu' modified.\n")
+ expected_output = wc.State(wc_dir, {'A/mu' : Item(verb='Sending')})
+ wc_status.add({'A/mu' : Item(status=' ', wc_rev=3)})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+
+ # Make a modification to A/B/E/alpha
+ svntest.main.file_write(alpha_path, "This is the file 'alpha' modified.\n")
+ expected_output = wc.State(wc_dir, {'A/B/E/alpha' : Item(verb='Sending')})
+ wc_status.add({'A/B/E/alpha' : Item(status=' ', wc_rev=4)})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ wc_status)
+
+ # Delete A_COPY/B/E
+ svntest.actions.run_and_verify_svn(None, [], 'rm',
+ A_COPY_B_E_path)
+
+ # Merge /A to /A_COPY ie., r1 to r4
+ expected_output = wc.State(A_COPY_path, {
+ 'mu' : Item(status='U '),
+ 'B/E' : Item(status=' ', treeconflict='C'),
+ })
+ expected_mergeinfo_output = wc.State(A_COPY_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(A_COPY_path, {
+ })
+ expected_status = wc.State(A_COPY_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'mu' : Item(status='M ', wc_rev=2),
+ 'B' : Item(status=' ', wc_rev=2),
+ 'B/lambda' : Item(status=' ', wc_rev=2),
+ 'B/F' : Item(status=' ', wc_rev=2),
+ 'B/E' : Item(status='D ', wc_rev=2, treeconflict='C'),
+ 'B/E/alpha': Item(status='D ', wc_rev=2),
+ 'B/E/beta' : Item(status='D ', wc_rev=2),
+ 'C' : Item(status=' ', wc_rev=2),
+ 'D' : Item(status=' ', wc_rev=2),
+ 'D/gamma' : Item(status=' ', wc_rev=2),
+ 'D/G' : Item(status=' ', wc_rev=2),
+ 'D/G/pi' : Item(status=' ', wc_rev=2),
+ 'D/G/rho' : Item(status=' ', wc_rev=2),
+ 'D/G/tau' : Item(status=' ', wc_rev=2),
+ 'D/H' : Item(status=' ', wc_rev=2),
+ 'D/H/chi' : Item(status=' ', wc_rev=2),
+ 'D/H/omega': Item(status=' ', wc_rev=2),
+ 'D/H/psi' : Item(status=' ', wc_rev=2),
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A:2-4'}),
+ 'mu' : Item("This is the file 'mu' modified.\n"),
+ 'C' : Item(),
+ 'D' : Item(),
+ 'B' : Item(),
+ 'B/lambda' : Item(contents="This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'D/G' : Item(),
+ 'D/G/pi' : Item("This is the file 'pi'.\n"),
+ 'D/G/rho' : Item("This is the file 'rho'.\n"),
+ 'D/G/tau' : Item("This is the file 'tau'.\n"),
+ 'D/H' : Item(),
+ 'D/H/chi' : Item("This is the file 'chi'.\n"),
+ 'D/H/omega': Item("This is the file 'omega'.\n"),
+ 'D/H/psi' : Item("This is the file 'psi'.\n"),
+ })
+ expected_skip = wc.State(A_COPY_path, {})
+ svntest.actions.run_and_verify_merge(A_COPY_path, None, None,
+ A_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+
+#----------------------------------------------------------------------
+def del_differing_file(sbox):
+ "merge tries to delete a file of different content"
+
+ # Setup a standard greek tree in r1.
+ sbox.build()
+
+ saved_cwd = os.getcwd()
+ os.chdir(sbox.wc_dir)
+ sbox.wc_dir = ''
+
+ source = 'A/D/G'
+ s_rev_orig = 1
+
+ # Delete files in the source
+ sbox.simple_rm(source+"/tau")
+ sbox.simple_commit(source)
+ s_rev_tau = 2
+ sbox.simple_rm(source+"/pi")
+ sbox.simple_commit(source)
+ s_rev_pi = 3
+
+ # Copy a file, modify it, and merge a deletion to it.
+ target = 'A/D/G2'
+ svn_copy(s_rev_orig, source, target)
+ svntest.main.file_append(target+"/tau", "An extra line in the target.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'propset',
+ 'newprop', 'v', target+"/pi")
+
+ dir_D = os.path.join('A','D')
+ tau = os.path.join(dir_D,'G2','tau')
+ pi = os.path.join(dir_D, 'G2', 'pi')
+ # Should complain and "skip" it.
+ svn_merge(s_rev_tau, source, target, [
+ " C %s\n" % tau, # merge
+ ], tree_conflicts=1)
+
+ svn_merge(s_rev_pi, source, target, [
+ " C %s\n" % pi, # merge
+ ], tree_conflicts=1)
+
+
+ # Copy a file, modify it, commit, and merge a deletion to it.
+ target = 'A/D/G3'
+ svn_copy(s_rev_orig, source, target)
+ svntest.main.file_append(target+"/tau", "An extra line in the target.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'propset',
+ 'newprop', 'v', target+"/pi")
+ sbox.simple_commit(target)
+
+
+ tau = os.path.join(dir_D,'G3','tau')
+ pi = os.path.join(dir_D, 'G3', 'pi')
+
+ # Should complain and "skip" it.
+ svn_merge(s_rev_tau, source, target, [
+ " C %s\n" % tau,
+ ], tree_conflicts=1)
+
+ svn_merge(s_rev_pi, source, target, [
+ " C %s\n" % pi,
+ ], tree_conflicts=1)
+
+ os.chdir(saved_cwd)
+
+#----------------------------------------------------------------------
+# This test used to involve tree conflicts, hence its name.
+@Issue(3146)
+def tree_conflicts_and_obstructions(sbox):
+ "tree conflicts and obstructions"
+
+ ## See http://subversion.tigris.org/issues/show_bug.cgi?id=3146. ##
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ trunk_url = sbox.repo_url + '/A/B/E'
+ branch_path = os.path.join(wc_dir, 'branch')
+ br_alpha_moved = os.path.join(branch_path, 'alpha-moved')
+
+ # Create a branch
+ svntest.actions.run_and_verify_svn(None, [], 'cp',
+ trunk_url,
+ sbox.repo_url + '/branch',
+ '-m', "Creating the Branch")
+
+ svntest.actions.run_and_verify_svn(None, [], 'mv',
+ trunk_url + '/alpha',
+ trunk_url + '/alpha-moved',
+ '-m', "Move alpha to alpha-moved")
+
+ # Update to revision 2.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'update', wc_dir)
+
+ svntest.main.file_write(br_alpha_moved, "I am blocking myself from trunk\n")
+
+ branch_path = os.path.join(wc_dir, "branch")
+
+ # Merge the obstructions into the branch.
+ expected_output = svntest.wc.State(branch_path, {
+ 'alpha' : Item(status='D '),
+ })
+ expected_mergeinfo_output = wc.State(branch_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(branch_path, {
+ })
+ expected_disk = wc.State('', {
+ 'beta' : Item("This is the file 'beta'.\n"),
+ 'alpha-moved' : Item("I am blocking myself from trunk\n"),
+ })
+ expected_status = wc.State(branch_path, {
+ '' : Item(status=' M', wc_rev=3),
+ 'alpha' : Item(status='D ', wc_rev=3),
+ 'beta' : Item(status=' ', wc_rev=3),
+ })
+ expected_skip = wc.State(branch_path, {
+ 'alpha-moved' : Item(verb='Skipped'),
+ })
+
+ svntest.actions.run_and_verify_merge(branch_path,
+ '1', 'HEAD', trunk_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip)
+
+
+#----------------------------------------------------------------------
+
+# Detect tree conflicts among files and directories,
+# edited or deleted in a deep directory structure.
+#
+# See use cases 4-6 in notes/tree-conflicts/use-cases.txt for background.
+# Note that we do not try to track renames. The only difference from
+# the behavior of Subversion 1.4 and 1.5 is the conflicted status of the
+# parent directory.
+
+# convenience definitions
+leaf_edit = svntest.deeptrees.deep_trees_leaf_edit
+tree_del = svntest.deeptrees.deep_trees_tree_del
+leaf_del = svntest.deeptrees.deep_trees_leaf_del
+
+disk_after_leaf_edit = svntest.deeptrees.deep_trees_after_leaf_edit
+disk_after_leaf_del = svntest.deeptrees.deep_trees_after_leaf_del
+disk_after_tree_del = svntest.deeptrees.deep_trees_after_tree_del
+disk_after_leaf_del_no_ci = svntest.deeptrees.deep_trees_after_leaf_del_no_ci
+disk_after_tree_del_no_ci = svntest.deeptrees.deep_trees_after_tree_del_no_ci
+
+deep_trees_conflict_output = svntest.deeptrees.deep_trees_conflict_output
+
+j = os.path.join
+
+DeepTreesTestCase = svntest.deeptrees.DeepTreesTestCase
+
+alpha_beta_gamma = svntest.wc.State('', {
+ 'F/alpha' : Item(),
+ 'DF/D1/beta' : Item(),
+ 'DDF/D1/D2/gamma' : Item(),
+ })
+
+#----------------------------------------------------------------------
+def tree_conflicts_on_merge_local_ci_4_1(sbox):
+ "tree conflicts 4.1: tree del, leaf edit"
+
+ # use case 4, as in notes/tree-conflicts/use-cases.txt
+ # 4.1) local tree delete, incoming leaf edit
+
+ expected_output = deep_trees_conflict_output
+
+ expected_disk = disk_after_tree_del
+
+ expected_status = svntest.wc.State('', {
+ '' : Item(status=' M', wc_rev='3'),
+ 'F' : Item(status=' ', wc_rev='3'),
+ 'D' : Item(status=' ', wc_rev='3'),
+ 'DF' : Item(status=' ', wc_rev='3'),
+ 'DD' : Item(status=' ', wc_rev='3'),
+ 'DDF' : Item(status=' ', wc_rev='3'),
+ 'DDD' : Item(status=' ', wc_rev='3'),
+ 'D/D1' : Item(status='! ', treeconflict='C'),
+ 'F/alpha' : Item(status='! ', treeconflict='C'),
+ 'DD/D1' : Item(status='! ', treeconflict='C'),
+ 'DF/D1' : Item(status='! ', treeconflict='C'),
+ 'DDD/D1' : Item(status='! ', treeconflict='C'),
+ 'DDF/D1' : Item(status='! ', treeconflict='C'),
+ })
+
+ expected_skip = svntest.wc.State('', { })
+
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_merge(sbox,
+ [ DeepTreesTestCase("local_tree_del_incoming_leaf_edit",
+ tree_del,
+ leaf_edit,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip) ], True)
+
+#----------------------------------------------------------------------
+def tree_conflicts_on_merge_local_ci_4_2(sbox):
+ "tree conflicts 4.2: tree del, leaf del"
+
+ # 4.2) local tree delete, incoming leaf delete
+
+ expected_output = deep_trees_conflict_output
+
+ expected_disk = disk_after_tree_del
+
+ expected_status = svntest.wc.State('', {
+ '' : Item(status=' M', wc_rev='3'),
+ 'F' : Item(status=' ', wc_rev='3'),
+ 'D' : Item(status=' ', wc_rev='3'),
+ 'DF' : Item(status=' ', wc_rev='3'),
+ 'DD' : Item(status=' ', wc_rev='3'),
+ 'DDF' : Item(status=' ', wc_rev='3'),
+ 'DDD' : Item(status=' ', wc_rev='3'),
+ 'F/alpha' : Item(status='! ', treeconflict='C'),
+ 'D/D1' : Item(status='! ', treeconflict='C'),
+ 'DF/D1' : Item(status='! ', treeconflict='C'),
+ 'DD/D1' : Item(status='! ', treeconflict='C'),
+ 'DDF/D1' : Item(status='! ', treeconflict='C'),
+ 'DDD/D1' : Item(status='! ', treeconflict='C'),
+ })
+
+ expected_skip = svntest.wc.State('', {
+ })
+
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_merge(sbox,
+ [ DeepTreesTestCase("local_tree_del_incoming_leaf_del",
+ tree_del,
+ leaf_del,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip) ], True)
+
+#----------------------------------------------------------------------
+@Issue(2282)
+def tree_conflicts_on_merge_local_ci_5_1(sbox):
+ "tree conflicts 5.1: leaf edit, tree del"
+
+ # use case 5, as in notes/tree-conflicts/use-cases.txt
+ # 5.1) local leaf edit, incoming tree delete
+
+ expected_output = deep_trees_conflict_output
+
+ expected_disk = disk_after_leaf_edit
+
+ # We should detect 6 tree conflicts, and nothing should be deleted (when
+ # we skip tree conflict victims).
+ expected_status = svntest.wc.State('', {
+ '' : Item(status=' M', wc_rev='3'),
+ 'D' : Item(status=' ', wc_rev='3'),
+ 'D/D1' : Item(status=' ', treeconflict='C', wc_rev='4'),
+ 'D/D1/delta' : Item(status=' ', wc_rev='4'),
+ 'DD' : Item(status=' ', wc_rev='3'),
+ 'DD/D1' : Item(status=' ', treeconflict='C', wc_rev='3'),
+ 'DD/D1/D2' : Item(status=' ', wc_rev='4'),
+ 'DD/D1/D2/epsilon' : Item(status=' ', wc_rev='4'),
+ 'DDD' : Item(status=' ', wc_rev='3'),
+ 'DDD/D1' : Item(status=' ', treeconflict='C', wc_rev='3'),
+ 'DDD/D1/D2' : Item(status=' ', wc_rev='3'),
+ 'DDD/D1/D2/D3' : Item(status=' ', wc_rev='4'),
+ 'DDD/D1/D2/D3/zeta' : Item(status=' ', wc_rev='4'),
+ 'DDF' : Item(status=' ', wc_rev='3'),
+ 'DDF/D1' : Item(status=' ', treeconflict='C', wc_rev='3'),
+ 'DDF/D1/D2' : Item(status=' ', wc_rev='3'),
+ 'DDF/D1/D2/gamma' : Item(status=' ', wc_rev='4'),
+ 'DF' : Item(status=' ', wc_rev='3'),
+ 'DF/D1' : Item(status=' ', treeconflict='C', wc_rev='3'),
+ 'DF/D1/beta' : Item(status=' ', wc_rev='4'),
+ 'F' : Item(status=' ', wc_rev='3'),
+ 'F/alpha' : Item(status=' ', treeconflict='C', wc_rev='4'),
+
+ })
+
+ expected_skip = svntest.wc.State('', {
+ })
+
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_merge(sbox,
+ [ DeepTreesTestCase("local_leaf_edit_incoming_tree_del",
+ leaf_edit,
+ tree_del,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip) ], True)
+
+#----------------------------------------------------------------------
+@Issue(2282)
+def tree_conflicts_on_merge_local_ci_5_2(sbox):
+ "tree conflicts 5.2: leaf del, tree del"
+
+ # 5.2) local leaf del, incoming tree delete
+
+ expected_output = deep_trees_conflict_output
+
+ expected_disk = disk_after_leaf_del
+
+ expected_status = svntest.wc.State('', {
+ '' : Item(status=' M', wc_rev='3'),
+ 'D' : Item(status=' ', wc_rev='3'),
+ 'F' : Item(status=' ', wc_rev='3'),
+ 'DD' : Item(status=' ', wc_rev='3'),
+ 'DD/D1' : Item(status=' ', wc_rev='3', treeconflict='C'),
+ 'DF' : Item(status=' ', wc_rev='3'),
+ 'DF/D1' : Item(status=' ', wc_rev='3', treeconflict='C'),
+ 'DDD' : Item(status=' ', wc_rev='3'),
+ 'DDD/D1' : Item(status=' ', wc_rev='3', treeconflict='C'),
+ 'DDD/D1/D2' : Item(status=' ', wc_rev='3'),
+ 'DDF' : Item(status=' ', wc_rev='3'),
+ 'DDF/D1' : Item(status=' ', wc_rev='3', treeconflict='C'),
+ 'DDF/D1/D2' : Item(status=' ', wc_rev='3'),
+ 'D/D1' : Item(status='! ', treeconflict='C'),
+ 'F/alpha' : Item(status='! ', treeconflict='C'),
+ })
+
+ expected_skip = svntest.wc.State('', {
+ })
+
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_merge(sbox,
+ [ DeepTreesTestCase("local_leaf_del_incoming_tree_del",
+ leaf_del,
+ tree_del,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip) ], True)
+
+#----------------------------------------------------------------------
+def tree_conflicts_on_merge_local_ci_6(sbox):
+ "tree conflicts 6: tree del, tree del"
+
+ # use case 6, as in notes/tree-conflicts/use-cases.txt
+ # local tree delete, incoming tree delete
+
+ expected_output = deep_trees_conflict_output
+
+ expected_disk = disk_after_tree_del
+
+ # Every locally deleted (and committed) subtree root becomes a missing
+ # ('! ') tree-conflict victim when the merge tries to delete it again.
+ expected_status = svntest.wc.State('', {
+ '' : Item(status=' M', wc_rev='3'),
+ 'D' : Item(status=' ', wc_rev='3'),
+ 'F' : Item(status=' ', wc_rev='3'),
+ 'DD' : Item(status=' ', wc_rev='3'),
+ 'DF' : Item(status=' ', wc_rev='3'),
+ 'DDD' : Item(status=' ', wc_rev='3'),
+ 'DDF' : Item(status=' ', wc_rev='3'),
+ 'D/D1' : Item(status='! ', treeconflict='C'),
+ 'F/alpha' : Item(status='! ', treeconflict='C'),
+ 'DD/D1' : Item(status='! ', treeconflict='C'),
+ 'DF/D1' : Item(status='! ', treeconflict='C'),
+ 'DDD/D1' : Item(status='! ', treeconflict='C'),
+ 'DDF/D1' : Item(status='! ', treeconflict='C'),
+ })
+
+ expected_skip = svntest.wc.State('', {
+ })
+
+ # True: commit the local tree-deletes before the merge ("local ci").
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_merge(sbox,
+ [ DeepTreesTestCase("local_tree_del_incoming_tree_del",
+ tree_del,
+ tree_del,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip) ], True)
+
+#----------------------------------------------------------------------
+def tree_conflicts_on_merge_no_local_ci_4_1(sbox):
+ "tree conflicts 4.1: tree del (no ci), leaf edit"
+
+ sbox.build()
+
+ # use case 4, as in notes/tree-conflicts/use-cases.txt
+ # 4.1) local tree delete, incoming leaf edit
+
+ expected_output = deep_trees_conflict_output
+
+ expected_disk = disk_after_tree_del_no_ci(sbox.wc_dir)
+
+ # The local deletes are NOT committed, so they remain scheduled
+ # ('D ') in the working copy; each deleted subtree root is also a
+ # tree-conflict victim of the incoming leaf edits.
+ expected_status = svntest.wc.State('', {
+ '' : Item(status=' M', wc_rev='3'),
+ 'D' : Item(status=' ', wc_rev='3'),
+ 'D/D1' : Item(status='D ', treeconflict='C', wc_rev='3'),
+ 'DD' : Item(status=' ', wc_rev='3'),
+ 'DD/D1' : Item(status='D ', treeconflict='C', wc_rev='3'),
+ 'DD/D1/D2' : Item(status='D ', wc_rev='3'),
+ 'DDD' : Item(status=' ', wc_rev='3'),
+ 'DDD/D1' : Item(status='D ', treeconflict='C', wc_rev='3'),
+ 'DDD/D1/D2' : Item(status='D ', wc_rev='3'),
+ 'DDD/D1/D2/D3' : Item(status='D ', wc_rev='3'),
+ 'DDF' : Item(status=' ', wc_rev='3'),
+ 'DDF/D1' : Item(status='D ', treeconflict='C', wc_rev='3'),
+ 'DDF/D1/D2' : Item(status='D ', wc_rev='3'),
+ 'DDF/D1/D2/gamma' : Item(status='D ', wc_rev='3'),
+ 'DF' : Item(status=' ', wc_rev='3'),
+ 'DF/D1' : Item(status='D ', treeconflict='C', wc_rev='3'),
+ 'DF/D1/beta' : Item(status='D ', wc_rev='3'),
+ 'F' : Item(status=' ', wc_rev='3'),
+ 'F/alpha' : Item(status='D ', treeconflict='C', wc_rev='3'),
+ })
+
+ expected_skip = svntest.wc.State('', {
+ })
+
+ # False: do NOT commit the local changes before merging ("no ci").
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_merge(sbox,
+ [ DeepTreesTestCase(
+ "local_tree_del_incoming_leaf_edit",
+ tree_del,
+ leaf_edit,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ ) ], False)
+
+#----------------------------------------------------------------------
+def tree_conflicts_on_merge_no_local_ci_4_2(sbox):
+ "tree conflicts 4.2: tree del (no ci), leaf del"
+
+ sbox.build()
+
+ # 4.2) local tree delete, incoming leaf delete
+
+ expected_output = deep_trees_conflict_output
+
+ expected_disk = disk_after_tree_del_no_ci(sbox.wc_dir)
+
+ # Same status picture as 4.1: uncommitted local deletes stay
+ # scheduled ('D '), subtree roots become tree-conflict victims.
+ expected_status = svntest.wc.State('', {
+ '' : Item(status=' M', wc_rev='3'),
+ 'D' : Item(status=' ', wc_rev='3'),
+ 'D/D1' : Item(status='D ', treeconflict='C', wc_rev='3'),
+ 'DD' : Item(status=' ', wc_rev='3'),
+ 'DD/D1' : Item(status='D ', treeconflict='C', wc_rev='3'),
+ 'DD/D1/D2' : Item(status='D ', wc_rev='3'),
+ 'DDD' : Item(status=' ', wc_rev='3'),
+ 'DDD/D1' : Item(status='D ', treeconflict='C', wc_rev='3'),
+ 'DDD/D1/D2' : Item(status='D ', wc_rev='3'),
+ 'DDD/D1/D2/D3' : Item(status='D ', wc_rev='3'),
+ 'DDF' : Item(status=' ', wc_rev='3'),
+ 'DDF/D1' : Item(status='D ', treeconflict='C', wc_rev='3'),
+ 'DDF/D1/D2' : Item(status='D ', wc_rev='3'),
+ 'DDF/D1/D2/gamma' : Item(status='D ', wc_rev='3'),
+ 'DF' : Item(status=' ', wc_rev='3'),
+ 'DF/D1' : Item(status='D ', treeconflict='C', wc_rev='3'),
+ 'DF/D1/beta' : Item(status='D ', wc_rev='3'),
+ 'F' : Item(status=' ', wc_rev='3'),
+ 'F/alpha' : Item(status='D ', treeconflict='C', wc_rev='3'),
+ })
+
+ expected_skip = svntest.wc.State('', {
+ })
+
+ # False: no local commit before the merge.
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_merge(sbox,
+ [ DeepTreesTestCase(
+ "local_tree_del_incoming_leaf_del",
+ tree_del,
+ leaf_del,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ ) ], False)
+
+#----------------------------------------------------------------------
+def tree_conflicts_on_merge_no_local_ci_5_1(sbox):
+ "tree conflicts 5.1: leaf edit (no ci), tree del"
+
+
+ # use case 5, as in notes/tree-conflicts/use-cases.txt
+ # 5.1) local leaf edit, incoming tree delete
+
+ expected_output = deep_trees_conflict_output
+
+ expected_disk = disk_after_leaf_edit
+
+ # Uncommitted local leaf edits show as modified ('M'/' M'/'MM'); the
+ # roots targeted by the incoming tree-deletes are conflict victims.
+ # Files added locally by leaf_edit (delta, epsilon, zeta) are still
+ # scheduled for addition ('A ', wc_rev='0').
+ expected_status = svntest.wc.State('', {
+ '' : Item(status=' M', wc_rev='3'),
+ 'D' : Item(status=' ', wc_rev='3'),
+ 'D/D1' : Item(status=' M', treeconflict='C', wc_rev='3'),
+ 'D/D1/delta' : Item(status='A ', wc_rev='0'),
+ 'DD' : Item(status=' ', wc_rev='3'),
+ 'DD/D1' : Item(status=' ', treeconflict='C', wc_rev='3'),
+ 'DD/D1/D2' : Item(status=' M', wc_rev='3'),
+ 'DD/D1/D2/epsilon' : Item(status='A ', wc_rev='0'),
+ 'DDD' : Item(status=' ', wc_rev='3'),
+ 'DDD/D1' : Item(status=' ', treeconflict='C', wc_rev='3'),
+ 'DDD/D1/D2' : Item(status=' ', wc_rev='3'),
+ 'DDD/D1/D2/D3' : Item(status=' M', wc_rev='3'),
+ 'DDD/D1/D2/D3/zeta' : Item(status='A ', wc_rev='0'),
+ 'DDF' : Item(status=' ', wc_rev='3'),
+ 'DDF/D1' : Item(status=' ', treeconflict='C', wc_rev='3'),
+ 'DDF/D1/D2' : Item(status=' ', wc_rev='3'),
+ 'DDF/D1/D2/gamma' : Item(status='MM', wc_rev='3'),
+ 'DF' : Item(status=' ', wc_rev='3'),
+ 'DF/D1' : Item(status=' ', treeconflict='C', wc_rev='3'),
+ 'DF/D1/beta' : Item(status='MM', wc_rev='3'),
+ 'F' : Item(status=' ', wc_rev='3'),
+ 'F/alpha' : Item(status='MM', treeconflict='C', wc_rev='3'),
+ })
+
+ expected_skip = svntest.wc.State('', {
+ })
+
+ # False: no local commit before the merge.
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_merge(sbox,
+ [ DeepTreesTestCase(
+ "local_leaf_edit_incoming_tree_del",
+ leaf_edit,
+ tree_del,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ ) ], False)
+
+#----------------------------------------------------------------------
+@Issue(2282)
+def tree_conflicts_on_merge_no_local_ci_5_2(sbox):
+ "tree conflicts 5.2: leaf del (no ci), tree del"
+
+ # 5.2) local leaf del, incoming tree delete
+
+ expected_output = deep_trees_conflict_output
+
+ expected_disk = disk_after_leaf_del_no_ci(sbox.wc_dir)
+
+ # Uncommitted local leaf deletes remain scheduled ('D '); the
+ # directories the incoming tree-deletes target are conflict victims.
+ expected_status = svntest.wc.State('', {
+ '' : Item(status=' M', wc_rev='3'),
+ 'D' : Item(status=' ', wc_rev='3'),
+ 'D/D1' : Item(status='D ', wc_rev='3', treeconflict='C'),
+ 'F' : Item(status=' ', wc_rev='3'),
+ 'F/alpha' : Item(status='D ', wc_rev='3', treeconflict='C'),
+ 'DD' : Item(status=' ', wc_rev='3'),
+ 'DD/D1' : Item(status=' ', wc_rev='3', treeconflict='C'),
+ 'DD/D1/D2' : Item(status='D ', wc_rev='3'),
+ 'DF' : Item(status=' ', wc_rev='3'),
+ 'DF/D1' : Item(status=' ', wc_rev='3', treeconflict='C'),
+ 'DF/D1/beta' : Item(status='D ', wc_rev='3'),
+ 'DDD' : Item(status=' ', wc_rev='3'),
+ 'DDD/D1' : Item(status=' ', wc_rev='3', treeconflict='C'),
+ 'DDD/D1/D2' : Item(status=' ', wc_rev='3'),
+ 'DDD/D1/D2/D3' : Item(status='D ', wc_rev='3'),
+ 'DDF' : Item(status=' ', wc_rev='3'),
+ 'DDF/D1' : Item(status=' ', wc_rev='3', treeconflict='C'),
+ 'DDF/D1/D2' : Item(status=' ', wc_rev='3'),
+ 'DDF/D1/D2/gamma' : Item(status='D ', wc_rev='3'),
+ })
+
+ expected_skip = svntest.wc.State('', {
+ })
+
+ # False: no local commit before the merge.
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_merge(sbox,
+ [ DeepTreesTestCase(
+ "local_leaf_del_incoming_tree_del",
+ leaf_del,
+ tree_del,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ ) ], False)
+
+#----------------------------------------------------------------------
+def tree_conflicts_on_merge_no_local_ci_6(sbox):
+ "tree conflicts 6: tree del (no ci), tree del"
+
+ sbox.build()
+
+ # use case 6, as in notes/tree-conflicts/use-cases.txt
+ # local tree delete, incoming tree delete
+
+ expected_output = deep_trees_conflict_output
+
+ expected_disk = disk_after_tree_del_no_ci(sbox.wc_dir)
+
+ # Uncommitted local tree-deletes remain scheduled ('D '); the
+ # deleted subtree roots are victims of the incoming tree-deletes.
+ expected_status = svntest.wc.State('', {
+ '' : Item(status=' M', wc_rev='3'),
+ 'D' : Item(status=' ', wc_rev='3'),
+ 'D/D1' : Item(status='D ', wc_rev='3', treeconflict='C'),
+ 'F' : Item(status=' ', wc_rev='3'),
+ 'F/alpha' : Item(status='D ', wc_rev='3', treeconflict='C'),
+ 'DD' : Item(status=' ', wc_rev='3'),
+ 'DD/D1' : Item(status='D ', wc_rev='3', treeconflict='C'),
+ 'DD/D1/D2' : Item(status='D ', wc_rev='3'),
+ 'DF' : Item(status=' ', wc_rev='3'),
+ 'DF/D1' : Item(status='D ', wc_rev='3', treeconflict='C'),
+ 'DF/D1/beta' : Item(status='D ', wc_rev='3'),
+ 'DDD' : Item(status=' ', wc_rev='3'),
+ 'DDD/D1' : Item(status='D ', wc_rev='3', treeconflict='C'),
+ 'DDD/D1/D2' : Item(status='D ', wc_rev='3'),
+ 'DDD/D1/D2/D3' : Item(status='D ', wc_rev='3'),
+ 'DDF' : Item(status=' ', wc_rev='3'),
+ 'DDF/D1' : Item(status='D ', wc_rev='3', treeconflict='C'),
+ 'DDF/D1/D2' : Item(status='D ', wc_rev='3'),
+ 'DDF/D1/D2/gamma' : Item(status='D ', wc_rev='3'),
+ })
+
+ expected_skip = svntest.wc.State('', {
+ })
+
+ # False: no local commit before the merge.
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_merge(sbox,
+ [ DeepTreesTestCase(
+ "local_tree_del_incoming_tree_del",
+ tree_del,
+ tree_del,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ ) ], False)
+
+#----------------------------------------------------------------------
+def tree_conflicts_merge_edit_onto_missing(sbox):
+ "tree conflicts: tree missing, leaf edit"
+
+ # local tree missing (via shell delete), incoming leaf edit
+
+ # Note: In 1.7 merge tracking aware merges raise an error if the
+ # merge target has subtrees missing due to a shell delete. To
+ # preserve the original intent of this test we'll run the merge
+ # with the --ignore-ancestry option, which neither considers nor
+ # records mergeinfo. With this option the merge should "succeed"
+ # while skipping the missing paths. Of course with no mergeinfo
+ # recorded and everything skipped, there is nothing to commit, so
+ # unlike most of the tree conflict tests we don't bother with the
+ # final commit step.
+
+ sbox.build()
+ expected_output = wc.State('', {
+ # Below the skips
+ 'DD/D1/D2' : Item(status=' ', treeconflict='U'),
+ 'DD/D1/D2/epsilon' : Item(status=' ', treeconflict='A'),
+ 'DDD/D1/D2/D3' : Item(status=' ', treeconflict='U'),
+ 'DDD/D1/D2/D3/zeta' : Item(status=' ', treeconflict='A'),
+ 'DDF/D1/D2/gamma' : Item(status=' ', treeconflict='U'),
+ 'D/D1/delta' : Item(status=' ', treeconflict='A'),
+ 'DF/D1/beta' : Item(status=' ', treeconflict='U'),
+ })
+
+ expected_disk = disk_after_tree_del
+
+ # Don't expect any mergeinfo property changes because we run
+ # the merge with the --ignore-ancestry option.
+ expected_status = svntest.wc.State('', {
+ '' : Item(status=' ', wc_rev=3),
+ 'F' : Item(status=' ', wc_rev=3),
+ 'F/alpha' : Item(status='! ', wc_rev=3),
+ 'D' : Item(status=' ', wc_rev=3),
+ 'D/D1' : Item(status='! ', wc_rev='3', entry_rev='?'),
+ 'DF' : Item(status=' ', wc_rev=3),
+ 'DF/D1' : Item(status='! ', wc_rev=3, entry_rev='?'),
+ 'DF/D1/beta' : Item(status='! ', wc_rev=3),
+ 'DD' : Item(status=' ', wc_rev=3),
+ 'DD/D1' : Item(status='! ', wc_rev=3, entry_rev='?'),
+ 'DD/D1/D2' : Item(status='! ', wc_rev=3),
+ 'DDF' : Item(status=' ', wc_rev=3),
+ 'DDF/D1' : Item(status='! ', wc_rev=3, entry_rev='?'),
+ 'DDF/D1/D2' : Item(status='! ', wc_rev=3),
+ 'DDF/D1/D2/gamma' : Item(status='! ', wc_rev=3),
+ 'DDD' : Item(status=' ', wc_rev=3),
+ 'DDD/D1' : Item(status='! ', wc_rev=3, entry_rev='?'),
+ 'DDD/D1/D2' : Item(status='! ', wc_rev=3),
+ 'DDD/D1/D2/D3' : Item(status='! ', wc_rev=3),
+ })
+
+ expected_skip = svntest.wc.State('', {
+ 'F/alpha' : Item(verb='Skipped missing target'),
+ # Obstruction handling improvements in 1.7 and 1.8 added
+ 'DDD/D1' : Item(verb='Skipped missing target'),
+ 'DF/D1' : Item(verb='Skipped missing target'),
+ 'DDF/D1' : Item(verb='Skipped missing target'),
+ 'D/D1' : Item(verb='Skipped missing target'),
+ 'DD/D1' : Item(verb='Skipped missing target'),
+ 'F/alpha' : Item(verb='Skipped missing target'),
+ })
+
+ # Currently this test fails because some parts of the merge
+ # start succeeding.
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_merge(sbox,
+ [ DeepTreesTestCase(
+ "local_tree_missing_incoming_leaf_edit",
+ svntest.deeptrees.deep_trees_rmtree,
+ leaf_edit,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ ) ], False, do_commit_conflicts=False, ignore_ancestry=True)
+
+#----------------------------------------------------------------------
+def tree_conflicts_merge_del_onto_missing(sbox):
+ "tree conflicts: tree missing, leaf del"
+
+ # local tree missing (via shell delete), incoming leaf del
+
+ # Note: In 1.7 merge tracking aware merges raise an error if the
+ # merge target has subtrees missing due to a shell delete. To
+ # preserve the original intent of this test we'll run the merge
+ # with the --ignore-ancestry option, which neither considers nor
+ # records mergeinfo. With this option the merge should "succeed"
+ # while skipping the missing paths. Of course with no mergeinfo
+ # recorded and everything skipped, there is nothing to commit, so
+ # unlike most of the tree conflict tests we don't bother with the
+ # final commit step.
+
+ sbox.build()
+ expected_output = wc.State('', {
+ # Below the skips
+ 'DF/D1/beta' : Item(status=' ', treeconflict='D'),
+ 'DDD/D1/D2/D3' : Item(status=' ', treeconflict='D'),
+ 'DD/D1/D2' : Item(status=' ', treeconflict='D'),
+ 'DDF/D1/D2/gamma' : Item(status=' ', treeconflict='D'),
+ })
+
+ expected_disk = disk_after_tree_del
+
+ # Don't expect any mergeinfo property changes because we run
+ # the merge with the --ignore-ancestry option.
+ # All shell-deleted paths remain missing ('! ') after the merge.
+ expected_status = svntest.wc.State('', {
+ '' : Item(status=' ', wc_rev=3),
+ 'F' : Item(status=' ', wc_rev=3),
+ 'F/alpha' : Item(status='! ', wc_rev=3),
+ 'D' : Item(status=' ', wc_rev=3),
+ 'D/D1' : Item(status='! ', wc_rev=3),
+ 'DF' : Item(status=' ', wc_rev=3),
+ 'DF/D1' : Item(status='! ', wc_rev=3),
+ 'DF/D1/beta' : Item(status='! ', wc_rev=3),
+ 'DD' : Item(status=' ', wc_rev=3),
+ 'DD/D1' : Item(status='! ', wc_rev=3),
+ 'DD/D1/D2' : Item(status='! ', wc_rev=3),
+ 'DDF' : Item(status=' ', wc_rev=3),
+ 'DDF/D1' : Item(status='! ', wc_rev=3),
+ 'DDF/D1/D2' : Item(status='! ', wc_rev=3),
+ 'DDF/D1/D2/gamma' : Item(status='! ', wc_rev=3),
+ 'DDD' : Item(status=' ', wc_rev=3),
+ 'DDD/D1' : Item(status='! ', wc_rev=3),
+ 'DDD/D1/D2' : Item(status='! ', wc_rev=3),
+ 'DDD/D1/D2/D3' : Item(status='! ', wc_rev=3),
+ })
+
+ # Every missing path touched by the merge is skipped.
+ expected_skip = svntest.wc.State('', {
+ 'F/alpha' : Item(verb='Skipped missing target'),
+ 'D/D1' : Item(verb='Skipped missing target'),
+ # Obstruction handling improvements in 1.7 and 1.8 added
+ 'DDD/D1' : Item(verb='Skipped missing target'),
+ 'DD/D1' : Item(verb='Skipped missing target'),
+ 'DDF/D1' : Item(verb='Skipped missing target'),
+ 'DF/D1' : Item(verb='Skipped missing target'),
+ })
+
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_merge(sbox,
+ [ DeepTreesTestCase(
+ "local_tree_missing_incoming_leaf_del",
+ svntest.deeptrees.deep_trees_rmtree,
+ leaf_del,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ ) ], False, do_commit_conflicts=False, ignore_ancestry=True)
+
+#----------------------------------------------------------------------
+def merge_replace_setup(sbox):
+ "helper for merge_replace_causes_tree_conflict*()."
+
+ # Builds a repository where ^/branch is a copy of ^/A (r2) in which
+ # four paths were deleted (r3) and then replaced with new nodes of
+ # the same or a different kind (r4):
+ # file-with-file: branch/mu
+ # dir-with-dir: branch/B/E
+ # file-with-dir: branch/D/G/pi
+ # dir-with-file: branch/D/H
+ # Returns (expected_disk, expected_status) describing the working
+ # copy after these commits, for the callers to tweak further.
+ #
+ # The code below was generated by svntest.factory from this script:
+ # svntest.factory.make(sbox,r"""
+ # # make a branch of A
+ # svn cp $URL/A $URL/branch
+ # svn up
+ # # ACTIONS ON THE MERGE SOURCE (branch)
+ # # various deletes of files and dirs
+ # svn delete branch/mu branch/B/E branch/D/G/pi branch/D/H
+ # svn ci
+ # svn up
+ #
+ # # replacements.
+ # # file-with-file
+ # echo "replacement for mu" > branch/mu
+ # svn add branch/mu
+ # # dir-with-dir
+ # svn mkdir branch/B/E
+ # svn ps propname propval branch/B/E
+ # # file-with-dir
+ # svn mkdir branch/D/G/pi
+ # svn ps propname propval branch/D/G/pi
+ # # dir-with-file
+ # echo "replacement for H" > branch/D/H
+ # svn add branch/D/H
+ # svn ci
+ # """)
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ url = sbox.repo_url
+
+ branch_B_E = os.path.join(wc_dir, 'branch', 'B', 'E')
+ branch_D_G_pi = os.path.join(wc_dir, 'branch', 'D', 'G', 'pi')
+ branch_D_H = os.path.join(wc_dir, 'branch', 'D', 'H')
+ branch_mu = os.path.join(wc_dir, 'branch', 'mu')
+ url_A = url + '/A'
+ url_branch = url + '/branch'
+
+ # make a branch of A
+ # svn cp $URL/A $URL/branch
+ expected_stdout = [
+ 'Committing transaction...\n',
+ 'Committed revision 2.\n',
+ ]
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'cp', url_A,
+ url_branch, '-m', 'copy log')
+
+ # svn up
+ expected_output = svntest.wc.State(wc_dir, {
+ 'branch' : Item(status='A '),
+ 'branch/B' : Item(status='A '),
+ 'branch/B/F' : Item(status='A '),
+ 'branch/B/E' : Item(status='A '),
+ 'branch/B/E/beta' : Item(status='A '),
+ 'branch/B/E/alpha' : Item(status='A '),
+ 'branch/B/lambda' : Item(status='A '),
+ 'branch/D' : Item(status='A '),
+ 'branch/D/H' : Item(status='A '),
+ 'branch/D/H/psi' : Item(status='A '),
+ 'branch/D/H/chi' : Item(status='A '),
+ 'branch/D/H/omega' : Item(status='A '),
+ 'branch/D/G' : Item(status='A '),
+ 'branch/D/G/tau' : Item(status='A '),
+ 'branch/D/G/pi' : Item(status='A '),
+ 'branch/D/G/rho' : Item(status='A '),
+ 'branch/D/gamma' : Item(status='A '),
+ 'branch/C' : Item(status='A '),
+ 'branch/mu' : Item(status='A '),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'branch' : Item(),
+ 'branch/D' : Item(),
+ 'branch/D/G' : Item(),
+ 'branch/D/G/rho' : Item(contents="This is the file 'rho'.\n"),
+ 'branch/D/G/tau' : Item(contents="This is the file 'tau'.\n"),
+ 'branch/D/G/pi' : Item(contents="This is the file 'pi'.\n"),
+ 'branch/D/H' : Item(),
+ 'branch/D/H/omega' : Item(contents="This is the file 'omega'.\n"),
+ 'branch/D/H/chi' : Item(contents="This is the file 'chi'.\n"),
+ 'branch/D/H/psi' : Item(contents="This is the file 'psi'.\n"),
+ 'branch/D/gamma' : Item(contents="This is the file 'gamma'.\n"),
+ 'branch/B' : Item(),
+ 'branch/B/E' : Item(),
+ 'branch/B/E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'branch/B/E/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'branch/B/F' : Item(),
+ 'branch/B/lambda' : Item(contents="This is the file 'lambda'.\n"),
+ 'branch/mu' : Item(contents="This is the file 'mu'.\n"),
+ 'branch/C' : Item(),
+ })
+
+ expected_status = actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'branch' : Item(status=' ', wc_rev='2'),
+ 'branch/D' : Item(status=' ', wc_rev='2'),
+ 'branch/D/gamma' : Item(status=' ', wc_rev='2'),
+ 'branch/D/H' : Item(status=' ', wc_rev='2'),
+ 'branch/D/H/omega' : Item(status=' ', wc_rev='2'),
+ 'branch/D/H/chi' : Item(status=' ', wc_rev='2'),
+ 'branch/D/H/psi' : Item(status=' ', wc_rev='2'),
+ 'branch/D/G' : Item(status=' ', wc_rev='2'),
+ 'branch/D/G/tau' : Item(status=' ', wc_rev='2'),
+ 'branch/D/G/pi' : Item(status=' ', wc_rev='2'),
+ 'branch/D/G/rho' : Item(status=' ', wc_rev='2'),
+ 'branch/B' : Item(status=' ', wc_rev='2'),
+ 'branch/B/F' : Item(status=' ', wc_rev='2'),
+ 'branch/B/E' : Item(status=' ', wc_rev='2'),
+ 'branch/B/E/beta' : Item(status=' ', wc_rev='2'),
+ 'branch/B/E/alpha' : Item(status=' ', wc_rev='2'),
+ 'branch/B/lambda' : Item(status=' ', wc_rev='2'),
+ 'branch/C' : Item(status=' ', wc_rev='2'),
+ 'branch/mu' : Item(status=' ', wc_rev='2'),
+ })
+
+ actions.run_and_verify_update(wc_dir, expected_output, expected_disk,
+ expected_status)
+
+ # ACTIONS ON THE MERGE SOURCE (branch)
+ # various deletes of files and dirs
+ # svn delete branch/mu branch/B/E branch/D/G/pi branch/D/H
+ expected_stdout = verify.UnorderedOutput([
+ 'D ' + branch_mu + '\n',
+ 'D ' + os.path.join(branch_B_E, 'alpha') + '\n',
+ 'D ' + os.path.join(branch_B_E, 'beta') + '\n',
+ 'D ' + branch_B_E + '\n',
+ 'D ' + branch_D_G_pi + '\n',
+ 'D ' + os.path.join(branch_D_H, 'chi') + '\n',
+ 'D ' + os.path.join(branch_D_H, 'omega') + '\n',
+ 'D ' + os.path.join(branch_D_H, 'psi') + '\n',
+ 'D ' + branch_D_H + '\n',
+ ])
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'delete',
+ branch_mu, branch_B_E, branch_D_G_pi, branch_D_H)
+
+ # svn ci  (commits the deletes as r3)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'branch/D/G/pi' : Item(verb='Deleting'),
+ 'branch/D/H' : Item(verb='Deleting'),
+ 'branch/mu' : Item(verb='Deleting'),
+ 'branch/B/E' : Item(verb='Deleting'),
+ })
+
+ expected_status.remove('branch/mu', 'branch/D/H', 'branch/D/H/omega',
+ 'branch/D/H/chi', 'branch/D/H/psi', 'branch/D/G/pi', 'branch/B/E',
+ 'branch/B/E/beta', 'branch/B/E/alpha')
+
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status)
+
+ # svn up
+ expected_output = svntest.wc.State(wc_dir, {})
+
+ expected_disk.remove('branch/mu', 'branch/D/H', 'branch/D/H/omega',
+ 'branch/D/H/chi', 'branch/D/H/psi', 'branch/D/G/pi', 'branch/B/E',
+ 'branch/B/E/alpha', 'branch/B/E/beta')
+
+ expected_status.tweak(wc_rev='3')
+
+ actions.run_and_verify_update(wc_dir, expected_output, expected_disk,
+ expected_status)
+
+ # replacements.
+ # file-with-file
+ # echo "replacement for mu" > branch/mu
+ main.file_write(branch_mu, 'replacement for mu')
+
+ # svn add branch/mu
+ expected_stdout = ['A ' + branch_mu + '\n']
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'add',
+ branch_mu)
+
+ # dir-with-dir
+ # svn mkdir branch/B/E
+ expected_stdout = ['A ' + branch_B_E + '\n']
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'mkdir',
+ branch_B_E)
+
+ # svn ps propname propval branch/B/E
+ expected_stdout = ["property 'propname' set on '" + branch_B_E + "'\n"]
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'ps',
+ 'propname', 'propval', branch_B_E)
+
+ # file-with-dir
+ # svn mkdir branch/D/G/pi
+ expected_stdout = ['A ' + branch_D_G_pi + '\n']
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'mkdir',
+ branch_D_G_pi)
+
+ # svn ps propname propval branch/D/G/pi
+ expected_stdout = ["property 'propname' set on '" + branch_D_G_pi + "'\n"]
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'ps',
+ 'propname', 'propval', branch_D_G_pi)
+
+ # dir-with-file
+ # echo "replacement for H" > branch/D/H
+ main.file_write(branch_D_H, 'replacement for H')
+
+ # svn add branch/D/H
+ expected_stdout = ['A ' + branch_D_H + '\n']
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'add',
+ branch_D_H)
+
+ # svn ci  (commits the replacements as r4)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'branch/D/G/pi' : Item(verb='Adding'),
+ 'branch/D/H' : Item(verb='Adding'),
+ 'branch/mu' : Item(verb='Adding'),
+ 'branch/B/E' : Item(verb='Adding'),
+ })
+
+ expected_status.add({
+ 'branch/D/G/pi' : Item(status=' ', wc_rev='4'),
+ 'branch/D/H' : Item(status=' ', wc_rev='4'),
+ 'branch/B/E' : Item(status=' ', wc_rev='4'),
+ 'branch/mu' : Item(status=' ', wc_rev='4'),
+ })
+
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status)
+
+ return expected_disk, expected_status
+
+#----------------------------------------------------------------------
+# ra_serf causes duplicate notifications with this test:
+@Issue(3802)
+def merge_replace_causes_tree_conflict(sbox):
+ "replace vs. edit tree-conflicts"
+
+ # Merging the replacements made on the branch (see merge_replace_setup)
+ # onto a working copy of A with conflicting local edits/propsets must
+ # flag each replaced path as a tree-conflict victim.
+ expected_disk, expected_status = merge_replace_setup(sbox)
+
+ # svntest.factory.make(sbox,r"""
+ # # ACTIONS ON THE MERGE TARGET (A)
+ # # local mods to conflict with merge source
+ # echo modified > A/mu
+ # svn ps propname otherpropval A/B/E
+ # echo modified > A/D/G/pi
+ # svn ps propname propval A/D/H
+ # svn merge $URL/A $URL/branch A
+ # svn st
+ # """, prev_status=expected_status, prev_disk=expected_disk)
+
+ wc_dir = sbox.wc_dir
+ url = sbox.repo_url
+
+ A = os.path.join(wc_dir, 'A')
+ A_B_E = os.path.join(wc_dir, 'A', 'B', 'E')
+ A_D_G_pi = os.path.join(wc_dir, 'A', 'D', 'G', 'pi')
+ A_D_H = os.path.join(wc_dir, 'A', 'D', 'H')
+ A_mu = os.path.join(wc_dir, 'A', 'mu')
+ url_A = url + '/A'
+ url_branch = url + '/branch'
+
+ # ACTIONS ON THE MERGE TARGET (A)
+ # local mods to conflict with merge source
+ # echo modified > A/mu
+ main.file_write(A_mu, 'modified')
+
+ # svn ps propname otherpropval A/B/E
+ expected_stdout = ["property 'propname' set on '" + A_B_E + "'\n"]
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'ps',
+ 'propname', 'otherpropval', A_B_E)
+
+ # echo modified > A/D/G/pi
+ main.file_write(A_D_G_pi, 'modified')
+
+ # svn ps propname propval A/D/H
+ expected_stdout = ["property 'propname' set on '" + A_D_H + "'\n"]
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'ps',
+ 'propname', 'propval', A_D_H)
+
+ # svn merge $URL/A $URL/branch A
+ # All four locally modified paths are reported as tree conflicts.
+ expected_stdout = expected_merge_output(None, [
+ # merge
+ ' C ' + A_B_E + '\n',
+ ' C ' + A_mu + '\n',
+ ' C ' + A_D_G_pi + '\n',
+ ' C ' + A_D_H + '\n',
+ # mergeinfo
+ ' U ' + A + '\n',
+ ], target=A, two_url=True, tree_conflicts=4)
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'merge',
+ url_A, url_branch, A, '--accept=postpone')
+
+ # svn st
+ expected_status.tweak('A', status=' M')
+ expected_status.tweak('A/D/G/pi', 'A/mu', status='M ', treeconflict='C')
+ expected_status.tweak('A/D/H', status=' M', treeconflict='C')
+ expected_status.tweak('A/B/E', status=' M', treeconflict='C')
+
+ actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+@Issue(3806)
+def merge_replace_causes_tree_conflict2(sbox):
+ "replace vs. delete tree-conflicts"
+
+ # Merging the branch replacements (see merge_replace_setup) onto a
+ # working copy where the same paths were locally deleted; each of the
+ # four kinds of replacement is merged separately so every conflict is
+ # observed.
+ expected_disk, expected_status = merge_replace_setup(sbox)
+
+ # svntest.factory.make(sbox,r"""
+ # # ACTIONS ON THE MERGE TARGET (A)
+ # # local mods to conflict with merge source
+ # # Delete each of the files and dirs to be replaced by the merge.
+ # svn delete A/mu A/B/E A/D/G/pi A/D/H
+ # # Merge them one by one to see all the errors.
+ # svn merge $URL/A/mu $URL/branch/mu A/mu
+ # svn merge $URL/A/B $URL/branch/B A/B
+ # svn merge --depth=immediates $URL/A/D $URL/branch/D A/D
+ # svn merge $URL/A/D/G $URL/branch/D/G A/D/G
+ # svn st
+ # """, prev_disk=expected_disk, prev_status=expected_status)
+
+ wc_dir = sbox.wc_dir
+ url = sbox.repo_url
+
+ A_B = os.path.join(wc_dir, 'A', 'B')
+ A_B_E = os.path.join(wc_dir, 'A', 'B', 'E')
+ A_D = os.path.join(wc_dir, 'A', 'D')
+ A_D_G = os.path.join(wc_dir, 'A', 'D', 'G')
+ A_D_G_pi = os.path.join(wc_dir, 'A', 'D', 'G', 'pi')
+ A_D_H = os.path.join(wc_dir, 'A', 'D', 'H')
+ A = os.path.join(wc_dir, 'A')
+ A_mu = os.path.join(wc_dir, 'A', 'mu')
+ url_A_B = url + '/A/B'
+ url_A_D = url + '/A/D'
+ url_A_D_G = url + '/A/D/G'
+ url_A = url + '/A'
+ url_branch_B = url + '/branch/B'
+ url_branch_D = url + '/branch/D'
+ url_branch_D_G = url + '/branch/D/G'
+ url_branch = url + '/branch'
+
+ # ACTIONS ON THE MERGE TARGET (A)
+ # local mods to conflict with merge source
+ # Delete each of the files and dirs to be replaced by the merge.
+ # svn delete A/mu A/B/E A/D/G/pi A/D/H
+ expected_stdout = verify.UnorderedOutput([
+ 'D ' + A_mu + '\n',
+ 'D ' + os.path.join(A_B_E, 'alpha') + '\n',
+ 'D ' + os.path.join(A_B_E, 'beta') + '\n',
+ 'D ' + A_B_E + '\n',
+ 'D ' + A_D_G_pi + '\n',
+ 'D ' + os.path.join(A_D_H, 'chi') + '\n',
+ 'D ' + os.path.join(A_D_H, 'omega') + '\n',
+ 'D ' + os.path.join(A_D_H, 'psi') + '\n',
+ 'D ' + A_D_H + '\n',
+ ])
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'delete',
+ A_mu, A_B_E, A_D_G_pi, A_D_H)
+ expected_status.tweak('A/B/E', 'A/B/E/alpha', 'A/B/E/beta', 'A/D/G/pi',
+ 'A/D/H', 'A/D/H/chi', 'A/D/H/omega', 'A/D/H/psi',
+ status='D ')
+
+ # H is now a file. This hides the status of the descendants.
+ expected_status.remove('A/D/H/chi', 'A/D/H/psi', 'A/D/H/omega')
+
+ # Merge them one by one to see all the errors.
+
+ ### A file-with-file replacement onto a deleted file.
+ # svn merge $URL/A/mu $URL/branch/mu A/mu
+ expected_stdout = expected_merge_output(None, [
+ ' C ' + A_mu + '\n', # merge
+ 'A ' + A_mu + '\n', # merge
+ " U " + A + "\n", # mergeinfo
+ " U " + A_mu + "\n", # mergeinfo -> 'RM' status
+ ], target=A, two_url=True, tree_conflicts=1)
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'merge',
+ url_A, url_branch, A, '--depth=files', '--accept=postpone')
+ # New mergeinfo describing the merge.
+ expected_status.tweak('A', status=' M')
+ # Currently this fails because the local status is 'D'eleted rather than
+ # 'R'eplaced with history:
+ #
+ # D C merge_tree_conflict_tests-23\A\mu
+ # > local delete, incoming replace upon merge
+ expected_status.tweak('A/mu', status='RM', wc_rev='-', copied='+',
+ treeconflict='C')
+
+ ### A dir-with-dir replacement onto a deleted directory.
+ # svn merge $URL/A/B $URL/branch/B A/B
+ expected_stdout = expected_merge_output(None, [
+ ' C ' + A_B_E + '\n', # merge
+ 'A ' + A_B_E + '\n', # merge
+ " U " + A_B + "\n", # mergeinfo
+ ], target=A_B, two_url=True, tree_conflicts=1)
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'merge',
+ url_A_B, url_branch_B, A_B, '--accept=postpone')
+ # New mergeinfo describing the merge.
+ expected_status.tweak('A/B', status=' M')
+ # Currently this fails because the local status shows a property mod (and
+ # the TC type is listed as incoming delete, not incoming replace):
+ #
+ # RM + C merge_tree_conflict_tests-23\A\B\E
+ # > local delete, incoming delete upon merge
+ expected_status.tweak('A/B/E', status='R ', wc_rev='-', copied='+',
+ treeconflict='C')
+
+ ### A dir-with-file replacement onto a deleted directory.
+ # svn merge --depth=immediates $URL/A/D $URL/branch/D A/D
+ expected_stdout = expected_merge_output(None, [
+ ' C ' + A_D_H + '\n', # merge
+ 'A ' + A_D_H + '\n', # merge
+ " U " + A_D + "\n", # mergeinfo
+ ], target=A_D, two_url=True, tree_conflicts=1)
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'merge',
+ '--depth=immediates', url_A_D, url_branch_D, A_D, '--accept=postpone')
+ # New mergeinfo describing the merge.
+ expected_status.tweak('A/D', 'A/D/G', status=' M')
+ # Currently this fails because the local status is 'D'eleted rather than
+ # 'R'eplaced with history:
+ #
+ # D C merge_tree_conflict_tests-23\A\D\H
+ # > local delete, incoming replace upon merge
+ expected_status.tweak('A/D/H', status='R ', wc_rev='-', copied='+',
+ treeconflict='C')
+
+ ### A file-with-dir replacement onto a deleted file.
+ # svn merge $URL/A/D/G $URL/branch/D/G A/D/G
+ expected_stdout = expected_merge_output(None, [
+ ' C ' + A_D_G_pi + '\n', # merge
+ 'A ' + A_D_G_pi + '\n', # merge
+ " U " + A_D_G + "\n", # mergeinfo
+ ], target=A_D_G, two_url=True, tree_conflicts=1)
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'merge',
+ url_A_D_G, url_branch_D_G, A_D_G, '--accept=postpone')
+ # New mergeinfo describing the merge.
+ expected_status.tweak('A/D/G', status=' M')
+ # Currently this fails because the local status shows a property mod (and
+ # the TC type is listed as incoming delete, not incoming replace):
+ #
+ # RM + C merge_tree_conflict_tests-23\A\D\G\pi
+ # > local delete, incoming delete upon merge
+ expected_status.tweak('A/D/G/pi', status='R ', wc_rev='-', copied='+',
+ treeconflict='C')
+
+ # Check the resulting status:
+ actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Check the tree conflict types:
+ # Regex matched against 'svn st --depth=empty' of each victim.
+ expected_stdout = '(R.*)|(Summary of conflicts.*)|( Tree conflicts.*)' \
+ '|(.*local delete, incoming replace upon merge.*)' \
+ '|( \>.*)'
+ tree_conflicted_path = [A_B_E, A_mu, A_D_G_pi, A_D_H]
+ for path in tree_conflicted_path:
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'st',
+ '--depth=empty', path)
+
+#----------------------------------------------------------------------
+# Test for issue #4011 'merge of replacement on local delete fails'
+@SkipUnless(server_has_mergeinfo)
+@Issue(4011)
+def merge_replace_on_del_fails(sbox):
+ "merge replace on local delete fails"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ C_path = os.path.join(wc_dir, 'A', 'C')
+ branch_path = os.path.join(wc_dir, 'branch')
+ C_branch_path = os.path.join(wc_dir, 'branch', 'C')
+
+ # r2 - Copy ^/A to ^/branch
+ svntest.actions.run_and_verify_svn(None, [], 'copy',
+ sbox.repo_url + '/A',
+ sbox.repo_url + '/branch',
+ '-m', 'Create a branch')
+
+ # r3 - Replace A/C
+ svntest.actions.run_and_verify_svn(None, [], 'del', C_path)
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', C_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci',
+ '-m', 'Replace A/C', wc_dir)
+
+ # r4 - Delete branch/C
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'del', C_branch_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci',
+ '-m', 'Delete branch/C', wc_dir)
+
+ # Sync merge ^/A to branch
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ expected_stdout = expected_merge_output([[2,4]], [
+ ' C ' + C_branch_path + '\n', # merge
+ ' U ' + branch_path + '\n', # mergeinfo
+ ], target=branch_path, tree_conflicts=1)
+ # This currently fails with:
+ #
+ # >svn merge ^/A branch
+ # ..\..\..\subversion\svn\util.c:913: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_client\merge.c:11349: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_client\merge.c:11303: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_client\merge.c:11303: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_client\merge.c:11273: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_client\merge.c:9287: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_client\merge.c:8870: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_client\merge.c:5349: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_repos\reporter.c:1430: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_client\ra.c:247: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_repos\reporter.c:1269: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_repos\reporter.c:1205: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_repos\reporter.c:920: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_delta\cancel.c:120: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_delta\cancel.c:120: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_client\repos_diff.c:710: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_client\merge.c:2234: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_wc\adm_ops.c:1069: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_wc\adm_ops.c:956: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_wc\update_editor.c:5036: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_wc\wc_db.c:6985: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_wc\wc_db.c:6929: (apr_err=155010)
+ # ..\..\..\subversion\libsvn_wc\wc_db.c:6920: (apr_err=155010)
+ # svn: E155010: The node 'C:\SVN\src-trunk\Debug\subversion\tests\
+ # cmdline\svn-test-work\working_copies\merge_tree_conflict_tests-24\
+ # branch\C' was not found.
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'merge',
+ sbox.repo_url + '/A', branch_path, '--accept=postpone')
+
+def merge_conflict_details(sbox):
+ "merge conflict details"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_append('A/B/E/new', 'new\n')
+ sbox.simple_add('A/B/E/new')
+ sbox.simple_append('A/B/E/alpha', '\nextra\nlines\n')
+ sbox.simple_rm('A/B/E/beta', 'A/B/F')
+ sbox.simple_propset('key', 'VAL', 'A/B/E', 'A/B')
+ sbox.simple_mkdir('A/B/E/new-dir1')
+ sbox.simple_mkdir('A/B/E/new-dir2')
+ sbox.simple_mkdir('A/B/E/new-dir3')
+ sbox.simple_rm('A/B/lambda')
+ sbox.simple_mkdir('A/B/lambda')
+ sbox.simple_commit()
+
+ sbox.simple_update('', 1)
+
+ sbox.simple_move('A/B', 'B')
+
+ sbox.simple_propset('key', 'vAl', 'B')
+ sbox.simple_move('B/E/beta', 'beta')
+ sbox.simple_propset('a', 'b', 'B/F', 'B/lambda')
+ sbox.simple_append('B/E/alpha', 'other\nnew\nlines')
+ sbox.simple_mkdir('B/E/new')
+ sbox.simple_mkdir('B/E/new-dir1')
+ sbox.simple_append('B/E/new-dir2', 'something')
+ sbox.simple_append('B/E/new-dir3', 'something')
+ sbox.simple_add('B/E/new-dir3')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'B' : Item(status=' C', copied='+', moved_from='A/B',
+ wc_rev='-', entry_status='AC'),
+ 'B/E' : Item(status=' M', copied='+', wc_rev='-'),
+ 'B/E/new' : Item(status='A ', treeconflict='C', wc_rev='-'),
+ 'B/E/beta' : Item(status='D ', copied='+', treeconflict='C',
+ wc_rev='-', moved_to='beta'),
+ 'B/E/alpha' : Item(status='C ', copied='+', wc_rev='-'),
+ 'B/E/new-dir3' : Item(status='A ', treeconflict='C', wc_rev='-'),
+ 'B/E/new-dir1' : Item(status='A ', treeconflict='C', wc_rev='-'),
+ 'B/F' : Item(status=' M', copied='+', treeconflict='C',
+ wc_rev='-'),
+ 'B/lambda' : Item(status=' M', copied='+', treeconflict='C',
+ wc_rev='-'),
+ 'beta' : Item(status='A ', copied='+',
+ moved_from='B/E/beta', wc_rev='-')
+ })
+ expected_status.tweak('A/B', status='D ', wc_rev='1', moved_to='B')
+ expected_status.tweak('A/B/lambda', 'A/B/E', 'A/B/E/beta', 'A/B/E/alpha',
+ 'A/B/F', status='D ')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'B' : Item(status=' C'),
+ 'B/E' : Item(status=' U'),
+ 'B/E/new' : Item(status=' ', treeconflict='C'),
+ 'B/E/beta' : Item(status=' ', treeconflict='C'),
+ 'B/E/alpha' : Item(status='C '),
+ 'B/E/new-dir3' : Item(status=' ', treeconflict='C'),
+ 'B/E/new-dir1' : Item(status=' ', treeconflict='C'),
+ 'B/F' : Item(status=' ', treeconflict='C'),
+ 'B/lambda' : Item(status=' ', treeconflict='C'),
+ })
+ expected_skip = wc.State(wc_dir, {
+ 'B/E/new-dir2' : Item(verb='Skipped'),
+ })
+ svntest.actions.run_and_verify_merge(sbox.ospath('B'),
+ 1, 2, '^/A/B', '^/A/B',
+ expected_output,
+ None, None,
+ None, None, expected_skip)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_info = [
+ {
+ "Path" : re.escape(sbox.ospath('B')),
+ "Conflicted Properties" : "key",
+ "Conflict Details": re.escape(
+ 'incoming dir edit upon merge' +
+ ' Source left: (dir) ^/A/B@1' +
+ ' Source right: (dir) ^/A/B@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('B/E')),
+ },
+ {
+ "Path" : re.escape(sbox.ospath('B/E/alpha')),
+ "Conflict Previous Base File" : '.*alpha.*',
+ "Conflict Previous Working File" : '.*alpha.*',
+ "Conflict Current Base File": '.*alpha.*',
+ "Conflict Details": re.escape(
+ 'incoming file edit upon merge' +
+ ' Source left: (file) ^/A/B/E/alpha@1' +
+ ' Source right: (file) ^/A/B/E/alpha@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('B/E/beta')),
+ "Tree conflict": re.escape(
+ 'local file moved away, incoming file delete or move upon merge' +
+ ' Source left: (file) ^/A/B/E/beta@1' +
+ ' Source right: (none) ^/A/B/E/beta@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('B/E/new')),
+ "Tree conflict": re.escape(
+ 'local dir add, incoming file add upon merge' +
+ ' Source left: (none) ^/A/B/E/new@1' +
+ ' Source right: (file) ^/A/B/E/new@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('B/E/new-dir1')),
+ "Tree conflict": re.escape(
+ 'local dir add, incoming dir add upon merge' +
+ ' Source left: (none) ^/A/B/E/new-dir1@1' +
+ ' Source right: (dir) ^/A/B/E/new-dir1@2')
+ },
+ #{ ### Skipped
+ # "Path" : re.escape(sbox.ospath('B/E/new-dir2')),
+ # "Tree conflict": re.escape(
+ # 'local file unversioned, incoming dir add upon merge' +
+ # ' Source left: (none) ^/A/B/E/new-dir2@1' +
+ # ' Source right: (dir) ^/A/B/E/new-dir2@2')
+ #},
+ {
+ "Path" : re.escape(sbox.ospath('B/E/new-dir3')),
+ "Tree conflict": re.escape(
+ 'local file add, incoming dir add upon merge' +
+ ' Source left: (none) ^/A/B/E/new-dir3@1' +
+ ' Source right: (dir) ^/A/B/E/new-dir3@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('B/F')),
+ "Tree conflict": re.escape(
+ 'local dir edit, incoming dir delete or move upon merge' +
+ ' Source left: (dir) ^/A/B/F@1' +
+ ' Source right: (none) ^/A/B/F@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('B/lambda')),
+ "Tree conflict": re.escape(
+ 'local file edit, incoming replace with dir upon merge' +
+ ' Source left: (file) ^/A/B/lambda@1' +
+ ' Source right: (dir) ^/A/B/lambda@2')
+ },
+ ]
+
+ svntest.actions.run_and_verify_info(expected_info, sbox.ospath('B'),
+ '--depth', 'infinity')
+
+def merge_obstruction_recording(sbox):
+ "merge obstruction recording"
+
+ sbox.build(empty=True)
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_mkdir('trunk')
+ sbox.simple_mkdir('branches')
+ sbox.simple_commit() #r1
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', sbox.repo_url + '/trunk',
+ sbox.repo_url + '/branches/branch',
+ '-mCopy') # r2
+
+ sbox.simple_mkdir('trunk/dir')
+ sbox.simple_add_text('The file on trunk\n', 'trunk/dir/file.txt')
+ sbox.simple_commit() #r3
+
+ sbox.simple_update()
+
+ sbox.simple_mkdir('branches/branch/dir')
+ sbox.simple_add_text('The file on branch\n', 'branches/branch/dir/file.txt')
+ sbox.simple_commit() #r4
+
+ sbox.simple_update()
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'switch', '^/branches/branch', wc_dir,
+ '--ignore-ancestry')
+
+ expected_output = wc.State(wc_dir, {
+ 'dir' : Item(status=' ', treeconflict='C'),
+ 'dir/file.txt' : Item(status=' ', treeconflict='A'),
+ })
+ expected_mergeinfo_output = wc.State(wc_dir, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(wc_dir, {
+ })
+ expected_disk = wc.State('', {
+ 'dir/file.txt' : Item(contents="The file on branch\n"),
+ '.' : Item(props={'svn:mergeinfo':'/trunk:2-4'}),
+ })
+ expected_status = wc.State(wc_dir, {
+ '' : Item(status=' M', wc_rev='4'),
+ 'dir' : Item(status=' ', treeconflict='C', wc_rev='4'),
+ 'dir/file.txt' : Item(status=' ', wc_rev='4'),
+ })
+ expected_skip = wc.State('', {
+ })
+ svntest.actions.run_and_verify_merge(wc_dir, '1', '4', sbox.repo_url + '/trunk',
+ None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+ expected_info = [
+ {
+ "Path" : re.escape(sbox.ospath('dir')),
+ "Tree conflict": re.escape(
+ 'local dir obstruction, incoming dir add upon merge' +
+ ' Source left: (none) ^/trunk/dir@1' +
+ ' Source right: (dir) ^/trunk/dir@4')
+ },
+ ]
+
+ svntest.actions.run_and_verify_info(expected_info, sbox.ospath('dir'))
+
+ # How should the user handle this conflict?
+ # ### Would be nice if we could just accept mine (leave as is, fix mergeinfo)
+  # ### or accept theirs (delete what is here and insert copy)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'resolve', '--accept=working',
+ sbox.ospath('dir'))
+
+ # Redo the skipped merge as record only merge
+ expected_output = [
+ '--- Recording mergeinfo for merge of r4 into \'%s\':\n' % \
+ sbox.ospath('dir'),
+ ' U %s\n' % sbox.ospath('dir'),
+ ]
+ # ### Why are r1-r3 not recorded?
+ # ### Guess: Because dir's history only exists since r4.
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'merge', '--record-only',
+ sbox.repo_url + '/trunk/dir',
+ sbox.ospath('dir'),
+ '-c', '1-4')
+
+ expected_disk = wc.State('', {
+ 'dir' : Item(props={'svn:mergeinfo':'/trunk/dir:4'}),
+ 'dir/file.txt' : Item(contents="The file on branch\n"),
+ '.' : Item(props={'svn:mergeinfo':'/trunk:2-4'}),
+ })
+ svntest.actions.verify_disk(wc_dir, expected_disk, check_props=True)
+
+ # Because r1-r3 are not recorded, the mergeinfo is not elided :(
+
+ # Even something like a two url merge wouldn't work, because dir
+ # didn't exist below trunk in r1 either.
+
+ # A resolver action could be smarter though...
+
+def added_revision_recording_in_tree_conflict(sbox):
+ "tree conflict stores added revision for victim"
+
+ sbox.build(empty=True)
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_mkdir('trunk')
+ sbox.simple_commit() #r1
+
+ # Create a branch
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', sbox.repo_url + '/trunk',
+ sbox.repo_url + '/branch',
+ '-mcopy') # r2
+
+ sbox.simple_add_text('The file on trunk\n', 'trunk/foo')
+ sbox.simple_commit() #r3
+
+ sbox.simple_update()
+
+ # Merge ^/trunk into ^/branch
+ expected_output = svntest.wc.State(sbox.ospath('branch'), {
+ 'foo' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(sbox.ospath('branch'), {
+ '' : Item(status=' U')
+ })
+ expected_elision_output = wc.State(wc_dir, {
+ })
+ expected_disk = wc.State('', {
+ 'foo' : Item(contents="The file on trunk\n"),
+ '.' : Item(props={u'svn:mergeinfo': u'/trunk:2-3'}),
+ })
+ expected_status = wc.State(sbox.ospath('branch'), {
+ '' : Item(status=' M', wc_rev='3'),
+ 'foo' : Item(status='A ', copied='+', wc_rev='-'),
+ })
+ expected_skip = wc.State('', {
+ })
+ svntest.actions.run_and_verify_merge(sbox.ospath('branch'), None, None,
+ sbox.repo_url + '/trunk',
+ None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ sbox.simple_commit() #r4
+
+ # Edit the file on the branch
+ sbox.simple_append('branch/foo', 'The file on the branch\n')
+ sbox.simple_commit() #r5
+
+ # Replace file with a directory on trunk
+ sbox.simple_rm('trunk/foo')
+ sbox.simple_mkdir('trunk/foo')
+ sbox.simple_commit() #r6
+
+ sbox.simple_update()
+
+ # Merge ^/trunk into ^/branch
+ expected_output = svntest.wc.State(sbox.ospath('branch'), {
+ 'foo' : Item(status=' ', treeconflict='C')
+ })
+ expected_mergeinfo_output = wc.State(sbox.ospath('branch'), {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(wc_dir, {
+ })
+ expected_disk = wc.State('', {
+ 'foo' : Item(contents="The file on trunk\nThe file on the branch\n"),
+ '.' : Item(props={u'svn:mergeinfo': u'/trunk:2-6'}),
+ })
+ expected_status = wc.State(sbox.ospath('branch'), {
+ '' : Item(status=' M', wc_rev='6'),
+ 'foo' : Item(status=' ', treeconflict='C', wc_rev='6'),
+ })
+ expected_skip = wc.State('', {
+ })
+ svntest.actions.run_and_verify_merge(sbox.ospath('branch'), None, None,
+ sbox.repo_url + '/trunk',
+ None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+
+ # Ensure that revisions in tree conflict info match what we expect.
+ # We used to record source left as ^/trunk/foo@1 instead of ^/trunk/foo@3.
+ # Note that foo was first added in r3.
+ expected_info = [
+ {
+ "Path" : re.escape(sbox.ospath('branch/foo')),
+ "Tree conflict": re.escape(
+ 'local file edit, incoming replace with dir upon merge' +
+ ' Source left: (file) ^/trunk/foo@3' +
+ ' Source right: (dir) ^/trunk/foo@6'),
+ },
+ ]
+ svntest.actions.run_and_verify_info(expected_info, sbox.ospath('branch/foo'))
+
+def spurios_tree_conflict_with_added_file(sbox):
+ "spurious tree conflict with unmodified added file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+  # Create a branch of A, A_branch
+ sbox.simple_copy('A', 'A_branch')
+ sbox.simple_commit()
+
+ # Create a new file on the trunk
+ sbox.simple_append('A/new', 'new\n')
+ sbox.simple_add('A/new')
+ sbox.simple_commit()
+
+ # Sync the branch with the trunk
+ sbox.simple_update()
+ expected_output = wc.State(wc_dir, {
+ "A_branch/new" : Item(status="A "),
+ })
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_merge(sbox.ospath('A_branch'),
+ None, None, '^/A', None,
+ expected_output,
+ None, None,
+ None, None, expected_skip)
+ sbox.simple_commit()
+
+ # Reintegrate the branch (a no-op change, but users are free to do this)
+ sbox.simple_update()
+ expected_output = wc.State(wc_dir, { })
+ svntest.actions.run_and_verify_merge(sbox.ospath('A'),
+ None, None, '^/A_branch', None,
+ expected_output,
+ None, None,
+ None, None, expected_skip,
+ [], False, True, '--reintegrate',
+ sbox.ospath('A'))
+
+ # Delete the new file on the branch
+ sbox.simple_rm('A_branch/new')
+ sbox.simple_commit()
+
+ # Make an unrelated change on the trunk
+ sbox.simple_append('A/mu', 'more text\n')
+ sbox.simple_commit()
+
+ # Merge the trunk to the branch. Forcing a reintegrate merge here since
+ # this is what the automatic merge does, as of the time this test was written.
+  # This merge would raise a 'local missing vs incoming edit' tree conflict
+ # on the new file, which is bogus since there are no incoming edits.
+ expected_output = wc.State(wc_dir, {
+ 'A_branch/mu' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(wc_dir, {
+ 'A_branch' : Item(status=' U'),
+ })
+ svntest.actions.run_and_verify_merge(sbox.ospath('A_branch'),
+ None, None, '^/A', None,
+ expected_output,
+ expected_mergeinfo_output, None,
+ None, None, expected_skip,
+ [], False, True, '--reintegrate',
+ sbox.ospath('A_branch'))
+
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ delete_file_and_dir,
+ merge_catches_nonexistent_target,
+ merge_tree_deleted_in_target,
+ three_way_merge_add_of_existing_binary_file,
+ merge_added_dir_to_deleted_in_target,
+ merge_add_over_versioned_file_conflicts,
+ mergeinfo_recording_in_skipped_merge,
+ del_differing_file,
+ tree_conflicts_and_obstructions,
+ tree_conflicts_on_merge_local_ci_4_1,
+ tree_conflicts_on_merge_local_ci_4_2,
+ tree_conflicts_on_merge_local_ci_5_1,
+ tree_conflicts_on_merge_local_ci_5_2,
+ tree_conflicts_on_merge_local_ci_6,
+ tree_conflicts_on_merge_no_local_ci_4_1,
+ tree_conflicts_on_merge_no_local_ci_4_2,
+ tree_conflicts_on_merge_no_local_ci_5_1,
+ tree_conflicts_on_merge_no_local_ci_5_2,
+ tree_conflicts_on_merge_no_local_ci_6,
+ tree_conflicts_merge_edit_onto_missing,
+ tree_conflicts_merge_del_onto_missing,
+ merge_replace_causes_tree_conflict,
+ merge_replace_causes_tree_conflict2,
+ merge_replace_on_del_fails,
+ merge_conflict_details,
+ merge_obstruction_recording,
+ added_revision_recording_in_tree_conflict,
+ spurios_tree_conflict_with_added_file,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/mergeinfo_tests.py b/subversion/tests/cmdline/mergeinfo_tests.py
new file mode 100755
index 0000000..328a9f2
--- /dev/null
+++ b/subversion/tests/cmdline/mergeinfo_tests.py
@@ -0,0 +1,974 @@
+#!/usr/bin/env python
+#
+# mergeinfo_tests.py: testing Merge Tracking reporting
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import shutil, sys, re, os
+
+# Our testing module
+import svntest
+from svntest import wc
+
+# (abbreviation)
+Item = wc.StateItem
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+exp_noop_up_out = svntest.actions.expected_noop_update_output
+
+from svntest.main import SVN_PROP_MERGEINFO
+from svntest.main import server_has_mergeinfo
+
+# Get a couple merge helpers
+from svntest.mergetrees import set_up_branch
+from svntest.mergetrees import expected_merge_output
+
+def adjust_error_for_server_version(expected_err):
+ "Return the expected error regexp appropriate for the server version."
+ if server_has_mergeinfo():
+ return expected_err
+ else:
+ return ".*Retrieval of mergeinfo unsupported by '.+'"
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+
+#----------------------------------------------------------------------
+
+def no_mergeinfo(sbox):
+ "'mergeinfo' on a URL that lacks mergeinfo"
+
+ sbox.build(create_wc=False)
+ sbox.simple_repo_copy('A', 'A2')
+ svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
+ [],
+ sbox.repo_url + '/A',
+ sbox.repo_url + '/A2',
+ "--show-revs=merged")
+
+@SkipUnless(server_has_mergeinfo)
+def mergeinfo(sbox):
+ "'mergeinfo' on a path with mergeinfo"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # make a branch 'A2'
+ sbox.simple_repo_copy('A', 'A2') # r2
+ # make a change in branch 'A'
+ sbox.simple_mkdir('A/newdir')
+ sbox.simple_commit() # r3
+ sbox.simple_update()
+
+ # Dummy up some mergeinfo.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', SVN_PROP_MERGEINFO, '/A:3',
+ sbox.ospath('A2'))
+ svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
+ ['3'],
+ sbox.repo_url + '/A',
+ sbox.ospath('A2'),
+ "--show-revs=merged")
+
+@SkipUnless(server_has_mergeinfo)
+def explicit_mergeinfo_source(sbox):
+ "'mergeinfo' with source selection"
+
+ # The idea is the target has mergeinfo pertaining to two or more different
+ # source branches and we're asking about just one of them.
+
+ sbox.build()
+
+ def url(relpath):
+ return sbox.repo_url + '/' + relpath
+ def path(relpath):
+ return sbox.ospath(relpath)
+
+ B = 'A/B'
+
+ # make some branches
+ B2 = 'A/B2'
+ B3 = 'A/B3'
+ sbox.simple_repo_copy(B, B2) # r2
+ sbox.simple_repo_copy(B, B3) # r3
+ sbox.simple_update()
+
+ # make changes in the branches
+ sbox.simple_mkdir('A/B2/newdir')
+ sbox.simple_commit() # r4
+ sbox.simple_mkdir('A/B3/newdir')
+ sbox.simple_commit() # r5
+
+ # Put dummy mergeinfo on branch root
+ mergeinfo = '/A/B2:2-5\n/A/B3:2-5\n'
+ sbox.simple_propset(SVN_PROP_MERGEINFO, mergeinfo, B)
+ sbox.simple_commit()
+
+ # Check using each of our recorded merge sources (as paths and URLs).
+ svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
+ ['2', '4'], url(B2), path(B),
+ "--show-revs=merged")
+ svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
+ ['2', '4'], path(B2), path(B),
+ "--show-revs=merged")
+ svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
+ ['3', '5'], url(B3), path(B),
+ "--show-revs=merged")
+ svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
+ ['3', '5'], path(B3), path(B),
+ "--show-revs=merged")
+
+@SkipUnless(server_has_mergeinfo)
+def mergeinfo_non_source(sbox):
+ "'mergeinfo' with uninteresting source selection"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ H_path = os.path.join(wc_dir, 'A', 'D', 'H')
+ H2_path = os.path.join(wc_dir, 'A', 'D', 'H2')
+ B_url = sbox.repo_url + '/A/B'
+ B_path = os.path.join(wc_dir, 'A', 'B')
+ G_url = sbox.repo_url + '/A/D/G'
+ G_path = os.path.join(wc_dir, 'A', 'D', 'G')
+ H2_url = sbox.repo_url + '/A/D/H2'
+
+ # Make a copy, and dummy up some mergeinfo.
+ mergeinfo = '/A/B:1\n/A/D/G:1\n'
+ svntest.actions.set_prop(SVN_PROP_MERGEINFO, mergeinfo, H_path)
+ svntest.main.run_svn(None, "cp", H_path, H2_path)
+ svntest.main.run_svn(None, "ci", "-m", "r2", wc_dir)
+
+ # Check on a source we haven't "merged" from.
+ svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
+ [], H2_url, H_path,
+ "--show-revs=merged")
+
+#----------------------------------------------------------------------
+# Issue #3138
+@SkipUnless(server_has_mergeinfo)
+@Issue(3138)
+def mergeinfo_on_unknown_url(sbox):
+ "mergeinfo of an unknown url should return error"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # remove a path from the repo and commit.
+ iota_path = os.path.join(wc_dir, 'iota')
+ svntest.actions.run_and_verify_svn(None, [], 'rm', iota_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ "ci", wc_dir, "-m", "log message")
+
+ url = sbox.repo_url + "/iota"
+ expected_err = adjust_error_for_server_version(".*File not found.*iota.*|"
+ ".*iota.*path not found.*")
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ "mergeinfo", "--show-revs", "eligible",
+ url, wc_dir)
+
+# Test for issue #3126 'svn mergeinfo shows too few or too many
+# eligible revisions'. Specifically
+# http://subversion.tigris.org/issues/show_bug.cgi?id=3126#desc5.
+@SkipUnless(server_has_mergeinfo)
+@Issue(3126)
+def non_inheritable_mergeinfo(sbox):
+ "non-inheritable mergeinfo shows as merged"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ expected_disk, expected_status = set_up_branch(sbox)
+
+ # Some paths we'll care about
+ A_COPY_path = os.path.join(wc_dir, "A_COPY")
+ D_COPY_path = os.path.join(wc_dir, "A_COPY", "D")
+ rho_COPY_path = os.path.join(wc_dir, "A_COPY", "D", "G", "rho")
+
+ # Update the WC, then merge r4 from A to A_COPY and r6 from A to A_COPY
+ # at --depth empty and commit the merges as r7.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(6), [], 'up',
+ wc_dir)
+ expected_status.tweak(wc_rev=6)
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[4]],
+ ['U ' + rho_COPY_path + '\n',
+ ' U ' + A_COPY_path + '\n',]),
+ [], 'merge', '-c4',
+ sbox.repo_url + '/A',
+ A_COPY_path)
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[6]], ' G ' + A_COPY_path + '\n'),
+ [], 'merge', '-c6',
+ sbox.repo_url + '/A',
+ A_COPY_path, '--depth', 'empty')
+ expected_output = wc.State(wc_dir, {
+ 'A_COPY' : Item(verb='Sending'),
+ 'A_COPY/D/G/rho' : Item(verb='Sending'),
+ })
+ expected_status.tweak('A_COPY', 'A_COPY/D/G/rho', wc_rev=7)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Update the WC a last time to ensure full inheritance.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(7), [], 'up',
+ wc_dir)
+
+ # Despite being non-inheritable, r6 should still show as merged to A_COPY
+ # and not eligible for merging.
+ svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
+ ['4','6*'],
+ sbox.repo_url + '/A',
+ A_COPY_path,
+ '--show-revs', 'merged')
+ svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
+ ['3','5','6*'],
+ sbox.repo_url + '/A',
+ A_COPY_path,
+ '--show-revs', 'eligible')
+ # But if we drop down to A_COPY/D, r6 should show as eligible because it
+ # was only merged into A_COPY, no deeper.
+ svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
+ ['4'],
+ sbox.repo_url + '/A/D',
+ D_COPY_path,
+ '--show-revs', 'merged')
+ svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
+ ['3','6'],
+ sbox.repo_url + '/A/D',
+ D_COPY_path,
+ '--show-revs', 'eligible')
+
+# Test for -R option with svn mergeinfo subcommand.
+#
+# Test for issue #3242 'Subversion demands unnecessary access to parent
+# directories of operations'
+@Issue(3242)
+@SkipUnless(server_has_mergeinfo)
+def recursive_mergeinfo(sbox):
+ "test svn mergeinfo -R"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ expected_disk, expected_status = set_up_branch(sbox)
+
+ # Some paths we'll care about
+ A_path = os.path.join(wc_dir, "A")
+ A_COPY_path = os.path.join(wc_dir, "A_COPY")
+ B_COPY_path = os.path.join(wc_dir, "A_COPY", "B")
+ C_COPY_path = os.path.join(wc_dir, "A_COPY", "C")
+ rho_COPY_path = os.path.join(wc_dir, "A_COPY", "D", "G", "rho")
+ H_COPY_path = os.path.join(wc_dir, "A_COPY", "D", "H")
+ F_COPY_path = os.path.join(wc_dir, "A_COPY", "B", "F")
+ omega_COPY_path = os.path.join(wc_dir, "A_COPY", "D", "H", "omega")
+ beta_COPY_path = os.path.join(wc_dir, "A_COPY", "B", "E", "beta")
+ A2_path = os.path.join(wc_dir, "A2")
+ nu_path = os.path.join(wc_dir, "A2", "B", "F", "nu")
+ nu_COPY_path = os.path.join(wc_dir, "A_COPY", "B", "F", "nu")
+ nu2_path = os.path.join(wc_dir, "A2", "C", "nu2")
+
+ # Rename A to A2 in r7.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(6), [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ren', A_path, A2_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', wc_dir, '-m', 'rename A to A2')
+
+  # Add the files A2/B/F/nu and A2/C/nu2 and commit them as r8.
+ svntest.main.file_write(nu_path, "A new file.\n")
+ svntest.main.file_write(nu2_path, "Another new file.\n")
+ svntest.main.run_svn(None, "add", nu_path, nu2_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', wc_dir, '-m', 'Add 2 new files')
+
+ # Do several merges to create varied subtree mergeinfo
+
+ # Merge r4 from A2 to A_COPY at depth empty
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [], 'up',
+ wc_dir)
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[4]], ' U ' + A_COPY_path + '\n'),
+ [], 'merge', '-c4', '--depth', 'empty',
+ sbox.repo_url + '/A2',
+ A_COPY_path)
+
+ # Merge r6 from A2/D/H to A_COPY/D/H
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[6]],
+ ['U ' + omega_COPY_path + '\n',
+ ' G ' + H_COPY_path + '\n']),
+ [], 'merge', '-c6',
+ sbox.repo_url + '/A2/D/H',
+ H_COPY_path)
+
+ # Merge r5 from A2 to A_COPY
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[5]],
+ ['U ' + beta_COPY_path + '\n',
+ ' G ' + A_COPY_path + '\n',
+ ' G ' + B_COPY_path + '\n',
+ ' U ' + B_COPY_path + '\n',], # Elision
+ elides=True),
+ [], 'merge', '-c5',
+ sbox.repo_url + '/A2',
+ A_COPY_path)
+
+ # Reverse merge -r5 from A2/C to A_COPY/C leaving empty mergeinfo on
+ # A_COPY/C.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[-5]],
+ ' G ' + C_COPY_path + '\n'),
+ [], 'merge', '-c-5',
+ sbox.repo_url + '/A2/C', C_COPY_path)
+
+ # Merge r8 from A2/B/F to A_COPY/B/F
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[8]],
+ ['A ' + nu_COPY_path + '\n',
+ ' G ' + F_COPY_path + '\n']),
+ [], 'merge', '-c8',
+ sbox.repo_url + '/A2/B/F',
+ F_COPY_path)
+
+ # Commit everything this far as r9
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', wc_dir, '-m', 'Many merges')
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [], 'up',
+ wc_dir)
+
+ # Test svn mergeinfo -R / --depth infinity.
+
+ # Asking for eligible revisions from A2 to A_COPY should show:
+ #
+ # r3 - Was never merged.
+ #
+ # r4 - Was merged at depth empty, so while there is mergeinfo for the
+ # revision, the actual text change to A_COPY/D/G/rho hasn't yet
+ # happened.
+ #
+ # r8* - Was only partially merged to the subtree at A_COPY/B/F. The
+ # addition of A_COPY/C/nu2 is still outstanding.
+ svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
+ ['3', '4*', '8*'],
+ sbox.repo_url + '/A2',
+ sbox.repo_url + '/A_COPY',
+ '--show-revs', 'eligible', '-R')
+ # Do the same as above, but test that we can request the revisions
+ # in reverse order.
+ svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
+ ['8*', '4*', '3'],
+ sbox.repo_url + '/A2',
+ sbox.repo_url + '/A_COPY',
+ '--show-revs', 'eligible', '-R',
+ '-r', '9:0')
+
+ # Asking for merged revisions from A2 to A_COPY should show:
+ #
+ # r4* - Was merged at depth empty, so while there is mergeinfo for the
+ # revision, the actual text change to A_COPY/D/G/rho hasn't yet
+ # happened.
+ #
+ # r5 - Was merged at depth infinity to the root of the 'branch', so it
+ # should show as fully merged.
+ #
+ # r6 - This was a subtree merge, but since the subtree A_COPY/D/H was
+ # the ancestor of the only change made in r6 it is considered
+ # fully merged.
+ #
+ # r8* - Was only partially merged to the subtree at A_COPY/B/F. The
+ # addition of A_COPY/C/nu2 is still outstanding.
+ svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
+ ['4*', '5', '6', '8*'],
+ A2_path,
+ A_COPY_path,
+ '--show-revs', 'merged',
+ '--depth', 'infinity')
+ # Do the same as above, but test that we can request the revisions
+ # in reverse order.
+ svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""),
+ ['8*', '6', '5', '4*'],
+ A2_path,
+ A_COPY_path,
+ '--show-revs', 'merged',
+ '--depth', 'infinity',
+ '-r', '9:0')
+
+ # A couple tests of problems found with initial issue #3242 fixes.
+ # We should be able to check for the merged revs from a URL to a URL
+ # when the latter has explicit mergeinfo...
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''), ['6'],
+ sbox.repo_url + '/A2/D/H',
+ sbox.repo_url + '/A_COPY/D/H',
+ '--show-revs', 'merged')
+ # ...and when the latter has inherited mergeinfo.
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''), ['6'],
+ sbox.repo_url + '/A2/D/H/omega',
+ sbox.repo_url + '/A_COPY/D/H/omega',
+ '--show-revs', 'merged')
+
+# Test for issue #3180 'svn mergeinfo ignores peg rev for WC target'.
+@SkipUnless(server_has_mergeinfo)
+def mergeinfo_on_pegged_wc_path(sbox):
+ "svn mergeinfo on pegged working copy target"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ expected_disk, expected_status = set_up_branch(sbox)
+
+ # Some paths we'll care about
+ A_path = os.path.join(wc_dir, "A")
+ A_COPY_path = os.path.join(wc_dir, "A_COPY")
+ psi_COPY_path = os.path.join(wc_dir, "A_COPY", "D", "H", "psi")
+ omega_COPY_path = os.path.join(wc_dir, "A_COPY", "D", "H", "omega")
+ beta_COPY_path = os.path.join(wc_dir, "A_COPY", "B", "E", "beta")
+
+ # Do a couple merges
+ #
+ # r7 - Merge -c3,6 from A to A_COPY.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[3],[6]],
+ ['U ' + psi_COPY_path + '\n',
+ 'U ' + omega_COPY_path + '\n',
+ ' U ' + A_COPY_path + '\n',
+ ' G ' + A_COPY_path + '\n',]),
+ [], 'merge', '-c3,6', sbox.repo_url + '/A', A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', wc_dir,
+ '-m', 'Merge r3 and r6')
+
+ # r8 - Merge -c5 from A to A_COPY.
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[5]],
+ ['U ' + beta_COPY_path + '\n',
+ ' U ' + A_COPY_path + '\n']),
+ [], 'merge', '-c5', '--allow-mixed-revisions',
+ sbox.repo_url + '/A', A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', wc_dir,
+ '-m', 'Merge r5')
+
+ # Ask for merged and eligible revisions to A_COPY pegged at various values.
+ # Prior to issue #3180 fix the peg revision was ignored.
+ #
+ # A_COPY pegged to non-existent revision
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version('.*No such revision 99'),
+ [], A_path, A_COPY_path + '@99', '--show-revs', 'merged')
+
+ # A_COPY@BASE
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ ['3','5','6'], A_path, A_COPY_path + '@BASE', '--show-revs', 'merged')
+
+ # A_COPY@HEAD
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ ['3','5','6'], A_path, A_COPY_path + '@HEAD', '--show-revs', 'merged')
+
+ # A_COPY@4 (Prior to any merges)
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ [], A_path, A_COPY_path + '@4', '--show-revs', 'merged')
+
+ # A_COPY@COMMITTED (r8)
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ ['3','5','6'], A_path, A_COPY_path + '@COMMITTED', '--show-revs',
+ 'merged')
+
+ # A_COPY@PREV (r7)
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ ['3', '6'], A_path, A_COPY_path + '@PREV', '--show-revs', 'merged')
+
+ # A_COPY@BASE
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ ['4'], A_path, A_COPY_path + '@BASE', '--show-revs', 'eligible')
+
+ # A_COPY@HEAD
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ ['4'], A_path, A_COPY_path + '@HEAD', '--show-revs', 'eligible')
+
+ # A_COPY@4 (Prior to any merges)
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ ['3', '4', '5', '6'], A_path, A_COPY_path + '@4', '--show-revs', 'eligible')
+
+ # A_COPY@COMMITTED (r8)
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ ['4'], A_path, A_COPY_path + '@COMMITTED', '--show-revs',
+ 'eligible')
+
+ # A_COPY@PREV (r7)
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ ['4', '5'], A_path, A_COPY_path + '@PREV', '--show-revs', 'eligible')
+
+#----------------------------------------------------------------------
+# A test for issue 3986 'svn_client_mergeinfo_log API is broken'.
+@Issue(3986)
+@SkipUnless(server_has_mergeinfo)
+def wc_target_inherits_mergeinfo_from_repos(sbox):
+ "wc target inherits mergeinfo from repos"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox, nbr_of_branches=2)
+
+ A_COPY_path = os.path.join(wc_dir, 'A_COPY')
+ rho_COPY_path = os.path.join(wc_dir, 'A_COPY', 'D', 'G', 'rho')
+ gamma_2_path = os.path.join(wc_dir, 'A_COPY_2', 'D', 'gamma')
+ tau_path = os.path.join(wc_dir, 'A', 'D', 'G', 'tau')
+ D_COPY_path = os.path.join(wc_dir, 'A_COPY', 'D')
+
+ # Merge -c5 ^/A/D/G/rho A_COPY\D\G\rho
+ # Merge -c7 ^/A A_COPY
+ # Commit as r8
+ #
+ # This gives us some explicit mergeinfo on the "branch" root and
+ # one of its subtrees:
+ #
+ # Properties on 'A_COPY\D\G\rho':
+ # svn:mergeinfo
+ # /A/D/G/rho:5
+ # Properties on 'A_COPY':
+ # svn:mergeinfo
+ # /A:7
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ sbox.repo_url + '/A/D/G/rho',
+ rho_COPY_path, '-c5')
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ sbox.repo_url + '/A',
+ A_COPY_path, '-c7')
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Cherrypicks to branch subtree and root',
+ wc_dir)
+
+ # Checkout a new wc rooted at ^/A_COPY/D.
+ subtree_wc = sbox.add_wc_path('D_COPY')
+ svntest.actions.run_and_verify_svn(None, [], 'co',
+ sbox.repo_url + '/A_COPY/D',
+ subtree_wc)
+
+ # Check the merged and eligible revisions both recursively and
+ # non-recursively.
+
+ # Eligible : Non-recursive
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ ['4','5'], sbox.repo_url + '/A/D', subtree_wc,
+ '--show-revs', 'eligible')
+
+ # Eligible : Recursive
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ ['4'], sbox.repo_url + '/A/D', subtree_wc,
+ '--show-revs', 'eligible', '-R')
+
+ # Merged : Non-recursive
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ ['7'], sbox.repo_url + '/A/D', subtree_wc,
+ '--show-revs', 'merged')
+
+ # Merged : Recursive
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ ['5','7'], sbox.repo_url + '/A/D', subtree_wc,
+ '--show-revs', 'merged', '-R')
+
+ # Test that intersecting revisions in the 'svn mergeinfo' target
+ # from one source don't show up as merged when asking about a different
+ # source.
+ #
+ # In r9 make a change that affects two branches:
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.main.file_write(gamma_2_path, "New content.\n")
+ svntest.main.file_write(tau_path, "New content.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Make changes under both A and A_COPY_2',
+ wc_dir)
+
+ # In r10 merge r9 from A_COPY_2 to A_COPY.
+ #
+ # This gives us this mergeinfo:
+ #
+ # Properties on 'A_COPY':
+ # svn:mergeinfo
+ # /A:7
+ # /A_COPY_2:9
+ # Properties on 'A_COPY\D\G\rho':
+ # svn:mergeinfo
+ # /A/D/G/rho:5
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ sbox.repo_url + '/A_COPY_2',
+ A_COPY_path, '-c9')
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'Merge r8 from A_COPY_2 to A_COPY',
+ wc_dir)
+
+ def test_svn_mergeinfo_4_way(wc_target):
+ # Eligible : Non-recursive
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ ['4','5','9'], sbox.repo_url + '/A/D', wc_target,
+ '--show-revs', 'eligible')
+
+ # Eligible : Recursive
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ ['4','9'], sbox.repo_url + '/A/D', wc_target,
+ '--show-revs', 'eligible', '-R')
+
+ # Merged : Non-recursive
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ ['7'], sbox.repo_url + '/A/D', wc_target,
+ '--show-revs', 'merged')
+
+ # Merged : Recursive
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ ['5','7'], sbox.repo_url + '/A/D', wc_target,
+ '--show-revs', 'merged', '-R')
+
+ # Test while the target is the full WC and then with the subtree WC:
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'up', subtree_wc)
+
+ test_svn_mergeinfo_4_way(D_COPY_path)
+ test_svn_mergeinfo_4_way(subtree_wc)
+
+#----------------------------------------------------------------------
+# A test for issue 3791 'svn mergeinfo shows natural history of added
+# subtrees as eligible'.
+@Issue(3791)
+@SkipUnless(server_has_mergeinfo)
+def natural_history_is_not_eligible_nor_merged(sbox):
+ "natural history is not eligible nor merged"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ nu_path = os.path.join(wc_dir, 'A', 'C', 'nu')
+ A_COPY_path = os.path.join(wc_dir, 'A_COPY')
+ nu_COPY_path = os.path.join(wc_dir, 'A_COPY', 'C', 'nu')
+
+ # r7 - Add a new file A/C/nu
+ svntest.main.file_write(nu_path, "This is the file 'nu'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'add', nu_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci',
+ '-m', 'Add a file', wc_dir)
+
+ # r8 - Sync merge ^/A to A_COPY
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ sbox.repo_url + '/A', A_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci',
+ '-m', 'Add a file', wc_dir)
+
+ # r9 - Modify the file added in r7
+ svntest.main.file_write(nu_path, "Modification to file 'nu'.\n")
+ svntest.actions.run_and_verify_svn(None, [], 'ci',
+ '-m', 'Modify added file', wc_dir)
+
+ # r10 - Merge ^/A/C/nu to A_COPY/C/nu, creating subtree mergeinfo.
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ sbox.repo_url + '/A/C/nu', nu_COPY_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci',
+ '-m', 'Add a file', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # We've effectively merged everything from ^/A to A_COPY, check
+ # that svn mergeinfo -R agrees.
+ #
+ # First check if there are eligible revisions, there should be none.
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ [], sbox.repo_url + '/A',
+ A_COPY_path, '--show-revs', 'eligible', '-R')
+
+ # Now check that all operative revisions show as merged.
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ ['3','4','5','6','7','9'], sbox.repo_url + '/A',
+ A_COPY_path, '--show-revs', 'merged', '-R')
+
+#----------------------------------------------------------------------
+# A test for issue 4050 "'svn mergeinfo' always considers non-inheritable
+# ranges as partially merged".
+@Issue(4050)
+@SkipUnless(server_has_mergeinfo)
+def noninheritable_mergeinfo_not_always_eligible(sbox):
+ "noninheritable mergeinfo not always eligible"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A_path = os.path.join(wc_dir, 'A')
+ branch_path = os.path.join(wc_dir, 'branch')
+
+ # r2 - Branch ^/A to ^/branch.
+ svntest.main.run_svn(None, 'copy', sbox.repo_url + '/A',
+ sbox.repo_url + '/branch', '-m', 'make a branch')
+
+ # r3 - Make prop edit to A.
+ svntest.main.run_svn(None, 'ps', 'prop', 'val', A_path)
+ svntest.main.run_svn(None, 'commit', '-m', 'file edit', wc_dir)
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+ # r4 - Merge r3 from ^/A to branch at depth=empty.
+ svntest.actions.run_and_verify_svn(None, [], 'merge',
+ sbox.repo_url + '/A', branch_path,
+ '-c3', '--depth=empty')
+ # Forcibly set non-inheritable mergeinfo to replicate the pre-1.8 behavior,
+ # where prior to the fix for issue #4057, non-inheritable mergeinfo was
+ # unconditionally set for merges with shallow operational depths.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', SVN_PROP_MERGEINFO,
+ '/A:3*\n', branch_path)
+ svntest.main.run_svn(None, 'commit', '-m', 'shallow merge', wc_dir)
+
+ # Now check that r3 is reported as fully merged from ^/A to ^/branch
+ # and does not show up at all when asking for eligible revs.
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ ['3'], sbox.repo_url + '/A', sbox.repo_url + '/branch',
+ '--show-revs', 'merged', '-R')
+ # Likewise r3 should not show up at all when asking
+ # for --show-revs=eligible.
+ svntest.actions.run_and_verify_mergeinfo(
+ adjust_error_for_server_version(''),
+ [], sbox.repo_url + '/A', sbox.repo_url + '/branch',
+ '--show-revs', 'eligible', '-R')
+
+@SkipUnless(server_has_mergeinfo)
+@Issue(4301)
+def mergeinfo_local_move(sbox):
+ "'mergeinfo' on a locally moved path"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_move('A', 'A2')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mergeinfo', sbox.repo_url + '/A',
+ sbox.ospath('A2'))
+
+@SkipUnless(server_has_mergeinfo)
+@Issue(4582)
+def no_mergeinfo_on_tree_conflict_victim(sbox):
+ "do not record mergeinfo on tree conflict victims"
+ sbox.build()
+
+ # Create a branch of A called A_copy
+ sbox.simple_copy('A', 'A_copy')
+ sbox.simple_commit()
+
+ # Add a new directory and file on both branches
+ sbox.simple_mkdir('A/dir')
+ sbox.simple_add_text('new file', 'A/dir/f')
+ sbox.simple_commit()
+
+ sbox.simple_mkdir('A_copy/dir')
+ sbox.simple_add_text('new file', 'A_copy/dir/f')
+ sbox.simple_commit()
+
+ # Run a merge from A to A_copy
+ expected_output = wc.State(sbox.ospath('A_copy'), {
+ 'dir' : Item(status=' ', treeconflict='C'),
+ 'dir/f' : Item(status=' ', treeconflict='A'),
+ })
+ expected_mergeinfo_output = wc.State(sbox.ospath('A_copy'), {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(sbox.ospath('A_copy'), {
+ })
+
+ expected_disk = svntest.wc.State('', {
+ 'C' : Item(),
+ 'B/E/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'B/E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'B/lambda' : Item(contents="This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'D/H/omega' : Item(contents="This is the file 'omega'.\n"),
+ 'D/H/psi' : Item(contents="This is the file 'psi'.\n"),
+ 'D/H/chi' : Item(contents="This is the file 'chi'.\n"),
+ 'D/G/tau' : Item(contents="This is the file 'tau'.\n"),
+ 'D/G/pi' : Item(contents="This is the file 'pi'.\n"),
+ 'D/G/rho' : Item(contents="This is the file 'rho'.\n"),
+ 'D/gamma' : Item(contents="This is the file 'gamma'.\n"),
+ 'dir/f' : Item(contents="new file"),
+ 'mu' : Item(contents="This is the file 'mu'.\n"),
+ })
+
+ # The merge will create an add vs add tree conflict on A_copy/dir
+ expected_status = svntest.wc.State(sbox.ospath('A_copy'), {
+ '' : Item(status=' M', wc_rev='4'),
+ 'D' : Item(status=' ', wc_rev='4'),
+ 'D/G' : Item(status=' ', wc_rev='4'),
+ 'D/G/pi' : Item(status=' ', wc_rev='4'),
+ 'D/G/rho' : Item(status=' ', wc_rev='4'),
+ 'D/G/tau' : Item(status=' ', wc_rev='4'),
+ 'D/H' : Item(status=' ', wc_rev='4'),
+ 'D/H/psi' : Item(status=' ', wc_rev='4'),
+ 'D/H/omega' : Item(status=' ', wc_rev='4'),
+ 'D/H/chi' : Item(status=' ', wc_rev='4'),
+ 'D/gamma' : Item(status=' ', wc_rev='4'),
+ 'B' : Item(status=' ', wc_rev='4'),
+ 'B/F' : Item(status=' ', wc_rev='4'),
+ 'B/E' : Item(status=' ', wc_rev='4'),
+ 'B/E/alpha' : Item(status=' ', wc_rev='4'),
+ 'B/E/beta' : Item(status=' ', wc_rev='4'),
+ 'B/lambda' : Item(status=' ', wc_rev='4'),
+ 'C' : Item(status=' ', wc_rev='4'),
+ 'dir' : Item(status=' ', treeconflict='C', wc_rev='4'),
+ 'dir/f' : Item(status=' ', wc_rev='4'),
+ 'mu' : Item(status=' ', wc_rev='4'),
+ })
+
+ expected_skip = wc.State('', { })
+
+ sbox.simple_update('A_copy')
+ svntest.actions.run_and_verify_merge(sbox.ospath('A_copy'),
+ None, None, # rev1, rev2
+ '^/A',
+ None, # URL2
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip)
+
+ # Resolve the tree conflict by accepting the working copy state left
+ # behind by the merge. This preserves the line of history of A_copy/dir,
+ # which originated on the branch 'A_copy', rather than replacing it with
+ # the line of history of A/dir which originated on branch 'A'.
+ svntest.actions.run_and_verify_resolve([sbox.ospath('A_copy/dir')],
+ '--accept', 'working',
+ sbox.ospath('A_copy/dir'))
+ sbox.simple_commit('A_copy')
+
+ # Now try to merge the 'A_copy' branch back to 'A'
+ expected_output = wc.State(sbox.ospath('A'), {
+ 'dir' : Item(status='R '), # changes line of history of A/dir
+ 'dir/f' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(sbox.ospath('A'), {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(sbox.ospath('A'), {
+ })
+
+ expected_disk = svntest.wc.State('', {
+ 'C' : Item(),
+ 'B/E/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'B/E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'B/F' : Item(),
+ 'B/lambda' : Item(contents="This is the file 'lambda'.\n"),
+ 'D/H/omega' : Item(contents="This is the file 'omega'.\n"),
+ 'D/H/psi' : Item(contents="This is the file 'psi'.\n"),
+ 'D/H/chi' : Item(contents="This is the file 'chi'.\n"),
+ 'D/G/tau' : Item(contents="This is the file 'tau'.\n"),
+ 'D/G/pi' : Item(contents="This is the file 'pi'.\n"),
+ 'D/G/rho' : Item(contents="This is the file 'rho'.\n"),
+ 'D/gamma' : Item(contents="This is the file 'gamma'.\n"),
+ 'dir/f' : Item(contents="new file"),
+ 'mu' : Item(contents="This is the file 'mu'.\n"),
+ })
+
+ expected_status = svntest.wc.State(sbox.ospath('A'), {
+ '' : Item(status=' M', wc_rev='5'),
+ 'dir' : Item(status='R ', copied='+', wc_rev='-'),
+ 'dir/f' : Item(status=' ', copied='+', wc_rev='-'),
+ 'D' : Item(status=' ', wc_rev='5'),
+ 'D/H' : Item(status=' ', wc_rev='5'),
+ 'D/H/chi' : Item(status=' ', wc_rev='5'),
+ 'D/H/omega' : Item(status=' ', wc_rev='5'),
+ 'D/H/psi' : Item(status=' ', wc_rev='5'),
+ 'D/G' : Item(status=' ', wc_rev='5'),
+ 'D/G/pi' : Item(status=' ', wc_rev='5'),
+ 'D/G/rho' : Item(status=' ', wc_rev='5'),
+ 'D/G/tau' : Item(status=' ', wc_rev='5'),
+ 'D/gamma' : Item(status=' ', wc_rev='5'),
+ 'B' : Item(status=' ', wc_rev='5'),
+ 'B/E' : Item(status=' ', wc_rev='5'),
+ 'B/E/beta' : Item(status=' ', wc_rev='5'),
+ 'B/E/alpha' : Item(status=' ', wc_rev='5'),
+ 'B/lambda' : Item(status=' ', wc_rev='5'),
+ 'B/F' : Item(status=' ', wc_rev='5'),
+ 'mu' : Item(status=' ', wc_rev='5'),
+ 'C' : Item(status=' ', wc_rev='5'),
+ })
+
+ expected_skip = wc.State('', { })
+ sbox.simple_update('A')
+ svntest.actions.run_and_verify_merge(sbox.ospath('A'),
+ None, None, # rev1, rev2
+ '^/A_copy',
+ None, # URL2
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip)
+ sbox.simple_commit('A')
+
+########################################################################
+# Run the tests
+
+# Note that mergeinfo --log is tested in log_tests.py
+
+# list all tests here, starting with None:
+test_list = [ None,
+ no_mergeinfo,
+ mergeinfo,
+ explicit_mergeinfo_source,
+ mergeinfo_non_source,
+ mergeinfo_on_unknown_url,
+ non_inheritable_mergeinfo,
+ recursive_mergeinfo,
+ mergeinfo_on_pegged_wc_path,
+ wc_target_inherits_mergeinfo_from_repos,
+ natural_history_is_not_eligible_nor_merged,
+ noninheritable_mergeinfo_not_always_eligible,
+ mergeinfo_local_move,
+ no_mergeinfo_on_tree_conflict_victim,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
diff --git a/subversion/tests/cmdline/mod_authz_svn_tests.py b/subversion/tests/cmdline/mod_authz_svn_tests.py
new file mode 100755
index 0000000..f3c52dd
--- /dev/null
+++ b/subversion/tests/cmdline/mod_authz_svn_tests.py
@@ -0,0 +1,1069 @@
+#!/usr/bin/env python
+#
+# mod_authz_svn_tests.py: testing mod_authz_svn
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os, re, logging
+
+logger = logging.getLogger()
+
+# Our testing module
+import svntest
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+
+ls_of_D_no_H = '''<html><head><title>repos - Revision 1: /A/D</title></head>
+<body>
+ <h2>repos - Revision 1: /A/D</h2>
+ <ul>
+ <li><a href="../">..</a></li>
+ <li><a href="G/">G/</a></li>
+ <li><a href="gamma">gamma</a></li>
+ </ul>
+</body></html>'''
+
+ls_of_D_H = '''<html><head><title>repos - Revision 1: /A/D</title></head>
+<body>
+ <h2>repos - Revision 1: /A/D</h2>
+ <ul>
+ <li><a href="../">..</a></li>
+ <li><a href="G/">G/</a></li>
+ <li><a href="H/">H/</a></li>
+ <li><a href="gamma">gamma</a></li>
+ </ul>
+</body></html>'''
+
+ls_of_H = '''<html><head><title>repos - Revision 1: /A/D/H</title></head>
+<body>
+ <h2>repos - Revision 1: /A/D/H</h2>
+ <ul>
+ <li><a href="../">..</a></li>
+ <li><a href="chi">chi</a></li>
+ <li><a href="omega">omega</a></li>
+ <li><a href="psi">psi</a></li>
+ </ul>
+</body></html>'''
+
+user1 = svntest.main.wc_author
+user1_upper = user1.upper()
+user1_pass = svntest.main.wc_passwd
+user1_badpass = 'XXX'
+assert user1_pass != user1_badpass, "Passwords can't match"
+user2 = svntest.main.wc_author2
+user2_upper = user2.upper()
+user2_pass = svntest.main.wc_passwd
+user2_badpass = 'XXX'
+assert user2_pass != user2_badpass, "Passwords can't match"
+
+def write_authz_file(sbox):
+ svntest.main.write_authz_file(sbox, {
+ '/': '$anonymous = r\n' +
+ 'jrandom = rw\n' +
+ 'jconstant = rw',
+ '/A/D/H': '$anonymous =\n' +
+ '$authenticated =\n' +
+ 'jrandom = rw'
+ })
+
+def write_authz_file_groups(sbox):
+ authz_name = sbox.authz_name()
+ svntest.main.write_authz_file(sbox,{
+ '/': '* =',
+ })
+
+def verify_get(test_area_url, path, user, pw,
+ expected_status, expected_body, headers):
+ import base64
+
+ req_url = test_area_url + path
+
+ h = svntest.main.create_http_connection(req_url, 0)
+
+ if headers is None:
+ headers = {}
+
+ if user and pw:
+ auth_info = user + ':' + pw
+ user_pw = base64.b64encode(auth_info.encode()).decode()
+ headers['Authorization'] = 'Basic ' + user_pw
+ else:
+ auth_info = "anonymous"
+
+ h.request('GET', req_url, None, headers)
+
+ r = h.getresponse()
+
+ actual_status = r.status
+ if expected_status and expected_status != actual_status:
+
+ logger.warn("Expected status '" + str(expected_status) +
+ "' but got '" + str(actual_status) +
+ "' on url '" + req_url + "' (" +
+ auth_info + ").")
+ raise svntest.Failure
+
+ if expected_body:
+ actual_body = r.read()
+ if isinstance(expected_body, str) and not isinstance(actual_body, str):
+ actual_body = actual_body.decode()
+ if expected_body != actual_body:
+ logger.warn("Expected body:")
+ logger.warn(expected_body)
+ logger.warn("But got:")
+ logger.warn(actual_body)
+ logger.warn("on url '" + req_url + "' (" + auth_info + ").")
+ raise svntest.Failure
+
+def verify_gets(test_area_url, tests):
+ for test in tests:
+ verify_get(test_area_url, test['path'], test.get('user'), test.get('pw'),
+ test['status'], test.get('body'), test.get('headers'))
+
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+
+#----------------------------------------------------------------------
+
+
+@SkipUnless(svntest.main.is_ra_type_dav)
+def anon(sbox):
+ "test anonymous access"
+ sbox.build(read_only = True, create_wc = False)
+
+ test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos',
+ '/authz-test-work/anon')
+
+ write_authz_file(sbox)
+
+ anon_tests = (
+ { 'path': '', 'status': 301 },
+ { 'path': '/', 'status': 200 },
+ { 'path': '/repos', 'status': 301 },
+ { 'path': '/repos/', 'status': 200 },
+ { 'path': '/repos/A', 'status': 301 },
+ { 'path': '/repos/A/', 'status': 200 },
+ { 'path': '/repos/A/D', 'status': 301 },
+ { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H },
+ { 'path': '/repos/A/D/gamma', 'status': 200 },
+ { 'path': '/repos/A/D/H', 'status': 403 },
+ { 'path': '/repos/A/D/H/', 'status': 403 },
+ { 'path': '/repos/A/D/H/chi', 'status': 403 },
+ # auth isn't configured so nothing should change when passing
+ # authn details
+ { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass},
+ { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
+ 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D/H', 'status': 403, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D/H/', 'status': 403, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user1, 'pw': user1_pass},
+ { 'path': '', 'status': 301, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
+ 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D/H', 'status': 403, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D/H/', 'status': 403, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user1, 'pw': user1_badpass},
+ { 'path': '', 'status': 301, 'user': user2, 'pw': user1_pass},
+ { 'path': '/', 'status': 200, 'user': user2, 'pw': user1_pass},
+ { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user1_pass},
+ { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user1_pass},
+ { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user1_pass},
+ { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user1_pass},
+ { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user1_pass},
+ { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
+ 'user': user2, 'pw': user1_pass},
+ { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass},
+ { 'path': '', 'status': 301, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
+ 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_badpass},
+ )
+
+ verify_gets(test_area_url, anon_tests)
+
+
+@SkipUnless(svntest.main.is_ra_type_dav)
+def mixed(sbox):
+ "test mixed anonymous and authenticated access"
+ sbox.build(read_only = True, create_wc = False)
+
+ test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos',
+ '/authz-test-work/mixed')
+
+ write_authz_file(sbox)
+
+ mixed_tests = (
+ { 'path': '', 'status': 301, },
+ { 'path': '/', 'status': 200, },
+ { 'path': '/repos', 'status': 301, },
+ { 'path': '/repos/', 'status': 200, },
+ { 'path': '/repos/A', 'status': 301, },
+ { 'path': '/repos/A/', 'status': 200, },
+ { 'path': '/repos/A/D', 'status': 301, },
+ { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
+ },
+ { 'path': '/repos/A/D/gamma', 'status': 200, },
+ { 'path': '/repos/A/D/H', 'status': 401, },
+ { 'path': '/repos/A/D/H/', 'status': 401, },
+ { 'path': '/repos/A/D/H/chi', 'status': 401, },
+ # auth is configured and user1 is allowed access to H
+ { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass},
+ { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H,
+ 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass},
+ # try with the wrong password for user1
+ { 'path': '', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ # auth is configured and user2 is not allowed access to H
+ { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass},
+ { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
+ 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass},
+ # try with the wrong password for user2
+ { 'path': '', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ )
+
+ verify_gets(test_area_url, mixed_tests)
+
+@SkipUnless(svntest.main.is_ra_type_dav)
+@XFail(svntest.main.is_httpd_authz_provider_enabled)
+# uses the AuthzSVNNoAuthWhenAnonymousAllowed On directive
+# this is broken with httpd 2.3.x+ since it requires the auth system to accept
+# r->user == NULL and there is a test for this in server/request.c now. It
+# was intended as a workaround for the lack of Satisfy Any in 2.3.x+ which
+# was resolved by httpd with mod_access_compat in 2.3.x+.
+def mixed_noauthwhenanon(sbox):
+  "test mixed with noauthwhenanon directive"
+  sbox.build(read_only = True, create_wc = False)
+
+  # Point at the httpd <Location> dedicated to this test instead of the
+  # default repo URL (presumably configured by the test harness' httpd
+  # setup — confirm against davautocheck.sh).
+  test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos',
+                                        '/authz-test-work/mixed-noauthwhenanon')
+
+  write_authz_file(sbox)
+
+  # Expected results for verify_gets(): each entry gives the request 'path',
+  # the expected HTTP 'status', optionally the expected listing 'body', and
+  # optional basic-auth credentials ('user'/'pw').  Entries without
+  # credentials exercise anonymous access.
+  noauthwhenanon_tests = (
+    { 'path': '', 'status': 301, },
+    { 'path': '/', 'status': 200, },
+    { 'path': '/repos', 'status': 301, },
+    { 'path': '/repos/', 'status': 200, },
+    { 'path': '/repos/A', 'status': 301, },
+    { 'path': '/repos/A/', 'status': 200, },
+    { 'path': '/repos/A/D', 'status': 301, },
+    { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
+                                            },
+    { 'path': '/repos/A/D/gamma', 'status': 200, },
+    { 'path': '/repos/A/D/H', 'status': 401, },
+    { 'path': '/repos/A/D/H/', 'status': 401, },
+    { 'path': '/repos/A/D/H/chi', 'status': 401, },
+    # auth is configured and user1 is allowed access to H
+    { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass},
+    { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H,
+                                            'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass},
+    # try with the wrong password for user1
+    # note that unlike doing this with Satisfy Any this case
+    # actually provides anon access when provided with an invalid
+    # password
+    { 'path': '', 'status': 301, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/D/', 'status': 200, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    # auth is configured and user2 is not allowed access to H
+    { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass},
+    { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
+                                            'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass},
+    # try with the wrong password for user2
+    { 'path': '', 'status': 301, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/D/', 'status': 200, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    )
+
+  verify_gets(test_area_url, noauthwhenanon_tests)
+
+
+@SkipUnless(svntest.main.is_ra_type_dav)
+def authn(sbox):
+  "test authenticated only access"
+  sbox.build(read_only = True, create_wc = False)
+
+  # Use the httpd <Location> dedicated to this test (presumably configured
+  # by the test harness' httpd setup — confirm against davautocheck.sh).
+  test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos',
+                                        '/authz-test-work/authn')
+
+  write_authz_file(sbox)
+
+  # Expected results for verify_gets(): each entry gives the request 'path',
+  # the expected HTTP 'status', optionally the expected listing 'body', and
+  # optional basic-auth credentials ('user'/'pw').  Entries without
+  # credentials exercise anonymous access (all rejected with 401 here).
+  authn_tests = (
+    { 'path': '', 'status': 401, },
+    { 'path': '/', 'status': 401, },
+    { 'path': '/repos', 'status': 401, },
+    { 'path': '/repos/', 'status': 401, },
+    { 'path': '/repos/A', 'status': 401, },
+    { 'path': '/repos/A/', 'status': 401, },
+    { 'path': '/repos/A/D', 'status': 401, },
+    { 'path': '/repos/A/D/', 'status': 401, },
+    { 'path': '/repos/A/D/gamma', 'status': 401, },
+    { 'path': '/repos/A/D/H', 'status': 401, },
+    { 'path': '/repos/A/D/H/', 'status': 401, },
+    { 'path': '/repos/A/D/H/chi', 'status': 401, },
+    # auth is configured and user1 is allowed access to H
+    { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass},
+    { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H,
+                                            'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass},
+    # try with upper case username for user1
+    # NOTE(review): the upper-case login authenticates (no 401) but is then
+    # denied by authz (403 everywhere below '/'), implying the authz rules
+    # match usernames case-sensitively — confirm against write_authz_file().
+    { 'path': '', 'status': 301, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/', 'status': 200, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/D', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/D/', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H/', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
+    # try with the wrong password for user1
+    { 'path': '', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/D', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/D/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    # auth is configured and user2 is not allowed access to H
+    { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass},
+    { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
+                                            'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass},
+    # try with upper case username for user2
+    { 'path': '', 'status': 301, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/', 'status': 200, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/D', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/D/', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/D/H', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    # try with the wrong password for user2
+    { 'path': '', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/D', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/D/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    )
+
+  verify_gets(test_area_url, authn_tests)
+
+@SkipUnless(svntest.main.is_ra_type_dav)
+def authn_anonoff(sbox):
+  "test authenticated only access with anonoff"
+  sbox.build(read_only = True, create_wc = False)
+
+  # Use the httpd <Location> dedicated to this test (presumably configured
+  # by the test harness' httpd setup — confirm against davautocheck.sh).
+  test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos',
+                                        '/authz-test-work/authn-anonoff')
+
+  write_authz_file(sbox)
+
+  # Expected results for verify_gets(): each entry gives the request 'path',
+  # the expected HTTP 'status', optionally the expected listing 'body', and
+  # optional basic-auth credentials ('user'/'pw').  The expectations mirror
+  # the authn test above: anonymous access is rejected everywhere with 401.
+  anonoff_tests = (
+    { 'path': '', 'status': 401, },
+    { 'path': '/', 'status': 401, },
+    { 'path': '/repos', 'status': 401, },
+    { 'path': '/repos/', 'status': 401, },
+    { 'path': '/repos/A', 'status': 401, },
+    { 'path': '/repos/A/', 'status': 401, },
+    { 'path': '/repos/A/D', 'status': 401, },
+    { 'path': '/repos/A/D/', 'status': 401, },
+    { 'path': '/repos/A/D/gamma', 'status': 401, },
+    { 'path': '/repos/A/D/H', 'status': 401, },
+    { 'path': '/repos/A/D/H/', 'status': 401, },
+    { 'path': '/repos/A/D/H/chi', 'status': 401, },
+    # auth is configured and user1 is allowed access to H
+    { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass},
+    { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H,
+                                            'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass},
+    # try with upper case username for user1
+    { 'path': '', 'status': 301, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/', 'status': 200, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/D', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/D/', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H/', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user1_upper, 'pw': user1_pass},
+    # try with the wrong password for user1
+    { 'path': '', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/D', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/D/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass},
+    # auth is configured and user2 is not allowed access to H
+    { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass},
+    { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
+                                            'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass},
+    { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass},
+    # try with upper case username for user2
+    { 'path': '', 'status': 301, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/', 'status': 200, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/D', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/D/', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/D/H', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    # try with the wrong password for user2
+    { 'path': '', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/D', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/D/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass},
+    )
+
+  verify_gets(test_area_url, anonoff_tests)
+
+@SkipUnless(svntest.main.is_ra_type_dav)
+def authn_lcuser(sbox):
+  "test authenticated only access with lcuser"
+  sbox.build(read_only = True, create_wc = False)
+
+  # Use the httpd <Location> dedicated to this test (presumably configured
+  # by the test harness' httpd setup — confirm against davautocheck.sh).
+  test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos',
+                                        '/authz-test-work/authn-lcuser')
+
+  write_authz_file(sbox)
+
+  # Expected results for verify_gets(): each entry gives the request 'path',
+  # the expected HTTP 'status', optionally the expected listing 'body', and
+  # basic-auth credentials ('user'/'pw').  Unlike the plain authn test,
+  # upper-case logins are expected to behave like the lower-case users.
+  lcuser_tests = (
+    # try with upper case username for user1 (works due to lcuser option)
+    { 'path': '', 'status': 301, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/', 'status': 200, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos', 'status': 301, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/', 'status': 200, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A', 'status': 301, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/', 'status': 200, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/D', 'status': 301, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H,
+                                            'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H', 'status': 301, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1_upper, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1_upper, 'pw': user1_pass},
+    # try with upper case username for user2 (works due to lcuser option)
+    { 'path': '', 'status': 301, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/', 'status': 200, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos', 'status': 301, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/', 'status': 200, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A', 'status': 301, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/', 'status': 200, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/D', 'status': 301, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
+                                            'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/D/H', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2_upper, 'pw': user2_pass},
+    )
+
+  verify_gets(test_area_url, lcuser_tests)
+
+# authenticated access only by group - an excuse to use AuthzSVNAuthoritative Off
+# this is terribly messed up, Require group runs after mod_authz_svn.
+# so if mod_authz_svn grants the access then it doesn't matter what the group
+# requirement says. If we reject the access then you can use the AuthzSVNAuthoritative Off
+# directive to fall through to the group check. Overall the behavior of setups like this
+# is almost guaranteed to not be what users expect.
+@SkipUnless(svntest.main.is_ra_type_dav)
+def authn_group(sbox):
+  "test authenticated only access via groups"
+  sbox.build(read_only = True, create_wc = False)
+
+  # Use the httpd <Location> dedicated to this test (presumably configured
+  # by the test harness' httpd setup — confirm against davautocheck.sh).
+  test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos',
+                                        '/authz-test-work/authn-group')
+
+  # Can't use write_authz_file() as most tests because we want to deny all
+  # access with mod_authz_svn so the tests fall through to the group handling
+  # NOTE(review): authz_name is assigned but never used in this function —
+  # possibly vestigial; confirm before removing.
+  authz_name = sbox.authz_name()
+  svntest.main.write_authz_file(sbox, {
+                                        '/':  '* =',
+                                      })
+
+  # Expected results for verify_gets(): each entry gives the request 'path',
+  # the expected HTTP 'status', optionally the expected listing 'body', and
+  # optional basic-auth credentials ('user'/'pw').
+  group_tests = (
+    { 'path': '', 'status': 401, },
+    { 'path': '/', 'status': 401, },
+    { 'path': '/repos', 'status': 401, },
+    { 'path': '/repos/', 'status': 401, },
+    { 'path': '/repos/A', 'status': 401, },
+    { 'path': '/repos/A/', 'status': 401, },
+    { 'path': '/repos/A/D', 'status': 401, },
+    { 'path': '/repos/A/D/', 'status': 401, },
+    { 'path': '/repos/A/D/gamma', 'status': 401, },
+    { 'path': '/repos/A/D/H', 'status': 401, },
+    { 'path': '/repos/A/D/H/', 'status': 401, },
+    { 'path': '/repos/A/D/H/chi', 'status': 401, },
+    # auth is configured and user1 is allowed access repo including H
+    { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass},
+    { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H,
+                                            'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass},
+    { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass},
+    )
+
+  verify_gets(test_area_url, group_tests)
+
+# This test exists to validate our behavior when used with the new authz
+# provider system introduced in httpd 2.3.x. The Satisfy directive
+# determines how older authz hooks are combined and the RequireA(ll|ny)
+# blocks handles how new authz providers are combined. The overall results of
+# all the authz providers (combined per the Require* blocks) are then
+# combined with the other authz hooks via the Satisfy directive.
+# Meaning this test requires that mod_authz_svn says yes and there is
+# either a valid user or the ALLOW header is 1. The header may seem
+# like a silly test but it's easier to exercise than say a host directive
+# in a repeatable test.
+@SkipUnless(svntest.main.is_httpd_authz_provider_enabled)
+def authn_sallrany(sbox):
+ "test satisfy all require any config"
+ sbox.build(read_only = True, create_wc = False)
+
+ test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos',
+ '/authz-test-work/sallrany')
+
+ write_authz_file(sbox)
+
+ allow_header = { 'ALLOW': '1' }
+
+ sallrany_tests = (
+ #anon access isn't allowed without ALLOW header
+ { 'path': '', 'status': 401, },
+ { 'path': '/', 'status': 401, },
+ { 'path': '/repos', 'status': 401, },
+ { 'path': '/repos/', 'status': 401, },
+ { 'path': '/repos/A', 'status': 401, },
+ { 'path': '/repos/A/', 'status': 401, },
+ { 'path': '/repos/A/D', 'status': 401, },
+ { 'path': '/repos/A/D/', 'status': 401, },
+ { 'path': '/repos/A/D/gamma', 'status': 401, },
+ { 'path': '/repos/A/D/H', 'status': 401, },
+ { 'path': '/repos/A/D/H/', 'status': 401, },
+ { 'path': '/repos/A/D/H/chi', 'status': 401, },
+ # auth is configured and user1 is allowed access repo including H
+ { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass},
+ { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H,
+ 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass},
+ # try with the wrong password for user1
+ { 'path': '', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass},
+ # auth is configured and user2 is not allowed access to H
+ { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass},
+ { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
+ 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass},
+ # try with the wrong password for user2
+ { 'path': '', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass},
+ # anon is allowed with the ALLOW header
+ { 'path': '', 'status': 301, 'headers': allow_header },
+ { 'path': '/', 'status': 200, 'headers': allow_header },
+ { 'path': '/repos', 'status': 301, 'headers': allow_header },
+ { 'path': '/repos/', 'status': 200, 'headers': allow_header },
+ { 'path': '/repos/A', 'status': 301, 'headers': allow_header },
+ { 'path': '/repos/A/', 'status': 200, 'headers': allow_header },
+ { 'path': '/repos/A/D', 'status': 301, 'headers': allow_header },
+ { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H, 'headers': allow_header },
+ { 'path': '/repos/A/D/gamma', 'status': 200, 'headers': allow_header },
+    # these 3 tests return 403 instead of 401 because the config allows
+ # the anon user with the ALLOW header without any auth and the old hook
+ # system has no way of knowing it should return 401 since authentication is
+ # configured and can change the behavior. It could decide to return 401 just on
+ # the basis of authentication being configured but then that leaks info in other
+ # cases so it's better for this case to be "broken".
+ { 'path': '/repos/A/D/H', 'status': 403, 'headers': allow_header },
+ { 'path': '/repos/A/D/H/', 'status': 403, 'headers': allow_header },
+ { 'path': '/repos/A/D/H/chi', 'status': 403, 'headers': allow_header },
+ # auth is configured and user1 is allowed access repo including H
+ { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H,
+ 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ # try with the wrong password for user1
+ { 'path': '', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/repos', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/repos/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/repos/A', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ # auth is configured and user2 is not allowed access to H
+ { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
+ 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ # try with the wrong password for user2
+ { 'path': '', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/repos', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/repos/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/repos/A', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+
+ )
+
+ verify_gets(test_area_url, sallrany_tests)
+
+# See comments on authn_sallrany test for some background on the interaction
+# of Satisfy Any and the newer Require blocks.
+@SkipUnless(svntest.main.is_httpd_authz_provider_enabled)
+def authn_sallrall(sbox):
+ "test satisfy all require all config"
+ sbox.build(read_only = True, create_wc = False)
+
+ test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos',
+ '/authz-test-work/sallrall')
+
+ write_authz_file(sbox)
+
+ allow_header = { 'ALLOW': '1' }
+
+ sallrall_tests = (
+    # anon access isn't allowed without ALLOW header
+ { 'path': '', 'status': 403, },
+ { 'path': '/', 'status': 403, },
+ { 'path': '/repos', 'status': 403, },
+ { 'path': '/repos/', 'status': 403, },
+ { 'path': '/repos/A', 'status': 403, },
+ { 'path': '/repos/A/', 'status': 403, },
+ { 'path': '/repos/A/D', 'status': 403, },
+ { 'path': '/repos/A/D/', 'status': 403, },
+ { 'path': '/repos/A/D/gamma', 'status': 403, },
+ { 'path': '/repos/A/D/H', 'status': 403, },
+ { 'path': '/repos/A/D/H/', 'status': 403, },
+ { 'path': '/repos/A/D/H/chi', 'status': 403, },
+ # auth is configured but no access is allowed without the ALLOW header
+ { 'path': '', 'status': 403, 'user': user1, 'pw': user1_pass},
+ { 'path': '/', 'status': 403, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos', 'status': 403, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/', 'status': 403, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A', 'status': 403, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/', 'status': 403, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D', 'status': 403, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D/', 'status': 403, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D/H', 'status': 403, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D/H/', 'status': 403, 'user': user1, 'pw': user1_pass},
+ { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user1, 'pw': user1_pass},
+ # try with the wrong password for user1
+ { 'path': '', 'status': 403, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/', 'status': 403, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos', 'status': 403, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/', 'status': 403, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A', 'status': 403, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/', 'status': 403, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D', 'status': 403, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D/', 'status': 403, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D/H', 'status': 403, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D/H/', 'status': 403, 'user': user1, 'pw': user1_badpass},
+ { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user1, 'pw': user1_badpass},
+ # auth is configured but no access is allowed without the ALLOW header
+ { 'path': '', 'status': 403, 'user': user2, 'pw': user2_pass},
+ { 'path': '/', 'status': 403, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos', 'status': 403, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/', 'status': 403, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A', 'status': 403, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/', 'status': 403, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D', 'status': 403, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D/', 'status': 403, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass},
+ { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass},
+ # try with the wrong password for user2
+ { 'path': '', 'status': 403, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/', 'status': 403, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos', 'status': 403, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/', 'status': 403, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A', 'status': 403, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/', 'status': 403, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D', 'status': 403, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D/', 'status': 403, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_badpass},
+ { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_badpass},
+ # anon is not allowed even with ALLOW header
+ { 'path': '', 'status': 401, 'headers': allow_header },
+ { 'path': '/', 'status': 401, 'headers': allow_header },
+ { 'path': '/repos', 'status': 401, 'headers': allow_header },
+ { 'path': '/repos/', 'status': 401, 'headers': allow_header },
+ { 'path': '/repos/A', 'status': 401, 'headers': allow_header },
+ { 'path': '/repos/A/', 'status': 401, 'headers': allow_header },
+ { 'path': '/repos/A/D', 'status': 401, 'headers': allow_header },
+ { 'path': '/repos/A/D/', 'status': 401, 'headers': allow_header },
+ { 'path': '/repos/A/D/gamma', 'status': 401, 'headers': allow_header },
+ { 'path': '/repos/A/D/H', 'status': 401, 'headers': allow_header },
+ { 'path': '/repos/A/D/H/', 'status': 401, 'headers': allow_header },
+ { 'path': '/repos/A/D/H/chi', 'status': 401, 'headers': allow_header },
+ # auth is configured and user1 is allowed access repo including H
+ { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H,
+ 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header },
+ # try with the wrong password for user1
+ { 'path': '', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/repos', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/repos/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/repos/A', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header },
+ # auth is configured and user2 is not allowed access to H
+ { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H,
+ 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass, 'headers': allow_header },
+ # try with the wrong password for user2
+ { 'path': '', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/repos', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/repos/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/repos/A', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+ { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header },
+
+ )
+
+ verify_gets(test_area_url, sallrall_tests)
+
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ anon,
+ mixed,
+ mixed_noauthwhenanon,
+ authn,
+ authn_anonoff,
+ authn_lcuser,
+ authn_group,
+ authn_sallrany,
+ authn_sallrall,
+ ]
+serial_only = True
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/mod_dav_svn_tests.py b/subversion/tests/cmdline/mod_dav_svn_tests.py
new file mode 100644
index 0000000..db30533
--- /dev/null
+++ b/subversion/tests/cmdline/mod_dav_svn_tests.py
@@ -0,0 +1,663 @@
+#!/usr/bin/env python
+#
+# mod_dav_svn_tests.py: testing mod_dav_svn
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os, logging, base64, functools
+
+try:
+ # Python <3.0
+ import httplib
+except ImportError:
+ # Python >=3.0
+ import http.client as httplib
+
+logger = logging.getLogger()
+
+# Our testing module
+import svntest
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+
+######################################################################
+# Helper routines
+
+def compare(lhs, rhs):
+ """Implements cmp() for Python 2 and 3 alike"""
+ if lhs == None:
+ if rhs == None:
+ return 0
+ else:
+ return -1
+ else:
+ if rhs == None:
+ return 1
+ else:
+ return (lhs > rhs) - (lhs < rhs)
+
+def compare_dict(lhs, rhs):
+ """Implements dictionary comparison for Python 2 and 3 alike"""
+ lhs_sorted = sorted(lhs, key=lambda x:sorted(x.keys()))
+ rhs_sorted = sorted(rhs, key=lambda x:sorted(x.keys()))
+ return (lhs_sorted > rhs_sorted) - (lhs_sorted < rhs_sorted)
+
+def compare_xml_elem(a, b):
+ """Recursively compare two xml.etree.ElementTree.Element objects.
+ Return a 3-tuple made out of (cmp, elem_a, elem_b), where cmp is
+ the integer result of the comparison (negative, zero or positive),
+ and elem_a and elem_b point to mismatching elements. Iff cmp is
+ zero, elem_a and elem_b are None. """
+
+ # Compare tags, attributes, inner text, tail attribute and the
+ # number of child elements.
+ res = compare(a.tag, b.tag)
+ if res != 0:
+ return res, a, b
+ # Don't care about the order of the attributes.
+ res = compare_dict(a.attrib, b.attrib)
+ if res != 0:
+ return res, a, b
+ res = compare(a.text, b.text)
+ if res != 0:
+ return res, a, b
+ res = compare(a.tail, b.tail)
+ if res != 0:
+ return res, a, b
+ res = compare(len(a), len(b))
+ if res != 0:
+ return res, a, b
+
+ # Prior to recursing, order child elements using the same comparator.
+ # Right now we don't care about the order of the elements. For instance,
+ # <D:response>'s in PROPFIND *need* to be compared without a particular
+ # order, since the server returns them in an unstable order of the hash
+ # iteration.
+ def sortcmp(x, y):
+ return compare_xml_elem(x, y)[0]
+
+ a_children = sorted(list(a), key=functools.cmp_to_key(sortcmp))
+ b_children = sorted(list(b), key=functools.cmp_to_key(sortcmp))
+
+ for a_child, b_child in zip(a_children, b_children):
+ res = compare_xml_elem(a_child, b_child)
+ if res[0] != 0:
+ return res
+
+ # Elements are equal.
+ return 0, None, None
+
+def verify_xml_response(expected_xml, actual_xml):
+ """Parse and compare two XML responses, raise svntest.Failure
+ in case EXPECTED_XML doesn't match ACTUAL_XML. """
+
+ import xml.etree.ElementTree as ET
+
+ expected_root = ET.fromstring(expected_xml)
+ actual_root = ET.fromstring(actual_xml)
+ res, expected_elem, actual_elem = compare_xml_elem(expected_root,
+ actual_root)
+ if res != 0:
+ # The actual response doesn't match our expectations; dump it for
+ # debugging purposes, and highlight the mismatching xml element.
+ logger.warn("Response:\n%s" % actual_xml)
+ raise svntest.Failure("Unexpected response part\n"
+ " Expected: '%s'\n Actual: '%s'\n"
+ % (ET.tostring(expected_elem),
+ ET.tostring(actual_elem)))
+
+######################################################################
+# Tests
+
+@SkipUnless(svntest.main.is_ra_type_dav)
+def cache_control_header(sbox):
+ "verify 'Cache-Control' headers on responses"
+
+ sbox.build(create_wc=False, read_only=True)
+
+ headers = {
+ 'Authorization': 'Basic ' + base64.b64encode(b'jconstant:rayjandom').decode(),
+ }
+
+ h = svntest.main.create_http_connection(sbox.repo_url)
+
+ # GET /repos/iota
+ # Response depends on the youngest revision in the repository, and
+ # can't be cached; expect to see Cache-Control: max-age=0.
+ h.request('GET', sbox.repo_url + '/iota', None, headers)
+ r = h.getresponse()
+ if r.status != httplib.OK:
+ raise svntest.Failure('Request failed: %d %s' % (r.status, r.reason))
+ svntest.verify.compare_and_display_lines(None, 'Cache-Control',
+ 'max-age=0',
+ r.getheader('Cache-Control'))
+ r.read()
+
+ # GET /repos/A/
+ # Response depends on the youngest revision in the repository, and
+ # can't be cached; expect to see Cache-Control: max-age=0.
+ h.request('GET', sbox.repo_url + '/A/', None, headers)
+ r = h.getresponse()
+ if r.status != httplib.OK:
+ raise svntest.Failure('Request failed: %d %s' % (r.status, r.reason))
+ svntest.verify.compare_and_display_lines(None, 'Cache-Control',
+ 'max-age=0',
+ r.getheader('Cache-Control'))
+ r.read()
+
+ # GET /repos/A/?p=1
+ # Response for a pegged directory is a subject for authz filtering, and
+ # can't be cached; expect to see Cache-Control: max-age=0.
+ h.request('GET', sbox.repo_url + '/A/?p=1', None, headers)
+ r = h.getresponse()
+ if r.status != httplib.OK:
+ raise svntest.Failure('Request failed: %d %s' % (r.status, r.reason))
+ svntest.verify.compare_and_display_lines(None, 'Cache-Control',
+ 'max-age=0',
+ r.getheader('Cache-Control'))
+ r.read()
+
+ # GET /repos/iota?r=1
+ # Response for a file URL with ?r=WORKINGREV is mutable, because the
+ # line of history for this file can be replaced in the future (hence,
+ # the same request will start producing another response). Expect to
+ # see Cache-Control: max-age=0.
+ h.request('GET', sbox.repo_url + '/iota?r=1', None, headers)
+ r = h.getresponse()
+ if r.status != httplib.OK:
+ raise svntest.Failure('Request failed: %d %s' % (r.status, r.reason))
+ svntest.verify.compare_and_display_lines(None, 'Cache-Control',
+ 'max-age=0',
+ r.getheader('Cache-Control'))
+ r.read()
+
+ # GET /repos/iota?p=1
+ # Response for a pegged file is immutable; expect to see Cache-Control
+ # with non-zero max-age.
+ h.request('GET', sbox.repo_url + '/iota?p=1', None, headers)
+ r = h.getresponse()
+ if r.status != httplib.OK:
+ raise svntest.Failure('Request failed: %d %s' % (r.status, r.reason))
+ svntest.verify.compare_and_display_lines(None, 'Cache-Control',
+ 'max-age=604800',
+ r.getheader('Cache-Control'))
+ r.read()
+
+ # GET /repos/iota?p=1&r=1
+ # Response for a file URL with both ?p=PEG_REV and ?r=WORKINGREV is
+ # immutable; expect to see Cache-Control with non-zero max-age.
+ h.request('GET', sbox.repo_url + '/iota?p=1&r=1', None, headers)
+ r = h.getresponse()
+ if r.status != httplib.OK:
+ raise svntest.Failure('Request failed: %d %s' % (r.status, r.reason))
+ svntest.verify.compare_and_display_lines(None, 'Cache-Control',
+ 'max-age=604800',
+ r.getheader('Cache-Control'))
+ r.read()
+
+
+ # GET /repos/!svn/rvr/1/iota
+ # Response is immutable; expect to see Cache-Control with non-zero max-age.
+ h.request('GET', sbox.repo_url + '/!svn/rvr/1/iota', None, headers)
+ r = h.getresponse()
+ if r.status != httplib.OK:
+ raise svntest.Failure('Request failed: %d %s' % (r.status, r.reason))
+ svntest.verify.compare_and_display_lines(None, 'Cache-Control',
+ 'max-age=604800',
+ r.getheader('Cache-Control'))
+ r.read()
+
+
+@SkipUnless(svntest.main.is_ra_type_dav)
+def simple_propfind(sbox):
+ "verify simple PROPFIND responses"
+
+ sbox.build(create_wc=False, read_only=True)
+ repo_uripath = '/' + svntest.wc.svn_uri_quote(
+ svntest.main.pristine_greek_repos_dir.replace(os.path.sep, '/'))
+ h = svntest.main.create_http_connection(sbox.repo_url)
+
+ # PROPFIND /repos/!svn/rvr/1, Depth = 0
+ headers = {
+ 'Authorization': 'Basic ' + base64.b64encode(b'jconstant:rayjandom').decode(),
+ 'Depth': '0',
+ }
+ req_body = (
+ '<?xml version="1.0" encoding="utf-8"?>\n'
+ '<propfind xmlns="DAV:">\n'
+ '<prop><resourcetype xmlns="DAV:"/></prop>\n'
+ '</propfind>\n'
+ )
+ h.request('PROPFIND', sbox.repo_url + '/!svn/rvr/1', req_body, headers)
+ r = h.getresponse()
+ if r.status != httplib.MULTI_STATUS:
+ raise svntest.Failure('Unexpected status: %d %s' % (r.status, r.reason))
+
+ expected_response = (
+ '<?xml version="1.0" encoding="utf-8"?>\n'
+ '<D:multistatus xmlns:D="DAV:" xmlns:ns0="DAV:">\n'
+ '<D:response xmlns:lp1="DAV:" '
+ 'xmlns:lp2="http://subversion.tigris.org/xmlns/dav/">\n'
+ '<D:href>' + repo_uripath + '/!svn/rvr/1/</D:href>\n'
+ '<D:propstat>\n'
+ '<D:prop>\n'
+ '<lp1:resourcetype><D:collection/></lp1:resourcetype>\n'
+ '</D:prop>\n'
+ '<D:status>HTTP/1.1 200 OK</D:status>\n'
+ '</D:propstat>\n'
+ '</D:response>\n'
+ '</D:multistatus>\n'
+ )
+ actual_response = r.read()
+ verify_xml_response(expected_response, actual_response)
+
+ # PROPFIND /repos/!svn/rvr/1, Depth = 1
+ headers = {
+ 'Authorization': 'Basic ' + base64.b64encode(b'jconstant:rayjandom').decode(),
+ 'Depth': '1',
+ }
+ req_body = (
+ '<?xml version="1.0" encoding="utf-8"?>\n'
+ '<propfind xmlns="DAV:">\n'
+ '<prop><resourcetype xmlns="DAV:"/></prop>\n'
+ '</propfind>\n'
+ )
+ h.request('PROPFIND', sbox.repo_url + '/!svn/rvr/1', req_body, headers)
+ r = h.getresponse()
+ if r.status != httplib.MULTI_STATUS:
+ raise svntest.Failure('Unexpected status: %d %s' % (r.status, r.reason))
+
+ expected_response = (
+ '<?xml version="1.0" encoding="utf-8"?>\n'
+ '<D:multistatus xmlns:D="DAV:" xmlns:ns0="DAV:">\n'
+ '<D:response xmlns:lp1="DAV:" '
+ 'xmlns:lp2="http://subversion.tigris.org/xmlns/dav/">\n'
+ '<D:href>' + repo_uripath + '/!svn/rvr/1/</D:href>\n'
+ '<D:propstat>\n'
+ '<D:prop>\n'
+ '<lp1:resourcetype><D:collection/></lp1:resourcetype>\n'
+ '</D:prop>\n'
+ '<D:status>HTTP/1.1 200 OK</D:status>\n'
+ '</D:propstat>\n'
+ '</D:response>\n'
+ '<D:response xmlns:lp1="DAV:" '
+ 'xmlns:lp2="http://subversion.tigris.org/xmlns/dav/">\n'
+ '<D:href>' + repo_uripath + '/!svn/rvr/1/A/</D:href>\n'
+ '<D:propstat>\n'
+ '<D:prop>\n'
+ '<lp1:resourcetype><D:collection/></lp1:resourcetype>\n'
+ '</D:prop>\n'
+ '<D:status>HTTP/1.1 200 OK</D:status>\n'
+ '</D:propstat>\n'
+ '</D:response>\n'
+ '<D:response xmlns:lp1="DAV:" '
+ 'xmlns:lp2="http://subversion.tigris.org/xmlns/dav/">\n'
+ '<D:href>' + repo_uripath + '/!svn/rvr/1/iota</D:href>\n'
+ '<D:propstat>\n'
+ '<D:prop>\n'
+ '<lp1:resourcetype/>\n'
+ '</D:prop>\n'
+ '<D:status>HTTP/1.1 200 OK</D:status>\n'
+ '</D:propstat>\n'
+ '</D:response>\n'
+ '</D:multistatus>\n'
+ )
+ actual_response = r.read()
+ verify_xml_response(expected_response, actual_response)
+
+ # PROPFIND /repos/!svn/rvr/1/A/B/F, Depth = 1
+ headers = {
+ 'Authorization': 'Basic ' + base64.b64encode(b'jconstant:rayjandom').decode(),
+ 'Depth': '1',
+ }
+ req_body = (
+ '<?xml version="1.0" encoding="utf-8"?>\n'
+ '<propfind xmlns="DAV:">\n'
+ '<prop><resourcetype xmlns="DAV:"/></prop>\n'
+ '</propfind>\n'
+ )
+ h.request('PROPFIND', sbox.repo_url + '/!svn/rvr/1/A/B/F', req_body, headers)
+ r = h.getresponse()
+ if r.status != httplib.MULTI_STATUS:
+ raise svntest.Failure('Unexpected status: %d %s' % (r.status, r.reason))
+
+ expected_response = (
+ '<?xml version="1.0" encoding="utf-8"?>\n'
+ '<D:multistatus xmlns:D="DAV:" xmlns:ns0="DAV:">\n'
+ '<D:response xmlns:lp1="DAV:" '
+ 'xmlns:lp2="http://subversion.tigris.org/xmlns/dav/">\n'
+ '<D:href>' + repo_uripath + '/!svn/rvr/1/A/B/F/</D:href>\n'
+ '<D:propstat>\n'
+ '<D:prop>\n'
+ '<lp1:resourcetype><D:collection/></lp1:resourcetype>\n'
+ '</D:prop>\n'
+ '<D:status>HTTP/1.1 200 OK</D:status>\n'
+ '</D:propstat>\n'
+ '</D:response>\n'
+ '</D:multistatus>\n'
+ )
+ actual_response = r.read()
+ verify_xml_response(expected_response, actual_response)
+
+ # PROPFIND /repos/!svn/rvr/1/iota, Depth = 0
+ headers = {
+ 'Authorization': 'Basic ' + base64.b64encode(b'jconstant:rayjandom').decode(),
+ 'Depth': '0',
+ }
+ req_body = (
+ '<?xml version="1.0" encoding="utf-8"?>\n'
+ '<propfind xmlns="DAV:">\n'
+ '<prop><resourcetype xmlns="DAV:"/></prop>\n'
+ '</propfind>\n'
+ )
+ h.request('PROPFIND', sbox.repo_url + '/!svn/rvr/1/iota', req_body, headers)
+ r = h.getresponse()
+ if r.status != httplib.MULTI_STATUS:
+ raise svntest.Failure('Unexpected status: %d %s' % (r.status, r.reason))
+
+ expected_response = (
+ '<?xml version="1.0" encoding="utf-8"?>\n'
+ '<D:multistatus xmlns:D="DAV:" xmlns:ns0="DAV:">\n'
+ '<D:response xmlns:lp1="DAV:" '
+ 'xmlns:lp2="http://subversion.tigris.org/xmlns/dav/">\n'
+ '<D:href>' + repo_uripath + '/!svn/rvr/1/iota</D:href>\n'
+ '<D:propstat>\n'
+ '<D:prop>\n'
+ '<lp1:resourcetype/>\n'
+ '</D:prop>\n'
+ '<D:status>HTTP/1.1 200 OK</D:status>\n'
+ '</D:propstat>\n'
+ '</D:response>\n'
+ '</D:multistatus>\n'
+ )
+ actual_response = r.read()
+ verify_xml_response(expected_response, actual_response)
+
+
+@SkipUnless(svntest.main.is_ra_type_dav)
+def propfind_multiple_props(sbox):
+ "verify multi-prop PROPFIND response"
+
+ sbox.build(create_wc=False, read_only=True)
+ repo_uripath = '/' + svntest.wc.svn_uri_quote(
+ svntest.main.pristine_greek_repos_dir.replace(os.path.sep, '/'))
+ h = svntest.main.create_http_connection(sbox.repo_url)
+
+ # PROPFIND /repos/!svn/rvr/1/iota, Depth = 0
+ headers = {
+ 'Authorization': 'Basic ' + base64.b64encode(b'jconstant:rayjandom').decode(),
+ 'Depth': '0',
+ }
+ req_body = (
+ '<?xml version="1.0" encoding="utf-8" ?>\n'
+ '<D:propfind xmlns:D="DAV:">\n'
+ '<D:prop xmlns:S="http://subversion.tigris.org/xmlns/dav/">\n'
+ '<D:resourcetype/>\n'
+ '<S:md5-checksum/>\n'
+ '</D:prop>\n'
+ '</D:propfind>\n'
+ )
+ h.request('PROPFIND', sbox.repo_url + '/!svn/rvr/1/iota', req_body, headers)
+ r = h.getresponse()
+ if r.status != httplib.MULTI_STATUS:
+ raise svntest.Failure('Unexpected status: %d %s' % (r.status, r.reason))
+
+ expected_response = (
+ '<?xml version="1.0" encoding="utf-8"?>\n'
+ '<D:multistatus xmlns:D="DAV:" '
+ 'xmlns:ns1="http://subversion.tigris.org/xmlns/dav/" '
+ 'xmlns:ns0="DAV:">\n'
+ '<D:response xmlns:lp1="DAV:" '
+ 'xmlns:lp2="http://subversion.tigris.org/xmlns/dav/">\n'
+ '<D:href>' + repo_uripath + '/!svn/rvr/1/iota</D:href>\n'
+ '<D:propstat>\n'
+ '<D:prop>\n'
+ '<lp1:resourcetype/>\n'
+ '<lp2:md5-checksum>'
+ '2d18c5e57e84c5b8a5e9a6e13fa394dc'
+ '</lp2:md5-checksum>\n'
+ '</D:prop>\n'
+ '<D:status>HTTP/1.1 200 OK</D:status>\n'
+ '</D:propstat>\n'
+ '</D:response>\n'
+ '</D:multistatus>\n'
+ )
+ actual_response = r.read()
+ verify_xml_response(expected_response, actual_response)
+
+
+@SkipUnless(svntest.main.is_ra_type_dav)
+def propfind_404(sbox):
+ "verify PROPFIND for non-existing property"
+
+ sbox.build(create_wc=False, read_only=True)
+ repo_uripath = '/' + svntest.wc.svn_uri_quote(
+ svntest.main.pristine_greek_repos_dir.replace(os.path.sep, '/'))
+ h = svntest.main.create_http_connection(sbox.repo_url)
+
+ # PROPFIND /repos/!svn/rvr/1, Depth = 0
+ headers = {
+ 'Authorization': 'Basic ' + base64.b64encode(b'jconstant:rayjandom').decode(),
+ 'Depth': '0',
+ }
+ req_body = (
+ '<?xml version="1.0" encoding="utf-8"?>\n'
+ '<propfind xmlns="DAV:">\n'
+ '<prop><nonexistingprop xmlns="DAV:"/></prop>\n'
+ '</propfind>\n'
+ )
+ h.request('PROPFIND', sbox.repo_url + '/!svn/rvr/1', req_body, headers)
+ r = h.getresponse()
+ if r.status != httplib.MULTI_STATUS:
+ raise svntest.Failure('Unexpected status: %d %s' % (r.status, r.reason))
+
+ expected_response = (
+ '<?xml version="1.0" encoding="utf-8"?>\n'
+ '<D:multistatus xmlns:D="DAV:" xmlns:ns0="DAV:">\n'
+ '<D:response xmlns:g0="DAV:">\n'
+ '<D:href>' + repo_uripath + '/!svn/rvr/1/</D:href>\n'
+ '<D:propstat>\n'
+ '<D:prop>\n'
+ '<g0:nonexistingprop/>\n'
+ '</D:prop>\n'
+ '<D:status>HTTP/1.1 404 Not Found</D:status>\n'
+ '</D:propstat>\n'
+ '</D:response>\n'
+ '</D:multistatus>\n'
+ )
+ actual_response = r.read()
+ verify_xml_response(expected_response, actual_response)
+
+
+@SkipUnless(svntest.main.is_ra_type_dav)
+def propfind_allprop(sbox):
+ "verify allprop PROPFIND response"
+
+ sbox.build()
+ repo_uripath = '/' + svntest.wc.svn_uri_quote(
+ sbox.repo_dir.replace(os.path.sep, '/'))
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+ # Ensure stable date and uuid
+ svntest.main.run_svnadmin('setuuid', sbox.repo_dir,
+ 'd7130b12-92f6-45c9-9217-b9f0472c3fab')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', '--revprop', '-r', '1',
+ 'svn:date', '2015-01-01T00:00:00.0Z',
+ sbox.wc_dir)
+
+ h = svntest.main.create_http_connection(sbox.repo_url)
+
+ # PROPFIND /repos/!svn/rvr/1, Depth = 0
+ headers = {
+ 'Authorization': 'Basic ' + base64.b64encode(b'jconstant:rayjandom').decode(),
+ 'Depth': '0',
+ }
+ req_body = (
+ '<?xml version="1.0" encoding="utf-8"?>\n'
+ '<propfind xmlns="DAV:">\n'
+ '<allprop/>\n'
+ '</propfind>\n'
+ )
+ h.request('PROPFIND', sbox.repo_url + '/!svn/rvr/1', req_body, headers)
+ r = h.getresponse()
+ if r.status != httplib.MULTI_STATUS:
+ raise svntest.Failure('Unexpected status: %d %s' % (r.status, r.reason))
+
+ expected_response = (
+ '<?xml version="1.0" encoding="utf-8"?>\n'
+ '<D:multistatus xmlns:D="DAV:" xmlns:ns0="DAV:">\n'
+ '<D:response xmlns:S="http://subversion.tigris.org/xmlns/svn/" '
+ 'xmlns:C="http://subversion.tigris.org/xmlns/custom/" '
+ 'xmlns:V="http://subversion.tigris.org/xmlns/dav/" '
+ 'xmlns:lp1="DAV:" '
+ 'xmlns:lp2="http://subversion.tigris.org/xmlns/dav/">\n'
+ '<D:href>' + repo_uripath + '/!svn/rvr/1/</D:href>\n'
+ '<D:propstat>\n'
+ '<D:prop>\n'
+ '<lp1:resourcetype><D:collection/></lp1:resourcetype>\n'
+ '<lp1:getcontenttype>' +
+ 'text/html; charset=UTF-8' +
+ '</lp1:getcontenttype>\n'
+ '<lp1:getetag>W/"1//"</lp1:getetag>\n'
+ '<lp1:creationdate>2015-01-01T00:00:00.0Z</lp1:creationdate>\n'
+ '<lp1:getlastmodified>' +
+ 'Thu, 01 Jan 2015 00:00:00 GMT' +
+ '</lp1:getlastmodified>\n'
+ '<lp1:checked-in>'
+ '<D:href>' + repo_uripath + '/!svn/ver/1/</D:href>'
+ '</lp1:checked-in>\n'
+ '<lp1:version-controlled-configuration>'
+ '<D:href>' + repo_uripath + '/!svn/vcc/default</D:href>'
+ '</lp1:version-controlled-configuration>\n'
+ '<lp1:version-name>1</lp1:version-name>\n'
+ '<lp1:creator-displayname>jrandom</lp1:creator-displayname>\n'
+ '<lp2:baseline-relative-path/>\n'
+ '<lp2:repository-uuid>' +
+ 'd7130b12-92f6-45c9-9217-b9f0472c3fab' +
+ '</lp2:repository-uuid>\n'
+ '<lp2:deadprop-count>0</lp2:deadprop-count>\n'
+ '<D:lockdiscovery/>\n'
+ '</D:prop>\n'
+ '<D:status>HTTP/1.1 200 OK</D:status>\n'
+ '</D:propstat>\n'
+ '</D:response>\n'
+ '</D:multistatus>\n'
+ )
+ actual_response = r.read()
+ verify_xml_response(expected_response, actual_response)
+
+
+@SkipUnless(svntest.main.is_ra_type_dav)
+def propfind_propname(sbox):
+ "verify propname PROPFIND response"
+
+ sbox.build()
+ sbox.simple_propset('a', 'b', 'iota')
+ sbox.simple_commit()
+ repo_uripath = '/' + svntest.wc.svn_uri_quote(
+ sbox.repo_dir.replace(os.path.sep, '/'))
+
+ h = svntest.main.create_http_connection(sbox.repo_url)
+
+ # PROPFIND /repos/!svn/rvr/2/iota, Depth = 0
+ headers = {
+ 'Authorization': 'Basic ' + base64.b64encode(b'jconstant:rayjandom').decode(),
+ 'Depth': '0',
+ }
+ req_body = (
+ '<?xml version="1.0" encoding="utf-8"?>\n'
+ '<propfind xmlns="DAV:">\n'
+ '<propname/>\n'
+ '</propfind>\n'
+ )
+ h.request('PROPFIND', sbox.repo_url + '/!svn/rvr/2/iota', req_body, headers)
+ r = h.getresponse()
+ if r.status != httplib.MULTI_STATUS:
+ raise svntest.Failure('Unexpected status: %d %s' % (r.status, r.reason))
+
+ expected_response = (
+ '<?xml version="1.0" encoding="utf-8"?>\n'
+ '<D:multistatus xmlns:D="DAV:" xmlns:ns0="DAV:">\n'
+ '<D:response xmlns:S="http://subversion.tigris.org/xmlns/svn/" '
+ 'xmlns:C="http://subversion.tigris.org/xmlns/custom/" '
+ 'xmlns:V="http://subversion.tigris.org/xmlns/dav/" '
+ 'xmlns:lp1="DAV:" '
+ 'xmlns:lp2="http://subversion.tigris.org/xmlns/dav/">\n'
+ '<D:href>' + repo_uripath + '/!svn/rvr/2/iota</D:href>\n'
+ '<D:propstat>\n'
+ '<D:prop>\n'
+ '<C:a/>\n'
+ '<lp1:resourcetype/>\n'
+ '<lp1:getcontentlength/>\n'
+ '<lp1:getcontenttype/>\n'
+ '<lp1:getetag/>\n'
+ '<lp1:creationdate/>\n'
+ '<lp1:getlastmodified/>\n'
+ '<lp1:checked-in/>\n'
+ '<lp1:version-controlled-configuration/>\n'
+ '<lp1:version-name/>\n'
+ '<lp1:creator-displayname/>\n'
+ '<lp2:baseline-relative-path/>\n'
+ '<lp2:md5-checksum/>\n'
+ '<lp2:repository-uuid/>\n'
+ '<lp2:deadprop-count/>\n'
+ '<lp2:sha1-checksum/>\n'
+ '<D:supportedlock/>\n'
+ '<D:lockdiscovery/>\n'
+ '</D:prop>\n'
+ '<D:status>HTTP/1.1 200 OK</D:status>\n'
+ '</D:propstat>\n'
+ '</D:response>\n'
+ '</D:multistatus>\n'
+ )
+ actual_response = r.read()
+ verify_xml_response(expected_response, actual_response)
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ cache_control_header,
+ simple_propfind,
+ propfind_multiple_props,
+ propfind_404,
+ propfind_allprop,
+ propfind_propname,
+ ]
+serial_only = True
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/move_tests.py b/subversion/tests/cmdline/move_tests.py
new file mode 100755
index 0000000..1542af9
--- /dev/null
+++ b/subversion/tests/cmdline/move_tests.py
@@ -0,0 +1,1795 @@
+#!/usr/bin/env python
+#
+# move_tests.py: testing the local move tracking
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+
+# General modules
+import os, re, logging
+
+logger = logging.getLogger()
+
+# Our testing module
+import svntest
+from svntest import wc, actions, verify
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+exp_noop_up_out = svntest.actions.expected_noop_update_output
+
+def build_incoming_changes_file(sbox, source, dest):
+ "Build up revs to receive incoming changes over our local file move"
+
+ # r1 = greek tree sandbox
+
+ # r2 = Modify source of moved file
+ sbox.simple_append(source, "modified\n")
+ sbox.simple_commit(message="Modify source of moved file")
+
+ # r3 = Delete source of moved file
+ sbox.simple_rm(source)
+ sbox.simple_commit(message="Delete source of moved file")
+
+ # r4 = Replace source of moved file
+ # To get a replace update from r2 to r4.
+ sbox.simple_add_text("This is the replaced file.\n", source)
+ sbox.simple_commit(message="Replace source of moved file")
+
+ # r5 = Add destination of moved file
+ sbox.simple_add_text("This is the destination file.\n", dest)
+ sbox.simple_commit(message="Add destination of moved file")
+
+ # r6 = Modify destination of moved file
+ sbox.simple_append(dest, "modified\n")
+ sbox.simple_commit(message="Modify destination of moved file")
+
+ # r7 = Delete destination of moved file
+ sbox.simple_rm(dest)
+ sbox.simple_commit(message="Delete destination of moved file")
+
+ # r8 = Copy destination of moved file
+ sbox.simple_copy('A/mu', dest)
+ sbox.simple_commit(message="Copy destination of moved file")
+
+ # r9 = Replace destination of moved file
+ sbox.simple_rm(dest)
+ sbox.simple_add_text("This is the destination file.\n", dest)
+ sbox.simple_commit(message="Replace destination of moved file")
+
+ # r10 = Add property on destination of moved file.
+ sbox.simple_propset("foo", "bar", dest)
+ sbox.simple_commit(message="Add property on destination of moved file")
+
+ # r11 = Modify property on destination of moved file.
+ sbox.simple_propset("foo", "baz", dest)
+ sbox.simple_commit(message="Modify property on destination of moved file")
+
+ # r12 = Delete property on destination of moved file.
+ sbox.simple_propdel("foo", dest)
+ sbox.simple_commit(message="Delete property on destination of moved file")
+
+ # r13 = Remove destination again (not needed for any test just cleanup).
+ sbox.simple_rm(dest)
+ sbox.simple_commit(message="Remove destination (cleanup)")
+
+ # r14 = Add property on source of moved file.
+ sbox.simple_propset("foo", "bar", source)
+ sbox.simple_commit(message="Add property on source of moved file")
+
+ # r15 = Modify property on source of moved file.
+ sbox.simple_propset("foo", "baz", source)
+ sbox.simple_commit(message="Modify property on source of moved file")
+
+ # r16 = Delete property on source of moved file.
+ sbox.simple_propdel("foo", source)
+ sbox.simple_commit(message="Delete property on source of moved file")
+
+ # r17 = Move that is identical to our local move.
+ sbox.simple_move(source, dest)
+ sbox.simple_commit(message="Identical move to our local move")
+
+def move_file_test(sbox, source, dest, move_func, test):
+ """Execute a series of actions to test local move tracking. sbox is the
+ sandbox we're working in, source is the source of the move, dest is the
+ destination for the move and tests is various other parameters of the move
+ testing. In particular:
+ start_rev: revision to update to before starting
+ start_output: validate the output of the start update against this.
+ start_disk: validate the on disk state after the start update against this.
+ start_status: validate the wc status after the start update against this.
+ end_rev: revision to update to, bringing in some update you want to test.
+  up_output: validate the output of the end update against this.
+ up_disk: validate the on disk state after the end update against this.
+ up_status: validate the wc status after the end update against this.
+ revert_paths: validate the paths reverted.
+  resolves: A dictionary of resolve accept arguments to test, the whole test will
+    be run for each.  The value is a dictionary with the following keys:
+ output: validate the output of the resolve command against this.
+ error: validate the error of the resolve command against this.
+ status: validate the wc status after the resolve against this.
+ revert_paths: override the paths reverted check in the test."""
+
+ wc_dir = sbox.wc_dir
+
+ source_path = sbox.ospath(source)
+ dest_path = sbox.ospath(dest)
+
+ # Deal with if there's no resolves key, as in we're not going to
+ # do a resolve.
+ if not 'resolves' in test or not test['resolves']:
+ test['resolves'] = {None: None}
+
+ # Do the test for every type of resolve provided.
+ for resolve_accept in test['resolves'].keys():
+
+ # update to start_rev
+ svntest.actions.run_and_verify_update(wc_dir, test['start_output'],
+ test['start_disk'], test['start_status'],
+ [], False,
+ '-r', test['start_rev'], wc_dir)
+ # execute the move
+ move_func(test['start_rev'])
+
+ # update to end_rev, which will create a conflict
+ # TODO: Limit the property checks to only when we're doing something with
+ # properties.
+ svntest.actions.run_and_verify_update(wc_dir, test['up_output'],
+ test['up_disk'], test['up_status'],
+ [], True,
+ '-r', test['end_rev'], wc_dir)
+
+ revert_paths = None
+ if 'revert_paths' in test:
+ revert_paths = test['revert_paths']
+
+ # resolve the conflict
+ # TODO: Switch to using run_and_verify_resolve, can't use it right now because
+ # it's not friendly with the output of resolutions right now.
+ if resolve_accept:
+ resolve = test['resolves'][resolve_accept]
+ if not 'output' in resolve:
+ resolve['output'] = None
+ if not 'error' in resolve:
+ resolve['error'] = []
+ if not 'disk' in resolve:
+ resolve['disk'] = None
+ if 'revert_paths' in resolve:
+ revert_paths = resolve['revert_paths']
+ svntest.actions.run_and_verify_svn(resolve['output'], resolve['error'],
+ 'resolve', '--accept', resolve_accept,
+ '-R', wc_dir)
+
+ # TODO: This should be moved into the run_and_verify_resolve mentioned
+ # above.
+ if resolve['status']:
+ svntest.actions.run_and_verify_status(wc_dir, resolve['status'])
+
+ # TODO: This should be moved into the run_and_verify_resolve mentioned
+ # above.
+ if resolve['disk']:
+ svntest.actions.verify_disk(wc_dir, resolve['disk'], True)
+
+    # revert to prepare for the next test
+ svntest.actions.run_and_verify_revert(revert_paths, '-R', wc_dir)
+
+# tests is an array of test dictionaries that move_file_test above will take
+def move_file_tests(sbox, source, dest, move_func, tests):
+ for test in tests:
+ move_file_test(sbox, source, dest, move_func, test)
+
+def build_simple_file_move_tests(sbox, source, dest):
+ """Given a sandbox, source and destination build the array of tests for
+ a file move"""
+
+ wc_dir = sbox.wc_dir
+ source_path = sbox.ospath(source)
+ dest_path = sbox.ospath(dest)
+
+ # Build the tests list
+ tests = []
+
+ # move and update with incoming change to source (r1-2).
+ test = {}
+ test['start_rev'] = 1
+ test['end_rev'] = 2
+ test['start_output'] = None
+ test['start_disk'] = None
+ test['start_status'] = None
+ test['up_output'] = svntest.wc.State(wc_dir, {
+ source : Item(status=' ', treeconflict='C'),
+ })
+ test['up_disk'] = svntest.main.greek_state.copy()
+ test['up_disk'].add({
+ dest: Item("This is the file 'lambda'.\n")
+ })
+ test['up_disk'].remove(source)
+ test['up_status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ test['up_status'].tweak(source, status='D ', moved_to=dest,
+ treeconflict='C')
+ test['up_status'].add({dest: Item(status='A ', moved_from=source,
+ copied='+', wc_rev='-')})
+ mc = {}
+ mc['output'] = svntest.verify.ExpectedOutput(
+ "Tree conflict at '%s' marked as resolved.\n" % source_path, match_all=False
+ )
+ mc['status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ mc['status'].tweak(source, status='D ', moved_to=dest)
+ mc['status'].add({dest: Item(status='A ', moved_from=source,
+ copied='+', wc_rev='-')})
+ mc['disk'] = test['up_disk'].copy()
+ mc['disk'].tweak(dest, contents="This is the file 'lambda'.\nmodified\n")
+ # working breaks the move
+ working = {}
+ working['output'] = svntest.verify.ExpectedOutput(
+ [
+ "Breaking move with source path '%s'\n" % source_path,
+ "Tree conflict at '%s' marked as resolved.\n" % source_path,
+ ]
+ )
+ working['status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ working['status'].tweak(source, status='D ')
+ working['status'].add({dest: Item(status='A ', copied='+', wc_rev='-')})
+ working['disk'] = test['up_disk']
+ test['resolves'] = {'mine-conflict': mc,
+ 'working': working}
+ test['revert_paths'] = [source_path, dest_path]
+ tests.append(test)
+
+ # move and update with incoming deletion of source (r2-3)
+ test = {}
+ test['start_rev'] = 2
+ test['end_rev'] = 3
+ test['start_output'] = None
+ test['start_disk'] = None
+ test['start_status'] = None
+ test['up_output'] = svntest.wc.State(wc_dir, {
+ source : Item(status=' ', treeconflict='C'),
+ })
+ test['up_disk'] = svntest.main.greek_state.copy()
+ test['up_disk'].add({
+ dest: Item("This is the file 'lambda'.\nmodified\n")
+ })
+ test['up_disk'].remove(source)
+ test['up_status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ test['up_status'].tweak(source, status='! ', treeconflict='C', wc_rev=None)
+ test['up_status'].add({dest: Item(status='A ', copied='+', wc_rev='-')})
+ # mine-conflict doesn't work.
+ mc = {}
+ mc['error'] = svntest.verify.RegexOutput(".*: .*: W195024:.*", match_all=False)
+ mc['status'] = test['up_status']
+ mc['disk'] = test['up_disk']
+ working = {}
+  # XXX: Doesn't say it broke the move, but it should.
+ working['output'] = svntest.verify.ExpectedOutput(
+ "Tree conflict at '%s' marked as resolved.\n" % source_path, match_all=False
+ )
+ # move is broken now
+ working['status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ working['status'].add({dest: Item(status='A ', copied='+', wc_rev='-')})
+ working['status'].remove(source)
+ working['disk'] = test['up_disk']
+ working['revert_paths'] = [dest_path]
+ test['resolves'] = {'mine-conflict': mc,
+ 'working': working}
+ test['revert_paths'] = [dest_path, source_path]
+ tests.append(test)
+
+ # move and update with incoming replacement of source (r2-4)
+ test = {}
+ test['start_rev'] = 2
+ test['end_rev'] = 4
+ test['start_output'] = None
+ test['start_disk'] = None
+ test['start_status'] = None
+ test['up_output'] = svntest.wc.State(wc_dir, {
+ source : Item(status=' ', prev_status=' ', treeconflict='A',
+ prev_treeconflict='C'),
+ })
+ test['up_disk'] = svntest.main.greek_state.copy()
+ test['up_disk'].add({
+ dest: Item("This is the file 'lambda'.\nmodified\n")
+ })
+ test['up_disk'].remove(source)
+ test['up_status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ # XXX: Is entry_status=' ' really right here?
+ test['up_status'].tweak(source, status='! ', treeconflict='C', entry_status=' ')
+ test['up_status'].add({dest: Item(status='A ', copied='+', wc_rev='-')})
+ # mine-conflict doesn't work.
+ mc = {}
+ mc['error'] = svntest.verify.RegexOutput(".*: .*: W195024:.*", match_all=False)
+ mc['status'] = test['up_status']
+ mc['disk'] = test['up_disk']
+ working = {}
+ # XXX: Broke the move but doesn't notify that it does.
+ working['output'] = svntest.verify.ExpectedOutput(
+ "Tree conflict at '%s' marked as resolved.\n" % source_path, match_all=False
+ )
+ # XXX: Not sure this status is really correct here
+ working['status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ working['status'].tweak(source, status='! ')
+ working['status'].add({dest: Item(status='A ', copied='+', wc_rev='-')})
+ working['disk'] = test['up_disk']
+ test['resolves'] = {'mine-conflict': mc,
+ 'working': working}
+ test['revert_paths'] = [source_path, dest_path]
+ tests.append(test)
+
+ # move and update with incoming add of dest (r4-5)
+ test = {}
+ test['start_rev'] = 4
+ test['end_rev'] = 5
+ test['start_output'] = None
+ test['start_disk'] = None
+ test['start_status'] = None
+ test['up_output'] = svntest.wc.State(wc_dir, {
+ dest : Item(status=' ', treeconflict='C'),
+ })
+ test['up_disk'] = svntest.main.greek_state.copy()
+ test['up_disk'].add({
+ dest: Item("This is the replaced file.\n")
+ })
+ test['up_disk'].remove(source)
+ test['up_status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ test['up_status'].tweak(source, status='D ', moved_to=dest)
+ test['up_status'].add({dest: Item(status='R ', copied='+', treeconflict='C',
+ wc_rev='-', moved_from=source)})
+ # mine-conflict doesn't work.
+ mc = {}
+ mc['error'] = svntest.verify.RegexOutput(".*: .*: W195024:.*", match_all=False)
+ mc['status'] = test['up_status']
+ mc['disk'] = test['up_disk']
+ working = {}
+ # XXX: Doesn't say what it did.
+ working['output'] = svntest.verify.ExpectedOutput(
+ "Tree conflict at '%s' marked as resolved.\n" % dest_path, match_all=False
+ )
+ # working converts the move into a replacement
+ working['status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ working['status'].tweak(source, status='D ', moved_to=dest)
+ working['status'].add({dest: Item(status='R ', moved_from=source,
+ copied='+', wc_rev='-')})
+ working['disk'] = test['up_disk']
+ test['resolves'] = {'mine-conflict': mc,
+ 'working': working}
+ test['revert_paths'] = [source_path, dest_path]
+ tests.append(test)
+
+ # move and update with incoming add of dest (r4-6)
+ # we're going 4-6 because we're not testing a replacement move
+ test = {}
+ test['start_rev'] = 4
+ test['end_rev'] = 6
+ test['start_output'] = None
+ test['start_disk'] = None
+ test['start_status'] = None
+ test['up_output'] = svntest.wc.State(wc_dir, {
+ dest : Item(status=' ', treeconflict='C'),
+ })
+ test['up_disk'] = svntest.main.greek_state.copy()
+ test['up_disk'].add({
+ dest: Item("This is the replaced file.\n")
+ })
+ test['up_disk'].remove(source)
+ test['up_status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ test['up_status'].tweak(source, status='D ', moved_to=dest)
+ test['up_status'].add({dest: Item(status='R ', copied='+', treeconflict='C',
+ wc_rev='-', moved_from=source)})
+ # mine-conflict doesn't work.
+ mc = {}
+ mc['error'] = svntest.verify.RegexOutput(".*: .*: W195024:.*", match_all=False)
+ mc['status'] = test['up_status']
+ mc['disk'] = test['up_disk']
+ working = {}
+ working['accept'] = 'working'
+ # XXX: Doesn't say what it did.
+ working['output'] = svntest.verify.ExpectedOutput(
+ "Tree conflict at '%s' marked as resolved.\n" % dest_path, match_all=False
+ )
+ # working converts the move into a replacement
+ working['status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ working['status'].tweak(source, status='D ', moved_to=dest)
+ working['status'].add({dest: Item(status='R ', moved_from=source,
+ copied='+', wc_rev='-')})
+ working['disk'] = test['up_disk']
+ test['resolves'] = {'mine-conflict': mc,
+ 'working': working}
+ test['revert_paths'] = [source_path, dest_path]
+ tests.append(test)
+
+ # move and update with incoming delete of dest (r4-7)
+ # Since we're not testing a replacement move the incoming delete has to
+ # be done starting from a rev where the file doesn't exist. So it ends
+ # up being a no-op update. So this test might be rather pointless.
+ test = {}
+ test['start_rev'] = 4
+ test['end_rev'] = 7
+ test['start_output'] = None
+ test['start_disk'] = None
+ test['start_status'] = None
+ test['up_output'] = svntest.wc.State(wc_dir, { })
+ test['up_disk'] = svntest.main.greek_state.copy()
+ test['up_disk'].add({
+ dest: Item("This is the replaced file.\n")
+ })
+ test['up_disk'].remove(source)
+ test['up_status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ test['up_status'].tweak(source, status='D ', moved_to=dest)
+ test['up_status'].add({dest: Item(status='A ', copied='+',
+ wc_rev='-', moved_from=source)})
+ # no conflict so no resolve.
+ test['revert_paths'] = [source_path, dest_path]
+ tests.append(test)
+
+ # move and update with incoming copy to dest (r7-8)
+ test = {}
+ test['start_rev'] = 7
+ test['end_rev'] = 8
+ test['start_output'] = None
+ test['start_disk'] = None
+ test['start_status'] = None
+ test['up_output'] = svntest.wc.State(wc_dir, {
+ dest : Item(status=' ', treeconflict='C'),
+ })
+ test['up_disk'] = svntest.main.greek_state.copy()
+ test['up_disk'].add({
+ dest: Item("This is the replaced file.\n")
+ })
+ test['up_disk'].remove(source)
+ test['up_status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ test['up_status'].tweak(source, status='D ', moved_to=dest)
+ test['up_status'].add({dest: Item(status='R ', copied='+', treeconflict='C',
+ wc_rev='-', moved_from=source)})
+ # mine-conflict doesn't work.
+ mc = {}
+ mc['error'] = svntest.verify.RegexOutput(".*: .*: W195024:.*", match_all=False)
+ mc['status'] = test['up_status']
+ mc['disk'] = test['up_disk']
+ working = {}
+ # XXX: Doesn't say what it did.
+ working['output'] = svntest.verify.ExpectedOutput(
+ "Tree conflict at '%s' marked as resolved.\n" % dest_path, match_all=False
+ )
+ # working converts the move into a replacement
+ working['status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ working['status'].tweak(source, status='D ', moved_to=dest)
+ working['status'].add({dest: Item(status='R ', moved_from=source,
+ copied='+', wc_rev='-')})
+ working['disk'] = test['up_disk']
+ test['resolves'] = {'mine-conflict': mc,
+ 'working': working}
+ test['revert_paths'] = [source_path, dest_path]
+ tests.append(test)
+
+ # move and update with incoming replace to dest (r7-9)
+ test = {}
+ test['start_rev'] = 7
+ test['end_rev'] = 9
+ test['start_output'] = None
+ test['start_disk'] = None
+ test['start_status'] = None
+ test['up_output'] = svntest.wc.State(wc_dir, {
+ dest : Item(status=' ', treeconflict='C'),
+ })
+ test['up_disk'] = svntest.main.greek_state.copy()
+ test['up_disk'].add({
+ dest: Item("This is the replaced file.\n")
+ })
+ test['up_disk'].remove(source)
+ test['up_status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ test['up_status'].tweak(source, status='D ', moved_to=dest)
+ test['up_status'].add({dest: Item(status='R ', copied='+', treeconflict='C',
+ wc_rev='-', moved_from=source)})
+ # mine-conflict doesn't work.
+ mc = {}
+ mc['error'] = svntest.verify.RegexOutput(".*: .*: W195024:.*", match_all=False)
+ mc['status'] = test['up_status']
+ mc['disk'] = test['up_disk']
+ working = {}
+ # XXX: Doesn't say what it did.
+ working['output'] = svntest.verify.ExpectedOutput(
+ "Tree conflict at '%s' marked as resolved.\n" % dest_path, match_all=False
+ )
+ # working converts the move into a replacement
+ working['status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ working['status'].tweak(source, status='D ', moved_to=dest)
+ working['status'].add({dest: Item(status='R ', moved_from=source,
+ copied='+', wc_rev='-')})
+ working['disk'] = test['up_disk']
+ test['resolves'] = {'mine-conflict': mc,
+ 'working': working}
+ test['revert_paths'] = [source_path, dest_path]
+ tests.append(test)
+
+ # move and update with incoming property addition to dest (r7-10)
+ test = {}
+ test['start_rev'] = 7
+ test['end_rev'] = 10
+ test['start_output'] = None
+ test['start_disk'] = None
+ test['start_status'] = None
+ test['up_output'] = svntest.wc.State(wc_dir, {
+ dest : Item(status=' ', treeconflict='C'),
+ })
+ test['up_disk'] = svntest.main.greek_state.copy()
+ test['up_disk'].add({
+ dest: Item("This is the replaced file.\n")
+ })
+ test['up_disk'].remove(source)
+ test['up_status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ test['up_status'].tweak(source, status='D ', moved_to=dest)
+ test['up_status'].add({dest: Item(status='R ', copied='+', treeconflict='C',
+ wc_rev='-', moved_from=source)})
+ # mine-conflict doesn't work.
+ mc = {}
+ mc['error'] = svntest.verify.RegexOutput(".*: .*: W195024:.*", match_all=False)
+ mc['status'] = test['up_status']
+ mc['disk'] = test['up_disk']
+ working = {}
+ # XXX: Didn't tell us what it did.
+ working['output'] = svntest.verify.ExpectedOutput(
+ "Tree conflict at '%s' marked as resolved.\n" % dest_path, match_all=False
+ )
+ # working converts the move into a replacement
+ working['status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ working['status'].tweak(source, status='D ', moved_to=dest)
+ working['status'].add({dest: Item(status='R ', moved_from=source,
+ copied='+', wc_rev='-')})
+ working['disk'] = test['up_disk']
+ test['resolves'] = {'mine-conflict': mc,
+ 'working': working}
+ test['revert_paths'] = [source_path, dest_path]
+ tests.append(test)
+
+ # move and update with incoming property modification to dest (r7-11)
+ test = {}
+ test['start_rev'] = 7
+ test['end_rev'] = 11
+ test['start_output'] = None
+ test['start_disk'] = None
+ test['start_status'] = None
+ test['up_output'] = svntest.wc.State(wc_dir, {
+ dest : Item(status=' ', treeconflict='C'),
+ })
+ test['up_disk'] = svntest.main.greek_state.copy()
+ test['up_disk'].add({
+ dest: Item("This is the replaced file.\n")
+ })
+ test['up_disk'].remove(source)
+ test['up_status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ test['up_status'].tweak(source, status='D ', moved_to=dest)
+ test['up_status'].add({dest: Item(status='R ', copied='+', treeconflict='C',
+ wc_rev='-', moved_from=source)})
+ # mine-conflict doesn't work.
+ mc = {}
+ mc['error'] = svntest.verify.RegexOutput(".*: .*: W195024:.*", match_all=False)
+ mc['status'] = test['up_status']
+ mc['disk'] = test['up_disk']
+ working = {}
+ # XXX: Doesn't tell you what it did.
+ working['output'] = svntest.verify.ExpectedOutput(
+ "Tree conflict at '%s' marked as resolved.\n" % dest_path, match_all=False
+ )
+ # working converts the move into a replacement
+ working['status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ working['status'].tweak(source, status='D ', moved_to=dest)
+ working['status'].add({dest: Item(status='R ', moved_from=source,
+ copied='+', wc_rev='-')})
+ working['disk'] = test['up_disk']
+ test['resolves'] = {'mine-conflict': mc,
+ 'working': working}
+ test['revert_paths'] = [source_path, dest_path]
+ tests.append(test)
+
+ # move and update with incoming property deletion to dest (r7-12)
+ test = {}
+ test['start_rev'] = 7
+ test['end_rev'] = 12
+ test['start_output'] = None
+ test['start_disk'] = None
+ test['start_status'] = None
+ test['up_output'] = svntest.wc.State(wc_dir, {
+ dest : Item(status=' ', treeconflict='C'),
+ })
+ test['up_disk'] = svntest.main.greek_state.copy()
+ test['up_disk'].add({
+ dest: Item("This is the replaced file.\n")
+ })
+ test['up_disk'].remove(source)
+ test['up_status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ test['up_status'].tweak(source, status='D ', moved_to=dest)
+ test['up_status'].add({dest: Item(status='R ', copied='+', treeconflict='C',
+ wc_rev='-', moved_from=source)})
+ # mine-conflict doesn't work.
+ mc = {}
+ mc['error'] = svntest.verify.RegexOutput(".*: .*: W195024:.*", match_all=False)
+ mc['status'] = test['up_status']
+ mc['disk'] = test['up_disk']
+ working = {}
+ # XXX: Doesn't tell you what it did.
+ working['output'] = svntest.verify.ExpectedOutput(
+ "Tree conflict at '%s' marked as resolved.\n" % dest_path, match_all=False
+ )
+ # working converts the move into a replacement
+ working['status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ working['status'].tweak(source, status='D ', moved_to=dest)
+ working['status'].add({dest: Item(status='R ', moved_from=source,
+ copied='+', wc_rev='-')})
+ working['disk'] = test['up_disk']
+ test['resolves'] = {'mine-conflict': mc,
+ 'working': working}
+ test['revert_paths'] = [source_path, dest_path]
+ tests.append(test)
+
+ # move and update with incoming property addition to source (r13-14)
+ test = {}
+ test['start_rev'] = 13
+ test['end_rev'] = 14
+ test['start_output'] = None
+ test['start_disk'] = None
+ test['start_status'] = None
+ test['up_output'] = svntest.wc.State(wc_dir, {
+ source : Item(status=' ', treeconflict='C'),
+ })
+ test['up_disk'] = svntest.main.greek_state.copy()
+ test['up_disk'].add({
+ dest: Item("This is the replaced file.\n")
+ })
+ test['up_disk'].remove(source)
+ test['up_status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ test['up_status'].tweak(source, status='D ', moved_to=dest, treeconflict='C')
+ test['up_status'].add({dest: Item(status='A ', copied='+', wc_rev='-',
+ moved_from=source)})
+ mc = {}
+ # TODO: Should check that the output includes that the update was applied to
+ # the destination
+ mc['output'] = svntest.verify.ExpectedOutput(
+ "Tree conflict at '%s' marked as resolved.\n" % source_path, match_all=False
+ )
+ mc['status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ mc['status'].tweak(source, status='D ', moved_to=dest)
+ mc['status'].add({dest: Item(status='A ', moved_from=source,
+ copied='+', wc_rev='-')})
+ mc['disk'] = test['up_disk'].copy()
+ mc['disk'].tweak(dest, props={u'foo': u'bar'})
+ working = {}
+ working['output'] = svntest.verify.ExpectedOutput(
+ [
+ "Breaking move with source path '%s'\n" % source_path,
+ "Tree conflict at '%s' marked as resolved.\n" % source_path
+ ]
+ )
+ # XXX: working breaks the move? Is that right?
+ working['status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ working['status'].tweak(source, status='D ')
+ working['status'].add({dest: Item(status='A ', copied='+', wc_rev='-')})
+ working['disk'] = test['up_disk']
+ test['resolves'] = {'mine-conflict': mc,
+ 'working': working}
+ test['revert_paths'] = [source_path, dest_path]
+ tests.append(test)
+
+ # move and update with incoming property modification to source (r14-15)
+ test = {}
+ test['start_rev'] = 14
+ test['end_rev'] = 15
+ test['start_output'] = None
+ test['start_disk'] = None
+ test['start_status'] = None
+ test['up_output'] = svntest.wc.State(wc_dir, {
+ source : Item(status=' ', treeconflict='C'),
+ })
+ test['up_disk'] = svntest.main.greek_state.copy()
+ test['up_disk'].add({
+ dest: Item("This is the replaced file.\n", props={u'foo': u'bar'})
+ })
+ test['up_disk'].remove(source)
+ test['up_status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ test['up_status'].tweak(source, status='D ', moved_to=dest, treeconflict='C')
+ test['up_status'].add({dest: Item(status='A ', copied='+', wc_rev='-',
+ moved_from=source)})
+ mc = {}
+ # TODO: Should check that the output includes that the update was applied to
+ # the destination
+ mc['output'] = svntest.verify.ExpectedOutput(
+ "Tree conflict at '%s' marked as resolved.\n" % source_path, match_all=False
+ )
+ mc['status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ mc['status'].tweak(source, status='D ', moved_to=dest)
+ mc['status'].add({dest: Item(status='A ', moved_from=source,
+ copied='+', wc_rev='-')})
+ mc['disk'] = test['up_disk'].copy()
+ mc['disk'].tweak(dest, props={u'foo': u'baz'})
+ working = {}
+ working['output'] = svntest.verify.ExpectedOutput(
+ [
+ "Breaking move with source path '%s'\n" % source_path,
+ "Tree conflict at '%s' marked as resolved.\n" % source_path
+ ]
+ )
+ # XXX: working breaks the move? Is that right?
+ working['status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ working['status'].tweak(source, status='D ')
+ working['status'].add({dest: Item(status='A ', copied='+', wc_rev='-')})
+ working['disk'] = test['up_disk']
+ test['resolves'] = {'mine-conflict': mc,
+ 'working': working}
+ test['revert_paths'] = [source_path, dest_path]
+ tests.append(test)
+
+ # move and update with incoming property deletion to source (r15-16)
+ test = {}
+ test['start_rev'] = 15
+ test['end_rev'] = 16
+ test['start_output'] = None
+ test['start_disk'] = None
+ test['start_status'] = None
+ test['up_output'] = svntest.wc.State(wc_dir, {
+ source : Item(status=' ', treeconflict='C'),
+ })
+ test['up_disk'] = svntest.main.greek_state.copy()
+ test['up_disk'].add({
+ dest: Item("This is the replaced file.\n", props={'foo': 'baz'})
+ })
+ test['up_disk'].remove(source)
+ test['up_status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ test['up_status'].tweak(source, status='D ', moved_to=dest, treeconflict='C')
+ test['up_status'].add({dest: Item(status='A ', copied='+', wc_rev='-',
+ moved_from=source)})
+ mc = {}
+ # TODO: Should check that the output includes that the update was applied to
+ # the destination
+ mc['output'] = svntest.verify.ExpectedOutput(
+ "Tree conflict at '%s' marked as resolved.\n" % source_path, match_all=False
+ )
+ mc['status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ mc['status'].tweak(source, status='D ', moved_to=dest)
+ mc['status'].add({dest: Item(status='A ', moved_from=source,
+ copied='+', wc_rev='-')})
+ mc['disk'] = test['up_disk'].copy()
+ mc['disk'].tweak(dest, props={})
+ working = {}
+ working['output'] = svntest.verify.ExpectedOutput(
+ [
+ "Breaking move with source path '%s'\n" % source_path,
+ "Tree conflict at '%s' marked as resolved.\n" % source_path
+ ]
+ )
+ # XXX: working breaks the move? Is that right?
+ working['status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ working['status'].tweak(source, status='D ')
+ working['status'].add({dest: Item(status='A ', copied='+', wc_rev='-')})
+ working['disk'] = test['up_disk']
+ test['resolves'] = {'mine-conflict': mc,
+ 'working': working}
+ test['revert_paths'] = [source_path, dest_path]
+ tests.append(test)
+
+ # move and update with incoming identical move (r16-17)
+ # XXX: It'd be really nice if we actually recognized this and the wc
+  # showed no conflict at all on update.
+ test = {}
+ test['start_rev'] = 16
+ test['end_rev'] = 17
+ test['start_output'] = None
+ test['start_disk'] = None
+ test['start_status'] = None
+ test['up_output'] = svntest.wc.State(wc_dir, {
+ source : Item(status=' ', treeconflict='C'),
+ dest : Item(status=' ', treeconflict='C'),
+ })
+ test['up_disk'] = svntest.main.greek_state.copy()
+ test['up_disk'].add({
+ dest: Item("This is the replaced file.\n")
+ })
+ test['up_disk'].remove(source)
+ test['up_status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ test['up_status'].tweak(source, status='! ', treeconflict='C', wc_rev=None)
+ test['up_status'].add({dest: Item(status='R ', copied='+', wc_rev='-',
+ treeconflict='C')})
+ # mine-conflict doesn't work.
+ mc = {}
+ mc['error'] = svntest.verify.RegexOutput(".*: .*: W195024:.*", match_all=False)
+ mc['status'] = test['up_status']
+ mc['disk'] = test['up_disk']
+ working = {}
+ working['output'] = svntest.verify.ExpectedOutput(
+ "Tree conflict at '%s' marked as resolved.\n" % source_path, match_all=False
+ )
+ # move is broken now
+ working['status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev'])
+ working['status'].add({dest: Item(status='R ', copied='+', wc_rev='-')})
+ working['status'].remove(source)
+ working['disk'] = test['up_disk']
+ working['revert_paths'] = [dest_path]
+ test['resolves'] = {'mine-conflict': mc,
+ 'working': working}
+ test['revert_paths'] = [dest_path, source_path]
+ tests.append(test)
+
+ return tests
+
+def build_simple_file_move_func(sbox, source, dest):
+  """Return a callable MOVE_FUNC(REV) that runs 'svn move' from SOURCE
+     to DEST inside SBOX's working copy and, on its first invocation
+     only, verifies the status and info output describing the move."""
+  wc_dir = sbox.wc_dir
+  source_path = sbox.ospath(source)
+  dest_path = sbox.ospath(dest)
+
+  # Setup the move function
+  def move_func(rev):
+    # execute the move
+    svntest.actions.run_and_verify_svn(None, [], "move",
+                                       source_path, dest_path)
+    # The checks below don't depend on which revision we started from
+    # beyond REV itself, so they run only the first time through (see
+    # the move_func.extra_mv_tests attribute set after this def).
+    if move_func.extra_mv_tests:
+      # SOURCE shows as deleted with a moved-to link; DEST shows as an
+      # added copy with the corresponding moved-from link.
+      mv_status = svntest.actions.get_virginal_state(wc_dir, rev)
+      mv_status.tweak(source, status='D ', moved_to=dest)
+      mv_status.add({dest: Item(status='A ', moved_from=source,
+                                copied='+', wc_rev='-')})
+      # 'svn info' on each end of the move must point at the other end.
+      mv_info_src = [
+        {
+          'Path' : re.escape(source_path),
+          'Moved To' : re.escape(sbox.ospath(dest)),
+        }
+      ]
+      mv_info_dst = [
+        {
+          'Path' : re.escape(dest_path),
+          'Moved From' : re.escape(sbox.ospath(source)),
+        }
+      ]
+
+      # check the status output.
+      svntest.actions.run_and_verify_status(wc_dir, mv_status)
+
+      # check the info output
+      svntest.actions.run_and_verify_info(mv_info_src, source_path)
+      svntest.actions.run_and_verify_info(mv_info_dst, dest_path)
+      # Disable the extra checks for all subsequent invocations.
+      move_func.extra_mv_tests = False
+
+  # Do the status and info tests the first time through
+  # No reason to repeat these tests for each of the variations below
+  # since the move is exactly the same.
+  move_func.extra_mv_tests = True
+
+  return move_func
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+#
+# See http://wiki.apache.org/subversion/LocalMoves
+
+def lateral_move_file_test(sbox):
+ "lateral (rename) move of a file test"
+ sbox.build()
+
+ # Plan to test moving A/B/lambda to A/B/lambda-moved
+ source = 'A/B/lambda'
+ dest = 'A/B/lambda-moved'
+
+ # Build the revisions to do the updates via
+ build_incoming_changes_file(sbox, source, dest)
+
+ # Get function to implement the actual move
+ move_func = build_simple_file_move_func(sbox, source, dest)
+
+ # Get the test plan
+ tests = build_simple_file_move_tests(sbox, source, dest)
+
+ # Actually run the tests
+ move_file_tests(sbox, source, dest, move_func, tests)
+
+def sibling_move_file_test(sbox):
+ "sibling move of a file test"
+ sbox.build()
+
+ # Plan to test moving A/B/lambda to A/C/lambda
+ source = 'A/B/lambda'
+ dest = 'A/C/lambda'
+
+ # Build the revisions to do the updates via
+ build_incoming_changes_file(sbox, source, dest)
+
+ # Get function to implement the actual move
+ move_func = build_simple_file_move_func(sbox, source, dest)
+
+ # Get the test plan
+ tests = build_simple_file_move_tests(sbox, source, dest)
+
+ # Actually run the tests
+ move_file_tests(sbox, source, dest, move_func, tests)
+
+def shallower_move_file_test(sbox):
+ "shallower move of a file test"
+ sbox.build()
+
+ # Plan to test moving A/B/lambda to A/lambda
+ source = 'A/B/lambda'
+ dest = 'A/lambda'
+
+ # Build the revisions to do the updates via
+ build_incoming_changes_file(sbox, source, dest)
+
+ # Get function to implement the actual move
+ move_func = build_simple_file_move_func(sbox, source, dest)
+
+ # Get the test plan
+ tests = build_simple_file_move_tests(sbox, source, dest)
+
+ # Actually run the tests
+ move_file_tests(sbox, source, dest, move_func, tests)
+
+def deeper_move_file_test(sbox):
+ "deeper move of a file test"
+ sbox.build()
+
+ # Plan to test moving A/B/lambda to A/B/F/lambda
+ source = 'A/B/lambda'
+ dest = 'A/B/F/lambda'
+
+ # Build the revisions to do the updates via
+ build_incoming_changes_file(sbox, source, dest)
+
+ # Get function to implement the actual move
+ move_func = build_simple_file_move_func(sbox, source, dest)
+
+ # Get the test plan
+ tests = build_simple_file_move_tests(sbox, source, dest)
+
+ # Actually run the tests
+ move_file_tests(sbox, source, dest, move_func, tests)
+
+
+def property_merge(sbox):
+  "test property merging on move-update"
+
+  # Matrix of cases exercised below; rows 1-5 map to nodes D1/f1 .. D5/f5
+  # (properties p1 = key1, p2 = key2):
+  #
+  #      pristine   local    incoming   outcome               revert
+  # 1               p1 v2    p2 v2      p1 v2, p2 v2          p2 v2
+  # 2    p1 v1      p1 v2    p2 v2      p1 v2, p2 v2          p1 v1 p2 v2
+  # 3    p1 v1      p1 v2    p1 v2      p1 v2                 p1 v2
+  # 4               p1 v2    p1 v3      p1 v2  conflict       p1 v3
+  # 5    p1 v1      p1 v2    p1 v3      p1 v2  conflict       p1 v3
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # r2: five dirs and five files under A/C; key1=value1 on rows 2, 3, 5.
+  sbox.simple_mkdir('A/C/D1')
+  sbox.simple_mkdir('A/C/D2')
+  sbox.simple_mkdir('A/C/D3')
+  sbox.simple_mkdir('A/C/D4')
+  sbox.simple_mkdir('A/C/D5')
+  sbox.simple_add_text('content of f1', 'A/C/f1')
+  sbox.simple_add_text('content of f2', 'A/C/f2')
+  sbox.simple_add_text('content of f3', 'A/C/f3')
+  sbox.simple_add_text('content of f4', 'A/C/f4')
+  sbox.simple_add_text('content of f5', 'A/C/f5')
+  sbox.simple_propset('key1', 'value1',
+                      'A/C/D2', 'A/C/D3', 'A/C/D5',
+                      'A/C/f2', 'A/C/f3', 'A/C/f5')
+  sbox.simple_commit()
+  # r3: the incoming side of the merge: add key2 to rows 1-2, change
+  # key1 to value2 on row 3 and to value3 on rows 4-5.
+  sbox.simple_propset('key2', 'value2',
+                      'A/C/D1', 'A/C/D2',
+                      'A/C/f1', 'A/C/f2')
+  sbox.simple_propset('key1', 'value2',
+                      'A/C/D3',
+                      'A/C/f3')
+  sbox.simple_propset('key1', 'value3',
+                      'A/C/D4', 'A/C/D5',
+                      'A/C/f4', 'A/C/f5')
+  sbox.simple_commit()
+  # Back at r2, make the local side: key1=value2 everywhere, then move
+  # A/C to A/C2 so the incoming r3 changes must merge across the move.
+  sbox.simple_update('', 2)
+  sbox.simple_propset('key1', 'value2',
+                      'A/C/D1', 'A/C/D2', 'A/C/D3', 'A/C/D4', 'A/C/D5',
+                      'A/C/f1', 'A/C/f2', 'A/C/f3', 'A/C/f4', 'A/C/f5')
+  sbox.simple_move('A/C', 'A/C2')
+
+  # Everything under A/C is deleted by the move; A/C2 is the added copy
+  # carrying the local property modifications.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+  expected_status.tweak('A/C', status='D ', moved_to='A/C2')
+  expected_status.add({
+    'A/C/D1' : Item(status='D ', wc_rev=2),
+    'A/C/D2' : Item(status='D ', wc_rev=2),
+    'A/C/D3' : Item(status='D ', wc_rev=2),
+    'A/C/D4' : Item(status='D ', wc_rev=2),
+    'A/C/D5' : Item(status='D ', wc_rev=2),
+    'A/C/f1' : Item(status='D ', wc_rev=2),
+    'A/C/f2' : Item(status='D ', wc_rev=2),
+    'A/C/f3' : Item(status='D ', wc_rev=2),
+    'A/C/f4' : Item(status='D ', wc_rev=2),
+    'A/C/f5' : Item(status='D ', wc_rev=2),
+    'A/C2' : Item(status='A ', copied='+', wc_rev='-', moved_from='A/C'),
+    'A/C2/D1' : Item(status=' M', copied='+', wc_rev='-'),
+    'A/C2/D2' : Item(status=' M', copied='+', wc_rev='-'),
+    'A/C2/D3' : Item(status=' M', copied='+', wc_rev='-'),
+    'A/C2/D4' : Item(status=' M', copied='+', wc_rev='-'),
+    'A/C2/D5' : Item(status=' M', copied='+', wc_rev='-'),
+    'A/C2/f1' : Item(status=' M', copied='+', wc_rev='-'),
+    'A/C2/f2' : Item(status=' M', copied='+', wc_rev='-'),
+    'A/C2/f3' : Item(status=' M', copied='+', wc_rev='-'),
+    'A/C2/f4' : Item(status=' M', copied='+', wc_rev='-'),
+    'A/C2/f5' : Item(status=' M', copied='+', wc_rev='-'),
+    })
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Update to r3 and resolve the move's tree conflict so the incoming
+  # property changes get merged into the move destination A/C2.
+  sbox.simple_update()
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'resolve',
+                                     '--accept=mine-conflict',
+                                     sbox.ospath('A/C'))
+
+  expected_status.tweak(wc_rev=3)
+  expected_status.tweak('A/C2',
+                        'A/C2/D1', 'A/C2/D2', 'A/C2/D3', 'A/C2/D4', 'A/C2/D5',
+                        'A/C2/f1', 'A/C2/f2', 'A/C2/f3', 'A/C2/f4', 'A/C2/f5',
+                        wc_rev='-')
+  # Row 3 (identical local and incoming change) merges cleanly ...
+  expected_status.tweak('A/C2/D3',
+                        'A/C2/f3',
+                        status='  ')
+  # ... while rows 4 and 5 conflict.
+  expected_status.tweak('A/C2/D4', 'A/C2/D5',
+                        'A/C2/f4', 'A/C2/f5',
+                        status=' C')
+
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Merged property values, plus the .prej files recording the row 4/5
+  # conflicts (the local value2 wins on disk in all conflicted cases).
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.remove('A/C')
+  expected_disk.add({
+    'A/C2' : Item(),
+    'A/C2/D1' : Item(props={'key1' : 'value2', 'key2' : 'value2'}),
+    'A/C2/D2' : Item(props={'key1' : 'value2', 'key2' : 'value2'}),
+    'A/C2/D3' : Item(props={'key1' : 'value2'}),
+    'A/C2/D4' : Item(props={'key1' : 'value2'}),
+    'A/C2/D5' : Item(props={'key1' : 'value2'}),
+    'A/C2/f1' : Item(contents='content of f1',
+                     props={'key1' : 'value2', 'key2' : 'value2'}),
+    'A/C2/f2' : Item(contents='content of f2',
+                     props={'key1' : 'value2', 'key2' : 'value2'}),
+    'A/C2/f3' : Item(contents='content of f3',
+                     props={'key1' : 'value2'}),
+    'A/C2/f4' : Item(contents='content of f4',
+                     props={'key1' : 'value2'}),
+    'A/C2/f5' : Item(contents='content of f5',
+                     props={'key1' : 'value2'}),
+    'A/C2/D4/dir_conflicts.prej' : Item(contents=
+"""Trying to add new property 'key1'
+but the property already exists.
+<<<<<<< (local property value)
+value2||||||| (incoming 'changed from' value)
+=======
+value3>>>>>>> (incoming 'changed to' value)
+"""),
+    'A/C2/D5/dir_conflicts.prej' : Item(contents=
+"""Trying to change property 'key1'
+but the property has already been locally changed to a different value.
+<<<<<<< (local property value)
+value2||||||| (incoming 'changed from' value)
+value1=======
+value3>>>>>>> (incoming 'changed to' value)
+"""),
+    'A/C2/f4.prej' : Item(contents=
+"""Trying to add new property 'key1'
+but the property already exists.
+<<<<<<< (local property value)
+value2||||||| (incoming 'changed from' value)
+=======
+value3>>>>>>> (incoming 'changed to' value)
+"""),
+    'A/C2/f5.prej' : Item(contents=
+"""Trying to change property 'key1'
+but the property has already been locally changed to a different value.
+<<<<<<< (local property value)
+value2||||||| (incoming 'changed from' value)
+value1=======
+value3>>>>>>> (incoming 'changed to' value)
+"""),
+    })
+
+  svntest.actions.verify_disk(wc_dir, expected_disk, True)
+
+  # Reverting discards the local property modifications and the .prej
+  # files; the incoming (r3) values remain.
+  sbox.simple_revert('A/C2/D1', 'A/C2/D2', 'A/C2/D4', 'A/C2/D5',
+                     'A/C2/f1', 'A/C2/f2', 'A/C2/f4', 'A/C2/f5')
+
+  expected_status.tweak('A/C2/D1', 'A/C2/D2', 'A/C2/D4', 'A/C2/D5',
+                        'A/C2/f1', 'A/C2/f2', 'A/C2/f4', 'A/C2/f5',
+                        status='  ')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  expected_disk.remove('A/C2/D4/dir_conflicts.prej',
+                       'A/C2/D5/dir_conflicts.prej',
+                       'A/C2/f4.prej',
+                       'A/C2/f5.prej')
+  expected_disk.tweak('A/C2/D1',
+                      'A/C2/f1',
+                      props={'key2' : 'value2'})
+  expected_disk.tweak('A/C2/D2',
+                      'A/C2/f2',
+                      props={'key1' : 'value1', 'key2' : 'value2'})
+  expected_disk.tweak('A/C2/D4', 'A/C2/D5',
+                      'A/C2/f4', 'A/C2/f5',
+                      props={'key1' : 'value3'})
+  svntest.actions.verify_disk(wc_dir, expected_disk, True)
+
+
+@Issue(4356)
+def move_missing(sbox):
+ "move a missing directory"
+
+ sbox.build(read_only=True)
+ wc_dir = sbox.wc_dir
+
+ svntest.main.safe_rmtree(sbox.ospath('A/D/G'))
+
+ expected_err = '.*Can\'t move \'.*G\' to \'.*R\':.*'
+
+ # This move currently fails halfway between adding the dest and
+ # deleting the source
+ svntest.actions.run_and_verify_svn(None, expected_err,
+ 'mv', sbox.ospath('A/D/G'),
+ sbox.ospath('R'))
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/G', 'A/D/G/tau', 'A/D/G/pi', 'A/D/G/rho',
+ status='! ', entry_status=' ')
+
+ # Verify that the status processing doesn't crash
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # The issue is a crash when the destination is present
+ os.mkdir(sbox.ospath('R'))
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+def nested_replaces(sbox):
+  "nested replaces"
+
+  sbox.build(create_wc=False, empty=True)
+  repo_url = sbox.repo_url
+  wc_dir = sbox.wc_dir
+  ospath = sbox.ospath
+
+  ## r1: setup
+  # Two parallel three-level trees A/B/C and X/Y/Z, each level holding a
+  # sentinel file so every directory's origin stays identifiable.
+  svntest.actions.run_and_verify_svnmucc(None, [],
+                                         '-U', repo_url,
+                                         '-m', 'r1: create tree',
+                                         'mkdir', 'A', 'mkdir', 'A/B', 'mkdir', 'A/B/C',
+                                         'mkdir', 'X', 'mkdir', 'X/Y', 'mkdir', 'X/Y/Z',
+                                         # sentinel files
+                                         'put', os.devnull, 'A/a',
+                                         'put', os.devnull, 'A/B/b',
+                                         'put', os.devnull, 'A/B/C/c',
+                                         'put', os.devnull, 'X/x',
+                                         'put', os.devnull, 'X/Y/y',
+                                         'put', os.devnull, 'X/Y/Z/z')
+
+  svntest.main.run_svn(None, 'checkout', '-q', repo_url, wc_dir)
+  r1_status = svntest.wc.State(wc_dir, {
+    '' : Item(status='  ', wc_rev='1'),
+    'A' : Item(status='  ', wc_rev='1'),
+    'A/B' : Item(status='  ', wc_rev='1'),
+    'A/B/C' : Item(status='  ', wc_rev='1'),
+    'X' : Item(status='  ', wc_rev='1'),
+    'X/Y' : Item(status='  ', wc_rev='1'),
+    'X/Y/Z' : Item(status='  ', wc_rev='1'),
+    'A/a' : Item(status='  ', wc_rev='1'),
+    'A/B/b' : Item(status='  ', wc_rev='1'),
+    'A/B/C/c' : Item(status='  ', wc_rev='1'),
+    'X/x' : Item(status='  ', wc_rev='1'),
+    'X/Y/y' : Item(status='  ', wc_rev='1'),
+    'X/Y/Z/z' : Item(status='  ', wc_rev='1'),
+    })
+  svntest.actions.run_and_verify_status(wc_dir, r1_status)
+
+  ## r2: juggling
+  # Swap the two trees level by level; each destination was vacated by
+  # an earlier move, producing nested replacements at every level.
+  moves = [
+    ('A', 'A2'),
+    ('X', 'X2'),
+    ('A2/B/C', 'X'),
+    ('X2/Y/Z', 'A'),
+    ('A2/B', 'A/B'),
+    ('X2/Y', 'X/Y'),
+    ('A2', 'X/Y/Z'),
+    ('X2', 'A/B/C'),
+  ]
+  for src, dst in moves:
+    svntest.main.run_svn(None, 'mv', ospath(src), ospath(dst))
+  r2_status = svntest.wc.State(wc_dir, {
+    '' : Item(status='  ', wc_rev='1'),
+    'A' : Item(status='R ', copied='+', moved_from='X/Y/Z', moved_to='X/Y/Z', wc_rev='-'),
+    'A/B' : Item(status='A ', copied='+', moved_from='X/Y/Z/B', wc_rev='-', entry_status='R '),
+    'A/B/C' : Item(status='R ', copied='+', moved_from='X', moved_to='X', wc_rev='-'),
+    'A/B/C/Y' : Item(status='D ', copied='+', wc_rev='-', moved_to='X/Y'),
+    'A/B/C/Y/y' : Item(status='D ', copied='+', wc_rev='-'),
+    'A/B/C/Y/Z' : Item(status='D ', copied='+', wc_rev='-'),
+    'A/B/C/Y/Z/z':Item(status='D ', copied='+', wc_rev='-'),
+    'X' : Item(status='R ', copied='+', moved_from='A/B/C', moved_to='A/B/C', wc_rev='-'),
+    'X/Y' : Item(status='A ', copied='+', moved_from='A/B/C/Y', wc_rev='-', entry_status='R '),
+    'X/Y/Z' : Item(status='R ', copied='+', moved_from='A', moved_to='A', wc_rev='-'),
+    'X/Y/Z/B' : Item(status='D ', copied='+', wc_rev='-', moved_to='A/B'),
+    'X/Y/Z/B/b' : Item(status='D ', copied='+', wc_rev='-'),
+    'X/Y/Z/B/C' : Item(status='D ', copied='+', wc_rev='-'),
+    'X/Y/Z/B/C/c':Item(status='D ', copied='+', wc_rev='-'),
+    'A/a' : Item(status='D ', wc_rev='1'),
+    'A/B/b' : Item(status='D ', wc_rev='1'),
+    'A/B/C/c' : Item(status='D ', copied='+', wc_rev='-'),
+    'X/x' : Item(status='D ', wc_rev='1'),
+    'X/Y/y' : Item(status='D ', wc_rev='1'),
+    'X/Y/Z/z' : Item(status='D ', copied='+', wc_rev='-'),
+    # NOTE(review): 'A/B/b' and 'X/Y/y' below repeat keys from above;
+    # Python dict literals keep only these later (copied) entries.
+    'X/c' : Item(status='  ', copied='+', wc_rev='-'),
+    'A/z' : Item(status='  ', copied='+', wc_rev='-'),
+    'A/B/b' : Item(status='  ', copied='+', wc_rev='-'),
+    'X/Y/y' : Item(status='  ', copied='+', wc_rev='-'),
+    'X/Y/Z/a' : Item(status='  ', copied='+', wc_rev='-'),
+    'A/B/C/x' : Item(status='  ', copied='+', wc_rev='-'),
+    })
+  svntest.actions.run_and_verify_status(wc_dir, r2_status)
+
+  # The committed r2 must record a replacement-with-history per level
+  # plus the deletions of the two subtrees moved aside.
+  svntest.main.run_svn(None, 'commit', '-m', 'r2: juggle the tree', wc_dir)
+  escaped = svntest.main.ensure_list(map(re.escape, [
+    '   R /A (from /X/Y/Z:1)',
+    '   A /A/B (from /A/B:1)',
+    '   R /A/B/C (from /X:1)',
+    '   R /X (from /A/B/C:1)',
+    '   A /X/Y (from /X/Y:1)',
+    '   R /X/Y/Z (from /A:1)',
+    '   D /X/Y/Z/B',
+    '   D /A/B/C/Y',
+  ]))
+  expected_output = svntest.verify.UnorderedRegexListOutput(escaped
+                    + [ '^-', '^r2', '^-', '^Changed paths:', ])
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'log', '-qvr2', repo_url)
+
+  ## Test updating to r1.
+  # Updating back must unwind the whole juggle to the pristine r1 state.
+  svntest.main.run_svn(None, 'update', '-r1', wc_dir)
+  svntest.actions.run_and_verify_status(wc_dir, r1_status)
+
+def setup_move_many(sbox):
+  "helper function which creates a wc with node A/A/A which is moved 3 times"
+
+  # r2: three identical A/A/A/A-shaped trees under A, B and C; iota is
+  # removed to keep the status baseline small.
+  sbox.simple_rm('A', 'iota')
+  sbox.simple_mkdir('A',
+                    'A/A',
+                    'A/A/A',
+                    'A/A/A/A',
+                    'B',
+                    'B/A',
+                    'B/A/A',
+                    'B/A/A/A',
+                    'C',
+                    'C/A',
+                    'C/A/A',
+                    'C/A/A/A')
+  sbox.simple_commit()
+  sbox.simple_update()
+
+  # 1st move: the original A/A/A goes to AAA_1.
+  sbox.simple_move('A/A/A', 'AAA_1')
+
+  # Replace A wholesale with B ...
+  sbox.simple_rm('A')
+  sbox.simple_move('B', 'A')
+
+  # ... so a fresh A/A/A (ex B/A/A) exists for the 2nd move.
+  sbox.simple_move('A/A/A', 'AAA_2')
+
+  # Replace A/A with C/A, providing yet another A/A/A ...
+  sbox.simple_rm('A/A')
+  sbox.simple_move('C/A', 'A/A')
+
+  # ... for the 3rd move.
+  sbox.simple_move('A/A/A', 'AAA_3')
+
+def move_many_status(wc_dir):
+  "obtain standard status after setup_move_many"
+
+  # The three move destinations each carry a moved_from link back to
+  # A/A/A; A and A/A are replacements with history; the B tree and C/A
+  # are deleted by the replacing moves.
+  return svntest.wc.State(wc_dir, {
+      '' : Item(status='  ', wc_rev='2'),
+
+      'AAA_1' : Item(status='A ', copied='+', moved_from='A/A/A', wc_rev='-'),
+      'AAA_1/A' : Item(status='  ', copied='+', wc_rev='-'),
+
+      'AAA_2' : Item(status='A ', copied='+', moved_from='A/A/A', wc_rev='-'),
+      'AAA_2/A' : Item(status='  ', copied='+', wc_rev='-'),
+
+      'AAA_3' : Item(status='A ', copied='+', moved_from='A/A/A', wc_rev='-'),
+      'AAA_3/A' : Item(status='  ', copied='+', wc_rev='-'),
+
+      'A' : Item(status='R ', copied='+', moved_from='B', wc_rev='-'),
+      'A/A' : Item(status='R ', copied='+', moved_from='C/A', wc_rev='-'),
+      'A/A/A' : Item(status='D ', copied='+', wc_rev='-', moved_to='AAA_3'),
+      'A/A/A/A' : Item(status='D ', copied='+', wc_rev='-'),
+
+      'B' : Item(status='D ', wc_rev='2', moved_to='A'),
+      'B/A' : Item(status='D ', wc_rev='2'),
+      'B/A/A' : Item(status='D ', wc_rev='2'),
+      'B/A/A/A' : Item(status='D ', wc_rev='2'),
+
+      'C' : Item(status='  ', wc_rev='2'),
+      'C/A' : Item(status='D ', wc_rev='2', moved_to='A/A'),
+      'C/A/A' : Item(status='D ', wc_rev='2'),
+      'C/A/A/A' : Item(status='D ', wc_rev='2'),
+    })
+
+def move_many_update_delete(sbox):
+  "move many and delete-on-update"
+
+  sbox.build()
+  setup_move_many(sbox)
+
+  wc_dir = sbox.wc_dir
+
+  # Verify start situation
+  expected_status = move_many_status(wc_dir)
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # And now create a tree conflict
+  # Deleting ^/B in the repository conflicts with the local 'B -> A'
+  # replacement move made by setup_move_many.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'rm', sbox.repo_url + '/B',
+                                     '-m', '')
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'B' : Item(status='  ', treeconflict='C'),
+    })
+
+
+  # After the update, B is gone from the repository: its children drop
+  # out of status, A loses its moved_from link, and B itself becomes a
+  # conflicted missing node.
+  expected_status.tweak('', 'C', 'C/A', 'C/A/A', 'C/A/A/A', wc_rev='3')
+  expected_status.tweak('A', moved_from=None)
+  expected_status.remove('B/A', 'B/A/A', 'B/A/A/A')
+  expected_status.tweak('B', status='! ', treeconflict='C', wc_rev=None, moved_to=None)
+
+  svntest.actions.run_and_verify_update(wc_dir, expected_output, None,
+                                        expected_status)
+
+  # Would be nice if we could run the resolver as a separate step,
+  # but 'svn resolve' just fails for any value but working
+
+def move_many_update_add(sbox):
+  "move many and add-on-update"
+
+  sbox.build()
+  setup_move_many(sbox)
+
+  wc_dir = sbox.wc_dir
+
+  # Verify start situation
+  expected_status = move_many_status(wc_dir)
+  #svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # And now create a tree conflict
+  # r3 adds a directory inside the B tree, which was locally moved to A.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'mkdir', sbox.repo_url + '/B/A/A/BB',
+                                     '-m', '')
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'B' : Item(status='  ', treeconflict='C'),
+    'B/A' : Item(status='  ', treeconflict='U'),
+    'B/A/A' : Item(status='  ', treeconflict='U'),
+    'B/A/A/BB' : Item(status='  ', treeconflict='A'),
+    # And while resolving
+    'A/A' : Item(status='  ', treeconflict='C'),
+    'A/A/A' : Item(status='  ', treeconflict='C'),
+    'AAA_2/BB' : Item(status='A '),
+    })
+
+  expected_status.tweak('',
+                        'B', 'B/A', 'B/A/A', 'B/A/A/A',
+                        'C', 'C/A', 'C/A/A', 'C/A/A/A',
+                        wc_rev='3')
+
+  # 'mine-conflict' follows the chain of moves: the incoming BB shows as
+  # deleted along the move sources and materializes at AAA_2.
+  expected_status.add({
+    'A/A/A/BB' : Item(status='D ', copied='+', wc_rev='-'),
+    'B/A/A/BB' : Item(status='D ', wc_rev='3'),
+    'AAA_2/BB' : Item(status='  ', copied='+', wc_rev='-'),
+    })
+
+  svntest.actions.run_and_verify_update(wc_dir, expected_output, None,
+                                        expected_status,
+                                        [], False,
+                                        wc_dir, '--accept', 'mine-conflict')
+
+  # And another one
+  # r4 adds a directory in the C tree, whose C/A was locally moved to
+  # A/A; again the addition must follow the moves through to AAA_3.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'mkdir', sbox.repo_url + '/C/A/A/BB',
+                                     '-m', '')
+
+  expected_status.tweak('',
+                        'B', 'B/A', 'B/A/A', 'B/A/A/A',
+                        'C', 'C/A', 'C/A/A', 'C/A/A/A',
+                        wc_rev='4')
+
+  expected_status.add({
+    'B/A/A/BB' : Item(status='D ', wc_rev='4'),
+    'C/A/A/BB' : Item(status='D ', wc_rev='4'),
+    'AAA_3/BB' : Item(status='  ', copied='+', wc_rev='-'),
+    })
+
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/A/A' : Item(status='  ', treeconflict='C'),
+    'C/A' : Item(status='  ', treeconflict='C'),
+    'C/A/A' : Item(status='  ', treeconflict='U'),
+    'C/A/A/BB' : Item(status='  ', treeconflict='A'),
+    'AAA_3/BB' : Item(status='A '),
+    })
+
+  # This currently triggers an assertion failure
+  svntest.actions.run_and_verify_update(wc_dir, expected_output, None,
+                                        expected_status,
+                                        [], False,
+                                        wc_dir, '--accept', 'mine-conflict')
+
+@Issue(4437)
+def move_del_moved(sbox):
+ "delete moved node, still a move"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_mkdir('A/NEW')
+ sbox.simple_move('A/mu', 'A/NEW/mu')
+ sbox.simple_rm('A/NEW/mu')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='D ')
+ expected_status.add({
+ 'A/NEW' : Item(status='A ', wc_rev='-')
+ })
+
+ # A/mu still reports that it is moved to A/NEW/mu, while it is already
+ # deleted there.
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+def copy_move_commit(sbox):
+ "copy, move and commit"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ #repro
+ # Prepare
+ # - Create folder aaa
+ # - Add file bbb.sql
+ # create table bbb (Id int not null)
+ # - Commit
+ # Repro Issue 2
+ # - Copy folder aaa under same parent folder (i.e. as a sibling). (using Ctrl drag/drop).
+ # Creates Copy of aaa
+ # - Rename Copy of aaa to eee
+ # - Commit
+ # Get error need to update
+ # - Update
+ # - Commit
+ # Get error need to update
+
+ sbox.simple_copy('A/D/G', 'A/D/GG')
+ sbox.simple_move('A/D/GG', 'A/D/GG-moved')
+ sbox.simple_commit('A/D/GG-moved')
+
+def move_to_from_external(sbox):
+ "move to and from an external"
+
+ sbox.build()
+ sbox.simple_propset('svn:externals', '^/A/D/G GG', '')
+ sbox.simple_update()
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'move',
+ sbox.ospath('GG/tau'),
+ sbox.ospath('tau'))
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'move',
+ sbox.ospath('iota'),
+ sbox.ospath('GG/tau'))
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'Commit both',
+ sbox.ospath(''),
+ sbox.ospath('GG'))
+
+def revert_del_root_of_move(sbox):
+  "revert delete root of move"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  # r2 adds a file in both E and F so the update has incoming edits to
+  # deliver inside the locally moved/deleted subtree.
+  sbox.simple_copy('A/mu', 'A/B/E/mu')
+  sbox.simple_copy('A/mu', 'A/B/F/mu')
+  sbox.simple_commit()
+  sbox.simple_update('', 1)
+  # Locally: move E out of B, then delete B itself (the move's root).
+  sbox.simple_move('A/B/E', 'E')
+  sbox.simple_rm('A/B')
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/B' : Item(status='  ', treeconflict='C'),
+    'A/B/E' : Item(status='  ', treeconflict='U'),
+    'A/B/E/mu' : Item(status='  ', treeconflict='A'),
+    'A/B/F' : Item(status='  ', treeconflict='U'),
+    'A/B/F/mu' : Item(status='  ', treeconflict='A'),
+    })
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+  expected_status.tweak('A/B', status='D ', treeconflict='C')
+  expected_status.tweak('A/B/E', status='D ', moved_to='E')
+  expected_status.tweak('A/B/F', 'A/B/lambda', 'A/B/E/alpha', 'A/B/E/beta',
+                        status='D ')
+  expected_status.add({
+    'A/B/F/mu' : Item(status='D ', wc_rev='2'),
+    'A/B/E/mu' : Item(status='D ', wc_rev='2'),
+    'E' : Item(status='A ', copied='+', moved_from='A/B/E', wc_rev='-'),
+    'E/beta' : Item(status='  ', copied='+', wc_rev='-'),
+    'E/alpha' : Item(status='  ', copied='+', wc_rev='-'),
+    })
+
+  svntest.actions.run_and_verify_update(wc_dir, expected_output, None,
+                                        expected_status)
+
+  # Reverting only the deletion of A/B (depth empty) revives it, and
+  # the still-moved-away child E surfaces as a new tree conflict.
+  expected_output = [
+    "Reverted '%s'\n" % sbox.ospath('A/B'), # Reverted
+    "   C %s\n" % sbox.ospath('A/B/E') # New tree conflict
+  ]
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'revert', sbox.ospath('A/B'),
+                                     '--depth', 'empty')
+
+  expected_status.tweak('A/B', status='  ', treeconflict=None)
+  expected_status.tweak('A/B/E', treeconflict='C')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+def move_conflict_details(sbox):
+ "move conflict details"
+
+ sbox.build()
+
+ sbox.simple_append('A/B/E/new', 'new\n')
+ sbox.simple_add('A/B/E/new')
+ sbox.simple_append('A/B/E/alpha', '\nextra\nlines\n')
+ sbox.simple_rm('A/B/E/beta', 'A/B/F')
+ sbox.simple_propset('key', 'VAL', 'A/B/E', 'A/B')
+ sbox.simple_mkdir('A/B/E/new-dir1')
+ sbox.simple_mkdir('A/B/E/new-dir2')
+ sbox.simple_mkdir('A/B/E/new-dir3')
+ sbox.simple_rm('A/B/lambda')
+ sbox.simple_mkdir('A/B/lambda')
+ sbox.simple_commit()
+
+ sbox.simple_update('', 1)
+
+ sbox.simple_move('A/B', 'B')
+
+ sbox.simple_update('', 2)
+
+ expected_info = [
+ {
+ "Moved To": re.escape(sbox.ospath("B")),
+ "Tree conflict": re.escape(
+ 'local dir moved away, incoming dir edit upon update' +
+ ' Source left: (dir) ^/A/B@1' +
+ ' Source right: (dir) ^/A/B@2')
+ }
+ ]
+ svntest.actions.run_and_verify_info(expected_info, sbox.ospath('A/B'))
+
+ sbox.simple_propset('key', 'vAl', 'B')
+ sbox.simple_move('B/E/beta', 'beta')
+ sbox.simple_propset('a', 'b', 'B/F', 'B/lambda')
+ sbox.simple_append('B/E/alpha', 'other\nnew\nlines')
+ sbox.simple_mkdir('B/E/new')
+ sbox.simple_mkdir('B/E/new-dir1')
+ sbox.simple_append('B/E/new-dir2', 'something')
+ sbox.simple_append('B/E/new-dir3', 'something')
+ sbox.simple_add('B/E/new-dir3')
+
+
+ expected_output = [
+ " C %s\n" % sbox.ospath('B'), # Property conflicted
+ " U %s\n" % sbox.ospath('B/E'), # Just updated
+ "C %s\n" % sbox.ospath('B/E/alpha'), # Text conflicted
+ " C %s\n" % sbox.ospath('B/E/beta'),
+ " C %s\n" % sbox.ospath('B/E/new'),
+ " C %s\n" % sbox.ospath('B/E/new-dir1'),
+ " C %s\n" % sbox.ospath('B/E/new-dir2'),
+ " C %s\n" % sbox.ospath('B/E/new-dir3'),
+ " C %s\n" % sbox.ospath('B/F'),
+ " C %s\n" % sbox.ospath('B/lambda'),
+ "Updated to revision 2.\n",
+ "Tree conflict at '%s' marked as resolved.\n" % sbox.ospath('A/B')
+ ]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'resolve', sbox.ospath('A/B'),
+ '--depth', 'empty',
+ '--accept', 'mine-conflict')
+
+ expected_info = [
+ {
+ "Path" : re.escape(sbox.ospath('B')),
+ "Conflicted Properties" : "key",
+ "Conflict Details": re.escape(
+ 'incoming dir edit upon update' +
+ ' Source left: (dir) ^/A/B@1' +
+ ' Source right: (dir) ^/A/B@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('B/E')),
+ },
+ {
+ "Path" : re.escape(sbox.ospath('B/E/alpha')),
+ "Conflict Previous Base File" : '.*alpha.*',
+ "Conflict Previous Working File" : '.*alpha.*',
+ "Conflict Current Base File": '.*alpha.*',
+ "Conflict Details": re.escape(
+ 'incoming file edit upon update' +
+ ' Source left: (file) ^/A/B/E/alpha@1' +
+ ' Source right: (file) ^/A/B/E/alpha@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('B/E/beta')),
+ "Tree conflict": re.escape(
+ 'local file moved away, incoming file delete or move upon update' +
+ ' Source left: (file) ^/A/B/E/beta@1' +
+ ' Source right: (none) ^/A/B/E/beta@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('B/E/new')),
+ "Tree conflict": re.escape(
+ 'local dir add, incoming file add upon update' +
+ ' Source left: (none) ^/A/B/E/new@1' +
+ ' Source right: (file) ^/A/B/E/new@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('B/E/new-dir1')),
+ "Tree conflict": re.escape(
+ 'local dir add, incoming dir add upon update' +
+ ' Source left: (none) ^/A/B/E/new-dir1@1' +
+ ' Source right: (dir) ^/A/B/E/new-dir1@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('B/E/new-dir2')),
+ "Tree conflict": re.escape(
+ 'local file unversioned, incoming dir add upon update' +
+ ' Source left: (none) ^/A/B/E/new-dir2@1' +
+ ' Source right: (dir) ^/A/B/E/new-dir2@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('B/E/new-dir3')),
+ "Tree conflict": re.escape(
+ 'local file add, incoming dir add upon update' +
+ ' Source left: (none) ^/A/B/E/new-dir3@1' +
+ ' Source right: (dir) ^/A/B/E/new-dir3@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('B/F')),
+ "Tree conflict": re.escape(
+ 'local dir edit, incoming dir delete or move upon update' +
+ ' Source left: (dir) ^/A/B/F@1' +
+ ' Source right: (none) ^/A/B/F@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('B/lambda')),
+ "Tree conflict": re.escape(
+ 'local file edit, incoming replace with dir upon update' +
+ ' Source left: (file) ^/A/B/lambda@1' +
+ ' Source right: (dir) ^/A/B/lambda@2')
+ },
+ ]
+
+ svntest.actions.run_and_verify_info(expected_info, sbox.ospath('B'),
+ '--depth', 'infinity')
+
+def move_conflict_markers(sbox):
+ "move conflict markers"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ sbox.simple_propset('key','val', 'iota', 'A/B/E', 'A/B/E/beta')
+ sbox.simple_commit()
+ sbox.simple_update('', 1)
+ sbox.simple_propset('key','false', 'iota', 'A/B/E', 'A/B/E/beta')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(status=' C'),
+ 'A/B/E/beta' : Item(status=' C'),
+ 'iota' : Item(status=' C'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('iota', 'A/B/E', 'A/B/E/beta', status=' C')
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/B/E/dir_conflicts.prej' : Item(contents=
+ "Trying to add new property 'key'\n"
+ "but the property already exists.\n"
+ "<<<<<<< (local property value)\n"
+ "false||||||| (incoming 'changed from' value)\n"
+ "=======\n"
+ "val>>>>>>> (incoming 'changed to' value)\n"),
+ 'A/B/E/beta.prej' : Item(contents=
+ "Trying to add new property 'key'\n"
+ "but the property already exists.\n"
+ "<<<<<<< (local property value)\n"
+ "false||||||| (incoming 'changed from' value)\n"
+ "=======\n"
+ "val>>>>>>> (incoming 'changed to' value)\n"),
+ 'iota.prej' : Item(contents=
+ "Trying to add new property 'key'\n"
+ "but the property already exists.\n"
+ "<<<<<<< (local property value)\n"
+ "false||||||| (incoming 'changed from' value)\n"
+ "=======\n"
+ "val>>>>>>> (incoming 'changed to' value)\n"),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+ sbox.simple_move('iota', 'A/iotb')
+ sbox.simple_move('A/B/E', 'E')
+
+ expected_status.tweak('iota', status='D ', moved_to='A/iotb')
+ expected_status.tweak('A/B/E', status='D ', moved_to='E')
+ expected_status.tweak('A/B/E/alpha', 'A/B/E/beta', status='D ')
+ expected_status.add({
+ 'A/iotb' : Item(status='A ', copied='+', moved_from='iota', wc_rev='-'),
+ 'E' : Item(status='A ', copied='+', moved_from='A/B/E', wc_rev='-'),
+ 'E/beta' : Item(status=' M', copied='+', wc_rev='-'),
+ 'E/alpha' : Item(status=' ', copied='+', wc_rev='-'),
+ })
+ expected_disk.remove('iota', 'iota.prej',
+ 'A/B/E', 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/B/E/dir_conflicts.prej',
+ 'A/B/E/beta.prej')
+ expected_disk.add({
+ 'A/iotb' : Item(contents="This is the file 'iota'.\n"),
+ 'E/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+ svntest.actions.verify_disk(wc_dir, expected_disk)
+
+#######################################################################
+# Run the tests
+
+# list all tests here, starting with None:
+test_list = [ None,
+ lateral_move_file_test,
+ sibling_move_file_test,
+ shallower_move_file_test,
+ deeper_move_file_test,
+ property_merge,
+ move_missing,
+ nested_replaces,
+ move_many_update_delete,
+ move_many_update_add,
+ move_del_moved,
+ copy_move_commit,
+ move_to_from_external,
+ revert_del_root_of_move,
+ move_conflict_details,
+ move_conflict_markers,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/patch_tests.py b/subversion/tests/cmdline/patch_tests.py
new file mode 100755
index 0000000..5d41dd6
--- /dev/null
+++ b/subversion/tests/cmdline/patch_tests.py
@@ -0,0 +1,7884 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# patch_tests.py: some basic patch tests
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import base64
+import os
+import re
+import sys
+import tempfile
+import textwrap
+import zlib
+import posixpath
+import filecmp
+
+# Our testing module
+import svntest
+from svntest import wc
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+########################################################################
+#Tests
+
+def patch(sbox):
+ "basic patch"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ mu_path = sbox.ospath('A/mu')
+
+ mu_contents = [
+ "Dear internet user,\n",
+ "\n",
+ "We wish to congratulate you over your email success in our computer\n",
+ "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+ "in which email addresses were used. All participants were selected\n",
+ "through a computer ballot system drawn from over 100,000 company\n",
+ "and 50,000,000 individual email addresses from all over the world.\n",
+ "\n",
+ "Your email address drew and have won the sum of 750,000 Euros\n",
+ "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+ "file with\n",
+ " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+ " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+ " BATCH NUMBERS :\n",
+ " EULO/1007/444/606/08;\n",
+ " SERIAL NUMBER: 45327\n",
+ "and PROMOTION DATE: 13th June. 2009\n",
+ "\n",
+ "To claim your winning prize, you are to contact the appointed\n",
+ "agent below as soon as possible for the immediate release of your\n",
+ "winnings with the below details.\n",
+ "\n",
+ "Again, we wish to congratulate you over your email success in our\n"
+ "computer Balloting.\n"
+ ]
+
+ # Set mu contents
+ svntest.main.file_write(mu_path, ''.join(mu_contents))
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Apply patch
+
+ unidiff_patch = [
+ "Index: A/D/gamma\n",
+ "===================================================================\n",
+ "--- A/D/gamma\t(revision 1)\n",
+ "+++ A/D/gamma\t(working copy)\n",
+ "@@ -1 +1 @@\n",
+ "-This is the file 'gamma'.\n",
+ "+It is the file 'gamma'.\n",
+ "Index: iota\n",
+ "===================================================================\n",
+ "--- iota\t(revision 1)\n",
+ "+++ iota\t(working copy)\n",
+ "@@ -1 +1,2 @@\n",
+ " This is the file 'iota'.\n",
+ "+Some more bytes\n",
+ "\n",
+ "Index: new\n",
+ "===================================================================\n",
+ "--- new (revision 0)\n",
+ "+++ new (revision 0)\n",
+ "@@ -0,0 +1 @@\n",
+ "+new\n",
+ "\n",
+ "--- A/mu.orig 2009-06-24 15:23:55.000000000 +0100\n",
+ "+++ A/mu 2009-06-24 15:21:23.000000000 +0100\n",
+ "@@ -6,6 +6,9 @@\n",
+ " through a computer ballot system drawn from over 100,000 company\n",
+ " and 50,000,000 individual email addresses from all over the world.\n",
+ " \n",
+ "+It is a promotional program aimed at encouraging internet users;\n",
+ "+therefore you do not need to buy ticket to enter for it.\n",
+ "+\n",
+ " Your email address drew and have won the sum of 750,000 Euros\n",
+ " ( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+ " file with\n",
+ "@@ -14,11 +17,8 @@\n",
+ " BATCH NUMBERS :\n",
+ " EULO/1007/444/606/08;\n",
+ " SERIAL NUMBER: 45327\n",
+ "-and PROMOTION DATE: 13th June. 2009\n",
+ "+and PROMOTION DATE: 14th June. 2009\n",
+ " \n",
+ " To claim your winning prize, you are to contact the appointed\n",
+ " agent below as soon as possible for the immediate release of your\n",
+ " winnings with the below details.\n",
+ "-\n",
+ "-Again, we wish to congratulate you over your email success in our\n",
+ "-computer Balloting.\n",
+ "Index: A/B/E/beta\n",
+ "===================================================================\n",
+ "--- A/B/E/beta (revision 1)\n",
+ "+++ A/B/E/beta (working copy)\n",
+ "@@ -1 +0,0 @@\n",
+ "-This is the file 'beta'.\n",
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ gamma_contents = "It is the file 'gamma'.\n"
+ iota_contents = "This is the file 'iota'.\nSome more bytes\n"
+ new_contents = "new\n"
+ mu_contents = [
+ "Dear internet user,\n",
+ "\n",
+ "We wish to congratulate you over your email success in our computer\n",
+ "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+ "in which email addresses were used. All participants were selected\n",
+ "through a computer ballot system drawn from over 100,000 company\n",
+ "and 50,000,000 individual email addresses from all over the world.\n",
+ "\n",
+ "It is a promotional program aimed at encouraging internet users;\n",
+ "therefore you do not need to buy ticket to enter for it.\n",
+ "\n",
+ "Your email address drew and have won the sum of 750,000 Euros\n",
+ "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+ "file with\n",
+ " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+ " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+ " BATCH NUMBERS :\n",
+ " EULO/1007/444/606/08;\n",
+ " SERIAL NUMBER: 45327\n",
+ "and PROMOTION DATE: 14th June. 2009\n",
+ "\n",
+ "To claim your winning prize, you are to contact the appointed\n",
+ "agent below as soon as possible for the immediate release of your\n",
+ "winnings with the below details.\n",
+ ]
+
+ expected_output = wc.State(wc_dir, {
+ 'A/D/gamma' : Item(status='U '),
+ 'iota' : Item(status='U '),
+ 'new' : Item(status='A '),
+ 'A/mu' : Item(status='U '),
+ 'A/B/E/beta' : Item(status='D '),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/D/gamma', contents=gamma_contents)
+ expected_disk.tweak('iota', contents=iota_contents)
+ expected_disk.add({'new' : Item(contents=new_contents)})
+ expected_disk.tweak('A/mu', contents=''.join(mu_contents))
+ expected_disk.remove('A/B/E/beta')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/gamma', status='M ')
+ expected_status.tweak('iota', status='M ')
+ expected_status.add({'new' : Item(status='A ', wc_rev=0)})
+ expected_status.tweak('A/mu', status='M ', wc_rev=2)
+ expected_status.tweak('A/B/E/beta', status='D ')
+
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+
+ # Retry
+ expected_output.tweak(status='G ')
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+
+def patch_absolute_paths(sbox):
+ "patch containing absolute paths"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = os.path.abspath(sbox.get_tempname('my.patch'))
+
+ os.chdir(wc_dir)
+
+ # A patch with absolute paths.
+ # The first diff points inside the working copy and should apply.
+ # The second diff does not point inside the working copy so application
+ # should fail.
+ abs = os.path.abspath('.')
+ if sys.platform == 'win32':
+ abs = abs.replace("\\", "/")
+ unidiff_patch = [
+ "diff -ur A/B/E/alpha.orig A/B/E/alpha\n"
+ "--- %s/A/B/E/alpha.orig\tThu Apr 16 19:49:53 2009\n" % abs,
+ "+++ %s/A/B/E/alpha\tThu Apr 16 19:50:30 2009\n" % abs,
+ "@@ -1 +1,2 @@\n",
+ " This is the file 'alpha'.\n",
+ "+Whoooo whooooo whoooooooo!\n",
+ "diff -ur A/B/lambda.orig A/B/lambda\n"
+ "--- /A/B/lambda.orig\tThu Apr 16 19:49:53 2009\n",
+ "+++ /A/B/lambda\tThu Apr 16 19:51:25 2009\n",
+ "@@ -1 +1 @@\n",
+ "-This is the file 'lambda'.\n",
+ "+It's the file 'lambda', who would have thought!\n",
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ lambda_path = os.path.join(os.path.sep, 'A', 'B', 'lambda')
+ expected_output = [
+ 'U %s\n' % os.path.join('A', 'B', 'E', 'alpha'),
+ 'Skipped missing target: \'%s\'\n' % lambda_path,
+ ] + svntest.main.summary_of_conflicts(skipped_paths=1)
+
+ alpha_contents = "This is the file 'alpha'.\nWhoooo whooooo whoooooooo!\n"
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/B/E/alpha', contents=alpha_contents)
+
+ expected_status = svntest.actions.get_virginal_state('', 1)
+ expected_status.tweak('A/B/E/alpha', status='M ')
+
+ expected_skip = wc.State('', {
+ lambda_path: Item(verb='Skipped missing target'),
+ })
+
+ svntest.actions.run_and_verify_patch('', patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1) # dry-run
+
+def patch_offset(sbox):
+ "patch with offset searching"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = os.path.abspath(sbox.get_tempname('my.patch'))
+ mu_path = sbox.ospath('A/mu')
+ iota_path = sbox.ospath('iota')
+
+ mu_contents = [
+ "Dear internet user,\n",
+ # The missing line here will cause the first hunk to match early
+ "We wish to congratulate you over your email success in our computer\n",
+ "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+ "in which email addresses were used. All participants were selected\n",
+ "through a computer ballot system drawn from over 100,000 company\n",
+ "and 50,000,000 individual email addresses from all over the world.\n",
+ "\n",
+ "Your email address drew and have won the sum of 750,000 Euros\n",
+ "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+ "file with\n",
+ " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+ "These extra lines will cause the second hunk to match late\n",
+ "These extra lines will cause the second hunk to match late\n",
+ "These extra lines will cause the second hunk to match late\n",
+ "These extra lines will cause the second hunk to match late\n",
+ "These extra lines will cause the second hunk to match late\n",
+ " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+ " BATCH NUMBERS :\n",
+ " EULO/1007/444/606/08;\n",
+ " SERIAL NUMBER: 45327\n",
+ "and PROMOTION DATE: 13th June. 2009\n",
+ "\n",
+ "To claim your winning prize, you are to contact the appointed\n",
+ "agent below as soon as possible for the immediate release of your\n",
+ "winnings with the below details.\n",
+ "\n",
+ "Again, we wish to congratulate you over your email success in our\n"
+ "computer Balloting.\n",
+ ]
+
+ # iota's content will make both a late and early match possible.
+ # The hunk to be applied is replicated here for reference:
+ # @@ -5,6 +5,7 @@
+ # iota
+ # iota
+ # iota
+ # +x
+ # iota
+ # iota
+ # iota
+ #
+ # This hunk wants to be applied at line 5, but that isn't
+ # possible because line 8 ("zzz") does not match "iota".
+ # The early match happens at line 2 (offset 3 = 5 - 2).
+ # The late match happens at line 9 (offset 4 = 9 - 5).
+ # Subversion will pick the early match in this case because it
+ # is closer to line 5.
+ iota_contents = [
+ "iota\n",
+ "iota\n",
+ "iota\n",
+ "iota\n",
+ "iota\n",
+ "iota\n",
+ "iota\n",
+ "zzz\n",
+ "iota\n",
+ "iota\n",
+ "iota\n",
+ "iota\n",
+ "iota\n",
+ "iota\n",
+ "iota\n"
+ ]
+
+ # Set mu and iota contents
+ svntest.main.file_write(mu_path, ''.join(mu_contents))
+ svntest.main.file_write(iota_path, ''.join(iota_contents))
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ 'iota' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=2)
+ expected_status.tweak('iota', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Apply patch
+
+ unidiff_patch = [
+ "--- A/mu.orig 2009-06-24 15:23:55.000000000 +0100\n",
+ "+++ A/mu 2009-06-24 15:21:23.000000000 +0100\n",
+ "@@ -6,6 +6,9 @@\n",
+ " through a computer ballot system drawn from over 100,000 company\n",
+ " and 50,000,000 individual email addresses from all over the world.\n",
+ " \n",
+ "+It is a promotional program aimed at encouraging internet users;\n",
+ "+therefore you do not need to buy ticket to enter for it.\n",
+ "+\n",
+ " Your email address drew and have won the sum of 750,000 Euros\n",
+ " ( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+ " file with\n",
+ "@@ -14,11 +17,8 @@\n",
+ " BATCH NUMBERS :\n",
+ " EULO/1007/444/606/08;\n",
+ " SERIAL NUMBER: 45327\n",
+ "-and PROMOTION DATE: 13th June. 2009\n",
+ "+and PROMOTION DATE: 14th June. 2009\n",
+ " \n",
+ " To claim your winning prize, you are to contact the appointed\n",
+ " agent below as soon as possible for the immediate release of your\n",
+ " winnings with the below details.\n",
+ "-\n",
+ "-Again, we wish to congratulate you over your email success in our\n",
+ "-computer Balloting.\n",
+ "Index: iota\n",
+ "===================================================================\n",
+ "--- iota (revision XYZ)\n",
+ "+++ iota (working copy)\n",
+ "@@ -5,6 +5,7 @@\n",
+ " iota\n",
+ " iota\n",
+ " iota\n",
+ "+x\n",
+ " iota\n",
+ " iota\n",
+ " iota\n",
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ mu_contents = [
+ "Dear internet user,\n",
+ "We wish to congratulate you over your email success in our computer\n",
+ "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+ "in which email addresses were used. All participants were selected\n",
+ "through a computer ballot system drawn from over 100,000 company\n",
+ "and 50,000,000 individual email addresses from all over the world.\n",
+ "\n",
+ "It is a promotional program aimed at encouraging internet users;\n",
+ "therefore you do not need to buy ticket to enter for it.\n",
+ "\n",
+ "Your email address drew and have won the sum of 750,000 Euros\n",
+ "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+ "file with\n",
+ " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+ "These extra lines will cause the second hunk to match late\n",
+ "These extra lines will cause the second hunk to match late\n",
+ "These extra lines will cause the second hunk to match late\n",
+ "These extra lines will cause the second hunk to match late\n",
+ "These extra lines will cause the second hunk to match late\n",
+ " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+ " BATCH NUMBERS :\n",
+ " EULO/1007/444/606/08;\n",
+ " SERIAL NUMBER: 45327\n",
+ "and PROMOTION DATE: 14th June. 2009\n",
+ "\n",
+ "To claim your winning prize, you are to contact the appointed\n",
+ "agent below as soon as possible for the immediate release of your\n",
+ "winnings with the below details.\n",
+ ]
+
+ iota_contents = [
+ "iota\n",
+ "iota\n",
+ "iota\n",
+ "iota\n",
+ "x\n",
+ "iota\n",
+ "iota\n",
+ "iota\n",
+ "zzz\n",
+ "iota\n",
+ "iota\n",
+ "iota\n",
+ "iota\n",
+ "iota\n",
+ "iota\n",
+ "iota\n",
+ ]
+
+ os.chdir(wc_dir)
+
+ expected_output = [
+ 'U %s\n' % os.path.join('A', 'mu'),
+ '> applied hunk @@ -6,6 +6,9 @@ with offset -1\n',
+ '> applied hunk @@ -14,11 +17,8 @@ with offset 4\n',
+ 'U iota\n',
+ '> applied hunk @@ -5,6 +5,7 @@ with offset -3\n',
+ ]
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu', contents=''.join(mu_contents))
+ expected_disk.tweak('iota', contents=''.join(iota_contents))
+
+ expected_status = svntest.actions.get_virginal_state('', 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=2)
+ expected_status.tweak('iota', status='M ', wc_rev=2)
+
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_patch('', patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1) # dry-run
+
+def patch_chopped_leading_spaces(sbox):
+ "patch with chopped leading spaces"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ mu_path = sbox.ospath('A/mu')
+
+ mu_contents = [
+ "Dear internet user,\n",
+ "\n",
+ "We wish to congratulate you over your email success in our computer\n",
+ "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+ "in which email addresses were used. All participants were selected\n",
+ "through a computer ballot system drawn from over 100,000 company\n",
+ "and 50,000,000 individual email addresses from all over the world.\n",
+ "\n",
+ "Your email address drew and have won the sum of 750,000 Euros\n",
+ "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+ "file with\n",
+ " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+ " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+ " BATCH NUMBERS :\n",
+ " EULO/1007/444/606/08;\n",
+ " SERIAL NUMBER: 45327\n",
+ "and PROMOTION DATE: 13th June. 2009\n",
+ "\n",
+ "To claim your winning prize, you are to contact the appointed\n",
+ "agent below as soon as possible for the immediate release of your\n",
+ "winnings with the below details.\n",
+ "\n",
+ "Again, we wish to congratulate you over your email success in our\n"
+ "computer Balloting.\n"
+ ]
+
+ # Set mu contents
+ svntest.main.file_write(mu_path, ''.join(mu_contents))
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Apply patch
+
+ unidiff_patch = [
+ "Index: A/D/gamma\n",
+ "===================================================================\n",
+ "--- A/D/gamma\t(revision 1)\n",
+ "+++ A/D/gamma\t(working copy)\n",
+ "@@ -1 +1 @@\n",
+ "-This is the file 'gamma'.\n",
+ "+It is the file 'gamma'.\n",
+ "Index: iota\n",
+ "===================================================================\n",
+ "--- iota\t(revision 1)\n",
+ "+++ iota\t(working copy)\n",
+ "@@ -1 +1,2 @@\n",
+ " This is the file 'iota'.\n",
+ "+Some more bytes\n",
+ "\n",
+ "Index: new\n",
+ "===================================================================\n",
+ "--- new (revision 0)\n",
+ "+++ new (revision 0)\n",
+ "@@ -0,0 +1 @@\n",
+ "+new\n",
+ "\n",
+ "--- A/mu.orig 2009-06-24 15:23:55.000000000 +0100\n",
+ "+++ A/mu 2009-06-24 15:21:23.000000000 +0100\n",
+ "@@ -6,6 +6,9 @@\n",
+ " through a computer ballot system drawn from over 100,000 company\n",
+ " and 50,000,000 individual email addresses from all over the world.\n",
+ "\n",
+ "+It is a promotional program aimed at encouraging internet users;\n",
+ "+therefore you do not need to buy ticket to enter for it.\n",
+ "+\n",
+ " Your email address drew and have won the sum of 750,000 Euros\n",
+ " ( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+ " file with\n",
+ "@@ -14,11 +17,8 @@\n",
+ " BATCH NUMBERS :\n",
+ " EULO/1007/444/606/08;\n",
+ " SERIAL NUMBER: 45327\n",
+ "-and PROMOTION DATE: 13th June. 2009\n",
+ "+and PROMOTION DATE: 14th June. 2009\n",
+ "\n",
+ " To claim your winning prize, you are to contact the appointed\n",
+ " agent below as soon as possible for the immediate release of your\n",
+ " winnings with the below details.\n",
+ "-\n",
+ "-Again, we wish to congratulate you over your email success in our\n",
+ "-computer Balloting.\n",
+ "Index: A/B/E/beta\n",
+ "===================================================================\n",
+ "--- A/B/E/beta (revision 1)\n",
+ "+++ A/B/E/beta (working copy)\n",
+ "@@ -1 +0,0 @@\n",
+ "-This is the file 'beta'.\n",
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ gamma_contents = "It is the file 'gamma'.\n"
+ iota_contents = "This is the file 'iota'.\nSome more bytes\n"
+ new_contents = "new\n"
+ mu_contents = [
+ "Dear internet user,\n",
+ "\n",
+ "We wish to congratulate you over your email success in our computer\n",
+ "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+ "in which email addresses were used. All participants were selected\n",
+ "through a computer ballot system drawn from over 100,000 company\n",
+ "and 50,000,000 individual email addresses from all over the world.\n",
+ "\n",
+ "It is a promotional program aimed at encouraging internet users;\n",
+ "therefore you do not need to buy ticket to enter for it.\n",
+ "\n",
+ "Your email address drew and have won the sum of 750,000 Euros\n",
+ "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+ "file with\n",
+ " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+ " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+ " BATCH NUMBERS :\n",
+ " EULO/1007/444/606/08;\n",
+ " SERIAL NUMBER: 45327\n",
+ "and PROMOTION DATE: 14th June. 2009\n",
+ "\n",
+ "To claim your winning prize, you are to contact the appointed\n",
+ "agent below as soon as possible for the immediate release of your\n",
+ "winnings with the below details.\n",
+ ]
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/D/gamma'),
+ 'U %s\n' % sbox.ospath('iota'),
+ 'A %s\n' % sbox.ospath('new'),
+ 'U %s\n' % sbox.ospath('A/mu'),
+ 'D %s\n' % sbox.ospath('A/B/E/beta'),
+ ]
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/D/gamma', contents=gamma_contents)
+ expected_disk.tweak('iota', contents=iota_contents)
+ expected_disk.add({'new' : Item(contents=new_contents)})
+ expected_disk.tweak('A/mu', contents=''.join(mu_contents))
+ expected_disk.remove('A/B/E/beta')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/gamma', status='M ')
+ expected_status.tweak('iota', status='M ')
+ expected_status.add({'new' : Item(status='A ', wc_rev=0)})
+ expected_status.tweak('A/mu', status='M ', wc_rev=2)
+ expected_status.tweak('A/B/E/beta', status='D ')
+
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1) # dry-run
+
+
+def patch_strip1(sbox):
+ "patch with --strip 1"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ mu_path = sbox.ospath('A/mu')
+
+ mu_contents = [
+ "Dear internet user,\n",
+ "\n",
+ "We wish to congratulate you over your email success in our computer\n",
+ "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+ "in which email addresses were used. All participants were selected\n",
+ "through a computer ballot system drawn from over 100,000 company\n",
+ "and 50,000,000 individual email addresses from all over the world.\n",
+ "\n",
+ "Your email address drew and have won the sum of 750,000 Euros\n",
+ "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+ "file with\n",
+ " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+ " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+ " BATCH NUMBERS :\n",
+ " EULO/1007/444/606/08;\n",
+ " SERIAL NUMBER: 45327\n",
+ "and PROMOTION DATE: 13th June. 2009\n",
+ "\n",
+ "To claim your winning prize, you are to contact the appointed\n",
+ "agent below as soon as possible for the immediate release of your\n",
+ "winnings with the below details.\n",
+ "\n",
+ "Again, we wish to congratulate you over your email success in our\n"
+ "computer Balloting.\n"
+ ]
+
+ # Set mu contents
+ svntest.main.file_write(mu_path, ''.join(mu_contents))
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Apply patch
+
+ unidiff_patch = [
+ "Index: b/A/D/gamma\n",
+ "===================================================================\n",
+ "--- a/A/D/gamma\t(revision 1)\n",
+ "+++ b/A/D/gamma\t(working copy)\n",
+ "@@ -1 +1 @@\n",
+ "-This is the file 'gamma'.\n",
+ "+It is the file 'gamma'.\n",
+ "Index: x/iota\n",
+ "===================================================================\n",
+ "--- x/iota\t(revision 1)\n",
+ "+++ x/iota\t(working copy)\n",
+ "@@ -1 +1,2 @@\n",
+ " This is the file 'iota'.\n",
+ "+Some more bytes\n",
+ "\n",
+ "Index: /new\n",
+ "===================================================================\n",
+ "--- /new (revision 0)\n",
+ "+++ /new (revision 0)\n",
+ "@@ -0,0 +1 @@\n",
+ "+new\n",
+ "\n",
+ "--- x/A/mu.orig 2009-06-24 15:23:55.000000000 +0100\n",
+ "+++ x/A/mu 2009-06-24 15:21:23.000000000 +0100\n",
+ "@@ -6,6 +6,9 @@\n",
+ " through a computer ballot system drawn from over 100,000 company\n",
+ " and 50,000,000 individual email addresses from all over the world.\n",
+ " \n",
+ "+It is a promotional program aimed at encouraging internet users;\n",
+ "+therefore you do not need to buy ticket to enter for it.\n",
+ "+\n",
+ " Your email address drew and have won the sum of 750,000 Euros\n",
+ " ( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+ " file with\n",
+ "@@ -14,11 +17,8 @@\n",
+ " BATCH NUMBERS :\n",
+ " EULO/1007/444/606/08;\n",
+ " SERIAL NUMBER: 45327\n",
+ "-and PROMOTION DATE: 13th June. 2009\n",
+ "+and PROMOTION DATE: 14th June. 2009\n",
+ " \n",
+ " To claim your winning prize, you are to contact the appointed\n",
+ " agent below as soon as possible for the immediate release of your\n",
+ " winnings with the below details.\n",
+ "-\n",
+ "-Again, we wish to congratulate you over your email success in our\n",
+ "-computer Balloting.\n",
+ "Index: A/B/E/beta\n",
+ "===================================================================\n",
+ "--- /A/B/E/beta (revision 1)\n",
+ "+++ /A/B/E/beta (working copy)\n",
+ "@@ -1 +0,0 @@\n",
+ "-This is the file 'beta'.\n",
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ gamma_contents = "It is the file 'gamma'.\n"
+ iota_contents = "This is the file 'iota'.\nSome more bytes\n"
+ new_contents = "new\n"
+ mu_contents = [
+ "Dear internet user,\n",
+ "\n",
+ "We wish to congratulate you over your email success in our computer\n",
+ "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+ "in which email addresses were used. All participants were selected\n",
+ "through a computer ballot system drawn from over 100,000 company\n",
+ "and 50,000,000 individual email addresses from all over the world.\n",
+ "\n",
+ "It is a promotional program aimed at encouraging internet users;\n",
+ "therefore you do not need to buy ticket to enter for it.\n",
+ "\n",
+ "Your email address drew and have won the sum of 750,000 Euros\n",
+ "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+ "file with\n",
+ " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+ " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+ " BATCH NUMBERS :\n",
+ " EULO/1007/444/606/08;\n",
+ " SERIAL NUMBER: 45327\n",
+ "and PROMOTION DATE: 14th June. 2009\n",
+ "\n",
+ "To claim your winning prize, you are to contact the appointed\n",
+ "agent below as soon as possible for the immediate release of your\n",
+ "winnings with the below details.\n",
+ ]
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/D/gamma'),
+ 'U %s\n' % sbox.ospath('iota'),
+ 'A %s\n' % sbox.ospath('new'),
+ 'U %s\n' % sbox.ospath('A/mu'),
+ 'D %s\n' % sbox.ospath('A/B/E/beta'),
+ ]
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/D/gamma', contents=gamma_contents)
+ expected_disk.tweak('iota', contents=iota_contents)
+ expected_disk.add({'new' : Item(contents=new_contents)})
+ expected_disk.tweak('A/mu', contents=''.join(mu_contents))
+ expected_disk.remove('A/B/E/beta')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/gamma', status='M ')
+ expected_status.tweak('iota', status='M ')
+ expected_status.add({'new' : Item(status='A ', wc_rev=0)})
+ expected_status.tweak('A/mu', status='M ', wc_rev=2)
+ expected_status.tweak('A/B/E/beta', status='D ')
+
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1, # dry-run
+ '--strip', '1')
+
def patch_no_index_line(sbox):
  "patch with no index lines"

  # Check that 'svn patch' applies a unidiff that has no "Index:" or
  # "====" separator lines, locating targets from the ---/+++ headers alone.
  sbox.build()
  wc_dir = sbox.wc_dir

  patch_file_path = sbox.get_tempname('my.patch')
  gamma_path = sbox.ospath('A/D/gamma')
  iota_path = sbox.ospath('iota')

  # Give gamma context lines around its original content so the hunk
  # below has context to match against.
  gamma_contents = [
    "\n",
    "Another line before\n",
    "A third line before\n",
    "This is the file 'gamma'.\n",
    "A line after\n",
    "Another line after\n",
    "A third line after\n",
  ]

  svntest.main.file_write(gamma_path, ''.join(gamma_contents))
  expected_output = svntest.wc.State(wc_dir, {
    'A/D/gamma' : Item(verb='Sending'),
  })
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/D/gamma', wc_rev=2)
  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status)
  # Two consecutive file diffs, neither preceded by an Index: line.
  unidiff_patch = [
    "--- A/D/gamma\t(revision 1)\n",
    "+++ A/D/gamma\t(working copy)\n",
    "@@ -1,7 +1,7 @@\n",
    " \n",
    " Another line before\n",
    " A third line before\n",
    "-This is the file 'gamma'.\n",
    "+It is the file 'gamma'.\n",
    " A line after\n",
    " Another line after\n",
    " A third line after\n",
    "--- iota\t(revision 1)\n",
    "+++ iota\t(working copy)\n",
    "@@ -1 +1,2 @@\n",
    " This is the file 'iota'.\n",
    "+Some more bytes\n",
  ]

  svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))

  # Expected on-disk contents after the patch has been applied.
  gamma_contents = [
    "\n",
    "Another line before\n",
    "A third line before\n",
    "It is the file 'gamma'.\n",
    "A line after\n",
    "Another line after\n",
    "A third line after\n",
  ]
  iota_contents = [
    "This is the file 'iota'.\n",
    "Some more bytes\n",
  ]
  expected_output = [
    'U %s\n' % sbox.ospath('A/D/gamma'),
    'U %s\n' % sbox.ospath('iota'),
  ]

  expected_disk = svntest.main.greek_state.copy()
  expected_disk.tweak('A/D/gamma', contents=''.join(gamma_contents))
  expected_disk.tweak('iota', contents=''.join(iota_contents))

  # gamma was committed above (r2); iota is still at r1.
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/D/gamma', status='M ', wc_rev=2)
  expected_status.tweak('iota', status='M ', wc_rev=1)

  expected_skip = wc.State('', { })

  svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
                                       expected_output,
                                       expected_disk,
                                       expected_status,
                                       expected_skip,
                                       None, # expected err
                                       1, # check-props
                                       1) # dry-run
+
def patch_add_new_dir(sbox):
  "patch with missing dirs"

  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  patch_file_path = sbox.get_tempname('my.patch')

  # The first diff is adding 'new' with two missing dirs. The second is
  # adding 'new' with one missing dir to a 'A/B/E' that is locally deleted
  # (should be skipped). The third is adding 'new' to 'A/C' that is locally
  # deleted (should be skipped too). The fourth is adding 'new' with a
  # directory that is unversioned (should be skipped as well).
  unidiff_patch = [
    "Index: new\n",
    "===================================================================\n",
    "--- X/Y/new\t(revision 0)\n",
    "+++ X/Y/new\t(revision 0)\n",
    "@@ -0,0 +1 @@\n",
    "+new\n",
    "Index: new\n",
    "===================================================================\n",
    "--- A/B/E/Y/new\t(revision 0)\n",
    "+++ A/B/E/Y/new\t(revision 0)\n",
    "@@ -0,0 +1 @@\n",
    "+new\n",
    "Index: new\n",
    "===================================================================\n",
    "--- A/C/new\t(revision 0)\n",
    "+++ A/C/new\t(revision 0)\n",
    "@@ -0,0 +1 @@\n",
    "+new\n",
    "Index: new\n",
    "===================================================================\n",
    "--- A/Z/new\t(revision 0)\n",
    "+++ A/Z/new\t(revision 0)\n",
    "@@ -0,0 +1 @@\n",
    "+new\n",
  ]

  C_path = sbox.ospath('A/C')
  E_path = sbox.ospath('A/B/E')

  # Remove A/C and A/B/E from disk so those targets are missing
  # (status '!' below) and the corresponding diffs get skipped.
  svntest.main.safe_rmtree(C_path)
  svntest.main.safe_rmtree(E_path)
  svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))

  A_B_E_Y_new_path = sbox.ospath('A/B/E/Y/new')
  A_C_new_path = sbox.ospath('A/C/new')
  A_Z_new_path = sbox.ospath('A/Z/new')
  # Only the first diff applies; it creates the two intermediate dirs.
  expected_output = [
    'A %s\n' % sbox.ospath('X'),
    'A %s\n' % sbox.ospath('X/Y'),
    'A %s\n' % sbox.ospath('X/Y/new'),
    'Skipped missing target: \'%s\'\n' % A_B_E_Y_new_path,
    'Skipped missing target: \'%s\'\n' % A_C_new_path,
    'Skipped missing target: \'%s\'\n' % A_Z_new_path,
  ] + svntest.main.summary_of_conflicts(skipped_paths=3)

  # Create the unversioned obstructing directory
  os.mkdir(os.path.dirname(A_Z_new_path))

  expected_disk = svntest.main.greek_state.copy()
  expected_disk.add({
    'X/Y/new' : Item(contents='new\n'),
    'A/Z' : Item()
  })
  expected_disk.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta', 'A/C')

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.add({
    'X' : Item(status='A ', wc_rev=0),
    'X/Y' : Item(status='A ', wc_rev=0),
    'X/Y/new' : Item(status='A ', wc_rev=0),
    'A/B/E' : Item(status='! ', wc_rev=1),
    'A/B/E/alpha': Item(status='! ', wc_rev=1),
    'A/B/E/beta' : Item(status='! ', wc_rev=1),
    'A/C' : Item(status='! ', wc_rev=1),
  })

  expected_skip = wc.State(
    '',
    {A_Z_new_path : Item(verb='Skipped missing target'),
     A_B_E_Y_new_path : Item(verb='Skipped missing target'),
     A_C_new_path : Item(verb='Skipped missing target')})

  svntest.actions.run_and_verify_patch(wc_dir,
                                       patch_file_path,
                                       expected_output,
                                       expected_disk,
                                       expected_status,
                                       expected_skip,
                                       None, # expected err
                                       1, # check-props
                                       1) # dry-run
def patch_remove_empty_dirs(sbox):
  "patch deleting all children of a directory"

  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  patch_file_path = sbox.get_tempname('my.patch')

  # Contents of B:
  # A/B/lambda
  # A/B/F
  # A/B/E/{alpha,beta}
  # Before patching we've deleted F, which means that B is empty after
  # patching and should be removed.
  #
  # Contents of H:
  # A/D/H/{chi,psi,omega}
  # Before patching, chi has been removed by a non-svn operation which
  # means it has status missing. The patch deletes the other two files
  # but should not delete H.

  unidiff_patch = [
    "Index: psi\n",
    "===================================================================\n",
    "--- A/D/H/psi\t(revision 0)\n",
    "+++ A/D/H/psi\t(revision 0)\n",
    "@@ -1 +0,0 @@\n",
    "-This is the file 'psi'.\n",
    "Index: omega\n",
    "===================================================================\n",
    "--- A/D/H/omega\t(revision 0)\n",
    "+++ A/D/H/omega\t(revision 0)\n",
    "@@ -1 +0,0 @@\n",
    "-This is the file 'omega'.\n",
    "Index: lambda\n",
    "===================================================================\n",
    "--- A/B/lambda\t(revision 0)\n",
    "+++ A/B/lambda\t(revision 0)\n",
    "@@ -1 +0,0 @@\n",
    "-This is the file 'lambda'.\n",
    "Index: alpha\n",
    "===================================================================\n",
    "--- A/B/E/alpha\t(revision 0)\n",
    "+++ A/B/E/alpha\t(revision 0)\n",
    "@@ -1 +0,0 @@\n",
    "-This is the file 'alpha'.\n",
    "Index: beta\n",
    "===================================================================\n",
    "--- A/B/E/beta\t(revision 0)\n",
    "+++ A/B/E/beta\t(revision 0)\n",
    "@@ -1 +0,0 @@\n",
    "-This is the file 'beta'.\n",
  ]

  svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))

  # Schedule A/B/F for deletion so that A/B becomes empty once the patch
  # removes lambda and E's children.
  F_path = sbox.ospath('A/B/F')
  svntest.actions.run_and_verify_svn(None, [],
                                     'rm', F_path)
  svntest.actions.run_and_verify_svn(None, [],
                                     'up', wc_dir)

  # We should be able to handle one path being missing.
  os.remove(sbox.ospath('A/D/H/chi'))

  # A/B/E and A/B themselves get deleted once emptied; A/D/H stays because
  # of the missing chi.
  expected_output = [
    'D %s\n' % sbox.ospath('A/D/H/psi'),
    'D %s\n' % sbox.ospath('A/D/H/omega'),
    'D %s\n' % sbox.ospath('A/B/lambda'),
    'D %s\n' % sbox.ospath('A/B/E/alpha'),
    'D %s\n' % sbox.ospath('A/B/E/beta'),
    'D %s\n' % sbox.ospath('A/B/E'),
    'D %s\n' % sbox.ospath('A/B'),
  ]

  expected_disk = svntest.main.greek_state.copy()
  expected_disk.remove('A/D/H/chi',
                       'A/D/H/psi',
                       'A/D/H/omega',
                       'A/B/lambda',
                       'A/B',
                       'A/B/E',
                       'A/B/E/alpha',
                       'A/B/E/beta',
                       'A/B/F')

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/D/H/chi', status='! ')
  expected_status.tweak('A/D/H/omega', 'A/D/H/psi', 'A/B', 'A/B/E',
                        'A/B/E/beta', 'A/B/E/alpha', 'A/B/lambda',
                        'A/B/F', status='D ')

  expected_skip = wc.State('', { })

  svntest.actions.run_and_verify_patch(wc_dir,
                                       patch_file_path,
                                       expected_output,
                                       expected_disk,
                                       expected_status,
                                       expected_skip,
                                       None, # expected err
                                       1, # check-props
                                       1) # dry-run
+
+
def patch_reject(sbox):
  "patch which is rejected"

  # Apply a hunk whose context does not match the target; 'svn patch'
  # must reject it and write a .svnpatch.rej file next to the target.
  sbox.build()
  wc_dir = sbox.wc_dir

  # Set gamma contents
  gamma_contents = "Hello there! I'm the file 'gamma'.\n"
  gamma_path = sbox.ospath('A/D/gamma')
  svntest.main.file_write(gamma_path, gamma_contents)
  expected_output = svntest.wc.State(wc_dir, {
    'A/D/gamma' : Item(verb='Sending'),
  })
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/D/gamma', wc_rev=2)
  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status)

  patch_file_path = sbox.get_tempname('my.patch')

  # Apply patch

  # The hunk's context line does not occur in gamma as committed above.
  unidiff_patch = [
    "Index: A/D/gamma\n",
    "===================================================================\n",
    "--- A/D/gamma\t(revision 1)\n",
    "+++ A/D/gamma\t(working copy)\n",
    "@@ -1 +1 @@\n",
    "-This is really the file 'gamma'.\n",
    "+It is really the file 'gamma'.\n",
  ]

  svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))

  expected_output = [
    'C %s\n' % sbox.ospath('A/D/gamma'),
    '> rejected hunk @@ -1,1 +1,1 @@\n',
  ] + svntest.main.summary_of_conflicts(text_conflicts=1)

  # The target itself must be left untouched.
  expected_disk = svntest.main.greek_state.copy()
  expected_disk.tweak('A/D/gamma', contents=gamma_contents)

  reject_file_contents = [
    "--- A/D/gamma\n",
    "+++ A/D/gamma\n",
    "@@ -1,1 +1,1 @@\n",
    "-This is really the file 'gamma'.\n",
    "+It is really the file 'gamma'.\n",
  ]
  expected_disk.add({'A/D/gamma.svnpatch.rej' :
                     Item(contents=''.join(reject_file_contents))})

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/D/gamma', wc_rev=2)
  # ### not yet
  #expected_status.tweak('A/D/gamma', status='C ')

  expected_skip = wc.State('', { })

  svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
                                       expected_output,
                                       expected_disk,
                                       expected_status,
                                       expected_skip,
                                       None, # expected err
                                       1, # check-props
                                       1) # dry-run
+
def patch_keywords(sbox):
  "patch containing keywords"

  # A target with svn:keywords set must come out of 'svn patch' with its
  # keywords still expanded; here the hunk context itself contains $Rev$.
  sbox.build()
  wc_dir = sbox.wc_dir

  # Commit a gamma that carries an expandable $Rev$ keyword.
  gamma_contents = "$Rev$\nHello there! I'm the file 'gamma'.\n"
  gamma_path = sbox.ospath('A/D/gamma')
  svntest.main.file_write(gamma_path, gamma_contents)
  # Turn on expansion of the Rev keyword for this file.
  svntest.main.run_svn(None, 'propset', 'svn:keywords', 'Rev',
                       gamma_path)
  expected_output = svntest.wc.State(wc_dir,
                                     {'A/D/gamma' : Item(verb='Sending')})
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/D/gamma', wc_rev=2)
  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status)

  patch_file_path = sbox.get_tempname('my.patch')

  # Apply patch

  # The hunk appends one line; both context lines include the keyword.
  patch_text = (
    "Index: gamma\n"
    "===================================================================\n"
    "--- A/D/gamma (revision 3)\n"
    "+++ A/D/gamma (working copy)\n"
    "@@ -1,2 +1,3 @@\n"
    " $Rev$\n"
    " Hello there! I'm the file 'gamma'.\n"
    "+booo\n"
  )
  svntest.main.file_write(patch_file_path, patch_text)

  expected_output = ['U %s\n' % gamma_path]

  # After patching, $Rev$ must read as the committed revision (2).
  patched_gamma = "$Rev: 2 $\nHello there! I'm the file 'gamma'.\nbooo\n"
  expected_disk = svntest.main.greek_state.copy()
  expected_disk.tweak('A/D/gamma', contents=patched_gamma,
                      props={'svn:keywords' : 'Rev'})

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/D/gamma', status='M ', wc_rev=2)

  expected_skip = wc.State('', { })

  svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
                                       expected_output, expected_disk,
                                       expected_status, expected_skip,
                                       None, # expected err
                                       1, # check-props
                                       1) # dry-run
+
def patch_with_fuzz(sbox):
  "patch with fuzz"

  # Apply hunks whose context only partially matches, so 'svn patch' has
  # to use fuzz (and, for the last hunk, an offset) to place them.
  sbox.build()
  wc_dir = sbox.wc_dir
  patch_file_path = sbox.get_tempname('my.patch')

  mu_path = sbox.ospath('A/mu')

  # We have replaced a couple of lines to cause fuzz. Those lines contains
  # the word fuzz
  mu_contents = [
    "Line replaced for fuzz = 1\n",
    "\n",
    "We wish to congratulate you over your email success in our computer\n",
    "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
    "in which email addresses were used. All participants were selected\n",
    "through a computer ballot system drawn from over 100,000 company\n",
    "and 50,000,000 individual email addresses from all over the world.\n",
    "Line replaced for fuzz = 2 with only the second context line changed\n",
    "Your email address drew and have won the sum of 750,000 Euros\n",
    "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
    "file with\n",
    " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
    " WINNING NUMBER : 14-17-24-34-37-45-16\n",
    " BATCH NUMBERS :\n",
    " EULO/1007/444/606/08;\n",
    " SERIAL NUMBER: 45327\n",
    "and PROMOTION DATE: 13th June. 2009\n",
    "\n",
    "This line is inserted to cause an offset of +1\n",
    "To claim your winning prize, you are to contact the appointed\n",
    "agent below as soon as possible for the immediate release of your\n",
    "winnings with the below details.\n",
    "\n",
    "Line replaced for fuzz = 2\n",
    "Line replaced for fuzz = 2\n",
  ]

  # Set mu contents
  svntest.main.file_write(mu_path, ''.join(mu_contents))
  expected_output = svntest.wc.State(wc_dir, {
    'A/mu' : Item(verb='Sending'),
  })
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/mu', wc_rev=2)
  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status)

  unidiff_patch = [
    "Index: mu\n",
    "===================================================================\n",
    "--- A/mu\t(revision 0)\n",
    "+++ A/mu\t(revision 0)\n",
    "@@ -1,6 +1,7 @@\n",
    " Dear internet user,\n",
    " \n",
    " We wish to congratulate you over your email success in our computer\n",
    "+A new line here\n",
    " Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
    " in which email addresses were used. All participants were selected\n",
    " through a computer ballot system drawn from over 100,000 company\n",
    "@@ -7,7 +8,9 @@\n",
    " and 50,000,000 individual email addresses from all over the world.\n",
    " \n",
    " Your email address drew and have won the sum of 750,000 Euros\n",
    "+Another new line\n",
    " ( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
    "+A third new line\n",
    " file with\n",
    " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
    " WINNING NUMBER : 14-17-24-34-37-45-16\n",
    "@@ -19,6 +20,7 @@\n",
    " To claim your winning prize, you are to contact the appointed\n",
    " agent below as soon as possible for the immediate release of your\n",
    " winnings with the below details.\n",
    "+A fourth new line\n",
    # NOTE: the last two strings below have no separating comma on purpose;
    # Python's implicit concatenation joins them into one list element, and
    # the final line deliberately has no trailing newline.
    " \n",
    " Again, we wish to congratulate you over your email success in our\n"
    " computer Balloting. [No trailing newline here]"
  ]

  svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))

  # Expected contents of mu after all three hunks are applied with fuzz.
  mu_contents = [
    "Line replaced for fuzz = 1\n",
    "\n",
    "We wish to congratulate you over your email success in our computer\n",
    "A new line here\n",
    "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
    "in which email addresses were used. All participants were selected\n",
    "through a computer ballot system drawn from over 100,000 company\n",
    "and 50,000,000 individual email addresses from all over the world.\n",
    "Line replaced for fuzz = 2 with only the second context line changed\n",
    "Your email address drew and have won the sum of 750,000 Euros\n",
    "Another new line\n",
    "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
    "A third new line\n",
    "file with\n",
    " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
    " WINNING NUMBER : 14-17-24-34-37-45-16\n",
    " BATCH NUMBERS :\n",
    " EULO/1007/444/606/08;\n",
    " SERIAL NUMBER: 45327\n",
    "and PROMOTION DATE: 13th June. 2009\n",
    "\n",
    "This line is inserted to cause an offset of +1\n",
    "To claim your winning prize, you are to contact the appointed\n",
    "agent below as soon as possible for the immediate release of your\n",
    "winnings with the below details.\n",
    "A fourth new line\n",
    "\n",
    "Line replaced for fuzz = 2\n",
    "Line replaced for fuzz = 2\n",
  ]

  # 'svn patch' reports the fuzz/offset it needed for each hunk.
  expected_output = [
    'U %s\n' % sbox.ospath('A/mu'),
    '> applied hunk @@ -1,6 +1,7 @@ with fuzz 1\n',
    '> applied hunk @@ -7,7 +8,9 @@ with fuzz 2\n',
    '> applied hunk @@ -19,6 +20,7 @@ with offset 1 and fuzz 2\n',
  ]
  expected_disk = svntest.main.greek_state.copy()
  expected_disk.tweak('A/mu', contents=''.join(mu_contents))

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/mu', status='M ', wc_rev=2)

  expected_skip = wc.State('', { })

  svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
                                       expected_output,
                                       expected_disk,
                                       expected_status,
                                       expected_skip,
                                       None, # expected err
                                       1, # check-props
                                       1) # dry-run
+
def patch_reverse(sbox):
  "patch in reverse"

  # Apply a patch with --reverse-diff: added lines are removed and vice
  # versa, so the expected results mirror a forward application of the
  # inverse diff.
  sbox.build()
  wc_dir = sbox.wc_dir

  patch_file_path = sbox.get_tempname('my.patch')
  mu_path = sbox.ospath('A/mu')

  mu_contents = [
    "Dear internet user,\n",
    "\n",
    "We wish to congratulate you over your email success in our computer\n",
    "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
    "in which email addresses were used. All participants were selected\n",
    "through a computer ballot system drawn from over 100,000 company\n",
    "and 50,000,000 individual email addresses from all over the world.\n",
    "\n",
    "Your email address drew and have won the sum of 750,000 Euros\n",
    "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
    "file with\n",
    " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
    " WINNING NUMBER : 14-17-24-34-37-45-16\n",
    " BATCH NUMBERS :\n",
    " EULO/1007/444/606/08;\n",
    " SERIAL NUMBER: 45327\n",
    "and PROMOTION DATE: 13th June. 2009\n",
    "\n",
    "To claim your winning prize, you are to contact the appointed\n",
    "agent below as soon as possible for the immediate release of your\n",
    "winnings with the below details.\n",
    "\n",
    # NOTE: no comma between the next two strings; implicit concatenation
    # makes them a single list element (harmless under ''.join below).
    "Again, we wish to congratulate you over your email success in our\n"
    "computer Balloting.\n"
  ]

  # Set mu contents
  svntest.main.file_write(mu_path, ''.join(mu_contents))
  expected_output = svntest.wc.State(wc_dir, {
    'A/mu' : Item(verb='Sending'),
  })
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/mu', wc_rev=2)
  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status)

  # Apply patch

  # Each diff below is written "backwards": reversing it modifies gamma,
  # adds a line to iota, creates 'new', edits mu and deletes beta.
  unidiff_patch = [
    "Index: A/D/gamma\n",
    "===================================================================\n",
    "--- A/D/gamma\t(revision 1)\n",
    "+++ A/D/gamma\t(working copy)\n",
    "@@ -1 +1 @@\n",
    "+This is the file 'gamma'.\n",
    "-It is the file 'gamma'.\n",
    "Index: iota\n",
    "===================================================================\n",
    "--- iota\t(revision 1)\n",
    "+++ iota\t(working copy)\n",
    "@@ -1,2 +1 @@\n",
    " This is the file 'iota'.\n",
    "-Some more bytes\n",
    "\n",
    "Index: new\n",
    "===================================================================\n",
    "--- new (revision 0)\n",
    "+++ new (revision 0)\n",
    "@@ -1 +0,0 @@\n",
    "-new\n",
    "\n",
    "--- A/mu 2009-06-24 15:23:55.000000000 +0100\n",
    "+++ A/mu.orig 2009-06-24 15:21:23.000000000 +0100\n",
    "@@ -6,9 +6,6 @@\n",
    " through a computer ballot system drawn from over 100,000 company\n",
    " and 50,000,000 individual email addresses from all over the world.\n",
    " \n",
    "-It is a promotional program aimed at encouraging internet users;\n",
    "-therefore you do not need to buy ticket to enter for it.\n",
    "-\n",
    " Your email address drew and have won the sum of 750,000 Euros\n",
    " ( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
    " file with\n",
    "@@ -17,8 +14,11 @@\n",
    " BATCH NUMBERS :\n",
    " EULO/1007/444/606/08;\n",
    " SERIAL NUMBER: 45327\n",
    "+and PROMOTION DATE: 13th June. 2009\n",
    "-and PROMOTION DATE: 14th June. 2009\n",
    " \n",
    " To claim your winning prize, you are to contact the appointed\n",
    " agent below as soon as possible for the immediate release of your\n",
    " winnings with the below details.\n",
    "+\n",
    "+Again, we wish to congratulate you over your email success in our\n",
    "+computer Balloting.\n",
    "Index: A/B/E/beta\n",
    "===================================================================\n",
    "--- A/B/E/beta (working copy)\n",
    "+++ A/B/E/beta (revision 1)\n",
    "@@ -0,0 +1 @@\n",
    "+This is the file 'beta'.\n",
  ]

  svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))

  # Expected results after reverse application.
  gamma_contents = "It is the file 'gamma'.\n"
  iota_contents = "This is the file 'iota'.\nSome more bytes\n"
  new_contents = "new\n"
  mu_contents = [
    "Dear internet user,\n",
    "\n",
    "We wish to congratulate you over your email success in our computer\n",
    "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
    "in which email addresses were used. All participants were selected\n",
    "through a computer ballot system drawn from over 100,000 company\n",
    "and 50,000,000 individual email addresses from all over the world.\n",
    "\n",
    "It is a promotional program aimed at encouraging internet users;\n",
    "therefore you do not need to buy ticket to enter for it.\n",
    "\n",
    "Your email address drew and have won the sum of 750,000 Euros\n",
    "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
    "file with\n",
    " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
    " WINNING NUMBER : 14-17-24-34-37-45-16\n",
    " BATCH NUMBERS :\n",
    " EULO/1007/444/606/08;\n",
    " SERIAL NUMBER: 45327\n",
    "and PROMOTION DATE: 14th June. 2009\n",
    "\n",
    "To claim your winning prize, you are to contact the appointed\n",
    "agent below as soon as possible for the immediate release of your\n",
    "winnings with the below details.\n",
  ]

  expected_output = [
    'U %s\n' % sbox.ospath('A/D/gamma'),
    'U %s\n' % sbox.ospath('iota'),
    'A %s\n' % sbox.ospath('new'),
    'U %s\n' % sbox.ospath('A/mu'),
    'D %s\n' % sbox.ospath('A/B/E/beta'),
  ]

  expected_disk = svntest.main.greek_state.copy()
  expected_disk.tweak('A/D/gamma', contents=gamma_contents)
  expected_disk.tweak('iota', contents=iota_contents)
  expected_disk.add({'new' : Item(contents=new_contents)})
  expected_disk.tweak('A/mu', contents=''.join(mu_contents))
  expected_disk.remove('A/B/E/beta')

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/D/gamma', status='M ')
  expected_status.tweak('iota', status='M ')
  expected_status.add({'new' : Item(status='A ', wc_rev=0)})
  expected_status.tweak('A/mu', status='M ', wc_rev=2)
  expected_status.tweak('A/B/E/beta', status='D ')

  expected_skip = wc.State('', { })

  svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
                                       expected_output,
                                       expected_disk,
                                       expected_status,
                                       expected_skip,
                                       None, # expected err
                                       1, # check-props
                                       1, # dry-run
                                       '--reverse-diff')
+
def patch_no_svn_eol_style(sbox):
  "patch target with no svn:eol-style"

  # Patch a target that has no svn:eol-style, for every combination of
  # EOL style in the target file vs. in the patch file.
  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  patch_file_path = sbox.get_tempname('my.patch')
  mu_path = sbox.ospath('A/mu')

  # CRLF is a string that will match a CRLF sequence read from a text file.
  # ### On Windows, we assume CRLF will be read as LF, so it's a poor test.
  if os.name == 'nt':
    crlf = '\n'
  else:
    crlf = '\r\n'

  # Strict EOL style matching breaks Windows tests at least with Python 2
  keep_eol_style = not svntest.main.is_os_windows()

  # '\015' is CR, '\012' is LF — same EOLs spelled as octal escapes.
  eols = [crlf, '\015', '\n', '\012']
  for target_eol in eols:
    for patch_eol in eols:
      mu_contents = [
        "We wish to congratulate you over your email success in our computer",
        target_eol,
        "Balloting. This is a Millennium Scientific Electronic Computer Draw",
        target_eol,
        "in which email addresses were used. All participants were selected",
        target_eol,
        "through a computer ballot system drawn from over 100,000 company",
        target_eol,
        "and 50,000,000 individual email addresses from all over the world.",
        target_eol,
        "It is a promotional program aimed at encouraging internet users;",
        target_eol,
      ]

      # Set mu contents
      svntest.main.file_write(mu_path, ''.join(mu_contents))

      unidiff_patch = [
        "Index: mu",
        patch_eol,
        "===================================================================",
        patch_eol,
        "--- A/mu\t(revision 0)",
        patch_eol,
        "+++ A/mu\t(revision 0)",
        patch_eol,
        "@@ -1,5 +1,6 @@",
        patch_eol,
        " We wish to congratulate you over your email success in our computer",
        patch_eol,
        " Balloting. This is a Millennium Scientific Electronic Computer Draw",
        patch_eol,
        "+A new line here",
        patch_eol,
        " in which email addresses were used. All participants were selected",
        patch_eol,
        " through a computer ballot system drawn from over 100,000 company",
        patch_eol,
        " and 50,000,000 individual email addresses from all over the world.",
        patch_eol,
      ]

      # Expected result: the lines touched by the patch carry the patch
      # file's EOLs, while the untouched last line keeps the target's EOL.
      mu_contents = [
        "We wish to congratulate you over your email success in our computer",
        patch_eol,
        "Balloting. This is a Millennium Scientific Electronic Computer Draw",
        patch_eol,
        "A new line here",
        patch_eol,
        "in which email addresses were used. All participants were selected",
        patch_eol,
        "through a computer ballot system drawn from over 100,000 company",
        patch_eol,
        "and 50,000,000 individual email addresses from all over the world.",
        patch_eol,
        "It is a promotional program aimed at encouraging internet users;",
        target_eol,
      ]

      svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))

      expected_output = wc.State(wc_dir, {
        'A/mu' : Item(status='U '),
      })
      expected_disk = svntest.main.greek_state.copy()
      expected_disk.tweak('A/mu', contents=''.join(mu_contents))

      expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
      expected_status.tweak('A/mu', status='M ', wc_rev=1)

      expected_skip = wc.State('', { })

      svntest.actions.run_and_verify_patch2(wc_dir,
                                            patch_file_path,
                                            expected_output,
                                            expected_disk,
                                            expected_status,
                                            expected_skip,
                                            [], True, True, keep_eol_style)

      # Undo the patch so the next combination starts from a clean WC.
      expected_output = ["Reverted '" + mu_path + "'\n"]
      svntest.actions.run_and_verify_svn(expected_output, [],
                                         'revert', '-R', wc_dir)
+
def patch_with_svn_eol_style(sbox):
  "patch target with svn:eol-style"

  # Patch a target that has svn:eol-style set; whatever EOLs the patch
  # file uses, the result must keep the target's configured EOL style.
  sbox.build()
  wc_dir = sbox.wc_dir

  patch_file_path = sbox.get_tempname('my.patch')
  mu_path = sbox.ospath('A/mu')

  # CRLF is a string that will match a CRLF sequence read from a text file.
  # ### On Windows, we assume CRLF will be read as LF, so it's a poor test.
  if os.name == 'nt':
    crlf = '\n'
  else:
    crlf = '\r\n'

  # Strict EOL style matching breaks Windows tests at least with Python 2
  keep_eol_style = not svntest.main.is_os_windows()

  # Pair each target EOL with the matching svn:eol-style property value.
  eols = [crlf, '\015', '\n', '\012']
  eol_styles = ['CRLF', 'CR', 'native', 'LF']
  rev = 1
  for target_eol, target_eol_style in zip(eols, eol_styles):
    for patch_eol in eols:
      mu_contents = [
        "We wish to congratulate you over your email success in our computer",
        target_eol,
        "Balloting. This is a Millennium Scientific Electronic Computer Draw",
        target_eol,
        "in which email addresses were used. All participants were selected",
        target_eol,
        "through a computer ballot system drawn from over 100,000 company",
        target_eol,
        "and 50,000,000 individual email addresses from all over the world.",
        target_eol,
        "It is a promotional program aimed at encouraging internet users;",
        target_eol,
      ]

      # Set mu contents: delete and re-add mu so each iteration can give
      # it fresh contents plus the desired svn:eol-style (two commits).
      svntest.main.run_svn(None, 'rm', mu_path)
      svntest.main.run_svn(None, 'commit', '-m', 'delete mu', mu_path)
      svntest.main.file_write(mu_path, ''.join(mu_contents))
      svntest.main.run_svn(None, 'add', mu_path)
      svntest.main.run_svn(None, 'propset', 'svn:eol-style', target_eol_style,
                           mu_path)
      svntest.main.run_svn(None, 'commit', '-m', 'set eol-style', mu_path)

      unidiff_patch = [
        "Index: mu",
        patch_eol,
        "===================================================================",
        patch_eol,
        "--- A/mu\t(revision 0)",
        patch_eol,
        "+++ A/mu\t(revision 0)",
        patch_eol,
        "@@ -1,5 +1,6 @@",
        patch_eol,
        " We wish to congratulate you over your email success in our computer",
        patch_eol,
        " Balloting. This is a Millennium Scientific Electronic Computer Draw",
        patch_eol,
        "+A new line here",
        patch_eol,
        " in which email addresses were used. All participants were selected",
        patch_eol,
        " through a computer ballot system drawn from over 100,000 company",
        patch_eol,
        " and 50,000,000 individual email addresses from all over the world.",
        patch_eol,
      ]

      # Expected result: every line, including the patched-in one, uses
      # the target's EOL, since svn:eol-style governs the working file.
      mu_contents = [
        "We wish to congratulate you over your email success in our computer",
        target_eol,
        "Balloting. This is a Millennium Scientific Electronic Computer Draw",
        target_eol,
        "A new line here",
        target_eol,
        "in which email addresses were used. All participants were selected",
        target_eol,
        "through a computer ballot system drawn from over 100,000 company",
        target_eol,
        "and 50,000,000 individual email addresses from all over the world.",
        target_eol,
        "It is a promotional program aimed at encouraging internet users;",
        target_eol,
      ]

      svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))

      expected_output = [
        'U %s\n' % sbox.ospath('A/mu'),
      ]
      expected_disk = svntest.main.greek_state.copy()
      expected_disk.tweak('A/mu', contents=''.join(mu_contents),
                          props={'svn:eol-style' : target_eol_style})

      expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
      # Each iteration made two commits above, advancing mu by two revs.
      rev += 2
      expected_status.tweak('A/mu', status='M ', wc_rev=rev)

      expected_skip = wc.State('', { })

      svntest.actions.run_and_verify_patch2(wc_dir,
                                            patch_file_path,
                                            expected_output,
                                            expected_disk,
                                            expected_status,
                                            expected_skip,
                                            None, # expected err
                                            1, # check-props
                                            1, # dry-run
                                            keep_eol_style) # keep-eol-style

      # Undo the patch so the next combination starts from a clean WC.
      expected_output = ["Reverted '" + mu_path + "'\n"]
      svntest.actions.run_and_verify_svn(expected_output, [], 'revert', '-R', wc_dir)
+
+def patch_with_svn_eol_style_uncommitted(sbox):
+ "patch target with uncommitted svn:eol-style"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ mu_path = sbox.ospath('A/mu')
+
+ # CRLF is a string that will match a CRLF sequence read from a text file.
+ # ### On Windows, we assume CRLF will be read as LF, so it's a poor test.
+ if os.name == 'nt':
+ crlf = '\n'
+ else:
+ crlf = '\r\n'
+
+ # Strict EOL style matching breaks Windows tests at least with Python 2
+ keep_eol_style = not svntest.main.is_os_windows()
+
+ # Like patch_with_svn_eol_style, but svn:eol-style is only set in the
+ # working copy and never committed.
+ eols = [crlf, '\015', '\n', '\012']
+ eol_styles = ['CRLF', 'CR', 'native', 'LF']
+ for target_eol, target_eol_style in zip(eols, eol_styles):
+ for patch_eol in eols:
+ mu_contents = [
+ "We wish to congratulate you over your email success in our computer",
+ '\n',
+ "Balloting. This is a Millennium Scientific Electronic Computer Draw",
+ '\n',
+ "in which email addresses were used. All participants were selected",
+ '\n',
+ "through a computer ballot system drawn from over 100,000 company",
+ '\n',
+ "and 50,000,000 individual email addresses from all over the world.",
+ '\n',
+ "It is a promotional program aimed at encouraging internet users;",
+ '\n',
+ ]
+
+ # Set mu contents
+ svntest.main.file_write(mu_path, ''.join(mu_contents))
+ svntest.main.run_svn(None, 'propset', 'svn:eol-style', target_eol_style,
+ mu_path)
+
+ unidiff_patch = [
+ "Index: mu",
+ patch_eol,
+ "===================================================================",
+ patch_eol,
+ "--- A/mu\t(revision 0)",
+ patch_eol,
+ "+++ A/mu\t(revision 0)",
+ patch_eol,
+ "@@ -1,5 +1,6 @@",
+ patch_eol,
+ " We wish to congratulate you over your email success in our computer",
+ patch_eol,
+ " Balloting. This is a Millennium Scientific Electronic Computer Draw",
+ patch_eol,
+ "+A new line here",
+ patch_eol,
+ " in which email addresses were used. All participants were selected",
+ patch_eol,
+ " through a computer ballot system drawn from over 100,000 company",
+ patch_eol,
+ " and 50,000,000 individual email addresses from all over the world.",
+ patch_eol,
+ ]
+
+ # Expected result: the EOLs follow the (uncommitted) svn:eol-style,
+ # even though the working file was written with plain '\n'.
+ mu_contents = [
+ "We wish to congratulate you over your email success in our computer",
+ target_eol,
+ "Balloting. This is a Millennium Scientific Electronic Computer Draw",
+ target_eol,
+ "A new line here",
+ target_eol,
+ "in which email addresses were used. All participants were selected",
+ target_eol,
+ "through a computer ballot system drawn from over 100,000 company",
+ target_eol,
+ "and 50,000,000 individual email addresses from all over the world.",
+ target_eol,
+ "It is a promotional program aimed at encouraging internet users;",
+ target_eol,
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ expected_output = wc.State(wc_dir, {
+ 'A/mu' : Item(status='U '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu', contents=''.join(mu_contents),
+ props={'svn:eol-style' : target_eol_style})
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ # 'MM': both text and property modifications; nothing was committed
+ # (read-only sbox), so the working revision stays at 1.
+ expected_status.tweak('A/mu', status='MM', wc_rev=1)
+
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_patch2(wc_dir,
+ patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1, # dry-run
+ keep_eol_style) # keep-eol-style
+
+ # Clean up before the next combination.
+ expected_output = ["Reverted '" + mu_path + "'\n"]
+ svntest.actions.run_and_verify_svn(expected_output, [], 'revert', '-R', wc_dir)
+
+def patch_with_ignore_whitespace(sbox):
+ "ignore whitespace when patching"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ mu_path = sbox.ospath('A/mu')
+
+ # Target contents with significant whitespace: trailing spaces and
+ # embedded tabs that the patch below will NOT reproduce.
+ mu_contents = [
+ "Dear internet user,\n",
+ "\n",
+ "We wish to congratulate you over your email success in our computer\n",
+ "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+ "in which email addresses were used. All participants were selected\n",
+ "through a computer ballot system drawn from over 100,000 company \n",
+ "and 50,000,000\t\tindividual email addresses from all over the world. \n",
+ " \n",
+ "Your email address drew and have won the sum of 750,000 Euros\n",
+ "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+ "file with\n",
+ " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+ " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+ " BATCH NUMBERS :\n",
+ " EULO/1007/444/606/08;\n",
+ " SERIAL NUMBER: 45327\n",
+ "and PROMOTION DATE: 13th June. 2009\n",
+ "\n",
+ "To claim your winning prize, you are to contact the appointed\n",
+ "agent below as soon as possible for the immediate release of your\n",
+ "winnings with the below details.\n",
+ "\n",
+ "Again, we wish to congratulate you over your email success in our\n"
+ "computer Balloting.\n"
+ ]
+
+ # Set mu contents
+ svntest.main.file_write(mu_path, ''.join(mu_contents))
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Apply patch with leading and trailing spaces removed and tabs transformed
+ # to spaces. The patch should match and the hunks should be written to the
+ # target as-is.
+
+ unidiff_patch = [
+ "Index: A/mu\n",
+ "===================================================================\n",
+ "--- A/mu.orig 2009-06-24 15:23:55.000000000 +0100\n",
+ "+++ A/mu 2009-06-24 15:21:23.000000000 +0100\n",
+ "@@ -6,6 +6,9 @@\n",
+ "through a computer ballot system drawn from over 100,000 company\n",
+ "and 50,000,000 individual email addresses from all over the world.\n",
+ "\n",
+ "+It is a promotional program aimed at encouraging internet users;\n",
+ "+therefore you do not need to buy ticket to enter for it.\n",
+ "+\n",
+ "Your email address drew and have won the sum of 750,000 Euros\n",
+ "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+ "file with\n",
+ "@@ -14,11 +17,8 @@\n",
+ "BATCH NUMBERS :\n",
+ "EULO/1007/444/606/08;\n",
+ "SERIAL NUMBER: 45327\n",
+ "-and PROMOTION DATE: 13th June. 2009\n",
+ "+and PROMOTION DATE: 14th June. 2009\n",
+ "\n",
+ "To claim your winning prize, you are to contact the appointed\n",
+ "agent below as soon as possible for the immediate release of your\n",
+ "winnings with the below details.\n",
+ "-\n",
+ "-Again, we wish to congratulate you over your email success in our\n",
+ "-computer Balloting.\n",
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ # NOTE(review): gamma_contents, iota_contents and new_contents are not
+ # referenced anywhere in this test; they look like leftovers from a
+ # copied test (cf. patch_same_twice).
+ gamma_contents = "It is the file 'gamma'.\n"
+ iota_contents = "This is the file 'iota'.\nSome more bytes\n"
+ new_contents = "new\n"
+ mu_contents = [
+ "Dear internet user,\n",
+ "\n",
+ "We wish to congratulate you over your email success in our computer\n",
+ "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+ "in which email addresses were used. All participants were selected\n",
+ "through a computer ballot system drawn from over 100,000 company\n",
+ "and 50,000,000 individual email addresses from all over the world.\n",
+ "\n",
+ "It is a promotional program aimed at encouraging internet users;\n",
+ "therefore you do not need to buy ticket to enter for it.\n",
+ "\n",
+ "Your email address drew and have won the sum of 750,000 Euros\n",
+ "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+ "file with\n",
+ " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+ " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+ "BATCH NUMBERS :\n",
+ "EULO/1007/444/606/08;\n",
+ "SERIAL NUMBER: 45327\n",
+ "and PROMOTION DATE: 14th June. 2009\n",
+ "\n",
+ "To claim your winning prize, you are to contact the appointed\n",
+ "agent below as soon as possible for the immediate release of your\n",
+ "winnings with the below details.\n",
+ ]
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/mu'),
+ ]
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu', contents=''.join(mu_contents))
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=2)
+
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1, # dry-run
+ "--ignore-whitespace",)
+
+def patch_replace_locally_deleted_file(sbox):
+ "patch that replaces a locally deleted file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ mu_path = sbox.ospath('A/mu')
+
+ mu_contents = [
+ "Dear internet user,\n",
+ "\n",
+ "We wish to congratulate you over your email success in our computer\n",
+ "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+ "in which email addresses were used. All participants were selected\n",
+ "through a computer ballot system drawn from over 100,000 company\n",
+ "and 50,000,000 individual email addresses from all over the world.\n",
+ "\n",
+ "Your email address drew and have won the sum of 750,000 Euros\n",
+ "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+ "file with\n",
+ " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+ " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+ " BATCH NUMBERS :\n",
+ " EULO/1007/444/606/08;\n",
+ " SERIAL NUMBER: 45327\n",
+ "and PROMOTION DATE: 13th June. 2009\n",
+ "\n",
+ "To claim your winning prize, you are to contact the appointed\n",
+ "agent below as soon as possible for the immediate release of your\n",
+ "winnings with the below details.\n",
+ "\n",
+ "Again, we wish to congratulate you over your email success in our\n"
+ "computer Balloting.\n"
+ ]
+
+ # Set mu contents
+ svntest.main.file_write(mu_path, ''.join(mu_contents))
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Locally delete mu
+ svntest.main.run_svn(None, 'rm', mu_path)
+
+ # Apply patch that re-creates mu
+
+ unidiff_patch = [
+ "===================================================================\n",
+ "--- A/mu.orig 2009-06-24 15:23:55.000000000 +0100\n",
+ "+++ A/mu 2009-06-24 15:21:23.000000000 +0100\n",
+ "@@ -0,0 +1 @@\n",
+ "+new\n",
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ mu_contents = "new\n"
+
+ expected_output = [
+ 'A %s\n' % mu_path,
+ ]
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu', contents=''.join(mu_contents))
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ # 'R ': the locally deleted file is replaced (scheduled delete + add)
+ # rather than plainly added.
+ expected_status.tweak('A/mu', status='R ', wc_rev=2)
+
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1) # dry-run
+# Regression test for #3643
+def patch_no_eol_at_eof(sbox):
+ "patch with no eol at eof"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ iota_path = sbox.ospath('iota')
+
+ # The committed target deliberately ends without a trailing newline.
+ iota_contents = [
+ "One line\n",
+ "Another line\n",
+ "A third line \n",
+ "This is the file 'iota'.\n",
+ "A line after\n",
+ "Another line after\n",
+ "The last line with missing eol",
+ ]
+
+ svntest.main.file_write(iota_path, ''.join(iota_contents))
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ # The patch's context line for the last line carries an EOL even though
+ # the target's last line has none; the patch should still apply and the
+ # result gains a final newline (see expected iota_contents below).
+ unidiff_patch = [
+ "--- iota\t(revision 1)\n",
+ "+++ iota\t(working copy)\n",
+ "@@ -1,7 +1,7 @@\n",
+ " One line\n",
+ " Another line\n",
+ " A third line \n",
+ "-This is the file 'iota'.\n",
+ "+It is the file 'iota'.\n",
+ " A line after\n",
+ " Another line after\n",
+ " The last line with missing eol\n",
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ iota_contents = [
+ "One line\n",
+ "Another line\n",
+ "A third line \n",
+ "It is the file 'iota'.\n",
+ "A line after\n",
+ "Another line after\n",
+ "The last line with missing eol\n",
+ ]
+ expected_output = [
+ 'U %s\n' % sbox.ospath('iota'),
+ ]
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota', contents=''.join(iota_contents))
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', status='M ', wc_rev=2)
+
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1) # dry-run
+
+def patch_with_properties(sbox):
+ "patch with properties"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ iota_path = sbox.ospath('iota')
+
+ modified_prop_contents = "This is the property 'modified'.\n"
+ deleted_prop_contents = "This is the property 'deleted'.\n"
+
+ # Set iota prop contents
+ svntest.main.run_svn(None, 'propset', 'modified', modified_prop_contents,
+ iota_path)
+ svntest.main.run_svn(None, 'propset', 'deleted', deleted_prop_contents,
+ iota_path)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ # Apply patch
+
+ # The patch contains property changes only (no text hunks): it modifies
+ # 'modified', adds 'added' and deletes 'deleted'.
+ unidiff_patch = [
+ "Index: iota\n",
+ "===================================================================\n",
+ "--- iota\t(revision 1)\n",
+ "+++ iota\t(working copy)\n",
+ "Property changes on: iota\n",
+ "-------------------------------------------------------------------\n",
+ "Modified: modified\n",
+ "## -1 +1 ##\n",
+ "-This is the property 'modified'.\n",
+ "+The property 'modified' has changed.\n",
+ "Added: added\n",
+ "## -0,0 +1 ##\n",
+ "+This is the property 'added'.\n",
+ "Deleted: deleted\n",
+ "## -1 +0,0 ##\n",
+ "-This is the property 'deleted'.\n",
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch), 'wb')
+
+ modified_prop_contents = "The property 'modified' has changed.\n"
+ added_prop_contents = "This is the property 'added'.\n"
+
+ expected_output = [
+ ' U %s\n' % sbox.ospath('iota'),
+ ]
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota', props={'modified' : modified_prop_contents,
+ 'added' : added_prop_contents})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', status=' M', wc_rev='2')
+
+ expected_skip = wc.State(wc_dir, { })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+ # And repeat
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(status=' G')
+ })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+
+ # Reverse
+ expected_output.tweak('iota', status=' U')
+ expected_status.tweak('iota', status='  ')
+ expected_disk.tweak('iota',
+ props={'deleted': "This is the property 'deleted'.\n",
+ 'modified': "This is the property 'modified'.\n"})
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True,
+ '--reverse-diff')
+
+ # Repeat
+ expected_output.tweak('iota', status=' G')
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True,
+ '--reverse-diff')
+
+ # And now try against a not existing target
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', '--force', sbox.ospath('iota'))
+ expected_output.remove('iota')
+ expected_disk.remove('iota')
+ expected_status.tweak('iota', status='D ')
+ expected_skip.add({
+ 'iota' : Item(verb='Skipped'),
+ })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ [], True, True)
+
+
+def patch_same_twice(sbox):
+ "apply the same patch twice"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ mu_path = sbox.ospath('A/mu')
+ beta_path = sbox.ospath('A/B/E/beta')
+
+ mu_contents = [
+ "Dear internet user,\n",
+ "\n",
+ "We wish to congratulate you over your email success in our computer\n",
+ "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+ "in which email addresses were used. All participants were selected\n",
+ "through a computer ballot system drawn from over 100,000 company\n",
+ "and 50,000,000 individual email addresses from all over the world.\n",
+ "\n",
+ "Your email address drew and have won the sum of 750,000 Euros\n",
+ "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+ "file with\n",
+ " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+ " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+ " BATCH NUMBERS :\n",
+ " EULO/1007/444/606/08;\n",
+ " SERIAL NUMBER: 45327\n",
+ "and PROMOTION DATE: 13th June. 2009\n",
+ "\n",
+ "To claim your winning prize, you are to contact the appointed\n",
+ "agent below as soon as possible for the immediate release of your\n",
+ "winnings with the below details.\n",
+ "\n",
+ "Again, we wish to congratulate you over your email success in our\n"
+ "computer Balloting.\n"
+ ]
+
+ # Set mu contents
+ svntest.main.file_write(mu_path, ''.join(mu_contents))
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Apply patch
+
+ # One patch touching five targets: text mods to gamma, iota and mu,
+ # an added file 'new', and a deletion of beta.
+ unidiff_patch = [
+ "Index: A/D/gamma\n",
+ "===================================================================\n",
+ "--- A/D/gamma\t(revision 1)\n",
+ "+++ A/D/gamma\t(working copy)\n",
+ "@@ -1 +1 @@\n",
+ "-This is the file 'gamma'.\n",
+ "+It is the file 'gamma'.\n",
+ "Index: iota\n",
+ "===================================================================\n",
+ "--- iota\t(revision 1)\n",
+ "+++ iota\t(working copy)\n",
+ "@@ -1 +1,2 @@\n",
+ " This is the file 'iota'.\n",
+ "+Some more bytes\n",
+ "\n",
+ "Index: new\n",
+ "===================================================================\n",
+ "--- new (revision 0)\n",
+ "+++ new (revision 0)\n",
+ "@@ -0,0 +1 @@\n",
+ "+new\n",
+ "\n",
+ "--- A/mu.orig 2009-06-24 15:23:55.000000000 +0100\n",
+ "+++ A/mu 2009-06-24 15:21:23.000000000 +0100\n",
+ "@@ -6,6 +6,9 @@\n",
+ " through a computer ballot system drawn from over 100,000 company\n",
+ " and 50,000,000 individual email addresses from all over the world.\n",
+ " \n",
+ "+It is a promotional program aimed at encouraging internet users;\n",
+ "+therefore you do not need to buy ticket to enter for it.\n",
+ "+\n",
+ " Your email address drew and have won the sum of 750,000 Euros\n",
+ " ( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+ " file with\n",
+ "@@ -14,11 +17,8 @@\n",
+ " BATCH NUMBERS :\n",
+ " EULO/1007/444/606/08;\n",
+ " SERIAL NUMBER: 45327\n",
+ "-and PROMOTION DATE: 13th June. 2009\n",
+ "+and PROMOTION DATE: 14th June. 2009\n",
+ " \n",
+ " To claim your winning prize, you are to contact the appointed\n",
+ " agent below as soon as possible for the immediate release of your\n",
+ " winnings with the below details.\n",
+ "-\n",
+ "-Again, we wish to congratulate you over your email success in our\n",
+ "-computer Balloting.\n",
+ "Index: A/B/E/beta\n",
+ "===================================================================\n",
+ "--- A/B/E/beta (revision 1)\n",
+ "+++ A/B/E/beta (working copy)\n",
+ "@@ -1 +0,0 @@\n",
+ "-This is the file 'beta'.\n",
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ gamma_contents = "It is the file 'gamma'.\n"
+ iota_contents = "This is the file 'iota'.\nSome more bytes\n"
+ new_contents = "new\n"
+ mu_contents = [
+ "Dear internet user,\n",
+ "\n",
+ "We wish to congratulate you over your email success in our computer\n",
+ "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+ "in which email addresses were used. All participants were selected\n",
+ "through a computer ballot system drawn from over 100,000 company\n",
+ "and 50,000,000 individual email addresses from all over the world.\n",
+ "\n",
+ "It is a promotional program aimed at encouraging internet users;\n",
+ "therefore you do not need to buy ticket to enter for it.\n",
+ "\n",
+ "Your email address drew and have won the sum of 750,000 Euros\n",
+ "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+ "file with\n",
+ " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+ " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+ " BATCH NUMBERS :\n",
+ " EULO/1007/444/606/08;\n",
+ " SERIAL NUMBER: 45327\n",
+ "and PROMOTION DATE: 14th June. 2009\n",
+ "\n",
+ "To claim your winning prize, you are to contact the appointed\n",
+ "agent below as soon as possible for the immediate release of your\n",
+ "winnings with the below details.\n",
+ ]
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/D/gamma'),
+ 'U %s\n' % sbox.ospath('iota'),
+ 'A %s\n' % sbox.ospath('new'),
+ 'U %s\n' % sbox.ospath('A/mu'),
+ 'D %s\n' % beta_path,
+ ]
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/D/gamma', contents=gamma_contents)
+ expected_disk.tweak('iota', contents=iota_contents)
+ expected_disk.add({'new' : Item(contents=new_contents)})
+ expected_disk.tweak('A/mu', contents=''.join(mu_contents))
+ expected_disk.remove('A/B/E/beta')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/gamma', status='M ')
+ expected_status.tweak('iota', status='M ')
+ expected_status.add({'new' : Item(status='A ', wc_rev=0)})
+ expected_status.tweak('A/mu', status='M ', wc_rev=2)
+ expected_status.tweak('A/B/E/beta', status='D ')
+
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1) # dry-run
+ # apply the patch again
+ # Every hunk must be detected as already applied ('G' notifications);
+ # disk and status must be unchanged from the first application.
+ expected_output = [
+ 'G %s\n' % sbox.ospath('A/D/gamma'),
+ '> hunk @@ -1,1 +1,1 @@ already applied\n',
+ 'G %s\n' % sbox.ospath('iota'),
+ # The iota patch inserts a line after the first line in the file,
+ # with no trailing context. Originally, Subversion applied this patch
+ # multiple times, which matched the behaviour of Larry Wall's patch
+ # implementation.
+ '> hunk @@ -1,1 +1,2 @@ already applied\n',
+ 'G %s\n' % sbox.ospath('new'),
+ '> hunk @@ -0,0 +1,1 @@ already applied\n',
+ 'G %s\n' % sbox.ospath('A/mu'),
+ '> hunk @@ -6,6 +6,9 @@ already applied\n',
+ '> hunk @@ -14,11 +17,8 @@ already applied\n',
+ 'G %s\n' % sbox.ospath('A/B/E/beta'),
+ '> hunk @@ -1,1 +0,0 @@ already applied\n',
+ ]
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1) # dry-run
+
+def patch_dir_properties(sbox):
+ "patch with dir properties"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ B_path = sbox.ospath('A/B')
+
+ modified_prop_contents = "This is the property 'modified'.\n"
+ deleted_prop_contents = "This is the property 'deleted'.\n"
+
+ # Set the properties
+ svntest.main.run_svn(None, 'propset', 'modified', modified_prop_contents,
+ wc_dir)
+ svntest.main.run_svn(None, 'propset', 'deleted', deleted_prop_contents,
+ B_path)
+ expected_output = svntest.wc.State(wc_dir, {
+ '.' : Item(verb='Sending'),
+ 'A/B' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('', wc_rev=2)
+ expected_status.tweak('A/B', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ # Apply patch
+
+ # Property-only changes against two directory targets: the WC root
+ # and A/B.
+ unidiff_patch = [
+ "Index: .\n",
+ "===================================================================\n",
+ "--- .\t(revision 1)\n",
+ "+++ .\t(working copy)\n",
+ "\n",
+ "Property changes on: .\n",
+ "-------------------------------------------------------------------\n",
+ "Modified: modified\n",
+ "## -1 +1 ##\n",
+ "-This is the property 'modified'.\n",
+ "+The property 'modified' has changed.\n",
+ "Added: svn:ignore\n",
+ "## -0,0 +1,3 ##\n",
+ "+*.o\n",
+ "+.libs\n",
+ "+*.lo\n",
+ "Index: A/B\n",
+ "===================================================================\n",
+ "--- A/B\t(revision 1)\n",
+ "+++ A/B\t(working copy)\n",
+ "\n",
+ "Property changes on: A/B\n",
+ "-------------------------------------------------------------------\n",
+ "Deleted: deleted\n",
+ "## -1 +0,0 ##\n",
+ "-This is the property 'deleted'.\n",
+ "Added: svn:executable\n",
+ "## -0,0 +1 ##\n",
+ "+*\n",
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ modified_prop_contents = "The property 'modified' has changed.\n"
+ ignore_prop_contents = "*.o\n.libs\n*.lo\n"
+
+ # The svn:executable hunk is expected to be rejected on A/B — presumably
+ # because svn:executable is not valid on a directory — producing a prop
+ # conflict and an A/B.svnpatch.rej file; confirm against svn patch docs.
+ expected_output = [
+ ' U %s\n' % wc_dir,
+ ' C %s\n' % sbox.ospath('A/B'),
+ '> rejected hunk ## -0,0 +1,1 ## (svn:executable)\n',
+ ] + svntest.main.summary_of_conflicts(prop_conflicts=1)
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ '' : Item(props={'modified' : modified_prop_contents,
+ 'svn:ignore' : ignore_prop_contents}),
+ 'A/B.svnpatch.rej' : Item(contents="--- A/B\n+++ A/B\n" +
+ "Property: svn:executable\n" +
+ "## -0,0 +1,1 ##\n+*\n"),
+ })
+ expected_disk.tweak('A/B', props={})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('', status=' M', wc_rev=2)
+ expected_status.tweak('A/B', status=' M', wc_rev=2)
+
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1) # dry-run
+
+def patch_add_path_with_props(sbox):
+ "patch that adds paths with props"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ iota_path = sbox.ospath('iota')
+
+ # Apply patch that adds two files, one of which is empty.
+ # Both files have properties.
+
+ unidiff_patch = [
+ "Index: new\n",
+ "===================================================================\n",
+ "--- new\t(revision 0)\n",
+ "+++ new\t(working copy)\n",
+ "@@ -0,0 +1 @@\n",
+ "+This is the file 'new'\n",
+ "\n",
+ "Property changes on: new\n",
+ "-------------------------------------------------------------------\n",
+ "Added: added\n",
+ "## -0,0 +1 ##\n",
+ "+This is the property 'added'.\n",
+ "Index: X\n",
+ "===================================================================\n",
+ "--- X\t(revision 0)\n",
+ "+++ X\t(working copy)\n",
+ "\n",
+ "Property changes on: X\n",
+ "-------------------------------------------------------------------\n",
+ "Added: added\n",
+ "## -0,0 +1 ##\n",
+ "+This is the property 'added'.\n",
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ added_prop_contents = "This is the property 'added'.\n"
+
+ # 'X' has no text hunks, only a property change, so it must be added
+ # as an empty file carrying the property.
+ expected_output = [
+ 'A %s\n' % sbox.ospath('new'),
+ 'A %s\n' % sbox.ospath('X'),
+ ]
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({'new': Item(contents="This is the file 'new'\n",
+ props={'added' : added_prop_contents})})
+ expected_disk.add({'X': Item(contents="",
+ props={'added' : added_prop_contents})})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({'new': Item(status='A ', wc_rev='0')})
+ expected_status.add({'X': Item(status='A ', wc_rev='0')})
+
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1) # dry-run
+
+def patch_prop_offset(sbox):
+  "property patch with offset searching"
+
+  # Test plan: commit two properties on iota whose values are crafted so
+  # that the patch's property hunks can only apply at an offset from their
+  # stated line numbers, then verify 'svn patch' applies them and reports
+  # each offset in its output.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  patch_file_path = os.path.abspath(sbox.get_tempname('my.patch'))
+  iota_path = sbox.ospath('iota')
+
+  prop1_content = ''.join([
+    "Dear internet user,\n",
+    # The missing line here will cause the first hunk to match early
+    "We wish to congratulate you over your email success in our computer\n",
+    "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+    "in which email addresses were used. All participants were selected\n",
+    "through a computer ballot system drawn from over 100,000 company\n",
+    "and 50,000,000 individual email addresses from all over the world.\n",
+    "\n",
+    "Your email address drew and have won the sum of 750,000 Euros\n",
+    "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+    "file with\n",
+    " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+    "These extra lines will cause the second hunk to match late\n",
+    "These extra lines will cause the second hunk to match late\n",
+    "These extra lines will cause the second hunk to match late\n",
+    "These extra lines will cause the second hunk to match late\n",
+    "These extra lines will cause the second hunk to match late\n",
+    " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+    " BATCH NUMBERS :\n",
+    " EULO/1007/444/606/08;\n",
+    " SERIAL NUMBER: 45327\n",
+    "and PROMOTION DATE: 13th June. 2009\n",
+    "\n",
+    "To claim your winning prize, you are to contact the appointed\n",
+    "agent below as soon as possible for the immediate release of your\n",
+    "winnings with the below details.\n",
+    "\n",
+    # NOTE: no comma after the next line -- the two adjacent string
+    # literals concatenate into one list element; harmless under ''.join.
+    "Again, we wish to congratulate you over your email success in our\n"
+    "computer Balloting.\n",
+    ])
+
+  # prop2's content will make both a late and early match possible.
+  # The hunk to be applied is replicated here for reference:
+  # ## -5,6 +5,7 ##
+  # property
+  # property
+  # property
+  # +x
+  # property
+  # property
+  # property
+  #
+  # This hunk wants to be applied at line 5, but that isn't
+  # possible because line 8 ("zzz") does not match "property".
+  # The early match happens at line 2 (offset 3 = 5 - 2).
+  # The late match happens at line 9 (offset 4 = 9 - 5).
+  # Subversion will pick the early match in this case because it
+  # is closer to line 5.
+  prop2_content = ''.join([
+    "property\n",
+    "property\n",
+    "property\n",
+    "property\n",
+    "property\n",
+    "property\n",
+    "property\n",
+    "zzz\n",
+    "property\n",
+    "property\n",
+    "property\n",
+    "property\n",
+    "property\n",
+    "property\n",
+    # NOTE: no comma here either -- last element, still harmless.
+    "property\n"
+    ])
+
+  # Set iota prop contents
+  svntest.main.run_svn(None, 'propset', 'prop1', prop1_content,
+                       iota_path)
+  svntest.main.run_svn(None, 'propset', 'prop2', prop2_content,
+                       iota_path)
+  expected_output = svntest.wc.State(wc_dir, {
+    'iota' : Item(verb='Sending'),
+    })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('iota', wc_rev=2)
+  # Commit the baseline (r2) so the patch is applied to clean content.
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  # Apply patch
+
+  unidiff_patch = [
+    "Index: iota\n",
+    "===================================================================\n",
+    "--- iota (revision XYZ)\n",
+    "+++ iota (working copy)\n",
+    "\n",
+    "Property changes on: iota\n",
+    "-------------------------------------------------------------------\n",
+    "Modified: prop1\n",
+    "## -6,6 +6,9 ##\n",
+    " through a computer ballot system drawn from over 100,000 company\n",
+    " and 50,000,000 individual email addresses from all over the world.\n",
+    " \n",
+    "+It is a promotional program aimed at encouraging internet users;\n",
+    "+therefore you do not need to buy ticket to enter for it.\n",
+    "+\n",
+    " Your email address drew and have won the sum of 750,000 Euros\n",
+    " ( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+    " file with\n",
+    "## -14,11 +17,8 ##\n",
+    " BATCH NUMBERS :\n",
+    " EULO/1007/444/606/08;\n",
+    " SERIAL NUMBER: 45327\n",
+    "-and PROMOTION DATE: 13th June. 2009\n",
+    "+and PROMOTION DATE: 14th June. 2009\n",
+    " \n",
+    " To claim your winning prize, you are to contact the appointed\n",
+    " agent below as soon as possible for the immediate release of your\n",
+    " winnings with the below details.\n",
+    "-\n",
+    "-Again, we wish to congratulate you over your email success in our\n",
+    "-computer Balloting.\n",
+    "Modified: prop2\n",
+    "## -5,6 +5,7 ##\n",
+    " property\n",
+    " property\n",
+    " property\n",
+    "+x\n",
+    " property\n",
+    " property\n",
+    " property\n",
+  ]
+
+  svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+  # Expected prop1 value after patching: both hunks applied, the first
+  # at offset -1, the second at offset +4.
+  prop1_content = ''.join([
+    "Dear internet user,\n",
+    "We wish to congratulate you over your email success in our computer\n",
+    "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+    "in which email addresses were used. All participants were selected\n",
+    "through a computer ballot system drawn from over 100,000 company\n",
+    "and 50,000,000 individual email addresses from all over the world.\n",
+    "\n",
+    "It is a promotional program aimed at encouraging internet users;\n",
+    "therefore you do not need to buy ticket to enter for it.\n",
+    "\n",
+    "Your email address drew and have won the sum of 750,000 Euros\n",
+    "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+    "file with\n",
+    " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+    "These extra lines will cause the second hunk to match late\n",
+    "These extra lines will cause the second hunk to match late\n",
+    "These extra lines will cause the second hunk to match late\n",
+    "These extra lines will cause the second hunk to match late\n",
+    "These extra lines will cause the second hunk to match late\n",
+    " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+    " BATCH NUMBERS :\n",
+    " EULO/1007/444/606/08;\n",
+    " SERIAL NUMBER: 45327\n",
+    "and PROMOTION DATE: 14th June. 2009\n",
+    "\n",
+    "To claim your winning prize, you are to contact the appointed\n",
+    "agent below as soon as possible for the immediate release of your\n",
+    "winnings with the below details.\n",
+    ])
+
+  # Expected prop2 value: the hunk landed at the early match (offset -3).
+  prop2_content = ''.join([
+    "property\n",
+    "property\n",
+    "property\n",
+    "property\n",
+    "x\n",
+    "property\n",
+    "property\n",
+    "property\n",
+    "zzz\n",
+    "property\n",
+    "property\n",
+    "property\n",
+    "property\n",
+    "property\n",
+    "property\n",
+    "property\n",
+    ])
+
+  # Run the patch from inside the working copy; subsequent expected
+  # states therefore use '' (cwd) rather than wc_dir.
+  os.chdir(wc_dir)
+
+  # Changing two properties so output order not well defined.
+  expected_output = svntest.verify.UnorderedOutput([
+    ' U iota\n',
+    '> applied hunk ## -6,6 +6,9 ## with offset -1 (prop1)\n',
+    '> applied hunk ## -14,11 +17,8 ## with offset 4 (prop1)\n',
+    '> applied hunk ## -5,6 +5,7 ## with offset -3 (prop2)\n',
+  ])
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.tweak('iota', props = {'prop1' : prop1_content,
+                                       'prop2' : prop2_content})
+
+  expected_status = svntest.actions.get_virginal_state('', 1)
+  expected_status.tweak('iota', status=' M', wc_rev=2)
+
+  expected_skip = wc.State('', { })
+
+  svntest.actions.run_and_verify_patch('', patch_file_path,
+                                       expected_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       None, # expected err
+                                       1, # check-props
+                                       1) # dry-run
+
+def patch_prop_with_fuzz(sbox):
+  "property patch with fuzz"
+
+  # Test plan: commit a property on A/mu whose content deviates from the
+  # patch's context lines, then verify 'svn patch' applies the property
+  # hunks and reports the expected fuzz (and offset) amounts.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  patch_file_path = sbox.get_tempname('my.patch')
+
+  mu_path = sbox.ospath('A/mu')
+
+  # We have replaced a couple of lines to cause fuzz. Those lines contains
+  # the word fuzz
+  prop_contents = ''.join([
+    "Line replaced for fuzz = 1\n",
+    "\n",
+    "We wish to congratulate you over your email success in our computer\n",
+    "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+    "in which email addresses were used. All participants were selected\n",
+    "through a computer ballot system drawn from over 100,000 company\n",
+    "and 50,000,000 individual email addresses from all over the world.\n",
+    "Line replaced for fuzz = 2 with only the second context line changed\n",
+    "Your email address drew and have won the sum of 750,000 Euros\n",
+    "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+    "file with\n",
+    " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+    " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+    " BATCH NUMBERS :\n",
+    " EULO/1007/444/606/08;\n",
+    " SERIAL NUMBER: 45327\n",
+    "and PROMOTION DATE: 13th June. 2009\n",
+    "\n",
+    "This line is inserted to cause an offset of +1\n",
+    "To claim your winning prize, you are to contact the appointed\n",
+    "agent below as soon as possible for the immediate release of your\n",
+    "winnings with the below details.\n",
+    "\n",
+    "Line replaced for fuzz = 2\n",
+    "Line replaced for fuzz = 2\n",
+    ])
+
+  # Set mu prop contents
+  svntest.main.run_svn(None, 'propset', 'prop', prop_contents,
+                       mu_path)
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/mu' : Item(verb='Sending'),
+    })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/mu', wc_rev=2)
+  # Commit the baseline (r2) before patching.
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  unidiff_patch = [
+    "Index: mu\n",
+    "===================================================================\n",
+    "--- A/mu\t(revision 0)\n",
+    "+++ A/mu\t(revision 0)\n",
+    "\n",
+    "Property changes on: mu\n",
+    "Modified: prop\n",
+    "## -1,6 +1,7 ##\n",
+    " Dear internet user,\n",
+    " \n",
+    " We wish to congratulate you over your email success in our computer\n",
+    "+A new line here\n",
+    " Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+    " in which email addresses were used. All participants were selected\n",
+    " through a computer ballot system drawn from over 100,000 company\n",
+    "## -7,7 +8,9 ##\n",
+    " and 50,000,000 individual email addresses from all over the world.\n",
+    " \n",
+    " Your email address drew and have won the sum of 750,000 Euros\n",
+    "+Another new line\n",
+    " ( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+    "+A third new line\n",
+    " file with\n",
+    " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+    " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+    "## -19,6 +20,7 ##\n",
+    " To claim your winning prize, you are to contact the appointed\n",
+    " agent below as soon as possible for the immediate release of your\n",
+    " winnings with the below details.\n",
+    "+A fourth new line\n",
+    " \n",
+    # NOTE: no commas after the next two lines -- the three adjacent
+    # string literals concatenate into a single list element; the last
+    # line intentionally has no trailing newline.
+    " Again, we wish to congratulate you over your email success in our\n"
+    " computer Balloting. [No trailing newline here]"
+  ]
+
+  svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+  # Expected property value after all three hunks applied with fuzz.
+  prop_contents = ''.join([
+    "Line replaced for fuzz = 1\n",
+    "\n",
+    "We wish to congratulate you over your email success in our computer\n",
+    "A new line here\n",
+    "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+    "in which email addresses were used. All participants were selected\n",
+    "through a computer ballot system drawn from over 100,000 company\n",
+    "and 50,000,000 individual email addresses from all over the world.\n",
+    "Line replaced for fuzz = 2 with only the second context line changed\n",
+    "Your email address drew and have won the sum of 750,000 Euros\n",
+    "Another new line\n",
+    "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+    "A third new line\n",
+    "file with\n",
+    " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+    " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+    " BATCH NUMBERS :\n",
+    " EULO/1007/444/606/08;\n",
+    " SERIAL NUMBER: 45327\n",
+    "and PROMOTION DATE: 13th June. 2009\n",
+    "\n",
+    "This line is inserted to cause an offset of +1\n",
+    "To claim your winning prize, you are to contact the appointed\n",
+    "agent below as soon as possible for the immediate release of your\n",
+    "winnings with the below details.\n",
+    "A fourth new line\n",
+    "\n",
+    "Line replaced for fuzz = 2\n",
+    "Line replaced for fuzz = 2\n",
+    ])
+
+  expected_output = [
+    ' U %s\n' % sbox.ospath('A/mu'),
+    '> applied hunk ## -1,6 +1,7 ## with fuzz 1 (prop)\n',
+    '> applied hunk ## -7,7 +8,9 ## with fuzz 2 (prop)\n',
+    '> applied hunk ## -19,6 +20,7 ## with offset 1 and fuzz 2 (prop)\n',
+  ]
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.tweak('A/mu', props = {'prop' : prop_contents})
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/mu', status=' M', wc_rev=2)
+
+  expected_skip = wc.State('', { })
+
+  svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+                                       expected_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       None, # expected err
+                                       1, # check-props
+                                       1) # dry-run
+
<doc_update>
+def patch_git_empty_files(sbox):
+  "patch that contains empty files"
+
+  # Test plan: apply a git-style patch whose entries carry only
+  # 'new file mode' / 'deleted file mode' headers and no text hunks;
+  # 'svn patch' should add an empty 'new' and delete 'iota'.
+  sbox.build(read_only = True)
+  wc_dir = sbox.wc_dir
+  patch_file_path = sbox.get_tempname('my.patch')
+
+  new_path = sbox.ospath('new')
+
+  unidiff_patch = [
+    "Index: new\n",
+    "===================================================================\n",
+    "diff --git a/new b/new\n",
+    "new file mode 100644\n",
+    "Index: iota\n",
+    "===================================================================\n",
+    "diff --git a/iota b/iota\n",
+    "deleted file mode 100644\n",
+  ]
+
+  svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+  expected_output = [
+    'A %s\n' % sbox.ospath('new'),
+    'D %s\n' % sbox.ospath('iota'),
+  ]
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({'new' : Item(contents="")})
+  expected_disk.remove('iota')
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({'new' : Item(status='A ', wc_rev=0)})
+  expected_status.tweak('iota', status='D ')
+
+  expected_skip = wc.State('', { })
+
+  svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+                                       expected_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       None, # expected err
+                                       1, # check-props
+                                       1) # dry-run
+
+def patch_old_target_names(sbox):
+  "patch using old target names"
+
+  # Test plan: the patch's +++ header names 'A/mu.new' while --- names
+  # 'A/mu'; 'svn patch' should fall back to the old (---) target name
+  # and modify A/mu.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  patch_file_path = sbox.get_tempname('my.patch')
+  mu_path = sbox.ospath('A/mu')
+
+  mu_contents = [
+    "Dear internet user,\n",
+    "\n",
+    "We wish to congratulate you over your email success in our computer\n",
+    "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+    "in which email addresses were used. All participants were selected\n",
+    "through a computer ballot system drawn from over 100,000 company\n",
+    "and 50,000,000 individual email addresses from all over the world.\n",
+    "\n",
+    "Your email address drew and have won the sum of 750,000 Euros\n",
+    "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+    "file with\n",
+    " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+    " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+    " BATCH NUMBERS :\n",
+    " EULO/1007/444/606/08;\n",
+    " SERIAL NUMBER: 45327\n",
+    "and PROMOTION DATE: 13th June. 2009\n",
+    "\n",
+    "To claim your winning prize, you are to contact the appointed\n",
+    "agent below as soon as possible for the immediate release of your\n",
+    "winnings with the below details.\n",
+    "\n",
+    # NOTE: no commas after the next two lines -- adjacent string
+    # literals concatenate; harmless since the list is joined below.
+    "Again, we wish to congratulate you over your email success in our\n"
+    "computer Balloting.\n"
+  ]
+
+  # Set mu contents
+  svntest.main.file_write(mu_path, ''.join(mu_contents))
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/mu' : Item(verb='Sending'),
+    })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/mu', wc_rev=2)
+  # Commit the baseline (r2) before patching.
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  # Apply patch
+
+  unidiff_patch = [
+    "--- A/mu 2009-06-24 15:23:55.000000000 +0100\n",
+    "+++ A/mu.new 2009-06-24 15:21:23.000000000 +0100\n",
+    "@@ -6,6 +6,9 @@\n",
+    " through a computer ballot system drawn from over 100,000 company\n",
+    " and 50,000,000 individual email addresses from all over the world.\n",
+    " \n",
+    "+It is a promotional program aimed at encouraging internet users;\n",
+    "+therefore you do not need to buy ticket to enter for it.\n",
+    "+\n",
+    " Your email address drew and have won the sum of 750,000 Euros\n",
+    " ( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+    " file with\n",
+    "@@ -14,11 +17,8 @@\n",
+    " BATCH NUMBERS :\n",
+    " EULO/1007/444/606/08;\n",
+    " SERIAL NUMBER: 45327\n",
+    "-and PROMOTION DATE: 13th June. 2009\n",
+    "+and PROMOTION DATE: 14th June. 2009\n",
+    " \n",
+    " To claim your winning prize, you are to contact the appointed\n",
+    " agent below as soon as possible for the immediate release of your\n",
+    " winnings with the below details.\n",
+    "-\n",
+    "-Again, we wish to congratulate you over your email success in our\n",
+    "-computer Balloting.\n",
+  ]
+
+  svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+  # Expected A/mu contents after both hunks applied to the old target.
+  mu_contents = [
+    "Dear internet user,\n",
+    "\n",
+    "We wish to congratulate you over your email success in our computer\n",
+    "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+    "in which email addresses were used. All participants were selected\n",
+    "through a computer ballot system drawn from over 100,000 company\n",
+    "and 50,000,000 individual email addresses from all over the world.\n",
+    "\n",
+    "It is a promotional program aimed at encouraging internet users;\n",
+    "therefore you do not need to buy ticket to enter for it.\n",
+    "\n",
+    "Your email address drew and have won the sum of 750,000 Euros\n",
+    "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+    "file with\n",
+    " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+    " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+    " BATCH NUMBERS :\n",
+    " EULO/1007/444/606/08;\n",
+    " SERIAL NUMBER: 45327\n",
+    "and PROMOTION DATE: 14th June. 2009\n",
+    "\n",
+    "To claim your winning prize, you are to contact the appointed\n",
+    "agent below as soon as possible for the immediate release of your\n",
+    "winnings with the below details.\n",
+  ]
+
+  expected_output = [
+    'U %s\n' % sbox.ospath('A/mu'),
+  ]
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.tweak('A/mu', contents=''.join(mu_contents))
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/mu', status='M ', wc_rev=2)
+
+  expected_skip = wc.State('', { })
+
+  svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+                                       expected_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       None, # expected err
+                                       1, # check-props
+                                       1) # dry-run
+
+def patch_reverse_revert(sbox):
+  "revert a patch by reverse patching"
+
+  # Test plan: apply a multi-target patch (modify gamma/iota/mu, add
+  # 'new', delete beta), apply it a second time (all 'G '), then apply
+  # it with --reverse-diff twice and check local mods are undone.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  patch_file_path = sbox.get_tempname('my.patch')
+  mu_path = sbox.ospath('A/mu')
+
+  mu_contents_pre_patch = [
+    "Dear internet user,\n",
+    "\n",
+    "We wish to congratulate you over your email success in our computer\n",
+    "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+    "in which email addresses were used. All participants were selected\n",
+    "through a computer ballot system drawn from over 100,000 company\n",
+    "and 50,000,000 individual email addresses from all over the world.\n",
+    "\n",
+    "Your email address drew and have won the sum of 750,000 Euros\n",
+    "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+    "file with\n",
+    " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+    " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+    " BATCH NUMBERS :\n",
+    " EULO/1007/444/606/08;\n",
+    " SERIAL NUMBER: 45327\n",
+    "and PROMOTION DATE: 13th June. 2009\n",
+    "\n",
+    "To claim your winning prize, you are to contact the appointed\n",
+    "agent below as soon as possible for the immediate release of your\n",
+    "winnings with the below details.\n",
+    "\n",
+    # NOTE: no commas after the next two lines -- adjacent string
+    # literals concatenate; harmless since the list is joined below.
+    "Again, we wish to congratulate you over your email success in our\n"
+    "computer Balloting.\n"
+  ]
+
+  # Set mu contents
+  # ('wb' keeps line endings exact so reverse-application matches.)
+  svntest.main.file_write(mu_path, ''.join(mu_contents_pre_patch), 'wb')
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/mu' : Item(verb='Sending'),
+    })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/mu', wc_rev=2)
+  # Commit the baseline (r2) before patching.
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  # Apply patch
+
+  unidiff_patch = [
+    "Index: A/D/gamma\n",
+    "===================================================================\n",
+    "--- A/D/gamma\t(revision 1)\n",
+    "+++ A/D/gamma\t(working copy)\n",
+    "@@ -1 +1 @@\n",
+    "-This is the file 'gamma'.\n",
+    "+It is the file 'gamma'.\n",
+    "Index: iota\n",
+    "===================================================================\n",
+    "--- iota\t(revision 1)\n",
+    "+++ iota\t(working copy)\n",
+    "@@ -1 +1,2 @@\n",
+    " This is the file 'iota'.\n",
+    "+Some more bytes\n",
+    "\n",
+    "Index: new\n",
+    "===================================================================\n",
+    "--- new (revision 0)\n",
+    "+++ new (revision 0)\n",
+    "@@ -0,0 +1 @@\n",
+    "+new\n",
+    "\n",
+    "--- A/mu.orig 2009-06-24 15:23:55.000000000 +0100\n",
+    "+++ A/mu 2009-06-24 15:21:23.000000000 +0100\n",
+    "@@ -6,6 +6,9 @@\n",
+    " through a computer ballot system drawn from over 100,000 company\n",
+    " and 50,000,000 individual email addresses from all over the world.\n",
+    " \n",
+    "+It is a promotional program aimed at encouraging internet users;\n",
+    "+therefore you do not need to buy ticket to enter for it.\n",
+    "+\n",
+    " Your email address drew and have won the sum of 750,000 Euros\n",
+    " ( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+    " file with\n",
+    "@@ -14,11 +17,8 @@\n",
+    " BATCH NUMBERS :\n",
+    " EULO/1007/444/606/08;\n",
+    " SERIAL NUMBER: 45327\n",
+    "-and PROMOTION DATE: 13th June. 2009\n",
+    "+and PROMOTION DATE: 14th June. 2009\n",
+    " \n",
+    " To claim your winning prize, you are to contact the appointed\n",
+    " agent below as soon as possible for the immediate release of your\n",
+    " winnings with the below details.\n",
+    "-\n",
+    "-Again, we wish to congratulate you over your email success in our\n",
+    "-computer Balloting.\n",
+    "Index: A/B/E/beta\n",
+    "===================================================================\n",
+    "--- A/B/E/beta (revision 1)\n",
+    "+++ A/B/E/beta (working copy)\n",
+    "@@ -1 +0,0 @@\n",
+    "-This is the file 'beta'.\n",
+  ]
+
+  svntest.main.file_write(patch_file_path, ''.join(unidiff_patch), 'wb')
+
+  gamma_contents = "It is the file 'gamma'.\n"
+  iota_contents = "This is the file 'iota'.\nSome more bytes\n"
+  new_contents = "new\n"
+  mu_contents_post_patch = [
+    "Dear internet user,\n",
+    "\n",
+    "We wish to congratulate you over your email success in our computer\n",
+    "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+    "in which email addresses were used. All participants were selected\n",
+    "through a computer ballot system drawn from over 100,000 company\n",
+    "and 50,000,000 individual email addresses from all over the world.\n",
+    "\n",
+    "It is a promotional program aimed at encouraging internet users;\n",
+    "therefore you do not need to buy ticket to enter for it.\n",
+    "\n",
+    "Your email address drew and have won the sum of 750,000 Euros\n",
+    "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+    "file with\n",
+    " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+    " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+    " BATCH NUMBERS :\n",
+    " EULO/1007/444/606/08;\n",
+    " SERIAL NUMBER: 45327\n",
+    "and PROMOTION DATE: 14th June. 2009\n",
+    "\n",
+    "To claim your winning prize, you are to contact the appointed\n",
+    "agent below as soon as possible for the immediate release of your\n",
+    "winnings with the below details.\n",
+  ]
+
+  expected_output = wc.State(wc_dir, {
+    'A/D/gamma' : Item(status='U '),
+    'iota' : Item(status='U '),
+    'new' : Item(status='A '),
+    'A/mu' : Item(status='U '),
+    'A/B/E/beta' : Item(status='D '),
+    })
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.tweak('A/D/gamma', contents=gamma_contents)
+  expected_disk.tweak('iota', contents=iota_contents)
+  expected_disk.add({'new' : Item(contents=new_contents)})
+  expected_disk.tweak('A/mu', contents=''.join(mu_contents_post_patch))
+  expected_disk.remove('A/B/E/beta')
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/D/gamma', status='M ')
+  expected_status.tweak('iota', status='M ')
+  expected_status.add({'new' : Item(status='A ', wc_rev=0)})
+  expected_status.tweak('A/mu', status='M ', wc_rev=2)
+  expected_status.tweak('A/B/E/beta', status='D ')
+
+  expected_skip = wc.State(wc_dir, { })
+
+  svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+                                       expected_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       [], True, True)
+
+  # Try again
+  # Re-applying an already-applied patch: every target reports 'G '
+  # (merged) and the on-disk result is unchanged.
+  expected_output.tweak(status='G ')
+  svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+                                       expected_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       [], True, True)
+
+  # Applying the same patch in reverse should undo local mods
+  expected_output = wc.State(wc_dir, {
+    'A/D/gamma' : Item(status='U '),
+    'iota' : Item(status='U '),
+    'new' : Item(status='D '),
+    'A/mu' : Item(status='U '),
+    'A/B/E/beta' : Item(status='A '),
+    })
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.tweak('A/mu', contents=''.join(mu_contents_pre_patch))
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/mu', wc_rev=2)
+
+  ### svn patch should check whether the deleted file has the same
+  ### content as the file added by the patch and revert the deletion
+  ### instead of causing a replacement.
+  expected_status.tweak('A/B/E/beta', status='R ')
+
+  svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+                                       expected_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       [], True, True,
+                                       '--reverse-diff')
+
+  # And again
+  # Reverse patch is also idempotent: second run reports 'G '.
+  expected_output.tweak(status='G ')
+  svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+                                       expected_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       [], True, True,
+                                       '--reverse-diff')
+
+def patch_one_property(sbox, trailing_eol):
+  """Helper. Apply a patch that sets the property 'k' to 'v\n' or to 'v',
+  and check the results."""
+
+  # trailing_eol selects whether the patch carries a
+  # '\ No newline at end of property' marker; used by patch_strip_cwd
+  # and patch_set_prop_no_eol below.
+  sbox.build(read_only = True)
+  wc_dir = sbox.wc_dir
+
+  patch_file_path = sbox.get_tempname('my.patch')
+  mu_path = sbox.ospath('A/mu')
+
+  # Apply patch
+
+  # The patch targets 'subversion/branches/1.6.x'; the test passes
+  # --strip 3 below so the property lands on the WC root ('.').
+  unidiff_patch = [
+    "Index: .\n",
+    "===================================================================\n",
+    "diff --git a/subversion/branches/1.6.x b/subversion/branches/1.6.x\n",
+    "--- a/subversion/branches/1.6.x\t(revision 1033278)\n",
+    "+++ b/subversion/branches/1.6.x\t(working copy)\n",
+    "\n",
+    "Property changes on: subversion/branches/1.6.x\n",
+    "___________________________________________________________________\n",
+    "Modified: svn:mergeinfo\n",
+    " Merged /subversion/trunk:r964349\n",
+    "Added: k\n",
+    "## -0,0 +1 ##\n",
+    "+v\n",
+  ]
+
+  if trailing_eol:
+    value = "v\n"
+  else:
+    value = "v"
+    unidiff_patch += ['\ No newline at end of property\n']
+
+  svntest.main.file_write(patch_file_path, ''.join(unidiff_patch), 'wb')
+
+  expected_output = [
+    ' U %s\n' % os.path.join(wc_dir),
+  ]
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({'': Item(props={'k' : value})})
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('', status=' M')
+
+  expected_skip = wc.State('.', { })
+
+  svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+                                       expected_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       None, # expected err
+                                       1, # check-props
+                                       1, # dry-run
+                                       '--strip', '3')
+
+  # Double-check the raw property value (bytes) independently of the
+  # disk-state comparison above.
+  svntest.actions.check_prop('k', wc_dir, [value.encode()])
+
+def patch_strip_cwd(sbox):
+  "patch --strip propchanges cwd"
+  # Thin wrapper: run the shared helper with a trailing newline on the
+  # property value (trailing_eol=True).
+  return patch_one_property(sbox, True)
+
+@Issue(3814)
+def patch_set_prop_no_eol(sbox):
+  "patch doesn't append newline to properties"
+  # Thin wrapper: run the shared helper without a trailing newline on
+  # the property value (trailing_eol=False); regression for issue #3814.
+  return patch_one_property(sbox, False)
+
+# Regression test for issue #3697
+@Issue(3697)
+def patch_add_symlink(sbox):
+  "patch that adds a symlink"
+
+  # Test plan: apply a patch that creates 'iota_symlink' (content
+  # 'link iota' plus svn:special), re-apply it, then reverse it twice.
+  sbox.build(read_only = True)
+  wc_dir = sbox.wc_dir
+
+  patch_file_path = sbox.get_tempname('my.patch')
+
+  # Apply patch
+
+  # NOTE: two pairs of adjacent string literals below (the '\ No newline'
+  # markers) deliberately have no comma before them and concatenate with
+  # the preceding element.
+  unidiff_patch = [
+    "Index: iota_symlink\n",
+    "===================================================================\n",
+    "--- iota_symlink\t(revision 0)\n",
+    "+++ iota_symlink\t(working copy)\n",
+    "@@ -0,0 +1 @@\n",
+    "+link iota\n",
+    "\\ No newline at end of file\n"
+    "\n",
+    "Property changes on: iota_symlink\n",
+    "-------------------------------------------------------------------\n",
+    "Added: svn:special\n",
+    "## -0,0 +1 ##\n",
+    "+*\n",
+    "\\ No newline at end of property\n"
+  ]
+
+  svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'iota_symlink' : Item(status='A ')
+    })
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({'iota_symlink': Item(contents="This is the file 'iota'.\n",
+                                          props={'svn:special' : '*'})})
+  # On non-POSIX systems the symlink is materialized as a plain file
+  # holding the literal 'link iota' text.
+  if not svntest.main.is_posix_os():
+    expected_disk.tweak('iota_symlink', contents='link iota')
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({'iota_symlink': Item(status='A ', wc_rev='0')})
+
+  expected_skip = wc.State('', { })
+
+  svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+                                       expected_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       [], True, True)
+
+  # And again
+  # Re-applying reports 'GG' (text and prop change both merged).
+  expected_output.tweak('iota_symlink', status='GG')
+  svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+                                       expected_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       [], True, True)
+
+  # Reverse
+  expected_output.tweak('iota_symlink', status='D ')
+  expected_disk.remove('iota_symlink')
+  expected_status.remove('iota_symlink')
+  svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+                                       expected_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       [], True, True,
+                                       '--reverse-diff')
+
+  # And again
+  # Reverse re-application is also a merge ('GG').
+  expected_output.tweak('iota_symlink', status='GG')
+  svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+                                       expected_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       [], True, True,
+                                       '--reverse-diff')
+
+def patch_moved_away(sbox):
+  "patch a file that was moved away"
+
+  # Test plan: commit A/mu, move it to A/mu2 in the WC, then apply a
+  # patch addressed to A/mu; 'svn patch' should follow the move and
+  # modify A/mu2 instead.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  patch_file_path = sbox.get_tempname('my.patch')
+  mu_path = sbox.ospath('A/mu')
+
+  mu_contents = [
+    "Dear internet user,\n",
+    "\n",
+    "We wish to congratulate you over your email success in our computer\n",
+    "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+    "in which email addresses were used. All participants were selected\n",
+    "through a computer ballot system drawn from over 100,000 company\n",
+    "and 50,000,000 individual email addresses from all over the world.\n",
+    "\n",
+    "Your email address drew and have won the sum of 750,000 Euros\n",
+    "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+    "file with\n",
+    " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+    " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+    " BATCH NUMBERS :\n",
+    " EULO/1007/444/606/08;\n",
+    " SERIAL NUMBER: 45327\n",
+    "and PROMOTION DATE: 13th June. 2009\n",
+    "\n",
+    "To claim your winning prize, you are to contact the appointed\n",
+    "agent below as soon as possible for the immediate release of your\n",
+    "winnings with the below details.\n",
+    "\n",
+    # NOTE: no commas after the next two lines -- adjacent string
+    # literals concatenate; harmless since the list is joined below.
+    "Again, we wish to congratulate you over your email success in our\n"
+    "computer Balloting.\n"
+  ]
+
+  # Set mu contents
+  svntest.main.file_write(mu_path, ''.join(mu_contents))
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/mu' : Item(verb='Sending'),
+    })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/mu', wc_rev=2)
+  # Commit the baseline (r2) before moving and patching.
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  # Move mu away
+  sbox.simple_move("A/mu", "A/mu2")
+
+  # Apply patch
+  unidiff_patch = [
+    "--- A/mu.orig 2009-06-24 15:23:55.000000000 +0100\n",
+    "+++ A/mu 2009-06-24 15:21:23.000000000 +0100\n",
+    "@@ -6,6 +6,9 @@\n",
+    " through a computer ballot system drawn from over 100,000 company\n",
+    " and 50,000,000 individual email addresses from all over the world.\n",
+    " \n",
+    "+It is a promotional program aimed at encouraging internet users;\n",
+    "+therefore you do not need to buy ticket to enter for it.\n",
+    "+\n",
+    " Your email address drew and have won the sum of 750,000 Euros\n",
+    " ( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+    " file with\n",
+    "@@ -14,11 +17,8 @@\n",
+    " BATCH NUMBERS :\n",
+    " EULO/1007/444/606/08;\n",
+    " SERIAL NUMBER: 45327\n",
+    "-and PROMOTION DATE: 13th June. 2009\n",
+    "+and PROMOTION DATE: 14th June. 2009\n",
+    " \n",
+    " To claim your winning prize, you are to contact the appointed\n",
+    " agent below as soon as possible for the immediate release of your\n",
+    " winnings with the below details.\n",
+    "-\n",
+    "-Again, we wish to congratulate you over your email success in our\n",
+    "-computer Balloting.\n",
+  ]
+
+  svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+  # Expected contents of the move destination A/mu2 after patching.
+  mu_contents = [
+    "Dear internet user,\n",
+    "\n",
+    "We wish to congratulate you over your email success in our computer\n",
+    "Balloting. This is a Millennium Scientific Electronic Computer Draw\n",
+    "in which email addresses were used. All participants were selected\n",
+    "through a computer ballot system drawn from over 100,000 company\n",
+    "and 50,000,000 individual email addresses from all over the world.\n",
+    "\n",
+    "It is a promotional program aimed at encouraging internet users;\n",
+    "therefore you do not need to buy ticket to enter for it.\n",
+    "\n",
+    "Your email address drew and have won the sum of 750,000 Euros\n",
+    "( Seven Hundred and Fifty Thousand Euros) in cash credited to\n",
+    "file with\n",
+    " REFERENCE NUMBER: ESP/WIN/008/05/10/MA;\n",
+    " WINNING NUMBER : 14-17-24-34-37-45-16\n",
+    " BATCH NUMBERS :\n",
+    " EULO/1007/444/606/08;\n",
+    " SERIAL NUMBER: 45327\n",
+    "and PROMOTION DATE: 14th June. 2009\n",
+    "\n",
+    "To claim your winning prize, you are to contact the appointed\n",
+    "agent below as soon as possible for the immediate release of your\n",
+    "winnings with the below details.\n",
+  ]
+
+  expected_output = [
+    'U %s\n' % sbox.ospath('A/mu2'),
+  ]
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({'A/mu2': Item(contents=''.join(mu_contents))})
+  expected_disk.remove('A/mu')
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'A/mu2' : Item(status='A ', copied='+', wc_rev='-', moved_from='A/mu'),
+    })
+
+  expected_status.tweak('A/mu', status='D ', wc_rev=2, moved_to='A/mu2')
+
+  expected_skip = wc.State('', { })
+
+  svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+                                       expected_output,
+                                       expected_disk,
+                                       expected_status,
+                                       expected_skip,
+                                       None, # expected err
+                                       1, # check-props
+                                       1) # dry-run
+
+@Issue(3991)
+def patch_lacking_trailing_eol(sbox):
+ "patch file lacking trailing eol"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ iota_path = sbox.ospath('iota')
+ mu_path = sbox.ospath('A/mu')
+
+ # Prepare
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ # Apply patch
+ unidiff_patch = [
+ "Index: iota\n",
+ "===================================================================\n",
+ "--- iota\t(revision 1)\n",
+ "+++ iota\t(working copy)\n",
+ # TODO: -1 +1
+ "@@ -1 +1,2 @@\n",
+ " This is the file 'iota'.\n",
+ "+Some more bytes", # No trailing \n on this line!
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ gamma_contents = "It is the file 'gamma'.\n"
+ iota_contents = "This is the file 'iota'.\n"
+ new_contents = "new\n"
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('iota'),
+ ]
+
+ # Expect a newline to be appended
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota', contents=iota_contents + "Some more bytes\n")
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', status='M ')
+
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1) # dry-run
+
+@Issue(4003)
+def patch_deletes_prop(sbox):
+ "patch deletes prop, directly and via reversed add"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ iota_path = sbox.ospath('iota')
+
+ svntest.main.run_svn(None, 'propset', 'propname', 'propvalue',
+ iota_path)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Apply patch
+ unidiff_patch = [
+ "Index: iota\n",
+ "===================================================================\n",
+ "--- iota\t(revision 1)\n",
+ "+++ iota\t(working copy)\n",
+ "\n",
+ "Property changes on: iota\n",
+ "___________________________________________________________________\n",
+ "Deleted: propname\n",
+ "## -1 +0,0 ##\n",
+ "-propvalue\n",
+ ]
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+  # Expect the original state of the working copy in r1, except
+  # that iota is at r2 now.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', status=' M')
+ expected_status.tweak('iota', wc_rev=2)
+ expected_skip = wc.State('', { })
+ expected_output = [
+ ' U %s\n' % sbox.ospath('iota'),
+ ]
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1) # dry-run
+
+ # Revert any local mods, then try to reverse-apply a patch which
+ # *adds* the property.
+ svntest.main.run_svn(None, 'revert', iota_path)
+
+ # Apply patch
+ unidiff_patch = [
+ "Index: iota\n",
+ "===================================================================\n",
+ "--- iota\t(revision 1)\n",
+ "+++ iota\t(working copy)\n",
+ "\n",
+ "Property changes on: iota\n",
+ "___________________________________________________________________\n",
+ "Added: propname\n",
+ "## -0,0 +1 ##\n",
+ "+propvalue\n",
+ ]
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1, # dry-run
+ '--reverse-diff')
+
+@Issue(4004)
+def patch_reversed_add_with_props(sbox):
+ "reverse patch new file+props atop uncommitted"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+ patch_file_path = sbox.get_tempname('my.patch')
+
+ # Add a new file which also has props set on it.
+ newfile_path = sbox.ospath('newfile')
+ newfile_contents = ["This is the file 'newfile'.\n"]
+ svntest.main.file_write(newfile_path, ''.join(newfile_contents))
+ svntest.main.run_svn(None, 'add', newfile_path)
+ svntest.main.run_svn(None, 'propset', 'propname', 'propvalue',
+ newfile_path)
+
+ # Generate a patch file from our current diff (rooted at the working
+ # copy root).
+ cwd = os.getcwd()
+ try:
+ os.chdir(wc_dir)
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff')
+ finally:
+ os.chdir(cwd)
+ svntest.main.file_write(patch_file_path, ''.join(diff_output))
+
+ # Okay, now commit up.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'newfile' : Item(verb='Adding'),
+ })
+
+ # Now, we'll try to reverse-apply the very diff we just created. We
+ # expect the original state of the working copy in r1.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_skip = wc.State('', { })
+ expected_output = [
+ 'D %s\n' % newfile_path,
+ ]
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1, # dry-run
+ '--reverse-diff')
+
+@Issue(4004)
+def patch_reversed_add_with_props2(sbox):
+ "reverse patch new file+props"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ patch_file_path = sbox.get_tempname('my.patch')
+
+ # Add a new file which also has props set on it.
+ newfile_path = sbox.ospath('newfile')
+ newfile_contents = ["This is the file 'newfile'.\n"]
+ svntest.main.file_write(newfile_path, ''.join(newfile_contents))
+ svntest.main.run_svn(None, 'add', newfile_path)
+ svntest.main.run_svn(None, 'propset', 'propname', 'propvalue',
+ newfile_path)
+
+ # Generate a patch file from our current diff (rooted at the working
+ # copy root).
+ cwd = os.getcwd()
+ try:
+ os.chdir(wc_dir)
+ exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff')
+ finally:
+ os.chdir(cwd)
+ svntest.main.file_write(patch_file_path, ''.join(diff_output))
+
+ # Okay, now commit up.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'newfile' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({'newfile' : Item(wc_rev=2, status=' ')})
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Now, we'll try to reverse-apply the very diff we just created. We
+ # expect the original state of the working copy in r1 plus 'newfile'
+ # scheduled for deletion.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({'newfile' : Item(status='D ', wc_rev=2)})
+ expected_skip = wc.State('', { })
+ expected_output = [
+ 'D %s\n' % newfile_path,
+ ]
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1, # dry-run
+ '--reverse-diff')
+
+def patch_dev_null(sbox):
+ "patch with /dev/null filenames"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = sbox.get_tempname('my.patch')
+
+ # Git (and maybe other tools) use '/dev/null' as the old path for
+ # newly added files, and as the new path for deleted files.
+ # The path selection algorithm in 'svn patch' must detect this and
+ # avoid using '/dev/null' as a patch target.
+ unidiff_patch = [
+ "Index: new\n",
+ "===================================================================\n",
+ "--- /dev/null\n",
+ "+++ new (revision 0)\n",
+ "@@ -0,0 +1 @@\n",
+ "+new\n",
+ "\n",
+ "Index: A/B/E/beta\n",
+ "===================================================================\n",
+ "--- A/B/E/beta (revision 1)\n",
+ "+++ /dev/null\n",
+ "@@ -1 +0,0 @@\n",
+ "-This is the file 'beta'.\n",
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ new_contents = "new\n"
+ expected_output = [
+ 'A %s\n' % sbox.ospath('new'),
+ 'D %s\n' % sbox.ospath('A/B/E/beta'),
+ ]
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({'new' : Item(contents=new_contents)})
+ expected_disk.remove('A/B/E/beta')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({'new' : Item(status='A ', wc_rev=0)})
+ expected_status.tweak('A/B/E/beta', status='D ')
+
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1) # dry-run
+
+@Issue(4049)
+def patch_delete_and_skip(sbox):
+ "patch that deletes and skips"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = os.path.abspath(sbox.get_tempname('my.patch'))
+
+ os.chdir(wc_dir)
+
+ # We need to use abspaths to trigger the segmentation fault.
+ abs = os.path.abspath('.')
+ if sys.platform == 'win32':
+ abs = abs.replace("\\", "/")
+
+ outside_wc = os.path.join(os.pardir, 'X')
+ if sys.platform == 'win32':
+ outside_wc = outside_wc.replace("\\", "/")
+
+ unidiff_patch = [
+ "Index: %s/A/B/E/alpha\n" % abs,
+ "===================================================================\n",
+ "--- %s/A/B/E/alpha\t(revision 1)\n" % abs,
+ "+++ %s/A/B/E/alpha\t(working copy)\n" % abs,
+ "@@ -1 +0,0 @@\n",
+ "-This is the file 'alpha'.\n",
+ "Index: %s/A/B/E/beta\n" % abs,
+ "===================================================================\n",
+ "--- %s/A/B/E/beta\t(revision 1)\n" % abs,
+ "+++ %s/A/B/E/beta\t(working copy)\n" % abs,
+ "@@ -1 +0,0 @@\n",
+ "-This is the file 'beta'.\n",
+ "Index: %s/A/B/E/out-of-reach\n" % abs,
+ "===================================================================\n",
+ "--- %s/iota\t(revision 1)\n" % outside_wc,
+ "+++ %s/iota\t(working copy)\n" % outside_wc,
+ "\n",
+ "Property changes on: iota\n",
+ "___________________________________________________________________\n",
+ "Added: propname\n",
+ "## -0,0 +1 ##\n",
+ "+propvalue\n",
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ skipped_path = os.path.join(os.pardir, 'X', 'iota')
+ expected_output = [
+ 'D %s\n' % os.path.join('A', 'B', 'E', 'alpha'),
+ 'D %s\n' % os.path.join('A', 'B', 'E', 'beta'),
+ 'D %s\n' % os.path.join('A', 'B', 'E'),
+ 'Skipped missing target: \'%s\'\n' % skipped_path,
+ ] + svntest.main.summary_of_conflicts(skipped_paths=1)
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/E/alpha')
+ expected_disk.remove('A/B/E/beta')
+ expected_disk.remove('A/B/E')
+
+ expected_status = svntest.actions.get_virginal_state('', 1)
+ expected_status.tweak('A/B/E', status='D ')
+ expected_status.tweak('A/B/E/alpha', status='D ')
+ expected_status.tweak('A/B/E/beta', status='D ')
+
+ expected_skip = wc.State(
+ '',
+ {skipped_path: Item(verb='Skipped missing target')})
+
+ svntest.actions.run_and_verify_patch('', patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1) # dry-run
+
+def patch_target_no_eol_at_eof(sbox):
+ "patch target with no eol at eof"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ iota_path = sbox.ospath('iota')
+ mu_path = sbox.ospath('A/mu')
+
+ iota_contents = [
+ "This is the file iota."
+ ]
+
+ mu_contents = [
+ "context\n",
+ "context\n",
+ "context\n",
+ "context\n",
+ "This is the file mu.\n",
+ "context\n",
+ "context\n",
+ "context\n",
+ "context", # no newline at end of file
+ ]
+
+ svntest.main.file_write(iota_path, ''.join(iota_contents))
+ svntest.main.file_write(mu_path, ''.join(mu_contents))
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ 'A/mu' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', wc_rev=2)
+ expected_status.tweak('A/mu', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ unidiff_patch = [
+ "Index: A/mu\n",
+ "===================================================================\n",
+ "--- A/mu\t(revision 2)\n",
+ "+++ A/mu\t(working copy)\n",
+ "@@ -2,8 +2,8 @@ context\n",
+ " context\n",
+ " context\n",
+ " context\n",
+ "-This is the file mu.\n",
+ "+It is really the file mu.\n",
+ " context\n",
+ " context\n",
+ " context\n",
+ " context\n",
+ "\\ No newline at end of file\n",
+ "Index: iota\n",
+ "===================================================================\n",
+ "--- iota\t(revision 2)\n",
+ "+++ iota\t(working copy)\n",
+ "@@ -1 +1 @@\n",
+ "-This is the file iota.\n",
+ "\\ No newline at end of file\n",
+ "+It is really the file 'iota'.\n",
+ "\\ No newline at end of file\n",
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ iota_contents = [
+ "It is really the file 'iota'."
+ ]
+ mu_contents = [
+ "context\n",
+ "context\n",
+ "context\n",
+ "context\n",
+ "It is really the file mu.\n",
+ "context\n",
+ "context\n",
+ "context\n",
+ "context", # no newline at end of file
+ ]
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/mu'),
+ 'U %s\n' % sbox.ospath('iota'),
+ ]
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota', contents=''.join(iota_contents))
+ expected_disk.tweak('A/mu', contents=''.join(mu_contents))
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', status='M ', wc_rev=2)
+ expected_status.tweak('A/mu', status='M ', wc_rev=2)
+
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1) # dry-run
+
+def patch_add_and_delete(sbox):
+ "patch add multiple levels and delete"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+ patch_file_path = sbox.get_tempname('my.patch')
+
+ unidiff_patch = [
+ "Index: foo\n",
+ "===================================================================\n",
+ "--- P/Q/foo\t(revision 0)\n"
+ "+++ P/Q/foo\t(working copy)\n"
+ "@@ -0,0 +1 @@\n",
+ "+This is the file 'foo'.\n",
+ "Index: iota\n"
+ "===================================================================\n",
+ "--- iota\t(revision 1)\n"
+ "+++ iota\t(working copy)\n"
+ "@@ -1 +0,0 @@\n",
+ "-This is the file 'iota'.\n",
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ expected_output = [
+ 'A %s\n' % sbox.ospath('P'),
+ 'A %s\n' % sbox.ospath('P/Q'),
+ 'A %s\n' % sbox.ospath('P/Q/foo'),
+ 'D %s\n' % sbox.ospath('iota'),
+ ]
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('iota')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_disk.add({'P/Q/foo' : Item(contents="This is the file 'foo'.\n")})
+ expected_status.tweak('iota', status='D ')
+ expected_status.add({
+ 'P' : Item(status='A ', wc_rev=0),
+ 'P/Q' : Item(status='A ', wc_rev=0),
+ 'P/Q/foo' : Item(status='A ', wc_rev=0),
+ })
+ expected_skip = wc.State('', { })
+
+ # Failed with "The node 'P' was not found" when erroneously checking
+ # whether 'P/Q' should be deleted.
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1) # dry-run
+
+
+def patch_git_with_index_line(sbox):
+ "apply git patch with 'index' line"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+ patch_file_path = sbox.get_tempname('my.patch')
+
+ unidiff_patch = [
+ "diff --git a/src/tools/ConsoleRunner/hi.txt b/src/tools/ConsoleRunner/hi.txt\n",
+ "new file mode 100644\n",
+ "index 0000000..c82a38f\n",
+ "--- /dev/null\n",
+ "+++ b/src/tools/ConsoleRunner/hi.txt\n",
+ "@@ -0,0 +1 @@\n",
+ "+hihihihihihi\n",
+ "\ No newline at end of file\n",
+ ]
+
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ expected_output = [
+ 'A %s\n' % sbox.ospath('src'),
+ 'A %s\n' % sbox.ospath('src/tools'),
+ 'A %s\n' % sbox.ospath('src/tools/ConsoleRunner'),
+ 'A %s\n' % sbox.ospath('src/tools/ConsoleRunner/hi.txt'),
+ ]
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'src' : Item(status='A ', wc_rev=0),
+ 'src/tools' : Item(status='A ', wc_rev=0),
+ 'src/tools/ConsoleRunner' : Item(status='A ', wc_rev=0),
+ 'src/tools/ConsoleRunner/hi.txt' : Item(status='A ', wc_rev=0),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({'src' : Item(),
+ 'src/tools' : Item(),
+ 'src/tools/ConsoleRunner' : Item(),
+ 'src/tools/ConsoleRunner/hi.txt' :
+ Item(contents="hihihihihihi")
+ })
+
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ None, # expected err
+ 1, # check-props
+ 1) # dry-run
+
+@Issue(4273)
+def patch_change_symlink_target(sbox):
+ "patch changes symlink target"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, '\n'.join([
+ "Index: link",
+ "===================================================================",
+ "--- link\t(revision 1)",
+ "+++ link\t(working copy)",
+ "@@ -1 +1 @@",
+ "-link foo",
+ "\\ No newline at end of file",
+ "+link bardame",
+ "\\ No newline at end of file",
+ "",
+ ]))
+
+ # r2 - Try as plain text with how we encode the symlink
+ svntest.main.file_write(sbox.ospath('link'), 'link foo')
+ sbox.simple_add('link')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'link' : Item(verb='Adding'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, None)
+
+ patch_output = [
+ 'U %s\n' % sbox.ospath('link'),
+ ]
+
+ svntest.actions.run_and_verify_svn(patch_output, [],
+ 'patch', patch_file_path, wc_dir)
+
+ # r3 - Store result
+ expected_output = svntest.wc.State(wc_dir, {
+ 'link' : Item(verb='Sending'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, None)
+
+ # r4 - Now as symlink
+ sbox.simple_rm('link')
+ sbox.simple_add_symlink('foo', 'link')
+ expected_output = svntest.wc.State(wc_dir, {
+ 'link' : Item(verb='Replacing'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, None)
+
+ svntest.actions.run_and_verify_svn(patch_output, [],
+ 'patch', patch_file_path, wc_dir)
+
+ # TODO: when it passes, verify that the on-disk 'link' is correct ---
+ # symlink to 'bar' (or "link bar" on non-HAVE_SYMLINK platforms)
+
+ # BH: easy check for node type: a non symlink would show as obstructed
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'link' : Item(status='M ', wc_rev='4'),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+def patch_replace_dir_with_file_and_vv(sbox):
+ "replace dir with file and file with dir"
+ sbox.build(read_only=True)
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, ''.join([
+ # Delete all files in D and descendants to delete D itself
+ "Index: A/D/G/pi\n",
+ "===================================================================\n",
+ "--- A/D/G/pi\t(revision 1)\n",
+ "+++ A/D/G/pi\t(working copy)\n",
+ "@@ -1 +0,0 @@\n",
+ "-This is the file 'pi'.\n",
+ "Index: A/D/G/rho\n",
+ "===================================================================\n",
+ "--- A/D/G/rho\t(revision 1)\n",
+ "+++ A/D/G/rho\t(working copy)\n",
+ "@@ -1 +0,0 @@\n",
+ "-This is the file 'rho'.\n",
+ "Index: A/D/G/tau\n",
+ "===================================================================\n",
+ "--- A/D/G/tau\t(revision 1)\n",
+ "+++ A/D/G/tau\t(working copy)\n",
+ "@@ -1 +0,0 @@\n",
+ "-This is the file 'tau'.\n",
+ "Index: A/D/H/chi\n",
+ "===================================================================\n",
+ "--- A/D/H/chi\t(revision 1)\n",
+ "+++ A/D/H/chi\t(working copy)\n",
+ "@@ -1 +0,0 @@\n",
+ "-This is the file 'chi'.\n",
+ "Index: A/D/H/omega\n",
+ "===================================================================\n",
+ "--- A/D/H/omega\t(revision 1)\n",
+ "+++ A/D/H/omega\t(working copy)\n",
+ "@@ -1 +0,0 @@\n",
+ "-This is the file 'omega'.\n",
+ "Index: A/D/H/psi\n",
+ "===================================================================\n",
+ "--- A/D/H/psi\t(revision 1)\n",
+ "+++ A/D/H/psi\t(working copy)\n",
+ "@@ -1 +0,0 @@\n",
+ "-This is the file 'psi'.\n",
+ "Index: A/D/gamma\n",
+ "===================================================================\n",
+ "--- A/D/gamma\t(revision 1)\n",
+ "+++ A/D/gamma\t(working copy)\n",
+ "@@ -1 +0,0 @@\n",
+ "-This is the file 'gamma'.\n",
+ # Delete iota
+ "Index: iota\n",
+ "===================================================================\n",
+ "--- iota\t(revision 1)\n",
+ "+++ iota\t(working copy)\n",
+ "@@ -1 +0,0 @@\n",
+ "-This is the file 'iota'.\n",
+
+ # Add A/D as file
+ "Index: A/D\n",
+ "===================================================================\n",
+ "--- A/D\t(revision 0)\n",
+ "+++ A/D\t(working copy)\n",
+ "@@ -0,0 +1 @@\n",
+ "+New file\n",
+ "\ No newline at end of file\n",
+
+ # Add iota as directory
+ "Index: iota\n",
+ "===================================================================\n",
+ "--- iota\t(revision 1)\n",
+ "+++ iota\t(working copy)\n",
+ "\n",
+ "Property changes on: iota\n",
+ "___________________________________________________________________\n",
+ "Added: k\n",
+ "## -0,0 +1 ##\n",
+ "+v\n",
+ "\ No newline at end of property\n",
+ ]))
+
+ expected_output = wc.State(wc_dir, {
+ 'A/D/G/pi' : Item(status='D '),
+ 'A/D/G/rho' : Item(status='D '),
+ 'A/D/G/tau' : Item(status='D '),
+ 'A/D/G' : Item(status='D '),
+ 'A/D/H/chi' : Item(status='D '),
+ 'A/D/H/omega' : Item(status='D '),
+ 'A/D/H/psi' : Item(status='D '),
+ 'A/D/H' : Item(status='D '),
+ 'A/D/gamma' : Item(status='D '),
+ 'A/D' : Item(status='A ', prev_status='D '),
+ 'iota' : Item(status='A ', prev_status='D '),
+ })
+ expected_skip = wc.State(wc_dir, {})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/D/G/rho', 'A/D/G/pi', 'A/D/G/tau',
+ 'A/D/H/psi', 'A/D/H/omega', 'A/D/H/chi',
+ 'A/D/gamma', 'A/D/G', 'A/D/H')
+ expected_status.tweak('A/D', status='R ')
+ expected_status.tweak('iota', status='RM')
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/D/G/rho', 'A/D/G/pi', 'A/D/G/tau',
+ 'A/D/H/psi', 'A/D/H/omega', 'A/D/H/chi',
+ 'A/D/gamma', 'A/D', 'A/D/G', 'A/D/H')
+ expected_disk.add({
+ 'A/D' : Item(contents="New file"),
+ 'iota' : Item(contents="", props={u'k': u'v'}),
+ })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+@Issue(4297)
+def single_line_mismatch(sbox):
+ "single line replacement mismatch"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, ''.join([
+ "Index: test\n",
+ "===================================================================\n",
+ "--- test\t(revision 1)\n",
+ "+++ test\t(working copy)\n",
+ "@@ -1 +1 @@\n",
+ "-foo\n",
+ "\\ No newline at end of file\n",
+ "+bar\n",
+ "\\ No newline at end of file\n"
+ ]))
+
+ # r2 - Try as plain text with how we encode the symlink
+ svntest.main.file_write(sbox.ospath('test'), 'line')
+ sbox.simple_add('test')
+ sbox.simple_commit()
+
+ # And now this patch should fail, as 'line' doesn't equal 'foo'
+ # But yet it shows up as deleted instead of conflicted
+ expected_output = wc.State(wc_dir, {
+ 'test' : Item(status='C ')
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'test' : Item(status=' ', wc_rev='2'),
+ })
+ expected_skip = wc.State(wc_dir, {})
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'test' : Item(contents="line"),
+ 'test.svnpatch.rej' : Item(contents="--- test\n"
+ "+++ test\n"
+ "@@ -1,1 +1,1 @@\n"
+ "-foo\n"
+ "\\ No newline at end of file\n"
+ "+bar\n"
+ "\\ No newline at end of file\n"),
+ })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+@Issue(3644)
+def patch_empty_file(sbox):
+ "apply a patch to an empty file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, ''.join([
+ # patch a file containing just '\n' to 'replacement\n'
+ "Index: lf.txt\n",
+ "===================================================================\n",
+ "--- lf.txt\t(revision 2)\n",
+ "+++ lf.txt\t(working copy)\n",
+ "@@ -1 +1 @@\n",
+ "-\n"
+ "+replacement\n",
+
+ # patch a new file 'new.txt\n'
+ "Index: new.txt\n",
+ "===================================================================\n",
+ "--- new.txt\t(revision 0)\n",
+ "+++ new.txt\t(working copy)\n",
+ "@@ -0,0 +1 @@\n",
+ "+new file\n",
+
+ # patch a file containing 0 bytes to 'replacement\n'
+ "Index: empty.txt\n",
+ "===================================================================\n",
+ "--- empty.txt\t(revision 2)\n",
+ "+++ empty.txt\t(working copy)\n",
+ "@@ -0,0 +1 @@\n",
+ "+replacement\n",
+ ]))
+
+ sbox.simple_add_text('', 'empty.txt')
+ sbox.simple_add_text('\n', 'lf.txt')
+ sbox.simple_commit()
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('lf.txt'),
+ 'A %s\n' % sbox.ospath('new.txt'),
+ 'U %s\n' % sbox.ospath('empty.txt'),
+ # Not sure if this line is necessary, but it doesn't hurt
+ '> applied hunk @@ -0,0 +1,1 @@ with offset 0\n',
+ ]
+
+ # Current result: lf.txt patched ok, new created, empty succeeds with offset.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'lf.txt' : Item(contents="replacement\n"),
+ 'new.txt' : Item(contents="new file\n"),
+ 'empty.txt' : Item(contents="replacement\n"),
+ })
+ expected_skip = wc.State(wc_dir, {})
+ expected_status = None
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+@Issue(3362)
+def patch_apply_no_fuz(sbox):
+ "svn diff created patch should apply without fuz"
+
+ sbox.build(read_only=True)
+ wc_dir = sbox.wc_dir
+
+ test1_body = '\n'.join([
+ "line_1",
+ "line_2",
+ "line_3",
+ "line_4",
+ "line_5",
+ "line_6",
+ "line_7",
+ "line_8",
+ "line_9",
+ "line_10",
+ "line_11",
+ "line_12",
+ "line_13",
+ "line_14",
+ "line_15",
+ "line_16",
+ "line_17",
+ "line_18",
+ "line_19",
+ "line_20",
+ "line_21",
+ "line_22",
+ "line_23",
+ "line_24",
+ "line_25",
+ "line_26",
+ "line_27",
+ "line_28",
+ "line_29",
+ "line_30",
+ ""
+ ])
+ svntest.main.file_write(sbox.ospath('test.txt'), test1_body, 'wb')
+ test2_body = '\n'.join([
+ "line_1a",
+ "line_1b",
+ "line_1c",
+ "line_1",
+ "line_2",
+ "line_3",
+ "line_4",
+ "line_5a",
+ "line_5b",
+ "line_5c",
+ "line_6",
+ "line_7",
+ "line_8",
+ "line_9",
+ "line_10",
+ "line_11a",
+ "line_11b",
+ "line_11c",
+ "line_12",
+ "line_13",
+ "line_14",
+ "line_15",
+ "line_16",
+ "line_17",
+ "line_18",
+ "line_19a",
+ "line_19b",
+ "line_19c",
+ "line_20",
+ "line_21",
+ "line_22",
+ "line_23",
+ "line_24",
+ "line_25",
+ "line_26",
+ "line_27a",
+ "line_27b",
+ "line_27c",
+ "line_28",
+ "line_29",
+ "line_30",
+ ""
+ ])
+ svntest.main.file_write(sbox.ospath('test_v2.txt'), test2_body, 'wb')
+
+ sbox.simple_add('test.txt', 'test_v2.txt')
+
+ result, out_text, err_text = svntest.main.run_svn(None,
+ 'diff',
+ '--old',
+ sbox.ospath('test.txt'),
+ '--new',
+ sbox.ospath('test_v2.txt'))
+
+ patch_path = sbox.get_tempname('patch.diff')
+ svntest.main.file_write(patch_path, ''.join(out_text), 'wb')
+
+ expected_output = wc.State(wc_dir, {
+ 'test.txt' : Item(status='U '),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'test.txt' : Item(contents=test2_body),
+ 'test_v2.txt' : Item(contents=test2_body),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'test_v2.txt' : Item(status='A ', wc_rev='-'),
+ 'test.txt' : Item(status='A ', wc_rev='-'),
+ })
+
+ expected_skip = wc.State(wc_dir, {})
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+@Issue(4315)
+def patch_lacking_trailing_eol_on_context(sbox):
+ "patch file lacking trailing eol on context"
+
+ # Apply a patch where a hunk (the only hunk, in this case) ends with a
+ # context line that has no EOL, where this context line is going to
+ # match an existing line that *does* have an EOL.
+ #
+ # Around trunk@1443700, 'svn patch' wrongly removed an EOL from the
+ # target file at that position.
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = sbox.get_tempname('my.patch')
+
+ # Prepare
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_disk = svntest.main.greek_state.copy()
+
+ # Prepare the patch
+ unidiff_patch = [
+ "Index: iota\n",
+ "===================================================================\n",
+ "--- iota\t(revision 1)\n",
+ "+++ iota\t(working copy)\n",
+ # TODO: -1 +1
+ "@@ -1 +1,2 @@\n",
+ "+Some more bytes\n",
+ " This is the file 'iota'.", # No trailing \n on this context line!
+ ]
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ iota_contents = "This is the file 'iota'.\n"
+
+ expected_output = [ 'U %s\n' % sbox.ospath('iota') ]
+
+ # Test where the no-EOL context line is the last line in the target.
+ expected_disk.tweak('iota', contents="Some more bytes\n" + iota_contents)
+ expected_status.tweak('iota', status='M ')
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ # Test where the no-EOL context line is a non-last line in the target.
+ sbox.simple_revert('iota')
+ sbox.simple_append('iota', "Another line.\n")
+ expected_disk.tweak('iota', contents="Some more bytes\n" + iota_contents +
+ "Another line.\n")
+ expected_output = wc.State(wc_dir, {
+ 'iota' : Item(status='U ')
+ })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+def patch_with_custom_keywords(sbox):
+ """patch with custom keywords"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_append('A/mu', '$Qq$\nAB\nZZ\n', truncate=True)
+ sbox.simple_propset('svn:keywords', 'Qq=%R', 'A/mu')
+ sbox.simple_commit()
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu',
+ contents='$Qq: %s $\nAB\nZZ\n' % sbox.repo_url)
+ svntest.actions.verify_disk(sbox.wc_dir, expected_disk)
+
+ unidiff_patch = [
+ "Index: A/mu\n",
+ "===================================================================\n",
+ "--- A/mu\t(revision 2)\n",
+ "+++ A/mu\t(working copy)\n",
+ "@@ -1,3 +1,3 @@\n",
+ " $Qq$\n",
+ "-AB\n",
+ "+ABAB\n",
+ " ZZ\n"
+ ]
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ expected_output = [ 'U %s\n' % sbox.ospath('A/mu') ]
+ expected_disk.tweak('A/mu',
+ contents='$Qq: %s $\nABAB\nZZ\n' % sbox.repo_url)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=2)
+ expected_status.tweak('A/mu', status='M ')
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+def patch_git_rename(sbox):
+ """--git patch with rename header"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # a simple --git rename patch
+ unidiff_patch = [
+ "diff --git a/iota b/iota2\n",
+ "similarity index 100%\n",
+ "rename from iota\n",
+ "rename to iota2\n",
+ ]
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ expected_output = wc.State(wc_dir, {
+ 'iota' : Item(status='D '),
+ 'iota2' : Item(status='A ')
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('iota')
+ expected_disk.add({'iota2' : Item(contents="This is the file 'iota'.\n")})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'iota2' : Item(status='A ', copied='+', wc_rev='-', moved_from='iota'),
+ })
+ expected_status.tweak('iota', status='D ', wc_rev=1, moved_to='iota2')
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # Retry
+ expected_output = wc.State(wc_dir, {
+ 'iota2' : Item(status='G ')
+ })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # Reverse
+ expected_output = wc.State(wc_dir, {
+ 'iota2' : Item(status='D '),
+ 'iota' : Item(status='A '),
+ })
+ expected_disk.remove('iota2')
+ expected_disk.add({
+ 'iota' : Item(contents="This is the file 'iota'.\n"),
+ })
+ expected_status.remove('iota2')
+ expected_status.tweak('iota', moved_to=None, status=' ')
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--reverse-diff')
+
+ # Retry reverse
+ # svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ # expected_output, expected_disk,
+ # expected_status, expected_skip,
+ # [], True, True,
+ # '--reverse-diff')
+
+@Issue(4533)
+def patch_hunk_avoid_reorder(sbox):
+ """avoid reordering hunks"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_append('A/mu',
+ 'AA\n' 'BB\n' 'CC\n' 'DD\n' 'EE\n' 'FF\n'
+ 'TT\n' 'UU\n' 'VV\n' 'WW\n' 'XX\n' 'YY\n'
+ 'GG\n' 'HH\n' 'II\n' 'JJ\n' 'KK\n' 'LL\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ 'MM\n' 'NN\n' 'OO\n' 'PP\n' 'QQ\n' 'RR\n'
+ 'SS\n' 'TT\n' 'UU\n' 'VV\n' 'WW\n' 'XX\n'
+ 'YY\n' 'ZZ\n', truncate=True)
+ sbox.simple_commit()
+
+ # two hunks, first matches at offset +18, second matches at both -13
+ # and +18 but we want the second match as it is after the first
+ unidiff_patch = [
+ "Index: A/mu\n"
+ "===================================================================\n",
+ "--- A/mu\t(revision 1)\n",
+ "+++ A/mu\t(working copy)\n",
+ "@@ -13,6 +13,7 @@\n",
+ " MM\n",
+ " NN\n",
+ " OO\n",
+ "+11111\n",
+ " PP\n",
+ " QQ\n",
+ " RR\n",
+ "@@ -20,6 +20,7 @@\n",
+ " TT\n",
+ " UU\n",
+ " VV\n",
+ "+22222\n",
+ " WW\n",
+ " XX\n",
+ " YY\n",
+ ]
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -13,6 +13,7 @@ with offset 18\n',
+ '> applied hunk @@ -20,6 +20,7 @@ with offset 18\n'
+ ]
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu', contents=
+ 'AA\n' 'BB\n' 'CC\n' 'DD\n' 'EE\n' 'FF\n'
+ 'TT\n' 'UU\n' 'VV\n' 'WW\n' 'XX\n' 'YY\n'
+ 'GG\n' 'HH\n' 'II\n' 'JJ\n' 'KK\n' 'LL\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ 'MM\n' 'NN\n' 'OO\n' '11111\n' 'PP\n' 'QQ\n' 'RR\n'
+ 'SS\n' 'TT\n' 'UU\n' 'VV\n' '22222\n' 'WW\n' 'XX\n'
+ 'YY\n' 'ZZ\n')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=2)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ sbox.simple_revert('A/mu')
+
+ # change patch so second hunk matches at both -14 and +17, we still
+ # want the second match
+ unidiff_patch = [
+ "Index: A/mu\n"
+ "===================================================================\n",
+ "--- A/mu\t(revision 1)\n",
+ "+++ A/mu\t(working copy)\n",
+ "@@ -13,6 +13,7 @@\n",
+ " MM\n",
+ " NN\n",
+ " OO\n",
+ "+11111\n",
+ " PP\n",
+ " QQ\n",
+ " RR\n",
+ "@@ -21,6 +21,7 @@\n",
+ " TT\n",
+ " UU\n",
+ " VV\n",
+ "+22222\n",
+ " WW\n",
+ " XX\n",
+ " YY\n",
+ ]
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -13,6 +13,7 @@ with offset 18\n',
+ '> applied hunk @@ -21,6 +21,7 @@ with offset 17\n'
+ ]
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu', contents=
+ 'AA\n' 'BB\n' 'CC\n' 'DD\n' 'EE\n' 'FF\n'
+ 'TT\n' 'UU\n' 'VV\n' 'WW\n' 'XX\n' 'YY\n'
+ 'GG\n' 'HH\n' 'II\n' 'JJ\n' 'KK\n' 'LL\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ 'MM\n' 'NN\n' 'OO\n' '11111\n' 'PP\n' 'QQ\n' 'RR\n'
+ 'SS\n' 'TT\n' 'UU\n' 'VV\n' '22222\n' 'WW\n' 'XX\n'
+ 'YY\n' 'ZZ\n')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=2)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ sbox.simple_revert('A/mu')
+
+@Issue(4533)
+def patch_hunk_avoid_reorder2(sbox):
+ """avoid reordering hunks 2"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_append('A/mu',
+ 'AA\n' 'BB\n' 'CC\n' 'DD\n' 'EE\n' 'FF\n'
+ 'TT\n' 'UU\n' 'VV\n' 'WW\n' 'XX\n' 'YY\n'
+ 'GG\n' 'HH\n' 'II\n' 'JJ\n' 'KK\n' 'LL\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ 'MM\n' 'NN\n' 'OO\n' 'PP\n' 'QQ\n' 'RR\n'
+ 'SS\n' 'TT\n' 'UU\n' 'VV\n' 'WW\n' 'XX\n'
+ 'YY\n' 'ZZ\n', truncate=True)
+ sbox.simple_commit()
+
+ # two hunks, first matches at offset +18, second matches at both -13
+ # change patch so second hunk matches at both -12 and +19, we still
+ # want the second match
+ unidiff_patch = [
+ "Index: A/mu\n"
+ "===================================================================\n",
+ "--- A/mu\t(revision 1)\n",
+ "+++ A/mu\t(working copy)\n",
+ "@@ -13,6 +13,7 @@\n",
+ " MM\n",
+ " NN\n",
+ " OO\n",
+ "+11111\n",
+ " PP\n",
+ " QQ\n",
+ " RR\n",
+ "@@ -19,6 +19,7 @@\n",
+ " TT\n",
+ " UU\n",
+ " VV\n",
+ "+22222\n",
+ " WW\n",
+ " XX\n",
+ " YY\n",
+ ]
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -13,6 +13,7 @@ with offset 18\n',
+ '> applied hunk @@ -19,6 +19,7 @@ with offset 19\n'
+ ]
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu', contents=
+ 'AA\n' 'BB\n' 'CC\n' 'DD\n' 'EE\n' 'FF\n'
+ 'TT\n' 'UU\n' 'VV\n' 'WW\n' 'XX\n' 'YY\n'
+ 'GG\n' 'HH\n' 'II\n' 'JJ\n' 'KK\n' 'LL\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ '33333\n' '33333\n' '33333\n'
+ 'MM\n' 'NN\n' 'OO\n' '11111\n' 'PP\n' 'QQ\n' 'RR\n'
+ 'SS\n' 'TT\n' 'UU\n' 'VV\n' '22222\n' 'WW\n' 'XX\n'
+ 'YY\n' 'ZZ\n')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=2)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+@Issue(4533)
+def patch_hunk_reorder(sbox):
+ """hunks that reorder"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_append('A/mu',
+ 'AA\n' 'BB\n' 'CC\n' 'DD\n' 'EE\n' 'FF\n' 'GG\n'
+ 'HH\n' 'II\n' 'JJ\n' 'KK\n' 'LL\n' 'MM\n' 'NN\n',
+ truncate=True)
+ sbox.simple_commit()
+
+ # Two hunks match in opposite order
+ unidiff_patch = [
+ "Index: A/mu\n"
+ "===================================================================\n",
+ "--- A/mu\t(revision 1)\n",
+ "+++ A/mu\t(working copy)\n",
+ "@@ -2,6 +2,7 @@\n",
+ " II\n",
+ " JJ\n",
+ " KK\n",
+ "+11111\n",
+ " LL\n",
+ " MM\n",
+ " NN\n",
+ "@@ -9,6 +10,7 @@\n",
+ " BB\n",
+ " CC\n",
+ " DD\n",
+ "+22222\n",
+ " EE\n",
+ " FF\n",
+ " GG\n",
+ ]
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -9,6 +10,7 @@ with offset -7\n',
+ '> applied hunk @@ -2,6 +2,7 @@ with offset 7\n',
+ ]
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu', contents=
+ 'AA\n' 'BB\n' 'CC\n' 'DD\n' '22222\n' 'EE\n' 'FF\n' 'GG\n'
+ 'HH\n' 'II\n' 'JJ\n' 'KK\n' '11111\n' 'LL\n' 'MM\n' 'NN\n')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=2)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ # In the following case the reordered hunk2 is smaller offset
+ # magnitude than hunk2 at the end and the reorder is preferred.
+ sbox.simple_revert('A/mu')
+ sbox.simple_append('A/mu',
+ 'x\n' * 2 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 2 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10 +
+ '1\n' '2\n' '3\n' 'hunk1\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 100 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n',
+ truncate=True)
+ sbox.simple_commit()
+
+ unidiff_patch = [
+ "Index: A/mu\n"
+ "===================================================================\n",
+ "--- A/mu\t(revision 2)\n",
+ "+++ A/mu\t(working copy)\n",
+ "@@ -28,7 +28,7 @@\n",
+ " 1\n",
+ " 2\n",
+ " 3\n",
+ "-hunk1\n",
+ "+hunk1-mod\n",
+ " 4\n",
+ " 5\n",
+ " 6\n",
+ "@@ -44,7 +44,7 @@\n",
+ " 1\n",
+ " 2\n",
+ " 3\n",
+ "-hunk2\n",
+ "+hunk2-mod\n",
+ " 4\n",
+ " 5\n",
+ " 6\n",
+ ]
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -44,7 +44,7 @@ with offset -32\n',
+ '> applied hunk @@ -28,7 +28,7 @@ with offset 1\n',
+ ]
+ expected_disk.tweak('A/mu', contents=
+ 'x\n' * 2 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 2 +
+ '1\n' '2\n' '3\n' 'hunk2-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10 +
+ '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 100 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n')
+
+ expected_status.tweak('A/mu', status='M ', wc_rev=3)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+ sbox.simple_revert('A/mu')
+
+ # In this case the reordered hunk2 is further than hunk2 at the end
+ # and the reordered is not preferred.
+ unidiff_patch = [
+ "Index: A/mu\n"
+ "===================================================================\n",
+ "--- A/mu\t(revision 2)\n",
+ "+++ A/mu\t(working copy)\n",
+ "@@ -28,7 +28,7 @@\n",
+ " 1\n",
+ " 2\n",
+ " 3\n",
+ "-hunk1\n",
+ "+hunk1-mod\n",
+ " 4\n",
+ " 5\n",
+ " 6\n",
+ "@@ -110,7 +110,7 @@\n",
+ " 1\n",
+ " 2\n",
+ " 3\n",
+ "-hunk2\n",
+ "+hunk2-mod\n",
+ " 4\n",
+ " 5\n",
+ " 6\n",
+ ]
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -28,7 +28,7 @@ with offset 1\n',
+ '> applied hunk @@ -110,7 +110,7 @@ with offset 26\n',
+ ]
+ expected_disk.tweak('A/mu', contents=
+ 'x\n' * 2 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 2 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10 +
+ '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 100 +
+ '1\n' '2\n' '3\n' 'hunk2-mod\n' '4\n' '5\n' '6\n')
+
+ expected_status.tweak('A/mu', status='M ', wc_rev=3)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+@XFail()
+def patch_hunk_overlap(sbox):
+ """hunks that overlap"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_append('A/mu',
+ 'AA\n' 'BB\n' 'CC\n' 'DD\n' 'EE\n' 'FF\n'
+ 'GG\n' 'HH\n' 'II\n', truncate=True)
+ sbox.simple_commit()
+
+ # Two hunks that overlap when applied, GNU patch can apply both hunks.
+ unidiff_patch = [
+ "Index: A/mu\n"
+ "===================================================================\n",
+ "--- A/mu\t(revision 1)\n",
+ "+++ A/mu\t(working copy)\n",
+ "@@ -2,6 +2,7 @@\n",
+ " BB\n",
+ " CC\n",
+ " DD\n",
+ "+11111\n",
+ " EE\n",
+ " FF\n",
+ " GG\n",
+ "@@ -9,6 +10,7 @@\n",
+ " DD\n",
+ " EE\n",
+ " FF\n",
+ "+22222\n",
+ " GG\n",
+ " HH\n",
+ " II\n",
+ ]
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -9,6 +10,7 @@ with offset -5\n',
+ ]
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu', contents=
+ 'AA\n' 'BB\n' 'CC\n' 'DD\n' '11111\n' 'EE\n' 'FF\n'
+ '22222\n' 'GG\n' 'HH\n' 'II\n')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=2)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+def patch_delete_modified(sbox):
+ """patch delete modified"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # A patch that deletes beta.
+ unidiff_patch = [
+ "Index: A/B/E/beta\n",
+ "===================================================================\n",
+ "--- A/B/E/beta (revision 1)\n",
+ "+++ A/B/E/beta (working copy)\n",
+ "@@ -1 +0,0 @@\n",
+ "-This is the file 'beta'.\n",
+ ]
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ # First application deletes beta
+ expected_output = [
+ 'D %s\n' % sbox.ospath('A/B/E/beta'),
+ ]
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/E/beta')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/E/beta', status='D ')
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ # Second application notifies already applied
+ expected_output = [
+ 'G %s\n' % sbox.ospath('A/B/E/beta'),
+ '> hunk @@ -1,1 +0,0 @@ already applied\n',
+ ]
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ # Third application, with file present even though state is 'D', also skips
+ sbox.simple_append('A/B/E/beta', 'Modified', truncate=True)
+ expected_disk.add({'A/B/E/beta' : Item(contents='Modified')})
+ expected_output = [
+ 'Skipped \'%s\'\n' % sbox.ospath('A/B/E/beta'),
+ ] + svntest.main.summary_of_conflicts(skipped_paths=1)
+ expected_skip = wc.State('', {
+ sbox.ospath('A/B/E/beta') : Item(verb='Skipped'),
+ })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ # Revert and modify beta, fourth application gives a text conflict.
+ sbox.simple_revert('A/B/E/beta')
+ sbox.simple_append('A/B/E/beta', 'Modified', truncate=True)
+
+ expected_output = [
+ 'C %s\n' % sbox.ospath('A/B/E/beta'),
+ '> rejected hunk @@ -1,1 +0,0 @@\n',
+ ] + svntest.main.summary_of_conflicts(text_conflicts=1)
+ expected_skip = wc.State('', { })
+ reject_file_contents = [
+ "--- A/B/E/beta\n",
+ "+++ /dev/null\n",
+ "@@ -1,1 +0,0 @@\n",
+ "-This is the file 'beta'.\n",
+ ]
+ expected_disk.add({'A/B/E/beta.svnpatch.rej'
+ : Item(contents=''.join(reject_file_contents))
+ })
+ expected_status.tweak('A/B/E/beta', status='M ')
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+def patch_closest(sbox):
+ "find closest hunk"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ unidiff_patch = [
+ "Index: A/mu\n"
+ "===================================================================\n",
+ "--- A/mu\t(revision 2)\n",
+ "+++ A/mu\t(working copy)\n",
+ "@@ -47,7 +47,7 @@\n",
+ " 1\n",
+ " 2\n",
+ " 3\n",
+ "-hunk1\n",
+ "+hunk1-mod\n",
+ " 4\n",
+ " 5\n",
+ " 6\n",
+ "@@ -66,7 +66,7 @@\n",
+ " 1\n",
+ " 2\n",
+ " 3\n",
+ "-rejected-hunk2-\n",
+ "+rejected-hunk2-mod\n",
+ " 4\n",
+ " 5\n",
+ " 6\n",
+ "@@ -180,7 +180,7 @@\n",
+ " 1\n",
+ " 2\n",
+ " 3\n",
+ "-hunk3\n",
+ "+hunk3-mod\n",
+ " 4\n",
+ " 5\n",
+ " 6\n",
+ ]
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch))
+
+ # Previous offset for hunk3 is +4, hunk3 matches at relative offsets
+ # of -19 and +18, prefer +18 gives final offset +22
+ sbox.simple_append('A/mu',
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk1\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 30 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10,
+ truncate=True)
+ sbox.simple_commit()
+
+ expected_output = [
+ 'C %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -47,7 +47,7 @@ with offset 4\n',
+ '> applied hunk @@ -180,7 +180,7 @@ with offset 22\n',
+ '> rejected hunk @@ -66,7 +66,7 @@\n',
+ ] + svntest.main.summary_of_conflicts(text_conflicts=1)
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({'A/mu.svnpatch.rej' : Item(contents=
+ "--- A/mu\n" +
+ "+++ A/mu\n" +
+ "@@ -66,7 +66,7 @@\n" +
+ " 1\n" +
+ " 2\n" +
+ " 3\n" +
+ "-rejected-hunk2-\n" +
+ "+rejected-hunk2-mod\n" +
+ " 4\n" +
+ " 5\n" +
+ " 6\n")})
+ expected_disk.tweak('A/mu', contents=
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 30 +
+ '1\n' '2\n' '3\n' 'hunk3-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=2)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ # Previous offset for hunk3 is +4, hunk3 matches at relative offsets
+ # of -19 and +20, prefer -19 gives final offset -15
+ sbox.simple_append('A/mu',
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk1\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 32 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10,
+ truncate=True)
+ sbox.simple_commit()
+
+ os.remove(sbox.ospath('A/mu.svnpatch.rej'))
+ expected_output = [
+ 'C %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -47,7 +47,7 @@ with offset 4\n',
+ '> applied hunk @@ -180,7 +180,7 @@ with offset -15\n',
+ '> rejected hunk @@ -66,7 +66,7 @@\n',
+ ] + svntest.main.summary_of_conflicts(text_conflicts=1)
+ expected_disk.tweak('A/mu', contents=
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk3-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 32 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=3)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ # Previous offset for hunk3 is +4, hunk3 matches at relative offsets
+ # of -19 and +19, prefer -19 gives final offset -15
+ sbox.simple_append('A/mu',
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk1\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 31 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10,
+ truncate=True)
+ sbox.simple_commit()
+
+ os.remove(sbox.ospath('A/mu.svnpatch.rej'))
+ expected_output = [
+ 'C %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -47,7 +47,7 @@ with offset 4\n',
+ '> applied hunk @@ -180,7 +180,7 @@ with offset -15\n',
+ '> rejected hunk @@ -66,7 +66,7 @@\n',
+ ] + svntest.main.summary_of_conflicts(text_conflicts=1)
+ expected_disk.tweak('A/mu', contents=
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk3-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 31 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=4)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ # Previous offset for hunk3 is +4, hunk3 matches at relative offsets
+ # of +173 and -173, prefer +173 gives final offset +177
+ sbox.simple_append('A/mu',
+ 'x\n' * 10 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 33 +
+ '1\n' '2\n' '3\n' 'hunk1\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 242 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10,
+ truncate=True)
+ sbox.simple_commit()
+
+ os.remove(sbox.ospath('A/mu.svnpatch.rej'))
+ expected_output = [
+ 'C %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -47,7 +47,7 @@ with offset 4\n',
+ '> applied hunk @@ -180,7 +180,7 @@ with offset 177\n',
+ '> rejected hunk @@ -66,7 +66,7 @@\n',
+ ] + svntest.main.summary_of_conflicts(text_conflicts=1)
+ expected_disk.tweak('A/mu', contents=
+ 'x\n' * 10 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 33 +
+ '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 242 +
+ '1\n' '2\n' '3\n' 'hunk3-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=5)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ # Previous offset for hunk3 is +4, hunk3 matches at relative offsets
+ # of +174 and -173, prefer -173 gives final offset -169
+ sbox.simple_append('A/mu',
+ 'x\n' * 10 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 33 +
+ '1\n' '2\n' '3\n' 'hunk1\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 243 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10,
+ truncate=True)
+ sbox.simple_commit()
+
+ os.remove(sbox.ospath('A/mu.svnpatch.rej'))
+ expected_output = [
+ 'C %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -180,7 +180,7 @@ with offset -169\n',
+ '> applied hunk @@ -47,7 +47,7 @@ with offset 4\n',
+ '> rejected hunk @@ -66,7 +66,7 @@\n',
+ ] + svntest.main.summary_of_conflicts(text_conflicts=1)
+ expected_disk.tweak('A/mu', contents=
+ 'x\n' * 10 +
+ '1\n' '2\n' '3\n' 'hunk3-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 33 +
+ '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 50 +
+ '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 243 +
+ '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' +
+ 'x\n' * 10)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='M ', wc_rev=6)
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+@SkipUnless(svntest.main.is_posix_os)
+def patch_symlink_traversal(sbox):
+ """symlink traversal behaviour"""
+
+ sbox.build(read_only=True)
+ wc_dir = sbox.wc_dir
+ alpha_contents = "This is the file 'alpha'.\n"
+
+ # A/B/E/unversioned -> alpha
+ # A/B/E/versioned -> alpha
+ # A/B/unversioned -> E (so A/B/unversioned/alpha is A/B/E/alpha)
+ # A/B/versioned -> E (so A/B/versioned/alpha is A/B/E/alpha)
+ os.symlink('alpha', sbox.ospath('A/B/E/unversioned'))
+ os.symlink('alpha', sbox.ospath('A/B/E/versioned'))
+ os.symlink('E', sbox.ospath('A/B/unversioned'))
+ os.symlink('E', sbox.ospath('A/B/versioned'))
+ sbox.simple_add('A/B/E/versioned', 'A/B/versioned')
+
+ prepatch_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ prepatch_status.add({'A/B/E/versioned' : Item(status='A ', wc_rev='-')})
+ prepatch_status.add({'A/B/versioned' : Item(status='A ', wc_rev='-')})
+ svntest.actions.run_and_verify_status(wc_dir, prepatch_status)
+
+ # Patch through unversioned symlink to file
+ unidiff_patch = (
+ "Index: A/B/E/unversioned\n"
+ "===================================================================\n"
+ "--- A/B/E/unversioned\t(revision 2)\n"
+ "+++ A/B/E/unversioned\t(working copy)\n"
+ "@@ -1 +1,2 @@\n"
+ " This is the file 'alpha'.\n"
+ "+xx\n"
+ )
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, unidiff_patch)
+
+ expected_output = wc.State(wc_dir, {
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({'A/B/E/unversioned' : Item(contents=alpha_contents)})
+ expected_disk.add({'A/B/E/versioned' : Item(contents=alpha_contents)})
+ expected_disk.add({'A/B/unversioned' : Item()})
+ expected_disk.add({'A/B/versioned' : Item()})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({'A/B/E/versioned' : Item(status='A ', wc_rev='-')})
+ expected_status.add({'A/B/versioned' : Item(status='A ', wc_rev='-')})
+ expected_skip = wc.State(wc_dir, {
+ 'A/B/E/unversioned' : Item(verb='Skipped'),
+ })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+ svntest.actions.run_and_verify_status(wc_dir, prepatch_status)
+
+ # Patch through versioned symlink to file
+ unidiff_patch = (
+ "Index: A/B/E/versioned\n"
+ "===================================================================\n"
+ "--- A/B/E/versioned\t(revision 2)\n"
+ "+++ A/B/E/versioned\t(working copy)\n"
+ "@@ -1 +1,2 @@\n"
+ " This is the file 'alpha'.\n"
+ "+xx\n"
+ )
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, unidiff_patch)
+ reject_contents = (
+ "--- A/B/E/versioned\n"
+ "+++ A/B/E/versioned\n"
+ "@@ -1,1 +1,2 @@\n"
+ " This is the file 'alpha'.\n"
+ "+xx\n"
+ )
+
+ expected_output = wc.State(wc_dir, {
+ 'A/B/E/versioned' : Item(status='C ')
+ })
+ expected_disk.add({
+ 'A/B/E/versioned.svnpatch.rej' : Item(contents=reject_contents)
+ })
+ expected_skip = wc.State(wc_dir, { })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+ os.remove(sbox.ospath('A/B/E/versioned.svnpatch.rej'))
+ expected_disk.remove('A/B/E/versioned.svnpatch.rej')
+ svntest.actions.run_and_verify_status(wc_dir, prepatch_status)
+
+ # Patch through unversioned symlink to parent of file
+ unidiff_patch = (
+ "Index: A/B/unversioned/alpha\n"
+ "===================================================================\n"
+ "--- A/B/unversioned/alpha\t(revision 2)\n"
+ "+++ A/B/unversioned/alpha\t(working copy)\n"
+ "@@ -1 +1,2 @@\n"
+ " This is the file 'alpha'.\n"
+ "+xx\n"
+ )
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, unidiff_patch)
+
+ expected_output = wc.State(wc_dir, {})
+ expected_skip = wc.State(wc_dir, {
+ 'A/B/unversioned/alpha' : Item(verb='Skipped'),
+ })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+ svntest.actions.run_and_verify_status(wc_dir, prepatch_status)
+
+ # Patch through versioned symlink to parent of file
+ unidiff_patch = (
+ "Index: A/B/versioned/alpha\n"
+ "===================================================================\n"
+ "--- A/B/versioned/alpha\t(revision 2)\n"
+ "+++ A/B/versioned/alpha\t(working copy)\n"
+ "@@ -1 +1,2 @@\n"
+ " This is the file 'alpha'.\n"
+ "+xx\n"
+ )
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, unidiff_patch)
+
+ expected_output = wc.State(wc_dir, {})
+ expected_skip = wc.State(wc_dir, {
+ 'A/B/versioned/alpha' : Item(verb='Skipped'),
+ })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+ svntest.actions.run_and_verify_status(wc_dir, prepatch_status)
+
+@SkipUnless(svntest.main.is_posix_os)
+def patch_obstructing_symlink_traversal(sbox):
+ """obstructing symlink traversal behaviour"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ alpha_contents = "This is the file 'alpha'.\n"
+ sbox.simple_append('A/B/F/alpha', alpha_contents)
+ sbox.simple_add('A/B/F/alpha')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Unversioned symlink A/B/E -> F obstructing versioned A/B/E so
+ # versioned A/B/E/alpha is A/B/F/alpha
+ svntest.main.safe_rmtree(sbox.ospath('A/B/E'))
+ os.symlink('F', sbox.ospath('A/B/E'))
+
+ unidiff_patch = (
+ "Index: A/B/E/alpha\n"
+ "===================================================================\n"
+ "--- A/B/E/alpha\t(revision 2)\n"
+ "+++ A/B/E/alpha\t(working copy)\n"
+ "@@ -1 +1,2 @@\n"
+ " This is the file 'alpha'.\n"
+ "+xx\n"
+ )
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, unidiff_patch)
+
+ ### Patch applies through the unversioned symlink
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/B/E/alpha'),
+ ]
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/E/alpha', 'A/B/E/beta')
+ expected_disk.add({'A/B/F/alpha' : Item(contents=alpha_contents+"xx\n")})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({'A/B/F/alpha' : Item(status=' ', wc_rev=2)})
+ expected_status.tweak('A/B/E', status='~ ')
+ expected_status.tweak('A/B/E/alpha', 'A/B/F/alpha', status='M ')
+ expected_status.tweak('A/B/E/beta', status='! ')
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+def patch_binary_file(sbox):
+ "patch a binary file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make the file binary by putting some non ascii chars inside or propset
+ # will return a warning
+ sbox.simple_append('iota', b'\0\202\203\204\205\206\207nsomething\nelse\xFF')
+ sbox.simple_propset('svn:mime-type', 'application/binary', 'iota')
+
+ expected_output = [
+ 'Index: svn-test-work/working_copies/patch_tests-57/iota\n',
+ '===================================================================\n',
+ 'diff --git a/iota b/iota\n',
+ 'GIT binary patch\n',
+ 'literal 48\n',
+ 'zc$^E#$ShU>qLPeMg|y6^R0Z|S{E|d<JuZf(=9bpB_PpZ!+|-hc%)E52)STkf{{Wp*\n',
+ 'B5)uFa\n',
+ '\n',
+ 'literal 25\n',
+ 'ec$^E#$ShU>qLPeMg|y6^R0Z|S{E|d<JuU!m{s;*G\n',
+ '\n',
+ 'Property changes on: iota\n',
+ '___________________________________________________________________\n',
+ 'Added: svn:mime-type\n',
+ '## -0,0 +1 ##\n',
+ '+application/binary\n',
+ '\ No newline at end of property\n',
+ ]
+
+ _, diff_output, _ = svntest.actions.run_and_verify_svn(expected_output, [],
+ 'diff', '--git',
+ wc_dir)
+
+ sbox.simple_revert('iota')
+
+ tmp = sbox.get_tempname()
+ svntest.main.file_write(tmp, ''.join(diff_output))
+
+ expected_output = wc.State(wc_dir, {
+ 'iota' : Item(status='UU'),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota',
+ props={'svn:mime-type':'application/binary'},
+ contents =
+ b'This is the file \'iota\'.\n' +
+ b'\0\202\203\204\205\206\207nsomething\nelse\xFF')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', status='MM')
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_patch(wc_dir, tmp,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # Ok, now try applying it backwards
+ expected_output.tweak('iota', status='UU')
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ svntest.actions.run_and_verify_patch(wc_dir, tmp,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], False, True, '--reverse-diff')
+
+def patch_delete_nodes(sbox):
+ "apply deletes via patch"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_propset('A', 'B', 'A/B/E/alpha')
+ sbox.simple_append('A/mu', '\0')
+ sbox.simple_propset('svn:mime-type', 'application/nonsense', 'A/mu')
+
+ sbox.simple_commit() # r2
+ sbox.simple_update()
+
+ expected_skip = wc.State('', { })
+
+ original_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ original_disk = svntest.main.greek_state.copy()
+ original_disk.tweak('A/mu',
+ props={'svn:mime-type':'application/nonsense'},
+ contents = 'This is the file \'mu\'.\n\0')
+ original_disk.tweak('A/B/E/alpha', props={'A':'B'})
+ svntest.actions.run_and_verify_status(wc_dir, original_status)
+ svntest.actions.verify_disk(wc_dir, original_disk, True)
+
+ sbox.simple_rm('A/B/E/alpha', 'A/B/E/beta', 'A/mu')
+
+ _, diff, _ = svntest.actions.run_and_verify_svn(None, [],
+ 'diff', '--git', wc_dir)
+
+ patch = sbox.get_tempname('patch')
+ svntest.main.file_write(patch, ''.join(diff))
+
+ deleted_status = original_status.copy()
+ deleted_disk = original_disk.copy()
+ deleted_disk.remove('A/B/E/alpha', 'A/B/E/beta', 'A/mu')
+ deleted_status.tweak('A/B/E/alpha', 'A/B/E/beta', 'A/mu', status='D ')
+
+
+ svntest.actions.run_and_verify_status(wc_dir, deleted_status)
+ svntest.actions.verify_disk(wc_dir, deleted_disk, True)
+
+ # And now apply the patch from the clean state
+ sbox.simple_revert('A/B/E/alpha', 'A/B/E/beta', 'A/mu')
+
+ # Expect that the hint 'empty dir? -> delete dir' deletes 'E'
+ # ### A smarter diff format might change this in a future version
+ deleted_disk.remove('A/B/E')
+ deleted_status.tweak('A/B/E', status='D ')
+ expected_output = wc.State(wc_dir, {
+ 'A/mu' : Item(status='D '),
+ 'A/B/E' : Item(status='D '),
+ 'A/B/E/beta' : Item(status='D '),
+ 'A/B/E/alpha' : Item(status='D '),
+ })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, deleted_disk,
+ deleted_status, expected_skip,
+ [], False, True)
+
+ # And let's see if we can apply the reverse version of the patch
+ expected_output = wc.State(wc_dir, {
+ 'A/mu' : Item(status='A '),
+ 'A/B/E' : Item(status='A '),
+ 'A/B/E/beta' : Item(status='A '),
+ 'A/B/E/alpha' : Item(status='A '),
+ })
+ original_status.tweak('A/mu', status='RM') # New file
+ original_status.tweak('A/B/E', status='R ') # New dir
+ original_status.tweak('A/B/E/alpha', 'A/B/E/beta',
+ status='A ', wc_rev='-',
+ entry_status='R ', entry_rev='2')
+
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, original_disk,
+ original_status, expected_skip,
+ [], True, True, '--reverse-diff')
+
+def patch_delete_missing_eol(sbox):
+ "apply a delete missing an eol"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ delete_patch = [
+ "Index: A/B/E/beta\n",
+ "===================================================================\n",
+ "--- A/B/E/beta (revision 1)\n",
+ "+++ /dev/null\n",
+ "@@ -1 +0,0 @@\n",
+ "-This is the file 'beta'." # No final EOL
+ ]
+
+ patch = sbox.get_tempname('patch')
+ svntest.main.file_write(patch, ''.join(delete_patch))
+
+ expected_output = wc.State(wc_dir, {
+ 'A/B/E/beta' : Item(status='D '),
+ })
+ expected_skip = wc.State(wc_dir, {
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/E/beta', status='D ')
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/E/beta')
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], False, True)
+
+ # Try again? -> Merged
+ expected_output = wc.State(wc_dir, {
+ 'A/B/E/beta' : Item(status='G '),
+ })
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], False, True)
+
+ # Reverse
+ expected_output = wc.State(wc_dir, {
+ 'A/B/E/beta' : Item(status='A '),
+ })
+ expected_skip = wc.State(wc_dir, {
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status.tweak('A/B/E/beta', status='R ')
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], False, True, '--reverse-diff')
+
+ # Try again? -> Already applied
+ expected_output = wc.State(wc_dir, {
+ 'A/B/E/beta' : Item(status='G '),
+ })
+ expected_skip = wc.State(wc_dir, {
+ })
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], False, True, '--reverse-diff')
+
+def patch_final_eol(sbox):
+ "patch the final eol"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ delete_patch = [
+ 'Index: A/mu\n',
+ '===================================================================\n',
+ '--- A/mu\t(revision 1)\n',
+ '+++ A/mu\t(working copy)\n',
+ '@@ -1 +1 @@\n',
+ '-This is the file \'mu\'.\n',
+ '+This is the file \'mu\'.\n',
+ '\ No newline at end of file\n',
+ 'Index: iota\n',
+ '===================================================================\n',
+ '--- iota\t(revision 1)\n',
+ '+++ iota\t(working copy)\n',
+ '@@ -1 +1 @@\n',
+ '-This is the file \'iota\'.\n',
+ '+This is the file \'iota\'.\n',
+ '\ No newline at end of file' # Missing EOL
+ ]
+
+ patch = sbox.get_tempname('patch')
+ # We explicitly use wb here as this is the eol type added later in the test
+ svntest.main.file_write(patch, ''.join(delete_patch), mode='wb')
+
+ expected_output = wc.State(wc_dir, {
+ 'A/mu' : Item(status='U '),
+ 'iota' : Item(status='U '),
+ })
+ expected_skip = wc.State(wc_dir, {})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', 'A/mu', status='M ')
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota', contents="This is the file 'iota'.")
+ expected_disk.tweak('A/mu', contents="This is the file 'mu'.")
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], False, True)
+
+ # And again - Still U as patch doesn't check final EOL of source
+ expected_output.tweak('iota', 'A/mu', status='U ')
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], False, True)
+
+ # Reverse
+ expected_output.tweak('iota', 'A/mu', status='U ')
+ expected_disk.tweak('iota', contents="This is the file 'iota'.\n")
+ expected_disk.tweak('A/mu', contents="This is the file 'mu'.\n")
+ expected_status.tweak('iota', 'A/mu', status=' ')
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], False, True, '--reverse-diff')
+
+ # And once more
+ expected_output.tweak('iota', 'A/mu', status='U ')
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], False, True, '--reverse-diff')
+
+ # Change the unmodified form
+ sbox.simple_append('iota', 'This is the file \'iota\'.', truncate=True)
+ sbox.simple_append('A/mu', 'This is the file \'mu\'.', truncate=True)
+ sbox.simple_commit()
+ expected_status.tweak('iota', 'A/mu', wc_rev='2')
+
+ add_patch = [
+ 'Index: A/mu\n',
+ '===================================================================\n',
+ '--- A/mu\t(revision 2)\n',
+ '+++ A/mu\t(working copy)\n',
+ '@@ -1 +1 @@\n',
+ '-This is the file \'mu\'.\n',
+ '\ No newline at end of file\n',
+ '+This is the file \'mu\'.\n',
+ 'Index: iota\n',
+ '===================================================================\n',
+ '--- iota\t(revision 2)\n',
+ '+++ iota\t(working copy)\n',
+ '@@ -1 +1 @@\n',
+ '-This is the file \'iota\'.\n',
+ '\ No newline at end of file\n',
+ '+This is the file \'iota\'.' # Missing eol
+ ]
+
+ svntest.main.file_write(patch, ''.join(add_patch), mode='wb')
+
+ # Apply the patch
+ expected_output.tweak('iota', 'A/mu', status='U ')
+ expected_disk.tweak('iota', contents="This is the file 'iota'.\n")
+ expected_disk.tweak('A/mu', contents="This is the file 'mu'.\n")
+ expected_status.tweak('iota', 'A/mu', status='M ')
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], False, True)
+
+ # And again
+ expected_output.tweak('iota', 'A/mu', status='U ')
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], False, True)
+
+ # And in reverse
+ expected_output.tweak('iota', 'A/mu', status='U ')
+ expected_disk.tweak('iota', contents="This is the file 'iota'.")
+ expected_disk.tweak('A/mu', contents="This is the file 'mu'.")
+ expected_status.tweak('iota', 'A/mu', status=' ')
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], False, True, '--reverse-diff')
+
+ # And again
+ expected_output.tweak('iota', 'A/mu', status='U ')
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], False, True, '--reverse-diff')
+
+def patch_adds_executability_nocontents(sbox):
+ """patch adds svn:executable, without contents"""
+
+ sbox.build(read_only=True)
+ wc_dir = sbox.wc_dir
+
+ unidiff_patch = (
+ "diff --git a/iota b/iota\n"
+ "old mode 100644\n"
+ "new mode 100755\n"
+ )
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, unidiff_patch)
+
+ expected_output = wc.State(wc_dir, {
+ 'iota' : Item(status=' U')
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ # "*" is SVN_PROP_EXECUTABLE_VALUE aka SVN_PROP_BOOLEAN_TRUE
+ expected_disk.tweak('iota', props={'svn:executable': '*'})
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', status=' M')
+
+ expected_skip = wc.State(wc_dir, { })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ check_props=True)
+
+ # And try it again
+ # This may produce different output but must have the same result
+ expected_output.tweak('iota', status=' G')
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ check_props=True)
+
+ # And then try it in reverse
+ expected_disk.tweak('iota', props={})
+ expected_status.tweak('iota', status=' ')
+ expected_output.tweak('iota', status=' U')
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True, '--reverse-diff')
+
+ # And try it again
+ # This may produce different output but must have the same result
+ expected_output.tweak('iota', status=' G')
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True, '--reverse-diff')
+
+def patch_adds_executability_nocontents2(sbox):
+ "patch adds svn:executable, without contents 2"
+
+ sbox.build(read_only=True)
+ wc_dir = sbox.wc_dir
+
+ unidiff_patch = (
+ "diff --git a/new b/new\n"
+ "old mode 100644\n"
+ "new mode 100755\n"
+ )
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, unidiff_patch)
+
+ expected_output = wc.State(wc_dir, {
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ expected_skip = wc.State(wc_dir, {
+ 'new' : Item(verb='Skipped missing target')
+ })
+
+ # This creates 'new', while a skip or reject is expected
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+
+def patch_adds_executability_yescontents(sbox):
+ """patch adds svn:executable, with contents"""
+
+ sbox.build(read_only=True)
+ wc_dir = sbox.wc_dir
+
+ mu_new_contents = (
+ "This is the file 'mu'.\n"
+ "with text mods too\n"
+ )
+
+ unidiff_patch = (
+ "diff --git a/A/mu b/A/mu\n"
+ "old mode 100644\n"
+ "new mode 100755\n"
+ "index 8a0f01c..dfad3ac\n"
+ "--- a/A/mu\n"
+ "+++ b/A/mu\n"
+ "@@ -1 +1,2 @@\n"
+ " This is the file 'mu'.\n"
+ "+with text mods too\n"
+ )
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, unidiff_patch)
+
+ expected_output = [
+ 'UU %s\n' % sbox.ospath('A/mu'),
+ ]
+ expected_disk = svntest.main.greek_state.copy()
+ # "*" is SVN_PROP_EXECUTABLE_VALUE aka SVN_PROP_BOOLEAN_TRUE
+ expected_disk.tweak('A/mu', props={'svn:executable': '*'})
+ expected_disk.tweak('A/mu', contents=mu_new_contents)
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status='MM')
+
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ check_props=True)
+
+def patch_deletes_executability(sbox):
+ """patch deletes svn:executable"""
+
+ sbox.build(read_only=True)
+ wc_dir = sbox.wc_dir
+
+ ## Set up the basic state.
+ sbox.simple_propset('svn:executable', 'yes', 'iota')
+ #sbox.simple_commit(target='iota', message="Make 'iota' executable.")
+
+ unidiff_patch = (
+ "diff --git a/iota b/iota\n"
+ "old mode 100755\n"
+ "new mode 100644\n"
+ )
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, unidiff_patch)
+
+ expected_output = [
+ ' U %s\n' % sbox.ospath('iota'),
+ ]
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota') # props=None by default
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', status=' ')
+
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ check_props=True)
+
+def patch_ambiguous_executability_contradiction(sbox):
+ """patch ambiguous svn:executable, bad"""
+
+ sbox.build(read_only=True)
+ wc_dir = sbox.wc_dir
+
+ unidiff_patch = (
+ "Index: iota\n"
+ "===================================================================\n"
+ "diff --git a/iota b/iota\n"
+ "old mode 100755\n"
+ "new mode 100644\n"
+ "Property changes on: iota\n"
+ "-------------------------------------------------------------------\n"
+ "Added: svn:executable\n"
+ "## -0,0 +1 ##\n"
+ "+*\n"
+ )
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, unidiff_patch)
+
+ expected_output = []
+
+ expected_disk = svntest.main.greek_state.copy()
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ expected_skip = wc.State('', { })
+
+ error_re_string = r'.*Invalid patch:.*contradicting.*mode.*svn:executable'
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ error_re_string=error_re_string,
+ check_props=True)
+
+def patch_ambiguous_executability_consistent(sbox):
+ """patch ambiguous svn:executable, good"""
+
+ sbox.build(read_only=True)
+ wc_dir = sbox.wc_dir
+
+ unidiff_patch = (
+ "Index: iota\n"
+ "===================================================================\n"
+ "diff --git a/iota b/iota\n"
+ "old mode 100644\n"
+ "new mode 100755\n"
+ "Property changes on: iota\n"
+ "-------------------------------------------------------------------\n"
+ "Added: svn:executable\n"
+ "## -0,0 +1 ##\n"
+ "+*\n"
+ )
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, unidiff_patch)
+
+ expected_output = [
+ ' U %s\n' % sbox.ospath('iota'),
+ ]
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota', props={'svn:executable': '*'})
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', status=' M')
+
+ expected_skip = wc.State('', { })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ error_re_string=None,
+ check_props=True)
+
+def patch_prop_madness(sbox):
+ "patch property madness"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_propset('mod_s', 'value\n',
+ 'iota', 'A/mu')
+
+ sbox.simple_propset('mod_s_n', 'no-eol',
+ 'iota', 'A/mu')
+
+ sbox.simple_propset('mod_l', 'this\nis\na\nvery\nvery\nlong\nvalue.\n',
+ 'iota', 'A/mu')
+
+ sbox.simple_propset('mod_l_n', 'this\nis\na\nvery\nvery\nlong\nvalue.\n'
+ 'without\neol', # No eol at end
+ 'iota', 'A/mu')
+
+ sbox.simple_propset('del', 'value\n',
+ 'iota', 'A/mu')
+
+ sbox.simple_propset('del_n', 'no-eol',
+ 'iota', 'A/mu')
+
+ sbox.simple_commit()
+
+ r2_props = {
+ 'mod_l_n' : 'this\nis\na\nvery\nvery\nlong\nvalue.\nwithout\neol',
+ 'mod_l' : 'this\nis\na\nvery\nvery\nlong\nvalue.\n',
+ 'mod_s' : 'value\n',
+ 'mod_s_n' : 'no-eol',
+ 'del' : 'value\n',
+ 'del_n' : 'no-eol',
+ }
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota', 'A/mu', props=r2_props)
+
+ sbox.simple_propset('mod_s', 'other\n',
+ 'iota', 'A/mu')
+
+ sbox.simple_propset('mod_s_n', 'still no eol',
+ 'iota', 'A/mu')
+
+ sbox.simple_propset('mod_l', 'this\nis\na\nsomewhat\nlong\nvalue.\n',
+ 'iota', 'A/mu')
+
+ sbox.simple_propset('mod_l_n', 'this\nis\na\nanother\n..\nlong\nvalue.\n'
+ 'without\neol', # No eol at end
+ 'iota', 'A/mu')
+
+ sbox.simple_propdel('del', 'iota', 'A/mu')
+
+ sbox.simple_propdel('del_n', 'iota', 'A/mu')
+
+ sbox.simple_propset('add_s', 'new-value\n',
+ 'iota', 'A/mu')
+
+ sbox.simple_propset('add_s_n', 'new other no eol',
+ 'iota', 'A/mu')
+
+ sbox.simple_propset('add_l', 'this\nis\nsomething\n',
+ 'iota', 'A/mu')
+
+ sbox.simple_propset('add_l_n', 'this\nhas\nno\neol', # No eol at end
+ 'iota', 'A/mu')
+
+ _, output, _ = svntest.actions.run_and_verify_svn(None, [],
+ 'diff', wc_dir)
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ new_props = {
+ 'mod_s' : 'other\n',
+ 'mod_s_n' : 'still no eol',
+ 'mod_l' : 'this\nis\na\nsomewhat\nlong\nvalue.\n',
+ 'mod_l_n' : 'this\nis\na\nanother\n..\nlong\nvalue.\nwithout\neol',
+ 'add_s' : 'new-value\n',
+ 'add_s_n' : 'new other no eol',
+ 'add_l' : 'this\nis\nsomething\n',
+ 'add_l_n' : 'this\nhas\nno\neol'
+ }
+
+ expected_status.tweak('iota', 'A/mu', status=' M', wc_rev='2')
+ expected_disk.tweak('iota', 'A/mu', props=new_props)
+
+ svntest.actions.verify_disk(wc_dir, expected_disk, True)
+ #svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', wc_dir, '-R')
+
+ patch = sbox.get_tempname('patch')
+ svntest.main.file_write(patch, ''.join(output), mode='wb')
+
+ expected_output = wc.State(wc_dir, {
+ 'A/mu' : Item(status=' U'),
+ 'iota' : Item(status=' U'),
+ })
+ expected_skip= wc.State(wc_dir, {
+ })
+
+ strip_count = wc_dir.count(os.path.sep)+1
+
+ # Patch once
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--strip', strip_count)
+
+ # Patch again
+ expected_output.tweak('A/mu', 'iota', status=' G')
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--strip', strip_count)
+
+ # Reverse
+ expected_output.tweak('A/mu', 'iota', status=' U')
+ expected_disk.tweak('A/mu', 'iota', props=r2_props)
+ expected_status.tweak('A/mu', 'iota', status=' ')
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--reverse-diff',
+ '--strip', strip_count)
+
+ # And repeat
+ expected_output.tweak('A/mu', 'iota', status=' G')
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--reverse-diff',
+ '--strip', strip_count)
+
+ # Ok, and now introduce some conflicts
+
+ sbox.simple_propset('del', 'value', 'iota') # Wrong EOL
+ sbox.simple_propset('del_n', 'regeleinde\n', 'iota') # Wrong EOL+value
+
+ sbox.simple_propset('del', 'waarde', 'A/mu') # Wrong EOL+value
+ sbox.simple_propset('del_n', 'no-eol\n', 'A/mu') # Wrong EOL
+
+ expected_output.tweak('A/mu', 'iota', status=' C')
+ expected_status.tweak('iota', 'A/mu', status=' M')
+
+ iota_props = new_props.copy()
+ iota_props['del_n'] = 'regeleinde\n'
+ mu_props = new_props.copy()
+ mu_props['del'] = 'waarde'
+ expected_disk.tweak('iota', props=iota_props)
+ expected_disk.tweak('A/mu', props=mu_props)
+
+ expected_disk.add({
+ 'A/mu.svnpatch.rej' : Item(contents="--- %s\n"
+ "+++ %s\n"
+ "Property: del\n"
+ "## -1,1 +0,0 ##\n"
+ "-value\n"
+ % (sbox.path('A/mu'),
+ sbox.path('A/mu'))),
+ 'iota.svnpatch.rej' : Item(contents="--- %s\n"
+ "+++ %s\n"
+ "Property: del_n\n"
+ "## -1,1 +0,0 ##\n"
+ "-no-eol\n"
+ "\ No newline at end of property\n"
+ % (sbox.path('iota'),
+ sbox.path('iota'))),
+ })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--strip', strip_count)
+
+def patch_empty_vs_delete(sbox):
+ "patch empty vs delete"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+ strip_count = wc_dir.count(os.path.sep)+1
+
+ sbox.simple_append('iota', '', truncate=True)
+
+ _, empty_diff, _ = svntest.actions.run_and_verify_svn(None, [],
+ 'diff', wc_dir)
+
+ _, empty_git, _ = svntest.actions.run_and_verify_svn(None, [],
+ 'diff', wc_dir, '--git')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', '--force', sbox.ospath('iota'))
+
+ _, del_diff, _ = svntest.actions.run_and_verify_svn(None, [],
+ 'diff', wc_dir)
+
+ _, del_git, _ = svntest.actions.run_and_verify_svn(None, [],
+ 'diff', wc_dir, '--git')
+
+ empty_patch = sbox.get_tempname('empty.patch')
+ svntest.main.file_write(empty_patch, ''.join(empty_diff), mode='wb')
+
+ empty_git_patch = sbox.get_tempname('git.empty.patch')
+ svntest.main.file_write(empty_git_patch, ''.join(empty_git), mode='wb')
+
+ del_patch = sbox.get_tempname('del.patch')
+ svntest.main.file_write(del_patch, ''.join(del_diff), mode='wb')
+
+ del_git_patch = sbox.get_tempname('git.del.patch')
+ svntest.main.file_write(del_git_patch, ''.join(del_git), mode='wb')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', sbox.ospath('iota'))
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_disk = svntest.main.greek_state.copy()
+ expected_skip = svntest.wc.State(wc_dir, {})
+
+
+ # Git diff to empty file - Expect empty file
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(status='U ')
+ })
+ expected_disk.tweak('iota', contents='')
+ expected_status.tweak('iota', status='M ')
+ svntest.actions.run_and_verify_patch(wc_dir, empty_git_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # Retry
+ expected_output.tweak('iota', status='G ')
+ svntest.actions.run_and_verify_patch(wc_dir, empty_git_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', sbox.ospath('iota'))
+
+ # Ordinary (unified) diff to empty file - Expect deleted
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(status='D ')
+ })
+ expected_disk.remove('iota')
+ expected_status.tweak('iota', status='D ')
+
+ svntest.actions.run_and_verify_patch(wc_dir, empty_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--strip', strip_count)
+
+ # Retry
+ expected_output.tweak('iota', status='G ')
+ svntest.actions.run_and_verify_patch(wc_dir, empty_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--strip', strip_count)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', sbox.ospath('iota'))
+
+ # Ordinary diff to deleted
+ expected_output.tweak('iota', status='D ')
+ svntest.actions.run_and_verify_patch(wc_dir, del_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--strip', strip_count)
+
+ # Retry
+ expected_output.tweak('iota', status='G ')
+ svntest.actions.run_and_verify_patch(wc_dir, del_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--strip', strip_count)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', sbox.ospath('iota'))
+
+ # Git diff to deleted
+ expected_output.tweak('iota', status='D ')
+ svntest.actions.run_and_verify_patch(wc_dir, del_git_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # Retry
+ expected_output.tweak('iota', status='G ')
+ svntest.actions.run_and_verify_patch(wc_dir, del_git_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # # Not needed. Result of previous test
+ #svntest.actions.run_and_verify_svn(None, [],
+ # 'rm', '--force', sbox.ospath('iota'))
+
+ # Ok, and now let's check what happens on reverse diffs with nothing
+ # there
+
+ # Git empty patch -> skip... target not found
+ expect_no_output = svntest.wc.State(wc_dir, {})
+ expect_skip_iota = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Skipped')
+ })
+ svntest.actions.run_and_verify_patch(wc_dir, empty_git_patch,
+ expect_no_output, expected_disk,
+ expected_status, expect_skip_iota,
+ [], True, True,
+ '--reverse-diff')
+
+ # # Not needed. Result of previous test
+ #svntest.actions.run_and_verify_svn(None, [],
+ # 'rm', '--force', sbox.ospath('iota'))
+
+ # Unified empty patch -> Create iota
+ expected_output.tweak('iota', status='A ')
+ expected_status.tweak('iota', status='R ')
+ expected_disk.add({
+ 'iota' : Item(contents="This is the file 'iota'.\n")
+ })
+ svntest.actions.run_and_verify_patch(wc_dir, empty_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--strip', strip_count,
+ '--reverse-diff')
+ # And retry
+ expected_output.tweak('iota', status='G ')
+ svntest.actions.run_and_verify_patch(wc_dir, empty_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--strip', strip_count,
+ '--reverse-diff')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', '--force', sbox.ospath('iota'))
+
+ expected_output.tweak('iota', status='A ')
+ svntest.actions.run_and_verify_patch(wc_dir, del_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--strip', strip_count,
+ '--reverse-diff')
+ # And retry
+ expected_output.tweak('iota', status='G ')
+ svntest.actions.run_and_verify_patch(wc_dir, del_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--strip', strip_count,
+ '--reverse-diff')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', '--force', sbox.ospath('iota'))
+
+ expected_output.tweak('iota', status='A ')
+ svntest.actions.run_and_verify_patch(wc_dir, del_git_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--reverse-diff')
+ # And retry
+ expected_output.tweak('iota', status='G ')
+ svntest.actions.run_and_verify_patch(wc_dir, del_git_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--reverse-diff')
+
+def patch_add_remove_executable(sbox):
+ "add and remove executable file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ eicar_data = 'X5O!P%@AP[4\PZX54(P^)7CC)7}$' \
+ 'EICAR-STANDARD-ANTIVIRUS-TEST-FILE!$H+H*\0'
+ other_data = 'X5O!P%@AP[4\PZX54(P^)7CC)7}$' \
+ 'SOME-LESS-INTERESTING-OTHER-TEXT!!!$H+H*\0' \
+ '\0\0\0\0\0\0\0\0'
+
+ # Write out an actual MS-DOS program
+ sbox.simple_add_text(eicar_data, 'eicar.com')
+ sbox.simple_propset('svn:executable', 'x', 'eicar.com')
+
+ _, diff_add, _ = svntest.actions.run_and_verify_svn(None, [],
+ 'diff', '--git', wc_dir)
+
+ sbox.simple_commit()
+
+ sbox.simple_append('eicar.com', other_data, truncate=True)
+ sbox.simple_propdel('svn:executable', 'eicar.com')
+
+ _, diff_edit, _ = svntest.actions.run_and_verify_svn(None, [],
+ 'diff', '--git', wc_dir)
+
+ sbox.simple_commit()
+ sbox.simple_rm('eicar.com')
+
+ _, diff_rm, _ = svntest.actions.run_and_verify_svn(None, [],
+ 'diff', '--git', wc_dir)
+
+ add_patch = sbox.get_tempname('add.patch')
+ svntest.main.file_write(add_patch, ''.join(diff_add), mode='wb')
+
+ edit_patch = sbox.get_tempname('edit.patch')
+ svntest.main.file_write(edit_patch, ''.join(diff_edit), mode='wb')
+
+ rm_patch = sbox.get_tempname('rm.patch')
+ svntest.main.file_write(rm_patch, ''.join(diff_rm), mode='wb')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'eicar.com' : Item(status='RM', wc_rev=3)
+ })
+ expected_output = svntest.wc.State(wc_dir, {
+ 'eicar.com' : Item(status='A ')
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'eicar.com' : Item(contents=eicar_data,
+ props={'svn:mime-type': 'application/octet-stream',
+ 'svn:executable': '*'}),
+ })
+ expected_skip = svntest.wc.State(wc_dir, {})
+ svntest.actions.run_and_verify_patch(wc_dir, add_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # And repeat
+ expected_output.tweak('eicar.com', status='GG')
+ svntest.actions.run_and_verify_patch(wc_dir, add_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # Now apply the edit
+ expected_output.tweak('eicar.com', status='UU')
+ expected_disk.tweak('eicar.com',
+ props={'svn:mime-type': 'application/octet-stream'},
+ contents=other_data)
+ svntest.actions.run_and_verify_patch(wc_dir, edit_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # And repeat
+ expected_output.tweak('eicar.com', status='GG')
+ svntest.actions.run_and_verify_patch(wc_dir, edit_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # Now apply the delete
+ expected_output.tweak('eicar.com', status='D ')
+ expected_disk.remove('eicar.com')
+ expected_status.tweak('eicar.com', status='D ')
+ svntest.actions.run_and_verify_patch(wc_dir, rm_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # And repeat
+ expected_output.tweak('eicar.com', status='GG')
+ svntest.actions.run_and_verify_patch(wc_dir, rm_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # And reverse the delete
+ expected_output.tweak('eicar.com', status='A ')
+ expected_disk.add({
+ 'eicar.com' : Item(contents=other_data,
+ props={'svn:mime-type': 'application/octet-stream'}),
+ })
+ expected_status.tweak('eicar.com', status='RM')
+ svntest.actions.run_and_verify_patch(wc_dir, rm_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--reverse-diff')
+
+ # Repeat
+ expected_output.tweak('eicar.com', status='GG')
+ svntest.actions.run_and_verify_patch(wc_dir, rm_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--reverse-diff')
+
+ # And reverse the edit
+ expected_output.tweak('eicar.com', status='UU')
+ expected_disk.tweak('eicar.com', contents=eicar_data,
+ props={'svn:mime-type': 'application/octet-stream',
+ 'svn:executable': '*'})
+ svntest.actions.run_and_verify_patch(wc_dir, edit_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--reverse-diff')
+ # Repeat
+ expected_output.tweak('eicar.com', status='GG')
+ svntest.actions.run_and_verify_patch(wc_dir, edit_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--reverse-diff')
+
+ # And the add
+ expected_output.tweak('eicar.com', status='D ')
+ expected_disk.remove('eicar.com')
+ expected_status.tweak('eicar.com', status='D ')
+ svntest.actions.run_and_verify_patch(wc_dir, add_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--reverse-diff')
+
+ # And a final repeat
+ expected_output.tweak('eicar.com', status='GG')
+ svntest.actions.run_and_verify_patch(wc_dir, add_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--reverse-diff')
+
+def patch_git_symlink(sbox):
+ "patch a git symlink"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ patch_add = [
+ 'diff --git a/link-to-iota b/link-to-iota\n',
+ 'new file mode 120000\n',
+ 'index 0000000..3ef26e4\n',
+ '--- /dev/null\n',
+ '+++ b/link-to-iota\n',
+ '@@ -0,0 +1 @@\n',
+ '+iota\n',
+ '\ No newline at end of file\n',
+ ]
+
+ patch_edit = [
+ 'diff --git a/link-to-iota b/link-to-iota\n',
+ 'index 3ef26e4..33e5b38 120000\n',
+ '--- a/link-to-iota\n',
+ '+++ b/link-to-iota\n',
+ '@@ -1 +1 @@\n',
+ '-iota\n',
+ '\ No newline at end of file\n',
+ '+A/mu\n',
+ '\ No newline at end of file\n',
+ ]
+
+ patch_to_file = [
+ 'diff --git a/link-to-iota b/link-to-iota\n',
+ 'deleted file mode 120000\n',
+ 'index 33e5b38..0000000\n',
+ '--- a/link-to-iota\n',
+ '+++ /dev/null\n',
+ '@@ -1 +0,0 @@\n',
+ '-A/mu\n',
+ '\ No newline at end of file\n',
+ 'diff --git a/link-to-iota b/link-to-iota\n',
+ 'new file mode 100644\n',
+ 'index 0000000..1b130bf\n',
+ '--- /dev/null\n',
+ '+++ b/link-to-iota\n',
+ '@@ -0,0 +1 @@\n',
+ '+This is a real file\n',
+ ]
+
+ add_patch = sbox.get_tempname('add.patch')
+ svntest.main.file_write(add_patch, ''.join(patch_add), mode='wb')
+
+ edit_patch = sbox.get_tempname('edit.patch')
+ svntest.main.file_write(edit_patch, ''.join(patch_edit), mode='wb')
+
+ to_file_patch = sbox.get_tempname('to_file.patch')
+ svntest.main.file_write(to_file_patch, ''.join(patch_to_file), mode='wb')
+
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'link-to-iota' : Item(status='A ', wc_rev='-'),
+ })
+ expected_output = svntest.wc.State(wc_dir, {
+ 'link-to-iota' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'link-to-iota' : Item(contents="This is the file 'iota'.\n",
+ props={'svn:special': '*'}),
+ })
+ if not svntest.main.is_posix_os():
+ expected_disk.tweak('link-to-iota', contents='link iota')
+ expected_skip = svntest.wc.State(wc_dir, {})
+
+ svntest.actions.run_and_verify_patch(wc_dir, add_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # And again
+ expected_output.tweak('link-to-iota', status='GG')
+ svntest.actions.run_and_verify_patch(wc_dir, add_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # Now tweak the link
+ expected_output.tweak('link-to-iota', status='U ')
+ if svntest.main.is_posix_os():
+ expected_disk.tweak('link-to-iota', contents="This is the file 'mu'.\n")
+ else:
+ expected_disk.tweak('link-to-iota', contents='link A/mu')
+ svntest.actions.run_and_verify_patch(wc_dir, edit_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # And again
+ expected_output.tweak('link-to-iota', status='G ')
+ svntest.actions.run_and_verify_patch(wc_dir, edit_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # And replace the link with a file
+ expected_output.tweak('link-to-iota', status='A ', prev_status='D ')
+ expected_disk.tweak('link-to-iota', contents="This is a real file\n",
+ props={})
+ svntest.actions.run_and_verify_patch(wc_dir, to_file_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # And again - Delete can't be applied
+ expected_output.tweak('link-to-iota', status='G ', prev_status='C ')
+ expected_disk.add({
+ 'link-to-iota.svnpatch.rej': Item(
+ contents='--- link-to-iota\n'
+ '+++ /dev/null\n'
+ '@@ -1,1 +0,0 @@\n'
+ '-A/mu\n'
+ '\\ No newline at end of file\n'),
+ })
+ svntest.actions.run_and_verify_patch(wc_dir, to_file_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+def patch_like_git_symlink(sbox):
+ "patch like a git symlink"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ patch_add = [
+ 'diff --git a/link-to-iota b/link-to-iota\n',
+ 'new file mode 100000\n',
+ 'index 0000000..3ef26e4\n',
+ '--- /dev/null\n',
+ '+++ b/link-to-iota\n',
+ '@@ -0,0 +1 @@\n',
+ '+iota\n',
+ '\ No newline at end of file\n',
+ ]
+
+ patch_edit = [
+ 'diff --git a/link-to-iota b/link-to-iota\n',
+ 'index 3ef26e4..33e5b38 100000\n',
+ '--- a/link-to-iota\n',
+ '+++ b/link-to-iota\n',
+ '@@ -1 +1 @@\n',
+ '-iota\n',
+ '\ No newline at end of file\n',
+ '+A/mu\n',
+ '\ No newline at end of file\n',
+ ]
+
+ patch_to_file = [
+ 'diff --git a/link-to-iota b/link-to-iota\n',
+ 'deleted file mode 100000\n',
+ 'index 33e5b38..0000000\n',
+ '--- a/link-to-iota\n',
+ '+++ /dev/null\n',
+ '@@ -1 +0,0 @@\n',
+ '-A/mu\n',
+ '\ No newline at end of file\n',
+ 'diff --git a/link-to-iota b/link-to-iota\n',
+ 'new file mode 100644\n',
+ 'index 0000000..1b130bf\n',
+ '--- /dev/null\n',
+ '+++ b/link-to-iota\n',
+ '@@ -0,0 +1 @@\n',
+ '+This is a real file\n',
+ ]
+
+ add_patch = sbox.get_tempname('add.patch')
+ svntest.main.file_write(add_patch, ''.join(patch_add), mode='wb')
+
+ edit_patch = sbox.get_tempname('edit.patch')
+ svntest.main.file_write(edit_patch, ''.join(patch_edit), mode='wb')
+
+ to_file_patch = sbox.get_tempname('to_file.patch')
+ svntest.main.file_write(to_file_patch, ''.join(patch_to_file), mode='wb')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'link-to-iota' : Item(status='A ', wc_rev='-'),
+ })
+ expected_output = svntest.wc.State(wc_dir, {
+ 'link-to-iota' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'link-to-iota' : Item(contents="iota"),
+ })
+ expected_skip = svntest.wc.State(wc_dir, {})
+
+ svntest.actions.run_and_verify_patch(wc_dir, add_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # And again
+ expected_output.tweak('link-to-iota', status='G ')
+ svntest.actions.run_and_verify_patch(wc_dir, add_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # Now tweak the link
+ expected_output.tweak('link-to-iota', status='U ')
+ expected_disk.tweak('link-to-iota', contents='A/mu')
+ svntest.actions.run_and_verify_patch(wc_dir, edit_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # And again
+ expected_output.tweak('link-to-iota', status='G ')
+ svntest.actions.run_and_verify_patch(wc_dir, edit_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # And replace the link with a file
+ expected_output.tweak('link-to-iota', status='U ')
+ expected_output.tweak('link-to-iota', status='A ', prev_status='D ')
+ expected_disk.tweak('link-to-iota', contents="This is a real file\n")
+ svntest.actions.run_and_verify_patch(wc_dir, to_file_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # And again - Delete can't be applied
+ expected_output.tweak('link-to-iota', status='G ', prev_status='C ')
+ expected_disk.add({
+ 'link-to-iota.svnpatch.rej': Item(
+ contents='--- link-to-iota\n'
+ '+++ /dev/null\n'
+ '@@ -1,1 +0,0 @@\n'
+ '-A/mu\n'
+ '\\ No newline at end of file\n'),
+ })
+ svntest.actions.run_and_verify_patch(wc_dir, to_file_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+def patch_symlink_changes(sbox):
+ "patch symlink changes"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ strip_count = wc_dir.count(os.path.sep)+1
+
+ os.remove(sbox.ospath('iota'))
+ sbox.simple_symlink('A/B/E/beta', 'iota')
+ sbox.simple_propset('svn:special', 'X', 'iota')
+
+ _, diff_tolink, _ = svntest.actions.run_and_verify_svn(None, [],
+ 'diff', wc_dir)
+
+ _, git_tolink, _ = svntest.actions.run_and_verify_svn(None, [],
+ 'diff', wc_dir, '--git')
+
+ sbox.simple_commit()
+
+ os.remove(sbox.ospath('iota'))
+ sbox.simple_symlink('A/B/E/alpha', 'iota')
+
+ _, diff_changelink, _ = svntest.actions.run_and_verify_svn(None, [],
+ 'diff', wc_dir)
+
+ _, git_changelink, _ = svntest.actions.run_and_verify_svn(None, [],
+ 'diff', wc_dir, '--git')
+
+ tolink_patch = sbox.get_tempname('tolink.patch')
+ svntest.main.file_write(tolink_patch, ''.join(diff_tolink), mode='wb')
+
+ git_tolink_patch = sbox.get_tempname('git_tolink.patch')
+ svntest.main.file_write(git_tolink_patch, ''.join(git_tolink), mode='wb')
+
+ changelink_patch = sbox.get_tempname('changelink.patch')
+ svntest.main.file_write(changelink_patch, ''.join(diff_changelink), mode='wb')
+
+ git_changelink_patch = sbox.get_tempname('git_changelink.patch')
+ svntest.main.file_write(git_changelink_patch, ''.join(git_changelink), mode='wb')
+
+ sbox.simple_revert('iota')
+ sbox.simple_update('', 1)
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', status='MM')
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(status='UU'),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota', props={'svn:special': '*'})
+ expected_skip = svntest.wc.State(wc_dir, {})
+
+ if svntest.main.is_posix_os():
+ expected_disk.tweak('iota', contents="This is the file 'beta'.\n")
+ else:
+ expected_disk.tweak('iota', contents="link A/B/E/beta")
+
+ # Turn into link
+ svntest.actions.run_and_verify_patch(wc_dir, tolink_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--strip', strip_count)
+
+ # And in git style
+ sbox.simple_revert('iota')
+ svntest.actions.run_and_verify_patch(wc_dir, git_tolink_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # Retry
+ expected_output.tweak('iota', status='GG')
+ svntest.actions.run_and_verify_patch(wc_dir, tolink_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--strip', strip_count)
+ svntest.actions.run_and_verify_patch(wc_dir, git_tolink_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ sbox.simple_update('', 2) # Go to r2.
+ sbox.simple_revert('iota')
+ expected_status.tweak(wc_rev=2)
+
+ # Turn back into files
+ expected_output.tweak('iota', status='UU')
+ expected_disk.tweak('iota', props={}, contents="This is the file 'iota'.\n")
+ svntest.actions.run_and_verify_patch(wc_dir, tolink_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--strip', strip_count,
+ '--reverse-diff')
+
+ # And in git style
+ sbox.simple_revert('iota')
+ svntest.actions.run_and_verify_patch(wc_dir, git_tolink_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--reverse-diff')
+
+ # Retry
+ expected_output.tweak('iota', status='GG')
+ svntest.actions.run_and_verify_patch(wc_dir, tolink_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--strip', strip_count,
+ '--reverse-diff')
+ svntest.actions.run_and_verify_patch(wc_dir, git_tolink_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--reverse-diff')
+
+ # And now just tweak the link
+ expected_output.tweak('iota', status='U ')
+ expected_disk.tweak('iota', props={'svn:special': '*'})
+ expected_status.tweak('iota', status='M ')
+
+ if svntest.main.is_posix_os():
+ expected_disk.tweak('iota', contents="This is the file 'alpha'.\n")
+ else:
+ expected_disk.tweak('iota', contents="link A/B/E/alpha")
+
+ sbox.simple_revert('iota')
+ svntest.actions.run_and_verify_patch(wc_dir, changelink_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--strip', strip_count)
+
+ # And in git style
+ sbox.simple_revert('iota')
+ svntest.actions.run_and_verify_patch(wc_dir, git_changelink_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # Retry
+ expected_output.tweak('iota', status='G ')
+ svntest.actions.run_and_verify_patch(wc_dir, changelink_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--strip', strip_count)
+ svntest.actions.run_and_verify_patch(wc_dir, git_changelink_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+def patch_add_one_line(sbox):
+ "patch add just one line"
+
+ sbox.build(read_only=True)
+ wc_dir = sbox.wc_dir
+
+ diff = [
+ # This is a normal unified diff
+ "Index: A/B/E/alpha",
+ "===================================================================",
+ "--- A/B/E/alpha\t(revision 1)",
+ "+++ A/B/E/alpha\t(working copy)",
+ "@@ -1 +1,2 @@",
+ " This is the file 'alpha'.",
+ "+This is the file 'alpha'.",
+
+ "",
+
+ # This diff is hand crafted, as a generated diff would add the line at
+ # the end
+ "Index: A/B/E/beta",
+ "===================================================================",
+ "--- A/B/E/beta\t(revision 1)",
+ "+++ A/B/E/beta\t(working copy)",
+ "@@ -1 +1,2 @@",
+ "+This is the file 'beta'.",
+ " This is the file 'beta'.",
+ ""
+ ]
+
+ recurse_patch = sbox.get_tempname('recurse.patch')
+ svntest.main.file_write(recurse_patch, '\n'.join(diff), mode='wb')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/E/alpha', 'A/B/E/beta', status='M ')
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E/alpha' : Item(status='U '),
+ 'A/B/E/beta' : Item(status='U '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/B/E/alpha', contents="This is the file 'alpha'.\nThis is the file 'alpha'.\n")
+ expected_disk.tweak('A/B/E/beta', contents="This is the file 'beta'.\nThis is the file 'beta'.\n")
+ expected_skip = svntest.wc.State(wc_dir, {})
+
+ svntest.actions.run_and_verify_patch(wc_dir, recurse_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # Retry
+ expected_output.tweak(status='G ')
+ svntest.actions.run_and_verify_patch(wc_dir, recurse_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ sbox.simple_append('A/B/E/alpha',
+ "This is the file 'alpha'.\n")
+ sbox.simple_append('A/B/E/beta',
+ "This is the file 'beta'.\n")
+
+ # But can we remove the line? - Yes
+ expected_output.tweak(status='U ')
+ svntest.actions.run_and_verify_patch(wc_dir, recurse_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--reverse-diff')
+
+ # Once more?
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status.tweak('A/B/E/alpha', 'A/B/E/beta', status=' ')
+ svntest.actions.run_and_verify_patch(wc_dir, recurse_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--reverse-diff')
+
+ # And the last lines? - No...
+ expected_output.tweak(status='G ')
+ svntest.actions.run_and_verify_patch(wc_dir, recurse_patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--reverse-diff')
+
+def patch_with_mergeinfo(sbox):
+ "patch with mergeinfo"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ strip_count = wc_dir.count(os.path.sep)+1
+
+ sbox.simple_copy('A/B/E', 'E')
+ sbox.simple_append('A/B/E/alpha', 'extra\nlines\n')
+ sbox.simple_commit()
+
+ sbox.simple_propset('a', 'A', 'E') # 'a' < 'svn:mergeinfo'
+ sbox.simple_propset('z', 'Z', 'E') # 'z' > 'svn:mergeinfo'
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'merge', '^/A/B/E', sbox.ospath('E'))
+
+ _, diff, _ = svntest.actions.run_and_verify_svn(None, [],
+ 'diff', wc_dir)
+
+ sbox.simple_revert('E', 'E/alpha')
+
+ patch = sbox.get_tempname('recurse.patch')
+ svntest.main.file_write(patch, ''.join(diff), mode='wb')
+
+ expected_output = wc.State(wc_dir, {
+ 'E' : Item(status=' U'),
+ 'E/alpha' : Item(status='U '),
+ })
+ expected_skip = wc.State(wc_dir, {})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'E' : Item(status=' M', wc_rev='2'),
+ 'E/alpha' : Item(status='M ', wc_rev='2'),
+ 'E/beta' : Item(status=' ', wc_rev='2'),
+ })
+ expected_status.tweak('A/B/E/alpha', wc_rev=2)
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/B/E/alpha', contents="This is the file 'alpha'.\nextra\nlines\n")
+ expected_disk.add({
+ 'E' : Item(props={'a': 'A',
+ # We can't apply 'svn:mergeinfo' (yet)
+ 'z': 'Z'}),
+ 'E/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'E/alpha' : Item(contents="This is the file 'alpha'.\nextra\nlines\n"),
+ })
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--strip', strip_count)
+
+def patch_move_and_change(sbox):
+ "patch move and change"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_append('A/B/E/alpha', 'extra\nlines\n')
+ sbox.simple_propset('k', 'v', 'A/B/E/alpha')
+
+ sbox.simple_move('A/B/E/alpha', 'alpha')
+
+ _, diff, _ = svntest.actions.run_and_verify_svn(None, [],
+ 'diff', wc_dir, '--git')
+
+ patch = sbox.get_tempname('move_and_change.patch')
+ svntest.main.file_write(patch, ''.join(diff), mode='wb')
+
+ # Running the diff reversed doesn't work...
+ # We perform the add before reverting the move...
+ expected_output = wc.State(wc_dir, {
+ 'A/B/E/alpha' : Item(status='A '),
+ })
+ expected_skip = wc.State(wc_dir, {
+ 'alpha' : Item(verb='Skipped'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/E/alpha', status='R ',
+ moved_to='alpha')
+ expected_status.add({
+ 'alpha' : Item(status='A ', copied='+',
+ moved_from='A/B/E/alpha', wc_rev='-'),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'alpha' : Item(contents="This is the file 'alpha'.\nextra\nlines\n",
+ props={'k': 'v'}),
+ })
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--reverse-diff')
+
+ # Ok, let's remove the 'delete' portion and try in a clean WC
+ n = diff.index('Index: %s\n' % sbox.path('alpha'))
+ diff = diff[n:]
+ svntest.main.file_write(patch, ''.join(diff), mode='wb')
+
+ sbox.simple_revert('A/B/E/alpha', 'alpha')
+
+ expected_output = wc.State(wc_dir, {
+ 'A/B/E/alpha' : Item(status='D '),
+ 'alpha' : Item(status='A '),
+ })
+ expected_skip = wc.State(wc_dir, {})
+ expected_disk.remove('A/B/E/alpha')
+ expected_status.tweak('A/B/E/alpha', status='D ')
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # Retry
+ expected_output = svntest.wc.State(wc_dir, {
+ 'alpha' : Item(status='GG'),
+ })
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True)
+
+ # And now reverse
+ expected_output = wc.State(wc_dir, {
+ 'alpha' : Item(status='D '),
+ 'A/B/E/alpha' : Item(status='A '),
+ })
+ expected_disk.remove('alpha')
+ expected_disk.add({
+ 'A/B/E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ })
+ expected_status.remove('alpha')
+ expected_status.tweak('A/B/E/alpha', status=' ', moved_to=None)
+ svntest.actions.run_and_verify_patch(wc_dir, patch,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], True, True,
+ '--reverse-diff')
+
+@Issue(4609)
+def missing_trailing_context(sbox):
+ "missing trailing context"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_append('A/mu',
+ 'a\n'
+ 'b\n'
+ 'c\n'
+ 'd\n'
+ 'e\n',
+ truncate=True)
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # The hunk is expected to have two lines of trailing context but
+ # only has one.
+ unidiff_patch = [
+ "Index: A/mu\n"
+ "===================================================================\n",
+ "--- A/mu\t(revision 2)\n",
+ "+++ A/mu\t(working copy)\n",
+ "@@ -1,5 +1,5 @@\n",
+ " a\n",
+ " b\n",
+ "-c\n",
+ "+cc\n",
+ " d\n",
+ ]
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch), 'wb')
+
+ # GNU patch will apply the hunk with fuzz 1 and modify only the 'c' line.
+ # Our patch file finds the length mismatch and applies a penalty.
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -1,4 +1,4 @@ with fuzz 1\n',
+ ]
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu', contents =
+ 'a\n'
+ 'b\n'
+ 'cc\n'
+ 'd\n'
+ 'e\n')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('A/mu', status='M ')
+ expected_skip = wc.State('', { })
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ # Try reverse patch
+ expected_disk.tweak('A/mu', contents =
+ 'a\n'
+ 'b\n'
+ 'c\n'
+ 'd\n'
+ 'e\n')
+ expected_status.tweak('A/mu', status=' ')
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], False, True, '--reverse-diff')
+
+ # The hunk is expected to have two lines of trailing context but
+ # only has one.
+ unidiff_patch = [
+ "Index: A/mu\n"
+ "===================================================================\n",
+ "--- A/mu\t(revision 2)\n",
+ "+++ A/mu\t(working copy)\n",
+ "@@ -1,4 +1,4 @@\n",
+ " a\n",
+ " b\n",
+ "-c\n",
+ "+cc\n",
+ " d\n",
+ " e\n",
+ ]
+ patch_file_path = sbox.get_tempname('my2.patch')
+ svntest.main.file_write(patch_file_path, ''.join(unidiff_patch), 'wb')
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('A/mu'),
+ '> applied hunk @@ -1,5 +1,5 @@ with fuzz 1\n',
+ ]
+ expected_disk.tweak('A/mu', contents =
+ 'a\n'
+ 'b\n'
+ 'cc\n'
+ 'd\n'
+ 'e\n')
+ expected_status.tweak('A/mu', status='M ')
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+ # Try reverse patch
+ expected_disk.tweak('A/mu', contents =
+ 'a\n'
+ 'b\n'
+ 'c\n'
+ 'd\n'
+ 'e\n')
+ expected_status.tweak('A/mu', status=' ')
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip,
+ [], False, True, '--reverse-diff')
+
+def patch_missed_trail(sbox):
+ "apply a patch to an empty file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ patch_file_path = sbox.get_tempname('my.patch')
+ svntest.main.file_write(patch_file_path, ''.join([
+ # Add a line to a file containing just '\n', with a bad header (should be +1,2)
+ "Index: lf.txt\n",
+ "===================================================================\n",
+ "--- lf.txt\t(revision 2)\n",
+ "+++ lf.txt\t(working copy)\n",
+ "@@ -1 +1 @@\n",
+ "\n"
+ "+replacement\n",
+ ]))
+
+ sbox.simple_add_text('\n', 'lf.txt')
+ sbox.simple_commit()
+
+ expected_output = [
+ 'U %s\n' % sbox.ospath('lf.txt'),
+ '> applied hunk @@ -1,1 +1,2 @@ with fuzz 1\n',
+ ]
+
+ # Current result: lf.txt patched ok, new created, empty succeeds with offset.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'lf.txt' : Item(contents="\nreplacement\n"),
+ })
+ expected_skip = wc.State(wc_dir, {})
+ expected_status = None
+
+ svntest.actions.run_and_verify_patch(wc_dir, patch_file_path,
+ expected_output, expected_disk,
+ expected_status, expected_skip)
+
+@XFail()
+def patch_merge(sbox):
+ "patching a specific merge"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ sbox.simple_add_text('A\n'
+ 'B\n'
+ 'C\n'
+ 'J\n'
+ 'K\n'
+ 'L', 'new.txt')
+ sbox.simple_commit()
+
+ remote_patch = sbox.get_tempname('remote.patch')
+ svntest.main.file_write(remote_patch,
+ '--- new.txt\t(revision 6)\n'
+ '+++ new.txt\t(revision 7)\n'
+ '@@ -1,6 +1,9 @@\n'
+ ' A\n'
+ ' B\n'
+ '-C\n'
+ '+ C\n'
+ '+D\n'
+ '+E\n'
+ '+F\n'
+ ' J\n'
+ ' K\n'
+ ' L\n'
+ '\ No newline at end of file', mode='wb')
+
+ expected_skip = wc.State('', { })
+ expected_output = wc.State(wc_dir, {
+ 'new.txt' : Item(status='U '),
+ })
+ svntest.actions.run_and_verify_patch(wc_dir, remote_patch,
+ expected_output, None,
+ None, expected_skip)
+ sbox.simple_commit()
+ sbox.simple_update(revision=2)
+
+ local_patch = sbox.get_tempname('local.patch')
+ svntest.main.file_write(local_patch,
+ '--- new.txt\t(revision 3)\n'
+ '+++ new.txt\t(revision 4)\n'
+ '@@ -1,6 +1,9 @@\n'
+ ' A\n'
+ ' B\n'
+ ' C\n'
+ '+D\n'
+ '+E\n'
+ '+F\n'
+ ' J\n'
+ ' K\n'
+ ' L\n'
+ '\ No newline at end of file', mode='wb')
+
+ svntest.actions.run_and_verify_patch(wc_dir, local_patch,
+ expected_output, None,
+ None, expected_skip)
+
+ # Currently we see D E F doubled, that is certainly bad behavior.
+ # I could imagine that applying just the 'C' line change would be ok,
+ # but most likely a text conflict is the proper thing to do here.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'new.txt' : Item(contents='A\n'
+ 'B\n'
+ ' C\n'
+ 'D\n'
+ 'E\n'
+ 'F\n'
+ #'D\n' # Doubled???
+ #'E\n' # Doubled???
+ #'F\n' # Doubled???
+ 'J\n'
+ 'K\n'
+ 'L')})
+ expected_output.tweak('new.txt', status='G ')
+ svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk,
+ None, [])
+
+ # Revert to base position
+ sbox.simple_revert('new.txt')
+ sbox.simple_update(revision=2)
+
+ # And now do the same thing as a merge instead of an update
+ expected_output.tweak('new.txt', status='U ')
+ svntest.actions.run_and_verify_patch(wc_dir, local_patch,
+ expected_output, None,
+ None, expected_skip)
+
+ expected_output.tweak('new.txt', status='G ')
+ svntest.actions.run_and_verify_merge(wc_dir, 2, 3, repo_url, repo_url,
+ expected_output, None, None,
+ expected_disk, None,
+ expected_skip)
+
+########################################################################
+ # Run the tests
+
+# list all tests here, starting with None:
+test_list = [ None,
+ patch,
+ patch_absolute_paths,
+ patch_offset,
+ patch_chopped_leading_spaces,
+ patch_strip1,
+ patch_no_index_line,
+ patch_add_new_dir,
+ patch_remove_empty_dirs,
+ patch_reject,
+ patch_keywords,
+ patch_with_fuzz,
+ patch_reverse,
+ patch_no_svn_eol_style,
+ patch_with_svn_eol_style,
+ patch_with_svn_eol_style_uncommitted,
+ patch_with_ignore_whitespace,
+ patch_replace_locally_deleted_file,
+ patch_no_eol_at_eof,
+ patch_with_properties,
+ patch_same_twice,
+ patch_dir_properties,
+ patch_add_path_with_props,
+ patch_prop_offset,
+ patch_prop_with_fuzz,
+ patch_git_empty_files,
+ patch_old_target_names,
+ patch_reverse_revert,
+ patch_strip_cwd,
+ patch_set_prop_no_eol,
+ patch_add_symlink,
+ patch_moved_away,
+ patch_lacking_trailing_eol,
+ patch_deletes_prop,
+ patch_reversed_add_with_props,
+ patch_reversed_add_with_props2,
+ patch_dev_null,
+ patch_delete_and_skip,
+ patch_target_no_eol_at_eof,
+ patch_add_and_delete,
+ patch_git_with_index_line,
+ patch_change_symlink_target,
+ patch_replace_dir_with_file_and_vv,
+ single_line_mismatch,
+ patch_empty_file,
+ patch_apply_no_fuz,
+ patch_lacking_trailing_eol_on_context,
+ patch_with_custom_keywords,
+ patch_git_rename,
+ patch_hunk_avoid_reorder,
+ patch_hunk_avoid_reorder2,
+ patch_hunk_reorder,
+ patch_hunk_overlap,
+ patch_delete_modified,
+ patch_closest,
+ patch_symlink_traversal,
+ patch_obstructing_symlink_traversal,
+ patch_binary_file,
+ patch_delete_nodes,
+ patch_delete_missing_eol,
+ patch_final_eol,
+ patch_adds_executability_nocontents,
+ patch_adds_executability_nocontents2,
+ patch_adds_executability_yescontents,
+ patch_deletes_executability,
+ patch_ambiguous_executability_contradiction,
+ patch_ambiguous_executability_consistent,
+ patch_prop_madness,
+ patch_empty_vs_delete,
+ patch_add_remove_executable,
+ patch_git_symlink,
+ patch_like_git_symlink,
+ patch_symlink_changes,
+ patch_add_one_line,
+ patch_with_mergeinfo,
+ patch_move_and_change,
+ missing_trailing_context,
+ patch_missed_trail,
+ patch_merge,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/prop_tests.py b/subversion/tests/cmdline/prop_tests.py
new file mode 100755
index 0000000..3709b20
--- /dev/null
+++ b/subversion/tests/cmdline/prop_tests.py
@@ -0,0 +1,2890 @@
+#!/usr/bin/env python
+#
+# prop_tests.py: testing versioned properties
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import sys, re, os, stat, subprocess, logging
+
+logger = logging.getLogger()
+
+# Our testing module
+import svntest
+
+from svntest.main import SVN_PROP_MERGEINFO
+from svntest.main import SVN_PROP_INHERITABLE_IGNORES
+from svntest import wc
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+def is_non_posix_and_non_windows_os():
+ """lambda function to skip revprop_change test"""
+ return (not svntest.main.is_posix_os()) and sys.platform != 'win32'
+
+# this is global so other test files can use it
+binary_mime_type_on_text_file_warning = \
+ "svn: warning:.*is a binary mime-type but file.*looks like text.*"
+
+######################################################################
+# Tests
+
+#----------------------------------------------------------------------
+
+def make_local_props(sbox):
+ "write/read props in wc only (ps, pl, pdel, pe)"
+
+ # Bootstrap
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add properties to one file and one directory
+ sbox.simple_propset('blue', 'azul', 'A/mu')
+ sbox.simple_propset('green', 'verde', 'A/mu')
+ sbox.simple_propset('editme', 'the foo fighters', 'A/mu')
+ sbox.simple_propset('red', 'rojo', 'A/D/G')
+ sbox.simple_propset('yellow', 'amarillo', 'A/D/G')
+
+ # Make sure they show up as local mods in status
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status=' M')
+ expected_status.tweak('A/D/G', status=' M')
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Remove one property
+ sbox.simple_propdel('yellow', 'A/D/G')
+
+ svntest.main.use_editor('foo_to_bar')
+ # Edit one property
+ svntest.main.run_svn(None, 'propedit', 'editme',
+ os.path.join(wc_dir, 'A', 'mu'))
+
+ # What we expect the disk tree to look like:
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu', props={'blue' : 'azul', 'green' : 'verde',
+ 'editme' : 'the bar fighters'})
+ expected_disk.tweak('A/D/G', props={'red' : 'rojo'})
+
+ # Read the real disk tree. Notice we are passing the (normally
+ # disabled) "load props" flag to this routine. This will run 'svn
+ # proplist' on every item in the working copy!
+ svntest.actions.verify_disk(wc_dir, expected_disk, True)
+
+ # Edit without actually changing the property
+ svntest.main.use_editor('identity')
+ svntest.actions.run_and_verify_svn("No changes to property 'editme' on '.*'",
+ [],
+ 'propedit', 'editme',
+ os.path.join(wc_dir, 'A', 'mu'))
+
+
+
+#----------------------------------------------------------------------
+
+def commit_props(sbox):
+ "commit properties"
+
+ # Bootstrap
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add a property to a file and a directory
+ sbox.simple_propset('blue', 'azul', 'A/mu')
+ sbox.simple_propset('red', 'rojo', 'A/D/H')
+
+ # Create expected output tree.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ 'A/D/H' : Item(verb='Sending'),
+ })
+
+ # Created expected status tree.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', 'A/D/H', wc_rev=2, status=' ')
+
+ # Commit the one file.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+
+
+#----------------------------------------------------------------------
+
+@Issue(3951)
+def update_props(sbox):
+ "receive properties via update"
+
+ # Bootstrap
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make a backup copy of the working copy
+ wc_backup = sbox.add_wc_path('backup')
+ svntest.actions.duplicate_dir(wc_dir, wc_backup)
+
+ # Add a property to a file and a directory
+ sbox.simple_propset('blue', 'azul', 'A/mu')
+ sbox.simple_propset('red', 'rojo', 'A/D/H')
+
+ # Create expected output tree.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ 'A/D/H' : Item(verb='Sending'),
+ })
+
+ # Created expected status tree.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', 'A/D/H', wc_rev=2, status=' ')
+
+ # Commit property mods
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Add more properties
+ sbox.simple_propset('blue2', 'azul2', 'A/mu')
+ sbox.simple_propset('red2', 'rojo2', 'A/D/H')
+ expected_status.tweak('A/mu', 'A/D/H', wc_rev=3, status=' ')
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Create expected output tree for an update of the wc_backup.
+ expected_output = svntest.wc.State(wc_backup, {
+ 'A/mu' : Item(status=' U'),
+ 'A/D/H' : Item(status=' U'),
+ })
+
+ # Create expected disk tree for the update.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu', props={'blue' : 'azul'})
+ expected_disk.tweak('A/D/H', props={'red' : 'rojo'})
+
+ # Create expected status tree for the update.
+ expected_status = svntest.actions.get_virginal_state(wc_backup, 2)
+ expected_status.tweak('A/mu', 'A/D/H', status=' ')
+
+ # Do the update and check the results in three ways... INCLUDING PROPS
+ # This adds properties to nodes that have none
+ svntest.actions.run_and_verify_update(wc_backup,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], 1,
+ '-r', '2', wc_backup)
+
+ # This adds properties to nodes that have properties
+ expected_status.tweak(wc_rev=3)
+ expected_disk.tweak('A/mu', props={'blue' : 'azul',
+ 'blue2' : 'azul2'})
+ expected_disk.tweak('A/D/H', props={'red' : 'rojo',
+ 'red2' : 'rojo2'})
+ svntest.actions.run_and_verify_update(wc_backup,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], 1,
+ '-r', '3', wc_backup)
+
+
+#----------------------------------------------------------------------
+
+def downdate_props(sbox):
+ "receive property changes as part of a downdate"
+
+ # Bootstrap
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ mu_path = sbox.ospath('A/mu')
+
+ # Add a property to a file
+ sbox.simple_propset('cash-sound', 'cha-ching!', 'iota')
+
+ # Create expected output tree.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+
+ # Created expected status tree.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', wc_rev=2, status=' ')
+
+ # Commit the one file.
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Make some mod (something to commit)
+ svntest.main.file_append(mu_path, "some mod")
+
+ # Create expected output tree.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+
+ # Created expected status tree.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', wc_rev=2, status=' ')
+ expected_status.tweak('A/mu', wc_rev=3, status=' ')
+
+ # Commit the one file.
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Create expected output tree for an update.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(status=' U'),
+ 'A/mu' : Item(status='U '),
+ })
+
+ # Create expected disk tree for the update.
+ expected_disk = svntest.main.greek_state
+
+ # Create expected status tree for the update.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ # Do the update and check the results in three ways... INCLUDING PROPS
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], 1,
+ '-r', '1', wc_dir)
+
+#----------------------------------------------------------------------
+
+def remove_props(sbox):
+ "commit the removal of props"
+
+ # Bootstrap
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add a property to a file
+ sbox.simple_propset('cash-sound', 'cha-ching!', 'iota')
+
+ # Commit the file
+ sbox.simple_commit('iota')
+
+ # Now, remove the property
+ sbox.simple_propdel('cash-sound', 'iota')
+
+ # Create expected output tree.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+
+ # Created expected status tree.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', wc_rev=3, status=' ')
+
+ # Commit the one file.
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+
+def update_conflict_props(sbox):
+ "update with conflicting props"
+
+ # Bootstrap
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add a property to a file and a directory
+ mu_path = sbox.ospath('A/mu')
+ sbox.simple_propset('cash-sound', 'cha-ching!', 'A/mu')
+ A_path = sbox.ospath('A')
+ sbox.simple_propset('foo', 'bar', 'A')
+
+ # Commit the file and directory
+ sbox.simple_commit()
+
+ # Update to rev 1
+ svntest.main.run_svn(None, 'up', '-r', '1', wc_dir)
+
+ # Add conflicting properties
+ sbox.simple_propset('cash-sound', 'beep!', 'A/mu')
+ sbox.simple_propset('foo', 'baz', 'A')
+
+ # Create expected output tree for an update of the wc_backup.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(status=' C'),
+ 'A' : Item(status=' C'),
+ })
+
+ # Create expected disk tree for the update.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/mu', props={'cash-sound' : 'beep!'})
+ expected_disk.tweak('A', props={'foo' : 'baz'})
+
+ # Create expected status tree for the update.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('A/mu', 'A', status=' C')
+
+ extra_files = ['mu.*\.prej', 'dir_conflicts.*\.prej']
+ # Do the update and check the results in three ways... INCLUDING PROPS
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ check_props=True,
+ extra_files=extra_files)
+
+ # Resolve the conflicts
+ svntest.actions.run_and_verify_resolved([mu_path, A_path])
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('A/mu', 'A', status=' M')
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+@Issue(2608)
+def commit_conflict_dirprops(sbox):
+ "commit with conflicting dirprops"
+
+ # Issue #2608: failure to see conflicting dirprops on root of
+ # repository.
+
+ # Bootstrap
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_propset('foo', 'bar', '')
+
+ # Commit the file and directory
+ sbox.simple_commit()
+
+ # Update to rev 1
+ svntest.main.run_svn(None,
+ 'up', '-r', '1', wc_dir)
+
+ # Add conflicting properties
+ sbox.simple_propset('foo', 'eek', '')
+
+ svntest.actions.run_and_verify_commit(wc_dir, None, None,
+ ".*[oO]ut[- ]of[- ]date.*")
+
+#----------------------------------------------------------------------
+
+# Issue #742: we used to screw up when committing a file replacement
+# that also had properties. It was fixed by teaching
+# svn_wc_props_modified_p and svn_wc_transmit_prop_deltas to *ignore*
+# leftover base-props when a file is scheduled for replacement. (When
+# we svn_wc_add a file, it starts life with no working props.)
+@Issue(742)
+def commit_replacement_props(sbox):
+ "props work when committing a replacement"
+
+ # Bootstrap
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add a property to two files
+ iota_path = sbox.ospath('iota')
+ lambda_path = sbox.ospath('A/B/lambda')
+ sbox.simple_propset('cash-sound', 'cha-ching!', 'iota')
+ sbox.simple_propset('boson', 'W', 'A/B/lambda')
+
+ # Commit (### someday use run_and_verify_commit for better coverage)
+ sbox.simple_commit()
+
+ # Schedule both files for deletion
+ sbox.simple_rm('iota', 'A/B/lambda')
+
+ # Now recreate the files, and schedule them for addition.
+ # Poof, the 'new' files don't have any properties at birth.
+ svntest.main.file_append(iota_path, 'iota TNG')
+ svntest.main.file_append(lambda_path, 'lambda TNG')
+ sbox.simple_add('iota', 'A/B/lambda')
+
+ # Sanity check: the two files should be scheduled for (R)eplacement.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', wc_rev=2, status='R ')
+ expected_status.tweak('A/B/lambda', wc_rev=2, status='R ')
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Now add a property to lambda. Iota still doesn't have any.
+ sbox.simple_propset('capacitor', 'flux', 'A/B/lambda')
+
+ # Commit, with careful output checking. We're actually going to
+ # scan the working copy for props after the commit.
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Replacing'),
+ 'A/B/lambda' : Item(verb='Replacing'),
+ })
+
+ # Expected status tree: lambda has one prop, iota doesn't.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', wc_rev=3)
+ expected_status.tweak('A/B/lambda', wc_rev=3, status=' ')
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+
+def revert_replacement_props(sbox):
+ "props work when reverting a replacement"
+
+ # Bootstrap
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add a property to two files
+ iota_path = sbox.ospath('iota')
+ lambda_path = sbox.ospath('A/B/lambda')
+ sbox.simple_propset('cash-sound', 'cha-ching!', 'iota')
+ sbox.simple_propset('boson', 'W', 'A/B/lambda')
+
+ # Commit rev 2. (### someday use run_and_verify_commit for better coverage)
+ sbox.simple_commit()
+
+ # Schedule both files for deletion
+ sbox.simple_rm('iota', 'A/B/lambda')
+
+ # Now recreate the files, and schedule them for addition.
+ # Poof, the 'new' files don't have any properties at birth.
+ svntest.main.file_append(iota_path, 'iota TNG')
+ svntest.main.file_append(lambda_path, 'lambda TNG')
+ sbox.simple_add('iota', 'A/B/lambda')
+
+ # Sanity check: the two files should be scheduled for (R)eplacement.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', wc_rev=2, status='R ')
+ expected_status.tweak('A/B/lambda', wc_rev=2, status='R ')
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Now add a property to lambda. Iota still doesn't have any.
+ sbox.simple_propset('capacitor', 'flux', 'A/B/lambda')
+
+ # Now revert both files.
+ sbox.simple_revert('iota', 'A/B/lambda')
+
+ # Do an update; even though the update is really a no-op,
+ # run_and_verify_update has the nice feature of scanning disk as
+ # well as running status. We want to verify that we truly have a
+ # *pristine* revision 2 tree, with the original rev 2 props, and no
+ # local mods at all.
+
+ expected_output = svntest.wc.State(wc_dir, {
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('iota', status=' ')
+ expected_status.tweak('A/B/lambda', status=' ')
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota', props={'cash-sound' : 'cha-ching!'})
+ expected_disk.tweak('A/B/lambda', props={'boson' : 'W'})
+
+ # scan disk for props too.
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ check_props=True)
+
+#----------------------------------------------------------------------
+@Issues(920,2065)
+def inappropriate_props(sbox):
+ "try to set inappropriate props"
+
+ # Bootstrap
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A_path = sbox.ospath('A')
+ E_path = sbox.ospath('A/B/E')
+ iota_path = sbox.ospath('iota')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # These should produce an error
+ svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
+ 'propset', 'svn:executable', 'on', A_path)
+
+ svntest.actions.run_and_verify_svn(None,
+ svntest.verify.AnyOutput, 'propset',
+ 'svn:keywords', 'LastChangedDate',
+ A_path)
+
+ svntest.actions.run_and_verify_svn(None,
+ svntest.verify.AnyOutput, 'propset',
+ 'svn:eol-style', 'native', A_path)
+
+ svntest.actions.run_and_verify_svn(None,
+ svntest.verify.AnyOutput, 'propset',
+ 'svn:eol-style', 'invalid value',
+ os.path.join(A_path, 'mu'))
+
+ svntest.actions.run_and_verify_svn(None,
+ svntest.verify.AnyOutput, 'propset',
+ 'svn:mime-type', 'image/png', A_path)
+
+ svntest.actions.run_and_verify_svn(None,
+ svntest.verify.AnyOutput, 'propset',
+ 'svn:ignore', '*.o', iota_path)
+
+ svntest.actions.run_and_verify_svn(None,
+ svntest.verify.AnyOutput, 'propset',
+ 'svn:externals',
+ 'foo http://host.com/repos', iota_path)
+
+ svntest.actions.run_and_verify_svn(None,
+ svntest.verify.AnyOutput, 'propset',
+ 'svn:author', 'socrates', iota_path)
+
+ svntest.actions.run_and_verify_svn(None,
+ svntest.verify.AnyOutput, 'propset',
+ 'svn:log', 'log message', iota_path)
+
+ svntest.actions.run_and_verify_svn(None,
+ svntest.verify.AnyOutput, 'propset',
+ 'svn:date', 'Tue Jan 19 04:14:07 2038',
+ iota_path)
+
+ svntest.actions.run_and_verify_svn(None,
+ svntest.verify.AnyOutput, 'propset',
+ 'svn:original-date',
+ 'Thu Jan 1 01:00:00 1970', iota_path)
+
+ # Status unchanged
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Recursive setting of inappropriate dir prop should work on files
+ svntest.actions.run_and_verify_svn(None, [], 'propset', '-R',
+ 'svn:executable', 'on', E_path)
+
+ expected_status.tweak('A/B/E/alpha', 'A/B/E/beta', status=' M')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Issue #920. Don't allow setting of svn:eol-style on binary files or files
+ # with inconsistent eol types.
+
+ path = sbox.ospath('binary')
+ svntest.main.file_append(path, "binary")
+ sbox.simple_add('binary')
+
+ svntest.main.run_svn(binary_mime_type_on_text_file_warning,
+ 'propset', 'svn:mime-type', 'application/octet-stream',
+ sbox.ospath('binary'))
+
+ svntest.actions.run_and_verify_svn(None,
+ svntest.verify.AnyOutput,
+ 'propset', 'svn:eol-style',
+ 'CRLF', path)
+
+ path = sbox.ospath('multi-eol')
+ svntest.main.file_append(path, "line1\rline2\n")
+ sbox.simple_add('multi-eol')
+
+ svntest.actions.run_and_verify_svn(None,
+ svntest.verify.AnyOutput,
+ 'propset', 'svn:eol-style',
+ 'LF', path)
+
+ path = sbox.ospath('backwards-eol')
+ svntest.main.file_append(path, "line1\n\r")
+ sbox.simple_add('backwards-eol')
+
+ svntest.actions.run_and_verify_svn(None,
+ svntest.verify.AnyOutput,
+ 'propset', 'svn:eol-style',
+ 'native', path)
+
+ path = sbox.ospath('incomplete-eol')
+ svntest.main.file_append(path, "line1\r\n\r")
+ sbox.simple_add('incomplete-eol')
+
+ svntest.actions.run_and_verify_svn(None,
+ svntest.verify.AnyOutput,
+ 'propset', 'svn:eol-style',
+ 'CR', path)
+
+ # Issue #2065. Do allow setting of svn:eol-style on binary files or files
+ # with inconsistent eol types if --force is passed.
+
+ path = sbox.ospath('binary')
+ svntest.main.file_append(path, "binary")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', '--force',
+ 'svn:eol-style', 'CRLF',
+ path)
+
+ path = sbox.ospath('multi-eol')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', '--force',
+ 'svn:eol-style', 'LF',
+ path)
+
+ path = sbox.ospath('backwards-eol')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', '--force',
+ 'svn:eol-style', 'native',
+ path)
+
+ path = sbox.ospath('incomplete-eol')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', '--force',
+ 'svn:eol-style', 'CR',
+ path)
+
+ # Prevent setting of svn:mergeinfo prop values that are...
+ path = sbox.ospath('A/D')
+
+ # ...grammatically incorrect
+ svntest.actions.run_and_verify_svn(None,
+ "svn: E200020: Pathname not terminated by ':'\n",
+ 'propset', SVN_PROP_MERGEINFO, '/trunk',
+ path)
+ svntest.actions.run_and_verify_svn(None,
+ "svn: E200022: Invalid revision number found "
+ "parsing 'one'\n",
+ 'propset', SVN_PROP_MERGEINFO,
+ '/trunk:one', path)
+
+ # ...contain overlapping revision ranges of differing inheritability.
+ svntest.actions.run_and_verify_svn(None,
+ "svn: E200020: Unable to parse overlapping "
+ "revision ranges '9-20\\*' and "
+ "'18-22' with different "
+ "inheritance types\n",
+ 'propset', SVN_PROP_MERGEINFO,
+ '/branch:5-7,9-20*,18-22', path)
+
+ svntest.actions.run_and_verify_svn(None,
+ "svn: E200020: Unable to parse overlapping "
+ "revision ranges "
+ "(('3' and '3\\*')|('3\\*' and '3')) "
+ "with different "
+ "inheritance types\n",
+ 'propset', SVN_PROP_MERGEINFO,
+ '/branch:3,3*', path)
+
+ # ...contain revision ranges with start revisions greater than or
+ # equal to end revisions.
+ svntest.actions.run_and_verify_svn(None,
+ "svn: E200020: Unable to parse reversed "
+ "revision range '20-5'\n",
+ 'propset', SVN_PROP_MERGEINFO,
+ '/featureX:4,20-5', path)
+
+ # ...contain paths mapped to empty revision ranges
+ svntest.actions.run_and_verify_svn(None,
+ "svn: E200020: Mergeinfo for '/trunk' maps to "
+ "an empty revision range\n",
+ 'propset', SVN_PROP_MERGEINFO,
+ '/trunk:', path)
+
+ # ...contain non-inheritable ranges when the target is a file.
+ svntest.actions.run_and_verify_svn(None,
+ "svn: E200020: Cannot set non-inheritable "
+ "mergeinfo on a non-directory*",
+ 'propset', SVN_PROP_MERGEINFO,
+ '/A/D/H/psi:1*', iota_path)
+
+#----------------------------------------------------------------------
+
+# Issue #976. When copying a file, do not determine svn:executable
+# and svn:mime-type values as though the file is brand new, instead
+# use the copied file's property values.
+@Issue(976)
+def copy_inherits_special_props(sbox):
+ "file copies inherit (not re-derive) special props"
+
+ # Bootstrap
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ orig_mime_type = 'image/fake_image'
+
+ # Create two paths
+ new_path1 = sbox.ospath('new_file1.bin')
+ new_path2 = sbox.ospath('new_file2.bin')
+
+ # Create the first path as a binary file. To have svn treat the
+ # file as binary, have a 0x00 in the file.
+ svntest.main.file_append(new_path1, "binary file\000")
+ sbox.simple_add('new_file1.bin')
+
+ # Add initial svn:mime-type to the file
+ sbox.simple_propset('svn:mime-type', orig_mime_type, 'new_file1.bin')
+
+ # Set the svn:executable property on the file if this is a system
+ # that can handle chmod, in which case svn will turn on the
+ # executable bits on the file. Then remove the executable bits
+ # manually on the file and see the value of svn:executable in the
+ # copied file.
+ if os.name == 'posix':
+ sbox.simple_propset('svn:executable', 'on', 'new_file1.bin')
+ os.chmod(new_path1, svntest.main.S_ALL_READ | stat.S_IWUSR)
+
+ # Commit the file
+ sbox.simple_commit()
+
+ # Copy the file
+ svntest.main.run_svn(None, 'cp', new_path1, new_path2)
+
+ # Check the svn:mime-type
+ actual_exit, actual_stdout, actual_stderr = svntest.main.run_svn(
+ None, 'pg', 'svn:mime-type', new_path2)
+
+ expected_stdout = [orig_mime_type + '\n']
+ if actual_stdout != expected_stdout:
+ logger.warn("svn pg svn:mime-type output does not match expected.")
+ logger.warn("Expected standard output: %s\n", expected_stdout)
+ logger.warn("Actual standard output: %s\n", actual_stdout)
+ raise svntest.verify.SVNUnexpectedOutput
+
+ # Check the svn:executable value.
+ # The value of the svn:executable property is now always forced to '*'
+ if os.name == 'posix':
+ actual_exit, actual_stdout, actual_stderr = svntest.main.run_svn(
+ None, 'pg', 'svn:executable', new_path2)
+
+ expected_stdout = ['*\n']
+ if actual_stdout != expected_stdout:
+ logger.warn("svn pg svn:executable output does not match expected.")
+ logger.warn("Expected standard output: %s\n", expected_stdout)
+ logger.warn("Actual standard output: %s\n", actual_stdout)
+ raise svntest.verify.SVNUnexpectedOutput
+
+#----------------------------------------------------------------------
+# Test for issue #3086 'mod-dav-svn ignores pre-revprop-change failure
+# on revprop delete'
+#
+# If we learn how to write a pre-revprop-change hook for
+# non-Posix platforms, we won't have to skip here:
+@Skip(is_non_posix_and_non_windows_os)
+@Issue(3086)
+def revprop_change(sbox):
+ "set, get, and delete a revprop change"
+
+ sbox.build()
+
+ # First test the error when no revprop-change hook exists.
+ svntest.actions.run_and_verify_svn(None, '.*pre-revprop-change',
+ 'propset', '--revprop', '-r', '0',
+ 'cash-sound', 'cha-ching!', sbox.wc_dir)
+
+ # Now test error output from revprop-change hook.
+ svntest.actions.disable_revprop_changes(sbox.repo_dir)
+ svntest.actions.run_and_verify_svn(None, '.*pre-revprop-change.* 0 jrandom cash-sound A',
+ 'propset', '--revprop', '-r', '0',
+ 'cash-sound', 'cha-ching!', sbox.wc_dir)
+
+ # Create the revprop-change hook for this test
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', '--revprop', '-r', '0',
+ 'cash-sound', 'cha-ching!', sbox.wc_dir)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propget', '--revprop', '-r', '0',
+ 'cash-sound', sbox.wc_dir)
+
+ # Now test that blocking the revprop delete.
+ svntest.actions.disable_revprop_changes(sbox.repo_dir)
+ svntest.actions.run_and_verify_svn(None, '.*pre-revprop-change.* 0 jrandom cash-sound D',
+ 'propdel', '--revprop', '-r', '0',
+ 'cash-sound', sbox.wc_dir)
+
+ # Now test actually deleting the revprop.
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propdel', '--revprop', '-r', '0',
+ 'cash-sound', sbox.wc_dir)
+
+ # The property should have been deleted.
+ svntest.actions.run_and_verify_svn(None,
+ '.*(E195011|E200017).*cash-sound.*',
+ 'propget', '--revprop', '-r', '0', 'cash-sound', sbox.wc_dir)
+
+
+#----------------------------------------------------------------------
+
+def prop_value_conversions(sbox):
+ "some svn: properties should be converted"
+
+ # Bootstrap
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A_path = sbox.ospath('A')
+ B_path = sbox.ospath('A/B')
+ iota_path = sbox.ospath('iota')
+ lambda_path = sbox.ospath('A/B/lambda')
+ mu_path = sbox.ospath('A/mu')
+
+ # Leading and trailing whitespace should be stripped
+ svntest.actions.set_prop('svn:mime-type', ' text/html\n\n', iota_path)
+ svntest.actions.set_prop('svn:mime-type', 'text/html', mu_path)
+
+ # Leading and trailing whitespace should be stripped
+ svntest.actions.set_prop('svn:eol-style', '\nnative\n', iota_path)
+ svntest.actions.set_prop('svn:eol-style', 'native', mu_path)
+
+ # A trailing newline should be added
+ svntest.actions.set_prop('svn:ignore', '*.o\nfoo.c', A_path)
+ svntest.actions.set_prop('svn:ignore', '*.o\nfoo.c\n', B_path)
+
+ # A trailing newline should be added
+ svntest.actions.set_prop('svn:externals', 'foo http://foo.com/repos', A_path)
+ svntest.actions.set_prop('svn:externals', 'foo http://foo.com/repos\n', B_path)
+
+ # Leading and trailing whitespace should be stripped, but not internal
+ # whitespace
+ svntest.actions.set_prop('svn:keywords', ' Rev Date \n', iota_path)
+ svntest.actions.set_prop('svn:keywords', 'Rev Date', mu_path)
+
+ # svn:executable value should be forced to a '*'
+ svntest.actions.set_prop('svn:executable', 'foo', iota_path)
+ svntest.actions.set_prop('svn:executable', '*', lambda_path)
+ for pval in (' ', '', 'no', 'off', 'false'):
+ svntest.actions.set_prop('svn:executable', pval, mu_path,
+ "svn: warning: W125005.*use 'svn propdel'")
+
+ # Anything else should be untouched
+ svntest.actions.set_prop('svn:some-prop', 'bar', lambda_path, force=True)
+ svntest.actions.set_prop('svn:some-prop', ' bar baz', mu_path, force=True)
+ svntest.actions.set_prop('svn:some-prop', 'bar\n', iota_path, force=True)
+ svntest.actions.set_prop('some-prop', 'bar', lambda_path)
+ svntest.actions.set_prop('some-prop', ' bar baz', mu_path)
+ svntest.actions.set_prop('some-prop', 'bar\n', iota_path)
+
+ # NOTE: When writing out multi-line prop values in svn:* props, the
+ # client converts to local encoding and local eol style.
+ # Therefore, the expected output must contain the right kind of eoln
+ # strings. That's why we use os.linesep in the tests below, not just
+ # plain '\n'. The _last_ \n is also from the client, but it's not
+ # part of the prop value and it doesn't get converted in the pipe.
+
+ # Check svn:mime-type
+ svntest.actions.check_prop('svn:mime-type', iota_path, [b'text/html'])
+ svntest.actions.check_prop('svn:mime-type', mu_path, [b'text/html'])
+
+ # Check svn:eol-style
+ svntest.actions.check_prop('svn:eol-style', iota_path, [b'native'])
+ svntest.actions.check_prop('svn:eol-style', mu_path, [b'native'])
+
+ # Check svn:ignore
+ linesep = os.linesep.encode()
+ svntest.actions.check_prop('svn:ignore', A_path,
+ [b'*.o'+linesep, b'foo.c'+linesep])
+ svntest.actions.check_prop('svn:ignore', B_path,
+ [b'*.o'+linesep, b'foo.c'+linesep])
+
+ # Check svn:externals
+ svntest.actions.check_prop('svn:externals', A_path,
+ [b'foo http://foo.com/repos'+linesep])
+ svntest.actions.check_prop('svn:externals', B_path,
+ [b'foo http://foo.com/repos'+linesep])
+
+ # Check svn:keywords
+ svntest.actions.check_prop('svn:keywords', iota_path, [b'Rev Date'])
+ svntest.actions.check_prop('svn:keywords', mu_path, [b'Rev Date'])
+
+ # Check svn:executable
+ svntest.actions.check_prop('svn:executable', iota_path, [b'*'])
+ svntest.actions.check_prop('svn:executable', lambda_path, [b'*'])
+ svntest.actions.check_prop('svn:executable', mu_path, [b'*'])
+
+ # Check other props
+ svntest.actions.check_prop('svn:some-prop', lambda_path, [b'bar'])
+ svntest.actions.check_prop('svn:some-prop', mu_path, [b' bar baz'])
+ svntest.actions.check_prop('svn:some-prop', iota_path, [b'bar'+linesep])
+ svntest.actions.check_prop('some-prop', lambda_path, [b'bar'])
+ svntest.actions.check_prop('some-prop', mu_path,[b' bar baz'])
+ svntest.actions.check_prop('some-prop', iota_path, [b'bar\n'])
+
+
+#----------------------------------------------------------------------
+
def binary_props(sbox):
  "test binary property support"

  # Bootstrap
  sbox.build()
  wc_dir = sbox.wc_dir

  # Make a backup copy of the working copy.  Props are committed from
  # WC_DIR and then received in WC_BACKUP via update, exercising both
  # the commit and update paths for binary property values.
  wc_backup = sbox.add_wc_path('backup')
  svntest.actions.duplicate_dir(wc_dir, wc_backup)

  # Some path convenience vars.
  A_path = sbox.ospath('A')
  B_path = sbox.ospath('A/B')
  iota_path = sbox.ospath('iota')
  lambda_path = sbox.ospath('A/B/lambda')
  mu_path = sbox.ospath('A/mu')

  A_path_bak = sbox.ospath('A', wc_dir=wc_backup)
  B_path_bak = sbox.ospath('A/B', wc_dir=wc_backup)
  iota_path_bak = sbox.ospath('iota', wc_dir=wc_backup)
  lambda_path_bak = sbox.ospath('A/B/lambda', wc_dir=wc_backup)
  mu_path_bak = sbox.ospath('A/mu', wc_dir=wc_backup)

  # Property value convenience vars.  Each value is "binary" in a
  # different way: an embedded zero byte, a form feed, XML-special
  # characters, and a combination of XML markup with a zero byte.
  prop_zb = b"This property has a zer\000 byte."
  prop_ff = b"This property has a form\014feed."
  prop_xml = b"This property has an <xml> tag."
  prop_binx = b"This property has an <xml> tag and a zer\000 byte."

  # Set some binary properties.
  svntest.actions.set_prop('prop_zb', prop_zb, B_path, )
  svntest.actions.set_prop('prop_ff', prop_ff, iota_path)
  svntest.actions.set_prop('prop_xml', prop_xml, lambda_path)
  svntest.actions.set_prop('prop_binx', prop_binx, mu_path)
  svntest.actions.set_prop('prop_binx', prop_binx, A_path)

  # Create expected output and status trees.
  expected_output = svntest.wc.State(wc_dir, {
    'A' : Item(verb='Sending'),
    'A/B' : Item(verb='Sending'),
    'iota' : Item(verb='Sending'),
    'A/B/lambda' : Item(verb='Sending'),
    'A/mu' : Item(verb='Sending'),
    })
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A', 'A/B', 'iota', 'A/B/lambda', 'A/mu',
                        wc_rev=2, status=' ')

  # Commit the propsets.
  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output,
                                        expected_status)

  # Create expected output, disk, and status trees for an update of
  # the wc_backup.
  expected_output = svntest.wc.State(wc_backup, {
    'A' : Item(status=' U'),
    'A/B' : Item(status=' U'),
    'iota' : Item(status=' U'),
    'A/B/lambda' : Item(status=' U'),
    'A/mu' : Item(status=' U'),
    })
  expected_disk = svntest.main.greek_state.copy()
  expected_status = svntest.actions.get_virginal_state(wc_backup, 2)

  # Do the update and check the results.
  svntest.actions.run_and_verify_update(wc_backup,
                                        expected_output,
                                        expected_disk,
                                        expected_status)

  # Now, check those properties.  The values must round-trip through
  # commit and update byte-for-byte.
  svntest.actions.check_prop('prop_zb', B_path_bak, [prop_zb])
  svntest.actions.check_prop('prop_ff', iota_path_bak, [prop_ff])
  svntest.actions.check_prop('prop_xml', lambda_path_bak, [prop_xml])
  svntest.actions.check_prop('prop_binx', mu_path_bak, [prop_binx])
  svntest.actions.check_prop('prop_binx', A_path_bak, [prop_binx])
+
+#----------------------------------------------------------------------
+
+# Ensure that each line of output contains the corresponding string of
+# expected_out, and that errput is empty.
# Ensure that each line of output contains the corresponding string of
# expected_out, and that errput is empty.
def verify_output(expected_out, output, errput):
  """Check that, after sorting OUTPUT, each non-'DBG:' line contains the
  corresponding entry of EXPECTED_OUT as a substring, that the number of
  such lines equals len(EXPECTED_OUT), and that ERRPUT is empty.

  Raises svntest.Failure on any mismatch.  NOTE: OUTPUT is sorted in
  place as a side effect (callers pass throwaway lists)."""
  if errput != []:
    logger.warn('Error: stderr:')
    logger.warn(errput)
    raise svntest.Failure
  output.sort()
  ln = 0
  for line in output:
    # Skip debug chatter emitted by the test harness.
    if line.startswith('DBG:'):
      continue
    # More real output lines than expected entries: the original code
    # hit IndexError on expected_out[ln] here; fail explicitly instead.
    if ln >= len(expected_out):
      logger.warn('Error: expected keywords: %s', expected_out)
      logger.warn(' actual full output: %s', output)
      raise svntest.Failure
    if ((line.find(expected_out[ln]) == -1) or
        (line != '' and expected_out[ln] == '')):
      logger.warn('Error: expected keywords: %s', expected_out)
      logger.warn(' actual full output: %s', output)
      raise svntest.Failure
    ln = ln + 1
  # Every expected entry must have been consumed.
  if ln != len(expected_out):
    raise svntest.Failure
+
@Issue(1794)
def recursive_base_wc_ops(sbox):
  "recursive property operations in BASE and WC"

  # Bootstrap
  sbox.build()
  wc_dir = sbox.wc_dir

  # Files with which to test, in alphabetical order
  fp_add = sbox.ospath('A/added')
  fp_del = sbox.ospath('A/mu')
  #fp_keep= sbox.ospath('iota')

  # Set up properties in BASE: 'p' on A/mu (to be deleted later) and on
  # iota (kept), then commit so they exist in revision 2.
  sbox.simple_propset('p', 'old-del', 'A/mu')
  sbox.simple_propset('p', 'old-keep', 'iota')
  sbox.simple_commit()

  # Make WORKING differ from BASE: add a file with a prop, change the
  # props on the existing files, and schedule A/mu for deletion.
  svntest.main.file_append(fp_add, 'blah')
  sbox.simple_add('A/added')
  sbox.simple_propset('p', 'new-add', 'A/added')
  sbox.simple_propset('p', 'new-del', 'A/mu')
  sbox.simple_propset('p', 'new-keep', 'iota')
  svntest.main.run_svn(None, 'del', '--force', fp_del)

  # Test recursive proplist: -rBASE must show the committed values
  # (including the schedule-deleted A/mu), WC must show the new values
  # (excluding A/mu, including A/added).
  exit_code, output, errput = svntest.main.run_svn(None, 'proplist', '-R',
                                                   '-v', wc_dir, '-rBASE')
  verify_output([ 'old-del', 'old-keep', 'p', 'p',
                  'Properties on ', 'Properties on ' ],
                output, errput)
  svntest.verify.verify_exit_code(None, exit_code, 0)

  exit_code, output, errput = svntest.main.run_svn(None, 'proplist', '-R',
                                                   '-v', wc_dir)
  verify_output([ 'new-add', 'new-keep', 'p', 'p',
                  'Properties on ', 'Properties on ' ],
                output, errput)
  svntest.verify.verify_exit_code(None, exit_code, 0)

  # Test recursive propget
  exit_code, output, errput = svntest.main.run_svn(None, 'propget', '-R',
                                                   'p', wc_dir, '-rBASE')
  verify_output([ 'old-del', 'old-keep' ], output, errput)
  svntest.verify.verify_exit_code(None, exit_code, 0)

  exit_code, output, errput = svntest.main.run_svn(None, 'propget', '-R',
                                                   'p', wc_dir)
  verify_output([ 'new-add', 'new-keep' ], output, errput)
  svntest.verify.verify_exit_code(None, exit_code, 0)

  # Test recursive propset (issue 1794)
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/mu', status='D ', wc_rev=2)
  expected_status.tweak('iota', status=' M', wc_rev=2)
  expected_status.add({
    'A/added' : Item(status='A ', wc_rev=0),
    })
  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # A recursive propset under A/B must mark all files there as
  # prop-modified and must not error on mixed schedule states.
  svntest.actions.run_and_verify_svn(None, [],
                                     'propset', '-R', 'svn:keywords', 'Date',
                                     os.path.join(wc_dir, 'A', 'B'))
  expected_status.tweak('A/B/lambda', 'A/B/E/alpha', 'A/B/E/beta', status=' M')
  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+
def url_props_ops(sbox):
  "property operations on a URL"

  # Bootstrap
  sbox.build()
  wc_dir = sbox.wc_dir

  prop1 = 'prop1'
  propval1 = 'propval1 is foo'
  prop2 = 'prop2'
  propval2 = 'propval2'

  iota_url = sbox.repo_url + '/iota'
  A_url = sbox.repo_url + '/A'

  # Add a couple of properties
  sbox.simple_propset(prop1, propval1, 'iota')
  sbox.simple_propset(prop1, propval1, 'A')

  # Commit
  sbox.simple_commit()

  # Add a few more properties
  sbox.simple_propset(prop2, propval2, 'iota')
  sbox.simple_propset(prop2, propval2, 'A')

  # Commit again
  sbox.simple_commit()

  # Test propget directly against the repository URLs (both a file and
  # a directory target).
  svntest.actions.run_and_verify_svn([ propval1 + '\n' ], [],
                                     'propget', prop1, iota_url)
  svntest.actions.run_and_verify_svn([ propval1 + '\n' ], [],
                                     'propget', prop1, A_url)

  # Test normal proplist
  exit_code, output, errput = svntest.main.run_svn(None,
                                                   'proplist', iota_url)
  verify_output([ prop1, prop2, 'Properties on ' ],
                output, errput)
  svntest.verify.verify_exit_code(None, exit_code, 0)

  exit_code, output, errput = svntest.main.run_svn(None,
                                                   'proplist', A_url)
  verify_output([ prop1, prop2, 'Properties on ' ],
                output, errput)
  svntest.verify.verify_exit_code(None, exit_code, 0)

  # Test verbose proplist
  exit_code, output, errput = svntest.main.run_svn(None,
                                                   'proplist', '-v', iota_url)
  verify_output([ propval1, propval2, prop1, prop2,
                  'Properties on ' ], output, errput)
  svntest.verify.verify_exit_code(None, exit_code, 0)

  exit_code, output, errput = svntest.main.run_svn(None,
                                                   'proplist', '-v', A_url)
  verify_output([ propval1, propval2, prop1, prop2,
                  'Properties on ' ], output, errput)
  svntest.verify.verify_exit_code(None, exit_code, 0)

  # Test propedit: the 'foo_to_bar' scripted editor rewrites 'foo' to
  # 'bar' in the property value, so mirror that in our expectation.
  svntest.main.use_editor('foo_to_bar')
  propval1 = propval1.replace('foo', 'bar')
  svntest.main.run_svn(None,
                       'propedit', prop1, '-m', 'editlog', iota_url)
  svntest.main.run_svn(None,
                       'propedit', prop1, '-m', 'editlog', A_url)
  svntest.actions.run_and_verify_svn([ propval1 + '\n' ], [],
                                     'propget', prop1, iota_url)
  svntest.actions.run_and_verify_svn([ propval1 + '\n' ], [],
                                     'propget', prop1, A_url)

  # Edit without actually changing the property: svn must report that
  # no commit was made rather than creating an empty revision.
  svntest.main.use_editor('identity')
  svntest.actions.run_and_verify_svn("No changes to property '%s' on '.*'"
                                       % prop1,
                                     [],
                                     'propedit', prop1, '-m', 'nocommit',
                                     iota_url)
+
+
+
+#----------------------------------------------------------------------
def removal_schedule_added_props(sbox):
  "removal of schedule added file with properties"

  sbox.build()

  wc_dir = sbox.wc_dir
  newfile_path = sbox.ospath('newfile')
  file_add_output = ["A " + newfile_path + "\n"]
  propset_output = ["property 'newprop' set on '" + newfile_path + "'\n"]
  file_rm_output = ["D " + newfile_path + "\n"]
  propls_output = [
    "Properties on '" + newfile_path + "':\n",
    " newprop\n",
    " newvalue\n",
  ]

  # create new fs file
  open(newfile_path, 'w').close()
  # Add it and set a property
  svntest.actions.run_and_verify_svn(file_add_output, [], 'add', newfile_path)
  svntest.actions.run_and_verify_svn(propset_output, [], 'propset',
                                     'newprop', 'newvalue', newfile_path)
  svntest.actions.run_and_verify_svn(propls_output, [],
                                     'proplist', '-v', newfile_path)
  # remove the file (schedule-add + rm leaves it unversioned again)
  svntest.actions.run_and_verify_svn(file_rm_output, [],
                                     'rm', '--force', newfile_path)
  # recreate the file and add it again
  open(newfile_path, 'w').close()
  svntest.actions.run_and_verify_svn(file_add_output, [], 'add', newfile_path)

  # Now there should be NO properties leftover... the prop set on the
  # first (aborted) add must not survive into the re-added file.
  svntest.actions.run_and_verify_svn([], [],
                                     'proplist', '-v', newfile_path)
+
+#----------------------------------------------------------------------
+
def update_props_on_wc_root(sbox):
  "receive properties on the wc root via update"

  # Bootstrap
  sbox.build()
  wc_dir = sbox.wc_dir

  # Make a backup copy of the working copy
  wc_backup = sbox.add_wc_path('backup')
  svntest.actions.duplicate_dir(wc_dir, wc_backup)

  # Add a property to the root folder ('' is the wc root path)
  sbox.simple_propset('red', 'rojo', '')

  # Create expected output tree.
  expected_output = svntest.wc.State(wc_dir, {
    '' : Item(verb='Sending')
    })

  # Created expected status tree.
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('', wc_rev=2, status=' ')

  # Commit the working copy
  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status)

  # Create expected output tree for an update of the wc_backup.
  expected_output = svntest.wc.State(wc_backup, {
    '' : Item(status=' U'),
    })
  # Create expected disk tree for the update.
  expected_disk = svntest.main.greek_state.copy()
  expected_disk.add({
    '' : Item(props = {'red' : 'rojo'}),
    })
  # Create expected status tree for the update.
  expected_status = svntest.actions.get_virginal_state(wc_backup, 2)
  expected_status.tweak('', status=' ')

  # Do the update and check the results in three ways... INCLUDING PROPS
  svntest.actions.run_and_verify_update(wc_backup,
                                        expected_output,
                                        expected_disk,
                                        expected_status,
                                        check_props=True)
+
+# test for issue 2743
# test for issue 2743
@Issue(2743)
def props_on_replaced_file(sbox):
  """test properties on replaced files"""

  sbox.build()
  wc_dir = sbox.wc_dir

  # Add some properties to iota
  iota_path = sbox.ospath("iota")
  sbox.simple_propset('red', 'rojo', 'iota')
  sbox.simple_propset('blue', 'lagoon', 'iota')
  sbox.simple_commit()

  # replace iota_path (schedule-delete then re-add a new file at the
  # same path)
  sbox.simple_rm('iota')
  svntest.main.file_append(iota_path, "some mod")
  sbox.simple_add('iota')

  # check that the replaced file has no properties
  expected_disk = svntest.main.greek_state.copy()
  expected_disk.tweak('iota', contents="some mod")
  svntest.actions.verify_disk(wc_dir, expected_disk.old_tree(), True)

  # now add a new property to iota
  sbox.simple_propset('red', 'mojo', 'iota')
  sbox.simple_propset('groovy', 'baby', 'iota')

  # What we expect the disk tree to look like: only the newly-set props,
  # never the pre-replacement ones.
  expected_disk.tweak('iota', props={'red' : 'mojo', 'groovy' : 'baby'})
  svntest.actions.verify_disk(wc_dir, expected_disk.old_tree(), True)
+
+#----------------------------------------------------------------------
+
def depthy_wc_proplist(sbox):
  """test proplist at various depths on a wc"""
  # Bootstrap.
  sbox.build()
  wc_dir = sbox.wc_dir

  # Set up properties: one per depth level (root, immediate file,
  # immediate dir, deeper file) so each --depth value yields a distinct
  # set of results.
  sbox.simple_propset('p', 'prop1', '')
  sbox.simple_propset('p', 'prop2', 'iota')
  sbox.simple_propset('p', 'prop3', 'A')
  sbox.simple_propset('p', 'prop4', 'A/mu')

  # Commit.
  sbox.simple_commit()

  # Test depth-empty proplist: only the root's own property.
  exit_code, output, errput = svntest.main.run_svn(None, 'proplist',
                                                   '--depth', 'empty',
                                                   '-v', wc_dir)
  verify_output([ 'prop1', 'p', 'Properties on ' ],
                output, errput)
  svntest.verify.verify_exit_code(None, exit_code, 0)

  # Test depth-files proplist: root plus its immediate file children.
  exit_code, output, errput = svntest.main.run_svn(None, 'proplist',
                                                   '--depth', 'files',
                                                   '-v', wc_dir)
  verify_output([ 'prop1', 'prop2', 'p', 'p',
                  'Properties on ', 'Properties on ' ],
                output, errput)
  svntest.verify.verify_exit_code(None, exit_code, 0)

  # Test depth-immediates proplist: root plus all immediate children.
  exit_code, output, errput = svntest.main.run_svn(None, 'proplist', '--depth',
                                                   'immediates', '-v', wc_dir)
  verify_output([ 'prop1', 'prop2', 'prop3' ] +
                ['p'] * 3 + ['Properties on '] * 3,
                output, errput)
  svntest.verify.verify_exit_code(None, exit_code, 0)

  # Test depth-infinity proplist: everything, including A/mu.
  exit_code, output, errput = svntest.main.run_svn(None, 'proplist', '--depth',
                                                   'infinity', '-v', wc_dir)
  verify_output([ 'prop1', 'prop2', 'prop3', 'prop4' ] +
                ['p'] * 4 + ['Properties on '] * 4,
                output, errput)
  svntest.verify.verify_exit_code(None, exit_code, 0)
+
+#----------------------------------------------------------------------
+
def depthy_url_proplist(sbox):
  """test proplist at various depths on a url"""
  # Bootstrap.  URL-side twin of depthy_wc_proplist: same property
  # layout, but proplist runs against the repository URL.
  sbox.build()
  repo_url = sbox.repo_url
  wc_dir = sbox.wc_dir

  # Set up properties.
  sbox.simple_propset('p', 'prop1', '')
  sbox.simple_propset('p', 'prop2', 'iota')
  sbox.simple_propset('p', 'prop3', 'A')
  sbox.simple_propset('p', 'prop4', 'A/mu')
  sbox.simple_commit()

  # Test depth-empty proplist.
  exit_code, output, errput = svntest.main.run_svn(None, 'proplist',
                                                   '--depth', 'empty',
                                                   '-v', repo_url)
  verify_output([ 'prop1', 'p', 'Properties on '],
                output, errput)
  svntest.verify.verify_exit_code(None, exit_code, 0)

  # Test depth-files proplist.
  exit_code, output, errput = svntest.main.run_svn(None, 'proplist',
                                                   '--depth', 'files',
                                                   '-v', repo_url)
  verify_output([ 'prop1', 'prop2', 'p', 'p',
                  'Properties on ', 'Properties on ' ],
                output, errput)
  svntest.verify.verify_exit_code(None, exit_code, 0)

  # Test depth-immediates proplist.
  exit_code, output, errput = svntest.main.run_svn(None, 'proplist',
                                                   '--depth', 'immediates',
                                                   '-v', repo_url)

  verify_output([ 'prop1', 'prop2', 'prop3' ] + ['p'] * 3 +
                ['Properties on '] * 3,
                output, errput)
  svntest.verify.verify_exit_code(None, exit_code, 0)

  # Test depth-infinity proplist.
  exit_code, output, errput = svntest.main.run_svn(None,
                                                   'proplist', '--depth',
                                                   'infinity', '-v', repo_url)
  verify_output([ 'prop1', 'prop2', 'prop3', 'prop4' ] + ['p'] * 4 +
                ['Properties on '] * 4,
                output, errput)
  svntest.verify.verify_exit_code(None, exit_code, 0)
+
+#----------------------------------------------------------------------
+
def invalid_propnames(sbox):
  """test prop* handle invalid property names"""
  # Bootstrap.
  sbox.build()
  repo_url = sbox.repo_url
  wc_dir = sbox.wc_dir
  cwd = os.getcwd()
  os.chdir(wc_dir)

  # chr(8) is a backspace control character: not a valid XML name, so
  # not a valid Subversion property name.
  propname = chr(8)
  propval = 'foo'

  # propdel of a nonexistent (even invalid) name only warns...
  expected_stdout = (".*Attempting to delete nonexistent property "
                     "'%s'.*" % (propname,))
  svntest.actions.run_and_verify_svn(expected_stdout, [],
                                     'propdel', propname)
  # ...but propedit/propget/propset must reject the name outright.
  expected_stderr = (".*'%s' is not a valid Subversion"
                     ' property name' % (propname,))
  svntest.actions.run_and_verify_svn(None, expected_stderr,
                                     'propedit', propname)
  svntest.actions.run_and_verify_svn(None, expected_stderr,
                                     'propget', propname)
  svntest.actions.run_and_verify_svn(None, expected_stderr,
                                     'propset', propname, propval)

  svntest.actions.run_and_verify_svn(None, expected_stderr,
                                     'commit', '--with-revprop',
                                     '='.join([propname, propval]))
  # Now swap them: --with-revprop should accept propname as a property
  # value; no concept of validity there.
  svntest.actions.run_and_verify_svn([], [],
                                     'commit', '--with-revprop',
                                     '='.join([propval, propname]))

  os.chdir(cwd)
+
@SkipUnless(svntest.main.is_posix_os)
@Issue(2581)
def perms_on_symlink(sbox):
  "propset shouldn't touch symlink perms"
  sbox.build()
  # We can't just run commands on absolute paths in the usual way
  # (e.g., os.path.join(sbox.wc_dir, 'newdir')), because for some
  # reason, if the symlink points to newdir as an absolute path, the
  # bug doesn't reproduce. I have no idea why. Since it does have to
  # point to newdir, the only other choice is to have it point to it
  # in the same directory, so we have to run the test from inside the
  # working copy.
  saved_cwd = os.getcwd()
  os.chdir(sbox.wc_dir)
  try:
    svntest.actions.run_and_verify_svn(None, [], 'mkdir', 'newdir')
    os.symlink('newdir', 'symlink')
    svntest.actions.run_and_verify_svn(None, [], 'add', 'symlink')
    # Record the target dir's mode so we can detect any change (the
    # issue-2581 bug chmod'ed the symlink target).
    old_mode = os.stat('newdir')[stat.ST_MODE]
    # The only property on 'symlink' is svn:special, so attempting to remove
    # 'svn:executable' should result in an error
    expected_stdout = (".*Attempting to delete nonexistent property "
                       "'svn:executable'.*")
    svntest.actions.run_and_verify_svn(expected_stdout, [], 'propdel',
                                       'svn:executable', 'symlink')
    new_mode = os.stat('newdir')[stat.ST_MODE]
    if not old_mode == new_mode:
      # Chmod newdir back, so the test suite can remove this working
      # copy when cleaning up later.
      os.chmod('newdir', stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
      raise svntest.Failure
  finally:
    # Always restore the original cwd, even on failure.
    os.chdir(saved_cwd)
+
+# Use a property with a custom namespace, ie 'ns:prop' or 'mycompany:prop'.
# Use a property with a custom namespace, ie 'ns:prop' or 'mycompany:prop'.
def remove_custom_ns_props(sbox):
  "remove a property with a custom namespace"

  # Bootstrap
  sbox.build()
  wc_dir = sbox.wc_dir

  # Add a property to a file
  sbox.simple_propset('ns:cash-sound', 'cha-ching!', 'iota')

  # Commit the file
  sbox.simple_commit('iota')

  # Now, make a backup copy of the working copy
  wc_backup = sbox.add_wc_path('backup')
  svntest.actions.duplicate_dir(wc_dir, wc_backup)

  # Remove the property
  sbox.simple_propdel('ns:cash-sound', 'iota')

  # Create expected trees.
  expected_output = svntest.wc.State(wc_dir, {
    'iota' : Item(verb='Sending'),
    })
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('iota', wc_rev=3, status=' ')

  # Commit the one file.
  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status)

  # Create expected trees for the update.  The backup still has the
  # prop from r2; updating to r3 must deliver the prop deletion.
  expected_output = svntest.wc.State(wc_backup, {
    'iota' : Item(status=' U'),
    })
  expected_disk = svntest.main.greek_state.copy()
  expected_status = svntest.actions.get_virginal_state(wc_backup, 3)
  expected_status.tweak('iota', wc_rev=3, status=' ')

  # Do the update and check the results in three ways... INCLUDING PROPS
  svntest.actions.run_and_verify_update(wc_backup,
                                        expected_output,
                                        expected_disk,
                                        expected_status,
                                        check_props=True)
+
def props_over_time(sbox):
  "property retrieval with peg and operative revs"

  # Bootstrap
  sbox.build()
  wc_dir = sbox.wc_dir

  # Convenience variables
  iota_path = sbox.ospath('iota')
  iota_url = sbox.repo_url + '/iota'

  # Add/tweak a property 'revision' with value revision-committed to a
  # file, commit, and then repeat this a few times.
  # Result: r1 has no property, r2 has 'revision'='2', r3 has '3'.
  for rev in range(2, 4):
    sbox.simple_propset('revision', str(rev), 'iota')
    sbox.simple_commit('iota')

  # Backdate to r2 so the defaults for URL- vs. WC-style queries are
  # different.
  svntest.main.run_svn(None, 'up', '-r2', wc_dir)

  # Now, test propget of the property across many combinations of
  # pegrevs, operative revs, and wc-path vs. url style input specs.
  # NOTE: We're using 0 in these loops to mean "unspecified".
  for path in iota_path, iota_url:
    for peg_rev in range(0, 4):
      for op_rev in range(0, 4):
        # Calculate the expected property value. If there is an
        # operative rev, we expect the output to match revisions
        # there. Else, we'll be looking at the peg-rev value. And if
        # neither are supplied, it depends on the path vs. URL
        # question.  (expected = None means "property absent".)
        if op_rev > 1:
          expected = str(op_rev)
        elif op_rev == 1:
          expected = None
        else:
          if peg_rev > 1:
            expected = str(peg_rev)
          elif peg_rev == 1:
            expected = None
          else:
            if path == iota_url:
              expected = "3" # HEAD
            else:
              expected = "2" # BASE

        # Append '@PEGREV' only when a peg revision was specified.
        peg_path = path + (peg_rev != 0 and '@' + str(peg_rev) or "")

        ### Test 'svn propget'
        pget_expected = expected
        if pget_expected:
          pget_expected = [ pget_expected + "\n" ]
        expected_err = [] if expected else '.*W200017: Property.*not found.*'
        if op_rev != 0:
          svntest.actions.run_and_verify_svn(pget_expected, expected_err,
                                             'propget', 'revision', peg_path,
                                             '-r', str(op_rev))
        else:
          svntest.actions.run_and_verify_svn(pget_expected, expected_err,
                                             'propget', 'revision', peg_path)

        ### Test 'svn proplist -v'
        if op_rev != 0 or peg_rev != 0: # a revision-ful query output URLs
          path = iota_url
        plist_expected = expected
        if plist_expected:
          plist_expected = [ "Properties on '" + path + "':\n",
                             " revision\n",
                             " " + expected + "\n" ]

        if op_rev != 0:
          svntest.actions.run_and_verify_svn(plist_expected, [],
                                             'proplist', '-v', peg_path,
                                             '-r', str(op_rev))
        else:
          svntest.actions.run_and_verify_svn(plist_expected, [],
                                             'proplist', '-v', peg_path)
+
+
+# XFail the same reason revprop_change() is.
# XFail the same reason revprop_change() is.
@SkipUnless(svntest.main.server_enforces_date_syntax)
@Issue(3086)
def invalid_propvalues(sbox):
  "test handling invalid svn:* property values"

  # No working copy needed; we operate on revprops via the URL.
  sbox.build(create_wc = False)
  repo_dir = sbox.repo_dir
  repo_url = sbox.repo_url

  svntest.actions.enable_revprop_changes(repo_dir)

  # svn:date must be ISO-8601; a human-readable date must be rejected.
  # (Error text differs between ra layers, hence the alternation.)
  expected_stderr = '.*unexpected property value.*|.*Bogus date.*'
  svntest.actions.run_and_verify_svn([], expected_stderr,
                                     'propset', '--revprop', '-r', '0',
                                     'svn:date', 'Sat May 10 12:12:31 2008',
                                     repo_url)
+
@Issue(3282)
def same_replacement_props(sbox):
  "commit replacement props when same as old props"
  # issue #3282
  sbox.build()

  foo_path = sbox.ospath('foo')

  open(foo_path, 'w').close()
  sbox.simple_add('foo')
  sbox.simple_propset('someprop', 'someval', 'foo')
  sbox.simple_commit('foo')
  sbox.simple_rm('foo')

  # Now replace 'foo'.
  open(foo_path, 'w').close()
  sbox.simple_add('foo')

  # Set the same property again, with the same value.  Issue #3282 was
  # that this identical prop on a replacement was not committed.
  sbox.simple_propset('someprop', 'someval', 'foo')
  sbox.simple_commit('foo')

  # Check if the property made it into the repository.
  foo_url = sbox.repo_url + '/foo'
  expected_out = [ "Properties on '" + foo_url + "':\n",
                   " someprop\n",
                   " someval\n" ]
  svntest.actions.run_and_verify_svn(expected_out, [],
                                     'proplist', '-v', foo_url)
+
def added_moved_file(sbox):
  "'svn mv added_file' preserves props"

  sbox.build()
  wc_dir = sbox.wc_dir

  # create it
  foo_path = sbox.ospath('foo')
  foo2_path = sbox.ospath('foo2')
  foo2_url = sbox.repo_url + '/foo2'

  open(foo_path, 'w').close()

  # add it
  sbox.simple_add('foo')
  sbox.simple_propset('someprop', 'someval', 'foo')

  # move it while it is still only schedule-added
  svntest.main.run_svn(None, 'mv', foo_path, foo2_path)

  # should still have the property
  svntest.actions.check_prop('someprop', foo2_path, [b'someval'])

  # the property should get committed, too
  sbox.simple_commit()
  svntest.actions.check_prop('someprop', foo2_url, [b'someval'])
+
+
+# Issue 2220, deleting a non-existent property should error
# Issue 2220, deleting a non-existent property should error
@Issue(2220)
def delete_nonexistent_property(sbox):
  "remove a property which doesn't exist"

  # Bootstrap
  sbox.build()
  wc_dir = sbox.wc_dir

  # Remove one property that was never set; svn should warn on stdout
  # rather than silently succeed (issue #2220).
  expected_stdout = ".*Attempting to delete nonexistent property 'yellow'.*"
  svntest.actions.run_and_verify_svn(expected_stdout, [],
                                     'propdel', 'yellow',
                                     os.path.join(wc_dir, 'A', 'D', 'G'))
+
+#----------------------------------------------------------------------
@Issue(3553)
def post_revprop_change_hook(sbox):
  "post-revprop-change hook"

  sbox.build()
  wc_dir = sbox.wc_dir
  repo_dir = sbox.repo_dir

  # Include a non-XML-safe message to regression-test issue #3553.
  error_msg = 'Text with <angle brackets> & ampersand'

  svntest.actions.enable_revprop_changes(repo_dir)
  svntest.actions.create_failing_hook(repo_dir, 'post-revprop-change',
                                      error_msg)

  # serf/mod_dav_svn give SVN_ERR_RA_DAV_PROPPATCH_FAILED
  # file/svn give SVN_ERR_REPOS_HOOK_FAILURE
  expected_error = 'svn: (E175008|E165001).*post-revprop-change hook failed'

  svntest.actions.run_and_verify_svn([], expected_error,
                                     'ps', '--revprop', '-r0', 'p', 'v',
                                     wc_dir)

  # Verify change has stuck -- at one time mod_dav_svn would rollback
  # revprop changes on post-revprop-change hook errors
  svntest.actions.run_and_verify_svn('v', [],
                                     'pg', '--revprop', '-r0', 'p',
                                     wc_dir)
+
def rm_of_replaced_file(sbox):
  """properties after a removal of a replaced file"""

  sbox.build()
  wc_dir = sbox.wc_dir

  # Add some properties to iota and mu
  iota_path = sbox.ospath('iota')
  sbox.simple_propset('red', 'rojo', 'iota')
  sbox.simple_propset('blue', 'lagoon', 'iota')

  mu_path = sbox.ospath('A/mu')
  sbox.simple_propset('yellow', 'submarine', 'A/mu')
  sbox.simple_propset('orange', 'toothpick', 'A/mu')

  sbox.simple_commit()

  # Copy iota over the top of mu: mu is now replaced-with-history and
  # must carry iota's props, not its own.
  sbox.simple_rm('A/mu')
  svntest.main.run_svn(None, 'cp', iota_path, mu_path)

  expected_disk = svntest.main.greek_state.copy()
  expected_disk.tweak('iota', props={'red': 'rojo', 'blue': 'lagoon'})
  expected_disk.tweak('A/mu', props={'red': 'rojo', 'blue': 'lagoon'},
                      contents="This is the file 'iota'.\n")
  svntest.actions.verify_disk(wc_dir, expected_disk.old_tree(), True)

  # Remove the copy. This should leave the original locally-deleted mu,
  # which should have no properties.
  svntest.main.run_svn(None, 'rm', '--force', mu_path)

  svntest.actions.run_and_verify_svn(
    [],
    'svn: E200009.*some targets are not versioned.*',
    'proplist', '-v', mu_path)

  # Run it again, but ask for the pristine properties, which should
  # be mu's original props.
  exit_code, output, errput = svntest.main.run_svn(None,
                                                   'proplist', '-v',
                                                   mu_path + '@base')
  expected_output = svntest.verify.UnorderedRegexListOutput([
    'Properties on',
    ' yellow',
    ' submarine',
    ' orange',
    ' toothpick',
    ])
  svntest.verify.compare_and_display_lines('message', 'label',
                                           expected_output, output)
  svntest.verify.verify_exit_code(None, exit_code, 0)
+
+
def prop_reject_grind(sbox):
  """grind through all variants of prop rejects"""
  # Property names follow an '<incoming>.<local>' naming scheme: e.g.
  # 'edit.del' means the merge brings an edit while the working copy
  # has locally deleted the property.

  sbox.build()
  wc_dir = sbox.wc_dir

  mu_path = sbox.ospath('A/mu')
  mu_prej_path = sbox.ospath('A/mu.prej')

  # Create r2 with all the properties we intend to use as incoming-change,
  # and as incoming-delete. Also set up our local-edit and local-delete
  # properties. We also need some properties that are simply different
  # from the incoming properties
  sbox.simple_propset('edit.diff', 'repos', 'iota')
  sbox.simple_propset('edit.edit', 'repos', 'iota')
  sbox.simple_propset('edit.del', 'repos', 'iota')
  sbox.simple_propset('edit.add', 'repos', 'iota')
  sbox.simple_propset('edit.none', 'repos', 'iota')
  sbox.simple_propset('del.edit', 'repos', 'iota')
  sbox.simple_propset('del.edit2', 'repos', 'iota')
  sbox.simple_propset('del.diff', 'repos', 'iota')
  sbox.simple_propset('del.del', 'repos', 'iota')
  sbox.simple_propset('del.add', 'repos', 'iota')

  sbox.simple_propset('edit.edit', 'local', 'A/mu')
  sbox.simple_propset('add.edit', 'local', 'A/mu')
  sbox.simple_propset('del.edit', 'local', 'A/mu')
  sbox.simple_propset('del.edit2', 'repos', 'A/mu')
  sbox.simple_propset('add.del', 'local', 'A/mu')
  sbox.simple_propset('edit.del', 'local', 'A/mu')
  sbox.simple_propset('del.del', 'local', 'A/mu')
  sbox.simple_propset('edit.diff', 'local', 'A/mu')
  sbox.simple_propset('add.diff', 'local', 'A/mu')
  sbox.simple_propset('del.diff', 'local', 'A/mu')

  sbox.simple_commit()

  # Create r3 with all the properties that we intend to use as incoming-add,
  # and then perform the incoming-edits and incoming-deletes.
  sbox.simple_propset('add.add', 'repos', 'iota')
  sbox.simple_propset('add.edit', 'repos', 'iota')
  sbox.simple_propset('add.del', 'repos', 'iota')
  sbox.simple_propset('add.diff', 'repos', 'iota')
  sbox.simple_propset('edit.diff', 'repos.changed', 'iota')
  sbox.simple_propset('edit.edit', 'repos.changed', 'iota')
  sbox.simple_propset('edit.del', 'repos.changed', 'iota')
  sbox.simple_propset('edit.add', 'repos.changed', 'iota')
  sbox.simple_propset('edit.none', 'repos.changed', 'iota')
  sbox.simple_propdel('del.edit', 'iota')
  sbox.simple_propdel('del.edit2', 'iota')
  sbox.simple_propdel('del.diff', 'iota')
  sbox.simple_propdel('del.del', 'iota')
  sbox.simple_propdel('del.add', 'iota')
  sbox.simple_commit()

  # Set up our victim for all the right rejects: local-adds, local-edits,
  # and local-deletes.
  sbox.simple_propset('edit.add', 'local', 'A/mu')
  sbox.simple_propset('add.add', 'local', 'A/mu')
  sbox.simple_propset('del.add', 'local', 'A/mu')
  sbox.simple_propset('edit.edit', 'local.changed', 'A/mu')
  sbox.simple_propset('add.edit', 'local.changed', 'A/mu')
  sbox.simple_propset('del.edit', 'local.changed', 'A/mu')
  sbox.simple_propset('del.edit2', 'repos.changed', 'A/mu')
  sbox.simple_propdel('add.del', 'A/mu')
  sbox.simple_propdel('edit.del', 'A/mu')
  sbox.simple_propdel('del.del', 'A/mu')

  # Now merge r2:3 into the victim to create all variants
  svntest.main.run_svn(False, 'merge', '-r2:3', sbox.repo_url + '/iota',
                       mu_path)

  # Check that A/mu.prej reports the expected conflicts.  Each entry is
  # one multi-line conflict description; adjacent string literals are
  # concatenated into a single message.
  expected_prej = [
    "Trying to change property 'edit.none'\n"
    "but the property does not exist locally.\n"
    "<<<<<<< (local property value)\n"
    "||||||| (incoming 'changed from' value)\n"
    "repos=======\n"
    "repos.changed>>>>>>> (incoming 'changed to' value)\n",

    "Trying to delete property 'del.del'\n"
    "but the property has been locally deleted and had a different value.\n",

    "Trying to delete property 'del.edit'\n"
    "but the local property value is different.\n"
    "<<<<<<< (local property value)\n"
    "local.changed||||||| (incoming 'changed from' value)\n"
    "repos=======\n"
    ">>>>>>> (incoming 'changed to' value)\n",

    "Trying to change property 'edit.del'\n"
    "but the property has been locally deleted.\n"
    "<<<<<<< (local property value)\n"
    "||||||| (incoming 'changed from' value)\n"
    "repos=======\n"
    "repos.changed>>>>>>> (incoming 'changed to' value)\n",

    "Trying to change property 'edit.edit'\n"
    "but the property has already been locally changed to a different value.\n"
    "<<<<<<< (local property value)\n"
    "local.changed||||||| (incoming 'changed from' value)\n"
    "repos=======\n"
    "repos.changed>>>>>>> (incoming 'changed to' value)\n",

    "Trying to delete property 'del.edit2'\n"
    "but the property has been locally modified.\n"
    "<<<<<<< (local property value)\n"
    "repos.changed||||||| (incoming 'changed from' value)\n"
    "repos=======\n"
    ">>>>>>> (incoming 'changed to' value)\n",

    "Trying to delete property 'del.add'\n"
    "but the property has been locally added.\n"
    "<<<<<<< (local property value)\n"
    "local||||||| (incoming 'changed from' value)\n"
    "repos=======\n"
    ">>>>>>> (incoming 'changed to' value)\n",

    "Trying to delete property 'del.diff'\n"
    "but the local property value is different.\n"
    "<<<<<<< (local property value)\n"
    "local||||||| (incoming 'changed from' value)\n"
    "repos=======\n"
    ">>>>>>> (incoming 'changed to' value)\n",

    "Trying to change property 'edit.add'\n"
    "but the property has been locally added with a different value.\n"
    "<<<<<<< (local property value)\n"
    "local||||||| (incoming 'changed from' value)\n"
    "repos=======\n"
    "repos.changed>>>>>>> (incoming 'changed to' value)\n",

    "Trying to change property 'edit.diff'\n"
    "but the local property value conflicts with the incoming change.\n"
    "<<<<<<< (local property value)\n"
    "local||||||| (incoming 'changed from' value)\n"
    "repos=======\n"
    "repos.changed>>>>>>> (incoming 'changed to' value)\n",

    "Trying to add new property 'add.add'\n"
    "but the property already exists.\n"
    "<<<<<<< (local property value)\n"
    "local||||||| (incoming 'changed from' value)\n"
    "=======\n"
    "repos>>>>>>> (incoming 'changed to' value)\n",

    "Trying to add new property 'add.diff'\n"
    "but the property already exists.\n"
    "<<<<<<< (local property value)\n"
    "local||||||| (incoming 'changed from' value)\n"
    "=======\n"
    "repos>>>>>>> (incoming 'changed to' value)\n",

    "Trying to add new property 'add.del'\n"
    "but the property has been locally deleted.\n"
    "Incoming property value:\n"
    "repos\n",

    "Trying to add new property 'add.edit'\n"
    "but the property already exists.\n"
    "<<<<<<< (local property value)\n"
    "local.changed||||||| (incoming 'changed from' value)\n"
    "=======\n"
    "repos>>>>>>> (incoming 'changed to' value)\n",
    ]

  # Get the contents of mu.prej. The error messages are in the prej file
  # but there is no guarantee as to order. So try to locate each message
  # in the file individually.
  prej_file = open(mu_prej_path, 'r')
  n = 0
  for message in expected_prej:
    prej_file.seek(0)
    match = False
    i = 0    # index of the next expected line within MESSAGE
    j = 0    # current line number within the prej file (for diagnostics)
    msg_lines = message.split('\n')
    for file_line in prej_file:
      line = msg_lines[i] + '\n'
      match = (line == file_line)
      if match:
        # The last line in the list is always an empty string.
        if msg_lines[i + 1] == "":
          #logger.info("found message %i in file at line %i" % (n, j))
          break
        i += 1
      else:
        # Restart matching this message from its first line.
        i = 0
      j += 1
    n += 1
    if not match:
      raise svntest.main.SVNUnmatchedError(
        "Expected mu.prej doesn't match actual mu.prej")
+
+def obstructed_subdirs(sbox):
+ """test properties of obstructed subdirectories"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # at one point during development, obstructed subdirectories threw
+ # errors trying to fetch property information during 'svn status'.
+ # this test ensures we won't run into that problem again.
+
+ C_path = sbox.ospath('A/C')
+ sbox.simple_propset('red', 'blue', 'A/C')
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/C', props={'red': 'blue'})
+ svntest.actions.verify_disk(wc_dir, expected_disk.old_tree(), True)
+
+ # Remove the subdir from disk, and validate the status
+ svntest.main.safe_rmtree(C_path)
+
+ expected_disk.remove('A/C')
+ svntest.actions.verify_disk(wc_dir, expected_disk.old_tree(), True)
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/C', status='!M', wc_rev='1')
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Drop an empty file there to obstruct the now-deleted subdir
+ open(C_path, 'w')
+
+ expected_disk.add({'A/C': Item(contents='', props={'red': 'blue'})})
+ expected_status.tweak('A/C', status='~M', wc_rev='1')
+
+ svntest.actions.verify_disk(wc_dir, expected_disk.old_tree(), True)
+
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+def atomic_over_ra(sbox):
+ "test revprop atomicity guarantees of libsvn_ra"
+
+ sbox.build(create_wc=False)
+ repo_url = sbox.repo_url
+
+ # From this point on, similar to ../libsvn_fs/fs-test.c:revision_props().
+ s1 = "violet"
+ s2 = "wrong value"
+
+ # But test "" explicitly, since the RA layers have to marshal "" and <unset>
+ # differently.
+ s3 = ""
+
+ # Initial state.
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', '--revprop', '-r', '0',
+ 'flower', s1, repo_url)
+
+ # Helpers.
+
+ def expect_old_server_fail(old_value, proposed_value):
+ # We are setting a (possibly "not present") expectation for the old value,
+ # so we should fail.
+ expected_stderr = ".*doesn't advertise.*ATOMIC_REVPROP"
+ svntest.actions.run_and_verify_atomic_ra_revprop_change(
+ None, expected_stderr, 1, repo_url, 0, 'flower',
+ old_value, proposed_value, True)
+
+ # The original value is still there.
+ svntest.actions.check_prop('flower', repo_url, [s1], 0)
+
+ def FAILS_WITH_BPV(not_the_old_value, proposed_value):
+ if svntest.main.server_has_atomic_revprop():
+ svntest.actions.run_and_verify_atomic_ra_revprop_change(
+ None, [], 0, repo_url, 0, 'flower',
+ not_the_old_value, proposed_value, True)
+ else:
+ expect_old_server_fail(not_the_old_value, proposed_value)
+
+ def PASSES_WITHOUT_BPV(yes_the_old_value, proposed_value):
+ if svntest.main.server_has_atomic_revprop():
+ svntest.actions.run_and_verify_atomic_ra_revprop_change(
+ None, [], 0, repo_url, 0, 'flower',
+ yes_the_old_value, proposed_value, False)
+ else:
+ expect_old_server_fail(yes_the_old_value, proposed_value)
+
+ # Value of "flower" is 's1'.
+ FAILS_WITH_BPV(s2, s1)
+ FAILS_WITH_BPV(s3, s1)
+ PASSES_WITHOUT_BPV(s1, s2)
+
+ # Value of "flower" is 's2'.
+ PASSES_WITHOUT_BPV(s2, s3)
+
+ # Value of "flower" is 's3'.
+ FAILS_WITH_BPV(None, s3)
+ FAILS_WITH_BPV(s1, s3)
+ PASSES_WITHOUT_BPV(s3, s2)
+
+ # Value of "flower" is 's2'.
+ FAILS_WITH_BPV(None, None)
+ FAILS_WITH_BPV(s1, None)
+ FAILS_WITH_BPV(s3, None)
+ PASSES_WITHOUT_BPV(s2, None)
+
+ # Value of "flower" is <not set>.
+ FAILS_WITH_BPV(s2, s1)
+ FAILS_WITH_BPV(s3, s1)
+ PASSES_WITHOUT_BPV(None, s1)
+
+ # Value of "flower" is 's1'.
+ svntest.actions.check_prop('flower', repo_url, [s1.encode()], 0)
+
+# Test for issue #3721 'redirection of svn propget output corrupted with
+# large property values'
+@Issue(3721)
+def propget_redirection(sbox):
+ """pg of large text properties redirects properly"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ B_path = os.path.join(wc_dir, "A", "B")
+ C_path = os.path.join(wc_dir, "A", "C")
+ D_path = os.path.join(wc_dir, "A", "D")
+
+ prop_val_file = os.path.join(wc_dir, "prop_val")
+ redirect_file = os.path.join(wc_dir, "pg.vR.out")
+
+ # A 'big' mergeinfo property. Yes, it is bogus in the sense that
+ # it refers to non-existent path-revs, but that is not relevant to
+ # this test. What matters is that it is a realistic 'big' mergeinfo
+ # value (it is from Subversion's own 1.6.x branch in fact).
+ # Also, the syntax is wrong: every path should start with '/';
+ # Subversion currently silently corrects this.
+ big_prop_val = "subversion/branches/1.5.x:872364-874936\n" + \
+ "subversion/branches/1.5.x-34184:874657-874741\n" + \
+ "subversion/branches/1.5.x-34432:874744-874798\n" + \
+ "subversion/branches/1.5.x-issue3067:872184-872314\n" + \
+ "subversion/branches/1.5.x-issue3157:872165-872175\n" + \
+ "subversion/branches/1.5.x-issue3174:872178-872348\n" + \
+ "subversion/branches/1.5.x-r30215:870310,870312,870319,870362\n" + \
+ "subversion/branches/1.5.x-r30756:874853-874870\n" + \
+ "subversion/branches/1.5.x-r30868:870951-870970\n" + \
+ "subversion/branches/1.5.x-r31314:874476-874605\n" + \
+ "subversion/branches/1.5.x-r31516:871592-871649\n" + \
+ "subversion/branches/1.5.x-r32470:872546-872676\n" + \
+ "subversion/branches/1.5.x-r32968:873773-873872\n" + \
+ "subversion/branches/1.5.x-r33447:873527-873547\n" + \
+ "subversion/branches/1.5.x-r33465:873541-873549\n" + \
+ "subversion/branches/1.5.x-r33641:873880-873883\n" + \
+ "subversion/branches/1.5.x-r34050-followups:874639-874686\n" + \
+ "subversion/branches/1.5.x-r34487:874562-874581\n" + \
+ "subversion/branches/1.5.x-ra_serf-backports:872354-872626\n" + \
+ "subversion/branches/1.5.x-rb-test-fix:874916-874919\n" + \
+ "subversion/branches/1.5.x-reintegrate-improvements:874586-874922\n" + \
+ "subversion/branches/1.5.x-tests-pass:870925-870973\n" + \
+ "subversion/branches/dont-save-plaintext-passwords-by-default:" + \
+ "870728-871118\n" + \
+ "subversion/branches/gnome-keyring:870558-871410\n" + \
+ "subversion/branches/issue-3220-dev:872210-872226\n" + \
+ "subversion/branches/kwallet:870785-871314\n" + \
+ "subversion/branches/log-g-performance:870941-871032\n" + \
+ "subversion/branches/r30963-1.5.x:871056-871076\n" + \
+ "subversion/branches/reintegrate-improvements:873853-874164\n" + \
+ "subversion/branches/svn-mergeinfo-enhancements:870196\n" + \
+ "subversion/branches/svnpatch-diff:871905\n" + \
+ "subversion/trunk:869159-869165,869168-869181,869185,869188,869191," + \
+ "869200-869201,869203-869207,869209-869224,869227-869238,869240-" + \
+ "869244,869248,869250-869260,869262-869263,869265,869267-869268," + \
+ "869272-869280,869282-869325,869328-869330,869335,869341-869347," + \
+ "869351,869354-869355,869358,869361-869377,869379-869381,869383-" + \
+ "869417,869419-869422,869432-869453,869455-869466,869471-869473," + \
+ "869475,869483,869486,869488-869489,869491-869497,869499-869500," + \
+ "869503,869506-869508,869510-869521,869523-869540,869542-869552," + \
+ "869556,869558,869560-869561,869563,869565,869567,869570,869572," + \
+ "869582,869601-869602,869605,869607,869613-869614,869616,869618," + \
+ "869620,869625,869627,869630,869633,869639,869641-869643,869645-" + \
+ "869652,869655,869657,869665,869668,869674,869677,869681,869685," + \
+ "869687-869688,869693,869697,869699-869700,869704-869708,869716," + \
+ "869719,869722,869724,869730,869733-869734,869737-869740,869745-" + \
+ "869746,869751-869754,869766,869812-869813,869815-869818,869820," + \
+ "869825,869837,869841,869843-869844,869858,869860-869861,869871," + \
+ "869875,869889,869895,869898,869902,869907,869909,869926,869928-" + \
+ "869929,869931-869933,869942-869943,869950,869952,869957-869958," + \
+ "869969,869972,869974,869988,869994,869996,869999,870004,870013-" + \
+ "870014,870016,870024,870032,870036,870039,870041-870043,870054," + \
+ "870060,870068-870071,870078,870083,870094,870104,870124,870127-" + \
+ "870128,870133,870135-870136,870141,870144,870148,870160,870172," + \
+ "870175,870191,870198,870203-870204,870211,870219,870225,870233," + \
+ "870235-870236,870254-870255,870259,870307,870311,870313,870320," + \
+ "870323,870330-870331,870352-870353,870355,870359-870360,870371," + \
+ "870373,870378,870393-870395,870402,870409-870410,870414,870416," + \
+ "870421,870436,870442,870447,870449,870452,870454,870466,870476," + \
+ "870481-870483,870486,870500,870502,870505,870513-870518,870522-" + \
+ "870523,870527,870529,870534,870536-870538,870540-870541,870543-" + \
+ "870548,870554,870556,870561,870563,870584,870590-870592,870594-" + \
+ "870595,870597,870618,870620,870622,870625-870626,870641,870647," + \
+ "870657,870665,870671,870681,870702-870703,870706-870708,870717-" + \
+ "870718,870727,870730,870737,870740,870742,870752,870758,870800," + \
+ "870809,870815,870817,870820-870825,870830,870834-870836,870850-" + \
+ "870851,870853,870859,870861,870886,870894,870916-870918,870942," + \
+ "870945,870957,870962,870970,870979,870981,870989,870996,871003," + \
+ "871005,871009,871011,871023,871033,871035-871038,871041,871060," + \
+ "871078,871080,871092,871097,871099,871105,871107,871120,871123-" + \
+ "871127,871130,871133-871135,871140,871149,871155-871156,871160," + \
+ "871162,871164,871181,871191,871199-871200,871205,871211-871212," + \
+ "871215,871219,871225,871227,871229,871231,871236,871270,871273," + \
+ "871277,871283,871297,871302,871306,871308,871315-871320,871323-" + \
+ "871325,871333-871335,871345,871347-871350,871354,871357,871361," + \
+ "871363-871366,871374-871375,871377,871382,871385-871388,871391," + \
+ "871408,871411,871422,871435,871441,871443-871444,871465,871470," + \
+ "871472-871476,871481,871489,871499,871501-871502,871505,871508," + \
+ "871520,871523,871525-871527,871538,871542,871544,871547-871549," + \
+ "871556,871559,871562-871563,871578,871581,871587,871589-871597," + \
+ "871608,871613,871616-871617,871620,871624,871649,871668,871675," + \
+ "871677,871693-871694,871696,871704,871732-871733,871744,871747," + \
+ "871759,871762,871766,871769,871793,871796,871799,871801,871811," + \
+ "871813,871821-871826,871831,871843,871860,871880,871891,871894," + \
+ "871899,871907,871911,871926,871928,871933,871935,871941-871942," + \
+ "871947-871949,871958,871974,872000-872001,872003,872005,872018," + \
+ "872022,872038,872065,872068,872086,872091,872093,872097,872103," + \
+ "872112,872130,872154,872157,872206,872216,872218-872219,872227," + \
+ "872234,872238,872243,872253,872255,872259,872261,872278-872279," + \
+ "872281,872310-872311,872362,872404,872416-872417,872429,872431," + \
+ "872434,872439,872450-872453,872468,872470,872477-872478,872483," + \
+ "872490-872491,872495,872515-872516,872518-872519,872537,872541," + \
+ "872544,872565,872568,872571-872573,872584,872596-872597,872612," + \
+ "872619,872624,872632,872656,872670,872706,872710,872713,872717," + \
+ "872746-872748,872777,872780-872782,872791,872804,872813,872845," + \
+ "872864,872870,872872,872947-872948,872961,872974,872981,872985-" + \
+ "872987,873004,873042,873049,873051,873076,873087,873090,873096," + \
+ "873098,873100,873183,873186,873192,873195,873210-873211,873247," + \
+ "873252,873256,873259,873275,873286,873288,873343,873379-873381," + \
+ "873443,873521,873538-873539,873714-873715,873718,873733,873745," + \
+ "873751,873767,873778,873781,873849,873856,873862,873914,873940," + \
+ "873947-873948,873975-873976,873987,873998,874026-874027,874075," + \
+ "874077-874078,874124-874125,874127,874156,874159,874161,874165," + \
+ "874168,874170,874184,874189,874204,874223-874224,874245,874258," + \
+ "874262,874270,874292-874297,874300-874301,874303,874305,874316-" + \
+ "874318,874330,874363,874380,874405,874421,874441,874459,874467," + \
+ "874473,874497,874506,874545-874546,874561,874566,874568,874580," + \
+ "874619,874621,874634,874636,874659,874673,874681,874727,874730," + \
+ "874743,874765-874767,874806,874816,874848,874868,874888,874896," + \
+ "874909,874912,874996,875051,875069,875129,875132,875134,875137," + \
+ "875151-875153,875186-875188,875190,875235-875237,875242-875243," + \
+ "875249,875388,875393,875406,875411\n"
+
+ # Set the 'big' mergeinfo prop on A/B, A/C, and A/D.
+ svntest.main.file_write(prop_val_file, big_prop_val)
+
+ svntest.actions.run_and_verify_svn(None, [], 'propset',
+ SVN_PROP_MERGEINFO, '-F', prop_val_file,
+ B_path)
+ svntest.actions.run_and_verify_svn(None, [], 'propset',
+ SVN_PROP_MERGEINFO, '-F', prop_val_file,
+ C_path)
+ svntest.actions.run_and_verify_svn(None, [], 'propset',
+ SVN_PROP_MERGEINFO, '-F', prop_val_file,
+ D_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+ 'ps some large svn:mergeinfos', wc_dir)
+
+ # Run propget -vR svn:mergeinfo, redirecting the stdout to a file.
+ arglist = [svntest.main.svn_binary, 'propget', SVN_PROP_MERGEINFO, '-vR',
+ '--config-dir', svntest.main.default_config_dir, wc_dir]
+ redir_file = open(redirect_file, 'wb')
+ pg_proc = subprocess.Popen(arglist, stdout=redir_file)
+ pg_proc.wait()
+ redir_file.close()
+ pg_stdout_redir = open(redirect_file, 'r').readlines()
+
+ # Check if the redirected output of svn pg -vR on the root of the WC
+ # is what we expect.
+ expected_mergeinfo_displayed = [
+ ' /' + line for line in big_prop_val.splitlines(True) ]
+ expected_output = [
+    "Properties on '" + B_path + "':\n", # Should occur only once!
+ " svn:mergeinfo\n",
+ ] + expected_mergeinfo_displayed + [
+    "Properties on '" + C_path + "':\n", # Should occur only once!
+ " svn:mergeinfo\n",
+ ] + expected_mergeinfo_displayed + [
+    "Properties on '" + D_path + "':\n", # Should occur only once!
+ " svn:mergeinfo\n",
+ ] + expected_mergeinfo_displayed
+ svntest.verify.verify_outputs(
+ "Redirected pg -vR doesn't match pg -vR stdout",
+ pg_stdout_redir, None,
+ svntest.verify.UnorderedOutput(expected_output), None)
+ # (We want this check to fail if the redirected pg output contains
+ # unexpected duplicate lines, although this hasn't been observed as
+ # part of issue #3721. We used to check separately here because the old
+ # UnorderedOutput class ignored duplicates but now it detects them.)
+
+@Issue(3852)
+def file_matching_dir_prop_reject(sbox):
+ "prop conflict for file matching dir prop reject"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add file with awkward name
+ svntest.main.file_append(sbox.ospath('A/dir_conflicts'), "some content\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'add', sbox.ospath('A/dir_conflicts'))
+ sbox.simple_propset('prop', 'val1', 'A/dir_conflicts')
+ sbox.simple_propset('prop', 'val1', 'A')
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(verb='Sending'),
+ 'A/dir_conflicts' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A', wc_rev=2)
+ expected_status.add({
+ 'A/dir_conflicts' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Modify/commit property change
+ sbox.simple_propset('prop', 'val2', 'A/dir_conflicts')
+ sbox.simple_propset('prop', 'val2', 'A')
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(verb='Sending'),
+ 'A/dir_conflicts' : Item(verb='Sending'),
+ })
+ expected_status.tweak('A', 'A/dir_conflicts', wc_rev=3)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Local property mod
+ sbox.simple_propset('prop', 'val3', 'A/dir_conflicts')
+ sbox.simple_propset('prop', 'val3', 'A')
+
+ # Update to trigger property conflicts
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/dir_conflicts' : Item('some content\n', props = {'prop' : 'val3'}),
+ })
+ expected_disk.tweak('A', props={'prop' : 'val3'})
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(status=' C'),
+ 'A/dir_conflicts' : Item(status=' C'),
+ })
+ expected_status.tweak(wc_rev=2)
+ expected_status.tweak('A', 'A/dir_conflicts', status=' C')
+
+ # Conflict: BASE=val2 WORKING=val3 INCOMING_OLD=val2 INCOMING_NEW=val1
+ extra_files = ['dir_conflicts.prej', 'dir_conflicts.2.prej']
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True,
+ '-r', '2', wc_dir,
+ extra_files=extra_files)
+
+ # Revert and update to check that conflict files are removed
+ svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir)
+ expected_status.tweak('A', 'A/dir_conflicts', status=' ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(status=' U'),
+ 'A/dir_conflicts' : Item(status=' U'),
+ })
+ expected_disk.tweak('A', 'A/dir_conflicts', props={'prop' : 'val2'})
+ expected_status.tweak(wc_rev=3)
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ check_props=True)
+
+def pristine_props_listed(sbox):
+ "check if pristine properties are visible"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_propset('prop', 'val', 'A')
+ sbox.simple_commit()
+
+ expected_output = ["Properties on '" + sbox.ospath('A') + "':\n", " prop\n"]
+
+ # Now we see the pristine properties
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'proplist', '-R', wc_dir, '-r', 'BASE')
+
+ sbox.simple_propset('prop', 'needs-fix', 'A')
+
+ # And now we see no property at all
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'proplist', '-R', wc_dir, '-r', 'BASE')
+
+def create_inherited_ignores_config(sbox):
+ "create config stuffs for inherited ignores tests"
+
+ # contents of the file 'config'
+ config_contents = '''\
+[miscellany]
+global-ignores = *.boo *.goo
+'''
+
+ return sbox.create_config_dir(config_contents)
+
+def inheritable_ignores(sbox):
+ "inheritable ignores with svn:ignores and config"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ config_dir = create_inherited_ignores_config(sbox)
+
+ sbox.simple_propset(SVN_PROP_INHERITABLE_IGNORES, '*.doo', 'A/B')
+ sbox.simple_propset(SVN_PROP_INHERITABLE_IGNORES, '*.moo', 'A/D')
+ sbox.simple_propset('svn:ignore', '*.foo', 'A/B/E')
+ sbox.simple_commit()
+
+ # Some directories and files that should always be added because they
+ # don't match any applicable ignore patterns.
+ X_dir_path = sbox.ospath('ADD-ME-DIR-X')
+ Y_dir_path = sbox.ospath('A/ADD-ME-DIR-Y.doo')
+ Z_dir_path = sbox.ospath('A/D/G/ADD-ME-DIR-Z.doo')
+ os.mkdir(X_dir_path)
+ os.mkdir(Y_dir_path)
+ os.mkdir(Z_dir_path)
+
+ # Some directories and files that should be ignored when adding
+ # because they match an ignore pattern (unless of course they are
+ # the direct target of an add, which we always add).
+ boo_dir_path = sbox.ospath('IGNORE-ME-DIR.boo')
+ goo_dir_path = sbox.ospath('IGNORE-ME-DIR.boo/IGNORE-ME-DIR.goo')
+ doo_dir_path = sbox.ospath('A/B/IGNORE-ME-DIR.doo')
+ moo_dir_path = sbox.ospath('A/D/IGNORE-ME-DIR.moo')
+ foo_dir_path = sbox.ospath('A/B/E/IGNORE-ME-DIR.foo')
+ os.mkdir(boo_dir_path)
+ os.mkdir(goo_dir_path)
+ os.mkdir(doo_dir_path)
+ os.mkdir(moo_dir_path)
+ os.mkdir(foo_dir_path)
+ boo_file_path = sbox.ospath('ADD-ME-DIR-X/ignore-me-file.boo')
+ goo_file_path = sbox.ospath('A/D/G/ignore-me-file.goo')
+ doo_file_path = sbox.ospath('A/B/IGNORE-ME-DIR.doo/ignore-me-file.doo')
+ doo_file2_path = sbox.ospath('A/B/E/ignore-me-file.doo')
+ moo_file_path = sbox.ospath('A/D/ignore-me-file.moo')
+ foo_file_path = sbox.ospath('A/B/E/ignore-me-file.foo')
+ svntest.main.file_write(boo_file_path, 'I should not be versioned!\n')
+ svntest.main.file_write(goo_file_path, 'I should not be versioned!\n')
+ svntest.main.file_write(doo_file_path, 'I should not be versioned!\n')
+ svntest.main.file_write(doo_file2_path, 'I should not be versioned!\n')
+ svntest.main.file_write(moo_file_path, 'I should not be versioned!\n')
+ svntest.main.file_write(foo_file_path, 'I should not be versioned!\n')
+
+ # Some directories and files that don't match any ignore pattern
+ # but are located within a subtree that does match and so shouldn't
+ # be added.
+ roo_file_path = sbox.ospath('A/B/IGNORE-ME-DIR.doo/ignore-me-file.roo')
+ svntest.main.file_write(roo_file_path, 'I should not be versioned!\n')
+
+ # Check (non-verbose) status with the custom config. We should only see
+ # the three unversioned directories which don't match any of the ignore
+ # patterns and aren't proper subtrees of an unversioned or ignored
+ # subtree.
+ expected_output = svntest.verify.UnorderedOutput(
+ ['? ' + X_dir_path + '\n',
+ '? ' + Y_dir_path + '\n',
+ '? ' + Z_dir_path + '\n',])
+ svntest.actions.run_and_verify_svn(expected_output, [], 'st',
+ '--config-dir', config_dir, wc_dir)
+
+ # Check status without the custom config.
+ # Should be the same as above except the *.boo and *.goo paths
+ # now show up as unversioned '?'.
+ expected_output = svntest.verify.UnorderedOutput(
+ ['? ' + X_dir_path + '\n',
+ '? ' + Y_dir_path + '\n',
+ '? ' + Z_dir_path + '\n',
+ '? ' + boo_dir_path + '\n',
+ '? ' + goo_file_path + '\n',])
+ svntest.actions.run_and_verify_svn(expected_output, [], 'st', wc_dir)
+
+ # Check status with the custom config and --no-ignore.
+ expected_output = svntest.verify.UnorderedOutput(
+ ['? ' + X_dir_path + '\n',
+ '? ' + Y_dir_path + '\n',
+ '? ' + Z_dir_path + '\n',
+ 'I ' + boo_dir_path + '\n',
+ 'I ' + doo_dir_path + '\n',
+ 'I ' + doo_file2_path + '\n',
+ 'I ' + moo_dir_path + '\n',
+ 'I ' + foo_dir_path + '\n',
+ 'I ' + goo_file_path + '\n',
+ 'I ' + moo_file_path + '\n',
+ 'I ' + foo_file_path + '\n',])
+ svntest.actions.run_and_verify_svn(expected_output, [], 'st',
+ '--config-dir', config_dir,
+ '--no-ignore', wc_dir)
+
+ # Check status without the custom config and --no-ignore.
+ # Should be the same as above except the *.boo and *.goo paths
+ # are reported as unversioned '?' rather than ignored 'I'.
+ expected_output = svntest.verify.UnorderedOutput(
+ ['? ' + X_dir_path + '\n',
+ '? ' + Y_dir_path + '\n',
+ '? ' + Z_dir_path + '\n',
+ '? ' + boo_dir_path + '\n',
+ 'I ' + doo_dir_path + '\n',
+ 'I ' + doo_file2_path + '\n',
+ 'I ' + moo_dir_path + '\n',
+ 'I ' + foo_dir_path + '\n',
+ '? ' + goo_file_path + '\n',
+ 'I ' + moo_file_path + '\n',
+ 'I ' + foo_file_path + '\n',])
+ svntest.actions.run_and_verify_svn(expected_output, [], 'st',
+ '--no-ignore', wc_dir)
+
+ # Perform the add with the --force flag, targeting the root of the WC.
+ ### Note: You have to be inside the working copy or else Subversion
+ ### will think you're trying to add the working copy to its parent
+ ### directory, and will (possibly, if the parent directory isn't
+ ### versioned) fail -- see also schedule_tests.py 11 "'svn add'
+ ### should traverse already-versioned dirs"
+ saved_wd = os.getcwd()
+ os.chdir(sbox.wc_dir)
+ expected = svntest.verify.UnorderedOutput(
+ ['A ' + 'ADD-ME-DIR-X\n',
+ 'A ' + os.path.join('A', 'ADD-ME-DIR-Y.doo') + '\n',
+ 'A ' + os.path.join('A', 'D', 'G', 'ADD-ME-DIR-Z.doo') + '\n'])
+ svntest.actions.run_and_verify_svn(expected,
+ [], 'add', '.', '--force',
+ '--config-dir', config_dir)
+ os.chdir(saved_wd)
+
+ # Now revert and try the add with the --no-ignore flag, nothing should
+ # be ignored.
+ svntest.actions.run_and_verify_svn(None, [], 'revert', wc_dir, '-R')
+ saved_wd = os.getcwd()
+ os.chdir(sbox.wc_dir)
+ expected = svntest.verify.UnorderedOutput(
+ ['A ' + 'ADD-ME-DIR-X\n',
+ 'A ' + os.path.join('A', 'ADD-ME-DIR-Y.doo') + '\n',
+ 'A ' + os.path.join('A', 'D', 'G', 'ADD-ME-DIR-Z.doo') + '\n',
+ 'A ' + os.path.join('ADD-ME-DIR-X', 'ignore-me-file.boo') + '\n',
+ 'A ' + 'IGNORE-ME-DIR.boo' + '\n',
+ 'A ' + os.path.join('IGNORE-ME-DIR.boo',
+ 'IGNORE-ME-DIR.goo') + '\n',
+ 'A ' + os.path.join('A', 'B', 'E', 'IGNORE-ME-DIR.foo') + '\n',
+ 'A ' + os.path.join('A', 'B', 'E', 'ignore-me-file.foo') + '\n',
+ 'A ' + os.path.join('A', 'D', 'G', 'ignore-me-file.goo') + '\n',
+
+ 'A ' + os.path.join('A', 'B', 'E', 'ignore-me-file.doo') + '\n',
+ 'A ' + os.path.join('A', 'B', 'IGNORE-ME-DIR.doo') + '\n',
+ 'A ' + os.path.join('A', 'B', 'IGNORE-ME-DIR.doo',
+ 'ignore-me-file.doo') + '\n',
+ 'A ' + os.path.join('A', 'B', 'IGNORE-ME-DIR.doo',
+ 'ignore-me-file.roo') + '\n',
+ 'A ' + os.path.join('A', 'D', 'IGNORE-ME-DIR.moo') + '\n',
+ 'A ' + os.path.join('A', 'D', 'ignore-me-file.moo') + '\n'])
+ svntest.actions.run_and_verify_svn(expected, [], 'add', '.', '--force',
+ '--no-ignore', '--config-dir',
+ config_dir)
+
+def almost_known_prop_names(sbox):
+ "propset with svn: prefix but unknown name"
+
+ sbox.build(read_only=True)
+ wc_dir = sbox.wc_dir
+ iota_path = sbox.ospath('iota')
+
+ # Same prefix, different prop name
+ svntest.actions.set_prop('svn:exemutable', 'x', iota_path,
+ "svn: E195011: 'svn:exemutable' "
+ "is not a valid svn: property name")
+ svntest.actions.set_prop('svn:exemutable', 'x', iota_path, force=True)
+
+ # Similar prefix, different prop name
+ svntest.actions.set_prop('svm:exemutable', 'x', iota_path)
+
+ # Similar prefix, same prop name
+ svntest.actions.set_prop('svm:executable', 'x', iota_path,
+ "svn: E195011: 'svm:executable' "
+ "is not a valid svn: property name")
+ svntest.actions.set_prop('svm:executable', 'x', iota_path, force=True)
+
+ # Different prefix, same prop name
+ svntest.actions.set_prop('tsvn:executable', 'x', iota_path)
+
+ # Property name is too different to matter
+ svntest.actions.set_prop('svn:foobar', 'x', iota_path,
+ "svn: E195011: 'svn:foobar'"
+ " is not a valid svn: property name;"
+ " use '--force' to set it")
+
+@Issue(3231)
+def peg_rev_base_working(sbox):
+ """peg rev @BASE, peg rev @WORKING"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # set up a local prop mod
+ svntest.actions.set_prop('ordinal', 'ninth\n', sbox.ospath('iota'))
+ sbox.simple_commit(message='r2')
+ svntest.actions.set_prop('cardinal', 'nine\n', sbox.ospath('iota'))
+ svntest.actions.run_and_verify_svn(['ninth\n'], [],
+ 'propget', '--no-newline', 'ordinal',
+ sbox.ospath('iota') + '@BASE')
+
+@Issue(4415)
+def xml_unsafe_author(sbox):
+ "svn:author with XML unsafe chars"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+
+ # client sends svn:author (via PROPPATCH for DAV)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', '--revprop', '-r', '1',
+ 'svn:author', 'foo\bbar', wc_dir)
+
+ # mod_dav_svn sends svn:author (via REPORT for DAV)
+ sbox.simple_update(revision=0)
+ sbox.simple_update(revision=1)
+ expected_info = [{
+ 'Path' : re.escape(wc_dir),
+ 'Repository Root' : sbox.repo_url,
+ 'Repository UUID' : svntest.actions.get_wc_uuid(wc_dir),
+ 'Last Changed Author' : 'foo\bbar',
+ }]
+ svntest.actions.run_and_verify_info(expected_info, wc_dir)
+
+ # mod_dav_svn sends svn:author (via PROPFIND for DAV)
+ # Since r1553367 this works correctly on ra_serf, since we now request
+ # a single property value which skips creating the creator-displayname property
+ svntest.actions.run_and_verify_svn(['foo\bbar'], [],
+ 'propget', '--revprop', '-r', '1',
+ 'svn:author', '--no-newline', wc_dir)
+
+ # Ensure a stable date
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', '--revprop', '-r', '1',
+ 'svn:date', '2015-01-01T00:00:00.0Z', wc_dir)
+
+ # But a proplist of this property value still fails via DAV.
+ expected_output = svntest.verify.UnorderedOutput([
+ 'Unversioned properties on revision 1:\n',
+ ' svn:author\n',
+ ' foo\bbar\n',
+ ' svn:date\n',
+ ' 2015-01-01T00:00:00.0Z\n',
+ ' svn:log\n',
+ ' Log message for revision 1.\n'
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'proplist', '--revprop', '-r', '1', '-v',
+ wc_dir)
+
+@Issue(4415)
+def xml_unsafe_author2(sbox):
+ "svn:author with XML unsafe chars 2"
+
+ sbox.build(create_wc = False)
+ repo_url = sbox.repo_url
+
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+
+ # client sends svn:author (via PROPPATCH for DAV)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', '--revprop', '-r', '1',
+ 'svn:author', 'foo\bbar', repo_url)
+
+ # Ensure a stable date
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', '--revprop', '-r', '1',
+ 'svn:date', '2000-01-01T12:00:00.0Z',
+ repo_url)
+
+ if svntest.main.is_ra_type_dav():
+ # This receives the filtered author (but that is better than an Xml fail)
+ expected_author = 'foobar'
+ else:
+ expected_author = 'foo\bbar'
+
+ # Use svn ls in --xml mode to test locale independent output.
+ expected_output = [
+ '<?xml version="1.0" encoding="UTF-8"?>\n',
+ '<lists>\n',
+ '<list\n',
+ ' path="%s">\n' % sbox.repo_url,
+ '<entry\n',
+ ' kind="dir">\n',
+ '<name>A</name>\n',
+ '<commit\n',
+ ' revision="1">\n',
+ '<author>%s</author>\n' % expected_author,
+ '<date>2000-01-01T12:00:00.000000Z</date>\n',
+ '</commit>\n',
+ '</entry>\n',
+ '<entry\n',
+ ' kind="file">\n',
+ '<name>iota</name>\n',
+ '<size>25</size>\n',
+ '<commit\n',
+ ' revision="1">\n',
+ '<author>%s</author>\n' % expected_author,
+ '<date>2000-01-01T12:00:00.000000Z</date>\n',
+ '</commit>\n',
+ '</entry>\n',
+ '</list>\n',
+ '</lists>\n'
+ ]
+
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'ls', '--xml', repo_url)
+
+ expected_info = [{
+ 'Repository Root' : sbox.repo_url,
+ 'Last Changed Author' : expected_author,
+ }]
+ svntest.actions.run_and_verify_info(expected_info, repo_url)
+
+def dir_prop_conflict_details(sbox):
+ "verify dir property conflict details"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Apply some changes
+ sbox.simple_propset('svn:mergeinfo', '/B:1', 'A')
+ sbox.simple_propset('my-prop', 'my-val', 'A')
+ sbox.simple_commit()
+
+ # Revert to r1
+ sbox.simple_update('', revision=1)
+
+ # Apply some incompatible changes
+ sbox.simple_propset('svn:mergeinfo', '/C:1', 'A')
+ sbox.simple_propset('my-prop', 'other-val', 'A')
+
+ # This should report out of date because there are incompatible property
+ # changes that can't be merged on the server
+ svntest.actions.run_and_verify_commit(wc_dir,
+ None,
+ None,
+ '.*[Oo]ut of date.*')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(status=' C'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('A', status=' C')
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ None,
+ expected_status,
+ check_props=True)
+ expected_info = {
+ 'Conflicted Properties' : 'my-prop',
+ 'Conflict Details': re.escape('incoming dir edit upon update'
+ + ' Source left: (dir) ^/A@1'
+ + ' Source right: (dir) ^/A@2')
+ }
+ svntest.actions.run_and_verify_info([expected_info], sbox.path('A'))
+
+
+def iprops_list_abspath(sbox):
+ "test listing iprops via abspath"
+
+ sbox.build()
+
+ sbox.simple_propset('im', 'root', '')
+ sbox.simple_commit()
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'switch', '^/A/D', sbox.ospath(''),
+ '--ignore-ancestry')
+
+ sbox.simple_propset('im', 'GammA', 'gamma')
+
+ expected_output = [
+ 'Inherited properties on \'%s\',\n' % sbox.ospath(''),
+ 'from \'%s\':\n' % sbox.repo_url,
+ ' im\n',
+ ' root\n',
+ 'Properties on \'%s\':\n' % sbox.ospath('gamma'),
+ ' im\n',
+ ' GammA\n'
+ ]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'pl', '-R',
+ '--show-inherited-props', '-v',
+ sbox.ospath(''))
+
+ expected_output = [
+ 'Inherited properties on \'%s\',\n' % os.path.abspath(sbox.ospath('')),
+ 'from \'%s\':\n' % sbox.repo_url,
+ ' im\n',
+ ' root\n',
+ 'Properties on \'%s\':\n' % os.path.abspath(sbox.ospath('gamma')),
+ ' im\n',
+ ' GammA\n'
+ ]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'pl', '-R',
+ '--show-inherited-props', '-v',
+ os.path.abspath(sbox.ospath('')))
+
+def wc_propop_on_url(sbox):
+ "perform wc specific operations on url"
+
+ sbox.build(create_wc = False)
+
+ svntest.actions.run_and_verify_svn(None, '.*E195000:.*path',
+ 'pl', '-r', 'PREV',
+ sbox.repo_url)
+
+ svntest.actions.run_and_verify_svn(None, '.*E195000:.*path',
+ 'pg', 'my:Q', '-r', 'PREV',
+ sbox.repo_url)
+
+def prop_conflict_root(sbox):
+ """property conflict on wc root"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_propset('propname', 'propval1', '')
+ sbox.simple_commit()
+ sbox.simple_propset('propname', 'propval2', '')
+ sbox.simple_commit()
+ sbox.simple_update(revision=2)
+ sbox.simple_propset('propname', 'propvalconflict', '')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ '' : Item(status=' C'),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+ expected_status.tweak('', status=' C')
+ extra_files = ['dir_conflicts.prej']
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ extra_files=extra_files)
+
+########################################################################
+# Run the tests
+
+# list all tests here, starting with None:
+test_list = [ None,
+ make_local_props,
+ commit_props,
+ update_props,
+ downdate_props,
+ remove_props,
+ update_conflict_props,
+ commit_conflict_dirprops,
+ commit_replacement_props,
+ revert_replacement_props,
+ inappropriate_props,
+ copy_inherits_special_props,
+ revprop_change,
+ prop_value_conversions,
+ binary_props,
+ recursive_base_wc_ops,
+ url_props_ops,
+ removal_schedule_added_props,
+ update_props_on_wc_root,
+ props_on_replaced_file,
+ depthy_wc_proplist,
+ depthy_url_proplist,
+ invalid_propnames,
+ perms_on_symlink,
+ remove_custom_ns_props,
+ props_over_time,
+ invalid_propvalues,
+ same_replacement_props,
+ added_moved_file,
+ delete_nonexistent_property,
+ post_revprop_change_hook,
+ rm_of_replaced_file,
+ prop_reject_grind,
+ obstructed_subdirs,
+ atomic_over_ra,
+ propget_redirection,
+ file_matching_dir_prop_reject,
+ pristine_props_listed,
+ inheritable_ignores,
+ almost_known_prop_names,
+ peg_rev_base_working,
+ xml_unsafe_author,
+ xml_unsafe_author2,
+ dir_prop_conflict_details,
+ iprops_list_abspath,
+ wc_propop_on_url,
+ prop_conflict_root,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/redirect_tests.py b/subversion/tests/cmdline/redirect_tests.py
new file mode 100755
index 0000000..e8196a3
--- /dev/null
+++ b/subversion/tests/cmdline/redirect_tests.py
@@ -0,0 +1,283 @@
+#!/usr/bin/env python
+#
+# redirect_tests.py: Test ra_dav handling of server-side redirects
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os, re
+
+# Our testing module
+import svntest
+
+# (abbreviations)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+
+# Regular expression which matches the redirection notification
+redirect_regex = re.compile(r"^Redirecting to URL '.*'")
+
+# Generic UUID-matching regular expression
+uuid_regex = re.compile(r"[a-fA-F0-9]{8}(-[a-fA-F0-9]{4}){3}-[a-fA-F0-9]{12}")
+
+
+def verify_url(wc_path, url, wc_path_is_file=False):
+ # check that we have a Repository Root and Repository UUID
+ name = os.path.basename(wc_path)
+ expected = {'Path' : re.escape(wc_path),
+ 'URL' : url,
+ 'Repository Root' : '.*',
+ 'Revision' : '.*',
+ 'Node Kind' : 'directory',
+ 'Repository UUID' : uuid_regex,
+ }
+ if wc_path_is_file:
+ expected.update({'Name' : name,
+ 'Node Kind' : 'file',
+ })
+ svntest.actions.run_and_verify_info([expected], wc_path)
+
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+#----------------------------------------------------------------------
+@SkipUnless(svntest.main.is_ra_type_dav)
+def temporary_redirect(sbox):
+ "temporary redirect should error out"
+
+ sbox.build(create_wc=False)
+ wc_dir = sbox.add_wc_path("my")
+ co_url = sbox.redirected_root_url(temporary=True)
+
+ # Try various actions against the repository, expecting an error
+ # that indicates that some relocation has occurred.
+ exit_code, out, err = svntest.main.run_svn('.*moved temporarily.*',
+ 'info', co_url)
+ exit_code, out, err = svntest.main.run_svn('.*moved temporarily.*',
+ 'co', co_url, wc_dir)
+ exit_code, out, err = svntest.main.run_svn('.*moved temporarily.*',
+ 'mkdir', '-m', 'MKDIR',
+ co_url + '/newdir')
+ exit_code, out, err = svntest.main.run_svn('.*moved temporarily.*',
+ 'delete', '-m', 'DELETE',
+ co_url + '/iota')
+
+#----------------------------------------------------------------------
+@SkipUnless(svntest.main.is_ra_type_dav)
+def redirected_checkout(sbox):
+ "redirected checkout"
+
+ sbox.build(create_wc=False)
+ wc_dir = sbox.add_wc_path("my")
+ co_url = sbox.redirected_root_url()
+
+ # Checkout the working copy via its redirect URL
+ exit_code, out, err = svntest.main.run_svn(None, 'co', co_url, wc_dir)
+ if err:
+ raise svntest.Failure
+ if not redirect_regex.match(out[0]):
+ raise svntest.Failure
+
+ # Verify that we have the expected URL.
+ verify_url(wc_dir, sbox.repo_url)
+
+#----------------------------------------------------------------------
+@SkipUnless(svntest.main.is_ra_type_dav)
+def redirected_update(sbox):
+ "redirected update"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ relocate_url = sbox.redirected_root_url()
+
+ # Relocate (by cheating) the working copy to the redirect URL. When
+ # we then update, we'll expect to find ourselves automagically back
+ # to the original URL. (This is because we can't easily introduce a
+ # redirect to the Apache configuration from the test suite here.)
+ svntest.actions.no_relocate_validation()
+ exit_code, out, err = svntest.main.run_svn(None, 'sw', '--relocate',
+ sbox.repo_url, relocate_url,
+ wc_dir)
+ svntest.actions.do_relocate_validation()
+
+ # Now update the working copy.
+ exit_code, out, err = svntest.main.run_svn(None, 'up', wc_dir)
+ if err:
+ raise svntest.Failure
+ if not re.match("^Updating '.*':", out[0]):
+ raise svntest.Failure
+ if not redirect_regex.match(out[1]):
+ raise svntest.Failure
+
+ # Verify that we have the expected URL.
+ verify_url(wc_dir, sbox.repo_url)
+
+#----------------------------------------------------------------------
+@SkipUnless(svntest.main.is_ra_type_dav)
+def redirected_nonroot_update(sbox):
+ "redirected update of non-repos-root wc"
+
+ sbox.build(create_wc=False)
+ wc_dir = sbox.wc_dir
+ checkout_url = sbox.repo_url + '/A'
+ relocate_url = sbox.redirected_root_url() + '/A'
+
+ # Checkout a subdir of the repository root.
+ exit_code, out, err = svntest.main.run_svn(None, 'co',
+ checkout_url, wc_dir)
+ if err:
+ raise svntest.Failure
+
+ # Relocate (by cheating) the working copy to the redirect URL. When
+ # we then update, we'll expect to find ourselves automagically back
+ # to the original URL. (This is because we can't easily introduce a
+ # redirect to the Apache configuration from the test suite here.)
+ svntest.actions.no_relocate_validation()
+ exit_code, out, err = svntest.main.run_svn(None, 'sw', '--relocate',
+ checkout_url, relocate_url,
+ wc_dir)
+ svntest.actions.do_relocate_validation()
+
+ # Now update the working copy.
+ exit_code, out, err = svntest.main.run_svn(None, 'up', wc_dir)
+ if err:
+ raise svntest.Failure
+ if not re.match("^Updating '.*':", out[0]):
+ raise svntest.Failure
+ if not redirect_regex.match(out[1]):
+ raise svntest.Failure
+
+ # Verify that we have the expected URL.
+ verify_url(wc_dir, checkout_url)
+
+#----------------------------------------------------------------------
+@SkipUnless(svntest.main.is_ra_type_dav)
+def redirected_externals(sbox):
+ "redirected externals"
+
+ sbox.build()
+
+ sbox.simple_propset('svn:externals',
+ '^/A/B/E/alpha fileX\n'
+ '^/A/B/F dirX',
+ 'A/C')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ wc_dir = sbox.add_wc_path("my")
+ co_url = sbox.redirected_root_url()
+ exit_code, out, err = svntest.main.run_svn(None, 'co', co_url, wc_dir)
+ if err:
+ raise svntest.Failure
+ if not redirect_regex.match(out[0]):
+ raise svntest.Failure
+
+ verify_url(wc_dir, sbox.repo_url)
+ verify_url(sbox.ospath('A/C/fileX'), sbox.repo_url + '/A/B/E/alpha',
+ wc_path_is_file=True)
+ verify_url(sbox.ospath('A/C/dirX'), sbox.repo_url + '/A/B/F')
+
+#----------------------------------------------------------------------
+@SkipUnless(svntest.main.is_ra_type_dav)
+def redirected_copy(sbox):
+ "redirected copy"
+
+ sbox.build(create_wc=False)
+
+ # E170011 = SVN_ERR_RA_SESSION_URL_MISMATCH
+ expected_error = "svn: E170011: Repository moved permanently"
+
+ # This tests the actual copy handling
+ svntest.actions.run_and_verify_svn(None, expected_error,
+ 'cp', '-m', 'failed copy',
+ sbox.redirected_root_url() + '/A',
+ sbox.redirected_root_url() + '/A_copied')
+
+ # This tests the cmdline handling of '^/copy-of-A'
+ svntest.actions.run_and_verify_svn(None, expected_error,
+ 'cp', '-m', 'failed copy',
+ sbox.redirected_root_url() + '/A',
+ '^/copy-of-A')
+
+ # E170011 = SVN_ERR_RA_SESSION_URL_MISMATCH
+ expected_error = "svn: E170011: Repository moved temporarily"
+
+ # This tests the actual copy handling
+ svntest.actions.run_and_verify_svn(None, expected_error,
+ 'cp', '-m', 'failed copy',
+ sbox.redirected_root_url(temporary=True) + '/A',
+ sbox.redirected_root_url(temporary=True) + '/A_copied')
+
+ # This tests the cmdline handling of '^/copy-of-A'
+ svntest.actions.run_and_verify_svn(None, expected_error,
+ 'cp', '-m', 'failed copy',
+ sbox.redirected_root_url(temporary=True) + '/A',
+ '^/copy-of-A')
+#----------------------------------------------------------------------
+@SkipUnless(svntest.main.is_ra_type_dav)
+def redirected_commands(sbox):
+ "redirected commands"
+
+ sbox.build(create_wc=False)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'log',
+ sbox.redirected_root_url() + '/A')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ls',
+ sbox.redirected_root_url() + '/A')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'info',
+ sbox.redirected_root_url() + '/A')
+
+#----------------------------------------------------------------------
+
+########################################################################
+# Run the tests
+
+# list all tests here, starting with None:
+test_list = [ None,
+ temporary_redirect,
+ redirected_checkout,
+ redirected_update,
+ redirected_nonroot_update,
+ redirected_externals,
+ redirected_copy,
+ redirected_commands,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/relocate_tests.py b/subversion/tests/cmdline/relocate_tests.py
new file mode 100755
index 0000000..063c252
--- /dev/null
+++ b/subversion/tests/cmdline/relocate_tests.py
@@ -0,0 +1,441 @@
+#!/usr/bin/env python
+#
+# switch_tests.py: testing `svn switch'.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import shutil, re, os
+
+# Our testing module
+import svntest
+from svntest import verify, actions, main
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+from svntest.main import SVN_PROP_MERGEINFO, server_has_mergeinfo
+from externals_tests import change_external
+from svntest.deeptrees import do_routine_switching
+
+#----------------------------------------------------------------------
+
+def relocate_deleted_missing_copied(sbox):
+ "relocate with deleted, missing and copied entries"
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Delete A/mu to create a deleted entry for mu in A/.svn/entries
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+ svntest.actions.run_and_verify_svn(None, [], 'rm', mu_path)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/mu')
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Deleting'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Remove A/B/F to create a missing entry
+ svntest.main.safe_rmtree(os.path.join(wc_dir, 'A', 'B', 'F'))
+
+ # Copy A/D to A/D2
+ D_path = os.path.join(wc_dir, 'A', 'D')
+ D2_path = os.path.join(wc_dir, 'A', 'D2')
+ svntest.actions.run_and_verify_svn(None, [], 'copy',
+ D_path, D2_path)
+ # Delete within the copy
+ D2G_path = os.path.join(wc_dir, 'A', 'D2', 'G')
+ svntest.actions.run_and_verify_svn(None, [], 'rm', D2G_path)
+
+ expected_status.add({
+ 'A/D2' : Item(status='A ', wc_rev='-', copied='+'),
+ 'A/D2/gamma' : Item(status=' ', wc_rev='-', copied='+'),
+ 'A/D2/G' : Item(status='D ', wc_rev='-', copied='+'),
+ 'A/D2/G/pi' : Item(status='D ', wc_rev='-', copied='+'),
+ 'A/D2/G/rho' : Item(status='D ', wc_rev='-', copied='+'),
+ 'A/D2/G/tau' : Item(status='D ', wc_rev='-', copied='+'),
+ 'A/D2/H' : Item(status=' ', wc_rev='-', copied='+'),
+ 'A/D2/H/chi' : Item(status=' ', wc_rev='-', copied='+'),
+ 'A/D2/H/omega' : Item(status=' ', wc_rev='-', copied='+'),
+ 'A/D2/H/psi' : Item(status=' ', wc_rev='-', copied='+'),
+ })
+ expected_status.tweak('A/B/F', status='! ', wc_rev='1')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Relocate
+ repo_dir = sbox.repo_dir
+ repo_url = sbox.repo_url
+ other_repo_dir, other_repo_url = sbox.add_repo_path('other')
+ svntest.main.copy_repos(repo_dir, other_repo_dir, 2, 0)
+ svntest.main.safe_rmtree(repo_dir, 1)
+ svntest.actions.run_and_verify_svn(None, [], 'switch', '--relocate',
+ repo_url, other_repo_url, wc_dir)
+
+ # Deleted and missing entries should be preserved, so update should
+ # show only A/B/F being reinstated
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/F' : Item(verb='Restored'),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/mu')
+ expected_disk.add({
+ 'A/D2' : Item(),
+ 'A/D2/gamma' : Item("This is the file 'gamma'.\n"),
+ 'A/D2/H' : Item(),
+ 'A/D2/H/chi' : Item("This is the file 'chi'.\n"),
+ 'A/D2/H/omega' : Item("This is the file 'omega'.\n"),
+ 'A/D2/H/psi' : Item("This is the file 'psi'.\n"),
+ })
+
+ expected_status.add({
+ 'A/B/F' : Item(status=' ', wc_rev='2'),
+ })
+ expected_status.tweak(wc_rev=2)
+ expected_status.tweak('A/D2', 'A/D2/gamma',
+ 'A/D2/H', 'A/D2/H/chi', 'A/D2/H/omega', 'A/D2/H/psi',
+ wc_rev='-')
+ expected_status.tweak('A/D2/G', 'A/D2/G/pi', 'A/D2/G/rho', 'A/D2/G/tau',
+ copied='+', wc_rev='-')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+ # Commit to verify that copyfrom URLs have been relocated
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D2' : Item(verb='Adding'),
+ 'A/D2/G' : Item(verb='Deleting'),
+ })
+ expected_status.tweak('A/D2', 'A/D2/gamma',
+ 'A/D2/H', 'A/D2/H/chi', 'A/D2/H/omega', 'A/D2/H/psi',
+ status=' ', wc_rev='3', copied=None)
+ expected_status.remove('A/D2/G', 'A/D2/G/pi', 'A/D2/G/rho', 'A/D2/G/tau')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+
+#----------------------------------------------------------------------
+
+@Issue(2380)
+def relocate_beyond_repos_root(sbox):
+ "relocate with prefixes longer than repo root"
+ sbox.build(read_only=True, create_wc=False)
+
+ wc_backup = sbox.add_wc_path('backup')
+
+ wc_dir = sbox.wc_dir
+ repo_dir = sbox.repo_dir
+ repo_url = sbox.repo_url
+ other_repo_dir, other_repo_url = sbox.add_repo_path('other')
+ A_url = repo_url + "/A"
+ A_wc_dir = wc_dir
+ other_A_url = other_repo_url + "/A"
+ other_B_url = other_repo_url + "/B"
+
+ svntest.main.safe_rmtree(wc_dir, 1)
+ svntest.actions.run_and_verify_svn(None, [], 'checkout',
+ repo_url + '/A', wc_dir)
+
+ svntest.main.copy_repos(repo_dir, other_repo_dir, 1, 0)
+
+ # A relocate that changes the repo path part of the URL shouldn't work.
+ # This tests for issue #2380.
+ svntest.actions.run_and_verify_svn(None,
+ ".*Invalid relocation destination.*",
+ 'relocate',
+ A_url, other_B_url, A_wc_dir)
+
+ # Another way of trying to change the fs path, leading to an invalid
+ # repository root.
+ svntest.actions.run_and_verify_svn(None,
+ ".*is not the root.*",
+ 'relocate',
+ repo_url, other_B_url, A_wc_dir)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'relocate',
+ A_url, other_A_url, A_wc_dir)
+
+ # Check that we can contact the repository, meaning that the
+ # relocate actually changed the URI. Escape the expected URI to
+ # avoid problems from any regex meta-characters it may contain
+ # (e.g. '+').
+ expected_infos = [
+ { 'URL' : re.escape(other_A_url) + '$',
+ 'Path' : '.+',
+ 'Repository UUID' : '.+',
+ 'Revision' : '.+',
+ 'Node Kind' : '.+',
+ 'Last Changed Date' : '.+' },
+ ]
+ svntest.actions.run_and_verify_info(expected_infos, A_wc_dir, '-rHEAD')
+
+#----------------------------------------------------------------------
+# Issue 2578.
+def relocate_and_propset(sbox):
+ "out of date propset should fail after a relocate"
+
+ # Create virgin repos and working copy
+ svntest.main.safe_rmtree(sbox.repo_dir, 1)
+ svntest.main.create_repos(sbox.repo_dir)
+ svntest.actions.guarantee_greek_repository(
+ sbox.repo_dir, svntest.main.options.server_minor_version)
+
+ wc_dir = sbox.wc_dir
+ repo_dir = sbox.repo_dir
+ repo_url = sbox.repo_url
+
+ # checkout
+ svntest.main.safe_rmtree(wc_dir, 1)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ # Relocate
+ other_repo_dir, other_repo_url = sbox.add_repo_path('other')
+ svntest.main.copy_repos(repo_dir, other_repo_dir, 1, 0)
+ svntest.main.safe_rmtree(repo_dir, 1)
+ svntest.actions.run_and_verify_svn(None, [], 'relocate',
+ repo_url, other_repo_url, wc_dir)
+
+ # Remove gamma from the working copy.
+ D_path = os.path.join(wc_dir, 'A', 'D')
+ gamma_path = os.path.join(wc_dir, 'A', 'D', 'gamma')
+ svntest.main.run_svn(None, 'rm', gamma_path)
+
+ # Create expected commit output.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/gamma' : Item(verb='Deleting'),
+ })
+
+ # After committing, status should show no sign of gamma.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/D/gamma')
+
+ # Commit the deletion of gamma and verify.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Now gamma should be marked as `deleted' under the hood, at
+ # revision 2. Meanwhile, A/D is still lagging at revision 1.
+
+ # Make a propchange on A/D
+ svntest.main.run_svn(None, 'ps', 'foo', 'bar', D_path)
+
+ # Commit and *expect* a repository Merge failure:
+ svntest.actions.run_and_verify_commit(wc_dir,
+ None,
+ None,
+ ".*[Oo]ut of date.*")
+
+#----------------------------------------------------------------------
+
+def single_file_relocate(sbox):
+ "relocate a single file"
+
+ # Create virgin repos and working copy
+ svntest.main.safe_rmtree(sbox.repo_dir, 1)
+ svntest.actions.guarantee_greek_repository(
+ sbox.repo_dir, svntest.main.options.server_minor_version)
+
+ wc_dir = sbox.wc_dir
+ iota_path = os.path.join(sbox.wc_dir, 'iota')
+ repo_dir = sbox.repo_dir
+ repo_url = sbox.repo_url
+ iota_url = repo_url + '/iota'
+ greek_dump_dir = sbox.add_wc_path('greek-dump')
+
+ # checkout
+ svntest.main.safe_rmtree(wc_dir, 1)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ repo_url, wc_dir)
+
+ # Relocate
+ other_repo_dir, other_repo_url = sbox.add_repo_path('other')
+ other_iota_url = other_repo_url + '/iota'
+ svntest.main.copy_repos(repo_dir, other_repo_dir, 1, 0)
+ svntest.main.safe_rmtree(repo_dir, 1)
+ svntest.actions.run_and_verify_svn(None,
+ ".*Cannot relocate.*",
+ 'relocate',
+ iota_url, other_iota_url, iota_path)
+
+#----------------------------------------------------------------------
+
+def relocate_with_switched_children(sbox):
+ "relocate a directory with switched children"
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Setup (and verify) some switched things
+ do_routine_switching(sbox.wc_dir, sbox.repo_url, False)
+
+ # Relocate
+ repo_dir = sbox.repo_dir
+ repo_url = sbox.repo_url
+ other_repo_dir, other_repo_url = sbox.add_repo_path('other')
+ svntest.main.copy_repos(repo_dir, other_repo_dir, 1, 0)
+ svntest.main.safe_rmtree(repo_dir, 1)
+
+ # Do the switch and check the results in three ways.
+ svntest.actions.run_and_verify_svn(None, [], 'relocate',
+ repo_url, other_repo_url, wc_dir)
+
+ # Attempt to commit changes and examine results
+ expected_output = svntest.wc.State(wc_dir, { })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B', 'iota',
+ switched='S')
+ expected_status.remove('A/B/E', 'A/B/F', 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/B/lambda')
+ expected_status.add({
+ 'A/B/pi' : Item(status=' ', wc_rev='1'),
+ 'A/B/rho' : Item(status=' ', wc_rev='1'),
+ 'A/B/tau' : Item(status=' ', wc_rev='1'),
+ })
+
+ # This won't actually do a commit, because nothing should be modified.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+
+ # Check the URLs of various nodes.
+ info_output = {
+ wc_dir: '.*.other$',
+ os.path.join(wc_dir, 'iota'): '.*.other/A/D/gamma$',
+ os.path.join(wc_dir, 'A', 'B'): '.*.other/A/D/G$',
+ os.path.join(wc_dir, 'A', 'B', 'pi'): '.*.other/A/D/G/pi$',
+ }
+
+ for path, pattern in info_output.items():
+ expected_info = { 'URL' : pattern }
+ svntest.actions.run_and_verify_info([expected_info], path)
+
+#----------------------------------------------------------------------
+
+
+### regression test for issue #3597
+@Issue(3597)
+def relocate_with_relative_externals(sbox):
+ "relocate a directory containing relative externals"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_dir = sbox.repo_dir
+ repo_url = sbox.repo_url
+
+ # Add a relative external.
+ change_external(os.path.join(wc_dir, 'A', 'B'),
+ "^/A/D/G G-ext\n../D/H H-ext", commit=True)
+ svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir)
+
+ # A second wc not at the repository root
+ other_wc = sbox.add_wc_path('other')
+ svntest.main.safe_rmtree(other_wc, 1)
+ svntest.actions.run_and_verify_svn(None, [], 'checkout',
+ repo_url + '/A/B', other_wc)
+ # Move our repository to another location.
+ other_repo_dir, other_repo_url = sbox.add_repo_path('other')
+ svntest.main.copy_repos(repo_dir, other_repo_dir, 2, 0)
+ svntest.main.safe_rmtree(repo_dir, 1)
+
+ # Now relocate our working copy.
+ svntest.actions.run_and_verify_svn(None, [], 'relocate',
+ repo_url, other_repo_url, wc_dir)
+
+ # Check the URLs of the externals -- were they updated to point to the
+ # .other repository URL?
+ svntest.actions.run_and_verify_info([{ 'URL' : '.*.other/A/D/G$' }],
+ os.path.join(wc_dir, 'A', 'B', 'G-ext'))
+ svntest.actions.run_and_verify_info([{ 'URL' : '.*.other/A/D/H$' }],
+ os.path.join(wc_dir, 'A', 'B', 'H-ext'))
+
+ # Relocate with prefix too long to be valid for externals.
+ svntest.actions.run_and_verify_svn(None, [], 'relocate',
+ repo_url + '/A/B',
+ other_repo_url + '/A/B',
+ other_wc)
+
+ svntest.actions.run_and_verify_info([{ 'URL' : '.*.other/A/D/G$' }],
+ os.path.join(other_wc, 'G-ext'))
+ svntest.actions.run_and_verify_info([{ 'URL' : '.*.other/A/D/H$' }],
+ os.path.join(other_wc, 'H-ext'))
+
+def prefix_partial_component(sbox):
+ """prefix with a partial component"""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_dir = sbox.repo_dir
+ repo_url = sbox.repo_url
+ other1_repo_dir, other1_repo_url = sbox.add_repo_path('xxxother')
+ other2_repo_dir, other2_repo_url = sbox.add_repo_path('yyyother')
+
+ # Relocate to 'xxxother'.
+ svntest.main.copy_repos(repo_dir, other1_repo_dir, 1, 0)
+ svntest.main.safe_rmtree(repo_dir, 1)
+ svntest.actions.run_and_verify_svn(None, [], 'relocate',
+ repo_url, other1_repo_url, wc_dir)
+ svntest.actions.run_and_verify_info([{ 'URL' : '.*.xxxother$' }],
+ wc_dir)
+
+ # Now relocate from 'xxx' to 'yyy' omitting 'other'.
+ svntest.main.copy_repos(other1_repo_dir, other2_repo_dir, 1, 0)
+ svntest.main.safe_rmtree(other1_repo_url, 1)
+ svntest.actions.run_and_verify_svn(None, [], 'relocate',
+ other1_repo_url[:-5],
+ other2_repo_url[:-5],
+ wc_dir)
+ svntest.actions.run_and_verify_info([{ 'URL' : '.*.yyyother$' }],
+ wc_dir)
+
+
+########################################################################
+# Run the tests
+
+# list all tests here, starting with None:
+test_list = [ None,
+ relocate_deleted_missing_copied,
+ relocate_beyond_repos_root,
+ relocate_and_propset,
+ single_file_relocate,
+ relocate_with_switched_children,
+ relocate_with_relative_externals,
+ prefix_partial_component,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/resolve_tests.py b/subversion/tests/cmdline/resolve_tests.py
new file mode 100755
index 0000000..699b32f
--- /dev/null
+++ b/subversion/tests/cmdline/resolve_tests.py
@@ -0,0 +1,690 @@
+#!/usr/bin/env python
+#
+# resolve_tests.py: testing 'svn resolve'
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import shutil, sys, re, os, stat
+import time
+
+# Our testing module
+import svntest
+from svntest import wc
+
+# (abbreviation)
+Item = wc.StateItem
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+
+from svntest.mergetrees import set_up_branch
+from svntest.mergetrees import expected_merge_output
+
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+#----------------------------------------------------------------------
+# 'svn resolve --accept [ base | mine-full | theirs-full ]' was segfaulting
+# on 1.6.x. Prior to this test, the bug was only caught by the Ruby binding
+# tests, see http://svn.haxx.se/dev/archive-2010-01/0088.shtml.
+def automatic_conflict_resolution(sbox):
+ "resolve -R --accept [base | mf | tf]"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ A_COPY_path = os.path.join(wc_dir, "A_COPY")
+ psi_COPY_path = os.path.join(wc_dir, "A_COPY", "D", "H", "psi")
+
+ # Branch A to A_COPY in r2, then make some changes under 'A' in r3-6.
+ wc_disk, wc_status = set_up_branch(sbox)
+
+ # Make a change on the A_COPY branch such that a subsequent merge
+ # conflicts.
+ svntest.main.file_write(psi_COPY_path, "Branch content.\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'commit', '-m', 'log msg', wc_dir)
+ def do_text_conflicting_merge():
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '--recursive', A_COPY_path)
+ svntest.actions.run_and_verify_svn(
+ expected_merge_output([[3]], [
+ "C %s\n" % psi_COPY_path,
+ " U %s\n" % A_COPY_path],
+ target=A_COPY_path, text_conflicts=1),
+ [], 'merge', '-c3', '--allow-mixed-revisions',
+ sbox.repo_url + '/A',
+ A_COPY_path)
+
+ # Test 'svn resolve -R --accept base'
+ do_text_conflicting_merge()
+ svntest.actions.run_and_verify_resolve([psi_COPY_path],
+ '-R', '--accept', 'base',
+ A_COPY_path)
+ wc_disk.tweak('A_COPY/D/H/psi', contents="This is the file 'psi'.\n")
+ svntest.actions.verify_disk(wc_dir, wc_disk)
+
+ # Test 'svn resolve -R --accept mine-full'
+ do_text_conflicting_merge()
+ svntest.actions.run_and_verify_resolve([psi_COPY_path],
+ '-R', '--accept', 'mine-full',
+ A_COPY_path)
+ wc_disk.tweak('A_COPY/D/H/psi', contents="Branch content.\n")
+ svntest.actions.verify_disk(wc_dir, wc_disk)
+
+ # Test 'svn resolve -R --accept theirs-full'
+ do_text_conflicting_merge()
+ svntest.actions.run_and_verify_resolve([psi_COPY_path],
+ '-R', '--accept', 'tf',
+ A_COPY_path)
+ wc_disk.tweak('A_COPY/D/H/psi', contents="New content")
+ svntest.actions.verify_disk(wc_dir, wc_disk)
+
+#----------------------------------------------------------------------
+# Test for issue #3707 'property conflicts not handled correctly by
+# svn resolve'.
+@Issue(3707)
+def prop_conflict_resolution(sbox):
+ "resolving prop conflicts"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ iota_path = os.path.join(wc_dir, "iota")
+ mu_path = os.path.join(wc_dir, "A", "mu")
+ gamma_path = os.path.join(wc_dir, "A", "D", "gamma")
+ psi_path = os.path.join(wc_dir, "A", "D", "H", "psi")
+
+ # r2 - Set property 'propname:propval' on iota, A/mu, and A/D/gamma.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'propname', 'propval',
+ iota_path, mu_path, gamma_path)
+ svntest.actions.run_and_verify_svn(None, [], 'commit',
+ '-m', 'create some new properties',
+ wc_dir)
+
+ # r3 - Make some changes to the props from r2:
+ #
+ # iota : Delete property 'propname'
+ # A/mu : Change property 'propname' to 'incoming-conflict'
+ # A/D/gamma : Change property 'propname' to 'incoming-no-conflict'
+ svntest.actions.run_and_verify_svn(None, [],
+ 'pd', 'propname', iota_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'propname', 'incoming-conflict',
+ mu_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'propname', 'incoming-no-conflict',
+ gamma_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'commit', '-m', 'delete a property',
+ wc_dir)
+
+ def do_prop_conflicting_up_and_resolve(resolve_accept,
+ resolved_deleted_prop_val_output,
+ resolved_edited_prop_val_output):
+
+ """Revert the WC, update it to r2, and set the following properties:
+
+ iota : 'propname' = 'local_edit'
+ 'newprop' = 'new-val-no-incoming'
+ A/mu : 'propname' = 'local_edit'
+ A/D/gamma : 'propname' = 'incoming-no-conflict'
+ A/D/H/psi : 'newprop' = 'new-val-no-incoming'
+
+ Update the WC, postponing conflicts, then run svn resolve -R
+ --accept=RESOLVE_ACCEPT.
+
+ Using svn propget, check that the resolution results in the following
+ properties:
+
+ iota : 'propname' = RESOLVED_DELETED_PROP_VAL_OUTPUT
+ 'newprop' = 'new-val-no-incoming'
+ A/mu : 'propname' = RESOLVED_EDITED_PROP_VAL_OUTPUT
+ A/D/gamma : 'propname' = 'incoming-no-conflict'
+ A/D/H/psi : 'newprop' = 'new-val-no-incoming'
+
+ RESOLVED_DELETED_PROP_VAL_OUTPUT and RESOLVED_EDITED_PROP_VAL_OUTPUT
+ both follow the rules for the expected_stdout arg to
+ run_and_verify_svn2()"""
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '--recursive', wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'up', '-r2', wc_dir)
+
+ # Set some properties that will conflict when we update.
+ svntest.actions.run_and_verify_svn(None, [], 'ps',
+ 'propname', 'local_edit',
+ iota_path, mu_path)
+
+ # Set a property that should always merge cleanly with the update.
+ svntest.actions.run_and_verify_svn(None, [], 'ps',
+ 'propname', 'incoming-no-conflict',
+ gamma_path)
+
+ # Set a property that has no update coming.
+ svntest.actions.run_and_verify_svn(None, [], 'ps',
+ 'newprop', 'new-val-no-incoming',
+ psi_path,
+ iota_path)
+
+ # Update, postponing all conflict resolution.
+ svntest.actions.run_and_verify_svn(None, [], 'up',
+ '--accept=postpone', wc_dir)
+ svntest.actions.run_and_verify_resolve([iota_path, mu_path], '-R',
+ '--accept', resolve_accept, wc_dir)
+ if resolved_deleted_prop_val_output:
+ expected_deleted_stderr = []
+ else:
+ expected_deleted_stderr = '.*W200017: Property.*not found'
+
+ svntest.actions.run_and_verify_svn(
+ resolved_deleted_prop_val_output, expected_deleted_stderr,
+ 'pg', 'propname', iota_path)
+ svntest.actions.run_and_verify_svn(
+ ['new-val-no-incoming\n'], [], 'pg', 'newprop', iota_path)
+ svntest.actions.run_and_verify_svn(
+ resolved_edited_prop_val_output, [], 'pg', 'propname', mu_path)
+ svntest.actions.run_and_verify_svn(
+ ['incoming-no-conflict\n'], [], 'pg', 'propname', gamma_path)
+ svntest.actions.run_and_verify_svn(
+ ['new-val-no-incoming\n'], [], 'pg', 'newprop', psi_path)
+
+ # Test how svn resolve deals with prop conflicts and other local
+ # prop changes:
+ #
+ # 1) 'iota' - An incoming prop delete on a local prop modification.
+ # 2) 'A/mu' - An incoming prop edit on a local prop modification.
+  # 3) 'A/D/gamma' - A local, non-conflicted prop edit
+ #
+ # Previously this failed because svn resolve --accept=[theirs-conflict |
+ # theirs-full] removed the conflicts, but didn't install 'their' version
+ # of the conflicted properties.
+ do_prop_conflicting_up_and_resolve('mine-full',
+ ['local_edit\n'],
+ ['local_edit\n'])
+ do_prop_conflicting_up_and_resolve('mine-conflict',
+ ['local_edit\n'],
+ ['local_edit\n'])
+ do_prop_conflicting_up_and_resolve('working',
+ ['local_edit\n'],
+ ['local_edit\n'])
+ do_prop_conflicting_up_and_resolve('theirs-conflict',
+ [], # Prop deleted
+ ['incoming-conflict\n'])
+ do_prop_conflicting_up_and_resolve('theirs-full',
+ [], # Prop deleted
+ ['incoming-conflict\n'])
+
+#----------------------------------------------------------------------
+@SkipUnless(svntest.main.is_posix_os)
+def auto_resolve_executable_file(sbox):
+ "resolve file with executable bit set"
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Mark iota as executable
+ sbox.simple_propset("svn:executable", '*', 'iota')
+ sbox.simple_commit() # r2
+
+ # Make a change to iota in r3
+ svntest.main.file_write(sbox.ospath('iota'), "boo\n")
+ sbox.simple_commit() # r3
+
+ # Update back to r2, and tweak iota to provoke a text conflict
+ sbox.simple_update(revision=2)
+ svntest.main.file_write(sbox.ospath('iota'), "bzzt\n")
+
+ # Get permission bits of iota
+ mode = os.stat(sbox.ospath('iota'))[stat.ST_MODE]
+
+ # Update back to r3, and auto-resolve the text conflict.
+ svntest.main.run_svn(False, 'update', wc_dir, '--accept', 'theirs-full')
+
+ # permission bits of iota should be unaffected
+ if mode != os.stat(sbox.ospath('iota'))[stat.ST_MODE]:
+ raise svntest.Failure
+
+#----------------------------------------------------------------------
+def resolved_on_wc_root(sbox):
+ "resolved on working copy root"
+
+ sbox.build()
+ wc = sbox.wc_dir
+
+ i = os.path.join(wc, 'iota')
+ B = os.path.join(wc, 'A', 'B')
+ g = os.path.join(wc, 'A', 'D', 'gamma')
+
+ # Create some conflicts...
+ # Commit mods
+ svntest.main.file_append(i, "changed iota.\n")
+ svntest.main.file_append(g, "changed gamma.\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'foo-val', B)
+
+ expected_output = svntest.wc.State(wc, {
+ 'iota' : Item(verb='Sending'),
+ 'A/B' : Item(verb='Sending'),
+ 'A/D/gamma' : Item(verb='Sending'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc, 1)
+ expected_status.tweak('iota', 'A/B', 'A/D/gamma', wc_rev = 2)
+
+ svntest.actions.run_and_verify_commit(wc,
+ expected_output,
+ expected_status)
+
+ # Go back to rev 1
+ expected_output = svntest.wc.State(wc, {
+ 'iota' : Item(status='U '),
+ 'A/B' : Item(status=' U'),
+ 'A/D/gamma' : Item(status='U '),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc, 1)
+ expected_disk = svntest.main.greek_state.copy()
+ svntest.actions.run_and_verify_update(wc,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '-r1', wc)
+
+ # Deletions so that the item becomes unversioned and
+ # will have a tree-conflict upon update.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', i, B, g)
+
+ # Update so that conflicts appear
+ expected_output = svntest.wc.State(wc, {
+ 'iota' : Item(status=' ', treeconflict='C'),
+ 'A/B' : Item(status=' ', treeconflict='C'),
+ 'A/D/gamma' : Item(status=' ', treeconflict='C'),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('iota',
+ 'A/B',
+ 'A/B/lambda',
+ 'A/B/E', 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/B/F',
+ 'A/D/gamma')
+
+ expected_status = svntest.actions.get_virginal_state(wc, 2)
+ expected_status.tweak('iota', 'A/B', 'A/D/gamma',
+ status='D ', treeconflict='C')
+ expected_status.tweak('A/B/lambda', 'A/B/E', 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/B/F', status='D ')
+ svntest.actions.run_and_verify_update(wc,
+ expected_output,
+ expected_disk,
+ None,
+ [], False,
+ wc)
+ svntest.actions.run_and_verify_unquiet_status(wc, expected_status)
+
+ # Resolve recursively
+ svntest.actions.run_and_verify_resolved([i, B, g], '--depth=infinity', wc)
+
+ expected_status.tweak('iota', 'A/B', 'A/D/gamma', treeconflict=None)
+ svntest.actions.run_and_verify_unquiet_status(wc, expected_status)
+
+#----------------------------------------------------------------------
+@SkipUnless(svntest.main.server_has_mergeinfo)
+def resolved_on_deleted_item(sbox):
+ "resolved on deleted item"
+
+ sbox.build()
+ wc = sbox.wc_dir
+
+ A = os.path.join(wc, 'A',)
+ B = os.path.join(wc, 'A', 'B')
+ g = os.path.join(wc, 'A', 'D', 'gamma')
+ A2 = os.path.join(wc, 'A2')
+ B2 = os.path.join(A2, 'B')
+ g2 = os.path.join(A2, 'D', 'gamma')
+
+ A_url = sbox.repo_url + '/A'
+ A2_url = sbox.repo_url + '/A2'
+
+ # make a copy of A
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', A_url, A2_url, '-m', 'm')
+
+ expected_output = svntest.wc.State(wc, {
+ 'A2' : Item(status='A '),
+ 'A2/B' : Item(status='A '),
+ 'A2/B/lambda' : Item(status='A '),
+ 'A2/B/E' : Item(status='A '),
+ 'A2/B/E/alpha' : Item(status='A '),
+ 'A2/B/E/beta' : Item(status='A '),
+ 'A2/B/F' : Item(status='A '),
+ 'A2/mu' : Item(status='A '),
+ 'A2/C' : Item(status='A '),
+ 'A2/D' : Item(status='A '),
+ 'A2/D/gamma' : Item(status='A '),
+ 'A2/D/G' : Item(status='A '),
+ 'A2/D/G/pi' : Item(status='A '),
+ 'A2/D/G/rho' : Item(status='A '),
+ 'A2/D/G/tau' : Item(status='A '),
+ 'A2/D/H' : Item(status='A '),
+ 'A2/D/H/chi' : Item(status='A '),
+ 'A2/D/H/omega' : Item(status='A '),
+ 'A2/D/H/psi' : Item(status='A '),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A2/mu' : Item(contents="This is the file 'mu'.\n"),
+ 'A2/D/gamma' : Item(contents="This is the file 'gamma'.\n"),
+ 'A2/D/H/psi' : Item(contents="This is the file 'psi'.\n"),
+ 'A2/D/H/omega' : Item(contents="This is the file 'omega'.\n"),
+ 'A2/D/H/chi' : Item(contents="This is the file 'chi'.\n"),
+ 'A2/D/G/rho' : Item(contents="This is the file 'rho'.\n"),
+ 'A2/D/G/pi' : Item(contents="This is the file 'pi'.\n"),
+ 'A2/D/G/tau' : Item(contents="This is the file 'tau'.\n"),
+ 'A2/B/lambda' : Item(contents="This is the file 'lambda'.\n"),
+ 'A2/B/F' : Item(),
+ 'A2/B/E/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'A2/B/E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'A2/C' : Item(),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc, 2)
+ expected_status.add({
+ 'A2' : Item(),
+ 'A2/B' : Item(),
+ 'A2/B/lambda' : Item(),
+ 'A2/B/E' : Item(),
+ 'A2/B/E/alpha' : Item(),
+ 'A2/B/E/beta' : Item(),
+ 'A2/B/F' : Item(),
+ 'A2/mu' : Item(),
+ 'A2/C' : Item(),
+ 'A2/D' : Item(),
+ 'A2/D/gamma' : Item(),
+ 'A2/D/G' : Item(),
+ 'A2/D/G/pi' : Item(),
+ 'A2/D/G/rho' : Item(),
+ 'A2/D/G/tau' : Item(),
+ 'A2/D/H' : Item(),
+ 'A2/D/H/chi' : Item(),
+ 'A2/D/H/omega' : Item(),
+ 'A2/D/H/psi' : Item(),
+ })
+ expected_status.tweak(status=' ', wc_rev='2')
+
+ svntest.actions.run_and_verify_update(wc,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ wc)
+
+ # Create some conflicts...
+
+ # Modify the paths in the one directory.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'foo-val', B)
+ svntest.main.file_append(g, "Modified gamma.\n")
+
+ expected_output = svntest.wc.State(wc, {
+ 'A/B' : Item(verb='Sending'),
+ 'A/D/gamma' : Item(verb='Sending'),
+ })
+
+ expected_status.tweak('A/B', 'A/D/gamma', wc_rev='3')
+
+ svntest.actions.run_and_verify_commit(wc,
+ expected_output,
+ expected_status)
+
+ # Delete the paths in the second directory.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', B2, g2)
+
+ expected_output = svntest.wc.State(wc, {
+ 'A2/B' : Item(verb='Deleting'),
+ 'A2/D/gamma' : Item(verb='Deleting'),
+ })
+
+ expected_status.remove('A2/B', 'A2/B/lambda',
+ 'A2/B/E', 'A2/B/E/alpha', 'A2/B/E/beta',
+ 'A2/B/F',
+ 'A2/D/gamma')
+
+ svntest.actions.run_and_verify_commit(wc,
+ expected_output,
+ expected_status,
+ [],
+ A2)
+
+ # Now merge A to A2, creating conflicts...
+
+ expected_output = svntest.wc.State(A2, {
+ 'B' : Item(status=' ', treeconflict='C'),
+ 'D/gamma' : Item(status=' ', treeconflict='C'),
+ })
+ expected_mergeinfo_output = svntest.wc.State(A2, {
+ '' : Item(status=' U')
+ })
+ expected_elision_output = svntest.wc.State(A2, {
+ })
+ expected_disk = svntest.wc.State('', {
+ 'mu' : Item(contents="This is the file 'mu'.\n"),
+ 'D' : Item(),
+ 'D/H' : Item(),
+ 'D/H/psi' : Item(contents="This is the file 'psi'.\n"),
+ 'D/H/omega' : Item(contents="This is the file 'omega'.\n"),
+ 'D/H/chi' : Item(contents="This is the file 'chi'.\n"),
+ 'D/G' : Item(),
+ 'D/G/rho' : Item(contents="This is the file 'rho'.\n"),
+ 'D/G/pi' : Item(contents="This is the file 'pi'.\n"),
+ 'D/G/tau' : Item(contents="This is the file 'tau'.\n"),
+ 'C' : Item(),
+ })
+
+ expected_skip = svntest.wc.State(wc, {
+ })
+
+ expected_status = svntest.wc.State(A2, {
+ '' : Item(status=' M', wc_rev='2'),
+ 'D' : Item(status=' ', wc_rev='2'),
+ 'D/gamma' : Item(status='! ', treeconflict='C'),
+ 'D/G' : Item(status=' ', wc_rev='2'),
+ 'D/G/pi' : Item(status=' ', wc_rev='2'),
+ 'D/G/rho' : Item(status=' ', wc_rev='2'),
+ 'D/G/tau' : Item(status=' ', wc_rev='2'),
+ 'D/H' : Item(status=' ', wc_rev='2'),
+ 'D/H/chi' : Item(status=' ', wc_rev='2'),
+ 'D/H/omega' : Item(status=' ', wc_rev='2'),
+ 'D/H/psi' : Item(status=' ', wc_rev='2'),
+ 'B' : Item(status='! ', treeconflict='C'),
+ 'mu' : Item(status=' ', wc_rev='2'),
+ 'C' : Item(status=' ', wc_rev='2'),
+ })
+
+ svntest.actions.run_and_verify_merge(A2, None, None, A_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk, None, expected_skip,
+ [], dry_run = False)
+ svntest.actions.run_and_verify_unquiet_status(A2, expected_status)
+
+ # Now resolve by recursing on the working copy root.
+ svntest.actions.run_and_verify_resolved([B2, g2], '--depth=infinity', wc)
+
+ expected_status.remove('B', 'D/gamma')
+ svntest.actions.run_and_verify_unquiet_status(A2, expected_status)
+
+#----------------------------------------------------------------------
+
+def theirs_conflict_in_subdir(sbox):
+ "resolve to 'theirs-conflict' in sub-directory"
+
+ sbox.build()
+ wc = sbox.wc_dir
+ wc2 = sbox.add_wc_path('wc2')
+ svntest.actions.duplicate_dir(sbox.wc_dir, wc2)
+
+ alpha_path = os.path.join(wc, 'A', 'B', 'E', 'alpha')
+ alpha_path2 = os.path.join(wc2, 'A', 'B', 'E', 'alpha')
+
+ svntest.main.file_append(alpha_path, "Modified alpha.\n")
+ sbox.simple_commit(message='logmsg')
+
+ svntest.main.file_append(alpha_path2, "Modified alpha, too.\n")
+ svntest.main.run_svn(None, 'up', wc2)
+
+ svntest.actions.run_and_verify_resolve([alpha_path2],
+ '--accept=theirs-conflict',
+ alpha_path2)
+
+#----------------------------------------------------------------------
+
+# Regression test for issue #4238 "merge -cA,B with --accept option aborts
+# if rA conflicts".
+@Issue(4238)
+def multi_range_merge_with_accept(sbox):
+ "multi range merge with --accept keeps going"
+
+ sbox.build()
+ os.chdir(sbox.wc_dir)
+ sbox.wc_dir = ''
+
+ # Commit some changes
+ for c in [2, 3, 4]:
+ svntest.main.file_append('iota', 'Change ' + str(c) + '\n')
+ sbox.simple_commit()
+
+ sbox.simple_update(revision=1)
+
+ # The bug: with a request to merge -c4 then -c3, it merges -c4 which
+ # conflicts then auto-resolves the conflict, then errors out with
+ # 'svn: E155035: Can't merge into conflicted node 'iota'.
+ # ### We need more checking of the result to make this test robust, since
+ # it may not always just error out.
+ svntest.main.run_svn(None, 'merge', '-c4,3', '^/iota', 'iota',
+ '--accept=theirs-conflict')
+
+#----------------------------------------------------------------------
+
+# Test for issue #4647 'auto resolution mine-full fails on binary file'
+@Issue(4647)
+def automatic_binary_conflict_resolution(sbox):
+ "resolve -R --accept [base | mf | tf] binary file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ A_COPY_path = os.path.join(wc_dir, "A_COPY")
+
+ # Add a binary file to the project in revision 2.
+ theta_contents = open(os.path.join(sys.path[0], "theta.bin"), 'rb').read()
+ theta_path = sbox.ospath('A/theta')
+ svntest.main.file_write(theta_path, theta_contents, 'wb')
+ svntest.main.run_svn(None, 'add', theta_path)
+ svntest.main.run_svn(None, 'commit', '-m', 'log msg', wc_dir)
+
+ # Branch A to A_COPY in revision 3.
+ svntest.main.run_svn(None, 'copy', wc_dir + "/A", A_COPY_path)
+ svntest.main.run_svn(None, 'commit', '-m', 'log msg', wc_dir)
+
+ # Modify the binary file on trunk and in the branch, so that both versions
+ # differ.
+ theta_branch_path = sbox.ospath('A_COPY/theta')
+ svntest.main.file_append_binary(theta_path, theta_contents)
+ svntest.main.run_svn(None, 'commit', '-m', 'log msg', wc_dir)
+ svntest.main.file_append_binary(theta_branch_path, theta_contents)
+ svntest.main.file_append_binary(theta_branch_path, theta_contents)
+ svntest.main.run_svn(None, 'commit', '-m', 'log msg', wc_dir)
+
+ # Run an svn update now to prevent mixed-revision working copy [1:4] error.
+ svntest.main.run_svn(None, 'update', wc_dir)
+
+
+ def do_binary_conflicting_merge():
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '--recursive', A_COPY_path)
+ svntest.main.run_svn(None, 'merge', sbox.repo_url + "/A/theta",
+ wc_dir + "/A_COPY/theta")
+
+ # Test 'svn resolve -R --accept base'
+ # Regression until r1758160
+ do_binary_conflicting_merge()
+ svntest.actions.run_and_verify_resolve([theta_branch_path],
+ '-R', '--accept', 'base',
+ A_COPY_path)
+
+ # Test 'svn resolve -R --accept theirs-full'
+ do_binary_conflicting_merge()
+ svntest.actions.run_and_verify_resolve([theta_branch_path],
+ '-R', '--accept', 'tf',
+ A_COPY_path)
+
+ # Test 'svn resolve -R --accept working'
+ # Equivalent to 'svn resolved'
+ do_binary_conflicting_merge()
+ svntest.actions.run_and_verify_resolve([theta_branch_path],
+ '-R', '--accept', 'working',
+ A_COPY_path)
+
+ # Test 'svn resolve -R --accept mine-full'
+ # There is no '.mine' for binary file conflicts. Same handling as 'working'
+ do_binary_conflicting_merge()
+ svntest.actions.run_and_verify_resolve([theta_branch_path],
+ '-R', '--accept', 'mine-full',
+ A_COPY_path)
+
+########################################################################
+# Run the tests
+
+# list all tests here, starting with None:
+test_list = [ None,
+ automatic_conflict_resolution,
+ prop_conflict_resolution,
+ auto_resolve_executable_file,
+ resolved_on_wc_root,
+ resolved_on_deleted_item,
+ theirs_conflict_in_subdir,
+ multi_range_merge_with_accept,
+ automatic_binary_conflict_resolution,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+### End of file.
diff --git a/subversion/tests/cmdline/revert_tests.py b/subversion/tests/cmdline/revert_tests.py
new file mode 100755
index 0000000..39ce3c6
--- /dev/null
+++ b/subversion/tests/cmdline/revert_tests.py
@@ -0,0 +1,1690 @@
+#!/usr/bin/env python
+#
+# revert_tests.py: testing 'svn revert'.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import re, os, stat, shutil
+
+# Our testing module
+import svntest
+from svntest import wc, main, actions
+from svntest.actions import run_and_verify_svn
+from svntest.main import file_append, file_write, run_svn
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+
+######################################################################
+# Helpers
+
+def revert_replacement_with_props(sbox, wc_copy):
+ """Helper implementing the core of
+ revert_{repos,wc}_to_wc_replace_with_props().
+
+ Uses a working copy (when wc_copy == True) or a URL (when wc_copy ==
+ False) source to copy from."""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Use a temp file to set properties with wildcards in their values
+ # otherwise Win32/VS2005 will expand them
+ prop_path = os.path.join(wc_dir, 'proptmp')
+ svntest.main.file_append(prop_path, '*')
+
+ # Set props on file which is copy-source later on
+ pi_path = os.path.join(wc_dir, 'A', 'D', 'G', 'pi')
+ rho_path = os.path.join(wc_dir, 'A', 'D', 'G', 'rho')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'phony-prop', '-F', prop_path,
+ pi_path)
+ os.remove(prop_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'svn:eol-style', 'LF', rho_path)
+
+ # Verify props having been set
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_disk.tweak('A/D/G/pi',
+ props={ 'phony-prop': '*' })
+ expected_disk.tweak('A/D/G/rho',
+ props={ 'svn:eol-style': 'LF' })
+
+ svntest.actions.verify_disk(wc_dir, expected_disk, True)
+
+ # Commit props
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/pi': Item(verb='Sending'),
+ 'A/D/G/rho': Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/G/pi', wc_rev='2')
+ expected_status.tweak('A/D/G/rho', wc_rev='2')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Bring wc into sync
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # File scheduled for deletion
+ svntest.actions.run_and_verify_svn(None, [], 'rm', rho_path)
+
+ # Status before attempting copies
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('A/D/G/rho', status='D ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # The copy shouldn't fail
+ if wc_copy:
+ pi_src = os.path.join(wc_dir, 'A', 'D', 'G', 'pi')
+ else:
+ pi_src = sbox.repo_url + '/A/D/G/pi'
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', pi_src, rho_path)
+
+ # Verify both content and props have been copied
+ if wc_copy:
+ props = { 'phony-prop' : '*' }
+ else:
+ props = { 'phony-prop' : '*' }
+
+ expected_disk.tweak('A/D/G/rho',
+ contents="This is the file 'pi'.\n",
+ props=props)
+ svntest.actions.verify_disk(wc_dir, expected_disk.old_tree(), True)
+
+ # Now revert
+ expected_status.tweak('A/D/G/rho', status='R ', copied='+', wc_rev='-')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_status.tweak('A/D/G/rho', status=' ', copied=None, wc_rev='2')
+ expected_output = ["Reverted '" + rho_path + "'\n"]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'revert', '-R', wc_dir)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Check disk status
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_disk.tweak('A/D/G/pi',
+ props={ 'phony-prop': '*' })
+ expected_disk.tweak('A/D/G/rho',
+ props={ 'svn:eol-style': 'LF' })
+ svntest.actions.verify_disk(wc_dir, expected_disk.old_tree(), True)
+
+
+
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+
+#----------------------------------------------------------------------
+
+def revert_from_wc_root(sbox):
+ "revert relative to wc root"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ os.chdir(wc_dir)
+
+ # Mostly taken from basic_revert
+ # Modify some files and props.
+ beta_path = os.path.join('A', 'B', 'E', 'beta')
+ gamma_path = os.path.join('A', 'D', 'gamma')
+ iota_path = 'iota'
+ rho_path = os.path.join('A', 'D', 'G', 'rho')
+ zeta_path = os.path.join('A', 'D', 'H', 'zeta')
+ svntest.main.file_append(beta_path, "Added some text to 'beta'.\n")
+ svntest.main.file_append(iota_path, "Added some text to 'iota'.\n")
+ svntest.main.file_append(rho_path, "Added some text to 'rho'.\n")
+ svntest.main.file_append(zeta_path, "Added some text to 'zeta'.\n")
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'add', zeta_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'random-prop', 'propvalue',
+ gamma_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'random-prop', 'propvalue',
+ iota_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'random-prop', 'propvalue',
+ '.')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'random-prop', 'propvalue',
+ 'A')
+
+ # Verify modified status.
+ expected_output = svntest.actions.get_virginal_state('', 1)
+ expected_output.tweak('A/B/E/beta', 'A/D/G/rho', status='M ')
+ expected_output.tweak('iota', status='MM')
+ expected_output.tweak('', 'A/D/gamma', 'A', status=' M')
+ expected_output.add({
+ 'A/D/H/zeta' : Item(status='A ', wc_rev=0),
+ })
+
+ svntest.actions.run_and_verify_status('', expected_output)
+
+ # Run revert
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', beta_path)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', gamma_path)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', iota_path)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', rho_path)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', zeta_path)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '.')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', 'A')
+
+ # Verify unmodified status.
+ expected_output = svntest.actions.get_virginal_state('', 1)
+
+ svntest.actions.run_and_verify_status('', expected_output)
+
+@Issue(1663)
+def revert_reexpand_keyword(sbox):
+ "revert reexpands manually contracted keyword"
+
+ # This is for issue #1663. The bug is that if the only difference
+ # between a locally modified working file and the base version of
+ # same was that the former had a contracted keyword that would be
+ # expanded in the latter, then 'svn revert' wouldn't notice the
+ # difference, and therefore wouldn't revert. And why wouldn't it
+ # notice? Since text bases are always stored with keywords
+ # contracted, and working files are contracted before comparison
+ # with text base, there would appear to be no difference when the
+ # contraction is the only difference. For most commands, this is
+ # correct -- but revert's job is to restore the working file, not
+ # the text base.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ newfile_path = os.path.join(wc_dir, "newfile")
+ unexpanded_contents = "This is newfile: $Rev$.\n"
+
+ # Put an unexpanded keyword into iota.
+ svntest.main.file_write(newfile_path, unexpanded_contents)
+
+ # Commit, without svn:keywords property set.
+ svntest.main.run_svn(None, 'add', newfile_path)
+ svntest.main.run_svn(None,
+ 'commit', '-m', 'r2', newfile_path)
+
+ # Set the property and commit. This should expand the keyword.
+ svntest.main.run_svn(None, 'propset', 'svn:keywords', 'rev', newfile_path)
+ svntest.main.run_svn(None,
+ 'commit', '-m', 'r3', newfile_path)
+
+ # Verify that the keyword got expanded.
+ def check_expanded(path):
+ fp = open(path, 'r')
+ lines = fp.readlines()
+ fp.close()
+ if lines[0] != "This is newfile: $Rev: 3 $.\n":
+ raise svntest.Failure
+
+ check_expanded(newfile_path)
+
+ # Now un-expand the keyword again.
+ svntest.main.file_write(newfile_path, unexpanded_contents)
+
+ # Revert the file. The keyword should reexpand.
+ svntest.main.run_svn(None, 'revert', newfile_path)
+
+ # Verify that the keyword got re-expanded.
+ check_expanded(newfile_path)
+
+ # Ok, the first part of this test was written in 2004. We are now in 2011
+ # and note that there is more to test:
+
+ # If the recorded timestamp and size match the file then revert won't
+ # reinstall the file as the file was not modified when last compared in
+ # the repository normal form.
+ #
+ # The easiest way to get the information recorded would be calling cleanup,
+ # because that 'repairs' the recorded information. But some developers
+ # (including me) would call that cheating, so I just use a failed commit.
+
+ # Un-expand the keyword again.
+ svntest.main.file_write(newfile_path, unexpanded_contents)
+
+ # And now we trick svn in ignoring the file on newfile_path
+ newfile2_path = newfile_path + '2'
+ svntest.main.file_write(newfile2_path, 'This is file 2')
+ svntest.main.run_svn(None, 'add', newfile2_path)
+ os.remove(newfile2_path)
+
+ # This commit fails because newfile2_path is missing, but only after
+ # we call svn_wc__internal_file_modified_p() on new_file.
+ svntest.actions.run_and_verify_commit(wc_dir, None, None, ".*2' is scheduled"+
+ " for addition, but is missing.*",
+ newfile_path, newfile2_path,
+ '-m', "Shouldn't be committed")
+
+ # Revert the file. The file is not reverted!
+ svntest.actions.run_and_verify_svn([], [], 'revert', newfile_path)
+
+
+#----------------------------------------------------------------------
+# Regression test for issue #1775:
+# Should be able to revert a file with no properties i.e. no prop-base
+@Issue(1775)
+def revert_replaced_file_without_props(sbox):
+ "revert a replaced file with no properties"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ file1_path = os.path.join(wc_dir, 'file1')
+
+ # Add a new file, file1, that has no prop-base
+ svntest.main.file_append(file1_path, "This is the file 'file1' revision 2.")
+ svntest.actions.run_and_verify_svn(None, [], 'add', file1_path)
+
+ # commit file1
+ expected_output = svntest.wc.State(wc_dir, {
+ 'file1' : Item(verb='Adding')
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'file1' : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # delete file1
+ svntest.actions.run_and_verify_svn(None, [], 'rm', file1_path)
+
+ # test that file1 is scheduled for deletion.
+ expected_status.tweak('file1', status='D ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # recreate and add file1
+ svntest.main.file_append(file1_path, "This is the file 'file1' revision 3.")
+ svntest.actions.run_and_verify_svn(None, [], 'add', file1_path)
+
+  # Test to see if file1 is scheduled for replacement
+ expected_status.tweak('file1', status='R ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # revert file1
+ svntest.actions.run_and_verify_svn(["Reverted '" + file1_path + "'\n"],
+ [], 'revert', file1_path)
+
+ # test that file1 really was reverted
+ expected_status.tweak('file1', status=' ', wc_rev=2)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+# Note that issue #876 has been rejected. This now basically tests that
+# reverting the delete side of a move does *not* also revert the copy side.
+@Issue(876)
+def revert_moved_file(sbox):
+ "revert a moved file"
+
+ # svntest.factory.make(sbox, """svn mv iota iota_moved
+ # svn st
+ # svn revert iota
+ # svn st
+ # """)
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ iota = os.path.join(wc_dir, 'iota')
+ iota_moved = os.path.join(wc_dir, 'iota_moved')
+
+ # svn mv iota iota_moved
+ expected_stdout = svntest.verify.UnorderedOutput([
+ 'A ' + iota_moved + '\n',
+ 'D ' + iota + '\n',
+ ])
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'mv', iota,
+ iota_moved)
+
+ # svn st
+ expected_status = actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'iota_moved' : Item(status='A ', copied='+', wc_rev='-',
+ moved_from='iota'),
+ })
+ expected_status.tweak('iota', status='D ', moved_to='iota_moved')
+
+ actions.run_and_verify_unquiet_status(wc_dir, expected_status)
+
+ # svn revert iota
+ expected_stdout = ["Reverted '" + iota + "'\n"]
+
+ actions.run_and_verify_svn2(expected_stdout, [], 0, 'revert',
+ iota)
+
+ # svn st
+ expected_status.tweak('iota', status=' ', moved_to=None)
+ expected_status.tweak('iota_moved', moved_from=None)
+
+ actions.run_and_verify_unquiet_status(wc_dir, expected_status)
+
+
+#----------------------------------------------------------------------
+# Test for issue 2135
+#
+# It is like merge_file_replace (in merge_tests.py), but reverts file
+# instead of commit.
+@Issue(2135)
+def revert_file_merge_replace_with_history(sbox):
+ "revert a merge replacement of file with history"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # File scheduled for deletion
+ rho_path = os.path.join(wc_dir, 'A', 'D', 'G', 'rho')
+ svntest.actions.run_and_verify_svn(None, [], 'rm', rho_path)
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/G/rho', status='D ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/rho': Item(verb='Deleting'),
+ })
+
+ expected_status.remove('A/D/G/rho')
+
+ # Commit rev 2
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+ # create new rho file
+ svntest.main.file_write(rho_path, "new rho\n")
+
+ # Add the new file
+ svntest.actions.run_and_verify_svn(None, [], 'add', rho_path)
+
+  # Commit revision 3
+ expected_status.add({
+ 'A/D/G/rho' : Item(status='A ', wc_rev='0')
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/rho': Item(verb='Adding'),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ None)
+
+ # Update working copy
+ expected_output = svntest.wc.State(wc_dir, {})
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/D/G/rho', contents='new rho\n' )
+ expected_status.tweak(wc_rev='3')
+ expected_status.tweak('A/D/G/rho', status=' ')
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+ # merge changes from r3:1
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/rho': Item(status='R ')
+ })
+ expected_mergeinfo_output = svntest.wc.State(wc_dir, {
+ '' : Item(status=' U')
+ })
+ expected_elision_output = svntest.wc.State(wc_dir, {
+ '' : Item(status=' U')
+ })
+ expected_status.tweak('A/D/G/rho', status='R ', copied='+', wc_rev='-')
+ expected_skip = wc.State(wc_dir, { })
+ expected_disk.tweak('A/D/G/rho', contents="This is the file 'rho'.\n")
+ svntest.actions.run_and_verify_merge(wc_dir, '3', '1',
+ sbox.repo_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip)
+
+ # Now revert
+ svntest.actions.run_and_verify_svn(None,
+ [], 'revert', rho_path)
+
+ # test that rho really was reverted
+ expected_status.tweak('A/D/G/rho', copied=None, status=' ', wc_rev=3)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_disk.tweak('A/D/G/rho', contents="new rho\n")
+ svntest.actions.verify_disk(wc_dir, expected_disk.old_tree(), True)
+
+  # Make sure the revert removed the copied-from information.
+ expected_infos = [
+ { 'Copied' : None }
+ ]
+ svntest.actions.run_and_verify_info(expected_infos, rho_path)
+
+def revert_wc_to_wc_replace_with_props(sbox):
+ "revert svn cp PATH PATH replace file with props"
+
+ revert_replacement_with_props(sbox, 1)
+
+def revert_repos_to_wc_replace_with_props(sbox):
+ "revert svn cp URL PATH replace file with props"
+
+ revert_replacement_with_props(sbox, 0)
+
+def revert_after_second_replace(sbox):
+ "revert file after second replace"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ # File scheduled for deletion
+ rho_path = os.path.join(wc_dir, 'A', 'D', 'G', 'rho')
+ svntest.actions.run_and_verify_svn(None, [], 'rm', rho_path)
+
+ # Status before attempting copy
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/G/rho', status='D ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Replace file for the first time
+ pi_src = os.path.join(wc_dir, 'A', 'D', 'G', 'pi')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', pi_src, rho_path)
+
+ expected_status.tweak('A/D/G/rho', status='R ', copied='+', wc_rev='-')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Now delete replaced file.
+ svntest.actions.run_and_verify_svn(None, [], 'rm', '--force', rho_path)
+
+ # Status should be same as after first delete
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/G/rho', status='D ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Replace file for the second time
+ pi_src = os.path.join(wc_dir, 'A', 'D', 'G', 'pi')
+
+ svntest.actions.run_and_verify_svn(None, [], 'cp', pi_src, rho_path)
+
+ expected_status.tweak('A/D/G/rho', status='R ', copied='+', wc_rev='-')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Now revert
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '-R', wc_dir)
+
+ # Check disk status
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ svntest.actions.verify_disk(wc_dir, expected_disk.old_tree(), True)
+
+
+#----------------------------------------------------------------------
+# Tests for issue #2517.
+#
+# Manual conflict resolution leads to spurious revert report.
+@Issue(2517)
+def revert_after_manual_conflict_resolution__text(sbox):
+ "revert after manual text-conflict resolution"
+
+ # Make two working copies
+ sbox.build()
+ wc_dir_1 = sbox.wc_dir
+ wc_dir_2 = sbox.add_wc_path('other')
+ svntest.actions.duplicate_dir(wc_dir_1, wc_dir_2)
+
+ # Cause a (text) conflict
+ iota_path_1 = os.path.join(wc_dir_1, 'iota')
+ iota_path_2 = os.path.join(wc_dir_2, 'iota')
+ svntest.main.file_write(iota_path_1, 'Modified iota text')
+ svntest.main.file_write(iota_path_2, 'Conflicting iota text')
+ svntest.main.run_svn(None,
+ 'commit', '-m', 'r2', wc_dir_1)
+ svntest.main.run_svn(None,
+ 'update', wc_dir_2)
+
+ # Resolve the conflict "manually"
+ svntest.main.file_write(iota_path_2, 'Modified iota text')
+ os.remove(iota_path_2 + '.mine')
+ os.remove(iota_path_2 + '.r1')
+ os.remove(iota_path_2 + '.r2')
+
+ # Verify no output from status, diff, or revert
+ svntest.actions.run_and_verify_svn([], [], "status", wc_dir_2)
+ svntest.actions.run_and_verify_svn([], [], "diff", wc_dir_2)
+ svntest.actions.run_and_verify_svn([], [], "revert", "-R", wc_dir_2)
+
+def revert_after_manual_conflict_resolution__prop(sbox):
+ "revert after manual property-conflict resolution"
+
+ # Make two working copies
+ sbox.build()
+ wc_dir_1 = sbox.wc_dir
+ wc_dir_2 = sbox.add_wc_path('other')
+ svntest.actions.duplicate_dir(wc_dir_1, wc_dir_2)
+
+ # Cause a (property) conflict
+ iota_path_1 = os.path.join(wc_dir_1, 'iota')
+ iota_path_2 = os.path.join(wc_dir_2, 'iota')
+ svntest.main.run_svn(None, 'propset', 'foo', '1', iota_path_1)
+ svntest.main.run_svn(None, 'propset', 'foo', '2', iota_path_2)
+ svntest.main.run_svn(None,
+ 'commit', '-m', 'r2', wc_dir_1)
+ svntest.main.run_svn(None,
+ 'update', wc_dir_2)
+
+ # Resolve the conflict "manually"
+ svntest.main.run_svn(None, 'propset', 'foo', '1', iota_path_2)
+ os.remove(iota_path_2 + '.prej')
+
+ # Verify no output from status, diff, or revert
+ svntest.actions.run_and_verify_svn([], [], "status", wc_dir_2)
+ svntest.actions.run_and_verify_svn([], [], "diff", wc_dir_2)
+ svntest.actions.run_and_verify_svn([], [], "revert", "-R", wc_dir_2)
+
+def revert_propset__dir(sbox):
+ "revert a simple propset on a dir"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+ a_path = os.path.join(wc_dir, 'A')
+ svntest.main.run_svn(None, 'propset', 'foo', 'x', a_path)
+ expected_output = re.escape("Reverted '" + a_path + "'")
+ svntest.actions.run_and_verify_svn(expected_output, [], "revert",
+ a_path)
+
+def revert_propset__file(sbox):
+ "revert a simple propset on a file"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+ iota_path = os.path.join(wc_dir, 'iota')
+ svntest.main.run_svn(None, 'propset', 'foo', 'x', iota_path)
+ expected_output = re.escape("Reverted '" + iota_path + "'")
+ svntest.actions.run_and_verify_svn(expected_output, [], "revert",
+ iota_path)
+
+def revert_propdel__dir(sbox):
+ "revert a simple propdel on a dir"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ a_path = os.path.join(wc_dir, 'A')
+ svntest.main.run_svn(None, 'propset', 'foo', 'x', a_path)
+ svntest.main.run_svn(None,
+ 'commit', '-m', 'ps', a_path)
+ svntest.main.run_svn(None, 'propdel', 'foo', a_path)
+ expected_output = re.escape("Reverted '" + a_path + "'")
+ svntest.actions.run_and_verify_svn(expected_output, [], "revert",
+ a_path)
+
+def revert_propdel__file(sbox):
+ "revert a simple propdel on a file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ iota_path = os.path.join(wc_dir, 'iota')
+ svntest.main.run_svn(None, 'propset', 'foo', 'x', iota_path)
+ svntest.main.run_svn(None,
+ 'commit', '-m', 'ps', iota_path)
+ svntest.main.run_svn(None, 'propdel', 'foo', iota_path)
+ expected_output = re.escape("Reverted '" + iota_path + "'")
+ svntest.actions.run_and_verify_svn(expected_output, [], "revert",
+ iota_path)
+
+def revert_replaced_with_history_file_1(sbox):
+ "revert a committed replace-with-history == no-op"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ iota_path = os.path.join(wc_dir, 'iota')
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+
+ # Remember the original text of 'mu'
+ exit_code, text_r1, err = svntest.actions.run_and_verify_svn(None, [],
+ 'cat', mu_path)
+ # delete mu and replace it with a copy of iota
+ svntest.main.run_svn(None, 'rm', mu_path)
+ svntest.main.run_svn(None, 'mv', iota_path, mu_path)
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status=' ', wc_rev=2)
+ expected_status.remove('iota')
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota': Item(verb='Deleting'),
+ 'A/mu': Item(verb='Replacing'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # update the working copy
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+ # now revert back to the state in r1
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu': Item(status='R '),
+ 'iota': Item(status='A ')
+ })
+ expected_mergeinfo_output = svntest.wc.State(wc_dir, {
+ '': Item(status=' U'),
+ })
+ expected_elision_output = svntest.wc.State(wc_dir, {
+ '': Item(status=' U'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('A/mu', status='R ', copied='+', wc_rev='-')
+ expected_status.tweak('iota', status='A ', copied='+', wc_rev='-')
+ expected_skip = wc.State(wc_dir, { })
+ expected_disk = svntest.main.greek_state.copy()
+ svntest.actions.run_and_verify_merge(wc_dir, '2', '1',
+ sbox.repo_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip)
+
+ # and commit in r3
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('A/mu', status=' ', wc_rev=3)
+ expected_status.tweak('iota', status=' ', wc_rev=3)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota': Item(verb='Adding'),
+ 'A/mu': Item(verb='Replacing'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Verify the content of 'mu'
+ svntest.actions.run_and_verify_svn(text_r1, [], 'cat', mu_path)
+
+ # situation: no local modifications, mu has its original content again.
+
+ # revert 'mu' locally, shouldn't change a thing.
+ svntest.actions.run_and_verify_svn([], [], "revert",
+ mu_path)
+
+ # Verify the content of 'mu'
+ svntest.actions.run_and_verify_svn(text_r1, [], 'cat', mu_path)
+
+#----------------------------------------------------------------------
+# Test for issue #2804.
+@Issue(2804)
+def status_of_missing_dir_after_revert(sbox):
+ "status after schedule-delete, revert, and local rm"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+ A_D_G_path = os.path.join(wc_dir, "A", "D", "G")
+
+ svntest.actions.run_and_verify_svn(None, [], "rm", A_D_G_path)
+ expected_output = re.escape("Reverted '" + A_D_G_path + "'")
+ svntest.actions.run_and_verify_svn(expected_output, [], "revert",
+ A_D_G_path)
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/G/rho', 'A/D/G/pi', 'A/D/G/tau',
+ status='D ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ svntest.main.safe_rmtree(A_D_G_path)
+ expected_status.tweak('A/D/G', status='! ')
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # When using single-db, we can get back to the virginal state.
+ svntest.actions.run_and_verify_svn(None, [], "revert",
+ "-R", A_D_G_path)
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+# Test for issue #2804 with replaced directory
+@Issue(2804)
+def status_of_missing_dir_after_revert_replaced_with_history_dir(sbox):
+ "status after replace+, revert, and local rm"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # delete A/D/G and commit
+ G_path = os.path.join(wc_dir, "A", "D", "G")
+ svntest.actions.run_and_verify_svn(None, [], "rm", G_path)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/D/G', 'A/D/G/rho', 'A/D/G/pi', 'A/D/G/tau')
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G': Item(verb='Deleting'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # copy A/D/G from A/B/E and commit
+ E_path = os.path.join(wc_dir, "A", "B", "E")
+ svntest.actions.run_and_verify_svn(None, [], "cp", E_path, G_path)
+ expected_status.add({
+ 'A/D/G' : Item(status=' ', wc_rev='3'),
+ 'A/D/G/alpha' : Item(status=' ', wc_rev='3'),
+ 'A/D/G/beta' : Item(status=' ', wc_rev='3')
+ })
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G': Item(verb='Adding'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # update the working copy
+ svntest.main.run_svn(None, 'up', wc_dir)
+
+ # now rollback to r1, thereby reinstating the old 'G'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G': Item(status='R '),
+ 'A/D/G/rho': Item(status='A '),
+ 'A/D/G/pi': Item(status='A '),
+ 'A/D/G/tau': Item(status='A '),
+ })
+ expected_mergeinfo_output = svntest.wc.State(wc_dir, {
+ '': Item(status=' U'),
+ })
+ expected_elision_output = svntest.wc.State(wc_dir, {
+ '': Item(status=' U'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+ expected_status.tweak('A/D/G', status='R ', copied='+', wc_rev='-')
+ expected_status.tweak('A/D/G/rho',
+ 'A/D/G/pi',
+ 'A/D/G/tau',
+ copied='+', wc_rev='-')
+ expected_status.add({
+ 'A/D/G/alpha' : Item(status='D ', wc_rev='3'),
+ 'A/D/G/beta' : Item(status='D ', wc_rev='3'),
+ })
+
+ expected_skip = wc.State(wc_dir, { })
+ expected_disk = svntest.main.greek_state.copy()
+ svntest.actions.run_and_verify_merge(wc_dir, '3', '1',
+ sbox.repo_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ dry_run = 0)
+
+ # now test if the revert works ok
+ revert_paths = [G_path] + [os.path.join(G_path, child)
+ for child in ['alpha', 'beta', 'pi', 'rho', 'tau']]
+
+ expected_output = svntest.verify.UnorderedOutput([
+ "Reverted '%s'\n" % path for path in revert_paths])
+
+ svntest.actions.run_and_verify_svn(expected_output, [], "revert", "-R",
+ G_path)
+
+ svntest.actions.run_and_verify_svn([], [],
+ "status", wc_dir)
+
+ svntest.main.safe_rmtree(G_path)
+
+ expected_output = svntest.verify.UnorderedOutput(
+ ["! " + G_path + "\n",
+ "! " + os.path.join(G_path, "alpha") + "\n",
+ "! " + os.path.join(G_path, "beta") + "\n"])
+ svntest.actions.run_and_verify_svn(expected_output, [], "status",
+ wc_dir)
+
+# Test for issue #2928.
+@Issue(2928)
+def revert_replaced_with_history_file_2(sbox):
+ "reverted replace with history restores checksum"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ iota_path = os.path.join(wc_dir, 'iota')
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+
+ # Delete mu and replace it with a copy of iota
+ svntest.main.run_svn(None, 'rm', mu_path)
+ svntest.main.run_svn(None, 'cp', iota_path, mu_path)
+
+ # Revert mu.
+ svntest.main.run_svn(None, 'revert', mu_path)
+
+ # If we make local mods to the reverted mu the commit will
+ # fail if the checksum is incorrect.
+ svntest.main.file_write(mu_path, "new text")
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu': Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status=' ', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+
+def revert_tree_conflicts_in_updated_files(sbox):
+ "revert tree conflicts in updated files"
+
+ # See use cases 1-3 in notes/tree-conflicts/use-cases.txt for background.
+
+ svntest.actions.build_greek_tree_conflicts(sbox)
+ wc_dir = sbox.wc_dir
+ G = os.path.join(wc_dir, 'A', 'D', 'G')
+ G_pi = os.path.join(G, 'pi')
+ G_rho = os.path.join(G, 'rho')
+ G_tau = os.path.join(G, 'tau')
+
+ # Duplicate wc for tests
+ wc_dir_2 = sbox.add_wc_path('2')
+ svntest.actions.duplicate_dir(wc_dir, wc_dir_2)
+ G2 = os.path.join(wc_dir_2, 'A', 'D', 'G')
+ G2_pi = os.path.join(G2, 'pi')
+ G2_rho = os.path.join(G2, 'rho')
+ G2_tau = os.path.join(G2, 'tau')
+
+ # Expectations
+ expected_output = svntest.verify.UnorderedOutput(
+ ["Reverted '%s'\n" % G_pi,
+ "Reverted '%s'\n" % G_rho,
+ "Reverted '%s'\n" % G_tau,
+ ])
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('A/D/G/pi', status=' ')
+ expected_status.remove('A/D/G/rho')
+ expected_status.remove('A/D/G/tau')
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/D/G/rho')
+ expected_disk.tweak('A/D/G/pi',
+ contents="This is the file 'pi'.\nIncoming edit.\n")
+ expected_disk.remove('A/D/G/tau')
+
+ # Revert individually in wc
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'revert', G_pi, G_rho, G_tau)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+ svntest.actions.verify_disk(wc_dir, expected_disk)
+
+ # Expectations
+ expected_output = svntest.verify.UnorderedOutput(
+ ["Reverted '%s'\n" % G2_pi,
+ "Reverted '%s'\n" % G2_rho,
+ "Reverted '%s'\n" % G2_tau,
+ ])
+
+ expected_status.wc_dir = wc_dir_2
+
+ # Revert recursively in wc 2
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'revert', '-R', G2)
+ svntest.actions.run_and_verify_status(wc_dir_2, expected_status)
+ svntest.actions.verify_disk(wc_dir_2, expected_disk)
+
+def revert_add_over_not_present_dir(sbox):
+ "reverting an add over not present directory"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ main.run_svn(None, 'rm', os.path.join(wc_dir, 'A/C'))
+ sbox.simple_commit(message='Deleted dir')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/C')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ main.run_svn(None, 'mkdir', os.path.join(wc_dir, 'A/C'))
+
+ # This failed in some WC-NG intermediate format (r927318-r958992).
+ main.run_svn(None, 'revert', os.path.join(wc_dir, 'A/C'))
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+def revert_added_tree(sbox):
+ "revert an added tree fails"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mkdir', sbox.ospath('X'), sbox.ospath('X/Y'))
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'X' : Item(status='A ', wc_rev=0),
+ 'X/Y' : Item(status='A ', wc_rev=0),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Revert is non-recursive and fails, status is unchanged
+ expected_error = '.*Try \'svn revert --depth infinity\'.*'
+ svntest.actions.run_and_verify_svn(None, expected_error,
+ 'revert', sbox.ospath('X'))
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+@Issue(3834)
+def revert_child_of_copy(sbox):
+ "revert a child of a copied directory"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp',
+ sbox.ospath('A/B/E'),
+ sbox.ospath('A/B/E2'))
+
+
+ svntest.main.file_append(sbox.ospath('A/B/E2/beta'), 'extra text\n')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B/E2' : Item(status='A ', copied='+', wc_rev='-'),
+ 'A/B/E2/alpha' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B/E2/beta' : Item(status='M ', copied='+', wc_rev='-'),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # First revert removes text change, child is still copied
+ expected_output = ["Reverted '%s'\n" % sbox.ospath('A/B/E2/beta')]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'revert', sbox.ospath('A/B/E2/beta'))
+ expected_status.tweak('A/B/E2/beta', status=' ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Second revert of child does nothing, child is still copied
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', sbox.ospath('A/B/E2/beta'))
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+@Issue(3783)
+def revert_non_recusive_after_delete(sbox):
+ "non-recursive revert after delete"
+
+ sbox.build(read_only=True)
+ wc_dir = sbox.wc_dir
+
+ svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.ospath('A/B'))
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B', 'A/B/E', 'A/B/E/alpha', 'A/B/E/beta', 'A/B/F',
+ 'A/B/lambda', status='D ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # This appears to work but gets the op-depth wrong
+ expected_output = ["Reverted '%s'\n" % sbox.ospath('A/B')]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'revert', sbox.ospath('A/B'))
+ expected_status.tweak('A/B', status=' ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mkdir', sbox.ospath('A/B/E'))
+ expected_status.tweak('A/B/E', status='R ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Since the op-depth was wrong A/B/E erroneously remains deleted
+ expected_output = ["Reverted '%s'\n" % sbox.ospath('A/B/E')]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'revert', sbox.ospath('A/B/E'))
+ expected_status.tweak('A/B/E', status=' ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+def revert_permissions_only(sbox):
+ "permission-only reverts"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Helpers pinched/adapted from lock_tests.py. Put them somewhere common?
+ def check_writability(path, writable):
+ bits = stat.S_IWGRP | stat.S_IWOTH | stat.S_IWRITE
+ mode = os.stat(path)[0]
+ if bool(mode & bits) != writable:
+ raise svntest.Failure("path '%s' is unexpectedly %s (mode %o)"
+ % (path, ["writable", "read-only"][writable], mode))
+
+ def is_writable(path):
+ "Raise if PATH is not writable."
+ check_writability(path, True)
+
+ def is_readonly(path):
+ "Raise if PATH is not readonly."
+ check_writability(path, False)
+
+ def check_executability(path, executable):
+ bits = stat.S_IXGRP | stat.S_IXOTH | stat.S_IEXEC
+ mode = os.stat(path)[0]
+ if bool(mode & bits) != executable:
+ raise svntest.Failure("path '%s' is unexpectedly %s (mode %o)"
+ % (path,
+ ["executable", "non-executable"][executable],
+ mode))
+
+ def is_executable(path):
+ "Raise if PATH is not executable."
+ check_executability(path, True)
+
+ def is_non_executable(path):
+ "Raise if PATH is executable."
+ check_executability(path, False)
+
+
+ os.chmod(sbox.ospath('A/B/E/alpha'), svntest.main.S_ALL_READ) # read-only
+ is_readonly(sbox.ospath('A/B/E/alpha'))
+ expected_output = ["Reverted '%s'\n" % sbox.ospath('A/B/E/alpha')]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'revert', sbox.ospath('A/B/E/alpha'))
+ is_writable(sbox.ospath('A/B/E/alpha'))
+
+ if svntest.main.is_posix_os():
+ os.chmod(sbox.ospath('A/B/E/beta'), svntest.main.S_ALL_RWX) # executable
+ is_executable(sbox.ospath('A/B/E/beta'))
+ expected_output = ["Reverted '%s'\n" % sbox.ospath('A/B/E/beta')]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'revert', sbox.ospath('A/B/E/beta'))
+ is_non_executable(sbox.ospath('A/B/E/beta'))
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'svn:needs-lock', '1',
+ sbox.ospath('A/B/E/alpha'))
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'svn:executable', '1',
+ sbox.ospath('A/B/E/beta'))
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E/alpha': Item(verb='Sending'),
+ 'A/B/E/beta': Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/E/alpha', wc_rev='2')
+ expected_status.tweak('A/B/E/beta', wc_rev='2')
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ os.chmod(sbox.ospath('A/B/E/alpha'), svntest.main.S_ALL_RW) # not read-only
+ is_writable(sbox.ospath('A/B/E/alpha'))
+ expected_output = ["Reverted '%s'\n" % sbox.ospath('A/B/E/alpha')]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'revert', sbox.ospath('A/B/E/alpha'))
+ is_readonly(sbox.ospath('A/B/E/alpha'))
+
+ if svntest.main.is_posix_os():
+ os.chmod(sbox.ospath('A/B/E/beta'), svntest.main.S_ALL_RW) # not executable
+ is_non_executable(sbox.ospath('A/B/E/beta'))
+ expected_output = ["Reverted '%s'\n" % sbox.ospath('A/B/E/beta')]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'revert', sbox.ospath('A/B/E/beta'))
+ is_executable(sbox.ospath('A/B/E/beta'))
+
+ # copied file is always writeable
+ sbox.simple_update()
+ expected_output = ["A %s\n" % sbox.ospath('A/B/E2')]
+ svntest.actions.run_and_verify_svn(expected_output, [], 'copy',
+ sbox.ospath('A/B/E'),
+ sbox.ospath('A/B/E2'))
+ is_writable(sbox.ospath('A/B/E2/alpha'))
+ svntest.actions.run_and_verify_svn([], [],
+ 'revert', sbox.ospath('A/B/E2/alpha'))
+ is_writable(sbox.ospath('A/B/E2/alpha'))
+
+@XFail()
+@Issue(3851)
+def revert_copy_depth_files(sbox):
+ "revert a copy with depth=files"
+
+ sbox.build(read_only=True)
+ wc_dir = sbox.wc_dir
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy',
+ sbox.ospath('A/B/E'),
+ sbox.ospath('A/B/E2'))
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B/E2' : Item(status='A ', copied='+', wc_rev='-'),
+ 'A/B/E2/alpha' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B/E2/beta' : Item(status=' ', copied='+', wc_rev='-'),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_output = svntest.verify.UnorderedOutput([
+ "Reverted '%s'\n" % sbox.ospath(path) for path in ['A/B/E2',
+ 'A/B/E2/alpha',
+ 'A/B/E2/beta']])
+
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'revert', '--depth', 'files',
+ sbox.ospath('A/B/E2'))
+
+ expected_status.remove('A/B/E2', 'A/B/E2/alpha', 'A/B/E2/beta')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+@XFail()
+@Issue(3851)
+def revert_nested_add_depth_immediates(sbox):
+ "revert a nested add with depth=immediates"
+
+ sbox.build(read_only=True)
+ wc_dir = sbox.wc_dir
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mkdir', '--parents', sbox.ospath('A/X/Y'))
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/X' : Item(status='A ', wc_rev='0'),
+ 'A/X/Y' : Item(status='A ', wc_rev='0'),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_output = svntest.verify.UnorderedOutput([
+ "Reverted '%s'\n" % sbox.ospath(path) for path in ['A/X', 'A/X/Y']])
+
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'revert', '--depth', 'immediates',
+ sbox.ospath('A/X'))
+
+ expected_status.remove('A/X', 'A/X/Y')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+def create_superflous_actual_node(sbox):
+ "create a superfluous actual node"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ svntest.main.file_append(sbox.ospath('A/B/E/alpha'), 'their text\n')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Create a NODES row with op-depth>0
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', '-r', '1',
+ sbox.repo_url + '/A/B/E/alpha',
+ sbox.ospath('alpha'))
+
+ # Merge to create an ACTUAL with a conflict
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'alpha' : Item(status='A ', copied='+', wc_rev='-'),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+ svntest.main.file_append(sbox.ospath('alpha'), 'my text\n')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'merge', '--accept', 'postpone',
+ '^/A/B/E/alpha', sbox.ospath('alpha'))
+ expected_status.tweak('alpha', status='CM', entry_status='A ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Clear merge property and remove conflict files
+ sbox.simple_propdel('svn:mergeinfo', 'alpha')
+ os.remove(sbox.ospath('alpha.merge-left.r1'))
+ os.remove(sbox.ospath('alpha.merge-right.r2'))
+ os.remove(sbox.ospath('alpha.working'))
+
+ expected_status.tweak('alpha', status='A ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+@Issue(3859)
+@SkipUnless(svntest.main.server_has_mergeinfo)
+def revert_empty_actual(sbox):
+ "revert with superfluous actual node"
+
+ create_superflous_actual_node(sbox)
+ wc_dir = sbox.wc_dir
+
+ # Non-recursive code path works
+ svntest.actions.run_and_verify_svn(["Reverted '%s'\n" % sbox.ospath('alpha')],
+ [],
+ 'revert', sbox.ospath('alpha'))
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+@Issue(3859)
+@SkipUnless(svntest.main.server_has_mergeinfo)
+def revert_empty_actual_recursive(sbox):
+ "recursive revert with superfluous actual node"
+
+ create_superflous_actual_node(sbox)
+ wc_dir = sbox.wc_dir
+
+ # Recursive code path fails, the superfluous actual node suppresses the
+ # notification
+ svntest.actions.run_and_verify_svn(["Reverted '%s'\n" % sbox.ospath('alpha')],
+ [],
+ 'revert', '-R', sbox.ospath('alpha'))
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+@Issue(3879)
+def revert_tree_conflicts_with_replacements(sbox):
+ "revert tree conflicts with replacements"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ wc = sbox.ospath
+
+ # Use case 1: local replace, incoming replace
+ # A/mu
+ # A/D/H --> A/D/H/chi, A/D/H/{loc,inc}_psi
+
+ # Use case 2: local edit, incoming replace
+ # A/D/gamma
+ # A/D/G --> A/D/G/pi, A/D/G/inc_rho
+
+ # Use case 3: local replace, incoming edit
+ # A/B/lambda
+ # A/B/E --> A/B/E/alpha, A/B/E/loc_beta
+
+ # Case 1: incoming replacements
+ sbox.simple_rm('A/mu', 'A/D/H')
+ file_write(wc('A/mu'), "A fresh file.\n")
+ os.mkdir(wc('A/D/H'))
+ file_write(wc('A/D/H/chi'), "A fresh file.\n")
+ file_write(wc('A/D/H/inc_psi'), "A fresh file.\n")
+ sbox.simple_add('A/mu', 'A/D/H')
+
+ # Case 2: incoming replacements
+ sbox.simple_rm('A/D/gamma', 'A/D/G')
+ file_write(wc('A/D/gamma'), "A fresh file.\n")
+ os.mkdir(wc('A/D/G'))
+ file_write(wc('A/D/G/pi'), "A fresh file.\n")
+ file_write(wc('A/D/G/inc_rho'), "A fresh file.\n")
+ sbox.simple_add('A/D/gamma','A/D/G')
+
+ # Case 3: incoming edits
+ file_append(wc('A/B/lambda'), "Incoming!\n")
+ file_write(wc('A/B/E/alpha'), "Incoming!.\n")
+
+ # Commit and roll back to r1.
+ sbox.simple_commit()
+ run_svn(None, 'up', wc_dir, '-r1', '-q')
+
+ # Case 1: local replacements
+ sbox.simple_rm('A/mu', 'A/D/H')
+ file_write(wc('A/mu'), "A fresh file.\n")
+ os.mkdir(wc('A/D/H'))
+ file_write(wc('A/D/H/chi'), "A fresh local file.\n")
+ file_write(wc('A/D/H/loc_psi'), "A fresh local file.\n")
+ sbox.simple_add('A/mu', 'A/D/H')
+
+ # Case 2: local edits
+ file_append(wc('A/D/gamma'), "Local change.\n")
+ file_append(wc('A/D/G/pi'), "Local change.\n")
+
+ # Case 3: local replacements
+ sbox.simple_rm('A/B/lambda', 'A/B/E')
+ file_write(wc('A/B/lambda'), "A fresh local file.\n")
+ os.mkdir(wc('A/B/E'))
+ file_write(wc('A/B/E/alpha'), "A fresh local file.\n")
+ file_write(wc('A/B/E/loc_beta'), "A fresh local file.\n")
+ sbox.simple_add('A/B/lambda', 'A/B/E')
+
+ # Update and check tree conflict status.
+ run_svn(None, 'up', wc_dir)
+ expected_status = svntest.wc.State(wc_dir, {
+ '' : Item(status=' ', wc_rev=2),
+ 'A' : Item(status=' ', wc_rev=2),
+ 'A/B' : Item(status=' ', wc_rev=2),
+ 'A/B/E' : Item(status='R ', wc_rev=2, treeconflict='C'),
+ 'A/B/E/alpha' : Item(status='A ', wc_rev='-'),
+ 'A/B/E/beta' : Item(status='D ', wc_rev=2),
+ 'A/B/E/loc_beta' : Item(status='A ', wc_rev='-'),
+ 'A/B/F' : Item(status=' ', wc_rev=2),
+ 'A/B/lambda' : Item(status='R ', wc_rev=2, treeconflict='C'),
+ 'A/C' : Item(status=' ', wc_rev=2),
+ 'A/D' : Item(status=' ', wc_rev=2),
+ 'A/D/G' : Item(status='R ', wc_rev='-', copied='+',
+ treeconflict='C'),
+ 'A/D/G/inc_rho' : Item(status='D ', wc_rev=2),
+ 'A/D/G/pi' : Item(status='M ', wc_rev='-', copied='+'),
+ 'A/D/G/rho' : Item(status=' ', wc_rev='-', copied='+'),
+ 'A/D/G/tau' : Item(status=' ', wc_rev='-', copied='+'),
+ 'A/D/H' : Item(status='R ', wc_rev=2, treeconflict='C'),
+ 'A/D/H/chi' : Item(status='A ', wc_rev='-'),
+ 'A/D/H/inc_psi' : Item(status='D ', wc_rev=2),
+ 'A/D/H/loc_psi' : Item(status='A ', wc_rev='-'),
+ 'A/D/gamma' : Item(status='R ', wc_rev='-', copied='+',
+ treeconflict='C'),
+ 'A/mu' : Item(status='R ', wc_rev=2, treeconflict='C'),
+ 'iota' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_unquiet_status(wc_dir, expected_status)
+
+ def cd_and_status_u(dir_target):
+ was_cwd = os.getcwd()
+ os.chdir(os.path.abspath(wc(dir_target)))
+ run_svn(None, 'status', '-u')
+ os.chdir(was_cwd)
+
+ cd_and_status_u('A')
+ cd_and_status_u('A/D')
+
+ # Until r1102143, the following 'status -u' commands failed with "svn:
+ # E165004: Two top-level reports with no target".
+ cd_and_status_u('A/D/G')
+ cd_and_status_u('A/D/H')
+
+ # Revert everything (i.e., accept "theirs-full").
+ svntest.actions.run_and_verify_revert([
+ wc('A/B/E'),
+ wc('A/B/E/alpha'), # incoming & local
+ wc('A/B/E/beta'),
+ wc('A/B/E/loc_beta'),
+ wc('A/B/lambda'),
+ wc('A/D/G'),
+ wc('A/D/G/pi'),
+ wc('A/D/G/inc_rho'), # incoming
+ wc('A/D/G/rho'),
+ wc('A/D/G/tau'),
+ wc('A/D/H'),
+ wc('A/D/H/chi'),
+ wc('A/D/H/inc_psi'), # incoming
+ wc('A/D/H/loc_psi'),
+ wc('A/D/gamma'),
+ wc('A/mu'),
+ ], '-R', wc_dir)
+
+ # Remove a few unversioned files that revert left behind.
+ os.remove(wc('A/B/E/loc_beta'))
+ os.remove(wc('A/D/H/loc_psi'))
+
+ # The update operation should have put all incoming items in place.
+ expected_status = svntest.wc.State(wc_dir, {
+ '' : Item(status=' ', wc_rev=2),
+ 'A' : Item(status=' ', wc_rev=2),
+ 'A/B' : Item(status=' ', wc_rev=2),
+ 'A/B/E' : Item(status=' ', wc_rev=2),
+ 'A/B/E/alpha' : Item(status=' ', wc_rev=2),
+ 'A/B/E/beta' : Item(status=' ', wc_rev=2),
+ 'A/B/F' : Item(status=' ', wc_rev=2),
+ 'A/B/lambda' : Item(status=' ', wc_rev=2),
+ 'A/C' : Item(status=' ', wc_rev=2),
+ 'A/D' : Item(status=' ', wc_rev=2),
+ 'A/D/G' : Item(status=' ', wc_rev=2),
+ 'A/D/G/inc_rho' : Item(status=' ', wc_rev=2),
+ 'A/D/G/pi' : Item(status=' ', wc_rev=2),
+ 'A/D/H' : Item(status=' ', wc_rev=2),
+ 'A/D/H/chi' : Item(status=' ', wc_rev=2),
+ 'A/D/H/inc_psi' : Item(status=' ', wc_rev=2),
+ 'A/D/gamma' : Item(status=' ', wc_rev=2),
+ 'A/mu' : Item(status=' ', wc_rev=2),
+ 'iota' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_unquiet_status(wc_dir, expected_status)
+
+def create_no_text_change_conflict(sbox):
+ "create conflict with no text change"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ shutil.copyfile(sbox.ospath('A/B/E/alpha'), sbox.ospath('A/B/E/alpha-copy'))
+ svntest.main.file_append(sbox.ospath('A/B/E/alpha'), 'their text\n')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Update to create a conflict
+ svntest.main.file_append(sbox.ospath('A/B/E/alpha'), 'my text\n')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '-r1', '--accept', 'postpone',
+ wc_dir)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/E/alpha', status='C ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Reset the text with the file still marked as a conflict
+ os.remove(sbox.ospath('A/B/E/alpha'))
+ shutil.move(sbox.ospath('A/B/E/alpha-copy'), sbox.ospath('A/B/E/alpha'))
+
+@Issue(3859)
+def revert_no_text_change_conflict(sbox):
+ "revert conflict with no text change"
+
+ create_no_text_change_conflict(sbox)
+ wc_dir = sbox.wc_dir
+
+ svntest.actions.run_and_verify_svn(["Reverted '%s'\n"
+ % sbox.ospath('A/B/E/alpha')],
+ [],
+ 'revert', sbox.ospath('A/B/E/alpha'))
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+@Issue(3859)
+def revert_no_text_change_conflict_recursive(sbox):
+ "revert -R conflict with no text change"
+
+ create_no_text_change_conflict(sbox)
+ wc_dir = sbox.wc_dir
+
+ svntest.actions.run_and_verify_svn(["Reverted '%s'\n"
+ % sbox.ospath('A/B/E/alpha')],
+ [],
+ 'revert', '-R', wc_dir)
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+@Issue(3938)
+def revert_with_unversioned_targets(sbox):
+ "revert with unversioned targets"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ chi_path = sbox.ospath('A/D/H/chi')
+ delta_path = sbox.ospath('A/D/H/delta')
+ psi_path = sbox.ospath('A/D/H/psi')
+
+ chi_contents = "modified chi\n"
+ delta_contents = "This is the unversioned file 'delta'.\n"
+ psi_contents = "modified psi\n"
+
+ # touch delta
+ open(delta_path, 'w').write(delta_contents)
+
+ # modify chi psi
+ open(chi_path, 'w').write(chi_contents)
+ open(psi_path, 'w').write(psi_contents)
+
+ # revert
+ expected_output = svntest.verify.UnorderedOutput([
+ "Reverted '%s'\n" % sbox.ospath('A/D/H/chi'),
+ "Skipped '%s'\n" % sbox.ospath('A/D/H/delta'),
+ "Reverted '%s'\n" % sbox.ospath('A/D/H/psi'),
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'revert', chi_path, delta_path, psi_path)
+
+ # verify status
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/D/H/delta': Item(status='? '),
+ })
+ svntest.actions.run_and_verify_unquiet_status(wc_dir, expected_status)
+
+ # verify disk
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/D/H/delta': Item(delta_contents),
+ })
+ svntest.actions.verify_disk(wc_dir, expected_disk.old_tree(), True)
+
+def revert_nonexistent(sbox):
+ 'svn revert -R nonexistent'
+ sbox.build(read_only=True)
+ svntest.actions.run_and_verify_svn('Skipped.*nonexistent', [],
+ 'revert', '-R', sbox.ospath('nonexistent'))
+
+@Issue(4168)
+def revert_obstructing_wc(sbox):
+ "revert with an obstructing working copy"
+
+ sbox.build(create_wc=False, read_only=True)
+ wc_dir = sbox.wc_dir
+
+ expected_output = svntest.wc.State(wc_dir, {})
+ expected_disk = svntest.wc.State(wc_dir, {})
+
+ # Checkout wc as depth empty
+ svntest.actions.run_and_verify_checkout(sbox.repo_url, wc_dir,
+ expected_output, expected_disk,
+ [],
+ '--depth', 'empty')
+
+ # And create an obstructing working copy as A
+ svntest.actions.run_and_verify_checkout(sbox.repo_url, wc_dir + '/A',
+ expected_output, expected_disk,
+ [],
+ '--depth', 'empty')
+
+ # Now try to fetch the entire wc, which will find an obstruction
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(verb='Skipped'),
+ 'iota' : Item(status='A '),
+ })
+ expected_status = svntest.wc.State(wc_dir, {
+ '' : Item(status=' ', wc_rev='1'),
+ 'iota' : Item(status=' ', wc_rev='1'),
+ # A is not versioned but exists
+ })
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output, None,
+ expected_status,
+ [], False,
+ wc_dir, '--set-depth', 'infinity')
+
+ # Revert should do nothing (no local changes), and report the obstruction
+ # (reporting the obstruction is nice for debugging, but not really required
+ # in this specific case, as the node was not modified)
+ svntest.actions.run_and_verify_svn("Skipped '.*A' -- .*obstruct.*", [],
+ 'revert', '-R', wc_dir)
+
+def revert_moved_dir_partial(sbox):
+ "partial revert moved_dir"
+
+ sbox.build(read_only = True)
+
+ sbox.simple_move('A', 'A_')
+ svntest.actions.run_and_verify_svn(None, [], 'revert', sbox.ospath('A'))
+
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ revert_from_wc_root,
+ revert_reexpand_keyword,
+ revert_replaced_file_without_props,
+ revert_moved_file,
+ revert_wc_to_wc_replace_with_props,
+ revert_file_merge_replace_with_history,
+ revert_repos_to_wc_replace_with_props,
+ revert_after_second_replace,
+ revert_after_manual_conflict_resolution__text,
+ revert_after_manual_conflict_resolution__prop,
+ revert_propset__dir,
+ revert_propset__file,
+ revert_propdel__dir,
+ revert_propdel__file,
+ revert_replaced_with_history_file_1,
+ status_of_missing_dir_after_revert,
+ status_of_missing_dir_after_revert_replaced_with_history_dir,
+ revert_replaced_with_history_file_2,
+ revert_tree_conflicts_in_updated_files,
+ revert_add_over_not_present_dir,
+ revert_added_tree,
+ revert_child_of_copy,
+ revert_non_recusive_after_delete,
+ revert_permissions_only,
+ revert_copy_depth_files,
+ revert_nested_add_depth_immediates,
+ revert_empty_actual,
+ revert_tree_conflicts_with_replacements,
+ revert_empty_actual_recursive,
+ revert_no_text_change_conflict,
+ revert_no_text_change_conflict_recursive,
+ revert_with_unversioned_targets,
+ revert_nonexistent,
+ revert_obstructing_wc,
+ revert_moved_dir_partial,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/schedule_tests.py b/subversion/tests/cmdline/schedule_tests.py
new file mode 100755
index 0000000..ee46175
--- /dev/null
+++ b/subversion/tests/cmdline/schedule_tests.py
@@ -0,0 +1,755 @@
+#!/usr/bin/env python
+#
+# schedule_tests.py: testing working copy scheduling
+# (adds, deletes, reversion)
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os, logging, stat
+
+logger = logging.getLogger()
+
+# Our testing module
+import svntest
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+exp_noop_up_out = svntest.actions.expected_noop_update_output
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+#
+
+#######################################################################
+# Stage I - Schedules and modifications, verified with `svn status'
+#
+# These tests make schedule changes and local mods, and verify that status
+# output is as expected. In a second stage, reversion of these changes is
+# tested. Potentially, a third stage could test committing these same
+# changes.
+#
+# NOTE: these tests are run within the Stage II tests, not on their own.
+#
+
+def add_files(sbox):
+ "schedule: add some files"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ # Create some files, then schedule them for addition
+ delta_path = sbox.ospath('delta')
+ zeta_path = sbox.ospath('A/B/zeta')
+ epsilon_path = sbox.ospath('A/D/G/epsilon')
+
+ svntest.main.file_append(delta_path, "This is the file 'delta'.")
+ svntest.main.file_append(zeta_path, "This is the file 'zeta'.")
+ svntest.main.file_append(epsilon_path, "This is the file 'epsilon'.")
+
+ sbox.simple_add('delta', 'A/B/zeta', 'A/D/G/epsilon')
+
+ # Make sure the adds show up as such in status
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'delta' : Item(status='A ', wc_rev=0),
+ 'A/B/zeta' : Item(status='A ', wc_rev=0),
+ 'A/D/G/epsilon' : Item(status='A ', wc_rev=0),
+ })
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+
+def add_directories(sbox):
+ "schedule: add some directories"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ # Create some directories, then schedule them for addition
+ X_path = sbox.ospath('X')
+ Y_path = sbox.ospath('A/C/Y')
+ Z_path = sbox.ospath('A/D/H/Z')
+
+ os.mkdir(X_path)
+ os.mkdir(Y_path)
+ os.mkdir(Z_path)
+
+ sbox.simple_add('X', 'A/C/Y', 'A/D/H/Z')
+
+ # Make sure the adds show up as such in status
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'X' : Item(status='A ', wc_rev=0),
+ 'A/C/Y' : Item(status='A ', wc_rev=0),
+ 'A/D/H/Z' : Item(status='A ', wc_rev=0),
+ })
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+
+def nested_adds(sbox):
+ "schedule: add some nested files and directories"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ # Create some directories then schedule them for addition
+ X_path = sbox.ospath('X')
+ Y_path = sbox.ospath('A/C/Y')
+ Z_path = sbox.ospath('A/D/H/Z')
+
+ os.mkdir(X_path)
+ os.mkdir(Y_path)
+ os.mkdir(Z_path)
+
+ # Now, create some files and directories to put into our newly added
+ # directories
+ P_path = sbox.ospath('X/P')
+ Q_path = sbox.ospath('A/C/Y/Q')
+ R_path = sbox.ospath('A/D/H/Z/R')
+
+ os.mkdir(P_path)
+ os.mkdir(Q_path)
+ os.mkdir(R_path)
+
+ delta_path = sbox.ospath('X/delta')
+ epsilon_path = sbox.ospath('A/C/Y/epsilon')
+ upsilon_path = sbox.ospath('A/C/Y/upsilon')
+ zeta_path = sbox.ospath('A/D/H/Z/zeta')
+
+ svntest.main.file_append(delta_path, "This is the file 'delta'.")
+ svntest.main.file_append(epsilon_path, "This is the file 'epsilon'.")
+ svntest.main.file_append(upsilon_path, "This is the file 'upsilon'.")
+ svntest.main.file_append(zeta_path, "This is the file 'zeta'.")
+
+ # Finally, let's try adding our new files and directories
+ sbox.simple_add('X', 'A/C/Y', 'A/D/H/Z')
+
+ # Make sure the adds show up as such in status
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'X' : Item(status='A ', wc_rev=0),
+ 'A/C/Y' : Item(status='A ', wc_rev=0),
+ 'A/D/H/Z' : Item(status='A ', wc_rev=0),
+ 'X/P' : Item(status='A ', wc_rev=0),
+ 'A/C/Y/Q' : Item(status='A ', wc_rev=0),
+ 'A/D/H/Z/R' : Item(status='A ', wc_rev=0),
+ 'X/delta' : Item(status='A ', wc_rev=0),
+ 'A/C/Y/epsilon' : Item(status='A ', wc_rev=0),
+ 'A/C/Y/upsilon' : Item(status='A ', wc_rev=0),
+ 'A/D/H/Z/zeta' : Item(status='A ', wc_rev=0),
+ })
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+
+def add_executable(sbox):
+ "schedule: add some executable files"
+
+ sbox.build(read_only = True)
+
+ def runTest(wc_dir, fileName, perm, executable):
+ file_ospath = sbox.ospath(fileName)
+ if executable:
+ expected_out = ["*\n"]
+ expected_err = []
+ else:
+ expected_out = []
+ expected_err = '.*W200017: Property.*not found'
+
+ # create an empty file
+ open(file_ospath, "w")
+
+ os.chmod(file_ospath, perm)
+ sbox.simple_add(fileName)
+ svntest.actions.run_and_verify_svn(expected_out, expected_err,
+ 'propget', "svn:executable", file_ospath)
+
+ test_cases = [
+ ("all_exe", svntest.main.S_ALL_RWX, 1),
+ ("none_exe", svntest.main.S_ALL_RW, 0),
+ ("user_exe", svntest.main.S_ALL_RW | stat.S_IXUSR, 1),
+ ("group_exe", svntest.main.S_ALL_RW | stat.S_IXGRP, 0),
+ ("other_exe", svntest.main.S_ALL_RW | stat.S_IXOTH, 0),
+ ]
+ for test_case in test_cases:
+ runTest(sbox.wc_dir, *test_case)
+
+#----------------------------------------------------------------------
+
+def delete_files(sbox):
+ "schedule: delete some files"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ # Schedule some files for deletion
+ sbox.simple_rm('iota', 'A/mu', 'A/D/G/rho', 'A/D/H/omega')
+
+ # Make sure the deletes show up as such in status
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', 'A/mu', 'A/D/G/rho', 'A/D/H/omega',
+ status='D ')
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+
+def delete_dirs(sbox):
+ "schedule: delete some directories"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ # Schedule some directories for deletion (this is recursive!)
+ sbox.simple_rm('A/B/E', 'A/B/F', 'A/D/H')
+
+ # Make sure the deletes show up as such in status
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/E', 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/B/F',
+ 'A/D/H', 'A/D/H/chi', 'A/D/H/omega', 'A/D/H/psi',
+ status='D ')
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+#######################################################################
+# Stage II - Reversion of changes made in Stage I
+#
+# Each test in Stage II calls the corresponding Stage I test
+# and then also tests reversion of those changes.
+#
+
+def check_reversion(files, output):
+ expected_output = []
+ for file in files:
+ expected_output = expected_output + ["Reverted '" + file + "'\n"]
+ output.sort()
+ expected_output.sort()
+ if output != expected_output:
+ logger.warn("Expected output: %s", expected_output)
+ logger.warn("Actual output: %s", output)
+ raise svntest.Failure
+
+#----------------------------------------------------------------------
+
+def revert_add_files(sbox):
+ "revert: add some files"
+
+ add_files(sbox)
+ wc_dir = sbox.wc_dir
+
+ # Revert our changes recursively from wc_dir.
+ delta_path = sbox.ospath('delta')
+ zeta_path = sbox.ospath('A/B/zeta')
+ epsilon_path = sbox.ospath('A/D/G/epsilon')
+ files = [delta_path, zeta_path, epsilon_path]
+
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'revert',
+ '--recursive',
+ wc_dir)
+ check_reversion(files, output)
+
+#----------------------------------------------------------------------
+
+def revert_add_directories(sbox):
+ "revert: add some directories"
+
+ add_directories(sbox)
+ wc_dir = sbox.wc_dir
+
+ # Revert our changes recursively from wc_dir.
+ X_path = sbox.ospath('X')
+ Y_path = sbox.ospath('A/C/Y')
+ Z_path = sbox.ospath('A/D/H/Z')
+ files = [X_path, Y_path, Z_path]
+
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'revert',
+ '--recursive',
+ wc_dir)
+ check_reversion(files, output)
+
+#----------------------------------------------------------------------
+
+def revert_nested_adds(sbox):
+ "revert: add some nested files and directories"
+
+ nested_adds(sbox)
+ wc_dir = sbox.wc_dir
+
+ # Revert our changes recursively from wc_dir.
+ X_path = sbox.ospath('X')
+ Y_path = sbox.ospath('A/C/Y')
+ Z_path = sbox.ospath('A/D/H/Z')
+ files = ([X_path, Y_path, Z_path]
+ + [os.path.join(X_path, child)
+ for child in ['P', 'delta']]
+ + [os.path.join(Y_path, child)
+ for child in ['Q', 'epsilon', 'upsilon']]
+ + [os.path.join(Z_path, child)
+ for child in ['R', 'zeta']])
+
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'revert',
+ '--recursive',
+ wc_dir)
+ check_reversion(files, output)
+
+#----------------------------------------------------------------------
+@SkipUnless(svntest.main.is_posix_os)
+def revert_add_executable(sbox):
+ "revert: add some executable files"
+
+ add_executable(sbox)
+ wc_dir = sbox.wc_dir
+
+ all_path = sbox.ospath('all_exe')
+ none_path = sbox.ospath('none_exe')
+ user_path = sbox.ospath('user_exe')
+ group_path = sbox.ospath('group_exe')
+ other_path = sbox.ospath('other_exe')
+ files = [all_path, none_path, user_path, group_path, other_path]
+
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'revert',
+ '--recursive',
+ wc_dir)
+ check_reversion(files, output)
+
+#----------------------------------------------------------------------
+
+def revert_delete_files(sbox):
+ "revert: delete some files"
+
+ delete_files(sbox)
+ wc_dir = sbox.wc_dir
+
+ # Revert our changes recursively from wc_dir.
+ iota_path = sbox.ospath('iota')
+ mu_path = sbox.ospath('A/mu')
+ rho_path = sbox.ospath('A/D/G/rho')
+ omega_path = sbox.ospath('A/D/H/omega')
+ files = [iota_path, mu_path, omega_path, rho_path]
+
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'revert',
+ '--recursive',
+ wc_dir)
+ check_reversion(files, output)
+
+#----------------------------------------------------------------------
+
+def revert_delete_dirs(sbox):
+ "revert: delete some directories"
+
+ delete_dirs(sbox)
+ wc_dir = sbox.wc_dir
+
+ # Revert our changes recursively from wc_dir.
+ E_path = sbox.ospath('A/B/E')
+ F_path = sbox.ospath('A/B/F')
+ H_path = sbox.ospath('A/D/H')
+ alpha_path = sbox.ospath('A/B/E/alpha')
+ beta_path = sbox.ospath('A/B/E/beta')
+ chi_path = sbox.ospath('A/D/H/chi')
+ omega_path = sbox.ospath('A/D/H/omega')
+ psi_path = sbox.ospath('A/D/H/psi')
+ files = [E_path, F_path, H_path,
+ alpha_path, beta_path, chi_path, omega_path, psi_path]
+
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'revert',
+ '--recursive',
+ wc_dir)
+ check_reversion(files, output)
+
+
+#######################################################################
+# Regression tests
+#
+
+#----------------------------------------------------------------------
+# Regression test for issue #863:
+#
+# Suppose here is a scheduled-add file or directory which is
+# also missing. If I want to make the working copy forget all
+# knowledge of the item ("unschedule" the addition), then either 'svn
+# revert' or 'svn rm' will make that happen by removing the entry from
+# .svn/entries file. While 'svn revert' does with no error,
+# 'svn rm' does it with error.
+@Issue(863)
+def unschedule_missing_added(sbox):
+ "unschedule addition on missing items"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ # Create some files and dirs, then schedule them for addition
+ file1_path = sbox.ospath('file1')
+ file2_path = sbox.ospath('file2')
+ dir1_path = sbox.ospath('dir1')
+ dir2_path = sbox.ospath('dir2')
+
+ svntest.main.file_append(file1_path, "This is the file 'file1'.")
+ svntest.main.file_append(file2_path, "This is the file 'file2'.")
+ sbox.simple_add('file1', 'file2')
+ sbox.simple_mkdir('dir1', 'dir2')
+
+ # Make sure the 4 adds show up as such in status
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'file1' : Item(status='A ', wc_rev=0),
+ 'file2' : Item(status='A ', wc_rev=0),
+ 'dir1' : Item(status='A ', wc_rev=0),
+ 'dir2' : Item(status='A ', wc_rev=0),
+ })
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Poof, all 4 added things are now missing in action.
+ os.remove(file1_path)
+ os.remove(file2_path)
+ svntest.main.safe_rmtree(dir1_path)
+ svntest.main.safe_rmtree(dir2_path)
+
+ # Unschedule the additions, using 'svn rm' and 'svn revert'.
+ # FILE1_PATH will throw an error. DIR1_PATH will not since the stub is
+ # still available in the parent directory.
+ svntest.main.run_svn(svntest.verify.AnyOutput, 'rm', file1_path)
+ ### actually, the stub does not provide enough information to revert
+ ### the addition, so this command will fail. marking as XFail
+ sbox.simple_rm('dir1')
+ sbox.simple_revert('file2', 'dir2')
+
+ # 'svn st' should now show absolutely zero local mods.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+# Regression test for issue #962:
+#
+# Make sure 'rm foo; svn rm foo' works on files and directories.
+# Also make sure that the deletion is committable.
+@Issue(962)
+def delete_missing(sbox):
+ "schedule and commit deletion on missing items"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ mu_path = sbox.ospath('A/mu')
+ H_path = sbox.ospath('A/D/H')
+
+ # Manually remove a file and a directory.
+ os.remove(mu_path)
+ svntest.main.safe_rmtree(H_path)
+
+ # Now schedule them for deletion anyway, and make sure no error is output.
+ sbox.simple_rm('A/mu', 'A/D/H')
+
+ # Commit the deletions.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Deleting'),
+ 'A/D/H' : Item(verb='Deleting'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.remove('A/mu', 'A/D/H',
+ 'A/D/H/psi', 'A/D/H/omega', 'A/D/H/chi')
+ expected_status.tweak(wc_rev=1)
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+# Regression test for issue #854:
+# Revert . inside an svn added empty directory should generate an error.
+# Not anymore! wc-ng uses absolute paths for everything, which means we
+# can handle this case without too much trouble.
+@Issue(854)
+def revert_inside_newly_added_dir(sbox):
+ "revert inside a newly added dir"
+
+ sbox.build(read_only = True)
+
+ # Schedule a new directory for addition
+ sbox.simple_mkdir('foo')
+
+ # Now change into the newly added directory, revert and make sure
+ # no error is output.
+ os.chdir(sbox.ospath('foo'))
+ svntest.main.run_svn(None, 'revert', '.')
+
+#----------------------------------------------------------------------
+# Regression test for issue #1609:
+# 'svn status' should show a schedule-add directory as 'A' not '?'
+@Issue(1609)
+def status_add_deleted_directory(sbox):
+ "status after add of deleted directory"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # The original recipe:
+ #
+ # svnadmin create repo
+ # svn mkdir file://`pwd`/repo/foo -m r1
+ # svn co file://`pwd`/repo wc
+ # svn rm wc/foo
+ # rm -rf wc/foo
+ # svn ci wc -m r2
+ # svn mkdir wc/foo
+
+ A_path = sbox.ospath('A')
+
+ sbox.simple_rm('A')
+ svntest.main.safe_rmtree(A_path)
+ sbox.simple_commit()
+
+ sbox.simple_mkdir('A')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status = svntest.wc.State(wc_dir,
+ { '' : Item(status=' ', wc_rev=1),
+ 'A' : Item(status='A ', wc_rev=0),
+ 'iota' : Item(status=' ', wc_rev=1),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Update will *not* remove the entry for A despite it being marked
+ # deleted.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(2), [],
+ 'up', wc_dir)
+ expected_status.tweak('', 'iota', wc_rev=2)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+#----------------------------------------------------------------------
+# Regression test for issue #939:
+# Recursive 'svn add' should still traverse already-versioned dirs.
+@Issue(939)
+@Issue(4241)
+def add_recursive_already_versioned(sbox):
+ "'svn add' should traverse already-versioned dirs"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Create some files, then schedule them for addition
+ delta_path = sbox.ospath('delta')
+ zeta_path = sbox.ospath('A/B/zeta')
+ epsilon_path = sbox.ospath('A/D/G/epsilon')
+
+ svntest.main.file_append(delta_path, "This is the file 'delta'.")
+ svntest.main.file_append(zeta_path, "This is the file 'zeta'.")
+ svntest.main.file_append(epsilon_path, "This is the file 'epsilon'.")
+
+ # Make sure the adds show up as such in status
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'delta' : Item(status='A ', wc_rev=0),
+ 'A/B/zeta' : Item(status='A ', wc_rev=0),
+ 'A/D/G/epsilon' : Item(status='A ', wc_rev=0),
+ })
+
+ # Perform the add with the --force flag, and check the status.
+ ### TODO: This part won't work -- you have to be inside the working copy
+ ### or else Subversion will think you're trying to add the working copy
+ ### to its parent directory, and will (possibly, if the parent directory
+ ### isn't versioned) fail.
+ svntest.main.run_svn(None, 'add', '--force', wc_dir)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Now revert, and do the adds again from inside the working copy.
+ svntest.main.run_svn(None, 'revert', '--recursive', wc_dir)
+ saved_wd = os.getcwd()
+ os.chdir(wc_dir)
+ svntest.main.run_svn(None, 'add', '--force', '.')
+ os.chdir(saved_wd)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+#----------------------------------------------------------------------
+# Regression test for the case where "svn mkdir" outside a working copy
+# would create a directory, but then not clean up after itself when it
+# couldn't add it to source control.
+def fail_add_directory(sbox):
+ "'svn mkdir' should clean up after itself on error"
+ # This test doesn't use a working copy
+ svntest.main.safe_rmtree(sbox.wc_dir)
+ os.makedirs(sbox.wc_dir)
+
+ os.chdir(sbox.wc_dir)
+ svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
+ 'mkdir', 'A')
+ if os.path.exists('A'):
+ raise svntest.Failure('svn mkdir created an unversioned directory')
+
+
+#----------------------------------------------------------------------
+# Regression test for #2440
+# Ideally this test should test for the exit status of the
+# 'svn rm non-existent' invocation.
+# As the corresponding change to get the exit code of svn binary invoked needs
+# a change in many testcase, for now this testcase checks the stderr.
+def delete_non_existent(sbox):
+ "'svn rm non-existent' should exit with an error"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ os.chdir(wc_dir)
+ svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput,
+ 'rm', '--force', 'non-existent')
+
+
+#----------------------------------------------------------------------
+# Problem encountered by cmpilato when he inadvertantly upset an
+# 'svn rm --keep-local' and had to retry it.
+def delete_redelete_fudgery(sbox):
+ "retry of manually upset --keep-local deletion"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ B_path = os.path.join(wc_dir, 'A', 'B')
+
+ # Delete 'A/B' using --keep-local, then remove at the OS level.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', '--keep-local', B_path)
+ svntest.main.safe_rmtree(B_path)
+
+ # Update the tree.
+ #
+ ### When WC-NG is running in single-DB mode (one .svn directory and
+ ### database for the whole working copy), I suspect that this update
+ ### will change. Today it re-adds the directory which we just
+ ### scheduled for deletion because the only record of that
+ ### scheduling is stored -- you guessed it -- the directory's .svn/
+ ### area... which we just deleted from disk.
+ ###
+ ### In single-DB-WC-NG-land, though, deleting the directory from
+ ### disk should have no bearing whatsoever on the scheduling
+ ### information stored now in the working copy root's one DB. That
+ ### could change the whole flow of this test, possible leading us to
+ ### remove it as altogether irrelevant. --cmpilato
+ svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+
+ # Now try to run
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', '--keep-local', B_path)
+
+def propset_on_deleted_should_fail(sbox):
+ "calling svn propset on a deleted node should fail"
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ iota = os.path.join(wc_dir, 'iota')
+
+ svntest.actions.run_and_verify_svn(None, [], 'rm', iota)
+
+ svntest.actions.run_and_verify_svn(None, "svn: E155023: Can't set propert.*",
+ 'ps', 'prop', 'val', iota)
+
+@Issue(3468)
+def replace_dir_delete_child(sbox):
+ "replace a dir, then delete a child"
+ # The purpose of this test is to make sure that when a child of a
+ # replaced directory is deleted, the result can be committed.
+
+ sbox.build()
+
+ # Replace A/D/H with a copy of A/B
+ sbox.simple_rm('A/D/H')
+ sbox.simple_copy('A/B', 'A/D/H')
+
+ # Remove two children
+ sbox.simple_rm('A/D/H/lambda')
+ sbox.simple_rm('A/D/H/E')
+
+ # Don't look at what "svn status" says before commit. It's not clear
+ # what it should be and that's not the point of this test.
+
+ # Commit.
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ 'A/D/H' : Item(verb='Replacing'),
+ 'A/D/H/lambda' : Item(verb='Deleting'),
+ 'A/D/H/E' : Item(verb='Deleting'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ expected_status.add({
+ 'A/D/H/F' : Item(status=' ', wc_rev=0),
+ })
+ expected_status.tweak('A/D/H', 'A/D/H/F', wc_rev=2)
+ expected_status.remove('A/D/H/psi', 'A/D/H/omega', 'A/D/H/chi')
+
+ svntest.actions.run_and_verify_commit(sbox.wc_dir,
+ expected_output,
+ expected_status)
+
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ revert_add_files,
+ revert_add_directories,
+ revert_nested_adds,
+ revert_add_executable,
+ revert_delete_files,
+ revert_delete_dirs,
+ unschedule_missing_added,
+ delete_missing,
+ revert_inside_newly_added_dir,
+ status_add_deleted_directory,
+ add_recursive_already_versioned,
+ fail_add_directory,
+ delete_non_existent,
+ delete_redelete_fudgery,
+ propset_on_deleted_should_fail,
+ replace_dir_delete_child,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/shelve_tests.py b/subversion/tests/cmdline/shelve_tests.py
new file mode 100755
index 0000000..a71ddbb
--- /dev/null
+++ b/subversion/tests/cmdline/shelve_tests.py
@@ -0,0 +1,176 @@
+#!/usr/bin/env python
+#
+# shelve_tests.py: testing shelving
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import shutil, stat, re, os, logging
+
+logger = logging.getLogger()
+
+# Our testing module
+import svntest
+from svntest import wc
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = wc.StateItem
+
+#----------------------------------------------------------------------
+
+def shelve_unshelve_verify(sbox):
+ """Round-trip: shelve; verify all changes are reverted;
+ unshelve; verify all changes are restored.
+ """
+
+ wc_dir = sbox.wc_dir
+
+ # Save the modified state
+ _, output, _ = svntest.main.run_svn(None, 'status', '-v', '-u', '-q',
+ wc_dir)
+ modified_state = svntest.wc.State.from_status(output, wc_dir)
+
+ # Shelve; check there are no longer any modifications
+ svntest.actions.run_and_verify_svn(None, [],
+ 'shelve', 'foo')
+ virginal_state = svntest.actions.get_virginal_state(wc_dir, 1)
+ svntest.actions.run_and_verify_status(wc_dir, virginal_state)
+
+ # Unshelve; check the original modifications are here again
+ svntest.actions.run_and_verify_svn(None, [],
+ 'unshelve', 'foo')
+ svntest.actions.run_and_verify_status(wc_dir, modified_state)
+
+#----------------------------------------------------------------------
+
+def shelve_unshelve(sbox, modifier):
+ """Round-trip: build 'sbox'; apply changes by calling 'modifier(sbox)';
+ shelve and unshelve; verify changes are fully reverted and restored.
+ """
+
+ sbox.build()
+ was_cwd = os.getcwd()
+ os.chdir(sbox.wc_dir)
+ sbox.wc_dir = ''
+
+ # Make some changes to the working copy
+ modifier(sbox)
+
+ shelve_unshelve_verify(sbox)
+
+ os.chdir(was_cwd)
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+def shelve_text_mods(sbox):
+ "shelve text mods"
+
+ def modifier(sbox):
+ sbox.simple_append('A/mu', 'appended mu text')
+
+ shelve_unshelve(sbox, modifier)
+
+#----------------------------------------------------------------------
+
+def shelve_prop_changes(sbox):
+ "shelve prop changes"
+
+ def modifier(sbox):
+ sbox.simple_propset('p', 'v', 'A')
+ sbox.simple_propset('p', 'v', 'A/mu')
+
+ shelve_unshelve(sbox, modifier)
+
+#----------------------------------------------------------------------
+
+def shelve_adds(sbox):
+ "shelve adds"
+
+ def modifier(sbox):
+ sbox.simple_append('A/new', 'A new file\n')
+ sbox.simple_add('A/new')
+ sbox.simple_append('A/new2', 'A new file\n')
+ sbox.simple_add('A/new2')
+ sbox.simple_propset('p', 'v', 'A/new2')
+
+ shelve_unshelve(sbox, modifier)
+
+#----------------------------------------------------------------------
+
+@XFail()
+@Issue(4709)
+def shelve_deletes(sbox):
+ "shelve deletes"
+
+ def modifier(sbox):
+ sbox.simple_rm('A/mu')
+
+ shelve_unshelve(sbox, modifier)
+
+#----------------------------------------------------------------------
+
+def shelve_from_inner_path(sbox):
+ "shelve from inner path"
+
+ def modifier(sbox):
+ sbox.simple_append('A/mu', 'appended mu text')
+
+ sbox.build()
+ was_cwd = os.getcwd()
+ os.chdir(sbox.ospath('A'))
+ sbox.wc_dir = '..'
+
+ modifier(sbox)
+ shelve_unshelve_verify(sbox)
+
+ os.chdir(was_cwd)
+
+#----------------------------------------------------------------------
+
+########################################################################
+# Run the tests
+
+# list all tests here, starting with None:
+test_list = [ None,
+ shelve_text_mods,
+ shelve_prop_changes,
+ shelve_adds,
+ shelve_deletes,
+ shelve_from_inner_path,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/special_tests.py b/subversion/tests/cmdline/special_tests.py
new file mode 100755
index 0000000..db612c1
--- /dev/null
+++ b/subversion/tests/cmdline/special_tests.py
@@ -0,0 +1,1348 @@
+#!/usr/bin/env python
+#
+# special_tests.py: testing special and reserved file handling
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import sys, os, re, copy, stat
+
+# Our testing module
+import svntest
+
+from svntest.main import server_has_mergeinfo, run_svn, file_write
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+
+#----------------------------------------------------------------------
+def general_symlink(sbox):
+ "general symlink handling"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # First try to just commit a symlink
+ newfile_path = sbox.ospath('newfile')
+
+ sbox.simple_append('linktarget', 'this is just a link target')
+ sbox.simple_add('linktarget')
+ sbox.simple_add_symlink('linktarget', 'newfile')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'newfile' : Item(verb='Adding'),
+ 'linktarget' : Item(verb='Adding'),
+ })
+
+ # Run a diff and verify that we get the correct output
+ exit_code, stdout_lines, stderr_lines = svntest.main.run_svn(1, 'diff',
+ wc_dir)
+
+ regex = '^\+link linktarget'
+ for line in stdout_lines:
+ if re.match(regex, line):
+ break
+ else:
+ raise svntest.Failure
+
+ # Commit and make sure everything is good
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'newfile' : Item(status=' ', wc_rev=2),
+ 'linktarget' : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ ## Now we should update to the previous version, verify that no
+ ## symlink is present, then update back to HEAD and see if the symlink
+ ## is regenerated properly.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '-r', '1', wc_dir)
+
+ # Is the symlink gone?
+ if os.path.isfile(newfile_path) or os.path.islink(newfile_path):
+ raise svntest.Failure
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', '-r', '2', wc_dir)
+
+ # Is the symlink back?
+ if svntest.main.is_posix_os():
+ new_target = os.readlink(newfile_path)
+ if new_target != 'linktarget':
+ raise svntest.Failure
+
+ ## Now change the target of the symlink, verify that it is shown as
+ ## modified and that a commit succeeds.
+ os.remove(newfile_path)
+ if svntest.main.is_posix_os():
+ os.symlink('A', newfile_path)
+ else:
+ sbox.simple_append('newfile', 'link A', truncate = True)
+
+ was_cwd = os.getcwd()
+ os.chdir(wc_dir)
+ svntest.actions.run_and_verify_svn([ "M newfile\n" ], [], 'st')
+
+ os.chdir(was_cwd)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'newfile' : Item(verb='Sending'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'newfile' : Item(status=' ', wc_rev=3),
+ 'linktarget' : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+
+@SkipUnless(svntest.main.is_posix_os)
+def replace_file_with_symlink(sbox):
+ "replace a normal file with a special file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # First replace a normal file with a symlink and make sure we get an
+ # error
+ iota_path = os.path.join(wc_dir, 'iota')
+ os.remove(iota_path)
+ os.symlink('A', iota_path)
+
+ # Does status show the obstruction?
+ was_cwd = os.getcwd()
+ os.chdir(wc_dir)
+ svntest.actions.run_and_verify_svn([ "~ iota\n" ], [], 'st')
+
+ # And does a commit fail?
+ os.chdir(was_cwd)
+ exit_code, stdout_lines, stderr_lines = svntest.main.run_svn(1, 'ci', '-m',
+ 'log msg',
+ wc_dir)
+
+ regex = 'svn: E145001: Commit failed'
+ for line in stderr_lines:
+ if re.match(regex, line):
+ break
+ else:
+ raise svntest.Failure
+
+
+@SkipUnless(svntest.main.is_posix_os)
+def import_export_symlink(sbox):
+ "import and export a symlink"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # create a new symlink to import
+ new_path = os.path.join(wc_dir, 'new_file')
+
+ os.symlink('linktarget', new_path)
+
+ # import this symlink into the repository
+ url = sbox.repo_url + "/dirA/dirB/new_link"
+ exit_code, output, errput = svntest.actions.run_and_verify_svn(
+ None, [], 'import',
+ '-m', 'log msg', new_path, url)
+
+ regex = "(Committed|Imported) revision [0-9]+."
+ for line in output:
+ if re.match(regex, line):
+ break
+ else:
+ raise svntest.Failure
+
+ # remove the unversioned link
+ os.remove(new_path)
+
+ # run update and verify that the symlink is put back into place
+ svntest.actions.run_and_verify_svn(None, [],
+ 'up', wc_dir)
+
+ # Is the symlink back?
+ link_path = wc_dir + "/dirA/dirB/new_link"
+ new_target = os.readlink(link_path)
+ if new_target != 'linktarget':
+ raise svntest.Failure
+
+ ## Now we will try exporting from both the working copy and the
+ ## repository directly, verifying that the symlink is created in
+ ## both cases.
+
+ for export_src, dest_dir in [(sbox.wc_dir, 'export-wc'),
+ (sbox.repo_url, 'export-url')]:
+ export_target = sbox.add_wc_path(dest_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'export', export_src, export_target)
+
+ # is the link at the correct place?
+ link_path = os.path.join(export_target, "dirA/dirB/new_link")
+ new_target = os.readlink(link_path)
+ if new_target != 'linktarget':
+ raise svntest.Failure
+
+
+#----------------------------------------------------------------------
+# Regression test for issue 1986
+@Issue(1986)
+def copy_tree_with_symlink(sbox):
+ "'svn cp dir1 dir2' which contains a symlink"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Create a versioned symlink within directory 'A/D/H'.
+ newfile_path = sbox.ospath('A/D/H/newfile')
+ sbox.simple_append('A/D/H/linktarget', 'this is just a link target')
+ sbox.simple_add('A/D/H/linktarget')
+ sbox.simple_add_symlink('linktarget', 'A/D/H/newfile')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/H/newfile' : Item(verb='Adding'),
+ 'A/D/H/linktarget' : Item(verb='Adding'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/D/H/newfile' : Item(status=' ', wc_rev=2),
+ 'A/D/H/linktarget' : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ # Copy H to H2
+ H_path = os.path.join(wc_dir, 'A', 'D', 'H')
+ H2_path = os.path.join(wc_dir, 'A', 'D', 'H2')
+ svntest.actions.run_and_verify_svn(None, [], 'cp', H_path, H2_path)
+
+ # 'svn status' should show just "A/D/H2 A +". Nothing broken.
+ expected_status.add({
+ 'A/D/H2' : Item(status='A ', copied='+', wc_rev='-'),
+ 'A/D/H2/chi' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D/H2/omega' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D/H2/psi' : Item(status=' ', copied='+', wc_rev='-'),
+ # linktarget and newfile are from r2, while h2 is from r1.
+ 'A/D/H2/linktarget' : Item(status='A ', copied='+', wc_rev='-',
+ entry_status=' '),
+ 'A/D/H2/newfile' : Item(status='A ', copied='+', wc_rev='-',
+ entry_status=' '),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+@SkipUnless(svntest.main.is_posix_os)
+def replace_symlink_with_file(sbox):
+ "replace a special file with a non-special file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Create a new special file and commit it.
+ newfile_path = os.path.join(wc_dir, 'newfile')
+ linktarget_path = os.path.join(wc_dir, 'linktarget')
+ svntest.main.file_append(linktarget_path, 'this is just a link target')
+ os.symlink('linktarget', newfile_path)
+ svntest.main.run_svn(None, 'add', newfile_path, linktarget_path)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'newfile' : Item(verb='Adding'),
+ 'linktarget' : Item(verb='Adding'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'newfile' : Item(status=' ', wc_rev=2),
+ 'linktarget' : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+
+ # Now replace the symlink with a normal file and try to commit, we
+ # should get an error
+ os.remove(newfile_path)
+ svntest.main.file_append(newfile_path, "text of actual file")
+
+ # Does status show the obstruction?
+ was_cwd = os.getcwd()
+ os.chdir(wc_dir)
+ svntest.actions.run_and_verify_svn([ "~ newfile\n" ], [], 'st')
+
+ # And does a commit fail?
+ os.chdir(was_cwd)
+ exit_code, stdout_lines, stderr_lines = svntest.main.run_svn(1, 'ci', '-m',
+ 'log msg',
+ wc_dir)
+
+ regex = 'svn: E145001: Commit failed'
+ for line in stderr_lines:
+ if re.match(regex, line):
+ break
+ else:
+ raise svntest.Failure
+
+
+#----------------------------------------------------------------------
+def remove_symlink(sbox):
+ "remove a symlink"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Commit a symlink
+ newfile_path = os.path.join(wc_dir, 'newfile')
+ linktarget_path = os.path.join(wc_dir, 'linktarget')
+ svntest.main.file_append(linktarget_path, 'this is just a link target')
+ sbox.simple_add_symlink('linktarget', 'newfile')
+ sbox.simple_add('linktarget')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'newfile' : Item(verb='Adding'),
+ 'linktarget' : Item(verb='Adding'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'newfile' : Item(status=' ', wc_rev=2),
+ 'linktarget' : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Now remove it
+ svntest.actions.run_and_verify_svn(None, [], 'rm', newfile_path)
+
+ # Commit and verify that it worked
+ expected_output = svntest.wc.State(wc_dir, {
+ 'newfile' : Item(verb='Deleting'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'linktarget' : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+@Issue(2530)
+def merge_symlink_into_file(sbox):
+ "merge symlink into file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ d_url = sbox.repo_url + '/A/D'
+ dprime_url = sbox.repo_url + '/A/Dprime'
+
+ gamma_path = sbox.ospath('A/D/gamma')
+ gamma_prime_path = sbox.ospath('A/Dprime/gamma')
+
+ # create a copy of the D directory to play with
+ svntest.main.run_svn(None,
+ 'copy', d_url, dprime_url, '-m', 'copy')
+ svntest.main.run_svn(None,
+ 'update', sbox.wc_dir)
+
+ # remove A/Dprime/gamma
+ svntest.main.run_svn(None, 'delete', gamma_prime_path)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/Dprime/gamma' : Item(verb='Deleting'),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, None)
+
+ # Commit a symlink in its place
+ linktarget_path = os.path.join(wc_dir, 'linktarget')
+ svntest.main.file_append(linktarget_path, 'this is just a link target')
+ sbox.simple_add_symlink('linktarget', 'A/Dprime/gamma')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/Dprime/gamma' : Item(verb='Adding'),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, None)
+
+ # merge the creation of the symlink into the original directory
+ svntest.main.run_svn(None,
+ 'merge', '-r', '2:4', dprime_url,
+ os.path.join(wc_dir, 'A', 'D'))
+
+ # now revert, we once got a strange error
+ svntest.main.run_svn(None, 'revert', '-R', wc_dir)
+
+ # assuming we got past the revert because someone fixed that bug, lets
+ # try the merge and a commit, since that apparently used to throw us for
+ # a loop, see issue 2530
+ svntest.main.run_svn(None,
+ 'merge', '-r', '2:4', dprime_url,
+ os.path.join(wc_dir, 'A', 'D'))
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D' : Item(verb='Sending'),
+ 'A/D/gamma' : Item(verb='Replacing'),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, None)
+
+
+
+#----------------------------------------------------------------------
+def merge_file_into_symlink(sbox):
+ "merge file into symlink"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ d_url = sbox.repo_url + '/A/D'
+ dprime_url = sbox.repo_url + '/A/Dprime'
+
+ gamma_path = os.path.join(wc_dir, 'A', 'D', 'gamma')
+ gamma_prime_path = os.path.join(wc_dir, 'A', 'Dprime', 'gamma')
+
+ # create a copy of the D directory to play with
+ svntest.main.run_svn(None,
+ 'copy', d_url, dprime_url, '-m', 'copy')
+ svntest.main.run_svn(None,
+ 'update', sbox.wc_dir)
+
+ # remove A/Dprime/gamma
+ svntest.main.run_svn(None, 'delete', gamma_prime_path)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/Dprime/gamma' : Item(verb='Deleting'),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, None)
+
+ # Commit a symlink in its place
+ linktarget_path = os.path.join(wc_dir, 'linktarget')
+ svntest.main.file_append(linktarget_path, 'this is just a link target')
+ sbox.simple_add_symlink('linktarget', 'A/Dprime/gamma')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/Dprime/gamma' : Item(verb='Adding'),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, None)
+
+ svntest.main.file_write(gamma_path, 'changed file', 'w+')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/gamma' : Item(verb='Sending'),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output, None)
+
+ # ok, now merge the change to the file into the symlink we created, this
+ # gives us a weird error
+ svntest.main.run_svn(None,
+ 'merge', '-r', '4:5', '--allow-mixed-revisions', d_url,
+ os.path.join(wc_dir, 'A', 'Dprime'))
+
+# Issue 2701: Tests to see repository with symlinks can be checked out on all
+# platforms.
+@Issue(2701)
+def checkout_repo_with_symlinks(sbox):
+ "checkout a repository containing symlinks"
+
+ svntest.actions.load_repo(sbox, os.path.join(os.path.dirname(sys.argv[0]),
+ 'special_tests_data',
+ 'symlink.dump'),
+ create_wc=False)
+
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ 'from': Item(status='A '),
+ 'to': Item(status='A '),
+ })
+
+ if svntest.main.is_os_windows():
+ expected_link_contents = 'link to'
+ else:
+ expected_link_contents = ''
+
+ expected_wc = svntest.wc.State('', {
+ 'from' : Item(contents=expected_link_contents),
+ 'to' : Item(contents=''),
+ })
+ svntest.actions.run_and_verify_checkout(sbox.repo_url,
+ sbox.wc_dir,
+ expected_output,
+ expected_wc)
+
+#----------------------------------------------------------------------
+# Issue 2716: 'svn diff' against a symlink to a directory within the wc
+@Issue(2716)
+def diff_symlink_to_dir(sbox):
+ "diff a symlink to a directory"
+
+ sbox.build(read_only = True)
+
+ # Create a symlink to A/D as link.
+ d_path = os.path.join('A', 'D')
+ sbox.simple_add_symlink('A/D', 'link')
+
+ os.chdir(sbox.wc_dir)
+
+ # Now diff the wc itself and check the results.
+ expected_output = [
+ "Index: link\n",
+ "===================================================================\n",
+ "--- link\t(nonexistent)\n",
+ "+++ link\t(working copy)\n",
+ "@@ -0,0 +1 @@\n",
+ "+link A/D\n",
+ "\ No newline at end of file\n",
+ "\n",
+ "Property changes on: link\n",
+ "___________________________________________________________________\n",
+ "Added: svn:special\n",
+ "## -0,0 +1 ##\n",
+ "+*\n",
+ "\\ No newline at end of property\n"
+ ]
+ svntest.actions.run_and_verify_svn(expected_output, [], 'diff',
+ '.')
+ # We should get the same output if we the diff the symlink itself.
+ svntest.actions.run_and_verify_svn(expected_output, [], 'diff', 'link')
+
+#----------------------------------------------------------------------
+# Issue 2692 (part of): Check that the client can check out a repository
+# that contains an unknown special file type.
+@Issue(2692)
+def checkout_repo_with_unknown_special_type(sbox):
+ "checkout repository with unknown special file type"
+
+ svntest.actions.load_repo(sbox, os.path.join(os.path.dirname(sys.argv[0]),
+ 'special_tests_data',
+ 'bad-special-type.dump'),
+ create_wc=False)
+
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ 'special': Item(status='A '),
+ })
+ expected_wc = svntest.wc.State('', {
+ 'special' : Item(contents='gimble wabe'),
+ })
+ svntest.actions.run_and_verify_checkout(sbox.repo_url,
+ sbox.wc_dir,
+ expected_output,
+ expected_wc)
+
+def replace_symlink_with_dir(sbox):
+ "replace a special file with a directory"
+
+ svntest.actions.load_repo(sbox, os.path.join(os.path.dirname(sys.argv[0]),
+ 'special_tests_data',
+ 'symlink.dump'))
+
+ wc_dir = sbox.wc_dir
+ from_path = os.path.join(wc_dir, 'from')
+
+ # Now replace the symlink with a directory and try to commit, we
+ # should get an error
+ os.remove(from_path)
+ os.mkdir(from_path)
+
+ # Does status show the obstruction?
+ was_cwd = os.getcwd()
+ os.chdir(wc_dir)
+ svntest.actions.run_and_verify_svn([ "~ from\n" ], [], 'st')
+
+ # The commit shouldn't do anything.
+ # I'd expect a failed commit here, but replacing a file locally with a
+ # directory seems to make svn think the file is unchanged.
+ os.chdir(was_cwd)
+ expected_output = svntest.wc.State(wc_dir, {
+ })
+
+ error_re_string = '.*E145001: (Entry|Node).*has.*changed (special|kind).*'
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ None, error_re_string)
+
+# test for issue #1808: svn up deletes local symlink that obstructs
+# versioned file
+@Issue(1808)
+def update_obstructing_symlink(sbox):
+ "symlink obstructs incoming delete"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ mu_path = sbox.ospath('A/mu')
+
+ iota_abspath = os.path.abspath(sbox.ospath('iota'))
+
+ # delete mu and replace it with an (not-added) symlink
+ sbox.simple_rm('A/mu')
+ sbox.simple_symlink(iota_abspath, 'A/mu')
+
+ # delete pi and replace it with an added symlink
+ sbox.simple_rm('A/D/G/pi')
+ sbox.simple_add_symlink(iota_abspath, 'A/D/G/pi')
+
+ if not os.path.exists(mu_path):
+ raise svntest.Failure("mu should be there")
+
+ # Now remove mu and pi in the repository
+ svntest.main.run_svn(None, 'rm', '-m', 'log msg',
+ sbox.repo_url + '/A/mu',
+ sbox.repo_url + '/A/D/G/pi')
+
+ # We expect tree conflicts
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu': Item(status=' ', treeconflict='C'),
+ 'A/D/G/pi': Item(status=' ', treeconflict='C')
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('A/mu', status='? ', treeconflict='C',
+ wc_rev=None)
+
+ expected_status.tweak('A/D/G/pi', status='A ',treeconflict='C',
+ wc_rev='-')
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output, None,
+ expected_status)
+
+ expected_info = [
+ {
+ 'Path': re.escape(sbox.ospath('A/D/G/pi')),
+ 'Tree conflict': 'local file replace, incoming file delete or move.*'
+ },
+ {
+ 'Path': re.escape(sbox.ospath('A/mu')),
+ 'Tree conflict': 'local file delete, incoming file delete or move.*'
+ }
+ ]
+
+ svntest.actions.run_and_verify_info(expected_info,
+ sbox.ospath('A/D/G/pi'),
+ sbox.ospath('A/mu'))
+
+ # check that the symlink is still there
+ if not os.path.exists(mu_path):
+ raise svntest.Failure("mu should be there")
+ if svntest.main.is_posix_os():
+ target = os.readlink(mu_path)
+ if target != iota_abspath:
+ raise svntest.Failure("mu no longer points to the same location")
+
+def warn_on_reserved_name(sbox):
+ "warn when attempt operation on a reserved name"
+ sbox.build()
+ reserved_path = os.path.join(sbox.wc_dir, svntest.main.get_admin_name())
+ svntest.actions.run_and_verify_svn(
+ None,
+ ".*Skipping argument: E200025: '.+' ends in a reserved name.*",
+ 'lock', reserved_path)
+
+
+def propvalue_normalized(sbox):
+ "'ps svn:special' should normalize to '*'"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add a "symlink"
+ iota2_path = sbox.ospath('iota2')
+ svntest.main.file_write(iota2_path, "symlink destination")
+ svntest.main.run_svn(None, 'add', iota2_path)
+ svntest.main.run_svn(None, 'propset', 'svn:special', 'yes', iota2_path)
+ if svntest.main.is_posix_os():
+ os.remove(iota2_path)
+ os.symlink("symlink destination", iota2_path)
+
+ # Property value should be SVN_PROP_BOOLEAN_TRUE
+ expected_propval = ['*']
+ svntest.actions.run_and_verify_svn(expected_propval, [],
+ 'propget', '--no-newline', 'svn:special',
+ iota2_path)
+
+ # Commit and check again.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota2' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'iota2' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ svntest.main.run_svn(None, 'update', wc_dir)
+ svntest.actions.run_and_verify_svn(expected_propval, [],
+ 'propget', '--no-newline', 'svn:special',
+ iota2_path)
+
+
+# on users@: http://mid.gmane.org/1292856447.8650.24.camel@nimble.325Bayport
+@SkipUnless(svntest.main.is_posix_os)
+def unrelated_changed_special_status(sbox):
+ "commit foo while bar changed special status"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ os.chdir(os.path.join(sbox.wc_dir, 'A/D/H'))
+
+ open('chi', 'a').write('random local mod')
+ os.unlink('psi')
+ os.symlink('omega', 'psi') # omega is versioned!
+ svntest.main.run_svn(None, 'changelist', 'chi cl', 'chi')
+ svntest.actions.run_and_verify_svn(None, [], 'commit',
+ '--changelist', 'chi cl',
+ '-m', 'psi changed special status')
+
+#----------------------------------------------------------------------
+@Issue(3972)
+def symlink_destination_change(sbox):
+ "revert a symlink destination change"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Create a new symlink and commit it.
+ newfile_path = os.path.join(wc_dir, 'newfile')
+ sbox.simple_add_symlink('linktarget', 'newfile')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'newfile' : Item(verb='Adding'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'newfile' : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Modify the symlink to point somewhere else
+ os.remove(newfile_path)
+ if svntest.main.is_posix_os():
+ os.symlink('linktarget2', newfile_path)
+ else:
+ sbox.simple_append('newfile', 'link linktarget2', truncate = True)
+
+ expected_status.tweak('newfile', status='M ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Revert should restore the symlink to point to the original destination
+ svntest.main.run_svn(None, 'revert', '-R', wc_dir)
+ expected_status.tweak('newfile', status=' ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Issue 3972, repeat revert produces no output
+ svntest.actions.run_and_verify_svn([], [], 'revert', '-R', wc_dir)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Now replace the symlink with a normal file and try to commit, we
+
+#----------------------------------------------------------------------
+# This used to lose the special status in the target working copy
+# (disk and metadata).
+@Issue(3884)
+def merge_foreign_symlink(sbox):
+ "merge symlink-add from foreign repos"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make a copy of this repository and associated working copy. Both
+ # should have nothing but a Greek tree in them, and the two
+ # repository UUIDs should differ.
+ sbox2 = sbox.clone_dependent(True)
+ sbox2.build()
+ wc_dir2 = sbox2.wc_dir
+
+ # convenience variables
+ zeta_path = sbox.ospath('A/zeta')
+ zeta2_path = sbox2.ospath('A/zeta')
+
+ # sbox2 r2: create zeta2 in sbox2
+ sbox2.simple_add_symlink('target', 'A/zeta')
+ sbox2.simple_commit('A/zeta')
+
+
+ # sbox1: merge that
+ svntest.main.run_svn(None, 'merge', '-c', '2', sbox2.repo_url,
+ sbox.ospath(''))
+
+ # Verify special status.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/zeta': Item(contents="link target", props={ 'svn:special': '*' })
+ })
+ svntest.actions.verify_disk(sbox.ospath(''), expected_disk, True)
+
+ # TODO: verify status:
+ # expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ # expected_status.add({
+ # 'A/zeta' : Item(status='A ', wc_rev='-', props={'svn:special': '*'}),
+ # })
+
+#----------------------------------------------------------------------
+# See also symlink_to_wc_svnversion().
+@Issue(2557,3987)
+@SkipUnless(svntest.main.is_posix_os)
+def symlink_to_wc_basic(sbox):
+ "operate on symlink to wc"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ # Create a symlink
+ symlink_path = sbox.add_wc_path('2')
+ assert not os.path.islink(symlink_path)
+ os.symlink(os.path.basename(wc_dir), symlink_path) ### implementation detail
+ symlink_basename = os.path.basename(symlink_path)
+
+ # Some basic tests
+ wc_uuid = svntest.actions.get_wc_uuid(wc_dir)
+ expected_info = [{
+ 'Path' : re.escape(os.path.join(symlink_path)),
+ 'Working Copy Root Path' : re.escape(os.path.abspath(wc_dir)),
+ 'Repository Root' : sbox.repo_url,
+ 'Repository UUID' : wc_uuid,
+ 'Revision' : '1',
+ 'Node Kind' : 'directory',
+ 'Schedule' : 'normal',
+ }, {
+ 'Name' : 'iota',
+ 'Path' : re.escape(os.path.join(symlink_path, 'iota')),
+ 'Working Copy Root Path' : re.escape(os.path.abspath(wc_dir)),
+ 'Repository Root' : sbox.repo_url,
+ 'Repository UUID' : wc_uuid,
+ 'Revision' : '1',
+ 'Node Kind' : 'file',
+ 'Schedule' : 'normal',
+ }]
+ svntest.actions.run_and_verify_info(expected_info,
+ symlink_path, symlink_path + '/iota')
+
+#----------------------------------------------------------------------
+# Similar to #2557/#3987; see symlink_to_wc_basic().
+@Issue(2557,3987)
+@SkipUnless(svntest.main.is_posix_os)
+def symlink_to_wc_svnversion(sbox):
+ "svnversion on symlink to wc"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ # Create a symlink
+ symlink_path = sbox.add_wc_path('2')
+ assert not os.path.islink(symlink_path)
+ os.symlink(os.path.basename(wc_dir), symlink_path) ### implementation detail
+ symlink_basename = os.path.basename(symlink_path)
+
+ # Some basic tests
+ svntest.actions.run_and_verify_svnversion(symlink_path, sbox.repo_url,
+ [ "1\n" ], [])
+
+#----------------------------------------------------------------------
+# Regression in 1.7.0: Update fails to change a symlink
+def update_symlink(sbox):
+ "update a symlink"
+
+ svntest.actions.do_sleep_for_timestamps()
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ mu_path = sbox.ospath('A/mu')
+ iota_path = sbox.ospath('iota')
+ symlink_path = sbox.ospath('symlink')
+
+ # create a symlink to /A/mu
+ sbox.simple_add_symlink("A/mu", 'symlink')
+ sbox.simple_commit()
+
+ # change the symlink to /iota
+ os.remove(symlink_path)
+ if svntest.main.is_posix_os():
+ os.symlink("iota", symlink_path)
+ else:
+ file_write(symlink_path, 'link iota')
+ sbox.simple_commit()
+
+ # update back to r2
+ svntest.main.run_svn(False, 'update', '-r', '2', wc_dir)
+
+ # now update to head; 1.7.0 throws an assertion here
+ expected_output = svntest.wc.State(wc_dir, {
+ 'symlink' : Item(status='U '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({'symlink': Item(contents="This is the file 'iota'.\n",
+ props={'svn:special' : '*'})})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+ expected_status.add({
+ 'symlink' : Item(status=' ', wc_rev='3'),
+ })
+
+ if not svntest.main.is_posix_os():
+ expected_disk = None
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ check_props=True)
+
+#----------------------------------------------------------------------
+@Issue(4091)
+def replace_symlinks(sbox):
+ "replace symlinks"
+ sbox.build()
+ wc = sbox.ospath
+
+ # Some of these tests are implemented for git (in test script
+ # t/t9100-git-svn-basic.sh) using the Perl bindings for Subversion.
+ # Our issue #4091 is about 'svn update' failures in the git tests.
+
+ sbox.simple_mkdir('A/D/G/Z')
+ sbox.simple_mkdir('A/D/Gx')
+ sbox.simple_mkdir('A/D/Gx/Z')
+ sbox.simple_mkdir('A/D/Hx')
+ sbox.simple_mkdir('A/D/Y')
+ sbox.simple_mkdir('Ax')
+
+ sbox.simple_add_symlink('../Y', 'A/D/H/Z')
+ sbox.simple_add_symlink('../Y', 'A/D/Hx/Z')
+
+ for p in ['Ax/mu',
+ 'A/D/Gx/pi',
+ 'A/D/Hx/chi',
+ ]:
+ file_write(wc(p), 'This starts as a normal file.\n')
+ sbox.simple_add(p)
+ for p in ['iota.sh',
+ 'A/mu.sh',
+ 'Ax/mu.sh',
+ 'A/D/gamma.sh',
+ 'A/B/E/beta.sh',
+ 'A/D/G/rho.sh',
+ 'A/D/Gx/rho.sh',
+ 'A/D/H/psi.sh',
+ 'A/D/Hx/psi.sh',
+ ]:
+ file_write(wc(p), '#!/bin/sh\necho "hello, svn!"\n')
+ os.chmod(wc(p), svntest.main.S_ALL_RW | stat.S_IXUSR)
+ sbox.simple_add(p)
+ if not svntest.main.is_posix_os():
+ sbox.simple_propset('svn:executable', 'X', p)
+ sbox.simple_commit() # r2
+ sbox.simple_update()
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 2)
+ expected_status.add({
+ 'A/D/Y' : Item(status=' ', wc_rev=2),
+ 'A/D/G/Z' : Item(status=' ', wc_rev=2),
+ 'A/D/G/rho.sh' : Item(status=' ', wc_rev=2),
+ 'A/D/Hx' : Item(status=' ', wc_rev=2),
+ 'A/D/Hx/Z' : Item(status=' ', wc_rev=2),
+ 'A/D/Hx/chi' : Item(status=' ', wc_rev=2),
+ 'A/D/Hx/psi.sh' : Item(status=' ', wc_rev=2),
+ 'A/D/H/psi.sh' : Item(status=' ', wc_rev=2),
+ 'A/D/H/Z' : Item(status=' ', wc_rev=2),
+ 'A/D/Gx' : Item(status=' ', wc_rev=2),
+ 'A/D/Gx/Z' : Item(status=' ', wc_rev=2),
+ 'A/D/Gx/pi' : Item(status=' ', wc_rev=2),
+ 'A/D/Gx/rho.sh' : Item(status=' ', wc_rev=2),
+ 'A/D/gamma.sh' : Item(status=' ', wc_rev=2),
+ 'A/B/E/beta.sh' : Item(status=' ', wc_rev=2),
+ 'Ax' : Item(status=' ', wc_rev=2),
+ 'Ax/mu' : Item(status=' ', wc_rev=2),
+ 'Ax/mu.sh' : Item(status=' ', wc_rev=2),
+ 'A/mu.sh' : Item(status=' ', wc_rev=2),
+ 'iota.sh' : Item(status=' ', wc_rev=2),
+ })
+ expected_status_r2 = copy.deepcopy(expected_status)
+ svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status_r2)
+
+ # Failing git-svn test: 'new symlink is added to a file that was
+ # also just made executable', i.e., in the same revision.
+ sbox.simple_propset("svn:executable", "X", 'A/B/E/alpha')
+ sbox.simple_add_symlink('alpha', 'A/B/E/sym-alpha')
+
+ # Add a symlink to a file made non-executable in the same revision.
+ sbox.simple_propdel("svn:executable", 'A/B/E/beta.sh')
+ sbox.simple_add_symlink('beta.sh', 'A/B/E/sym-beta.sh')
+
+ # Replace a normal {file, exec, dir} with a symlink to the same kind
+ # via Subversion replacement.
+ sbox.simple_rm('A/D/G/pi',
+ 'A/D/G/rho.sh',
+ #'A/D/G/Z', # Ooops, not compatible with --bin=svn1.6.
+ )
+ sbox.simple_add_symlink('../gamma', 'A/D/G/pi')
+ sbox.simple_add_symlink('../gamma.sh', 'A/D/G/rho.sh')
+ #sbox.simple_add_symlink('../Y', 'A/D/G/Z')
+
+ # Replace a symlink to {file, exec, dir} with a normal item of the
+ # same kind via Subversion replacement.
+ sbox.simple_rm('A/D/H/chi',
+ 'A/D/H/psi.sh',
+ #'A/D/H/Z',
+ )
+ sbox.simple_add_symlink('../gamma', 'A/D/H/chi')
+ sbox.simple_add_symlink('../gamma.sh', 'A/D/H/psi.sh')
+ #sbox.simple_add_symlink('../Y', 'A/D/H/Z')
+
+ # Replace a normal {file, exec} with a symlink to {exec, file} via
+ # Subversion replacement.
+ sbox.simple_rm('A/mu',
+ 'A/mu.sh')
+ sbox.simple_add_symlink('../iota2', 'A/mu')
+ sbox.simple_add_symlink('../iota', 'A/mu.sh')
+
+ # Ditto, without the Subversion replacement. Failing git-svn test
+ # 'executable file becomes a symlink to bar/zzz (file)'.
+ if svntest.main.is_posix_os():
+ os.remove(wc('Ax/mu'))
+ os.remove(wc('Ax/mu.sh'))
+ os.symlink('../iota2', wc('Ax/mu'))
+ os.symlink('../iota', wc('Ax/mu.sh'))
+ else:
+ # At least modify the file a bit
+
+ # ### Somehow this breaks the test when using multiline data?
+ # ### Is that intended behavior?
+
+ file_write(sbox.ospath('Ax/mu'), 'Link to iota2')
+ file_write(sbox.ospath('Ax/mu.sh'), 'Link to iota')
+
+ sbox.simple_propset('svn:special', 'X',
+ 'Ax/mu',
+ 'Ax/mu.sh')
+ sbox.simple_propdel('svn:executable', 'Ax/mu.sh')
+
+ ### TODO Replace a normal {file, exec, dir, dir} with a symlink to
+ ### {dir, dir, file, exec}. And the same symlink-to-normal.
+
+ expected_status.tweak('A/D/G/pi',
+ 'A/D/G/rho.sh',
+ 'A/D/H/psi.sh',
+ 'A/D/H/chi',
+ 'A/mu',
+ 'A/mu.sh',
+ status='RM')
+ expected_status.tweak('A/B/E/beta.sh',
+ 'A/B/E/alpha',
+ status=' M')
+ expected_status.tweak('Ax/mu',
+ 'Ax/mu.sh',
+ status='MM')
+ expected_status.add({
+ 'A/B/E/sym-alpha' : Item(status='A ', wc_rev=0),
+ 'A/B/E/sym-beta.sh' : Item(status='A ', wc_rev=0),
+ })
+ svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status)
+
+ sbox.simple_commit() # r3
+ sbox.simple_update()
+
+ expected_status.tweak(status=' ', wc_rev=3)
+ expected_status_r3 = expected_status
+ svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status_r3)
+
+ # Try updating from HEAD-1 to HEAD. This is currently XFAIL as the
+ # update to HEAD-1 produces a tree conflict.
+ run_svn(None, 'up', '-r2', sbox.wc_dir)
+ svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status_r2)
+ sbox.simple_update()
+ svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status_r3)
+
+
+@Issue(4102)
+@SkipUnless(svntest.main.is_posix_os)
+def externals_as_symlink_targets(sbox):
+ "externals as symlink targets"
+ sbox.build()
+ wc = sbox.ospath
+
+ # Control: symlink to normal dir and file.
+ os.symlink('E', wc('sym_E'))
+ os.symlink('mu', wc('sym_mu'))
+
+ # Test case: symlink to external dir and file.
+ sbox.simple_propset("svn:externals",
+ '^/A/B/E ext_E\n'
+ '^/A/mu ext_mu',
+ '')
+ sbox.simple_update()
+ os.symlink('ext_E', wc('sym_ext_E'))
+ os.symlink('ext_mu', wc('sym_ext_mu'))
+
+ # Adding symlinks to normal items and to a file external is OK.
+ sbox.simple_add('sym_E', 'sym_mu', 'sym_ext_mu')
+
+ ### Adding a symlink to an external dir failed with
+ ### svn: E200009: Could not add all targets because some targets are
+ ### already versioned
+ sbox.simple_add('sym_ext_E')
+
+ sbox.simple_commit()
+
+#----------------------------------------------------------------------
+@XFail()
+@Issue(4119)
+def cat_added_symlink(sbox):
+ "cat added symlink"
+
+ sbox.build(read_only = True)
+
+ kappa_path = sbox.ospath('kappa')
+ sbox.simple_add_symlink('iota', 'kappa')
+ svntest.actions.run_and_verify_svn("link iota", [],
+ "cat", kappa_path)
+
+#----------------------------------------------------------------------
+def incoming_symlink_changes(sbox):
+ "verify incoming symlink change behavior"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_add_symlink('iota', 's-replace')
+ sbox.simple_add_symlink('iota', 's-in-place')
+ sbox.simple_add_symlink('iota', 's-type')
+ sbox.simple_append('s-reverse', 'link iota')
+ sbox.simple_add('s-reverse')
+ sbox.simple_commit() # r2
+
+ # Replace s-replace
+ sbox.simple_rm('s-replace')
+ # Note that we don't use 'A/mu' as the length of that matches 'iota', which
+ # would make us depend on timestamp changes for detecting differences.
+ sbox.simple_add_symlink('A/D/G/pi', 's-replace')
+
+ # Change target of s-in-place
+ if svntest.main.is_posix_os():
+ os.remove(sbox.ospath('s-in-place'))
+ os.symlink('A/D/G/pi', sbox.ospath('s-in-place'))
+ else:
+ sbox.simple_append('s-in-place', 'link A/D/G/pi', truncate = True)
+
+ # r3
+ expected_output = svntest.wc.State(wc_dir, {
+ 's-replace' : Item(verb='Replacing'),
+ 's-in-place' : Item(verb='Sending'),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, None)
+
+ # r4
+ svntest.main.run_svnmucc('propdel', 'svn:special',
+ sbox.repo_url + '/s-type',
+ '-m', 'Turn s-type into a file')
+
+ # r5
+ svntest.main.run_svnmucc('propset', 'svn:special', 'X',
+ sbox.repo_url + '/s-reverse',
+ '-m', 'Turn s-reverse into a symlink')
+
+ # Currently we expect to see 'U'pdates, but we would like to see
+ # replacements
+ expected_output = svntest.wc.State(wc_dir, {
+ 's-reverse' : Item(status=' U'),
+ 's-type' : Item(status=' U'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 5)
+ expected_status.add({
+ 's-type' : Item(status=' ', wc_rev='5'),
+ 's-replace' : Item(status=' ', wc_rev='5'),
+ 's-reverse' : Item(status=' ', wc_rev='5'),
+ 's-in-place' : Item(status=' ', wc_rev='5'),
+ })
+
+ # Update to HEAD/r5 to fetch the r4 and r5 symlink changes
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ None,
+ expected_status,
+ check_props=True)
+
+ # Update back to r2, to prepare some local changes
+ expected_output = svntest.wc.State(wc_dir, {
+ # s-replace is D + A
+ 's-replace' : Item(status='A ', prev_status='D '),
+ 's-in-place' : Item(status='U '),
+ 's-reverse' : Item(status=' U'),
+ 's-type' : Item(status=' U'),
+ })
+ expected_status.tweak(wc_rev=2)
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ None,
+ expected_status,
+ [], True,
+ wc_dir, '-r', '2')
+
+ # Ok, now add a property on all of them to make future symlinkness changes
+ # a tree conflict
+ # ### We should also try this with a 'textual change'
+ sbox.simple_propset('x', 'y', 's-replace', 's-in-place', 's-reverse', 's-type')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 's-replace' : Item(prev_status = ' ', prev_treeconflict='C',
+ status=' ', treeconflict='A'),
+ 's-in-place' : Item(status='U '),
+ 's-reverse' : Item(status=' ', treeconflict='C'),
+ 's-type' : Item(status=' ', treeconflict='C'),
+ })
+ expected_status.tweak(wc_rev=5)
+ expected_status.tweak('s-replace', 's-reverse', 's-type', status='RM',
+ copied='+', treeconflict='C', wc_rev='-')
+ expected_status.tweak('s-in-place', status=' M')
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ None,
+ expected_status,
+ check_props=True)
+
+#----------------------------------------------------------------------
+@Issue(4479)
+def multiline_special(sbox):
+ "multiline file with svn:special"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_append('iota', 'A second line.\n')
+ sbox.simple_commit();
+ tmp = sbox.get_tempname()
+ svntest.main.file_write(tmp, '*', 'w+')
+ svntest.main.run_svnmucc('propsetf', 'svn:special', tmp,
+ sbox.repo_url + '/iota',
+ '-m', 'set svn:special')
+
+ sbox.simple_update(revision=1);
+ sbox.simple_update();
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak()
+ expected_disk.tweak('iota',
+ contents="This is the file 'iota'.\nA second line.\n",
+ props={'svn:special' : '*'})
+ svntest.actions.verify_disk(wc_dir, expected_disk.old_tree(), True)
+
+#----------------------------------------------------------------------
+@Issue(4482)
+@XFail(svntest.main.is_posix_os)
+def multiline_symlink_special(sbox):
+ "multiline link file with svn:special"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_append('dodgy-link1', 'link foo\n')
+ sbox.simple_append('dodgy-link2', 'link foo\nbar\n')
+ svntest.main.run_svnmucc('put', sbox.ospath('dodgy-link1'), 'dodgy-link1',
+ 'put', sbox.ospath('dodgy-link2'), 'dodgy-link2',
+ 'propset', 'svn:special', 'X', 'dodgy-link1',
+ 'propset', 'svn:special', 'X', 'dodgy-link2',
+ '-U', sbox.repo_url,
+ '-m', 'Create dodgy symlinks')
+ os.remove(sbox.ospath('dodgy-link1'))
+ os.remove(sbox.ospath('dodgy-link2'))
+
+ sbox.simple_update();
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'dodgy-link1' : Item(status=' ', wc_rev=2),
+ 'dodgy-link2' : Item(status=' ', wc_rev=2),
+ })
+ # XFAIL: Only content before \n used when creating the link but all
+ # content used when detecting modifications, so the pristine working
+ # copy shows up as modified.
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ general_symlink,
+ replace_file_with_symlink,
+ import_export_symlink,
+ copy_tree_with_symlink,
+ replace_symlink_with_file,
+ remove_symlink,
+ merge_symlink_into_file,
+ merge_file_into_symlink,
+ checkout_repo_with_symlinks,
+ diff_symlink_to_dir,
+ checkout_repo_with_unknown_special_type,
+ replace_symlink_with_dir,
+ update_obstructing_symlink,
+ warn_on_reserved_name,
+ propvalue_normalized,
+ unrelated_changed_special_status,
+ symlink_destination_change,
+ merge_foreign_symlink,
+ symlink_to_wc_basic,
+ symlink_to_wc_svnversion,
+ update_symlink,
+ replace_symlinks,
+ externals_as_symlink_targets,
+ cat_added_symlink,
+ incoming_symlink_changes,
+ multiline_special,
+ multiline_symlink_special,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/special_tests_data/bad-special-type.dump b/subversion/tests/cmdline/special_tests_data/bad-special-type.dump
new file mode 100644
index 0000000..815b3d3
--- /dev/null
+++ b/subversion/tests/cmdline/special_tests_data/bad-special-type.dump
@@ -0,0 +1,47 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 51ec0b24-bd07-11db-97f6-8dc527e3df93
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2007-02-15T15:16:13.268177Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 143
+Content-length: 143
+
+K 7
+svn:log
+V 41
+Commit a special file with a unknown type
+K 10
+svn:author
+V 7
+malcolm
+K 8
+svn:date
+V 27
+2007-02-15T15:18:08.532835Z
+PROPS-END
+
+Node-path: special
+Node-kind: file
+Node-action: add
+Prop-content-length: 33
+Text-content-length: 11
+Text-content-md5: 12dfeb21c0626cc041b5de87de30c661
+Content-length: 44
+
+K 11
+svn:special
+V 1
+*
+PROPS-END
+gimble wabe
+
diff --git a/subversion/tests/cmdline/special_tests_data/symlink.dump b/subversion/tests/cmdline/special_tests_data/symlink.dump
new file mode 100644
index 0000000..28c9d43
--- /dev/null
+++ b/subversion/tests/cmdline/special_tests_data/symlink.dump
@@ -0,0 +1,58 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 82e4110f-07c2-41a8-ba25-4b48b5cb5f5b
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2007-01-10T21:16:13.268177Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 124
+Content-length: 124
+
+K 7
+svn:log
+V 22
+Commit a symbolic link
+K 10
+svn:author
+V 7
+malcolm
+K 8
+svn:date
+V 27
+2007-01-10T21:18:08.532835Z
+PROPS-END
+
+Node-path: from
+Node-kind: file
+Node-action: add
+Prop-content-length: 33
+Text-content-length: 7
+Text-content-md5: 2419f8600825e9410d1b9a62a34570ec
+Content-length: 40
+
+K 11
+svn:special
+V 1
+*
+PROPS-END
+link to
+
+Node-path: to
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Content-length: 10
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/stat_tests.py b/subversion/tests/cmdline/stat_tests.py
new file mode 100755
index 0000000..afab961
--- /dev/null
+++ b/subversion/tests/cmdline/stat_tests.py
@@ -0,0 +1,2364 @@
+#!/usr/bin/env python
+#
+# stat_tests.py: testing the svn stat command
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os
+import re
+import time
+import datetime
+import logging
+
+logger = logging.getLogger()
+
+# Our testing module
+import svntest
+from svntest import wc
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+UnorderedOutput = svntest.verify.UnorderedOutput
+
+
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+#----------------------------------------------------------------------
+
+def status_unversioned_file_in_current_dir(sbox):
+ "status on unversioned file in current directory"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ os.chdir(wc_dir)
+
+ svntest.main.file_append('foo', 'a new file')
+
+ svntest.actions.run_and_verify_svn([ "? foo\n" ], [],
+ 'stat', 'foo')
+
+#----------------------------------------------------------------------
+# Regression for issue #590
+@Issue(590)
+def status_update_with_nested_adds(sbox):
+ "run 'status -u' when nested additions are pending"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make a backup copy of the working copy
+ wc_backup = sbox.add_wc_path('backup')
+ svntest.actions.duplicate_dir(wc_dir, wc_backup)
+
+ # Create newdir and newfile
+ newdir_path = sbox.ospath('newdir')
+ newfile_path = sbox.ospath('newdir/newfile')
+ os.makedirs(newdir_path)
+ svntest.main.file_append(newfile_path, 'new text')
+
+ # Schedule newdir and newfile for addition (note that the add is recursive)
+ svntest.main.run_svn(None, 'add', newdir_path)
+
+ # Created expected output tree for commit
+ expected_output = svntest.wc.State(wc_dir, {
+ 'newdir' : Item(verb='Adding'),
+ 'newdir/newfile' : Item(verb='Adding'),
+ })
+
+ # Create expected status tree; all local revisions should be at 1,
+ # but newdir and newfile should be at revision 2.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'newdir' : Item(status=' ', wc_rev=2),
+ 'newdir/newfile' : Item(status=' ', wc_rev=2),
+ })
+
+ # Commit.
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Now we go to the backup working copy, still at revision 1.
+ # We will run 'svn st -u', and make sure that newdir/newfile is reported
+ # as a nonexistent (but pending) path.
+ expected_status = svntest.actions.get_virginal_state(wc_backup, 1)
+ expected_status.add({
+ 'newdir' : Item(status=' '),
+ 'newdir/newfile' : Item(status=' '),
+ })
+ svntest.actions.run_and_verify_unquiet_status(wc_backup,
+ expected_status)
+
+ # At one time an obstructing 'newdir' caused a SEGV on 'newdir/newfile'
+ os.makedirs(os.path.join(wc_backup, 'newdir'))
+ expected_status.tweak('newdir', status='? ')
+ svntest.actions.run_and_verify_unquiet_status(wc_backup,
+ expected_status)
+
+#----------------------------------------------------------------------
+
+# svn status -vN should include all entries in a directory
+def status_shows_all_in_current_dir(sbox):
+ "status -vN shows all items in current directory"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ os.chdir(wc_dir)
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'stat', '-vN')
+
+ if (len(output) != len(os.listdir("."))):
+ raise svntest.Failure
+
+
+#----------------------------------------------------------------------
+@Issue(2127)
+def status_missing_file(sbox):
+ "status with a versioned file missing"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ os.chdir(wc_dir)
+
+ os.remove('iota')
+
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'status')
+ for line in output:
+ if not re.match("! +iota", line):
+ raise svntest.Failure
+
+ # This invocation is for issue #2127.
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'status', '-u',
+ 'iota')
+ found_it = 0
+ for line in output:
+ if re.match("! +1 +iota", line):
+ found_it = 1
+ if not found_it:
+ raise svntest.Failure
+
+#----------------------------------------------------------------------
+
+def status_type_change(sbox):
+ "status on versioned items whose type has changed"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ os.chdir(wc_dir)
+
+ # First replace a versioned dir with a file and a versioned file
+ # with a versioned dir.
+ os.rename('iota', 'was_iota')
+ os.rename('A', 'iota')
+ os.rename('was_iota', 'A')
+
+ expected_output = [
+ '~ A\n',
+ '! A/mu\n',
+ '! A/B\n',
+ '! A/B/lambda\n',
+ '! A/B/E\n',
+ '! A/B/E/alpha\n',
+ '! A/B/E/beta\n',
+ '! A/B/F\n',
+ '! A/C\n',
+ '! A/D\n',
+ '! A/D/gamma\n',
+ '! A/D/G\n',
+ '! A/D/G/rho\n',
+ '! A/D/G/pi\n',
+ '! A/D/G/tau\n',
+ '! A/D/H\n',
+ '! A/D/H/chi\n',
+ '! A/D/H/omega\n',
+ '! A/D/H/psi\n',
+ '~ iota\n',
+ ]
+
+ expected_output = [s.replace('/', os.path.sep) for s in expected_output]
+
+ svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output),
+ [], 'status')
+
+ # Now change the file that is obstructing the versioned dir into an
+ # unversioned dir.
+ os.remove('A')
+ os.mkdir('A')
+
+ # A is a directory again, so it is no longer missing, but it's
+ # descendants are
+ expected_output = [
+ '! A/mu\n',
+ '! A/B\n',
+ '! A/B/lambda\n',
+ '! A/B/E\n',
+ '! A/B/E/alpha\n',
+ '! A/B/E/beta\n',
+ '! A/B/F\n',
+ '! A/C\n',
+ '! A/D\n',
+ '! A/D/gamma\n',
+ '! A/D/G\n',
+ '! A/D/G/rho\n',
+ '! A/D/G/pi\n',
+ '! A/D/G/tau\n',
+ '! A/D/H\n',
+ '! A/D/H/chi\n',
+ '! A/D/H/omega\n',
+ '! A/D/H/psi\n',
+ '~ iota\n',
+ ]
+ # Fix separator for Windows
+ expected_output = [s.replace('/', os.path.sep) for s in expected_output]
+
+ svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output),
+ [], 'status')
+
+ # Now change the versioned dir that is obstructing the file into an
+ # unversioned dir.
+ svntest.main.safe_rmtree('iota')
+ os.mkdir('iota')
+
+ svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output),
+ [], 'status')
+
+#----------------------------------------------------------------------
+@SkipUnless(svntest.main.is_posix_os)
+def status_type_change_to_symlink(sbox):
+ "status on versioned items replaced by symlinks"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ os.chdir(wc_dir)
+
+ # "broken" symlinks
+ os.remove('iota')
+ os.symlink('foo', 'iota')
+ svntest.main.safe_rmtree('A/D')
+ os.symlink('bar', 'A/D')
+
+ expected_output = [
+ '~ A/D\n',
+ '! A/D/gamma\n',
+ '! A/D/G\n',
+ '! A/D/G/rho\n',
+ '! A/D/G/pi\n',
+ '! A/D/G/tau\n',
+ '! A/D/H\n',
+ '! A/D/H/chi\n',
+ '! A/D/H/omega\n',
+ '! A/D/H/psi\n',
+ '~ iota\n',
+ ]
+
+ svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output),
+ [], 'status')
+
+ # "valid" symlinks
+ os.remove('iota')
+ os.remove('A/D')
+ os.symlink('A/mu', 'iota')
+ os.symlink('C', 'A/D')
+
+ svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output),
+ [], 'status')
+
+#----------------------------------------------------------------------
+# Regression test for revision 3686.
+
+def status_with_new_files_pending(sbox):
+ "status -u with new files in the repository"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ os.chdir(wc_dir)
+
+ svntest.main.file_append('newfile', 'this is a new file')
+ svntest.main.run_svn(None, 'add', 'newfile')
+ svntest.main.run_svn(None,
+ 'ci', '-m', 'logmsg')
+ svntest.main.run_svn(None,
+ 'up', '-r', '1')
+
+ exit_code, output, err = svntest.actions.run_and_verify_svn(None, [],
+ 'status', '-u')
+
+ # The bug fixed in revision 3686 was a segmentation fault.
+ # TODO: Check exit code.
+ # In the meantime, no output means there was a problem.
+ for line in output:
+ if line.find('newfile') != -1:
+ break
+ else:
+ raise svntest.Failure
+
+#----------------------------------------------------------------------
+
+def status_for_unignored_file(sbox):
+ "status for unignored file and directory"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ os.chdir(wc_dir)
+
+ # use a temp file to set properties with wildcards in their values
+ # otherwise Win32/VS2005 will expand them
+ svntest.main.file_append('proptmp', 'new*')
+ svntest.main.file_append('newfile', 'this is a new file')
+ os.makedirs('newdir')
+ svntest.main.run_svn(None, 'propset', 'svn:ignore', '-F', 'proptmp', '.')
+ os.remove('proptmp')
+
+ # status on the directory with --no-ignore
+ expected = svntest.verify.UnorderedOutput(
+ ['I newdir\n',
+ 'I newfile\n',
+ ' M .\n'])
+ svntest.actions.run_and_verify_svn(expected,
+ [],
+ 'status', '--no-ignore', '.')
+
+ # status specifying the file explicitly on the command line
+ expected = svntest.verify.UnorderedOutput(
+ ['I newdir\n',
+ 'I newfile\n'])
+ svntest.actions.run_and_verify_svn(expected,
+ [],
+ 'status', 'newdir', 'newfile')
+
+#----------------------------------------------------------------------
+
+def status_for_nonexistent_file(sbox):
+ "status on missing and unversioned file"
+
+ sbox.build(read_only = True)
+
+ wc_dir = sbox.wc_dir
+
+ os.chdir(wc_dir)
+
+ exit_code, output, err = svntest.actions.run_and_verify_svn(
+ None, [], 'status', 'nonexistent-file')
+
+ # there should *not* be a status line printed for the nonexistent file
+ for line in output:
+ if re.match(" +nonexistent-file", line):
+ raise svntest.Failure
+
+#----------------------------------------------------------------------
+
+def status_nonrecursive_update_different_cwd(sbox):
+ "status -v -N -u from different current directories"
+
+ # check combination of status -u and -N
+ # create A/C/J in repository
+ # create A/C/K in working copy
+ # check status output with -u and -N on target C
+ # check status output with -u and -N on target . (in C)
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ J_url = sbox.repo_url + '/A/C/J'
+ K_path = os.path.join(wc_dir, 'A', 'C', 'K' )
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mkdir', '-m', 'rev 2', J_url)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mkdir', K_path)
+
+ os.chdir(wc_dir)
+
+ expected_output = [
+ ' * %s\n' % os.path.join("C", "J"),
+ 'A - ? ? %s\n' % os.path.join("C", "K"),
+ ' * 1 1 jrandom C\n',
+ 'Status against revision: 2\n' ]
+
+ os.chdir('A')
+ svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output),
+ [],
+ 'status', '-v', '-N', '-u', 'C')
+
+ expected_output = [
+ ' * J\n',
+ 'A - ? ? K\n',
+ ' * 1 1 jrandom .\n',
+ 'Status against revision: 2\n']
+
+ os.chdir('C')
+ svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output),
+ [],
+ 'status', '-v', '-N', '-u', '.')
+
+
+#----------------------------------------------------------------------
+
+def status_file_needs_update(sbox):
+ "status -u indicates out-of-dateness"
+
+ # See this thread:
+ #
+ # http://subversion.tigris.org/servlets/ReadMsg?list=dev&msgNo=27975
+ #
+ # Basically, Andreas was seeing inconsistent results depending on
+ # whether or not he accompanied 'svn status -u' with '-v':
+ #
+ # % svn st -u
+ # Head revision: 67
+ # %
+ #
+ # ...and yet...
+ #
+ # % svn st -u -v
+ # 56 6 k cron-daily.pl
+ # * 56 44 k crontab.root
+ # 56 6 k gmls-lR.pl
+ # Head revision: 67
+ # %
+ #
+ # The first status should show the asterisk, too. There was never
+ # any issue for this bug, so this comment and the thread are your
+ # audit trail :-).
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ other_wc = sbox.add_wc_path('other')
+
+ svntest.actions.duplicate_dir(wc_dir, other_wc)
+
+ was_cwd = os.getcwd()
+
+ os.chdir(wc_dir)
+ svntest.main.file_append('crontab.root', 'New file crontab.root.\n')
+ svntest.main.run_svn(None, 'add', 'crontab.root')
+ svntest.main.run_svn(None,
+ 'ci', '-m', 'log msg')
+ os.chdir(was_cwd)
+ os.chdir(other_wc)
+ svntest.main.run_svn(None,
+ 'up')
+
+ os.chdir(was_cwd)
+ os.chdir(wc_dir)
+ svntest.main.file_append('crontab.root', 'New line in crontab.root.\n')
+ svntest.main.run_svn(None,
+ 'ci', '-m', 'log msg')
+
+ # The `svntest.actions.run_and_verify_*_status' routines all pass
+ # the -v flag, which we don't want, as this bug never appeared when
+ # -v was passed. So we run status by hand:
+ os.chdir(was_cwd)
+ exit_code, out, err = svntest.actions.run_and_verify_svn(None, [],
+ 'status', '-u',
+ other_wc)
+
+ for line in out:
+ if re.match("\\s+\\*.*crontab\\.root$", line):
+ break
+ else:
+ raise svntest.Failure
+
+#----------------------------------------------------------------------
+
+def status_uninvited_parent_directory(sbox):
+  "status -u on outdated, added file shows only that"
+
+  # To reproduce, check out working copies wc1 and wc2, then do:
+  #
+  # $ cd wc1
+  # $ echo "new file" >> newfile
+  # $ svn add newfile
+  # $ svn ci -m 'log msg'
+  #
+  # $ cd ../wc2
+  # $ echo "new file" >> newfile
+  # $ svn add newfile
+  #
+  # $ cd ..
+  # $ svn st wc2/newfile
+  #
+  # You *should* get one line of status output, for newfile. The bug
+  # is that you get two instead, one for newfile, and one for its
+  # parent directory, wc2/.
+  #
+  # This bug was originally discovered during investigations into
+  # issue #1042, "fixed" in revision 4181, then later the fix was
+  # reverted because it caused other status problems (see the test
+  # status_file_needs_update(), which fails when 4181 is present).
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  other_wc = sbox.add_wc_path('other')
+
+  svntest.actions.duplicate_dir(wc_dir, other_wc)
+
+  was_cwd = os.getcwd()
+
+  # Commit an added 'newfile' from the first working copy ...
+  os.chdir(wc_dir)
+  svntest.main.file_append('newfile', 'New file.\n')
+  svntest.main.run_svn(None, 'add', 'newfile')
+  svntest.main.run_svn(None,
+                       'ci', '-m', 'log msg')
+
+  # ... and schedule (but do not commit) the same add in the second
+  # working copy, so its 'newfile' is both added and out of date.
+  os.chdir(was_cwd)
+  os.chdir(other_wc)
+  svntest.main.file_append('newfile', 'New file.\n')
+  svntest.main.run_svn(None, 'add', 'newfile')
+
+  os.chdir(was_cwd)
+
+  # We don't want a full status tree here, just one line (or two, if
+  # the bug is present). So run status by hand:
+  os.chdir(was_cwd)
+  exit_code, out, err = svntest.actions.run_and_verify_svn(
+    None, [],
+    'status', '-u', os.path.join(other_wc, 'newfile'))
+
+  # Fail if any output line names the parent directory itself.
+  # NOTE(review): '\.' in this non-raw string is an invalid escape
+  # sequence (DeprecationWarning on Python 3.6+); a raw string would be
+  # cleaner.  Left untouched: this is a recorded upstream diff.
+  for line in out:
+    # The "/?" is just to allow for an optional trailing slash.
+    if re.match("\\s+\\*.*\.other/?$", line):
+      raise svntest.Failure
+
+@Issue(1289)
+def status_on_forward_deletion(sbox):
+  "status -u on working copy deleted in HEAD"
+  # See issue #1289.
+  sbox.build(create_wc = False)
+  wc_dir = sbox.wc_dir
+
+  top_url = sbox.repo_url
+  A_url = top_url + '/A'
+
+  # Delete A directly in the repository, so HEAD no longer contains it.
+  svntest.main.run_svn(None,
+                       'rm', '-m', 'Remove A.', A_url)
+
+  svntest.main.safe_rmtree(wc_dir)
+  os.mkdir(wc_dir)
+
+  os.chdir(wc_dir)
+
+  # Check out r1 (which still has A) and run 'status -u' against the
+  # newer HEAD in which A is gone; this must not error.
+  svntest.main.run_svn(None,
+                       'co', '-r1', top_url + "@1", 'wc')
+  # If the bug is present, this will error with
+  #
+  # subversion/libsvn_wc/lock.c:513: (apr_err=155005)
+  # svn: Working copy not locked
+  # svn: directory '' not locked
+  #
+  svntest.actions.run_and_verify_svn(None, [], 'st', '-u', 'wc')
+
+  # Try again another way; the error would look like this:
+  #
+  # subversion/libsvn_repos/delta.c:207: (apr_err=160005)
+  # svn: Invalid filesystem path syntax
+  # svn: svn_repos_dir_delta: invalid editor anchoring; at least \
+  # one of the input paths is not a directory and there was \
+  # no source entry.
+  #
+  # (Dang! Hope a user never has to see that :-) ).
+  #
+  svntest.main.safe_rmtree('wc')
+  svntest.main.run_svn(None,
+                       'co', '-r1', A_url + "@1", 'wc')
+  svntest.actions.run_and_verify_svn(None, [], 'st', '-u', 'wc')
+
+#----------------------------------------------------------------------
+
+def get_last_changed_date(path):
+  "get the Last Changed Date for path using svn info"
+  # Returns the raw "Last Changed Date: ..." output line, field name
+  # included; callers compare these lines textually, not as dates.
+  # Raises svntest.Failure if the field is absent.
+  exit_code, out, err = svntest.actions.run_and_verify_svn(None, [],
+                                                           'info', path)
+  for line in out:
+    if re.match("^Last Changed Date", line):
+      return line
+  logger.warn("Didn't find Last Changed Date for %s", path)
+  raise svntest.Failure
+
+# Helper for timestamp_behaviour test
+def get_text_timestamp(path):
+  "get the text-time for path using svn info"
+  # Returns the raw "Text Last Updated: ..." line from 'svn info';
+  # raises svntest.Failure if the field is missing.
+  exit_code, out, err = svntest.actions.run_and_verify_svn(None, [],
+                                                           'info', path)
+  for line in out:
+    if re.match("^Text Last Updated", line):
+      return line
+  logger.warn("Didn't find text-time for %s", path)
+  raise svntest.Failure("didn't find text-time")
+
+def no_text_timestamp(path):
+  "ensure no text-time for path using svn info"
+  # Inverse of get_text_timestamp(): fails if 'svn info' reports a
+  # "Text Last Updated" field for path.
+  exit_code, out, err = svntest.actions.run_and_verify_svn(None, [],
+                                                           'info', path)
+  for line in out:
+    if re.match("^Text Last Updated", line):
+      logger.warn("Found text-time for %s", path)
+      raise svntest.Failure("found text-time")
+
+# Helper for timestamp_behaviour test
+def text_time_behaviour(wc_dir, wc_path, status_path, expected_status, cmd):
+  "text-time behaviour"
+  # Verifies that the recorded text-time of wc_path is unchanged by a
+  # plain text modification and by a manual revert, but IS changed by
+  # the given command ('revert' or 'cleanup').  expected_status is
+  # tweaked in place for status_path.
+
+  # Pristine text and text-time
+  # NOTE(review): this file handle is never closed explicitly; it is
+  # only reclaimed by garbage collection.  Left as-is (recorded diff).
+  fp = open(wc_path, 'rb')
+  pre_text = fp.readlines()
+  pre_text_time = get_text_timestamp(wc_path)
+
+  # Modifying the text does not affect text-time
+  svntest.main.file_append(wc_path, "some mod")
+  expected_status.tweak(status_path, status='M ')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+  text_time = get_text_timestamp(wc_path)
+  if text_time != pre_text_time:
+    raise svntest.Failure
+
+  # Manually reverting the text does not affect the text-time
+  fp = open(wc_path, 'wb')
+  fp.writelines(pre_text)
+  fp.close()
+  expected_status.tweak(status_path, status=' ')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+  text_time = get_text_timestamp(wc_path)
+  if text_time != pre_text_time:
+    raise svntest.Failure
+
+  # revert/cleanup change the text-time even though the text doesn't change
+  if cmd == 'cleanup':
+    svntest.actions.run_and_verify_svn(None, [], cmd, wc_dir)
+  else:
+    svntest.actions.run_and_verify_svn(None, [], cmd, wc_path)
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+  text_time = get_text_timestamp(wc_path)
+  if text_time == pre_text_time:
+    raise svntest.Failure
+
+# Is this really a status test? I'm not sure, but I don't know where
+# else to put it.
+@Issue(3773)
+def timestamp_behaviour(sbox):
+  "timestamp behaviour"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  A_path = sbox.ospath('A')
+  iota_path = sbox.ospath('iota')
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Sleep to ensure timestamps change
+  time.sleep(1.1)
+
+  # Check behaviour of revert on text-time
+  text_time_behaviour(wc_dir, iota_path, 'iota', expected_status, 'revert')
+
+  # Sleep to ensure timestamps change
+  time.sleep(1.1)
+
+  # Check behaviour of cleanup on text-time
+  text_time_behaviour(wc_dir, iota_path, 'iota', expected_status, 'cleanup')
+
+  # Create a config to enable use-commit-times
+  config_contents = '''\
+[auth]
+password-stores =
+
+[miscellany]
+use-commit-times = yes
+'''
+  config_dir = sbox.create_config_dir(config_contents)
+
+  # A fresh checkout with use-commit-times=yes must give iota a
+  # text-time equal to its last-changed (commit) date.
+  other_wc = sbox.add_wc_path('other')
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'co', sbox.repo_url,
+                                     other_wc,
+                                     '--config-dir', config_dir)
+
+  other_iota_path = os.path.join(other_wc, 'iota')
+  iota_text_timestamp = get_text_timestamp(other_iota_path)
+  iota_last_changed = get_last_changed_date(other_iota_path)
+  # Index 17 skips past the differing field names ("Text Last Updated:"
+  # vs "Last Changed Date:") so only the date portions are compared.
+  if (iota_text_timestamp[17] != ':' or
+      iota_text_timestamp[17:] != iota_last_changed[17:]):
+    raise svntest.Failure
+
+  # remove iota, run an update to restore it, and check the times
+  os.remove(other_iota_path)
+  expected_output = svntest.wc.State(other_wc, {
+    'iota': Item(verb='Restored'),
+    })
+  expected_disk = svntest.main.greek_state.copy()
+  expected_status = svntest.actions.get_virginal_state(other_wc, 1)
+  svntest.actions.run_and_verify_update(other_wc, expected_output,
+                                        expected_disk, expected_status,
+                                        [], False,
+                                        other_wc, '--config-dir', config_dir)
+  iota_text_timestamp = get_text_timestamp(other_iota_path)
+  if (iota_text_timestamp[17] != ':' or
+      iota_text_timestamp[17:] != iota_last_changed[17:]):
+    raise svntest.Failure
+
+  # Slice out the "YYYY-MM-DD HH:MM:SS +ZZZZ" part of the info line.
+  iota_ts = iota_text_timestamp[19:44]
+
+  class TZ(datetime.tzinfo):
+    "A tzinfo to convert a time to iota's timezone."
+    def utcoffset(self, dt):
+      # Parse the "+HHMM"/"-HHMM" suffix of iota_ts into minutes.
+      offset = (int(iota_ts[21:23]) * 60 + int(iota_ts[23:25]))
+      if iota_ts[20] == '-':
+        return datetime.timedelta(minutes=-offset)
+      return datetime.timedelta(minutes=offset)
+    def dst(self, dt):
+      return datetime.timedelta(0)
+
+  # get the timestamp on the file. whack any microseconds value, as svn
+  # doesn't record to that precision. we also use the TZ class to shift
+  # the timestamp into the same timezone as the expected timestamp.
+  mtime = datetime.datetime.fromtimestamp(os.path.getmtime(other_iota_path),
+                                          TZ()).replace(microsecond=0)
+  fmt = mtime.isoformat(' ')
+
+  # iota_ts looks like: 2009-04-13 14:30:57 +0200
+  # fmt looks like: 2009-04-13 14:30:57+02:00
+  if (fmt[:19] != iota_ts[:19]
+      or fmt[19:22] != iota_ts[20:23]
+      or fmt[23:25] != iota_ts[23:25]):
+    # NOTE: the two strings below won't *exactly* match (see just above),
+    # but the *numeric* portions of them should.
+    logger.warn("File timestamp on 'iota' does not match.")
+    logger.warn(" EXPECTED: %s", iota_ts)
+    logger.warn(" ACTUAL: %s", fmt)
+    raise svntest.Failure
+
+#----------------------------------------------------------------------
+
+@Issues(1617,2030)
+def status_on_unversioned_dotdot(sbox):
+  "status on '..' where '..' is not versioned"
+  # See issue #1617 (and #2030).
+  sbox.build(read_only = True)
+  wc_dir = sbox.wc_dir
+
+  # Three nested unversioned directories inside the working copy.
+  new_dir = sbox.ospath('new')
+  new_sub = sbox.ospath('new/sub')
+  new_subsub = sbox.ospath('new/sub/sub')
+  os.mkdir(new_dir)
+  os.mkdir(new_sub)
+  os.mkdir(new_subsub)
+
+  # 'svn st ..' from an unversioned dir should produce a W155007/W155010
+  # "not a working copy" warning, not a crash.
+  os.chdir(new_subsub)
+  svntest.actions.run_and_verify_svn2(None,
+                                      "svn: warning: W155(010|007):.*'.*'.*not",
+                                      0, 'st', '..')
+
+#----------------------------------------------------------------------
+
+@Issue(2122)
+def status_on_partially_nonrecursive_wc(sbox):
+  "status -u in partially non-recursive wc"
+  # Based on issue #2122.
+  #
+  # $ svn co -N -r 213 svn://svn.debian.org/pkg-kde .
+  # A README
+  # Checked out revision 213.
+  #
+  # $ svn up -r 213 scripts www
+  # [ List of scripts/* files.]
+  # Updated to revision 213.
+  # [ List of www/* files.]
+  # Updated to revision 213.
+  #
+  # $ svn st -u
+  # * 213 www/IGNORE-ME
+  # * 213 www
+  # svn: subversion/libsvn_wc/status.c:910: tweak_statushash: \
+  # Assertion `repos_text_status == svn_wc_status_added' failed. \
+  # Aborted (core dumped)
+  #
+  # You might think that the intermediate "svn up -r 213 scripts www"
+  # step is unnecessary, but when I tried eliminating it, I got
+  #
+  # $ svn st -u
+  # subversion/libsvn_wc/lock.c:642: (apr_err=155005)
+  # svn: Working copy 'www' not locked
+  # $
+  #
+  # instead of the assertion error.
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  top_url = sbox.repo_url
+  A_url = top_url + '/A'
+  D_url = top_url + '/A/D'
+  G_url = top_url + '/A/D/G'
+  H_url = top_url + '/A/D/H'
+  rho = sbox.ospath('A/D/G/rho')
+
+  # Commit a change to A/D/G/rho. This will be our equivalent of
+  # whatever change it was that happened between r213 and HEAD in the
+  # reproduction recipe. For us, it's r2.
+  svntest.main.file_append(rho, 'Whan that Aprille with his shoores soote\n')
+  svntest.main.run_svn(None,
+                       'ci', '-m', 'log msg', rho)
+
+  # Make the working copy weird in the right way, then try status -u.
+  D_wc = sbox.add_wc_path('D')
+  svntest.main.run_svn(None,
+                       'co', '-r1', '-N', D_url, D_wc)
+
+  # The test passes as long as neither command crashes or asserts.
+  os.chdir(D_wc)
+  svntest.main.run_svn(None,
+                       'up', '-r1', 'H')
+  svntest.main.run_svn(None,
+                       'st', '-u')
+
+
+def missing_dir_in_anchor(sbox):
+  "a missing dir in the anchor"
+
+  sbox.build(read_only = True)
+  wc_dir = sbox.wc_dir
+
+  # Schedule-add a directory, then remove it from disk; status must
+  # report it as missing ('!') without erroring.
+  foo_path = sbox.ospath('foo')
+  svntest.actions.run_and_verify_svn(None, [], 'mkdir', foo_path)
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'foo' : Item(status='A ', wc_rev=0),
+    })
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # At one point this caused a "foo not locked" error
+  svntest.main.safe_rmtree(foo_path)
+  expected_status.tweak('foo', status='! ', entry_status='A ',
+                        wc_rev='-', entry_rev='0')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+def status_in_xml(sbox):
+  "status output in XML format"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  file_name = "iota"
+  file_path = os.path.join(wc_dir, file_name)
+  svntest.main.file_append(file_path, "test status --xml\n")
+
+  # Retrieve last changed date from svn log
+  exit_code, output, error = svntest.actions.run_and_verify_svn(
+    None, [], 'log', file_path, '--xml', '-rHEAD')
+
+  # NOTE(review): time_str is captured but never used below; the loop
+  # effectively just verifies that a <date> element is present.
+  info_msg = "<date>"
+  for line in output:
+    if line.find(info_msg) >= 0:
+      time_str = line[:len(line)]
+      break
+  else:
+    raise svntest.Failure
+
+  # Locally modified file: wc item state 'modified'.
+  expected_entries = {file_path : {'wcprops' : 'none',
+                                   'wcitem' : 'modified',
+                                   'wcrev' : '1',
+                                   'crev' : '1',
+                                   'author' : svntest.main.wc_author}}
+
+  svntest.actions.run_and_verify_status_xml(expected_entries, file_path, '-u')
+
+  # A repos-to-repos copy shows up remotely as 'added'.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'cp', '-m', 'repo-to-repo copy',
+                                     sbox.repo_url + '/iota',
+                                     sbox.repo_url + '/iota2')
+
+  file_path = sbox.ospath('iota2')
+
+  expected_entries = {file_path : {'wcprops' : 'none',
+                                   'wcitem' : 'none',
+                                   'rprops' : 'none',
+                                   'ritem' : 'added'}}
+
+  svntest.actions.run_and_verify_status_xml(expected_entries, file_path, '-u')
+
+  # A repository-side delete shows up remotely as 'deleted'.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'rm', '-m', 'repo delete',
+                                     sbox.repo_url + '/A/B/E/alpha')
+
+  expected_entries = {sbox.ospath('A/B/E/alpha')
+                      : {'wcprops' : 'none',
+                         'wcitem' : 'normal',
+                         'wcrev' : '1',
+                         'crev' : '1',
+                         'author' : svntest.main.wc_author,
+                         'rprops' : 'none',
+                         'ritem' : 'deleted'}}
+
+  svntest.actions.run_and_verify_status_xml(expected_entries,
+                                            sbox.ospath('A/B/E/alpha'), '-u')
+
+#----------------------------------------------------------------------
+
+def status_ignored_dir(sbox):
+  "status on ignored directory"
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  new_dir = sbox.ospath('dir.o')
+  new_dir_url = sbox.repo_url + "/dir.o"
+
+  # Create dir.o in the repository (r2) but not in the working copy.
+  svntest.actions.run_and_verify_svn(["Committing transaction...\n",
+                                      "Committed revision 2.\n"], [],
+                                     'mkdir', new_dir_url, '-m', 'msg')
+
+  # Make a dir that is ignored by the default ignore patterns.
+  os.mkdir(new_dir)
+
+  # run_and_verify_status doesn't handle this weird kind of entry.
+  svntest.actions.run_and_verify_svn(['I * ' + new_dir + "\n",
+                                      ' * 1 ' + wc_dir + "\n",
+                                      'Status against revision: 2\n'], [],
+                                     "status", "-u", wc_dir)
+
+#----------------------------------------------------------------------
+
+def status_unversioned_dir_in_wc(sbox):
+  "status on unversioned dir in working copy"
+  sbox.build(read_only = True)
+
+  # Create two unversioned directories within the test working copy
+  path = sbox.ospath('1/2')
+  os.makedirs(path)
+
+  # Status on the inner unversioned dir must emit a W155007/W155010
+  # warning naming that dir, with no regular output.
+  expected_err = "svn: warning: (W155007|W155010): .*'.*(/|\\\\)" + \
+                 os.path.basename(path) + \
+                 "' was not found"
+  svntest.actions.run_and_verify_svn2([], expected_err, 0,
+                                      "status", path)
+
+#----------------------------------------------------------------------
+
+def status_missing_dir(sbox):
+  "status with a versioned directory missing"
+  sbox.build(read_only = True)
+  wc_dir = sbox.wc_dir
+  a_d_g = sbox.ospath('A/D/G')
+
+  # ok, blow away the A/D/G directory
+  svntest.main.safe_rmtree(a_d_g)
+
+  # Plain status: the missing dir and its children all show '!'.
+  expected = [
+    '! A/D/G\n',
+    '! A/D/G/rho\n',
+    '! A/D/G/pi\n',
+    '! A/D/G/tau\n',
+  ]
+  expected = [ s.replace('A/D/G', a_d_g).replace('/', os.path.sep)
+               for s in expected ]
+
+  svntest.actions.run_and_verify_svn(UnorderedOutput(expected), [],
+                                     "status", wc_dir)
+
+  expected = [
+    "! 1 " + a_d_g + "\n",
+    "! 1 " + os.path.join(a_d_g, "rho") + "\n",
+    "! 1 " + os.path.join(a_d_g, "pi") + "\n",
+    "! 1 " + os.path.join(a_d_g, "tau") + "\n",
+    "Status against revision: 1\n"
+  ]
+
+  # now run status -u, we should be able to do this without crashing
+  svntest.actions.run_and_verify_svn(UnorderedOutput(expected), [],
+                                     "status", "-u", wc_dir)
+
+  # Finally run an explicit status request directly on the missing directory.
+  expected = [
+    "! A/D/G\n",
+    "! A/D/G/rho\n",
+    "! A/D/G/pi\n",
+    "! A/D/G/tau\n",
+  ]
+  expected = [ s.replace('A/D/G', a_d_g).replace('/', os.path.sep)
+               for s in expected ]
+
+  svntest.actions.run_and_verify_svn(UnorderedOutput(expected), [],
+                                     "status", a_d_g)
+
+def status_add_plus_conflict(sbox):
+  "status on conflicted added file"
+  sbox.build()
+  svntest.actions.do_sleep_for_timestamps()
+
+  wc_dir = sbox.wc_dir
+
+  branch_url = sbox.repo_url + '/branch'
+  trunk_url = sbox.repo_url + '/trunk'
+
+  # r2: create empty branch and trunk directories.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'mkdir', '-m', 'rev 2',
+                                     branch_url, trunk_url)
+
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'update', wc_dir)
+
+  branch_file = sbox.ospath('branch/file')
+
+  svntest.main.file_write(branch_file, "line 1\nline2\nline3\n", 'wb+')
+
+  svntest.actions.run_and_verify_svn(None, [], 'add', branch_file)
+
+  # r3: add branch/file; r4 and r5: two further edits to it.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'commit',
+                                     branch_file, '-m', 'rev 3')
+
+  svntest.main.file_write(branch_file, "line 1\nline3\n", 'wb')
+
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'commit',
+                                     branch_file, '-m', 'rev 4')
+
+  svntest.main.file_write(branch_file, "line 1\nline2\n", 'wb')
+
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'commit',
+                                     branch_file, '-m', 'rev 5')
+
+  trunk_dir = sbox.ospath('trunk')
+
+  # Merge the add (2:3) and then a conflicting change (4:5) into trunk,
+  # producing an added-with-history file that is in text conflict.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'merge',
+                                     branch_url, '-r', '2:3', trunk_dir)
+
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'merge',
+                                     branch_url, '-r', '4:5', trunk_dir)
+
+  # Expect 'C +' (conflicted, copied) for the file, plus the three
+  # conflict artifacts listed as unversioned '?' items.
+  lines = [
+    "? " + sbox.ospath('trunk/file.merge-left.r4') + "\n",
+    "? " + sbox.ospath('trunk/file.merge-right.r5') + "\n",
+    "? " + sbox.ospath('trunk/file.working') + "\n",
+    "C + " + sbox.ospath('trunk/file') + "\n",
+  ] + svntest.main.summary_of_conflicts(text_conflicts=1)
+  if svntest.main.server_has_mergeinfo():
+    lines.append(" M " + sbox.ospath('trunk') + "\n")
+
+  expected_output = svntest.verify.UnorderedOutput(lines)
+
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'status', wc_dir)
+
+#----------------------------------------------------------------------
+
+def inconsistent_eol(sbox):
+  "status with inconsistent eol style"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  iota_path = sbox.ospath('iota')
+
+  # Commit iota with svn:eol-style=native and consistent LF endings.
+  svntest.main.file_write(iota_path, "line 1\nline 2\n", "wb")
+
+  svntest.actions.run_and_verify_svn("property 'svn:eol-style' set on.*iota",
+                                     [],
+                                     'propset', 'svn:eol-style', 'native',
+                                     sbox.ospath('iota'))
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'iota' : Item(verb='Sending'),
+    })
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('iota', wc_rev=2)
+
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  # Make the eol style inconsistent and verify that status says nothing.
+  svntest.main.file_write(iota_path, "line 1\nline 2\r\n", "wb")
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+# Test for issue #2533
+@Issue(2533)
+def status_update_with_incoming_props(sbox):
+  "run 'status -u' variations w/ incoming propchanges"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  A_path = sbox.ospath('A')
+
+  # Add a property to the root folder and a subdir
+  svntest.main.run_svn(None, 'propset', 'red', 'rojo', wc_dir)
+  svntest.main.run_svn(None, 'propset', 'black', 'bobo', A_path)
+
+  # Create expected output tree.
+  expected_output = svntest.wc.State(wc_dir, {
+    '' : Item(verb='Sending'),
+    'A' : Item(verb='Sending'),
+    })
+
+  # Created expected status tree.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('', wc_rev=2, status=' ')
+  expected_status.tweak('A', wc_rev=2, status=' ')
+
+  # Commit the working copy
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  # Create expected trees for an update to revision 1.
+  expected_output = svntest.wc.State(wc_dir, {
+    '' : Item(status=' U'),
+    'A' : Item(status=' U'),
+    })
+  expected_disk = svntest.main.greek_state.copy()
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+  # Do the update and check the results in three ways... INCLUDING PROPS
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        [], True,
+                                        '-r', '1', wc_dir)
+
+  # Can't use run_and_verify_status here because the out-of-date
+  # information in the status output isn't copied in the status tree.
+  # The two dirs with incoming propchanges carry the '*' marker.
+  expected = svntest.verify.UnorderedOutput(
+    [" * 1 " + A_path + "\n",
+     " * 1 " + wc_dir + "\n",
+     "Status against revision: 2\n" ])
+
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "-u",
+                                     wc_dir)
+
+  expected = svntest.verify.UnorderedOutput(
+    [" 1 1 jrandom " +
+     sbox.ospath('iota') + "\n",
+     " * 1 1 jrandom " + A_path + "\n",
+     " * 1 1 jrandom " + wc_dir + "\n",
+     "Status against revision: 2\n" ])
+
+  svntest.actions.run_and_verify_svn(expected, [],
+                                     "status", "-uvN",
+                                     wc_dir)
+
+  # Retrieve last changed date from svn log
+  exit_code, output, error = svntest.actions.run_and_verify_svn(None, [],
+                                                                'log', wc_dir,
+                                                                '--xml', '-r1')
+
+  # NOTE(review): as in status_in_xml, time_str is never used afterwards;
+  # the loop only verifies that a <date> element exists.
+  info_msg = "<date>"
+  for line in output:
+    if line.find(info_msg) >= 0:
+      time_str = line[:len(line)]
+      break
+  else:
+    raise svntest.Failure
+
+  expected_entries ={wc_dir : {'wcprops' : 'none',
+                               'wcitem' : 'normal',
+                               'wcrev' : '1',
+                               'crev' : '1',
+                               'author' : svntest.main.wc_author,
+                               'rprops' : 'modified',
+                               'ritem' : 'none'},
+                     A_path : {'wcprops' : 'none',
+                               'wcitem' : 'normal',
+                               'wcrev' : '1',
+                               'crev' : '1',
+                               'author' : svntest.main.wc_author,
+                               'rprops' : 'modified',
+                               'ritem' : 'none'},
+                     }
+
+  svntest.actions.run_and_verify_status_xml(expected_entries, wc_dir, '-uN')
+
+# more incoming prop updates.
+def status_update_verbose_with_incoming_props(sbox):
+  "run 'status -uv' w/ incoming propchanges"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  A_path = sbox.ospath('A')
+  D_path = os.path.join(A_path, 'D')
+  B_path = os.path.join(A_path, 'B')
+  E_path = os.path.join(A_path, 'B', 'E')
+  G_path = os.path.join(A_path, 'D', 'G')
+  H_path = os.path.join(A_path, 'D', 'H')
+  # Add a property to the root folder and a subdir
+  svntest.main.run_svn(None, 'propset', 'red', 'rojo', D_path)
+  svntest.main.run_svn(None, 'propset', 'black', 'bobo', E_path)
+  svntest.main.run_svn(None, 'propset', 'black', 'bobo', wc_dir)
+
+  # Create expected output tree.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/D' : Item(verb='Sending'),
+    'A/B/E' : Item(verb='Sending'),
+    '' : Item(verb='Sending'),
+    })
+  # Created expected status tree.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/D', wc_rev=2, status=' ')
+  expected_status.tweak('A/B/E', wc_rev=2, status=' ')
+  expected_status.tweak('', wc_rev=2, status=' ')
+
+  # Commit the working copy
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  # Create expected trees for an update to revision 1.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/D' : Item(status=' U'),
+    'A/B/E' : Item(status=' U'),
+    '' : Item(status=' U'),
+    })
+  expected_disk = svntest.main.greek_state.copy()
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+  # Do the update and check the results in three ways... INCLUDING PROPS
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        [], True,
+                                        '-r', '1', wc_dir)
+
+  # Can't use run_and_verify_status here because the out-of-date
+  # information in the status output isn't copied in the status tree.
+  # Only the three dirs with incoming propchanges (E, D, root) carry
+  # the '*' out-of-date marker in the verbose listing.
+  common = " 1 1 jrandom "
+  expected = svntest.verify.UnorderedOutput(
+    [" " + common + os.path.join(E_path, 'alpha') + "\n",
+     " " + common + os.path.join(E_path, 'beta') + "\n",
+     " *" + common + os.path.join(E_path) + "\n",
+     " " + common + os.path.join(B_path, 'lambda') + "\n",
+     " " + common + os.path.join(B_path, 'F') + "\n",
+     " " + common + B_path + "\n",
+     " " + common + os.path.join(G_path, 'pi') + "\n",
+     " " + common + os.path.join(G_path, 'rho') + "\n",
+     " " + common + os.path.join(G_path, 'tau') + "\n",
+     " " + common + G_path + "\n",
+     " " + common + os.path.join(H_path, 'chi') + "\n",
+     " " + common + os.path.join(H_path, 'omega') + "\n",
+     " " + common + os.path.join(H_path, 'psi') + "\n",
+     " " + common + H_path + "\n",
+     " " + common + os.path.join(D_path, 'gamma') + "\n",
+     " *" + common + D_path + "\n",
+     " " + common + os.path.join(A_path, 'mu') + "\n",
+     " " + common + os.path.join(A_path, 'C') + "\n",
+     " " + common + A_path + "\n",
+     " " + common + sbox.ospath('iota') + "\n",
+     " *" + common + wc_dir + "\n",
+     "Status against revision: 2\n" ])
+
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "-uv", wc_dir)
+
+#----------------------------------------------------------------------
+# Test for issue #2468
+@Issue(2468)
+def status_nonrecursive_update(sbox):
+  "run 'status -uN' with incoming changes"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  A_path = sbox.ospath('A')
+  D_path = os.path.join(A_path, 'D')
+  mu_path = os.path.join(A_path, 'mu')
+  gamma_path = os.path.join(D_path, 'gamma')
+
+  # Change files in A and D and commit
+  svntest.main.file_append(mu_path, "new line of text")
+  svntest.main.file_append(gamma_path, "new line of text")
+
+  # Create expected trees for commit
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/mu' : Item(verb='Sending'),
+    'A/D/gamma' : Item(verb='Sending')
+    })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/mu', wc_rev=2, status=' ')
+  expected_status.tweak('A/D/gamma', wc_rev=2, status=' ')
+
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  # Create expected trees for an update to revision 1.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/mu' : Item(status='U '),
+    'A/D/gamma' : Item(status='U '),
+    })
+  expected_disk = svntest.main.greek_state.copy()
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+  # Do the update and check the results in three ways
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        [], False,
+                                        '-r', '1', wc_dir)
+
+  # Check the remote status of folder A (non-recursively)
+  # Only A/mu is visible at this depth; A/D/gamma must not be listed.
+  xout = [" * 1 " + sbox.ospath('A/mu') + "\n",
+          "Status against revision: 2\n" ]
+
+  svntest.actions.run_and_verify_svn(xout,
+                                     [],
+                                     "status", "-uN", A_path)
+
+def change_files(wc_dir, files):
+  """Make a basic change to the files.
+  files = a list of paths relative to the wc root directory
+  """
+
+  # Append a line to each file; callers commit separately if desired.
+  for file in files:
+    filepath = os.path.join(wc_dir, file)
+    svntest.main.file_append(filepath, "new line of text")
+
+def change_files_and_commit(wc_dir, files, baserev=1):
+  """Make a basic change to the files and commit them.
+  files = a list of paths relative to the wc root directory
+  baserev = the revision the wc is assumed to be at; the commit
+  produces baserev + 1
+  """
+
+  change_files(wc_dir, files)
+
+  # Prepare expected trees for commit
+  # NOTE(review): the two entries seeded here are re-added by the loop
+  # below for the usual ['A/mu', 'A/D/gamma'] callers; they look like
+  # leftovers from a less general version of this helper.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/mu' : Item(verb='Sending'),
+    'A/D/gamma' : Item(verb='Sending')
+    })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+  commitrev = baserev + 1
+  for file in files:
+    expected_output.add({file : Item(verb='Sending')})
+    expected_status.tweak(file, wc_rev=commitrev, status=' ')
+
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+def status_depth_local(sbox):
+  "run 'status --depth=X' with local changes"
+  # Exercises --depth=empty/files/immediates/infinity against purely
+  # local text and property modifications.
+
+  sbox.build(read_only = True)
+  wc_dir = sbox.wc_dir
+  A_path = sbox.ospath('A')
+  D_path = os.path.join(A_path, 'D')
+
+  mu_path = os.path.join(A_path, 'mu')
+  gamma_path = os.path.join(D_path, 'gamma')
+
+  # make some changes to the greek tree
+  change_files(wc_dir, ['A/mu', 'A/D/gamma'])
+  svntest.main.run_svn(None, 'propset', '--force', 'svn:test', 'value', A_path)
+  svntest.main.run_svn(None, 'propset', '--force', 'svn:test', 'value', D_path)
+
+  # for all the possible types of depth, check the status
+
+  # depth=empty
+  expected = svntest.verify.UnorderedOutput(
+    [" M %s\n" % A_path])
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "--depth=empty", A_path)
+
+  # depth=files
+  expected = svntest.verify.UnorderedOutput(
+    [" M %s\n" % A_path,
+     "M %s\n" % mu_path])
+
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "--depth=files", A_path)
+
+  # depth=immediates
+  expected = svntest.verify.UnorderedOutput(
+    [" M %s\n" % A_path,
+     " M %s\n" % D_path,
+     "M %s\n" % mu_path])
+
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "--depth=immediates", A_path)
+
+  # depth=infinity (the default)
+  expected = svntest.verify.UnorderedOutput(
+    [" M %s\n" % A_path,
+     " M %s\n" % D_path,
+     "M %s\n" % mu_path,
+     "M %s\n" % gamma_path])
+
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "--depth=infinity", A_path)
+
+def status_depth_update(sbox):
+  "run 'status --depth=X' -u with incoming changes"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  A_path = sbox.ospath('A')
+  D_path = os.path.join(A_path, 'D')
+
+  mu_path = os.path.join(A_path, 'mu')
+  gamma_path = os.path.join(D_path, 'gamma')
+
+  # add some files, change directory properties
+  change_files_and_commit(wc_dir, ['A/mu', 'A/D/gamma'])
+  svntest.main.run_svn(None, 'up', wc_dir)
+  svntest.main.run_svn(None, 'propset', '--force', 'svn:test', 'value', A_path)
+  svntest.main.run_svn(None, 'propset', '--force', 'svn:test', 'value', D_path)
+  sbox.simple_commit(message='log message')
+
+  # update to r1
+  # r2 holds the file changes, r3 the dir propchanges; the wc sits at
+  # r1, so all four paths are out of date ('*') against HEAD (r3).
+  svntest.main.run_svn(None, 'up', '-r', '1', wc_dir)
+
+  # for all the possible types of depth, check the status
+
+  # depth=empty
+  expected = svntest.verify.UnorderedOutput(
+    [" * 1 %s\n" % A_path,
+     "Status against revision: 3\n"])
+
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "-u", "--depth=empty", A_path)
+
+  # depth=files
+  expected = svntest.verify.UnorderedOutput(
+    [" * 1 %s\n" % mu_path,
+     " * 1 %s\n" % A_path,
+     "Status against revision: 3\n"])
+
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "-u", "--depth=files",
+                                     A_path)
+
+  # depth=immediates
+  expected = svntest.verify.UnorderedOutput(
+    [" * 1 %s\n" % A_path,
+     " * 1 %s\n" % D_path,
+     " * 1 %s\n" % mu_path,
+     "Status against revision: 3\n"])
+
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "-u", "--depth=immediates",
+                                     A_path)
+
+  # depth=infinity (the default)
+  expected = svntest.verify.UnorderedOutput(
+    [" * 1 %s\n" % A_path,
+     " * 1 %s\n" % D_path,
+     " * 1 %s\n" % mu_path,
+     " * 1 %s\n" % gamma_path,
+     "Status against revision: 3\n"])
+
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "-u", "--depth=infinity",
+                                     A_path)
+
+
+#----------------------------------------------------------------------
+def status_depth_update_local_modifications(sbox):
+  "run 'status --depth=X' -u with local changes"
+  # All changes here are local-only, so the local-status column is
+  # populated but no '*' out-of-date markers appear.
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  A_path = sbox.ospath('A')
+  D_path = os.path.join(A_path, 'D')
+
+  mu_path = os.path.join(A_path, 'mu')
+  gamma_path = os.path.join(D_path, 'gamma')
+
+  svntest.main.run_svn(None, 'propset', '--force', 'svn:test', 'value', A_path)
+  svntest.main.run_svn(None, 'propset', '--force', 'svn:test', 'value', D_path)
+
+  svntest.main.file_append(mu_path, 'modified')
+  svntest.main.file_append(gamma_path, 'modified')
+
+  # depth=empty
+  expected = svntest.verify.UnorderedOutput(
+    [" M 1 %s\n" % A_path,
+     "Status against revision: 1\n"])
+
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "-u", "--depth=empty", A_path)
+
+  expected = svntest.verify.UnorderedOutput(
+    ["M 1 %s\n" % mu_path,
+     "Status against revision: 1\n"])
+
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "-u", "--depth=empty", mu_path)
+
+  # depth=files
+  expected = svntest.verify.UnorderedOutput(
+    ["M 1 %s\n" % mu_path,
+     " M 1 %s\n" % A_path,
+     "Status against revision: 1\n"])
+
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "-u", "--depth=files",
+                                     A_path)
+
+  # depth=immediates
+  expected = svntest.verify.UnorderedOutput(
+    [" M 1 %s\n" % A_path,
+     " M 1 %s\n" % D_path,
+     "M 1 %s\n" % mu_path,
+     "Status against revision: 1\n"])
+
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "-u", "--depth=immediates",
+                                     A_path)
+
+  # depth=infinity (the default)
+  expected = svntest.verify.UnorderedOutput(
+    [" M 1 %s\n" % A_path,
+     " M 1 %s\n" % D_path,
+     "M 1 %s\n" % mu_path,
+     "M 1 %s\n" % gamma_path,
+     "Status against revision: 1\n"])
+
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "-u", "--depth=infinity",
+                                     A_path)
+
+#----------------------------------------------------------------------
+# Test for issue #2420
+@Issue(2420)
+def status_dash_u_deleted_directories(sbox):
+  "run 'status -u' with locally deleted directories"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  A_path = sbox.ospath('A')
+  B_path = os.path.join(A_path, 'B')
+
+  # delete the B directory
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'rm', B_path)
+
+  # now run status -u on B and its children
+  was_cwd = os.getcwd()
+
+  os.chdir(A_path)
+
+  # check status -u of B
+  expected = svntest.verify.UnorderedOutput(
+    ["D 1 1 jrandom %s\n" % \
+     "B",
+     "D 1 1 jrandom %s\n" % \
+     os.path.join("B", "lambda"),
+     "D 1 1 jrandom %s\n" % \
+     os.path.join("B", "E"),
+     "D 1 1 jrandom %s\n" % \
+     os.path.join("B", "E", "alpha"),
+     "D 1 1 jrandom %s\n" % \
+     os.path.join("B", "E", "beta"),
+     "D 1 1 jrandom %s\n" %
+     os.path.join("B", "F"),
+     "Status against revision: 1\n" ])
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "-u", "-v", "B")
+
+  # Non-verbose: only B itself is listed, not its children.
+  expected = \
+    ["D 1 %s\n" % "B",
+     "Status against revision: 1\n" ]
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "-u", "B")
+
+
+  # again, but now from inside B, should give the same output
+  # (recreate B on disk first, in case the delete removed it)
+  if not os.path.exists('B'):
+    os.mkdir('B')
+  os.chdir("B")
+  expected = svntest.verify.UnorderedOutput(
+    ["D 1 1 jrandom %s\n" % \
+     ".",
+     "D 1 1 jrandom %s\n" % \
+     "lambda",
+     "D 1 1 jrandom %s\n" % \
+     "E",
+     "D 1 1 jrandom %s\n" % \
+     os.path.join("E", "alpha"),
+     "D 1 1 jrandom %s\n" % \
+     os.path.join("E", "beta"),
+     "D 1 1 jrandom %s\n" % \
+     "F",
+     "Status against revision: 1\n" ])
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "-u", "-v", ".")
+
+  expected = \
+    ["D 1 %s\n" % ".",
+     "Status against revision: 1\n" ]
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "-u", ".")
+
+  # check status -u of B/E
+  expected = svntest.verify.UnorderedOutput(
+    ["D 1 1 jrandom %s\n" % \
+     os.path.join("B", "E"),
+     "D 1 1 jrandom %s\n" % \
+     os.path.join("B", "E", "alpha"),
+     "D 1 1 jrandom %s\n" % \
+     os.path.join("B", "E", "beta"),
+     "Status against revision: 1\n" ])
+
+  os.chdir(was_cwd)
+  os.chdir(A_path)
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "-u", "-v",
+                                     os.path.join("B", "E"))
+
+
+  # Non-verbose status -u on the deleted subdir: just the footer line.
+  expected = [ "Status against revision: 1\n" ]
+  svntest.actions.run_and_verify_svn(expected,
+                                     [],
+                                     "status", "-u",
+                                     os.path.join("B", "E"))
+
+#----------------------------------------------------------------------
+
+# Test for issue #2737: show obstructed status for versioned directories
+# replaced by local directories.
+@Issue(2737)
+def status_dash_u_type_change(sbox):
+ "status -u on versioned items whose type changed"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ os.chdir(wc_dir)
+
+ # Change the versioned file iota into an unversioned dir.
+ os.remove('iota')
+ os.mkdir('iota')
+
+ xout = ["~ 1 iota\n",
+ "Status against revision: 1\n" ]
+
+ svntest.actions.run_and_verify_svn(xout,
+ [],
+ "status", "-u")
+
+ # Change the versioned directory A into an unversioned dir.
+ svntest.main.safe_rmtree('A')
+ os.mkdir('A')
+
+ output = [
+ "! 1 A/mu\n",
+ "! 1 A/B\n",
+ "! 1 A/B/lambda\n",
+ "! 1 A/B/E\n",
+ "! 1 A/B/E/alpha\n",
+ "! 1 A/B/E/beta\n",
+ "! 1 A/B/F\n",
+ "! 1 A/C\n",
+ "! 1 A/D\n",
+ "! 1 A/D/gamma\n",
+ "! 1 A/D/G\n",
+ "! 1 A/D/G/rho\n",
+ "! 1 A/D/G/pi\n",
+ "! 1 A/D/G/tau\n",
+ "! 1 A/D/H\n",
+ "! 1 A/D/H/chi\n",
+ "! 1 A/D/H/omega\n",
+ "! 1 A/D/H/psi\n",
+ "~ 1 iota\n",
+ "Status against revision: 1\n"
+ ]
+
+ expected = svntest.verify.UnorderedOutput(
+ [s.replace('/', os.path.sep)
+ for s in output])
+
+ svntest.actions.run_and_verify_svn(expected,
+ [],
+ "status", "-u")
+
+#----------------------------------------------------------------------
+
+def status_with_tree_conflicts(sbox):
+ "status with tree conflicts"
+
+ # Status messages reflecting tree conflict status.
+ # These tests correspond to use cases 1-3 in
+ # notes/tree-conflicts/use-cases.txt.
+
+ svntest.actions.build_greek_tree_conflicts(sbox)
+ wc_dir = sbox.wc_dir
+ G = sbox.ospath('A/D/G')
+ pi = os.path.join(G, 'pi')
+ rho = os.path.join(G, 'rho')
+ tau = os.path.join(G, 'tau')
+
+ # check status of G
+ expected = svntest.verify.UnorderedOutput(
+ ["A + C %s\n" % rho,
+ " > local file edit, incoming file delete or move upon update\n",
+ "D C %s\n" % pi,
+ " > local file delete, incoming file edit upon update\n",
+ "! C %s\n" % tau,
+ " > local file delete, incoming file delete or move upon update\n",
+ ] + svntest.main.summary_of_conflicts(tree_conflicts=3))
+
+ svntest.actions.run_and_verify_svn(expected,
+ [],
+ "status", G)
+
+ # check status of G, with -v
+ expected = svntest.verify.UnorderedOutput(
+ [" 2 2 jrandom %s\n" % G,
+ "D C 2 2 jrandom %s\n" % pi,
+ " > local file delete, incoming file edit upon update\n",
+ "A + C - 1 jrandom %s\n" % rho,
+ " > local file edit, incoming file delete or move upon update\n",
+ "! C %s\n" % tau,
+ " > local file delete, incoming file delete or move upon update\n",
+ ] + svntest.main.summary_of_conflicts(tree_conflicts=3))
+
+ svntest.actions.run_and_verify_svn(expected,
+ [],
+ "status", "-v", G)
+
+ # check status of G, with -xml
+ exit_code, output, error = svntest.main.run_svn(None, 'status', G, '--xml',
+ '-v')
+
+ should_be_victim = {
+ G: False,
+ pi: True,
+ rho: True,
+ tau: True,
+ }
+
+ real_entry_count = 0
+ output_str = r"".join(output)
+ # skip the first string, which contains only 'status' and 'target' elements
+ entries = output_str.split("<entry")[1:]
+
+ for entry in entries:
+ # get the entry's path
+ m = re.search('path="([^"]+)"', entry)
+ if m:
+ real_entry_count += 1
+ path = m.group(1)
+ # check if the path should be a victim
+ m = re.search('tree-conflicted="true"', entry)
+ if (m is None) and should_be_victim[path]:
+ logger.warn("ERROR: expected '%s' to be a tree conflict victim.", path)
+ logger.warn("ACTUAL STATUS OUTPUT:")
+ logger.warn(output_str)
+ raise svntest.Failure
+ if m and not should_be_victim[path]:
+ logger.warn("ERROR: did NOT expect '%s' to be a tree conflict victim.",
+ path)
+ logger.warn("ACTUAL STATUS OUTPUT:")
+ logger.warn(output_str)
+ raise svntest.Failure
+
+ if real_entry_count != len(should_be_victim):
+ logger.warn("ERROR: 'status --xml' output is incomplete.")
+ raise svntest.Failure
+
+
+#----------------------------------------------------------------------
+# Regression for issue #3742
+@Issue(3742)
+def status_nested_wc_old_format(sbox):
+ "status on wc with nested old-format wc"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+ os.mkdir(sbox.ospath('subdir'))
+ os.mkdir(sbox.ospath('subdir/.svn'))
+ svntest.main.file_append(sbox.ospath('subdir/.svn/format'),
+ '10\n') # format 10 was the Subversion 1.6 format
+ os.chdir(wc_dir)
+ svntest.actions.run_and_verify_svn([ "? subdir\n" ], [], 'st')
+
+#----------------------------------------------------------------------
+# Regression test for issue #3855 "status doesn't show 'K' on a locked
+# deleted node".
+@Issue(3855)
+def status_locked_deleted(sbox):
+ "status with locked deleted file"
+
+ sbox.build()
+ iota_path = sbox.ospath('iota')
+
+ sbox.simple_rm('iota')
+ svntest.actions.run_and_verify_svn(None, [], 'lock',
+ os.path.join(sbox.wc_dir, 'iota'))
+ svntest.actions.run_and_verify_svn(['D K %s\n' % iota_path], [],
+ 'status', iota_path)
+
+@Issue(3774)
+def wc_wc_copy_timestamp(sbox):
+ "timestamp on wc-wc copies"
+
+ sbox.build(read_only=True)
+ wc_dir = sbox.wc_dir
+
+ time.sleep(1.1)
+ svntest.main.file_append(sbox.ospath('A/D/H/psi'), 'modified\n')
+ svntest.actions.run_and_verify_svn(None, [], 'copy',
+ sbox.ospath('A/D/H'),
+ sbox.ospath('A/D/H2'))
+
+ expected_output = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_output.tweak('A/D/H/psi', status='M ')
+ expected_output.add({
+ 'A/D/H2' : Item(status='A ', copied='+', wc_rev='-'),
+ 'A/D/H2/chi' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D/H2/omega' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D/H2/psi' : Item(status='M ', copied='+', wc_rev='-'),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_output)
+
+ # Since copied chi is unmodified the text_timestamp should "match"
+ # the working file but it's not easy to confirm that directly. We
+  # can confirm that the copy's timestamp differs from the source's.
+ chi_src_timestamp = get_text_timestamp(sbox.ospath('A/D/H/chi'))
+ chi_dst_timestamp1 = get_text_timestamp(sbox.ospath('A/D/H2/chi'))
+ if chi_src_timestamp == chi_dst_timestamp1:
+ raise svntest.Failure("chi timestamps should be different")
+
+ # Since copied psi is modified the text_timestamp should not "match"
+ # the working file, again difficult to confirm directly. It happens
+ # that the current implementation leaves it equal to the source.
+ psi_src_timestamp = get_text_timestamp(sbox.ospath('A/D/H/psi'))
+ psi_dst_timestamp = get_text_timestamp(sbox.ospath('A/D/H2/psi'))
+ if psi_src_timestamp != psi_dst_timestamp:
+ raise svntest.Failure("psi timestamps should be the same")
+
+ # Cleanup repairs timestamps, so this should be a no-op.
+ svntest.actions.run_and_verify_svn(None, [], 'cleanup', wc_dir)
+ chi_dst_timestamp2 = get_text_timestamp(sbox.ospath('A/D/H2/chi'))
+ if chi_dst_timestamp2 != chi_dst_timestamp1:
+ raise svntest.Failure("chi timestamps should be the same")
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_output)
+
+@Issue(3908)
+def wclock_status(sbox):
+ "verbose/non-verbose on locked working copy"
+
+ sbox.build(read_only=True)
+ wc_dir = sbox.wc_dir
+
+ # Recursive lock
+ svntest.actions.lock_admin_dir(sbox.ospath('A/D'), True)
+
+ # Verbose status
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D', 'A/D/G', 'A/D/H', locked='L')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Non-verbose status
+ expected_output = svntest.verify.UnorderedOutput([
+ ' L %s\n' % sbox.ospath(path) for path in ['A/D',
+ 'A/D/G',
+ 'A/D/H']
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'status', wc_dir)
+
+ # Second non-recursive lock
+ svntest.actions.lock_admin_dir(sbox.ospath('A/B'))
+
+ expected_status.tweak('A/B', locked='L')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+ expected_output = svntest.verify.UnorderedOutput([
+ ' L %s\n' % sbox.ospath(path) for path in ['A/B',
+ 'A/D',
+ 'A/D/G',
+ 'A/D/H']
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'status', wc_dir)
+
+
+@Issue(4072)
+@XFail()
+def modified_modulo_translation(sbox):
+ "modified before translation, unmodified after"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # iota is a shell script.
+ sbox.simple_propset('svn:eol-style', 'LF', 'iota')
+ sbox.simple_commit()
+
+ # CRLF it.
+ open(sbox.ospath('iota'), 'wb').write("This is the file 'iota'.\r\n")
+
+ # Run status. Expect some output.
+ # TODO: decide how such files should show in the output; whether they
+ # always show, or only with some --flag; and adjust this accordingly.
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'status', wc_dir)
+
+ # Expect the file to be renormalized (to LF) after a revert.
+ sbox.simple_revert('iota')
+ svntest.actions.run_and_verify_svn([], [], 'status', wc_dir)
+
+def status_not_present(sbox):
+ "no status on not-present and excluded nodes"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+  # Remove iota and A/C; exclude A/mu and A/B below.
+ sbox.simple_rm('iota', 'A/C')
+ svntest.main.run_svn(None, 'up', '--set-depth', 'exclude',
+ sbox.ospath('A/mu'), sbox.ospath('A/B'))
+ sbox.simple_commit()
+
+ svntest.actions.run_and_verify_svn([], [],'status',
+ sbox.ospath('iota'),
+ sbox.ospath('A/B'),
+ sbox.ospath('A/C'),
+ sbox.ospath('A/mu'),
+ sbox.ospath('no-file'))
+
+# Skip this test if a .svn dir exists in the root directory
+@Skip(lambda: os.path.exists("/%s" % svntest.main.get_admin_name()))
+def status_unversioned_dir(sbox):
+ "status on unversioned dir"
+ sbox.build(read_only = True, create_wc = False)
+
+ # Run svn status on "/", which we assume exists and isn't a WC.
+ # This should work on UNIX-like systems and Windows systems
+ expected_err = "svn: warning: W1550(07|10): .*'.*(/|\\\\)" + \
+ "' is not a working copy"
+ svntest.actions.run_and_verify_svn2([], expected_err, 0,
+ "status", "/")
+
+def status_case_changed(sbox):
+ "status reporting on case changed nodes directly"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ os.rename(sbox.ospath('iota'), sbox.ospath('iOTA'))
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'iOTA' : Item(status='? '),
+ })
+ expected_status.tweak('iota', status='! ')
+
+ # First run status on the directory
+ svntest.actions.run_and_verify_unquiet_status(wc_dir,
+ expected_status)
+
+ # Now on the missing iota directly, which should give the same
+  # result, even on case insensitive filesystems
+ expected_status = svntest.wc.State(wc_dir, {
+ 'iota' : Item(status='! ', wc_rev=1),
+ })
+ svntest.actions.run_and_verify_unquiet_status(sbox.ospath('iota'),
+ expected_status)
+
+ # And on the unversioned iOTA
+ expected_status = svntest.wc.State(wc_dir, {
+ 'iOTA' : Item(status='? '),
+ })
+ svntest.actions.run_and_verify_unquiet_status(sbox.ospath('iOTA'),
+ expected_status)
+
+
+def move_update_timestamps(sbox):
+ "timestamp behaviour for move-update"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_append('A/B/E/beta', 'X\nY\nZ\n', truncate=True)
+ sbox.simple_commit()
+ sbox.simple_append('A/B/E/alpha', 'modified alpha')
+ sbox.simple_append('A/B/E/beta', 'XX\nY\nZ\n', truncate=True)
+ sbox.simple_commit()
+ sbox.simple_update('', 2)
+
+ sbox.simple_append('A/B/E/beta', 'local beta')
+ src_time = get_text_timestamp(sbox.ospath('A/B/E/alpha'))
+ sbox.simple_move("A/B/E", "A/B/E2")
+ alpha_dst_time = get_text_timestamp(sbox.ospath('A/B/E2/alpha'))
+ beta_dst_time = get_text_timestamp(sbox.ospath('A/B/E2/beta'))
+ if src_time != alpha_dst_time:
+ raise svntest.Failure("move failed to copy timestamp")
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(status=' ', treeconflict='C'),
+ 'A/B/E/alpha' : Item(status=' ', treeconflict='U'),
+ 'A/B/E/beta' : Item(status=' ', treeconflict='U'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+ expected_status.tweak('A/B/E',
+ status='D ', treeconflict='C', moved_to='A/B/E2')
+ expected_status.tweak('A/B/E/alpha', 'A/B/E/beta', status='D ')
+ expected_status.add({
+ 'A/B/E2' : Item(status='A ', wc_rev='-', copied='+',
+ moved_from='A/B/E'),
+ 'A/B/E2/alpha' : Item(status=' ', wc_rev='-', copied='+'),
+ 'A/B/E2/beta' : Item(status='M ', wc_rev='-', copied='+'),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta')
+ expected_disk.add({
+ 'A/B/E2' : Item(),
+ 'A/B/E2/alpha' : Item("This is the file 'alpha'.\n"),
+ 'A/B/E2/beta' : Item("X\nY\nZ\nlocal beta"),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+ time.sleep(1.1)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'resolve',
+ '--accept=mine-conflict',
+ sbox.ospath('A/B/E'))
+ expected_status.tweak('A/B/E', treeconflict=None)
+ expected_status.tweak('A/B/E2/beta', status='M ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+ expected_disk.tweak('A/B/E2/beta', contents="XX\nY\nZ\nlocal beta")
+ expected_disk.tweak('A/B/E2/alpha', contents="This is the file 'alpha'.\nmodified alpha")
+ svntest.actions.verify_disk(wc_dir, expected_disk)
+
+ # alpha is pristine so gets a new timestamp
+ new_time = get_text_timestamp(sbox.ospath('A/B/E2/alpha'))
+ if new_time == alpha_dst_time:
+ raise svntest.Failure("move failed to update timestamp")
+
+ # beta is modified so timestamp is removed
+ no_text_timestamp(sbox.ospath('A/B/E2/beta'))
+
+@Issue(4398)
+def status_path_handling(sbox):
+ "relative/absolute path handling"
+
+ sbox.build(read_only=True)
+
+ # target is a relative path to a subdir
+ wc_dir = sbox.wc_dir
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # target is an absolute path to a subdir
+ cwd = os.getcwd()
+ abs_wc_dir = os.path.join(cwd, wc_dir)
+ expected_status = svntest.actions.get_virginal_state(abs_wc_dir, 1)
+ svntest.actions.run_and_verify_status(abs_wc_dir, expected_status)
+
+ # target is an absolute path to a parent dir
+ os.chdir(sbox.ospath('A/B'))
+ expected_status = svntest.actions.get_virginal_state(abs_wc_dir, 1)
+ svntest.actions.run_and_verify_status(abs_wc_dir, expected_status)
+
+ # target is a relative path to a parent dir
+ rel_wc_dir = os.path.join('..', '..')
+ expected_status = svntest.actions.get_virginal_state(rel_wc_dir, 1)
+ svntest.actions.run_and_verify_status(rel_wc_dir, expected_status)
+
+def status_move_missing_direct(sbox):
+ "move information when status is called directly"
+
+ sbox.build()
+ sbox.simple_copy('A', 'Z')
+ sbox.simple_commit('')
+ sbox.simple_update('')
+
+ sbox.simple_move('Z', 'ZZ')
+ sbox.simple_move('A', 'Z')
+ sbox.simple_move('Z/B', 'ZB')
+ sbox.simple_mkdir('Z/B')
+ sbox.simple_move('ZB/E', 'Z/B/E')
+
+ # Somehow 'svn status' now shows different output for 'ZB/E'
+ # when called directly and via an ancestor, as this handles
+ # multi-layer in a different way
+
+  # Note that the status output may change over different Subversion revisions,
+  # but the status on a node should be identical regardless of how 'svn status'
+  # is called on it.
+
+ expected_output = [
+ 'A + %s\n' % sbox.ospath('ZB'),
+ ' > moved from %s\n' % os.path.join('..', 'Z', 'B'),
+ 'D + %s\n' % sbox.ospath('ZB/E'),
+ ' > moved to %s\n' % os.path.join('..', 'Z', 'B', 'E'),
+ ]
+ svntest.actions.run_and_verify_svn(expected_output, [], 'status',
+ sbox.ospath('ZB'), '--depth', 'immediates')
+
+ # And calling svn status on just 'ZB/E' should have the same result for this node
+ # except that we calculate the relative path from a different base
+ expected_output = [
+ 'D + %s\n' % sbox.ospath('ZB/E'),
+ ' > moved to %s\n' % os.path.join('..', '..', 'Z', 'B', 'E'),
+ ]
+ svntest.actions.run_and_verify_svn(expected_output, [], 'status',
+ sbox.ospath('ZB/E'), '--depth', 'empty')
+
+def status_move_missing_direct_base(sbox):
+ "move when status is called directly with base"
+
+ sbox.build()
+ sbox.simple_copy('A', 'Z')
+ sbox.simple_mkdir('Q')
+ sbox.simple_mkdir('Q/ZB')
+ sbox.simple_mkdir('Q/ZB/E')
+ sbox.simple_commit('')
+ sbox.simple_update('')
+
+ sbox.simple_rm('Q')
+ sbox.simple_mkdir('Q')
+
+ sbox.simple_move('Z', 'ZZ')
+ sbox.simple_move('A', 'Z')
+ sbox.simple_move('Z/B', 'Q/ZB')
+ sbox.simple_mkdir('Z/B')
+ sbox.simple_move('Q/ZB/E', 'Z/B/E')
+
+ # Somehow 'svn status' now shows different output for 'Q/ZB/E'
+ # when called directly and via an ancestor, as this handles
+ # multi-layer in a different way
+
+  # Note that the status output may change over different Subversion revisions,
+  # but the status on a node should be identical regardless of how 'svn status'
+  # is called on it.
+
+  # This test had a different result from status_move_missing_direct at the
+  # time of writing this test.
+
+ expected_output = [
+ 'A + %s\n' % sbox.ospath('Q/ZB'),
+ ' > moved from %s\n' % os.path.join('..', '..', 'Z', 'B'),
+ 'D + %s\n' % sbox.ospath('Q/ZB/E'),
+ ' > moved to %s\n' % os.path.join('..', '..', 'Z', 'B', 'E'),
+ ]
+ svntest.actions.run_and_verify_svn(expected_output, [], 'status',
+ sbox.ospath('Q/ZB'), '--depth', 'immediates')
+
+ # And calling svn status on just 'ZB/E' should have the same result for this node,
+ # except that the moved_to information is calculated from the node itself
+ expected_output = [
+ 'D + %s\n' % sbox.ospath('Q/ZB/E'),
+ ' > moved to %s\n' % os.path.join('..', '..', '..', 'Z', 'B', 'E'),
+ ]
+ svntest.actions.run_and_verify_svn(expected_output, [], 'status',
+ sbox.ospath('Q/ZB/E'), '--depth', 'empty')
+
+def status_missing_conflicts(sbox):
+ "status missing certain conflicts"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ sbox.simple_propset('q', 'r', 'A/B/E/alpha', 'A/B/E/beta')
+ sbox.simple_commit()
+
+ sbox.simple_move('A/B/E/alpha', 'alpha')
+ sbox.simple_move('A/B/E/beta', 'beta')
+
+ sbox.simple_rm('A/B/E')
+
+ svntest.main.run_svn(False, 'update', sbox.ospath('A/B/E'), '-r', '1',
+ '--accept=postpone')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/E', status='D ', treeconflict='C', wc_rev=1)
+ expected_status.tweak('A/B/E/alpha', status='D ', treeconflict='C', wc_rev=1,
+ moved_to='alpha')
+ expected_status.tweak('A/B/E/beta', status='D ', treeconflict='C', wc_rev=1,
+ moved_to='beta')
+ expected_status.add({
+ 'alpha' : Item(status='A ', copied='+', moved_from='A/B/E/alpha', wc_rev='-'),
+ 'beta' : Item(status='A ', copied='+', moved_from='A/B/E/beta', wc_rev='-')
+ })
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+ expected_info = [
+ {
+ 'Tree conflict': 'local file moved away, incoming file edit upon update.*'
+ },
+ {
+ 'Tree conflict': 'local file moved away, incoming file edit upon update.*'
+ }
+ ]
+ svntest.actions.run_and_verify_info(expected_info,
+ sbox.ospath('A/B/E/alpha'),
+ sbox.ospath('A/B/E/beta'))
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'resolve', '--accept=mine-conflict',
+ '--depth=empty', sbox.ospath('A/B/E'))
+ expected_status.tweak('A/B/E', treeconflict=None)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Now replace with directory
+ sbox.simple_mkdir('A/B/E')
+ expected_status.tweak('A/B/E', status='R ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+ svntest.actions.run_and_verify_info(expected_info,
+ sbox.ospath('A/B/E/alpha'),
+ sbox.ospath('A/B/E/beta'))
+
+ #Recreate scenario for file
+ sbox.simple_rm('A/B/E', 'alpha', 'beta')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '-R', sbox.ospath('A/B/E'))
+
+ sbox.simple_update('A/B/E', revision=2)
+
+ sbox.simple_move('A/B/E/alpha', 'alpha')
+ sbox.simple_move('A/B/E/beta', 'beta')
+
+ sbox.simple_rm('A/B/E')
+ svntest.main.run_svn(False, 'update', sbox.ospath('A/B/E'), '-r', '1',
+ '--accept=postpone')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'resolve', '--accept=mine-conflict',
+ '--depth=empty', sbox.ospath('A/B/E'))
+
+ sbox.simple_append('A/B/E', 'something')
+ expected_status.tweak('A/B/E', status='D ')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+ sbox.simple_add('A/B/E')
+
+  # In the entries world A/B/E doesn't have children.
+ expected_status.tweak('A/B/E', status='R ', entry_kind='file')
+
+ # Tree conflicts still in db
+ svntest.actions.run_and_verify_info(expected_info,
+ sbox.ospath('A/B/E/alpha'),
+ sbox.ospath('A/B/E/beta'))
+
+ # But not in status!
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ status_unversioned_file_in_current_dir,
+ status_update_with_nested_adds,
+ status_shows_all_in_current_dir,
+ status_missing_file,
+ status_type_change,
+ status_type_change_to_symlink,
+ status_with_new_files_pending,
+ status_for_unignored_file,
+ status_for_nonexistent_file,
+ status_file_needs_update,
+ status_uninvited_parent_directory,
+ status_on_forward_deletion,
+ timestamp_behaviour,
+ status_on_unversioned_dotdot,
+ status_on_partially_nonrecursive_wc,
+ missing_dir_in_anchor,
+ status_in_xml,
+ status_ignored_dir,
+ status_unversioned_dir_in_wc,
+ status_missing_dir,
+ status_nonrecursive_update_different_cwd,
+ status_add_plus_conflict,
+ inconsistent_eol,
+ status_update_with_incoming_props,
+ status_update_verbose_with_incoming_props,
+ status_nonrecursive_update,
+ status_dash_u_deleted_directories,
+ status_depth_local,
+ status_depth_update,
+ status_depth_update_local_modifications,
+ status_dash_u_type_change,
+ status_with_tree_conflicts,
+ status_nested_wc_old_format,
+ status_locked_deleted,
+ wc_wc_copy_timestamp,
+ wclock_status,
+ modified_modulo_translation,
+ status_not_present,
+ status_unversioned_dir,
+ status_case_changed,
+ move_update_timestamps,
+ status_path_handling,
+ status_move_missing_direct,
+ status_move_missing_direct_base,
+ status_missing_conflicts,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/svnadmin_tests.py b/subversion/tests/cmdline/svnadmin_tests.py
new file mode 100755
index 0000000..40b5e97
--- /dev/null
+++ b/subversion/tests/cmdline/svnadmin_tests.py
@@ -0,0 +1,3907 @@
+#!/usr/bin/env python
+#
+# svnadmin_tests.py: testing the 'svnadmin' tool.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os
+import logging
+import re
+import shutil
+import sys
+import threading
+import time
+import gzip
+
+logger = logging.getLogger()
+
+# Our testing module
+import svntest
+from svntest.verify import SVNExpectedStdout, SVNExpectedStderr
+from svntest.verify import SVNUnexpectedStderr
+from svntest.verify import UnorderedOutput
+from svntest.main import SVN_PROP_MERGEINFO
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+SkipDumpLoadCrossCheck = svntest.testcase.SkipDumpLoadCrossCheck_deco
+Item = svntest.wc.StateItem
+
+def check_hotcopy_bdb(src, dst):
+ "Verify that the SRC BDB repository has been correctly copied to DST."
+ ### TODO: This function should be extended to verify all hotcopied files,
+ ### not just compare the output of 'svnadmin dump'. See check_hotcopy_fsfs().
+ exit_code, origout, origerr = svntest.main.run_svnadmin("dump", src,
+ '--quiet')
+ exit_code, backout, backerr = svntest.main.run_svnadmin("dump", dst,
+ '--quiet')
+ if origerr or backerr or origout != backout:
+ raise svntest.Failure
+
+def check_hotcopy_fsfs_fsx(src, dst):
+ # Walk the source and compare all files to the destination
+ for src_dirpath, src_dirs, src_files in os.walk(src):
+ # Verify that the current directory exists in the destination
+ dst_dirpath = src_dirpath.replace(src, dst)
+ if not os.path.isdir(dst_dirpath):
+ raise svntest.Failure("%s does not exist in hotcopy "
+ "destination" % dst_dirpath)
+ # Verify that all dirents in the current directory also exist in source
+ for dst_dirent in os.listdir(dst_dirpath):
+ # Ignore auto-created empty lock files as they may or may not
+ # be present and are neither required by nor do they harm to
+ # the destination repository.
+ if dst_dirent == 'pack-lock':
+ continue
+ if dst_dirent == 'write-lock':
+ continue
+
+ # Ignore auto-created rep-cache.db-journal file
+ if dst_dirent == 'rep-cache.db-journal':
+ continue
+
+ src_dirent = os.path.join(src_dirpath, dst_dirent)
+ if not os.path.exists(src_dirent):
+ raise svntest.Failure("%s does not exist in hotcopy "
+ "source" % src_dirent)
+ # Compare all files in this directory
+ for src_file in src_files:
+ # Ignore auto-created empty lock files as they may or may not
+ # be present and are neither required by nor do they harm to
+ # the destination repository.
+ if src_file == 'pack-lock':
+ continue
+ if src_file == 'write-lock':
+ continue
+
+ # Ignore auto-created rep-cache.db-journal file
+ if src_file == 'rep-cache.db-journal':
+ continue
+
+ src_path = os.path.join(src_dirpath, src_file)
+ dst_path = os.path.join(dst_dirpath, src_file)
+ if not os.path.isfile(dst_path):
+ raise svntest.Failure("%s does not exist in hotcopy "
+ "destination" % dst_path)
+
+ # Special case for db/uuid: Only the UUID in the first line needs
+ # to match. Source and target must have the same number of lines
+ # (due to having the same format).
+ if src_path == os.path.join(src, 'db', 'uuid'):
+ lines1 = open(src_path, 'rb').read().split(b"\n")
+ lines2 = open(dst_path, 'rb').read().split(b"\n")
+ if len(lines1) != len(lines2):
+ raise svntest.Failure("%s differs in number of lines"
+ % dst_path)
+ if lines1[0] != lines2[0]:
+ raise svntest.Failure("%s contains different uuid: '%s' vs. '%s'"
+ % (dst_path, lines1[0], lines2[0]))
+ continue
+
+ # Special case for rep-cache: It will always differ in a byte-by-byte
+ # comparison, so compare db tables instead.
+ if src_file == 'rep-cache.db':
+ db1 = svntest.sqlite3.connect(src_path)
+ db2 = svntest.sqlite3.connect(dst_path)
+ schema1 = db1.execute("pragma user_version").fetchone()[0]
+ schema2 = db2.execute("pragma user_version").fetchone()[0]
+ if schema1 != schema2:
+ raise svntest.Failure("rep-cache schema differs: '%s' vs. '%s'"
+ % (schema1, schema2))
+ # Can't test newer rep-cache schemas with an old built-in SQLite.
+ if schema1 >= 2 and svntest.sqlite3.sqlite_version_info < (3, 8, 2):
+ continue
+
+ rows1 = []
+ rows2 = []
+ for row in db1.execute("select * from rep_cache order by hash"):
+ rows1.append(row)
+ for row in db2.execute("select * from rep_cache order by hash"):
+ rows2.append(row)
+ if len(rows1) != len(rows2):
+ raise svntest.Failure("number of rows in rep-cache differs")
+ for i in range(len(rows1)):
+ if rows1[i] != rows2[i]:
+ raise svntest.Failure("rep-cache row %i differs: '%s' vs. '%s'"
+ % (i, rows1[i], rows2[i]))
+ continue
+
+ # Special case for revprop-generation: It will always be zero in
+ # the hotcopy destination (i.e. a fresh cache generation)
+ if src_file == 'revprop-generation':
+ f2 = open(dst_path, 'r')
+ revprop_gen = int(f2.read().strip())
+ if revprop_gen != 0:
+ raise svntest.Failure("Hotcopy destination has non-zero " +
+ "revprop generation")
+ continue
+
+ f1 = open(src_path, 'rb')
+ f2 = open(dst_path, 'rb')
+ while True:
+ offset = 0
+ BUFSIZE = 1024
+ buf1 = f1.read(BUFSIZE)
+ buf2 = f2.read(BUFSIZE)
+ if not buf1 or not buf2:
+ if not buf1 and not buf2:
+ # both at EOF
+ break
+ elif buf1:
+ raise svntest.Failure("%s differs at offset %i" %
+ (dst_path, offset))
+ elif buf2:
+ raise svntest.Failure("%s differs at offset %i" %
+ (dst_path, offset))
+ if len(buf1) != len(buf2):
+ raise svntest.Failure("%s differs in length" % dst_path)
+ for i in range(len(buf1)):
+ if buf1[i] != buf2[i]:
+ raise svntest.Failure("%s differs at offset %i"
+ % (dst_path, offset))
+ offset += 1
+ f1.close()
+ f2.close()
+
+def check_hotcopy_fsfs(src, dst):
+ "Verify that the SRC FSFS repository has been correctly copied to DST."
+ check_hotcopy_fsfs_fsx(src, dst)
+
+def check_hotcopy_fsx(src, dst):
+ "Verify that the SRC FSX repository has been correctly copied to DST."
+ check_hotcopy_fsfs_fsx(src, dst)
+
+#----------------------------------------------------------------------
+
+# How we currently test 'svnadmin' --
+#
+# 'svnadmin create': Create an empty repository, test that the
+# root node has a proper created-revision,
+# because there was once a bug where it
+# didn't.
+#
+# Note also that "svnadmin create" is tested
+# implicitly every time we run a python test
+# script. (An empty repository is always
+# created and then imported into; if this
+# subcommand failed catastrophically, every
+# test would fail and we would know instantly.)
+#
+# 'svnadmin createtxn'
+# 'svnadmin rmtxn': See below.
+#
+# 'svnadmin lstxns': We don't care about the contents of transactions;
+# we only care that they exist or not.
+# Therefore, we can simply parse transaction headers.
+#
+# 'svnadmin dump': A couple regression tests that ensure dump doesn't
+# error out, and one to check that the --quiet option
+# really does what it's meant to do. The actual
+# contents of the dump aren't verified at all.
+#
+# ### TODO: someday maybe we could parse the contents of trees too.
+#
+######################################################################
+# Helper routines
+
+
+def get_txns(repo_dir):
+  "Return the sorted txn names reported by 'svnadmin lstxns' on REPO_DIR."
+
+  exit_code, output_lines, error_lines = svntest.main.run_svnadmin('lstxns',
+                                                                   repo_dir)
+  txns = sorted([x.strip() for x in output_lines])
+
+  return txns
+
+def patch_format(repo_dir, shard_size):
+ """Rewrite the format of the FSFS or FSX repository REPO_DIR so
+  that it would use sharding with SHARD_SIZE revisions per shard."""
+
+ format_path = os.path.join(repo_dir, "db", "format")
+ contents = open(format_path, 'rb').read()
+ processed_lines = []
+
+ for line in contents.split(b"\n"):
+ if line.startswith(b"layout "):
+ processed_lines.append(("layout sharded %d" % shard_size).encode())
+ else:
+ processed_lines.append(line)
+
+ new_contents = b"\n".join(processed_lines)
+ os.chmod(format_path, svntest.main.S_ALL_RW)
+ open(format_path, 'wb').write(new_contents)
+
+def is_sharded(repo_dir):
+  """Return whether the FSFS repository REPO_DIR is sharded."""
+
+  format_path = os.path.join(repo_dir, "db", "format")
+  contents = open(format_path, 'rb').read()
+
+  # Sharded repositories advertise themselves with a "layout sharded N"
+  # line in db/format.
+  for line in contents.split(b"\n"):
+    if line.startswith(b"layout sharded"):
+      return True
+
+  return False
+
+def load_and_verify_dumpstream(sbox, expected_stdout, expected_stderr,
+                               revs, check_props, dump, *varargs):
+  """Load the array of lines passed in DUMP into the current tests'
+  repository and verify the repository content using the array of
+  wc.States passed in REVS. If CHECK_PROPS is True, check properties
+  of each rev's items. VARARGS are optional arguments passed to the
+  'load' command."""
+
+  dump = svntest.main.ensure_list(dump)
+
+  exit_code, output, errput = svntest.main.run_command_stdin(
+    svntest.main.svnadmin_binary, expected_stderr, 0, True, dump,
+    'load', '--quiet', sbox.repo_dir, *varargs)
+
+  if expected_stdout:
+    if expected_stdout is svntest.verify.AnyOutput:
+      # AnyOutput means "some stdout, whatever it is" must have appeared.
+      if len(output) == 0:
+        raise SVNExpectedStdout
+    else:
+      svntest.verify.compare_and_display_lines(
+        "Standard output", "STDOUT:", expected_stdout, output)
+
+  if expected_stderr:
+    if expected_stderr is svntest.verify.AnyOutput:
+      if len(errput) == 0:
+        raise SVNExpectedStderr
+    else:
+      svntest.verify.compare_and_display_lines(
+        "Standard error output", "STDERR:", expected_stderr, errput)
+    # The expected error occurred, so don't try to verify the result
+    return
+
+  if revs:
+    # verify revs as wc states
+    # REVS[i] describes the expected disk state after updating to r(i+1).
+    for rev in range(len(revs)):
+      svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+                                         "update", "-r%s" % (rev+1),
+                                         sbox.wc_dir)
+
+      rev_tree = revs[rev]
+      svntest.actions.verify_disk(sbox.wc_dir, rev_tree, check_props)
+
+def load_dumpstream(sbox, dump, *varargs):
+  "Load dump text without verification."
+  # Thin wrapper: skips stdout/stderr matching and revision-tree checks.
+  return load_and_verify_dumpstream(sbox, None, None, None, False, dump,
+                                    *varargs)
+
+class FSFS_Index:
+  """Manages indexes of a rev file in a FSFS format 7 repository.
+  The interface returns P2L information and allows for item offsets
+  and lengths to be modified. """
+
+  def __init__(self, sbox, revision):
+    # Maps item number -> list of string fields as printed by
+    # 'svnfsfs dump-index' (offset, length, type, ..., item number).
+    self.by_item = { }
+    self.revision = revision
+    self.repo_dir = sbox.repo_dir
+
+    self._read()
+
+  def _read(self):
+    """ Read P2L index using svnfsfs. """
+    exit_code, output, errput = svntest.main.run_svnfsfs('dump-index',
+                                                         '-r' + str(self.revision),
+                                                         self.repo_dir)
+    svntest.verify.verify_outputs("Error while dumping index",
+                                  [], errput, [], [])
+    svntest.verify.verify_exit_code(None, exit_code, 0)
+
+    self.by_item.clear()
+    for line in output:
+      values = line.split()
+      # Skip the header line (first column 'Start') and short lines;
+      # column 4 holds the item number.
+      if len(values) >= 4 and values[0] != 'Start':
+        item = int(values[4])
+        self.by_item[item] = values
+
+  def _write(self):
+    """ Rewrite indexes using svnfsfs. """
+    # 'load-index' expects entries sorted by offset (column 0, hex).
+    by_offset = {}
+    for key in self.by_item:
+      values = self.by_item[key]
+      by_offset[int(values[0], 16)] = values
+
+    lines = []
+    for (offset, values) in sorted(by_offset.items()):
+      values = by_offset[offset]
+      line = values[0] + ' ' + values[1] + ' ' + values[2] + ' ' + \
+             values[3] + ' ' + values[4] + '\n';
+      lines.append(line.encode())
+
+    exit_code, output, errput = svntest.main.run_command_stdin(
+      svntest.main.svnfsfs_binary, 0, 0, False, lines,
+      'load-index', self.repo_dir)
+
+    svntest.verify.verify_outputs("Error while rewriting index",
+                                  output, errput, [], [])
+    svntest.verify.verify_exit_code(None, exit_code, 0)
+
+  def get_item(self, item):
+    """ Return offset, length and type of ITEM. """
+    # NOTE(review): the locals 'len' and 'type' shadow the builtins within
+    # this method; harmless here but worth renaming in a future cleanup.
+    values = self.by_item[item]
+
+    offset = int(values[0], 16)
+    len = int(values[1], 16)
+    type = values[2]
+
+    return (offset, len, type)
+
+  def modify_item(self, item, offset, len):
+    """ Modify offset and length of ITEM. """
+    values = self.by_item[item]
+
+    # Offsets and lengths are stored as hex strings in the dump format.
+    values[0] = '%x' % offset
+    values[1] = '%x' % len
+
+    self._write()
+
+def repo_format(sbox):
+  """ Return the repository format number for SBOX."""
+
+  format_file = open(os.path.join(sbox.repo_dir, "db", "format"))
+  # Only the first character is read, i.e. this assumes a single-digit
+  # format number on the first line of db/format.
+  format = int(format_file.read()[:1])
+  format_file.close()
+
+  return format
+
+def set_changed_path_list(sbox, revision, changes):
+  """ Replace the changed paths list in the revision file REVISION in SBOX
+  with the text CHANGES."""
+
+  idx = None
+
+  # read full file
+  fp = open(fsfs_file(sbox.repo_dir, 'revs', str(revision)), 'r+b')
+  contents = fp.read()
+  length = len(contents)
+
+  if repo_format(sbox) < 7:
+    # replace the changed paths list
+    # Pre-format-7 rev files end with a "<root-offset> <changes-offset>"
+    # line; locate it within the last 64 bytes.
+    header = contents[contents.rfind(b'\n', length - 64, length - 1):]
+    body_len = int(header.split(b' ')[1])
+
+  else:
+    # read & parse revision file footer
+    # The very last byte holds the footer length (an int in py3, a str
+    # char in py2 -- hence the ord() fallback below).
+    footer_length = contents[length-1];
+    if isinstance(footer_length, str):
+      footer_length = ord(footer_length)
+
+    footer = contents[length - footer_length - 1:length-1]
+    l2p_offset = int(footer.split(b' ')[0])
+    l2p_checksum = footer.split(b' ')[1]
+    p2l_offset = int(footer.split(b' ')[2])
+    p2l_checksum = footer.split(b' ')[3]
+
+    idx = FSFS_Index(sbox, revision)
+    (offset, item_len, item_type) = idx.get_item(1)
+
+    # split file contents
+    body_len = offset
+    indexes = contents[l2p_offset:length - footer_length - 1]
+
+    # construct new footer, include indexes as are
+    # Shift the P2L offset by however much the body length changed.
+    file_len = body_len + len(changes) + 1
+    p2l_offset += file_len - l2p_offset
+
+    # NOTE(review): bytes([n]) below yields a 1-byte string only on
+    # Python 3; confirm this helper is never exercised under Python 2.
+    header = str(file_len).encode() + b' ' + l2p_checksum + b' ' \
+             + str(p2l_offset).encode() + b' ' + p2l_checksum
+    header += bytes([len(header)])
+    header = b'\n' + indexes + header
+
+  contents = contents[:body_len] + changes + header
+
+  # set new contents
+  fp.seek(0)
+  fp.write(contents)
+  fp.truncate()
+  fp.close()
+
+  if repo_format(sbox) >= 7:
+    # Keep the P2L index in sync with the rewritten changed-paths item.
+    idx.modify_item(1, offset, len(changes) + 1)
+
+######################################################################
+# Tests
+
+
+#----------------------------------------------------------------------
+
+# dump stream tests need a dump file
+
+def clean_dumpfile():
+  # Return a fresh copy of a minimal, well-formed v2 dump stream: r0 with
+  # only svn:date, and r1 adding file /A with contents "text\n".  Callers
+  # mutate the returned list, so a new list is built on every call.
+  return \
+  [ b"SVN-fs-dump-format-version: 2\n\n",
+    b"UUID: 668cc64a-31ed-0310-8ccb-b75d75bb44e3\n\n",
+    b"Revision-number: 0\n",
+    b"Prop-content-length: 56\n",
+    b"Content-length: 56\n\n",
+    b"K 8\nsvn:date\nV 27\n2005-01-08T21:48:13.838745Z\nPROPS-END\n\n\n",
+    b"Revision-number: 1\n",
+    b"Prop-content-length: 98\n",
+    b"Content-length: 98\n\n",
+    b"K 7\nsvn:log\nV 0\n\nK 10\nsvn:author\nV 4\nerik\n",
+    b"K 8\nsvn:date\nV 27\n2005-01-08T21:51:16.313791Z\nPROPS-END\n\n\n",
+    b"Node-path: A\n",
+    b"Node-kind: file\n",
+    b"Node-action: add\n",
+    b"Prop-content-length: 35\n",
+    b"Text-content-length: 5\n",
+    b"Text-content-md5: e1cbb0c3879af8347246f12c559a86b5\n",
+    b"Content-length: 40\n\n",
+    b"K 12\nsvn:keywords\nV 2\nId\nPROPS-END\ntext\n\n\n"]
+
+# Expected working-copy state for r1 of clean_dumpfile(): one file 'A'.
+dumpfile_revisions = \
+  [ svntest.wc.State('', { 'A' : svntest.wc.StateItem(contents="text\n") }) ]
+
+#----------------------------------------------------------------------
+def extra_headers(sbox):
+  "loading of dumpstream with extra headers"
+
+  sbox.build(empty=True)
+
+  dumpfile = clean_dumpfile()
+
+  # Inject an unknown header into r0; 'svnadmin load' should ignore it.
+  dumpfile[3:3] = \
+    [ b"X-Comment-Header: Ignored header normally not in dump stream\n" ]
+
+  load_and_verify_dumpstream(sbox,[],[], dumpfile_revisions, False, dumpfile,
+                             '--ignore-uuid')
+
+#----------------------------------------------------------------------
+# Ensure loading continues after skipping a bit of unknown extra content.
+def extra_blockcontent(sbox):
+  "load success on oversized Content-length"
+
+  sbox.build(empty=True)
+
+  dumpfile = clean_dumpfile()
+
+  # Replace "Content-length" line with two lines
+  # Content-length grows from 98 to 108 to cover 10 bytes of extra data.
+  dumpfile[8:9] = \
+    [ b"Extra-content-length: 10\n",
+      b"Content-length: 108\n\n" ]
+  # Insert the extra content after "PROPS-END\n"
+  dumpfile[11] = dumpfile[11][:-2] + b"extra text\n\n\n"
+
+  load_and_verify_dumpstream(sbox,[],[], dumpfile_revisions, False, dumpfile,
+                             '--ignore-uuid')
+
+#----------------------------------------------------------------------
+def inconsistent_headers(sbox):
+  "load failure on undersized Content-length"
+
+  sbox.build(empty=True)
+
+  dumpfile = clean_dumpfile()
+
+  # Claim less content (30) than the record actually carries; the load
+  # must fail with some error on stderr (AnyOutput).
+  dumpfile[-2] = b"Content-length: 30\n\n"
+
+  load_and_verify_dumpstream(sbox, [], svntest.verify.AnyOutput,
+                             dumpfile_revisions, False, dumpfile)
+
+#----------------------------------------------------------------------
+# Test for issue #2729: Datestamp-less revisions in dump streams do
+# not remain so after load
+@Issue(2729)
+def empty_date(sbox):
+  "preserve date-less revisions in load"
+
+  sbox.build(empty=True)
+
+  dumpfile = clean_dumpfile()
+
+  # Replace portions of the revision data to drop the svn:date revprop.
+  dumpfile[7:11] = \
+    [ b"Prop-content-length: 52\n",
+      b"Content-length: 52\n\n",
+      b"K 7\nsvn:log\nV 0\n\nK 10\nsvn:author\nV 4\nerik\nPROPS-END\n\n\n"
+    ]
+
+  load_and_verify_dumpstream(sbox,[],[], dumpfile_revisions, False, dumpfile,
+                             '--ignore-uuid')
+
+  # Verify that the revision still lacks the svn:date property.
+  # Either E195011 or E200017 is acceptable, depending on the RA layer.
+  svntest.actions.run_and_verify_svn([], '.*(E195011|E200017).*svn:date',
+                                     "propget", "--revprop", "-r1", "svn:date",
+                                     sbox.wc_dir)
+
+#----------------------------------------------------------------------
+
+def dump_copied_dir(sbox):
+  "'svnadmin dump' on copied directory"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  repo_dir = sbox.repo_dir
+
+  # Copy A/C to A/B/C and commit, producing r2 with a copied directory.
+  old_C_path = os.path.join(wc_dir, 'A', 'C')
+  new_C_path = os.path.join(wc_dir, 'A', 'B', 'C')
+  svntest.main.run_svn(None, 'cp', old_C_path, new_C_path)
+  sbox.simple_commit(message='log msg')
+
+  # The dump progress notes go to stderr; the dump itself is not checked.
+  exit_code, output, errput = svntest.main.run_svnadmin("dump", repo_dir)
+  # NOTE(review): compare_and_display_lines appears to raise on mismatch
+  # itself (sibling tests call it bare), so this 'if ... raise' wrapper is
+  # likely redundant -- confirm against svntest.verify before removing.
+  if svntest.verify.compare_and_display_lines(
+    "Output of 'svnadmin dump' is unexpected.",
+    'STDERR', ["* Dumped revision 0.\n",
+               "* Dumped revision 1.\n",
+               "* Dumped revision 2.\n"], errput):
+    raise svntest.Failure
+
+#----------------------------------------------------------------------
+
+def dump_move_dir_modify_child(sbox):
+  "'svnadmin dump' on modified child of copied dir"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  repo_dir = sbox.repo_dir
+
+  # Copy A/B to A/Q, then modify Q/lambda so r2 contains a changed child
+  # of a copied directory.
+  B_path = os.path.join(wc_dir, 'A', 'B')
+  Q_path = os.path.join(wc_dir, 'A', 'Q')
+  svntest.main.run_svn(None, 'cp', B_path, Q_path)
+  svntest.main.file_append(os.path.join(Q_path, 'lambda'), 'hello')
+  sbox.simple_commit(message='log msg')
+  exit_code, output, errput = svntest.main.run_svnadmin("dump", repo_dir)
+  svntest.verify.compare_and_display_lines(
+    "Output of 'svnadmin dump' is unexpected.",
+    'STDERR', ["* Dumped revision 0.\n",
+               "* Dumped revision 1.\n",
+               "* Dumped revision 2.\n"], errput)
+
+  # An explicit -r0:HEAD dump must report the same progress notes.
+  exit_code, output, errput = svntest.main.run_svnadmin("dump", "-r",
+                                                        "0:HEAD", repo_dir)
+  svntest.verify.compare_and_display_lines(
+    "Output of 'svnadmin dump' is unexpected.",
+    'STDERR', ["* Dumped revision 0.\n",
+               "* Dumped revision 1.\n",
+               "* Dumped revision 2.\n"], errput)
+
+#----------------------------------------------------------------------
+
+def dump_quiet(sbox):
+  "'svnadmin dump --quiet'"
+
+  sbox.build(create_wc = False)
+
+  # With --quiet there must be no "* Dumped revision N." notes on stderr.
+  exit_code, dump, errput = svntest.main.run_svnadmin("dump", sbox.repo_dir,
+                                                      '--quiet')
+  svntest.verify.compare_and_display_lines(
+    "Output of 'svnadmin dump --quiet' is unexpected.",
+    'STDERR', [], errput)
+
+#----------------------------------------------------------------------
+
+def hotcopy_dot(sbox):
+  "'svnadmin hotcopy PATH .'"
+  sbox.build()
+
+  backup_dir, backup_url = sbox.add_repo_path('backup')
+  os.mkdir(backup_dir)
+  cwd = os.getcwd()
+
+  # Run the hotcopy with '.' as the destination, from inside backup_dir.
+  os.chdir(backup_dir)
+  svntest.actions.run_and_verify_svnadmin(
+    None, [],
+    "hotcopy", os.path.join(cwd, sbox.repo_dir), '.')
+
+  os.chdir(cwd)
+
+  # Compare source and backup with the backend-appropriate checker.
+  if svntest.main.is_fs_type_fsfs():
+    check_hotcopy_fsfs(sbox.repo_dir, backup_dir)
+  if svntest.main.is_fs_type_bdb():
+    check_hotcopy_bdb(sbox.repo_dir, backup_dir)
+  if svntest.main.is_fs_type_fsx():
+    check_hotcopy_fsx(sbox.repo_dir, backup_dir)
+
+#----------------------------------------------------------------------
+
+# This test is redundant for FSFS. The hotcopy_dot and hotcopy_incremental
+# tests cover this check for FSFS already.
+@SkipUnless(svntest.main.is_fs_type_bdb)
+def hotcopy_format(sbox):
+  "'svnadmin hotcopy' checking db/format file"
+  sbox.build()
+
+  backup_dir, backup_url = sbox.add_repo_path('backup')
+  exit_code, output, errput = svntest.main.run_svnadmin("hotcopy",
+                                                        sbox.repo_dir,
+                                                        backup_dir)
+  if errput:
+    logger.warn("Error: hotcopy failed")
+    raise svntest.Failure
+
+  # verify that the db/format files are the same
+  fp = open(os.path.join(sbox.repo_dir, "db", "format"))
+  contents1 = fp.read()
+  fp.close()
+
+  fp2 = open(os.path.join(backup_dir, "db", "format"))
+  contents2 = fp2.read()
+  fp2.close()
+
+  if contents1 != contents2:
+    logger.warn("Error: db/format file contents do not match after hotcopy")
+    raise svntest.Failure
+
+#----------------------------------------------------------------------
+
+def setrevprop(sbox):
+  "setlog, setrevprop, delrevprop; bypass hooks"
+  sbox.build()
+
+  # Try a simple log property modification.
+  # 'setlog' takes a FILE whose contents become the new log message;
+  # here the r0 log becomes "This is the file 'iota'.\n".
+  iota_path = os.path.join(sbox.wc_dir, "iota")
+  mu_path = sbox.ospath('A/mu')
+  svntest.actions.run_and_verify_svnadmin([], [],
+                                          "setlog", sbox.repo_dir, "-r0",
+                                          "--bypass-hooks",
+                                          iota_path)
+
+  # Make sure it fails without --bypass-hooks. (We haven't called
+  # svntest.actions.enable_revprop_changes().)
+  #
+  # Note that we attempt to set the log message to a different value than the
+  # successful call.
+  svntest.actions.run_and_verify_svnadmin([], svntest.verify.AnyOutput,
+                                          "setlog", sbox.repo_dir, "-r0",
+                                          mu_path)
+
+  # Verify that the revprop value matches what we set when retrieved
+  # through the client.
+  svntest.actions.run_and_verify_svn([ "This is the file 'iota'.\n", "\n" ],
+                                     [], "propget", "--revprop", "-r0",
+                                     "svn:log", sbox.wc_dir)
+
+  # Try an author property modification.
+  foo_path = os.path.join(sbox.wc_dir, "foo")
+  svntest.main.file_write(foo_path, "foo")
+
+  exit_code, output, errput = svntest.main.run_svnadmin("setrevprop",
+                                                        sbox.repo_dir,
+                                                        "-r0", "svn:author",
+                                                        foo_path)
+  if errput:
+    logger.warn("Error: 'setrevprop' failed")
+    raise svntest.Failure
+
+  # Verify that the revprop value matches what we set when retrieved
+  # through the client.
+  svntest.actions.run_and_verify_svn([ "foo\n" ], [], "propget",
+                                     "--revprop", "-r0", "svn:author",
+                                     sbox.wc_dir)
+
+  # Delete the property.
+  svntest.actions.run_and_verify_svnadmin([], [],
+                                          "delrevprop", "-r0", sbox.repo_dir,
+                                          "svn:author")
+  # Retrieving a deleted revprop must now fail with E200017.
+  svntest.actions.run_and_verify_svnlook([], ".*E200017.*svn:author.*",
+                                         "propget", "--revprop", "-r0",
+                                         sbox.repo_dir, "svn:author")
+
+def verify_windows_paths_in_repos(sbox):
+  "verify a repository containing paths like 'c:hi'"
+
+  # setup a repo with a directory 'c:hi'
+  sbox.build(create_wc = False)
+  repo_url = sbox.repo_url
+  chi_url = sbox.repo_url + '/c:hi'
+
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'mkdir', '-m', 'log_msg',
+                                     chi_url)
+
+  exit_code, output, errput = svntest.main.run_svnadmin("verify",
+                                                        sbox.repo_dir)
+  if errput:
+    raise SVNUnexpectedStderr(errput)
+
+  # unfortunately, some backends needs to do more checks than other
+  # resulting in different progress output
+  if svntest.main.is_fs_log_addressing():
+    # Log addressing emits per-revision metadata verification lines too.
+    svntest.verify.compare_and_display_lines(
+      "Error while running 'svnadmin verify'.",
+      'STDOUT', ["* Verifying metadata at revision 0 ...\n",
+                 "* Verifying repository metadata ...\n",
+                 "* Verified revision 0.\n",
+                 "* Verified revision 1.\n",
+                 "* Verified revision 2.\n"], output)
+  elif svntest.main.fs_has_rep_sharing() and not svntest.main.is_fs_type_bdb():
+    # Rep-sharing backends emit a single repository-metadata line first.
+    svntest.verify.compare_and_display_lines(
+      "Error while running 'svnadmin verify'.",
+      'STDOUT', ["* Verifying repository metadata ...\n",
+                 "* Verified revision 0.\n",
+                 "* Verified revision 1.\n",
+                 "* Verified revision 2.\n"], output)
+  else:
+    svntest.verify.compare_and_display_lines(
+      "Error while running 'svnadmin verify'.",
+      'STDOUT', ["* Verified revision 0.\n",
+                 "* Verified revision 1.\n",
+                 "* Verified revision 2.\n"], output)
+
+#----------------------------------------------------------------------
+
+# Returns the filename of the rev or revprop file (according to KIND)
+# numbered REV in REPO_DIR, which must be in the first shard if we're
+# using a sharded repository.
+def fsfs_file(repo_dir, kind, rev):
+  # Returns the path of the rev or revprop file (according to KIND)
+  # numbered REV in REPO_DIR; REV must be in the first shard when the
+  # repository is sharded.
+  if svntest.main.options.server_minor_version >= 5:
+    if svntest.main.options.fsfs_sharding is None:
+      # Unsharded 1.5+ layout still places everything under shard '0'.
+      return os.path.join(repo_dir, 'db', kind, '0', rev)
+    else:
+      shard = int(rev) // svntest.main.options.fsfs_sharding
+      path = os.path.join(repo_dir, 'db', kind, str(shard), rev)
+
+      if svntest.main.options.fsfs_packing is None or kind == 'revprops':
+        # we don't pack revprops
+        return path
+      elif os.path.exists(path):
+        # rev exists outside a pack file.
+        return path
+      else:
+        # didn't find the plain file; assume it's in a pack file
+        return os.path.join(repo_dir, 'db', kind, ('%d.pack' % shard), 'pack')
+  else:
+    # Pre-1.5 layout: flat directory, no shards.
+    return os.path.join(repo_dir, 'db', kind, rev)
+
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+def verify_incremental_fsfs(sbox):
+  """svnadmin verify detects corruption dump can't"""
+
+  if svntest.main.options.fsfs_version is not None and \
+     svntest.main.options.fsfs_version not in [4, 6]:
+    raise svntest.Skip("Unsupported prepackaged repository version")
+
+  # setup a repo with a directory 'c:hi'
+  # use physical addressing as this is hard to provoke with logical addressing
+  sbox.build(create_wc = False,
+             minor_version = min(svntest.main.options.server_minor_version,8))
+  repo_url = sbox.repo_url
+  E_url = sbox.repo_url + '/A/B/E'
+
+  # Create A/B/E/bravo in r2.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'mkdir', '-m', 'log_msg',
+                                     E_url + '/bravo')
+  # Corrupt r2's reference to A/C by replacing "dir 7-1.0.r1/1568" with
+  # "dir 7-1.0.r1/1569" (increment offset) and updating the checksum for
+  # this directory listing to "c9b5a2d26473a4e28088673dda9df804" so that
+  # the listing itself is valid.
+  r2 = fsfs_file(sbox.repo_dir, 'revs', '2')
+  if r2.endswith('pack'):
+    raise svntest.Skip("Test doesn't handle packed revisions")
+
+  # Overwrite the entire r2 rev file with a hand-crafted, deliberately
+  # corrupted copy (the byte offsets below are tied to the fixed Greek
+  # tree of r1 -- do not edit).
+  fp = open(r2, 'wb')
+  fp.write(b"""id: 0-2.0.r2/0
+type: dir
+count: 0
+cpath: /A/B/E/bravo
+copyroot: 0 /
+
+PLAIN
+K 5
+alpha
+V 17
+file 3-1.0.r1/719
+K 4
+beta
+V 17
+file 4-1.0.r1/840
+K 5
+bravo
+V 14
+dir 0-2.0.r2/0
+END
+ENDREP
+id: 2-1.0.r2/181
+type: dir
+pred: 2-1.0.r1/1043
+count: 1
+text: 2 69 99 99 f63001f7fddd1842d8891474d0982111
+cpath: /A/B/E
+copyroot: 0 /
+
+PLAIN
+K 1
+E
+V 16
+dir 2-1.0.r2/181
+K 1
+F
+V 17
+dir 5-1.0.r1/1160
+K 6
+lambda
+V 17
+file 6-1.0.r1/597
+END
+ENDREP
+id: 1-1.0.r2/424
+type: dir
+pred: 1-1.0.r1/1335
+count: 1
+text: 2 316 95 95 bccb66379b4f825dac12b50d80211bae
+cpath: /A/B
+copyroot: 0 /
+
+PLAIN
+K 1
+B
+V 16
+dir 1-1.0.r2/424
+K 1
+C
+V 17
+dir 7-1.0.r1/1569
+K 1
+D
+V 17
+dir 8-1.0.r1/3061
+K 2
+mu
+V 18
+file i-1.0.r1/1451
+END
+ENDREP
+id: 0-1.0.r2/692
+type: dir
+pred: 0-1.0.r1/3312
+count: 1
+text: 2 558 121 121 c9b5a2d26473a4e28088673dda9df804
+cpath: /A
+copyroot: 0 /
+
+PLAIN
+K 1
+A
+V 16
+dir 0-1.0.r2/692
+K 4
+iota
+V 18
+file j-1.0.r1/3428
+END
+ENDREP
+id: 0.0.r2/904
+type: dir
+pred: 0.0.r1/3624
+count: 2
+text: 2 826 65 65 e44e4151d0d124533338619f082c8c9a
+cpath: /
+copyroot: 0 /
+
+_0.0.t1-1 add false false /A/B/E/bravo
+
+
+904 1031
+""")
+  fp.close()
+
+  # 'svnadmin verify -r2' must flag the corruption with one of the two
+  # expected error messages (which one depends on the FSFS format).
+  exit_code, output, errput = svntest.main.run_svnadmin("verify", "-r2",
+                                                        sbox.repo_dir)
+  svntest.verify.verify_outputs(
+    message=None, actual_stdout=output, actual_stderr=errput,
+    expected_stdout=None,
+    expected_stderr=".*Found malformed header '[^']*' in revision file"
+                    "|.*Missing id field in node-rev.*")
+
+#----------------------------------------------------------------------
+
+# Helper for two test functions.
+def corrupt_and_recover_db_current(sbox, minor_version=None):
+  """Build up a MINOR_VERSION sandbox and test different recovery scenarios
+  with missing, out-of-date or even corrupt db/current files. Recovery should
+  behave the same way with all values of MINOR_VERSION, hence this helper
+  containing the common code that allows us to check it."""
+
+  sbox.build(minor_version=minor_version)
+  current_path = os.path.join(sbox.repo_dir, 'db', 'current')
+
+  # Commit up to r3, so we can test various recovery scenarios.
+  svntest.main.file_append(os.path.join(sbox.wc_dir, 'iota'), 'newer line\n')
+  sbox.simple_commit(message='log msg')
+
+  svntest.main.file_append(os.path.join(sbox.wc_dir, 'iota'), 'newest line\n')
+  sbox.simple_commit(message='log msg')
+
+  # Remember the contents of the db/current file.
+  expected_current_contents = open(current_path).read()
+
+  # Move aside the current file for r3.
+  os.rename(os.path.join(sbox.repo_dir, 'db','current'),
+            os.path.join(sbox.repo_dir, 'db','was_current'))
+
+  # Run 'svnadmin recover' and check that the current file is recreated.
+  exit_code, output, errput = svntest.main.run_svnadmin("recover",
+                                                        sbox.repo_dir)
+  if errput:
+    raise SVNUnexpectedStderr(errput)
+
+  actual_current_contents = open(current_path).read()
+  svntest.verify.compare_and_display_lines(
+    "Contents of db/current is unexpected.",
+    'db/current', expected_current_contents, actual_current_contents)
+
+  # Now try writing db/current to be one rev lower than it should be.
+  svntest.main.file_write(current_path, '2\n')
+
+  # Run 'svnadmin recover' and check that the current file is fixed.
+  exit_code, output, errput = svntest.main.run_svnadmin("recover",
+                                                        sbox.repo_dir)
+  if errput:
+    raise SVNUnexpectedStderr(errput)
+
+  actual_current_contents = open(current_path).read()
+  svntest.verify.compare_and_display_lines(
+    "Contents of db/current is unexpected.",
+    'db/current', expected_current_contents, actual_current_contents)
+
+  # Now try writing db/current to be *two* revs lower than it should be.
+  svntest.main.file_write(current_path, '1\n')
+
+  # Run 'svnadmin recover' and check that the current file is fixed.
+  exit_code, output, errput = svntest.main.run_svnadmin("recover",
+                                                        sbox.repo_dir)
+  if errput:
+    raise SVNUnexpectedStderr(errput)
+
+  actual_current_contents = open(current_path).read()
+  svntest.verify.compare_and_display_lines(
+    "Contents of db/current is unexpected.",
+    'db/current', expected_current_contents, actual_current_contents)
+
+  # Now try writing db/current to be fish revs lower than it should be.
+  #
+  # Note: I'm not actually sure it's wise to recover from this, but
+  # detecting it would require rewriting fs_fs.c:get_youngest() to
+  # check the actual contents of its buffer, since atol() will happily
+  # convert "fish" to 0.
+  svntest.main.file_write(current_path, 'fish\n')
+
+  # Run 'svnadmin recover' and check that the current file is fixed.
+  exit_code, output, errput = svntest.main.run_svnadmin("recover",
+                                                        sbox.repo_dir)
+  if errput:
+    raise SVNUnexpectedStderr(errput)
+
+  actual_current_contents = open(current_path).read()
+  svntest.verify.compare_and_display_lines(
+    "Contents of db/current is unexpected.",
+    'db/current', expected_current_contents, actual_current_contents)
+
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+def fsfs_recover_db_current(sbox):
+  "fsfs recover db/current"
+  # Current-format repository; shares its body with the 1.3-compat variant.
+  corrupt_and_recover_db_current(sbox)
+
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+def fsfs_recover_old_db_current(sbox):
+  "fsfs recover db/current --compatible-version=1.3"
+
+  # Around trunk@1573728, 'svnadmin recover' wrongly errored out
+  # for the --compatible-version=1.3 repositories with missing or
+  # invalid db/current file:
+  #   svnadmin: E160006: No such revision 1
+
+  corrupt_and_recover_db_current(sbox, minor_version=3)
+
+#----------------------------------------------------------------------
+@Issue(2983)
+def load_with_parent_dir(sbox):
+  "'svnadmin load --parent-dir' reparents mergeinfo"
+
+  ## See http://subversion.tigris.org/issues/show_bug.cgi?id=2983. ##
+  sbox.build(empty=True)
+
+  dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]),
+                                   'svnadmin_tests_data',
+                                   'mergeinfo_included.dump')
+  dumpfile = svntest.actions.load_dumpfile(dumpfile_location)
+
+  # Create 'sample' dir in sbox.repo_url, and load the dump stream there.
+  svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+                                      'Committed revision 1.\n'],
+                                     [], "mkdir", sbox.repo_url + "/sample",
+                                     "-m", "Create sample dir")
+  load_dumpstream(sbox, dumpfile, '--parent-dir', '/sample')
+
+  # Verify the svn:mergeinfo properties for '--parent-dir'
+  # Both the paths and the revision ranges must have been shifted.
+  svntest.actions.run_and_verify_svn([sbox.repo_url +
+                                      "/sample/branch - /sample/trunk:5-7\n"],
+                                     [], 'propget', 'svn:mergeinfo', '-R',
+                                     sbox.repo_url + '/sample/branch')
+  svntest.actions.run_and_verify_svn([sbox.repo_url +
+                                      "/sample/branch1 - " +
+                                      "/sample/branch:6-9\n"],
+                                     [], 'propget', 'svn:mergeinfo', '-R',
+                                     sbox.repo_url + '/sample/branch1')
+
+  # Create 'sample-2' dir in sbox.repo_url, and load the dump stream again.
+  # This time, don't include a leading slash on the --parent-dir argument.
+  # See issue #3547.
+  svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+                                      'Committed revision 11.\n'],
+                                     [], "mkdir", sbox.repo_url + "/sample-2",
+                                     "-m", "Create sample-2 dir")
+  load_dumpstream(sbox, dumpfile, '--parent-dir', 'sample-2')
+
+  # Verify the svn:mergeinfo properties for '--parent-dir'.
+  svntest.actions.run_and_verify_svn([sbox.repo_url +
+                                      "/sample-2/branch - " +
+                                      "/sample-2/trunk:15-17\n"],
+                                     [], 'propget', 'svn:mergeinfo', '-R',
+                                     sbox.repo_url + '/sample-2/branch')
+  svntest.actions.run_and_verify_svn([sbox.repo_url +
+                                      "/sample-2/branch1 - " +
+                                      "/sample-2/branch:16-19\n"],
+                                     [], 'propget', 'svn:mergeinfo', '-R',
+                                     sbox.repo_url + '/sample-2/branch1')
+
+#----------------------------------------------------------------------
+
+def set_uuid(sbox):
+  "test 'svnadmin setuuid'"
+
+  sbox.build(create_wc=False)
+
+  # Squirrel away the original repository UUID.
+  exit_code, output, errput = svntest.main.run_svnlook('uuid', sbox.repo_dir)
+  if errput:
+    raise SVNUnexpectedStderr(errput)
+  orig_uuid = output[0].rstrip()
+
+  # Try setting a new, bogus UUID.
+  svntest.actions.run_and_verify_svnadmin(None, '^.*Malformed UUID.*$',
+                                          'setuuid', sbox.repo_dir, 'abcdef')
+
+  # Try generating a brand new UUID.
+  svntest.actions.run_and_verify_svnadmin([], None,
+                                          'setuuid', sbox.repo_dir)
+  exit_code, output, errput = svntest.main.run_svnlook('uuid', sbox.repo_dir)
+  if errput:
+    raise SVNUnexpectedStderr(errput)
+  new_uuid = output[0].rstrip()
+  # A freshly generated UUID must differ from the original.
+  if new_uuid == orig_uuid:
+    logger.warn("Error: new UUID matches the original one")
+    raise svntest.Failure
+
+  # Now, try setting the UUID back to the original value.
+  svntest.actions.run_and_verify_svnadmin([], None,
+                                          'setuuid', sbox.repo_dir, orig_uuid)
+  exit_code, output, errput = svntest.main.run_svnlook('uuid', sbox.repo_dir)
+  if errput:
+    raise SVNUnexpectedStderr(errput)
+  new_uuid = output[0].rstrip()
+  if new_uuid != orig_uuid:
+    logger.warn("Error: new UUID doesn't match the original one")
+    raise svntest.Failure
+
+#----------------------------------------------------------------------
+@Issue(3020)
+def reflect_dropped_renumbered_revs(sbox):
+  "reflect dropped renumbered revs in svn:mergeinfo"
+
+  ## See http://subversion.tigris.org/issues/show_bug.cgi?id=3020. ##
+
+  sbox.build(empty=True)
+
+  dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]),
+                                   'svndumpfilter_tests_data',
+                                   'with_merges.dump')
+  dumpfile = svntest.actions.load_dumpfile(dumpfile_location)
+
+  # Create 'toplevel' dir in sbox.repo_url
+  svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+                                      'Committed revision 1.\n'],
+                                     [], "mkdir", sbox.repo_url + "/toplevel",
+                                     "-m", "Create toplevel dir")
+
+  # Load the dump stream in sbox.repo_url
+  load_dumpstream(sbox, dumpfile)
+
+  # Load the dump stream in toplevel dir
+  load_dumpstream(sbox, dumpfile, '--parent-dir', '/toplevel')
+
+  # Verify the svn:mergeinfo properties
+  # The second load starts at r10+, so its mergeinfo ranges are offset.
+  url = sbox.repo_url
+  expected_output = svntest.verify.UnorderedOutput([
+    url + "/trunk - /branch1:5-9\n",
+    url + "/toplevel/trunk - /toplevel/branch1:14-18\n",
+    ])
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'propget', 'svn:mergeinfo', '-R',
+                                     sbox.repo_url)
+
+#----------------------------------------------------------------------
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+@Issue(2992)
+def fsfs_recover_handle_missing_revs_or_revprops_file(sbox):
+  """fsfs recovery checks missing revs / revprops files"""
+  # Set up a repository containing the greek tree.
+  sbox.build()
+
+  # Commit up to r3, so we can test various recovery scenarios.
+  svntest.main.file_append(os.path.join(sbox.wc_dir, 'iota'), 'newer line\n')
+  sbox.simple_commit(message='log msg')
+
+  svntest.main.file_append(os.path.join(sbox.wc_dir, 'iota'), 'newest line\n')
+  sbox.simple_commit(message='log msg')
+
+  rev_3 = fsfs_file(sbox.repo_dir, 'revs', '3')
+  rev_was_3 = rev_3 + '.was'
+
+  # Move aside the revs file for r3.
+  os.rename(rev_3, rev_was_3)
+
+  # Verify 'svnadmin recover' fails when youngest has a revprops
+  # file but no revs file.
+  exit_code, output, errput = svntest.main.run_svnadmin("recover",
+                                                        sbox.repo_dir)
+
+  if svntest.verify.verify_outputs(
+    "Output of 'svnadmin recover' is unexpected.", None, errput, None,
+    ".*Expected current rev to be <= %s but found 3"
+    # For example, if svntest.main.fsfs_sharding == 2, then rev_3 would
+    # be the pack file for r2:r3, and the error message would report "<= 1".
+    % (rev_3.endswith('pack') and '[012]' or '2')):
+    raise svntest.Failure
+
+  # Restore the r3 revs file, thus repairing the repository.
+  os.rename(rev_was_3, rev_3)
+
+  revprop_3 = fsfs_file(sbox.repo_dir, 'revprops', '3')
+  revprop_was_3 = revprop_3 + '.was'
+
+  # Move aside the revprops file for r3.
+  os.rename(revprop_3, revprop_was_3)
+
+  # Verify 'svnadmin recover' fails when youngest has a revs file
+  # but no revprops file (issue #2992).
+  exit_code, output, errput = svntest.main.run_svnadmin("recover",
+                                                        sbox.repo_dir)
+
+  if svntest.verify.verify_outputs(
+    "Output of 'svnadmin recover' is unexpected.", None, errput, None,
+    ".*Revision 3 has a revs file but no revprops file"):
+    raise svntest.Failure
+
+  # Restore the r3 revprops file, thus repairing the repository.
+  os.rename(revprop_was_3, revprop_3)
+
+  # Change revprops file to a directory for revision 3
+  os.rename(revprop_3, revprop_was_3)
+  os.mkdir(revprop_3)
+
+  # Verify 'svnadmin recover' fails when youngest has a revs file
+  # but revprops file is not a file (another aspect of issue #2992).
+  exit_code, output, errput = svntest.main.run_svnadmin("recover",
+                                                        sbox.repo_dir)
+
+  if svntest.verify.verify_outputs(
+    "Output of 'svnadmin recover' is unexpected.", None, errput, None,
+    ".*Revision 3 has a non-file where its revprops file should be.*"):
+    raise svntest.Failure
+
+  # Restore the r3 revprops file, thus repairing the repository.
+  os.rmdir(revprop_3)
+  os.rename(revprop_was_3, revprop_3)
+
+
+#----------------------------------------------------------------------
+
+@Skip(svntest.main.tests_use_prepackaged_repository)
+def create_in_repo_subdir(sbox):
+  "'svnadmin create /path/to/repo/subdir'"
+
+  sbox.build(create_wc=False, empty=True)
+  repo_dir = sbox.repo_dir
+
+  # Creating a repository inside an existing repository (absolute path)
+  # must raise SVNRepositoryCreateFailure.
+  success = False
+  try:
+    # This should fail
+    subdir = os.path.join(repo_dir, 'Z')
+    svntest.main.create_repos(subdir)
+  except svntest.main.SVNRepositoryCreateFailure:
+    success = True
+  if not success:
+    raise svntest.Failure
+
+  # Same check, but with a relative path from inside the repository.
+  cwd = os.getcwd()
+  success = False
+  try:
+    # This should fail, too
+    subdir = os.path.join(repo_dir, 'conf')
+    os.chdir(subdir)
+    svntest.main.create_repos('Z')
+    os.chdir(cwd)
+  except svntest.main.SVNRepositoryCreateFailure:
+    success = True
+    os.chdir(cwd)
+  if not success:
+    raise svntest.Failure
+
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+@SkipDumpLoadCrossCheck()
+def verify_with_invalid_revprops(sbox):
+  "svnadmin verify detects invalid revprops file"
+
+  sbox.build(create_wc=False, empty=True)
+  repo_dir = sbox.repo_dir
+
+  # Run a test verify
+  # Sanity-check first: a pristine empty repository must verify cleanly.
+  exit_code, output, errput = svntest.main.run_svnadmin("verify",
+                                                        sbox.repo_dir)
+
+  if errput:
+    raise SVNUnexpectedStderr(errput)
+  if svntest.verify.verify_outputs(
+    "Output of 'svnadmin verify' is unexpected.", None, output, None,
+    ".*Verified revision 0*"):
+    raise svntest.Failure
+
+  # Empty the revprops file
+  # An empty revprops file is invalid, so verify must now report E200002.
+  rp_file = open(os.path.join(repo_dir, 'db', 'revprops', '0', '0'), 'w')
+
+  rp_file.write('')
+  rp_file.close()
+
+  exit_code, output, errput = svntest.main.run_svnadmin("verify",
+                                                        sbox.repo_dir)
+
+  if svntest.verify.verify_outputs(
+    "Output of 'svnadmin verify' is unexpected.", None, errput, None,
+    ".*svnadmin: E200002:.*"):
+    raise svntest.Failure
+
+#----------------------------------------------------------------------
+# Even *more* testing for issue #3020 'Reflect dropped/renumbered
+# revisions in svn:mergeinfo data during svnadmin load'
+#
+# Full or incremental dump-load cycles should result in the same
+# mergeinfo in the loaded repository.
+#
+# Given a repository 'SOURCE-REPOS' with mergeinfo, and a repository
+# 'TARGET-REPOS' (which may or may not be empty), either of the following
+# methods to move 'SOURCE-REPOS' to 'TARGET-REPOS' should result in
+# the same mergeinfo on 'TARGET-REPOS':
+#
+# 1) Dump -r1:HEAD from 'SOURCE-REPOS' and load it in one shot to
+# 'TARGET-REPOS'.
+#
+# 2) Dump 'SOURCE-REPOS' in a series of incremental dumps and load
+# each of them to 'TARGET-REPOS'.
+#
+# See http://subversion.tigris.org/issues/show_bug.cgi?id=3020#desc13
+@Issue(3020)
+def dont_drop_valid_mergeinfo_during_incremental_loads(sbox):
+ "don't filter mergeinfo revs from incremental dump"
+
+ # Create an empty repos.
+ sbox.build(empty=True)
+
+ # PART 1: Load a full dump to an empty repository.
+ #
+ # The test repository used here, 'mergeinfo_included_full.dump', is
+ # this repos:
+ # __________________________________________
+ # | |
+ # | ____________________________|_____
+ # | | | |
+ # trunk---r2---r3-----r5---r6-------r8---r9---------------> | |
+ # r1 | | | | | |
+ # initial | | | |______ | |
+ # import copy | copy | merge merge
+ # | | | merge (r5) (r8)
+ # | | | (r9) | |
+ # | | | | | |
+ # | | V V | |
+ # | | branches/B2-------r11---r12----> | |
+ # | | r7 |____| | |
+ # | | | | |
+ # | merge |___ | |
+ # | (r6) | | |
+ # | |_________________ | | |
+ # | | merge | |
+ # | | (r11-12) | |
+ # | | | | |
+ # V V V | |
+ # branches/B1-------------------r10--------r13--> | |
+ # r4 | |
+ # | V V
+ # branches/B1/B/E------------------------------r14---r15->
+ #
+ #
+ # The mergeinfo on this repos@15 is:
+ #
+ # Properties on 'branches/B1':
+ # svn:mergeinfo
+ # /branches/B2:11-12
+ # /trunk:6,9
+ # Properties on 'branches/B1/B/E':
+ # svn:mergeinfo
+ # /branches/B2/B/E:11-12
+ # /trunk/B/E:5-6,8-9
+ # Properties on 'branches/B2':
+ # svn:mergeinfo
+ # /trunk:9
+ dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svnadmin_tests_data',
+ 'mergeinfo_included_full.dump')
+ dumpfile_full = svntest.actions.load_dumpfile(dumpfile_location)
+ load_dumpstream(sbox, dumpfile_full, '--ignore-uuid')
+
+ # Check that the mergeinfo is as expected.
+ url = sbox.repo_url + '/branches/'
+ expected_output = svntest.verify.UnorderedOutput([
+ url + "B1 - /branches/B2:11-12\n",
+ "/trunk:6,9\n",
+ url + "B2 - /trunk:9\n",
+ url + "B1/B/E - /branches/B2/B/E:11-12\n",
+ "/trunk/B/E:5-6,8-9\n"])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'propget', 'svn:mergeinfo', '-R',
+ sbox.repo_url)
+
+ # PART 2: Load a a series of incremental dumps to an empty repository.
+ #
+ # Incrementally dump the repository into three dump files:
+ dump_file_r1_10 = sbox.get_tempname("r1-10-dump")
+ exit_code, output, errput = svntest.main.run_svnadmin(
+ 'dump', sbox.repo_dir, '-r1:10')
+ dump_fp = open(dump_file_r1_10, 'wb')
+ dump_fp.writelines(output)
+ dump_fp.close()
+
+ dump_file_r11_13 = sbox.get_tempname("r11-13-dump")
+ exit_code, output, errput = svntest.main.run_svnadmin(
+ 'dump', sbox.repo_dir, '--incremental', '-r11:13')
+ dump_fp = open(dump_file_r11_13, 'wb')
+ dump_fp.writelines(output)
+ dump_fp.close()
+
+ dump_file_r14_15 = sbox.get_tempname("r14-15-dump")
+ exit_code, output, errput = svntest.main.run_svnadmin(
+ 'dump', sbox.repo_dir, '--incremental', '-r14:15')
+ dump_fp = open(dump_file_r14_15, 'wb')
+ dump_fp.writelines(output)
+ dump_fp.close()
+
+ # Blow away the current repos and create an empty one in its place.
+ sbox.build(empty=True)
+
+ # Load the three incremental dump files in sequence.
+ load_dumpstream(sbox, svntest.actions.load_dumpfile(dump_file_r1_10),
+ '--ignore-uuid')
+ load_dumpstream(sbox, svntest.actions.load_dumpfile(dump_file_r11_13),
+ '--ignore-uuid')
+ load_dumpstream(sbox, svntest.actions.load_dumpfile(dump_file_r14_15),
+ '--ignore-uuid')
+
+ # Check the mergeinfo, we use the same expected output as before,
+ # as it (duh!) should be exactly the same as when we loaded the
+ # repos in one shot.
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'propget', 'svn:mergeinfo', '-R',
+ sbox.repo_url)
+
+ # Now repeat the above two scenarios, but with an initially non-empty target
+ # repository. First, try the full dump-load in one shot.
+ #
+ # PART 3: Load a full dump to an non-empty repository.
+ #
+ # Reset our sandbox.
+ sbox.build(empty=True)
+
+ # Load this skeleton repos into the empty target:
+ #
+ # Projects/ (Added r1)
+ # README (Added r2)
+ # Project-X (Added r3)
+ # Project-Y (Added r4)
+ # Project-Z (Added r5)
+ # docs/ (Added r6)
+ # README (Added r6)
+ dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svnadmin_tests_data',
+ 'skeleton_repos.dump')
+ dumpfile_skeleton = svntest.actions.load_dumpfile(dumpfile_location)
+ load_dumpstream(sbox, dumpfile_skeleton, '--ignore-uuid')
+
+ # Load 'svnadmin_tests_data/mergeinfo_included_full.dump' in one shot:
+ load_dumpstream(sbox, dumpfile_full, '--parent-dir', 'Projects/Project-X',
+ '--ignore-uuid')
+
+ # Check that the mergeinfo is as expected. This is exactly the
+ # same expected mergeinfo we previously checked, except that the
+ # revisions are all offset +6 to reflect the revions already in
+ # the skeleton target before we began loading and the leading source
+ # paths are adjusted by the --parent-dir:
+ #
+ # Properties on 'branches/B1':
+ # svn:mergeinfo
+ # /Projects/Project-X/branches/B2:17-18
+ # /Projects/Project-X/trunk:12,15
+ # Properties on 'branches/B1/B/E':
+ # svn:mergeinfo
+ # /Projects/Project-X/branches/B2/B/E:17-18
+ # /Projects/Project-X/trunk/B/E:11-12,14-15
+ # Properties on 'branches/B2':
+ # svn:mergeinfo
+ # /Projects/Project-X/trunk:15
+ url = sbox.repo_url + '/Projects/Project-X/branches/'
+ expected_output = svntest.verify.UnorderedOutput([
+ url + "B1 - /Projects/Project-X/branches/B2:17-18\n",
+ "/Projects/Project-X/trunk:12,15\n",
+ url + "B2 - /Projects/Project-X/trunk:15\n",
+ url + "B1/B/E - /Projects/Project-X/branches/B2/B/E:17-18\n",
+ "/Projects/Project-X/trunk/B/E:11-12,14-15\n"])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'propget', 'svn:mergeinfo', '-R',
+ sbox.repo_url)
+
+ # PART 4: Load a a series of incremental dumps to an non-empty repository.
+ #
+ # Reset our sandbox.
+ sbox.build(empty=True)
+
+ # Load this skeleton repos into the empty target:
+ load_dumpstream(sbox, dumpfile_skeleton, '--ignore-uuid')
+
+ # Load the three incremental dump files in sequence.
+ load_dumpstream(sbox, svntest.actions.load_dumpfile(dump_file_r1_10),
+ '--parent-dir', 'Projects/Project-X', '--ignore-uuid')
+ load_dumpstream(sbox, svntest.actions.load_dumpfile(dump_file_r11_13),
+ '--parent-dir', 'Projects/Project-X', '--ignore-uuid')
+ load_dumpstream(sbox, svntest.actions.load_dumpfile(dump_file_r14_15),
+ '--parent-dir', 'Projects/Project-X', '--ignore-uuid')
+
+ # Check the resulting mergeinfo. We expect the exact same results
+ # as Part 3.
+ # See http://subversion.tigris.org/issues/show_bug.cgi?id=3020#desc16.
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'propget', 'svn:mergeinfo', '-R',
+ sbox.repo_url)
+
+
+@SkipUnless(svntest.main.is_posix_os)
+@Issue(2591)
+def hotcopy_symlink(sbox):
+ "'svnadmin hotcopy' replicates symlink"
+
+ ## See http://subversion.tigris.org/issues/show_bug.cgi?id=2591. ##
+
+ # Create a repository.
+ sbox.build(create_wc=False, empty=True)
+ original_repo = sbox.repo_dir
+
+ hotcopy_repo, hotcopy_url = sbox.add_repo_path('hotcopy')
+
+ # Create a file, a dir and a missing path outside the repository.
+ svntest.main.safe_rmtree(sbox.wc_dir, 1)
+ os.mkdir(sbox.wc_dir)
+ external_file_path = os.path.join(sbox.wc_dir, "file")
+ svntest.main.file_write(external_file_path, "An existing file")
+ external_dir_path = os.path.join(sbox.wc_dir, "dir")
+ os.mkdir(external_dir_path)
+ external_missing_path = os.path.join(sbox.wc_dir, "missing")
+
+ # Symlink definitions: base name -> target relpath.
+ # Check both existing and nonexistent targets.
+ # Check targets both within and outside the source repository.
+ symlinks = [
+ ('in_repos_file', 'format'),
+ ('in_repos_dir', 'conf'),
+ ('in_repos_missing', 'missing'),
+ ('external_file', os.path.join('..', '..', '..', external_file_path)),
+ ('external_dir', os.path.join('..', '..', '..', external_dir_path)),
+ ('external_missing', os.path.join('..', '..', '..', external_missing_path)),
+ ]
+
+ # Create symlinks within the repository directory.
+ for name, target_relpath in symlinks:
+ target_path = os.path.join(original_repo, target_relpath)
+ target_abspath = os.path.abspath(target_path)
+
+ # Create two symlinks to each target - one relative, one absolute.
+ symlink_path = os.path.join(original_repo, name)
+ os.symlink(target_relpath, symlink_path + '_rel')
+ os.symlink(target_abspath, symlink_path + '_abs')
+
+ # Hotcopy must succeed even though some symlink targets are missing.
+ svntest.actions.run_and_verify_svnadmin(
+ None, [],
+ "hotcopy", original_repo, hotcopy_repo)
+
+ # Check if the symlinks were copied correctly.
+ for name, target_relpath in symlinks:
+ target_path = os.path.join(original_repo, target_relpath)
+ target_abspath = os.path.abspath(target_path)
+
+ # Check two symlinks to each target - one relative, one absolute.
+ # readlink() returns the link's target string verbatim, so the copy
+ # must preserve the exact relative or absolute target text.
+ symlink_path = os.path.join(hotcopy_repo, name)
+ if os.readlink(symlink_path + '_rel') != target_relpath:
+ raise svntest.Failure
+ if os.readlink(symlink_path + '_abs') != target_abspath:
+ raise svntest.Failure
+
+def load_bad_props(sbox):
+ "svnadmin load with invalid svn: props"
+
+ # A minimal dump whose r1 svn:log value contains mixed EOLs
+ # (the '\n\r\n' below), which prop validation must reject.
+ # The dump bytes must not be altered; the test depends on them exactly.
+ dump_str = b"""SVN-fs-dump-format-version: 2
+
+UUID: dc40867b-38f6-0310-9f5f-f81aa277e06f
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2005-05-03T19:09:41.129900Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 99
+Content-length: 99
+
+K 7
+svn:log
+V 3
+\n\r\n
+K 10
+svn:author
+V 2
+pl
+K 8
+svn:date
+V 27
+2005-05-03T19:10:19.975578Z
+PROPS-END
+
+Node-path: file
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 5
+Text-content-md5: e1cbb0c3879af8347246f12c559a86b5
+Content-length: 15
+
+PROPS-END
+text
+
+
+"""
+ sbox.build(empty=True)
+
+ # Try to load the dumpstream, expecting a failure (because of mixed EOLs).
+ exp_err = svntest.verify.RegexListOutput(['svnadmin: E125005',
+ 'svnadmin: E125005',
+ 'svnadmin: E125017'],
+ match_all=False)
+ load_and_verify_dumpstream(sbox, [], exp_err, dumpfile_revisions,
+ False, dump_str, '--ignore-uuid')
+
+ # Now try it again bypassing prop validation. (This interface takes
+ # care of the removal and recreation of the original repository.)
+ svntest.actions.load_repo(sbox, dump_str=dump_str,
+ bypass_prop_validation=True)
+ # Getting the property should fail.
+ svntest.actions.run_and_verify_svn(None, 'svn: E135000: ',
+ 'pg', 'svn:log', '--revprop', '-r1',
+ sbox.repo_url)
+
+ # Now try it again with prop normalization.
+ svntest.actions.load_repo(sbox, dump_str=dump_str,
+ bypass_prop_validation=False,
+ normalize_props=True)
+ # We should get the expected property value: normalization turns the
+ # mixed EOLs into two plain LFs.
+ exit_code, output, _ = svntest.main.run_svn(None, 'pg', 'svn:log',
+ '--revprop', '-r1',
+ '--no-newline',
+ sbox.repo_url)
+ svntest.verify.verify_exit_code(None, exit_code, 0)
+ if output != ['\n', '\n']:
+ raise svntest.Failure("Unexpected property value %s" % output)
+
+# This test intentionally corrupts a revision and assumes an FSFS
+# repository. If you can make it work with BDB please do so.
+# However, the verification triggered by this test is in the repos layer
+# so it will trigger with either backend anyway.
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+@SkipUnless(svntest.main.server_enforces_UTF8_fspaths_in_verify)
+def verify_non_utf8_paths(sbox):
+ "svnadmin verify with non-UTF-8 paths"
+
+ if svntest.main.options.fsfs_version is not None and \
+ svntest.main.options.fsfs_version not in [4, 6]:
+ raise svntest.Skip("Unsupported prepackaged repository version")
+
+ dumpfile = clean_dumpfile()
+
+ # Corruption only possible in physically addressed revisions created
+ # with pre-1.6 servers.
+ sbox.build(empty=True,
+ minor_version=min(svntest.main.options.server_minor_version,8))
+
+ # Load the dumpstream
+ load_and_verify_dumpstream(sbox, [], [], dumpfile_revisions, False,
+ dumpfile, '--ignore-uuid')
+
+ # Replace the path 'A' in revision 1 with a non-UTF-8 sequence.
+ # This has been observed in repositories in the wild, though Subversion
+ # 1.6 and greater should prevent such filenames from entering the repository.
+ # The edit is done by streaming the rev file into a sibling '.new' file
+ # and swapping it into place afterwards.
+ path1 = os.path.join(sbox.repo_dir, "db", "revs", "0", "1")
+ path_new = os.path.join(sbox.repo_dir, "db", "revs", "0", "1.new")
+ fp1 = open(path1, 'rb')
+ fp_new = open(path_new, 'wb')
+ for line in fp1.readlines():
+ if line == b"A\n":
+ # replace 'A' with a latin1 character -- the new path is not valid UTF-8
+ fp_new.write(b"\xE6\n")
+ elif line == b"text: 1 340 32 32 a6be7b4cf075fd39e6a99eb69a31232b\n":
+ # phys, PLAIN directories: fix up the representation checksum
+ fp_new.write(b"text: 1 340 32 32 f2e93e73272cac0f18fccf16f224eb93\n")
+ elif line == b"text: 1 340 44 32 a6be7b4cf075fd39e6a99eb69a31232b\n":
+ # phys, deltified directories: fix up the representation checksum
+ fp_new.write(b"text: 1 340 44 32 f2e93e73272cac0f18fccf16f224eb93\n")
+ elif line == b"cpath: /A\n":
+ # also fix up the 'created path' field
+ fp_new.write(b"cpath: /\xE6\n")
+ elif line == b"_0.0.t0-0 add-file true true /A\n":
+ # and another occurrence
+ fp_new.write(b"_0.0.t0-0 add-file true true /\xE6\n")
+ else:
+ fp_new.write(line)
+ fp1.close()
+ fp_new.close()
+ os.remove(path1)
+ os.rename(path_new, path1)
+
+ # Verify the repository, expecting failure
+ exit_code, output, errput = svntest.main.run_svnadmin("verify",
+ sbox.repo_dir)
+ svntest.verify.verify_outputs(
+ "Unexpected error while running 'svnadmin verify'.",
+ [], errput, None, ".*Path '.*' is not in UTF-8.*")
+
+ # Make sure the repository can still be dumped so that the
+ # encoding problem can be fixed in a dump/edit/load cycle.
+ expected_stderr = [
+ "* Dumped revision 0.\n",
+ "WARNING 0x0002: E160005: "
+ "While validating fspath '?\\E6': "
+ "Path '?\\E6' is not in UTF-8"
+ "\n",
+ "* Dumped revision 1.\n",
+ ]
+ exit_code, output, errput = svntest.main.run_svnadmin("dump", sbox.repo_dir)
+ if svntest.verify.compare_and_display_lines(
+ "Output of 'svnadmin dump' is unexpected.",
+ 'STDERR', expected_stderr, errput):
+ raise svntest.Failure
+
+def test_lslocks_and_rmlocks(sbox):
+ "test 'svnadmin lslocks' and 'svnadmin rmlocks'"
+
+ sbox.build(create_wc=False)
+ iota_url = sbox.repo_url + '/iota'
+ lambda_url = sbox.repo_url + '/A/B/lambda'
+
+ exit_code, output, errput = svntest.main.run_svnadmin("lslocks",
+ sbox.repo_dir)
+
+ if exit_code or errput or output:
+ raise svntest.Failure("Error: 'lslocks' failed")
+
+ expected_output = svntest.verify.UnorderedRegexListOutput(
+ ["'.*lambda' locked by user 'jrandom'.\n",
+ "'.*iota' locked by user 'jrandom'.\n"])
+
+ # Lock iota and A/B/lambda using svn client
+ svntest.actions.run_and_verify_svn(expected_output,
+ [], "lock", "-m", "Locking files",
+ iota_url, lambda_url)
+
+ def expected_output_list(path):
+ return [
+ "Path: " + path,
+ "UUID Token: opaquelocktoken",
+ "Owner: jrandom",
+ "Created:",
+ "Expires:",
+ "Comment \(1 line\):",
+ "Locking files",
+ "\n", # empty line
+ ]
+
+ # List all locks
+ exit_code, output, errput = svntest.main.run_svnadmin("lslocks",
+ sbox.repo_dir)
+ if errput:
+ raise SVNUnexpectedStderr(errput)
+ svntest.verify.verify_exit_code(None, exit_code, 0)
+
+ expected_output = svntest.verify.UnorderedRegexListOutput(
+ expected_output_list('/A/B/lambda') +
+ expected_output_list('/iota'))
+ svntest.verify.compare_and_display_lines('lslocks output mismatch',
+ 'output',
+ expected_output, output)
+
+ # List lock in path /A
+ exit_code, output, errput = svntest.main.run_svnadmin("lslocks",
+ sbox.repo_dir,
+ "A")
+ if errput:
+ raise SVNUnexpectedStderr(errput)
+
+ expected_output = svntest.verify.RegexListOutput(
+ expected_output_list('/A/B/lambda'))
+ svntest.verify.compare_and_display_lines('lslocks output mismatch',
+ 'output',
+ expected_output, output)
+ svntest.verify.verify_exit_code(None, exit_code, 0)
+
+ # Remove locks
+ exit_code, output, errput = svntest.main.run_svnadmin("rmlocks",
+ sbox.repo_dir,
+ "iota",
+ "A/B/lambda")
+ expected_output = UnorderedOutput(["Removed lock on '/iota'.\n",
+ "Removed lock on '/A/B/lambda'.\n"])
+
+ svntest.verify.verify_outputs(
+ "Unexpected output while running 'svnadmin rmlocks'.",
+ output, [], expected_output, None)
+
+#----------------------------------------------------------------------
+@Issue(3734)
+def load_ranges(sbox):
+ "'svnadmin load --revision X:Y'"
+
+ ## See http://subversion.tigris.org/issues/show_bug.cgi?id=3734. ##
+ sbox.build(empty=True)
+
+ dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svnadmin_tests_data',
+ 'skeleton_repos.dump')
+ dumplines = svntest.actions.load_dumpfile(dumpfile_location)
+
+ # Load our dumpfile, 2 revisions at a time, verifying that we have
+ # the correct youngest revision after each load.
+ load_dumpstream(sbox, dumplines, '-r0:2')
+ svntest.actions.run_and_verify_svnlook(['2\n'],
+ None, 'youngest', sbox.repo_dir)
+ load_dumpstream(sbox, dumplines, '-r3:4')
+ svntest.actions.run_and_verify_svnlook(['4\n'],
+ None, 'youngest', sbox.repo_dir)
+ load_dumpstream(sbox, dumplines, '-r5:6')
+ svntest.actions.run_and_verify_svnlook(['6\n'],
+ None, 'youngest', sbox.repo_dir)
+
+ # There are ordering differences in the property blocks.
+ if (svntest.main.options.server_minor_version < 6):
+ temp = []
+
+ for line in dumplines:
+ if not "Text-content-sha1:" in line:
+ temp.append(line)
+
+ expected_dump = UnorderedOutput(temp)
+ else:
+ expected_dump = UnorderedOutput(dumplines)
+
+ new_dumpdata = svntest.actions.run_and_verify_dump(sbox.repo_dir)
+ svntest.verify.compare_and_display_lines("Dump files", "DUMP",
+ expected_dump, new_dumpdata)
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+def hotcopy_incremental(sbox):
+ "'svnadmin hotcopy --incremental PATH .'"
+ sbox.build()
+
+ backup_dir, backup_url = sbox.add_repo_path('backup')
+ os.mkdir(backup_dir)
+ cwd = os.getcwd()
+
+ # Hotcopy three times into the same backup, committing a new revision
+ # between rounds, so the later copies exercise the incremental path.
+ for i in [1, 2, 3]:
+ # Run the hotcopy with '.' as the destination, from inside backup_dir.
+ os.chdir(backup_dir)
+ svntest.actions.run_and_verify_svnadmin(
+ None, [],
+ "hotcopy", "--incremental", os.path.join(cwd, sbox.repo_dir), '.')
+
+ os.chdir(cwd)
+
+ check_hotcopy_fsfs(sbox.repo_dir, backup_dir)
+
+ if i < 3:
+ sbox.simple_mkdir("newdir-%i" % i)
+ sbox.simple_commit()
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+@SkipUnless(svntest.main.fs_has_pack)
+def hotcopy_incremental_packed(sbox):
+ "'svnadmin hotcopy --incremental' with packing"
+
+ # Configure two files per shard to trigger packing.
+ sbox.build()
+ patch_format(sbox.repo_dir, shard_size=2)
+
+ backup_dir, backup_url = sbox.add_repo_path('backup')
+ os.mkdir(backup_dir)
+ cwd = os.getcwd()
+
+ # Pack revisions 0 and 1 if not already packed.
+ if not (svntest.main.is_fs_type_fsfs and svntest.main.options.fsfs_packing
+ and svntest.main.options.fsfs_sharding == 2):
+ svntest.actions.run_and_verify_svnadmin(
+ ['Packing revisions in shard 0...done.\n'], [], "pack",
+ os.path.join(cwd, sbox.repo_dir))
+
+ # Commit 5 more revs, hotcopy and pack after each commit.
+ for i in [1, 2, 3, 4, 5]:
+ os.chdir(backup_dir)
+ svntest.actions.run_and_verify_svnadmin(
+ None, [],
+ "hotcopy", "--incremental", os.path.join(cwd, sbox.repo_dir), '.')
+
+ os.chdir(cwd)
+
+ check_hotcopy_fsfs(sbox.repo_dir, backup_dir)
+
+ if i < 5:
+ sbox.simple_mkdir("newdir-%i" % i)
+ sbox.simple_commit()
+ # A pack message is only expected on every second commit, once a
+ # full 2-revision shard has accumulated.
+ # NOTE(review): under Python 3, i/2 is a float; '%d' truncates it,
+ # so the shard number still formats correctly — confirm i//2 was
+ # not intended here.
+ if (svntest.main.is_fs_type_fsfs and not svntest.main.options.fsfs_packing
+ and not i % 2):
+ expected_output = ['Packing revisions in shard %d...done.\n' % (i/2)]
+ else:
+ expected_output = []
+ svntest.actions.run_and_verify_svnadmin(
+ expected_output, [], "pack", os.path.join(cwd, sbox.repo_dir))
+
+
+def locking(sbox):
+ "svnadmin lock tests"
+ sbox.build(create_wc=False)
+
+ comment_path = os.path.join(svntest.main.temp_dir, "comment")
+ svntest.main.file_write(comment_path, "dummy comment")
+
+ # NOTE(review): this comment text must contain a character that lock
+ # comment validation rejects (expected error E130004 below) — confirm
+ # the fixture really contains such a character.
+ invalid_comment_path = os.path.join(svntest.main.temp_dir, "invalid_comment")
+ svntest.main.file_write(invalid_comment_path, "character is invalid")
+
+ # Test illegal character in comment file.
+ expected_error = ".*svnadmin: E130004:.*"
+ svntest.actions.run_and_verify_svnadmin(None,
+ expected_error, "lock",
+ sbox.repo_dir,
+ "iota", "jrandom",
+ invalid_comment_path)
+
+ # Test locking path with --bypass-hooks
+ expected_output = "'/iota' locked by user 'jrandom'."
+ svntest.actions.run_and_verify_svnadmin(expected_output,
+ None, "lock",
+ sbox.repo_dir,
+ "iota", "jrandom",
+ comment_path,
+ "--bypass-hooks")
+
+ # Remove lock
+ svntest.actions.run_and_verify_svnadmin(None,
+ None, "rmlocks",
+ sbox.repo_dir, "iota")
+
+ # Test locking path without --bypass-hooks
+ expected_output = "'/iota' locked by user 'jrandom'."
+ svntest.actions.run_and_verify_svnadmin(expected_output,
+ None, "lock",
+ sbox.repo_dir,
+ "iota", "jrandom",
+ comment_path)
+
+ # Test locking already locked path.
+ expected_error = ".*svnadmin: E160035:.*"
+ svntest.actions.run_and_verify_svnadmin(None,
+ expected_error, "lock",
+ sbox.repo_dir,
+ "iota", "jrandom",
+ comment_path)
+
+ # Test locking non-existent path.
+ expected_error = ".*svnadmin: E160013:.*"
+ svntest.actions.run_and_verify_svnadmin(None,
+ expected_error, "lock",
+ sbox.repo_dir,
+ "non-existent", "jrandom",
+ comment_path)
+
+ # Test locking a path while specifying a lock token.
+ expected_output = "'/A/D/G/rho' locked by user 'jrandom'."
+ lock_token = "opaquelocktoken:01234567-89ab-cdef-89ab-cdef01234567"
+ svntest.actions.run_and_verify_svnadmin(expected_output,
+ None, "lock",
+ sbox.repo_dir,
+ "A/D/G/rho", "jrandom",
+ comment_path, lock_token)
+
+ # Test unlocking a path, but provide the wrong lock token.
+ expected_error = ".*svnadmin: E160040:.*"
+ wrong_lock_token = "opaquelocktoken:12345670-9ab8-defc-9ab8-def01234567c"
+ svntest.actions.run_and_verify_svnadmin(None,
+ expected_error, "unlock",
+ sbox.repo_dir,
+ "A/D/G/rho", "jrandom",
+ wrong_lock_token)
+
+ # Test unlocking the path again, but this time provide the correct
+ # lock token.
+ expected_output = "'/A/D/G/rho' unlocked by user 'jrandom'."
+ svntest.actions.run_and_verify_svnadmin(expected_output,
+ None, "unlock",
+ sbox.repo_dir,
+ "A/D/G/rho", "jrandom",
+ lock_token)
+
+ # Install lock/unlock prevention hooks.
+ hook_path = svntest.main.get_pre_lock_hook_path(sbox.repo_dir)
+ svntest.main.create_python_hook_script(hook_path, 'import sys; sys.exit(1)')
+ hook_path = svntest.main.get_pre_unlock_hook_path(sbox.repo_dir)
+ svntest.main.create_python_hook_script(hook_path, 'import sys; sys.exit(1)')
+
+ # Test locking a path. Don't use --bypass-hooks, though, as we wish
+ # to verify that hook script is really getting executed.
+ expected_error = ".*svnadmin: E165001:.*"
+ svntest.actions.run_and_verify_svnadmin(None,
+ expected_error, "lock",
+ sbox.repo_dir,
+ "iota", "jrandom",
+ comment_path)
+
+ # Fetch the lock token for our remaining locked path. (We didn't
+ # explicitly set it, so it will vary from test run to test run.)
+ exit_code, output, errput = svntest.main.run_svnadmin("lslocks",
+ sbox.repo_dir,
+ "iota")
+ iota_token = None
+ for line in output:
+ if line.startswith("UUID Token: opaquelocktoken:"):
+ # Strip the "UUID Token: " prefix and the trailing newline.
+ iota_token = line[12:].rstrip()
+ break
+ if iota_token is None:
+ raise svntest.Failure("Unable to lookup lock token for 'iota'")
+
+ # Try to unlock a path while providing the correct lock token but
+ # with a preventative hook in place.
+ expected_error = ".*svnadmin: E165001:.*"
+ svntest.actions.run_and_verify_svnadmin(None,
+ expected_error, "unlock",
+ sbox.repo_dir,
+ "iota", "jrandom",
+ iota_token)
+
+ # Finally, use --bypass-hooks to unlock the path (again using the
+ # correct lock token).
+ expected_output = "'/iota' unlocked by user 'jrandom'."
+ svntest.actions.run_and_verify_svnadmin(expected_output,
+ None, "unlock",
+ "--bypass-hooks",
+ sbox.repo_dir,
+ "iota", "jrandom",
+ iota_token)
+
+
+@SkipUnless(svntest.main.is_threaded_python)
+@Issue(4129)
+def mergeinfo_race(sbox):
+ "concurrent mergeinfo commits invalidate pred-count"
+ sbox.build()
+
+ # This test exercises two commit-time race condition bugs:
+ #
+ # (a) metadata corruption when concurrent commits change svn:mergeinfo (issue #4129)
+ # (b) false positive SVN_ERR_FS_CONFLICT error with httpv1 commits
+ # https://mail-archives.apache.org/mod_mbox/subversion-dev/201507.mbox/%3C20150731234536.GA5395@tarsus.local2%3E
+ #
+ # Both bugs are timing-dependent and might not reproduce 100% of the time.
+
+ wc_dir = sbox.wc_dir
+ wc2_dir = sbox.add_wc_path('2')
+
+ ## Create wc2.
+ svntest.main.run_svn(None, 'checkout', '-q', sbox.repo_url, wc2_dir)
+
+ ## Some random edits.
+ svntest.main.run_svn(None, 'mkdir', sbox.ospath('d1', wc_dir))
+ svntest.main.run_svn(None, 'mkdir', sbox.ospath('d2', wc2_dir))
+
+ ## Set random mergeinfo properties.
+ svntest.main.run_svn(None, 'ps', 'svn:mergeinfo', '/P:42', sbox.ospath('A', wc_dir))
+ svntest.main.run_svn(None, 'ps', 'svn:mergeinfo', '/Q:42', sbox.ospath('iota', wc2_dir))
+
+ def makethread(some_wc_dir):
+ # Return a thread body committing SOME_WC_DIR with a fixed message.
+ def worker():
+ svntest.main.run_svn(None, 'commit', '-mm', some_wc_dir)
+ return worker
+
+ t1 = threading.Thread(None, makethread(wc_dir))
+ t2 = threading.Thread(None, makethread(wc2_dir))
+
+ # t2 will trigger the issue #4129 sanity check in fs_fs.c
+ t1.start(); t2.start()
+
+ t1.join(); t2.join()
+
+ # Crude attempt to make sure everything worked.
+ # TODO: better way to catch exceptions in the thread
+ # Both commits landing means HEAD is r3 (r1 from build + two commits).
+ if svntest.actions.run_and_parse_info(sbox.repo_url)[0]['Revision'] != '3':
+ raise svntest.Failure("one or both commits failed")
+
+
+@Issue(4213)
+@Skip(svntest.main.is_fs_type_fsx)
+def recover_old_empty(sbox):
+ "recover empty --compatible-version=1.3"
+ # 'svnadmin recover' must succeed on an empty repository created in
+ # 1.3-compatible mode (issue #4213); skipped for FSX (see @Skip above).
+ sbox.build(create_wc=False, empty=True, minor_version=3)
+ svntest.actions.run_and_verify_svnadmin(None, [],
+ "recover", sbox.repo_dir)
+
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+def verify_keep_going(sbox):
+ "svnadmin verify --keep-going test"
+
+ # No support for modifying pack files
+ if svntest.main.options.fsfs_packing:
+ raise svntest.Skip('fsfs packing set')
+
+ sbox.build(create_wc = False)
+ repo_url = sbox.repo_url
+ B_url = sbox.repo_url + '/B'
+ C_url = sbox.repo_url + '/C'
+
+ # Create A/B/E/bravo in r2.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mkdir', '-m', 'log_msg',
+ B_url)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mkdir', '-m', 'log_msg',
+ C_url)
+
+ # Corrupt r2 by overwriting the start of its rev file with junk
+ # ('r+b' positions the write at offset 0).
+ r2 = fsfs_file(sbox.repo_dir, 'revs', '2')
+ fp = open(r2, 'r+b')
+ fp.write(b"inserting junk to corrupt the rev")
+ fp.close()
+ exit_code, output, errput = svntest.main.run_svnadmin("verify",
+ "--keep-going",
+ sbox.repo_dir)
+
+ # With --keep-going, verification continues past r2 and also reports
+ # r3 errors plus a summary.
+ exp_out = svntest.verify.RegexListOutput([".*Verified revision 0.",
+ ".*Verified revision 1.",
+ ".*",
+ ".*Summary.*",
+ ".*r2: E160004:.*",
+ ".*r2: E160004:.*",
+ ".*r3: E160004:.*",
+ ".*r3: E160004:.*"])
+
+ if (svntest.main.fs_has_rep_sharing()):
+ exp_out.insert(0, ".*Verifying.*metadata.*")
+
+ exp_err = svntest.verify.RegexListOutput([".*Error verifying revision 2.",
+ "svnadmin: E160004:.*",
+ "svnadmin: E160004:.*",
+ ".*Error verifying revision 3.",
+ "svnadmin: E160004:.*",
+ "svnadmin: E160004:.*",
+ "svnadmin: E205012:.*"], False)
+
+ if (svntest.main.is_fs_log_addressing()):
+ exp_err.insert(0, ".*Error verifying repository metadata.")
+ exp_err.insert(1, "svnadmin: E160004:.*")
+
+ if svntest.verify.verify_outputs("Unexpected error while running 'svnadmin verify'.",
+ output, errput, exp_out, exp_err):
+ raise svntest.Failure
+
+ # Without --keep-going, verification stops at the first failure.
+ exit_code, output, errput = svntest.main.run_svnadmin("verify",
+ sbox.repo_dir)
+
+ if (svntest.main.is_fs_log_addressing()):
+ exp_out = svntest.verify.RegexListOutput([".*Verifying metadata at revision 0"])
+ else:
+ exp_out = svntest.verify.RegexListOutput([".*Verified revision 0.",
+ ".*Verified revision 1."])
+ if (svntest.main.fs_has_rep_sharing()):
+ exp_out.insert(0, ".*Verifying repository metadata.*")
+
+ if (svntest.main.is_fs_log_addressing()):
+ exp_err = svntest.verify.RegexListOutput([
+ ".*Error verifying repository metadata.",
+ "svnadmin: E160004:.*"], False)
+ else:
+ exp_err = svntest.verify.RegexListOutput([".*Error verifying revision 2.",
+ "svnadmin: E160004:.*",
+ "svnadmin: E160004:.*"], False)
+
+ if svntest.verify.verify_outputs("Unexpected error while running 'svnadmin verify'.",
+ output, errput, exp_out, exp_err):
+ raise svntest.Failure
+
+
+ # With --quiet, only the error output is checked.
+ exit_code, output, errput = svntest.main.run_svnadmin("verify",
+ "--quiet",
+ sbox.repo_dir)
+
+ if (svntest.main.is_fs_log_addressing()):
+ exp_err = svntest.verify.RegexListOutput([
+ ".*Error verifying repository metadata.",
+ "svnadmin: E160004:.*"], False)
+ else:
+ exp_err = svntest.verify.RegexListOutput([".*Error verifying revision 2.",
+ "svnadmin: E160004:.*",
+ "svnadmin: E160004:.*"], False)
+
+ if svntest.verify.verify_outputs("Output of 'svnadmin verify' is unexpected.",
+ None, errput, None, exp_err):
+ raise svntest.Failure
+
+ # Don't leave a corrupt repository
+ svntest.main.safe_rmtree(sbox.repo_dir, True)
+
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+def verify_keep_going_quiet(sbox):
+ "svnadmin verify --keep-going --quiet test"
+
+ # No support for modifying pack files
+ if svntest.main.options.fsfs_packing:
+ raise svntest.Skip('fsfs packing set')
+
+ sbox.build(create_wc = False)
+ repo_url = sbox.repo_url
+ B_url = sbox.repo_url + '/B'
+ C_url = sbox.repo_url + '/C'
+
+ # Create A/B/E/bravo in r2.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mkdir', '-m', 'log_msg',
+ B_url)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mkdir', '-m', 'log_msg',
+ C_url)
+
+ # Corrupt r2 by overwriting the start of its rev file with junk
+ # ('r+b' positions the write at offset 0).
+ r2 = fsfs_file(sbox.repo_dir, 'revs', '2')
+ fp = open(r2, 'r+b')
+ fp.write(b"inserting junk to corrupt the rev")
+ fp.close()
+
+ exit_code, output, errput = svntest.main.run_svnadmin("verify",
+ "--keep-going",
+ "--quiet",
+ sbox.repo_dir)
+
+ # --quiet suppresses stdout; errors for r2, r3 and the final summary
+ # error (E205012) must still appear on stderr.
+ exp_err = svntest.verify.RegexListOutput([".*Error verifying revision 2.",
+ "svnadmin: E160004:.*",
+ "svnadmin: E160004:.*",
+ ".*Error verifying revision 3.",
+ "svnadmin: E160004:.*",
+ "svnadmin: E160004:.*",
+ "svnadmin: E205012:.*"], False)
+
+ # Insert another expected error from checksum verification
+ if (svntest.main.is_fs_log_addressing()):
+ exp_err.insert(0, ".*Error verifying repository metadata.")
+ exp_err.insert(1, "svnadmin: E160004:.*")
+
+ if svntest.verify.verify_outputs(
+ "Unexpected error while running 'svnadmin verify'.",
+ output, errput, None, exp_err):
+ raise svntest.Failure
+
+ # Don't leave a corrupt repository
+ svntest.main.safe_rmtree(sbox.repo_dir, True)
+
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+def verify_invalid_path_changes(sbox):
+ "detect invalid changed path list entries"
+
+ # No support for modifying pack files
+ if svntest.main.options.fsfs_packing:
+ raise svntest.Skip('fsfs packing set')
+
+ sbox.build(create_wc = False)
+ repo_url = sbox.repo_url
+
+ # Create a number of revisions each adding a single path
+ for r in range(2,20):
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mkdir', '-m', 'log_msg',
+ sbox.repo_url + '/B' + str(r))
+
+ # modify every other revision to make sure that errors are not simply
+  # "carried over" but that all corruptions get detected independently
+
+ # add existing node
+ set_changed_path_list(sbox, 2,
+ b"_0.0.t1-1 add-dir false false /A\n\n")
+
+ # add into non-existent parent
+ set_changed_path_list(sbox, 4,
+ b"_0.0.t3-2 add-dir false false /C/X\n\n")
+
+ # del non-existent node
+ set_changed_path_list(sbox, 6,
+ b"_0.0.t5-2 delete-dir false false /C\n\n")
+
+ # del existent node of the wrong kind
+ #
+ # THIS WILL NOT BE DETECTED
+ # since dump mechanism and file don't care about the types of deleted nodes
+ set_changed_path_list(sbox, 8,
+ b"_0.0.t7-2 delete-file false false /B3\n\n")
+
+ # copy from non-existent node
+ set_changed_path_list(sbox, 10,
+ b"_0.0.t9-2 add-dir false false /B10\n6 /B8\n")
+
+ # copy from existing node of the wrong kind
+ set_changed_path_list(sbox, 12,
+ b"_0.0.t11-2 add-file false false /B12\n9 /B8\n")
+
+ # modify non-existent node
+ set_changed_path_list(sbox, 14,
+ b"_0.0.t13-2 modify-file false false /A/D/H/foo\n\n")
+
+ # modify existent node of the wrong kind
+ set_changed_path_list(sbox, 16,
+ b"_0.0.t15-2 modify-file false false /B12\n\n")
+
+ # replace non-existent node
+ set_changed_path_list(sbox, 18,
+ b"_0.0.t17-2 replace-file false false /A/D/H/foo\n\n")
+
+ # find corruptions
+ exit_code, output, errput = svntest.main.run_svnadmin("verify",
+ "--keep-going",
+ sbox.repo_dir)
+
+ # Errors generated by FSFS when CHANGED_PATHS is not forced into emulation
+ exp_out1 = svntest.verify.RegexListOutput([".*Verified revision 0.",
+ ".*Verified revision 1.",
+ ".*Verified revision 3.",
+ ".*Verified revision 5.",
+ ".*Verified revision 7.",
+ ".*Verified revision 8.",
+ ".*Verified revision 9.",
+ ".*Verified revision 11.",
+ ".*Verified revision 13.",
+ ".*Verified revision 15.",
+ ".*Verified revision 17.",
+ ".*Verified revision 19.",
+ ".*",
+ ".*Summary.*",
+ ".*r2: E160020:.*",
+ ".*r2: E160020:.*",
+ ".*r4: E160013:.*",
+ ".*r6: E160013:.*",
+ ".*r6: E160013:.*",
+ ".*r10: E160013:.*",
+ ".*r10: E160013:.*",
+ ".*r12: E145001:.*",
+ ".*r12: E145001:.*",
+ ".*r14: E160013:.*",
+ ".*r14: E160013:.*",
+ ".*r16: E145001:.*",
+ ".*r16: E145001:.*",
+ ".*r18: E160013:.*",
+ ".*r18: E160013:.*"])
+
+ exp_err1 = svntest.verify.RegexListOutput([".*Error verifying revision 2.",
+ "svnadmin: E160020:.*",
+ "svnadmin: E160020:.*",
+ ".*Error verifying revision 4.",
+ "svnadmin: E160013:.*",
+ ".*Error verifying revision 6.",
+ "svnadmin: E160013:.*",
+ "svnadmin: E160013:.*",
+ ".*Error verifying revision 10.",
+ "svnadmin: E160013:.*",
+ "svnadmin: E160013:.*",
+ ".*Error verifying revision 12.",
+ "svnadmin: E145001:.*",
+ "svnadmin: E145001:.*",
+ ".*Error verifying revision 14.",
+ "svnadmin: E160013:.*",
+ "svnadmin: E160013:.*",
+ ".*Error verifying revision 16.",
+ "svnadmin: E145001:.*",
+ "svnadmin: E145001:.*",
+ ".*Error verifying revision 18.",
+ "svnadmin: E160013:.*",
+ "svnadmin: E160013:.*",
+ "svnadmin: E205012:.*"], False)
+
+ # If CHANGED_PATHS is emulated, FSFS fails earlier, generating fewer
+ # of the same messages per revision.
+ exp_out2 = svntest.verify.RegexListOutput([".*Verified revision 0.",
+ ".*Verified revision 1.",
+ ".*Verified revision 3.",
+ ".*Verified revision 5.",
+ ".*Verified revision 7.",
+ ".*Verified revision 8.",
+ ".*Verified revision 9.",
+ ".*Verified revision 11.",
+ ".*Verified revision 13.",
+ ".*Verified revision 15.",
+ ".*Verified revision 17.",
+ ".*Verified revision 19.",
+ ".*",
+ ".*Summary.*",
+ ".*r2: E160020:.*",
+ ".*r2: E160020:.*",
+ ".*r4: E160013:.*",
+ ".*r6: E160013:.*",
+ ".*r10: E160013:.*",
+ ".*r10: E160013:.*",
+ ".*r12: E145001:.*",
+ ".*r12: E145001:.*",
+ ".*r14: E160013:.*",
+ ".*r16: E145001:.*",
+ ".*r16: E145001:.*",
+ ".*r18: E160013:.*"])
+
+ exp_err2 = svntest.verify.RegexListOutput([".*Error verifying revision 2.",
+ "svnadmin: E160020:.*",
+ "svnadmin: E160020:.*",
+ ".*Error verifying revision 4.",
+ "svnadmin: E160013:.*",
+ ".*Error verifying revision 6.",
+ "svnadmin: E160013:.*",
+ ".*Error verifying revision 10.",
+ "svnadmin: E160013:.*",
+ "svnadmin: E160013:.*",
+ ".*Error verifying revision 12.",
+ "svnadmin: E145001:.*",
+ "svnadmin: E145001:.*",
+ ".*Error verifying revision 14.",
+ "svnadmin: E160013:.*",
+ ".*Error verifying revision 16.",
+ "svnadmin: E145001:.*",
+ "svnadmin: E145001:.*",
+ ".*Error verifying revision 18.",
+ "svnadmin: E160013:.*",
+ "svnadmin: E205012:.*"], False)
+
+ # Determine which pattern to use.
+ # Note that index() will throw an exception if the string can't be found.
+ try:
+ rev6_line = errput.index('* Error verifying revision 6.\n');
+ rev10_line = errput.index('* Error verifying revision 10.\n');
+
+ error_count = 0
+ for line in errput[rev6_line+1:rev10_line]:
+ if "svnadmin: E" in line:
+ error_count = error_count + 1
+
+ if error_count == 1:
+ exp_out = exp_out2
+ exp_err = exp_err2
+ else:
+ exp_out = exp_out1
+ exp_err = exp_err1
+ except ValueError:
+ exp_out = exp_out1
+ exp_err = exp_err1
+
+ if (svntest.main.fs_has_rep_sharing()):
+ exp_out.insert(0, ".*Verifying.*metadata.*")
+ if svntest.main.options.fsfs_sharding is not None:
+ for x in range(0, 19 / svntest.main.options.fsfs_sharding):
+ exp_out.insert(0, ".*Verifying.*metadata.*")
+ if svntest.main.is_fs_log_addressing():
+ exp_out.insert(0, ".*Verifying.*metadata.*")
+
+ if svntest.verify.verify_outputs("Unexpected error while running 'svnadmin verify'.",
+ output, errput, exp_out, exp_err):
+ raise svntest.Failure
+
+ exit_code, output, errput = svntest.main.run_svnadmin("verify",
+ sbox.repo_dir)
+
+ exp_out = svntest.verify.RegexListOutput([".*Verified revision 0.",
+ ".*Verified revision 1."])
+ exp_err = svntest.verify.RegexListOutput([".*Error verifying revision 2.",
+ "svnadmin: E160020:.*",
+ "svnadmin: E160020:.*"], False)
+
+ if (svntest.main.fs_has_rep_sharing()):
+ exp_out.insert(0, ".*Verifying.*metadata.*")
+ if svntest.main.options.fsfs_sharding is not None:
+ for x in range(0, 19 / svntest.main.options.fsfs_sharding):
+ exp_out.insert(0, ".*Verifying.*metadata.*")
+ if svntest.main.is_fs_log_addressing():
+ exp_out.insert(0, ".*Verifying.*metadata.*")
+
+ if svntest.verify.verify_outputs("Unexpected error while running 'svnadmin verify'.",
+ output, errput, exp_out, exp_err):
+ raise svntest.Failure
+
+
+ exit_code, output, errput = svntest.main.run_svnadmin("verify",
+ "--quiet",
+ sbox.repo_dir)
+
+ exp_out = []
+ exp_err = svntest.verify.RegexListOutput([".*Error verifying revision 2.",
+ "svnadmin: E160020:.*",
+ "svnadmin: E160020:.*"], False)
+
+ if svntest.verify.verify_outputs("Output of 'svnadmin verify' is unexpected.",
+ output, errput, exp_out, exp_err):
+ raise svntest.Failure
+
+ # Don't leave a corrupt repository
+ svntest.main.safe_rmtree(sbox.repo_dir, True)
+
+
+def verify_denormalized_names(sbox):
+ "detect denormalized names and name collisions"
+
+ sbox.build(create_wc=False, empty=True)
+
+ dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svnadmin_tests_data',
+ 'normalization_check.dump')
+ load_dumpstream(sbox, svntest.actions.load_dumpfile(dumpfile_location))
+
+ exit_code, output, errput = svntest.main.run_svnadmin(
+ "verify", "--check-normalization", sbox.repo_dir)
+
+ expected_output_regex_list = [
+ ".*Verified revision 0.",
+ ".*Verified revision 1.",
+ ".*Verified revision 2.",
+ ".*Verified revision 3.",
+ # A/{Eacute}/{aring}lpha
+ "WARNING 0x0003: Duplicate representation of path 'A/.*/.*lpha'",
+ ".*Verified revision 4.",
+ ".*Verified revision 5.",
+ # Q/{aring}lpha
+ "WARNING 0x0004: Duplicate representation of path '/Q/.*lpha'"
+ # A/{Eacute}
+ " in svn:mergeinfo property of 'A/.*'",
+ ".*Verified revision 6.",
+ ".*Verified revision 7."]
+
+ # The BDB backend doesn't do global metadata verification.
+ if (svntest.main.fs_has_rep_sharing() and not svntest.main.is_fs_type_bdb()):
+ expected_output_regex_list.insert(0, ".*Verifying repository metadata.*")
+
+ if svntest.main.options.fsfs_sharding is not None:
+ for x in range(0, 7 / svntest.main.options.fsfs_sharding):
+ expected_output_regex_list.insert(0, ".*Verifying.*metadata.*")
+
+ if svntest.main.is_fs_log_addressing():
+ expected_output_regex_list.insert(0, ".* Verifying metadata at revision 0.*")
+
+ exp_out = svntest.verify.RegexListOutput(expected_output_regex_list)
+ exp_err = svntest.verify.ExpectedOutput([])
+
+ svntest.verify.verify_outputs(
+ "Unexpected error while running 'svnadmin verify'.",
+ output, errput, exp_out, exp_err)
+
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+def fsfs_recover_old_non_empty(sbox):
+ "fsfs recover non-empty --compatible-version=1.3"
+
+ # Around trunk@1560210, 'svnadmin recover' wrongly errored out
+ # for the --compatible-version=1.3 Greek tree repository:
+ # svnadmin: E200002: Serialized hash missing terminator
+
+ sbox.build(create_wc=False, minor_version=3)
+ svntest.actions.run_and_verify_svnadmin(None, [], "recover",
+ sbox.repo_dir)
+
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+def fsfs_hotcopy_old_non_empty(sbox):
+ "fsfs hotcopy non-empty --compatible-version=1.3"
+
+ # Around trunk@1560210, 'svnadmin hotcopy' wrongly errored out
+ # for the --compatible-version=1.3 Greek tree repository:
+ # svnadmin: E160006: No such revision 1
+
+ sbox.build(create_wc=False, minor_version=3)
+ backup_dir, backup_url = sbox.add_repo_path('backup')
+ svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy",
+ sbox.repo_dir, backup_dir)
+
+ check_hotcopy_fsfs(sbox.repo_dir, backup_dir)
+
+
+def load_ignore_dates(sbox):
+ "svnadmin load --ignore-dates"
+
+ # All revisions in the loaded repository should come after this time.
+ start_time = time.localtime()
+ time.sleep(1)
+
+ sbox.build(create_wc=False, empty=True)
+
+ dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svnadmin_tests_data',
+ 'skeleton_repos.dump')
+ dumpfile_skeleton = svntest.actions.load_dumpfile(dumpfile_location)
+
+ load_dumpstream(sbox, dumpfile_skeleton, '--ignore-dates')
+ svntest.actions.run_and_verify_svnlook(['6\n'],
+ None, 'youngest', sbox.repo_dir)
+ for rev in range(1, 6):
+ exit_code, output, errput = svntest.main.run_svnlook('date', '-r', rev,
+ sbox.repo_dir)
+ if errput:
+ raise SVNUnexpectedStderr(errput)
+ rev_time = time.strptime(output[0].rstrip()[:19], '%Y-%m-%d %H:%M:%S')
+ if rev_time < start_time:
+ raise svntest.Failure("Revision time for r%d older than load start time\n"
+ " rev_time: %s\n"
+ " start_time: %s"
+ % (rev, str(rev_time), str(start_time)))
+
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+def fsfs_hotcopy_old_with_id_changes(sbox):
+ "fsfs hotcopy old with node-id and copy-id changes"
+
+ # Around trunk@1573728, running 'svnadmin hotcopy' for the
+ # --compatible-version=1.3 repository with certain node-id and copy-id
+ # changes ended with mismatching db/current in source and destination:
+ #
+ # source: "2 l 1" destination: "2 k 1",
+ # "3 l 2" "3 4 2"
+ # (and so on...)
+ #
+ # We test this case by creating a --compatible-version=1.3 repository
+ # and committing things that result in node-id and copy-id changes.
+ # After every commit, we hotcopy the repository to a new destination
+ # and check whether the source of the backup and the backup itself are
+ # identical. We also maintain a separate --incremental backup, which
+ # is updated and checked after every commit.
+ sbox.build(create_wc=True, minor_version=3)
+
+ inc_backup_dir, inc_backup_url = sbox.add_repo_path('incremental-backup')
+
+ # r1 = Initial greek tree sandbox.
+ backup_dir, backup_url = sbox.add_repo_path('backup-after-r1')
+ svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy",
+ sbox.repo_dir, backup_dir)
+ svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy",
+ "--incremental",
+ sbox.repo_dir, inc_backup_dir)
+ check_hotcopy_fsfs(sbox.repo_dir, backup_dir)
+ check_hotcopy_fsfs(sbox.repo_dir, inc_backup_dir)
+
+ # r2 = Add a new property.
+ sbox.simple_propset('foo', 'bar', 'A/mu')
+ sbox.simple_commit(message='r2')
+
+ backup_dir, backup_url = sbox.add_repo_path('backup-after-r2')
+ svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy",
+ sbox.repo_dir, backup_dir)
+ svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy",
+ "--incremental",
+ sbox.repo_dir, inc_backup_dir)
+ check_hotcopy_fsfs(sbox.repo_dir, backup_dir)
+ check_hotcopy_fsfs(sbox.repo_dir, inc_backup_dir)
+
+ # r3 = Copy a file.
+ sbox.simple_copy('A/B/E', 'A/B/E1')
+ sbox.simple_commit(message='r3')
+
+ backup_dir, backup_url = sbox.add_repo_path('backup-after-r3')
+ svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy",
+ sbox.repo_dir, backup_dir)
+ svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy",
+ "--incremental",
+ sbox.repo_dir, inc_backup_dir)
+ check_hotcopy_fsfs(sbox.repo_dir, backup_dir)
+ check_hotcopy_fsfs(sbox.repo_dir, inc_backup_dir)
+
+ # r4 = Remove an existing file ...
+ sbox.simple_rm('A/D/gamma')
+ sbox.simple_commit(message='r4')
+
+ backup_dir, backup_url = sbox.add_repo_path('backup-after-r4')
+ svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy",
+ sbox.repo_dir, backup_dir)
+ svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy",
+ "--incremental",
+ sbox.repo_dir, inc_backup_dir)
+ check_hotcopy_fsfs(sbox.repo_dir, backup_dir)
+ check_hotcopy_fsfs(sbox.repo_dir, inc_backup_dir)
+
+ # r5 = ...and replace it with a new file here.
+ sbox.simple_add_text("This is the replaced file.\n", 'A/D/gamma')
+ sbox.simple_commit(message='r5')
+
+ backup_dir, backup_url = sbox.add_repo_path('backup-after-r5')
+ svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy",
+ sbox.repo_dir, backup_dir)
+ svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy",
+ "--incremental",
+ sbox.repo_dir, inc_backup_dir)
+ check_hotcopy_fsfs(sbox.repo_dir, backup_dir)
+ check_hotcopy_fsfs(sbox.repo_dir, inc_backup_dir)
+
+ # r6 = Add an entirely new file.
+ sbox.simple_add_text('This is an entirely new file.\n', 'A/C/mu1')
+ sbox.simple_commit(message='r6')
+
+ backup_dir, backup_url = sbox.add_repo_path('backup-after-r6')
+ svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy",
+ sbox.repo_dir, backup_dir)
+ svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy",
+ "--incremental",
+ sbox.repo_dir, inc_backup_dir)
+ check_hotcopy_fsfs(sbox.repo_dir, backup_dir)
+ check_hotcopy_fsfs(sbox.repo_dir, inc_backup_dir)
+
+ # r7 = Change the content of the existing file (this changeset does
+ # not bump the next-id and copy-id counters in the repository).
+ sbox.simple_append('A/mu', 'This is change in the existing file.\n')
+ sbox.simple_commit(message='r7')
+
+ backup_dir, backup_url = sbox.add_repo_path('backup-after-r7')
+ svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy",
+ sbox.repo_dir, backup_dir)
+ svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy",
+ "--incremental",
+ sbox.repo_dir, inc_backup_dir)
+ check_hotcopy_fsfs(sbox.repo_dir, backup_dir)
+ check_hotcopy_fsfs(sbox.repo_dir, inc_backup_dir)
+
+
+@SkipUnless(svntest.main.fs_has_pack)
+def verify_packed(sbox):
+ "verify packed with small shards"
+
+ # Configure two files per shard to trigger packing.
+ sbox.build()
+ patch_format(sbox.repo_dir, shard_size=2)
+
+ # Play with our greek tree. These changesets fall into two
+ # separate shards with r2 and r3 being in shard 1 ...
+ sbox.simple_append('iota', "Line.\n")
+ sbox.simple_append('A/D/gamma', "Another line.\n")
+ sbox.simple_commit(message='r2')
+ sbox.simple_propset('foo', 'bar', 'iota')
+ sbox.simple_propset('foo', 'baz', 'A/mu')
+ sbox.simple_commit(message='r3')
+
+ # ...and r4 and r5 being in shard 2.
+ sbox.simple_rm('A/C')
+ sbox.simple_copy('A/B/E', 'A/B/E1')
+ sbox.simple_move('A/mu', 'A/B/mu')
+ sbox.simple_commit(message='r4')
+ sbox.simple_propdel('foo', 'A/B/mu')
+ sbox.simple_commit(message='r5')
+
+ if svntest.main.is_fs_type_fsfs and svntest.main.options.fsfs_packing:
+ # With --fsfs-packing, everything is already packed and we
+ # can skip this part.
+ pass
+ else:
+ expected_output = ["Packing revisions in shard 0...done.\n",
+ "Packing revisions in shard 1...done.\n",
+ "Packing revisions in shard 2...done.\n"]
+ svntest.actions.run_and_verify_svnadmin(expected_output, [],
+ "pack", sbox.repo_dir)
+
+ if svntest.main.is_fs_log_addressing():
+ expected_output = ["* Verifying metadata at revision 0 ...\n",
+ "* Verifying metadata at revision 2 ...\n",
+ "* Verifying metadata at revision 4 ...\n",
+ "* Verifying repository metadata ...\n",
+ "* Verified revision 0.\n",
+ "* Verified revision 1.\n",
+ "* Verified revision 2.\n",
+ "* Verified revision 3.\n",
+ "* Verified revision 4.\n",
+ "* Verified revision 5.\n"]
+ else:
+ expected_output = ["* Verifying repository metadata ...\n",
+ "* Verified revision 0.\n",
+ "* Verified revision 1.\n",
+ "* Verified revision 2.\n",
+ "* Verified revision 3.\n",
+ "* Verified revision 4.\n",
+ "* Verified revision 5.\n"]
+
+ svntest.actions.run_and_verify_svnadmin(expected_output, [],
+ "verify", sbox.repo_dir)
+
+# Test that 'svnadmin freeze' is nestable. (For example, this ensures it
+# won't take system-global locks, only repository-scoped ones.)
+#
+# This could be useful to easily freeze a small number of repositories at once.
+#
+# ### We don't actually test that freeze takes a write lock anywhere (not even
+# ### in C tests.)
+def freeze_freeze(sbox):
+ "svnadmin freeze svnadmin freeze (some-cmd)"
+
+ sbox.build(create_wc=False, read_only=True)
+ second_repo_dir, _ = sbox.add_repo_path('backup')
+ svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy",
+ sbox.repo_dir, second_repo_dir)
+
+ if svntest.main.is_fs_type_fsx() or \
+ (svntest.main.is_fs_type_fsfs() and \
+ svntest.main.options.server_minor_version < 9):
+ # FSFS repositories created with --compatible-version=1.8 and less
+ # erroneously share the filesystem data (locks, shared transaction
+ # data, ...) between hotcopy source and destination. This is fixed
+ # for new FS formats, but in order to avoid a deadlock for old formats,
+ # we have to manually assign a new UUID for the hotcopy destination.
+ # As of trunk@1618024, the same applies to FSX repositories.
+ svntest.actions.run_and_verify_svnadmin([], None,
+ 'setuuid', second_repo_dir)
+
+ svntest.actions.run_and_verify_svnadmin(None, [],
+ 'freeze', '--', sbox.repo_dir,
+ svntest.main.svnadmin_binary, 'freeze', '--', second_repo_dir,
+ sys.executable, '-c', 'True')
+
+ arg_file = sbox.get_tempname()
+ svntest.main.file_write(arg_file,
+ "%s\n%s\n" % (sbox.repo_dir, second_repo_dir))
+
+ svntest.actions.run_and_verify_svnadmin(None, [],
+ 'freeze', '-F', arg_file, '--',
+ sys.executable, '-c', 'True')
+
+def verify_metadata_only(sbox):
+ "verify metadata only"
+
+ sbox.build(create_wc = False)
+ exit_code, output, errput = svntest.main.run_svnadmin("verify",
+ sbox.repo_dir,
+ "--metadata-only")
+ if errput:
+ raise SVNUnexpectedStderr(errput)
+
+  # Unfortunately, older formats won't test as thoroughly as newer ones,
+ # resulting in different progress output. BDB will do a full check but
+ # not produce any output.
+ if svntest.main.is_fs_log_addressing():
+ svntest.verify.compare_and_display_lines(
+ "Unexpected error while running 'svnadmin verify'.",
+ 'STDOUT', ["* Verifying metadata at revision 0 ...\n",
+ "* Verifying repository metadata ...\n"], output)
+ elif svntest.main.fs_has_rep_sharing() \
+ and not svntest.main.is_fs_type_bdb():
+ svntest.verify.compare_and_display_lines(
+ "Unexpected error while running 'svnadmin verify'.",
+ 'STDOUT', ["* Verifying repository metadata ...\n"], output)
+ else:
+ svntest.verify.compare_and_display_lines(
+ "Unexpected error while running 'svnadmin verify'.",
+ 'STDOUT', [], output)
+
+
+@Skip(svntest.main.is_fs_type_bdb)
+def verify_quickly(sbox):
+ "verify quickly using metadata"
+
+ sbox.build(create_wc = False)
+ if svntest.main.is_fs_type_fsfs():
+ rev_file = open(fsfs_file(sbox.repo_dir, 'revs', '1'), 'r+b')
+ else:
+ rev_file = open(fsfs_file(sbox.repo_dir, 'revs', 'r1'), 'r+b')
+
+ # set new contents
+ rev_file.seek(8)
+ rev_file.write(b'#')
+ rev_file.close()
+
+ exit_code, output, errput = svntest.main.run_svnadmin("verify",
+ sbox.repo_dir,
+ "--metadata-only")
+
+  # Unfortunately, some backends need to do more checks than others,
+  # resulting in different progress output.
+ if svntest.main.is_fs_log_addressing():
+ exp_out = svntest.verify.RegexListOutput([])
+ exp_err = svntest.verify.RegexListOutput(["svnadmin: E160004:.*"], False)
+ else:
+ exp_out = svntest.verify.RegexListOutput([])
+ exp_err = svntest.verify.RegexListOutput([])
+
+ if (svntest.main.fs_has_rep_sharing()):
+ exp_out.insert(0, ".*Verifying.*metadata.*")
+ if svntest.verify.verify_outputs("Unexpected error while running 'svnadmin verify'.",
+ output, errput, exp_out, exp_err):
+ raise svntest.Failure
+
+ # Don't leave a corrupt repository
+ svntest.main.safe_rmtree(sbox.repo_dir, True)
+
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+@SkipUnless(svntest.main.fs_has_pack)
+def fsfs_hotcopy_progress(sbox):
+ "hotcopy progress reporting"
+
+ # Check how 'svnadmin hotcopy' reports progress for non-incremental
+ # and incremental scenarios. The progress output can be affected by
+ # the --fsfs-packing option, so skip the test if that is the case.
+ if svntest.main.options.fsfs_packing:
+ raise svntest.Skip('fsfs packing set')
+
+ # Create an empty repository, configure three files per shard.
+ sbox.build(create_wc=False, empty=True)
+ patch_format(sbox.repo_dir, shard_size=3)
+
+ inc_backup_dir, inc_backup_url = sbox.add_repo_path('incremental-backup')
+
+ # Nothing really exciting for the empty repository.
+ expected_full = [
+ "* Copied revision 0.\n"
+ ]
+ expected_incremental = [
+ "* Copied revision 0.\n",
+ ]
+
+ backup_dir, backup_url = sbox.add_repo_path('backup-0')
+ svntest.actions.run_and_verify_svnadmin(expected_full, [],
+ 'hotcopy',
+ sbox.repo_dir, backup_dir)
+ svntest.actions.run_and_verify_svnadmin(expected_incremental, [],
+ 'hotcopy', '--incremental',
+ sbox.repo_dir, inc_backup_dir)
+
+ # Commit three revisions. After this step we have a full shard
+ # (r0, r1, r2) and the second shard (r3) with a single revision.
+ for i in range(3):
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir',
+ '-m', svntest.main.make_log_msg(),
+ sbox.repo_url + '/dir-%i' % i)
+ expected_full = [
+ "* Copied revision 0.\n",
+ "* Copied revision 1.\n",
+ "* Copied revision 2.\n",
+ "* Copied revision 3.\n",
+ ]
+ expected_incremental = [
+ "* Copied revision 1.\n",
+ "* Copied revision 2.\n",
+ "* Copied revision 3.\n",
+ ]
+
+ backup_dir, backup_url = sbox.add_repo_path('backup-1')
+ svntest.actions.run_and_verify_svnadmin(expected_full, [],
+ 'hotcopy',
+ sbox.repo_dir, backup_dir)
+ svntest.actions.run_and_verify_svnadmin(expected_incremental, [],
+ 'hotcopy', '--incremental',
+ sbox.repo_dir, inc_backup_dir)
+
+ # Pack everything (r3 is still unpacked) and hotcopy again. In this case,
+ # the --incremental output should track the incoming (r0, r1, r2) pack and
+ # should not mention r3, because it is already a part of the destination
+ # and is *not* a part of the incoming pack.
+ svntest.actions.run_and_verify_svnadmin(None, [], 'pack',
+ sbox.repo_dir)
+ expected_full = [
+ "* Copied revisions from 0 to 2.\n",
+ "* Copied revision 3.\n",
+ ]
+ expected_incremental = [
+ "* Copied revisions from 0 to 2.\n",
+ ]
+
+ backup_dir, backup_url = sbox.add_repo_path('backup-2')
+ svntest.actions.run_and_verify_svnadmin(expected_full, [],
+ 'hotcopy',
+ sbox.repo_dir, backup_dir)
+ svntest.actions.run_and_verify_svnadmin(expected_incremental, [],
+ 'hotcopy', '--incremental',
+ sbox.repo_dir, inc_backup_dir)
+
+ # Fill the second shard, pack again, commit several unpacked revisions
+ # on top of it. Rerun the hotcopy and check the progress output.
+ for i in range(4, 6):
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir',
+ '-m', svntest.main.make_log_msg(),
+ sbox.repo_url + '/dir-%i' % i)
+
+ svntest.actions.run_and_verify_svnadmin(None, [], 'pack',
+ sbox.repo_dir)
+
+ for i in range(6, 8):
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir',
+ '-m', svntest.main.make_log_msg(),
+ sbox.repo_url + '/dir-%i' % i)
+ expected_full = [
+ "* Copied revisions from 0 to 2.\n",
+ "* Copied revisions from 3 to 5.\n",
+ "* Copied revision 6.\n",
+ "* Copied revision 7.\n",
+ ]
+ expected_incremental = [
+ "* Copied revisions from 3 to 5.\n",
+ "* Copied revision 6.\n",
+ "* Copied revision 7.\n",
+ ]
+
+ backup_dir, backup_url = sbox.add_repo_path('backup-3')
+ svntest.actions.run_and_verify_svnadmin(expected_full, [],
+ 'hotcopy',
+ sbox.repo_dir, backup_dir)
+ svntest.actions.run_and_verify_svnadmin(expected_incremental, [],
+ 'hotcopy', '--incremental',
+ sbox.repo_dir, inc_backup_dir)
+
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+def fsfs_hotcopy_progress_with_revprop_changes(sbox):
+ "incremental hotcopy progress with changed revprops"
+
+ # The progress output can be affected by the --fsfs-packing
+ # option, so skip the test if that is the case.
+ if svntest.main.options.fsfs_packing:
+ raise svntest.Skip('fsfs packing set')
+
+ # Create an empty repository, commit several revisions and hotcopy it.
+ sbox.build(create_wc=False, empty=True)
+
+ for i in range(6):
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir',
+ '-m', svntest.main.make_log_msg(),
+ sbox.repo_url + '/dir-%i' % i)
+ expected_output = [
+ "* Copied revision 0.\n",
+ "* Copied revision 1.\n",
+ "* Copied revision 2.\n",
+ "* Copied revision 3.\n",
+ "* Copied revision 4.\n",
+ "* Copied revision 5.\n",
+ "* Copied revision 6.\n",
+ ]
+
+ backup_dir, backup_url = sbox.add_repo_path('backup')
+ svntest.actions.run_and_verify_svnadmin(expected_output, [],
+ 'hotcopy',
+ sbox.repo_dir, backup_dir)
+
+ # Amend a few log messages in the source, run the --incremental hotcopy.
+ # The progress output should only mention the corresponding revisions.
+ revprop_file = sbox.get_tempname()
+ svntest.main.file_write(revprop_file, "Modified log message.")
+
+ for i in [1, 3, 6]:
+ svntest.actions.run_and_verify_svnadmin(None, [],
+ 'setrevprop',
+ sbox.repo_dir, '-r', i,
+ 'svn:log', revprop_file)
+ expected_output = [
+ "* Copied revision 1.\n",
+ "* Copied revision 3.\n",
+ "* Copied revision 6.\n",
+ ]
+ svntest.actions.run_and_verify_svnadmin(expected_output, [],
+ 'hotcopy', '--incremental',
+ sbox.repo_dir, backup_dir)
+
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+def fsfs_hotcopy_progress_old(sbox):
+ "hotcopy --compatible-version=1.3 progress"
+
+ sbox.build(create_wc=False, empty=True, minor_version=3)
+
+ inc_backup_dir, inc_backup_url = sbox.add_repo_path('incremental-backup')
+
+ # Nothing really exciting for the empty repository.
+ expected_full = [
+ "* Copied revision 0.\n"
+ ]
+ expected_incremental = [
+ "* Copied revision 0.\n",
+ ]
+
+ backup_dir, backup_url = sbox.add_repo_path('backup-0')
+ svntest.actions.run_and_verify_svnadmin(expected_full, [],
+ 'hotcopy',
+ sbox.repo_dir, backup_dir)
+ svntest.actions.run_and_verify_svnadmin(expected_incremental, [],
+ 'hotcopy', '--incremental',
+ sbox.repo_dir, inc_backup_dir)
+
+ # Commit three revisions, hotcopy and check the progress output.
+ for i in range(3):
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir',
+ '-m', svntest.main.make_log_msg(),
+ sbox.repo_url + '/dir-%i' % i)
+
+ expected_full = [
+ "* Copied revision 0.\n",
+ "* Copied revision 1.\n",
+ "* Copied revision 2.\n",
+ "* Copied revision 3.\n",
+ ]
+ expected_incremental = [
+ "* Copied revision 1.\n",
+ "* Copied revision 2.\n",
+ "* Copied revision 3.\n",
+ ]
+
+ backup_dir, backup_url = sbox.add_repo_path('backup-1')
+ svntest.actions.run_and_verify_svnadmin(expected_full, [],
+ 'hotcopy',
+ sbox.repo_dir, backup_dir)
+ svntest.actions.run_and_verify_svnadmin(expected_incremental, [],
+ 'hotcopy', '--incremental',
+ sbox.repo_dir, inc_backup_dir)
+
+
+@SkipUnless(svntest.main.fs_has_unique_freeze)
+def freeze_same_uuid(sbox):
+ "freeze multiple repositories with same UUID"
+
+ sbox.build(create_wc=False)
+
+ first_repo_dir, _ = sbox.add_repo_path('first')
+ second_repo_dir, _ = sbox.add_repo_path('second')
+
+ # Test that 'svnadmin freeze A (svnadmin freeze B)' does not deadlock for
+ # new FSFS formats, even if 'A' and 'B' share the same UUID. Create two
+ # repositories by loading the same dump file, ...
+ svntest.main.create_repos(first_repo_dir)
+ svntest.main.create_repos(second_repo_dir)
+
+ dump_path = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svnadmin_tests_data',
+ 'skeleton_repos.dump')
+ dump_contents = open(dump_path, 'rb').readlines()
+ svntest.actions.run_and_verify_load(first_repo_dir, dump_contents)
+ svntest.actions.run_and_verify_load(second_repo_dir, dump_contents)
+
+ # ...and execute the 'svnadmin freeze -F' command.
+ arg_file = sbox.get_tempname()
+ svntest.main.file_write(arg_file,
+ "%s\n%s\n" % (first_repo_dir, second_repo_dir))
+
+ svntest.actions.run_and_verify_svnadmin(None, None,
+ 'freeze', '-F', arg_file, '--',
+ sys.executable, '-c', 'True')
+
+
+@Skip(svntest.main.is_fs_type_fsx)
+def upgrade(sbox):
+ "upgrade --compatible-version=1.3"
+
+ sbox.build(create_wc=False, minor_version=3)
+ svntest.actions.run_and_verify_svnadmin(None, [], "upgrade",
+ sbox.repo_dir)
+ # Does the repository work after upgrade?
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 2.\n'], [], 'mkdir',
+ '-m', svntest.main.make_log_msg(),
+ sbox.repo_url + '/dir')
+
+def load_txdelta(sbox):
+ "exercising svn_txdelta_target on BDB"
+
+ sbox.build(empty=True)
+
+  # This dumpfile produced a BDB repository that generated checksum
+ # mismatches on read caused by the improper handling of
+ # svn_txdelta_target ops. The bug was fixed by r1640832.
+
+ dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svnadmin_tests_data',
+ 'load_txdelta.dump.gz')
+ dumpfile = gzip.open(dumpfile_location, "rb").readlines()
+
+ load_dumpstream(sbox, dumpfile)
+
+ # Verify would fail with a checksum mismatch:
+ # * Error verifying revision 14.
+ # svnadmin: E200014: MD5 checksum mismatch on representation 'r':
+ # expected: 5182e8876ed894dc7fe28f6ff5b2fee6
+ # actual: 5121f82875508863ad70daa8244e6947
+
+ exit_code, output, errput = svntest.main.run_svnadmin("verify", sbox.repo_dir)
+ if errput:
+ raise SVNUnexpectedStderr(errput)
+ if svntest.verify.verify_outputs(
+ "Output of 'svnadmin verify' is unexpected.", None, output, None,
+ ".*Verified revision *"):
+ raise svntest.Failure
+
+@Issues(4563)
+def load_no_svndate_r0(sbox):
+ "load without svn:date on r0"
+
+ sbox.build(create_wc=False, empty=True)
+
+  # svn:date exists
+ svntest.actions.run_and_verify_svnlook([' svn:date\n'], [],
+ 'proplist', '--revprop', '-r0',
+ sbox.repo_dir)
+
+ dump_old = [b"SVN-fs-dump-format-version: 2\n", b"\n",
+ b"UUID: bf52886d-358d-4493-a414-944a6e5ad4f5\n", b"\n",
+ b"Revision-number: 0\n",
+ b"Prop-content-length: 10\n",
+ b"Content-length: 10\n", b"\n",
+ b"PROPS-END\n", b"\n"]
+ svntest.actions.run_and_verify_load(sbox.repo_dir, dump_old)
+
+ # svn:date should have been removed
+ svntest.actions.run_and_verify_svnlook([], [],
+ 'proplist', '--revprop', '-r0',
+ sbox.repo_dir)
+
+# This is only supported for FSFS
+# The port to FSX is still pending, BDB won't support it.
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+def hotcopy_read_only(sbox):
+ "'svnadmin hotcopy' a read-only source repository"
+ sbox.build()
+ svntest.main.chmod_tree(sbox.repo_dir, 0, svntest.main.S_ALL_WRITE)
+
+ backup_dir, backup_url = sbox.add_repo_path('backup')
+ exit_code, output, errput = svntest.main.run_svnadmin("hotcopy",
+ sbox.repo_dir,
+ backup_dir)
+
+ # r/o repos are hard to clean up. Make it writable again.
+ svntest.main.chmod_tree(sbox.repo_dir, svntest.main.S_ALL_WRITE,
+ svntest.main.S_ALL_WRITE)
+ if errput:
+ logger.warn("Error: hotcopy failed")
+ raise SVNUnexpectedStderr(errput)
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+@SkipUnless(svntest.main.fs_has_pack)
+def fsfs_pack_non_sharded(sbox):
+ "'svnadmin pack' on a non-sharded repository"
+
+ # Configure two files per shard to trigger packing.
+ sbox.build(create_wc = False,
+ minor_version = min(svntest.main.options.server_minor_version,3))
+
+ # Skip for pre-cooked sharded repositories
+ if is_sharded(sbox.repo_dir):
+ raise svntest.Skip('sharded pre-cooked repository')
+
+ svntest.actions.run_and_verify_svnadmin(
+ None, [], "upgrade", sbox.repo_dir)
+ svntest.actions.run_and_verify_svnadmin(
+ ['svnadmin: Warning - this repository is not sharded. Packing has no effect.\n'],
+ [], "pack", sbox.repo_dir)
+
+def load_revprops(sbox):
+ "svnadmin load-revprops"
+
+ sbox.build(create_wc=False, empty=True)
+
+ dump_path = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svnadmin_tests_data',
+ 'skeleton_repos.dump')
+ dump_contents = open(dump_path, 'rb').readlines()
+ load_and_verify_dumpstream(sbox, None, [], None, False, dump_contents)
+
+ svntest.actions.run_and_verify_svnlook(['Initial setup...\n', '\n'],
+ [], 'log', '-r1', sbox.repo_dir)
+
+ # After loading the dump, amend one of the log messages in the repository.
+ input_file = sbox.get_tempname()
+ svntest.main.file_write(input_file, 'Modified log message...\n')
+
+ svntest.actions.run_and_verify_svnadmin([], [], 'setlog', '--bypass-hooks',
+ '-r1', sbox.repo_dir, input_file)
+ svntest.actions.run_and_verify_svnlook(['Modified log message...\n', '\n'],
+ [], 'log', '-r1', sbox.repo_dir)
+
+ # Load the same dump, but with 'svnadmin load-revprops'. Doing so should
+ # restore the log message to its original state.
+ svntest.main.run_command_stdin(svntest.main.svnadmin_binary, None, 0,
+ True, dump_contents, 'load-revprops',
+ sbox.repo_dir)
+
+ svntest.actions.run_and_verify_svnlook(['Initial setup...\n', '\n'],
+ [], 'log', '-r1', sbox.repo_dir)
+
+def dump_revprops(sbox):
+ "svnadmin dump-revprops"
+
+ sbox.build(create_wc=False)
+
+ # Dump revprops only.
+ exit_code, dump_contents, errput = \
+ svntest.actions.run_and_verify_svnadmin(None, [], "dump-revprops", "-q",
+ sbox.repo_dir)
+
+ # We expect the dump to contain no path changes
+ for line in dump_contents:
+ if line.find(b"Node-path: ") > -1:
+ logger.warn("Error: path change found in revprops-only dump.")
+ raise svntest.Failure
+
+ # Remember the current log message for r1
+ exit_code, log_msg, errput = \
+ svntest.actions.run_and_verify_svnlook(None, [], 'log', '-r1',
+ sbox.repo_dir)
+
+ # Now, change the log message in the repository.
+ input_file = sbox.get_tempname()
+ svntest.main.file_write(input_file, 'Modified log message...\n')
+
+ svntest.actions.run_and_verify_svnadmin([], [], 'setlog', '--bypass-hooks',
+ '-r1', sbox.repo_dir, input_file)
+ svntest.actions.run_and_verify_svnlook(['Modified log message...\n', '\n'],
+ [], 'log', '-r1', sbox.repo_dir)
+
+ # Load the same dump with 'svnadmin load-revprops'. Doing so should
+ # restore the log message to its original state.
+ svntest.main.run_command_stdin(svntest.main.svnadmin_binary, None, 0,
+ True, dump_contents, 'load-revprops',
+ sbox.repo_dir)
+
+ svntest.actions.run_and_verify_svnlook(log_msg, [], 'log', '-r1',
+ sbox.repo_dir)
+
+@XFail(svntest.main.is_fs_type_fsx)
+@Issue(4598)
+def dump_no_op_change(sbox):
+ "svnadmin dump with no-op changes"
+
+ sbox.build(create_wc=False, empty=True)
+ empty_file = sbox.get_tempname()
+ svntest.main.file_write(empty_file, '')
+
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-U', sbox.repo_url,
+ '-m', svntest.main.make_log_msg(),
+ 'put', empty_file, 'bar')
+ # Commit a no-op change.
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-U', sbox.repo_url,
+ '-m', svntest.main.make_log_msg(),
+ 'put', empty_file, 'bar')
+ # Dump and load the repository.
+ _, dump, _ = svntest.actions.run_and_verify_svnadmin(None, [],
+ 'dump', '-q',
+ sbox.repo_dir)
+ sbox2 = sbox.clone_dependent()
+ sbox2.build(create_wc=False, empty=True)
+ load_and_verify_dumpstream(sbox2, None, [], None, False, dump)
+
+ # We expect svn log -v to yield identical results for both original and
+ # reconstructed repositories. This used to fail as described in the
+ # Issue 4598 (https://issues.apache.org/jira/browse/SVN-4598), at least
+ # around r1706415.
+ #
+ # Test svn log -v for r2:
+ _, expected, _ = svntest.actions.run_and_verify_svn(None, [], 'log', '-v',
+ '-r2', sbox.repo_url)
+ found = [True for line in expected if line.find('M /bar\n') != -1]
+ if not found:
+ raise svntest.Failure
+ svntest.actions.run_and_verify_svn(expected, [], 'log', '-v',
+ '-r2', sbox2.repo_url)
+ # Test svn log -v for /bar:
+ _, expected, _ = svntest.actions.run_and_verify_svn(None, [], 'log', '-v',
+ sbox.repo_url + '/bar')
+ found = [True for line in expected if line.find('M /bar\n') != -1]
+ if not found:
+ raise svntest.Failure
+ svntest.actions.run_and_verify_svn(expected, [], 'log', '-v',
+ sbox2.repo_url + '/bar')
+
+@XFail(svntest.main.is_fs_type_bdb)
+@XFail(svntest.main.is_fs_type_fsx)
+@Issue(4623)
+def dump_no_op_prop_change(sbox):
+ "svnadmin dump with no-op property change"
+
+ sbox.build(create_wc=False, empty=True)
+ empty_file = sbox.get_tempname()
+ svntest.main.file_write(empty_file, '')
+
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-U', sbox.repo_url,
+ '-m', svntest.main.make_log_msg(),
+ 'put', empty_file, 'bar',
+ 'propset', 'pname', 'pval', 'bar')
+ # Commit a no-op property change.
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-U', sbox.repo_url,
+ '-m', svntest.main.make_log_msg(),
+ 'propset', 'pname', 'pval', 'bar')
+ # Dump and load the repository.
+ _, dump, _ = svntest.actions.run_and_verify_svnadmin(None, [],
+ 'dump', '-q',
+ sbox.repo_dir)
+ sbox2 = sbox.clone_dependent()
+ sbox2.build(create_wc=False, empty=True)
+ load_and_verify_dumpstream(sbox2, None, [], None, False, dump)
+
+ # Test svn log -v for r2:
+ _, expected, _ = svntest.actions.run_and_verify_svn(None, [], 'log', '-v',
+ '-r2', sbox.repo_url)
+ found = [True for line in expected if line.find('M /bar\n') != -1]
+ if not found:
+ raise svntest.Failure
+ svntest.actions.run_and_verify_svn(expected, [], 'log', '-v',
+ '-r2', sbox2.repo_url)
+ # Test svn log -v for /bar:
+ _, expected, _ = svntest.actions.run_and_verify_svn(None, [], 'log', '-v',
+ sbox.repo_url + '/bar')
+ found = [True for line in expected if line.find('M /bar\n') != -1]
+ if not found:
+ raise svntest.Failure
+ svntest.actions.run_and_verify_svn(expected, [], 'log', '-v',
+ sbox2.repo_url + '/bar')
+
+def load_no_flush_to_disk(sbox):
+ "svnadmin load --no-flush-to-disk"
+
+ sbox.build(empty=True)
+
+ # Can't test the "not flushing to disk part", but loading the
+ # dump should work.
+ dump = clean_dumpfile()
+ expected = [
+ svntest.wc.State('', {
+ 'A' : svntest.wc.StateItem(contents="text\n",
+ props={'svn:keywords': 'Id'})
+ })
+ ]
+ load_and_verify_dumpstream(sbox, [], [], expected, True, dump,
+ '--no-flush-to-disk', '--ignore-uuid')
+
+def dump_to_file(sbox):
+ "svnadmin dump --file ARG"
+
+ sbox.build(create_wc=False, empty=False)
+ expected_dump = svntest.actions.run_and_verify_dump(sbox.repo_dir)
+
+ file = sbox.get_tempname()
+ svntest.actions.run_and_verify_svnadmin2([],
+ ["* Dumped revision 0.\n",
+ "* Dumped revision 1.\n"],
+ 0, 'dump', '--file', file,
+ sbox.repo_dir)
+ actual_dump = open(file, 'rb').readlines()
+ svntest.verify.compare_dump_files(None, None, expected_dump, actual_dump)
+
+ # Test that svnadmin dump --file overwrites existing files.
+ file = sbox.get_tempname()
+ svntest.main.file_write(file, '')
+ svntest.actions.run_and_verify_svnadmin2([],
+ ["* Dumped revision 0.\n",
+ "* Dumped revision 1.\n"],
+ 0, 'dump', '--file', file,
+ sbox.repo_dir)
+ actual_dump = open(file, 'rb').readlines()
+ svntest.verify.compare_dump_files(None, None, expected_dump, actual_dump)
+
+def load_from_file(sbox):
+ "svnadmin load --file ARG"
+
+ sbox.build(empty=True)
+
+ file = sbox.get_tempname()
+ open(file, 'wb').writelines(clean_dumpfile())
+ svntest.actions.run_and_verify_svnadmin2(None, [],
+ 0, 'load', '--file', file,
+ '--ignore-uuid', sbox.repo_dir)
+ expected_tree = \
+ svntest.wc.State('', {
+ 'A' : svntest.wc.StateItem(contents="text\n",
+ props={'svn:keywords': 'Id'})
+ })
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'update', sbox.wc_dir)
+ svntest.actions.verify_disk(sbox.wc_dir, expected_tree, check_props=True)
+
+def dump_exclude(sbox):
+ "svnadmin dump with excluded paths"
+
+ sbox.build(create_wc=False)
+
+ # Dump repository with /A/D/H and /A/B/E paths excluded.
+ _, dump, _ = svntest.actions.run_and_verify_svnadmin(None, [],
+ 'dump', '-q',
+ '--exclude', '/A/D/H',
+ '--exclude', '/A/B/E',
+ sbox.repo_dir)
+
+ # Load repository from dump.
+ sbox2 = sbox.clone_dependent()
+ sbox2.build(create_wc=False, empty=True)
+ load_and_verify_dumpstream(sbox2, None, [], None, False, dump)
+
+ # Check log.
+ expected_output = svntest.verify.RegexListOutput([
+ '-+\\n',
+ 'r1\ .*\n',
+ # '/A/D/H' and '/A/B/E' is not added.
+ re.escape('Changed paths:\n'),
+ re.escape(' A /A\n'),
+ re.escape(' A /A/B\n'),
+ re.escape(' A /A/B/F\n'),
+ re.escape(' A /A/B/lambda\n'),
+ re.escape(' A /A/C\n'),
+ re.escape(' A /A/D\n'),
+ re.escape(' A /A/D/G\n'),
+ re.escape(' A /A/D/G/pi\n'),
+ re.escape(' A /A/D/G/rho\n'),
+ re.escape(' A /A/D/G/tau\n'),
+ re.escape(' A /A/D/gamma\n'),
+ re.escape(' A /A/mu\n'),
+ re.escape(' A /iota\n'),
+ '-+\\n'
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'log', '-v', '-q', sbox2.repo_url)
+
+def dump_exclude_copysource(sbox):
+ "svnadmin dump with excluded copysource"
+
+ sbox.build(create_wc=False, empty=True)
+
+ # Create default repository structure.
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], "mkdir",
+ sbox.repo_url + '/trunk',
+ sbox.repo_url + '/branches',
+ sbox.repo_url + '/tags',
+ "-m", "Create repository structure.")
+
+ # Create a branch.
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], "copy",
+ sbox.repo_url + '/trunk',
+ sbox.repo_url + '/branches/branch1',
+ "-m", "Create branch.")
+
+ # Dump repository with /trunk excluded.
+ _, dump, _ = svntest.actions.run_and_verify_svnadmin(None, [],
+ 'dump', '-q',
+ '--exclude', '/trunk',
+ sbox.repo_dir)
+
+ # Load repository from dump.
+ sbox2 = sbox.clone_dependent()
+ sbox2.build(create_wc=False, empty=True)
+ load_and_verify_dumpstream(sbox2, None, [], None, False, dump)
+
+ # Check log.
+ expected_output = svntest.verify.RegexListOutput([
+ '-+\\n',
+ 'r2\ .*\n',
+ re.escape('Changed paths:\n'),
+ # Simple add, not copy.
+ re.escape(' A /branches/branch1\n'),
+ '-+\\n',
+ 'r1\ .*\n',
+ # '/trunk' is not added.
+ re.escape('Changed paths:\n'),
+ re.escape(' A /branches\n'),
+ re.escape(' A /tags\n'),
+ '-+\\n'
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'log', '-v', '-q', sbox2.repo_url)
+
+def dump_include(sbox):
+ "svnadmin dump with included paths"
+
+ sbox.build(create_wc=False, empty=True)
+
+ # Create a couple of directories.
+ # Note that we can't use greek tree as it contains only two top-level
+ # nodes. Including non top-level nodes (e.g. '--include /A/B/E') will
+ # produce unloadable dump for now.
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], "mkdir",
+ sbox.repo_url + '/A',
+ sbox.repo_url + '/B',
+ sbox.repo_url + '/C',
+ "-m", "Create folder.")
+
+ # Dump repository with /A and /C paths included.
+ _, dump, _ = svntest.actions.run_and_verify_svnadmin(None, [],
+ 'dump', '-q',
+ '--include', '/A',
+ '--include', '/C',
+ sbox.repo_dir)
+
+ # Load repository from dump.
+ sbox2 = sbox.clone_dependent()
+ sbox2.build(create_wc=False, empty=True)
+ load_and_verify_dumpstream(sbox2, None, [], None, False, dump)
+
+ # Check log.
+ expected_output = svntest.verify.RegexListOutput([
+ '-+\\n',
+ 'r1\ .*\n',
+ # '/B' is not added.
+ re.escape('Changed paths:\n'),
+ re.escape(' A /A\n'),
+ re.escape(' A /C\n'),
+ '-+\\n'
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'log', '-v', '-q', sbox2.repo_url)
+
+def dump_not_include_copysource(sbox):
+ "svnadmin dump with not included copysource"
+
+ sbox.build(create_wc=False, empty=True)
+
+ # Create default repository structure.
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], "mkdir",
+ sbox.repo_url + '/trunk',
+ sbox.repo_url + '/branches',
+ sbox.repo_url + '/tags',
+ "-m", "Create repository structure.")
+
+ # Create a branch.
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], "copy",
+ sbox.repo_url + '/trunk',
+ sbox.repo_url + '/branches/branch1',
+ "-m", "Create branch.")
+
+ # Dump repository with only /branches included.
+ _, dump, _ = svntest.actions.run_and_verify_svnadmin(None, [],
+ 'dump', '-q',
+ '--include', '/branches',
+ sbox.repo_dir)
+
+ # Load repository from dump.
+ sbox2 = sbox.clone_dependent()
+ sbox2.build(create_wc=False, empty=True)
+ load_and_verify_dumpstream(sbox2, None, [], None, False, dump)
+
+ # Check log.
+ expected_output = svntest.verify.RegexListOutput([
+ '-+\\n',
+ 'r2\ .*\n',
+ re.escape('Changed paths:\n'),
+ # Simple add, not copy.
+ re.escape(' A /branches/branch1\n'),
+ '-+\\n',
+ 'r1\ .*\n',
+ # Only '/branches' is added in r1.
+ re.escape('Changed paths:\n'),
+ re.escape(' A /branches\n'),
+ '-+\\n'
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'log', '-v', '-q', sbox2.repo_url)
+
+def dump_exclude_by_pattern(sbox):
+ "svnadmin dump with paths excluded by pattern"
+
+ sbox.build(create_wc=False, empty=True)
+
+ # Create a couple of directories.
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], "mkdir",
+ sbox.repo_url + '/aaa',
+ sbox.repo_url + '/aab',
+ sbox.repo_url + '/aac',
+ sbox.repo_url + '/bbc',
+ "-m", "Create repository structure.")
+
+ # Dump with paths excluded by pattern.
+ _, dump, _ = svntest.actions.run_and_verify_svnadmin(None, [],
+ 'dump', '-q',
+ '--exclude', '/aa?',
+ '--pattern',
+ sbox.repo_dir)
+
+ # Load repository from dump.
+ sbox2 = sbox.clone_dependent()
+ sbox2.build(create_wc=False, empty=True)
+ load_and_verify_dumpstream(sbox2, None, [], None, False, dump)
+
+ # Check log.
+ expected_output = svntest.verify.RegexListOutput([
+ '-+\\n',
+ 'r1\ .*\n',
+ re.escape('Changed paths:\n'),
+ # Only '/bbc' is added in r1.
+ re.escape(' A /bbc\n'),
+ '-+\\n'
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'log', '-v', '-q', sbox2.repo_url)
+
+def dump_include_by_pattern(sbox):
+ "svnadmin dump with paths included by pattern"
+
+ sbox.build(create_wc=False, empty=True)
+
+ # Create a couple of directories.
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], "mkdir",
+ sbox.repo_url + '/aaa',
+ sbox.repo_url + '/aab',
+ sbox.repo_url + '/aac',
+ sbox.repo_url + '/bbc',
+ "-m", "Create repository structure.")
+
+ # Dump with paths included by pattern.
+ _, dump, _ = svntest.actions.run_and_verify_svnadmin(None, [],
+ 'dump', '-q',
+ '--include', '/aa?',
+ '--pattern',
+ sbox.repo_dir)
+
+ # Load repository from dump.
+ sbox2 = sbox.clone_dependent()
+ sbox2.build(create_wc=False, empty=True)
+ load_and_verify_dumpstream(sbox2, None, [], None, False, dump)
+
+ # Check log.
+ expected_output = svntest.verify.RegexListOutput([
+ '-+\\n',
+ 'r1\ .*\n',
+ # '/bbc' is not added.
+ re.escape('Changed paths:\n'),
+ re.escape(' A /aaa\n'),
+ re.escape(' A /aab\n'),
+ re.escape(' A /aac\n'),
+ '-+\\n'
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'log', '-v', '-q', sbox2.repo_url)
+
+def dump_exclude_all_rev_changes(sbox):
+ "svnadmin dump with all revision changes excluded"
+
+ sbox.build(create_wc=False, empty=True)
+
+ # Create a couple of directories (r1).
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], "mkdir",
+ sbox.repo_url + '/r1a',
+ sbox.repo_url + '/r1b',
+ sbox.repo_url + '/r1c',
+ "-m", "Revision 1.")
+
+ # Create a couple of directories (r2).
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], "mkdir",
+ sbox.repo_url + '/r2a',
+ sbox.repo_url + '/r2b',
+ sbox.repo_url + '/r2c',
+ "-m", "Revision 2.")
+
+ # Create a couple of directories (r3).
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], "mkdir",
+ sbox.repo_url + '/r3a',
+ sbox.repo_url + '/r3b',
+ sbox.repo_url + '/r3c',
+ "-m", "Revision 3.")
+
+ # Dump with paths excluded by pattern.
+ _, dump, _ = svntest.actions.run_and_verify_svnadmin(None, [],
+ 'dump', '-q',
+ '--exclude', '/r2?',
+ '--pattern',
+ sbox.repo_dir)
+
+ # Load repository from dump.
+ sbox2 = sbox.clone_dependent()
+ sbox2.build(create_wc=False, empty=True)
+ load_and_verify_dumpstream(sbox2, None, [], None, False, dump)
+
+ # Check log. Revision properties ('svn:log' etc.) should be empty for r2.
+ expected_output = svntest.verify.RegexListOutput([
+ '-+\\n',
+ 'r3\ |\ jrandom\ |\ .*\ |\ 1\ line\\n',
+ re.escape('Changed paths:'),
+ re.escape(' A /r3a'),
+ re.escape(' A /r3b'),
+ re.escape(' A /r3c'),
+ '',
+ re.escape('Revision 3.'),
+ '-+\\n',
+ re.escape('r2 | (no author) | (no date) | 1 line'),
+ '',
+ '',
+ '-+\\n',
+ 'r1\ |\ jrandom\ |\ .*\ |\ 1\ line\\n',
+ re.escape('Changed paths:'),
+ re.escape(' A /r1a'),
+ re.escape(' A /r1b'),
+ re.escape(' A /r1c'),
+ '',
+ re.escape('Revision 1.'),
+ '-+\\n',
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'log', '-v', sbox2.repo_url)
+
+def dump_invalid_filtering_option(sbox):
+ "dump with --include and --exclude simultaneously"
+
+ sbox.build(create_wc=False, empty=False)
+
+ # Attempt to dump repository with '--include' and '--exclude' options
+ # specified simultaneously.
+ expected_error = ".*: '--exclude' and '--include' options cannot be used " \
+ "simultaneously"
+ svntest.actions.run_and_verify_svnadmin(None, expected_error,
+ 'dump', '-q',
+ '--exclude', '/A/D/H',
+ '--include', '/A/B/E',
+ sbox.repo_dir)
+
+@Issue(4725)
+def load_issue4725(sbox):
+ """load that triggers issue 4725"""
+
+ sbox.build(empty=True)
+
+ sbox.simple_mkdir('subversion')
+ sbox.simple_commit()
+ sbox.simple_mkdir('subversion/trunk')
+ sbox.simple_mkdir('subversion/branches')
+ sbox.simple_commit()
+ sbox.simple_mkdir('subversion/trunk/src')
+ sbox.simple_commit()
+
+ _, dump, _ = svntest.actions.run_and_verify_svnadmin(None, [],
+ 'dump', '-q',
+ sbox.repo_dir)
+
+ sbox2 = sbox.clone_dependent()
+ sbox2.build(create_wc=False, empty=True)
+ load_and_verify_dumpstream(sbox2, None, [], None, False, dump, '-M100')
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ extra_headers,
+ extra_blockcontent,
+ inconsistent_headers,
+ empty_date,
+ dump_copied_dir,
+ dump_move_dir_modify_child,
+ dump_quiet,
+ hotcopy_dot,
+ hotcopy_format,
+ setrevprop,
+ verify_windows_paths_in_repos,
+ verify_incremental_fsfs,
+ fsfs_recover_db_current,
+ fsfs_recover_old_db_current,
+ load_with_parent_dir,
+ set_uuid,
+ reflect_dropped_renumbered_revs,
+ fsfs_recover_handle_missing_revs_or_revprops_file,
+ create_in_repo_subdir,
+ verify_with_invalid_revprops,
+ dont_drop_valid_mergeinfo_during_incremental_loads,
+ hotcopy_symlink,
+ load_bad_props,
+ verify_non_utf8_paths,
+ test_lslocks_and_rmlocks,
+ load_ranges,
+ hotcopy_incremental,
+ hotcopy_incremental_packed,
+ locking,
+ mergeinfo_race,
+ recover_old_empty,
+ verify_keep_going,
+ verify_keep_going_quiet,
+ verify_invalid_path_changes,
+ verify_denormalized_names,
+ fsfs_recover_old_non_empty,
+ fsfs_hotcopy_old_non_empty,
+ load_ignore_dates,
+ fsfs_hotcopy_old_with_id_changes,
+ verify_packed,
+ freeze_freeze,
+ verify_metadata_only,
+ verify_quickly,
+ fsfs_hotcopy_progress,
+ fsfs_hotcopy_progress_with_revprop_changes,
+ fsfs_hotcopy_progress_old,
+ freeze_same_uuid,
+ upgrade,
+ load_txdelta,
+ load_no_svndate_r0,
+ hotcopy_read_only,
+ fsfs_pack_non_sharded,
+ load_revprops,
+ dump_revprops,
+ dump_no_op_change,
+ dump_no_op_prop_change,
+ load_no_flush_to_disk,
+ dump_to_file,
+ load_from_file,
+ dump_exclude,
+ dump_exclude_copysource,
+ dump_include,
+ dump_not_include_copysource,
+ dump_exclude_by_pattern,
+ dump_include_by_pattern,
+ dump_exclude_all_rev_changes,
+ dump_invalid_filtering_option,
+ load_issue4725,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/svnadmin_tests_data/load_txdelta.dump.gz b/subversion/tests/cmdline/svnadmin_tests_data/load_txdelta.dump.gz
new file mode 100644
index 0000000..900d357
--- /dev/null
+++ b/subversion/tests/cmdline/svnadmin_tests_data/load_txdelta.dump.gz
Binary files differ
diff --git a/subversion/tests/cmdline/svnadmin_tests_data/mergeinfo_included.dump b/subversion/tests/cmdline/svnadmin_tests_data/mergeinfo_included.dump
new file mode 100644
index 0000000..911ab84
--- /dev/null
+++ b/subversion/tests/cmdline/svnadmin_tests_data/mergeinfo_included.dump
@@ -0,0 +1,434 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 69b8a793-d874-4ec8-8b96-2ed378223a29
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2007-11-26T10:18:12.708107Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 117
+Content-length: 117
+
+K 7
+svn:log
+V 14
+Creating trunk
+K 10
+svn:author
+V 8
+stylesen
+K 8
+svn:date
+V 27
+2007-11-26T10:18:12.768640Z
+PROPS-END
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 119
+Content-length: 119
+
+K 7
+svn:log
+V 16
+commit all files
+K 10
+svn:author
+V 8
+stylesen
+K 8
+svn:date
+V 27
+2007-11-26T10:18:13.169532Z
+PROPS-END
+
+Node-path: trunk/1
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 15
+Text-content-md5: 88c16a56754e0f17a93d269ae74dde9b
+Content-length: 25
+
+PROPS-END
+This is file 1
+
+
+Node-path: trunk/2
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 15
+Text-content-md5: db06069ef1c9f40986ffa06db4fe8fd7
+Content-length: 25
+
+PROPS-END
+This is file 2
+
+
+Node-path: trunk/3
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 15
+Text-content-md5: 95227e10e2c33771e1c1379b17330c86
+Content-length: 25
+
+PROPS-END
+This is file 3
+
+
+Node-path: trunk/4
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 15
+Text-content-md5: 601120837709ea5ee34dadddf5289113
+Content-length: 25
+
+PROPS-END
+This is file 4
+
+
+Node-path: trunk/5
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 15
+Text-content-md5: 657c74cf3521f7760f9203ac7d9d2cdb
+Content-length: 25
+
+PROPS-END
+This is file 5
+
+
+Revision-number: 3
+Prop-content-length: 121
+Content-length: 121
+
+K 7
+svn:log
+V 18
+commit all newfile
+K 10
+svn:author
+V 8
+stylesen
+K 8
+svn:date
+V 27
+2007-11-26T10:18:14.196863Z
+PROPS-END
+
+Node-path: trunk/newdir
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/newdir/newfile1
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fcda1dbcda65ae79081ea438825d53c6
+Content-length: 36
+
+PROPS-END
+This is file 'new file' 1
+
+
+Node-path: trunk/newdir/newfile2
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: 19650182d50067647653c23d560e106d
+Content-length: 36
+
+PROPS-END
+This is file 'new file' 2
+
+
+Node-path: trunk/newdir/newfile3
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: 879fa1f1f5f8045d78d147b6dedceca0
+Content-length: 36
+
+PROPS-END
+This is file 'new file' 3
+
+
+Node-path: trunk/newdir/newfile4
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: cb35ffdf6115d37cad186931e346fb73
+Content-length: 36
+
+PROPS-END
+This is file 'new file' 4
+
+
+Revision-number: 4
+Prop-content-length: 118
+Content-length: 118
+
+K 7
+svn:log
+V 15
+Creating branch
+K 10
+svn:author
+V 8
+stylesen
+K 8
+svn:date
+V 27
+2007-11-26T10:18:15.138825Z
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 3
+Node-copyfrom-path: trunk
+Prop-content-length: 34
+Content-length: 34
+
+K 13
+svn:mergeinfo
+V 0
+
+PROPS-END
+
+
+Revision-number: 5
+Prop-content-length: 135
+Content-length: 135
+
+K 7
+svn:log
+V 32
+commit change to a file in trunk
+K 10
+svn:author
+V 8
+stylesen
+K 8
+svn:date
+V 27
+2007-11-26T10:18:16.190751Z
+PROPS-END
+
+Node-path: trunk/1
+Node-kind: file
+Node-action: change
+Text-content-length: 22
+Text-content-md5: 3c8ccd677a521a019dc9eb5cfc41a6e4
+Content-length: 22
+
+This is file modified
+
+
+Revision-number: 6
+Prop-content-length: 142
+Content-length: 142
+
+K 7
+svn:log
+V 39
+commit change to a file in trunk newdir
+K 10
+svn:author
+V 8
+stylesen
+K 8
+svn:date
+V 27
+2007-11-26T10:18:17.143981Z
+PROPS-END
+
+Node-path: trunk/newdir/newfile1
+Node-kind: file
+Node-action: change
+Text-content-length: 26
+Text-content-md5: a60fe71213564df1058cdc62da340ed2
+Content-length: 26
+
+This is newfile1 modified
+
+
+Node-path: trunk/newdir/newfile2
+Node-kind: file
+Node-action: change
+Text-content-length: 26
+Text-content-md5: f677890258bef93d3098f9cabf3552b7
+Content-length: 26
+
+This is newfile2 modified
+
+
+Revision-number: 7
+Prop-content-length: 119
+Content-length: 119
+
+K 7
+svn:log
+V 16
+Creating branch1
+K 10
+svn:author
+V 8
+stylesen
+K 8
+svn:date
+V 27
+2007-11-26T10:18:19.150199Z
+PROPS-END
+
+Node-path: branch1
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 6
+Node-copyfrom-path: trunk
+Prop-content-length: 34
+Content-length: 34
+
+K 13
+svn:mergeinfo
+V 0
+
+PROPS-END
+
+
+Revision-number: 8
+Prop-content-length: 104
+Content-length: 104
+
+K 7
+svn:log
+V 2
+ww
+K 10
+svn:author
+V 8
+stylesen
+K 8
+svn:date
+V 27
+2007-11-26T10:18:20.136412Z
+PROPS-END
+
+Node-path: branch/newdir1
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 9
+Prop-content-length: 124
+Content-length: 124
+
+K 7
+svn:log
+V 21
+commit change to repo
+K 10
+svn:author
+V 8
+stylesen
+K 8
+svn:date
+V 27
+2007-11-26T10:18:22.167250Z
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 45
+Content-length: 45
+
+K 13
+svn:mergeinfo
+V 10
+/trunk:4-6
+PROPS-END
+
+
+Node-path: branch/1
+Node-kind: file
+Node-action: change
+Text-content-length: 22
+Text-content-md5: 3c8ccd677a521a019dc9eb5cfc41a6e4
+Content-length: 22
+
+This is file modified
+
+
+Node-path: branch/newdir/newfile1
+Node-kind: file
+Node-action: change
+Text-content-length: 26
+Text-content-md5: a60fe71213564df1058cdc62da340ed2
+Content-length: 26
+
+This is newfile1 modified
+
+
+Node-path: branch/newdir/newfile2
+Node-kind: file
+Node-action: change
+Text-content-length: 26
+Text-content-md5: f677890258bef93d3098f9cabf3552b7
+Content-length: 26
+
+This is newfile2 modified
+
+
+Node-path: branch1
+Node-kind: dir
+Node-action: change
+Prop-content-length: 46
+Content-length: 46
+
+K 13
+svn:mergeinfo
+V 11
+/branch:5-8
+PROPS-END
+
+
+Node-path: branch1/newdir1
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 8
+Node-copyfrom-path: branch/newdir1
+
+
diff --git a/subversion/tests/cmdline/svnadmin_tests_data/mergeinfo_included_full.dump b/subversion/tests/cmdline/svnadmin_tests_data/mergeinfo_included_full.dump
new file mode 100644
index 0000000..443afe9
--- /dev/null
+++ b/subversion/tests/cmdline/svnadmin_tests_data/mergeinfo_included_full.dump
@@ -0,0 +1,713 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 4fdb8097-d6b7-af4b-b818-c79c3d7082dc
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2010-03-22T14:28:02.578125Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 115
+Content-length: 115
+
+K 7
+svn:log
+V 14
+Initial import
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:27:13.171875Z
+PROPS-END
+
+Node-path: branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: d1fa4a3ced98961674a441930a51f2d3
+Text-content-sha1: b347d1da69df9a6a70433ceeaa0d46c8483e8c03
+Content-length: 36
+
+PROPS-END
+This is the file 'alpha'.
+
+
+Node-path: trunk/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-content-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+Content-length: 35
+
+PROPS-END
+This is the file 'beta'.
+
+
+Node-path: trunk/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 27
+Text-content-md5: 911c7a8d869b8c1e566f57da54d889c6
+Text-content-sha1: 784a9298366863da2b65ebf82b4e1123755a2421
+Content-length: 37
+
+PROPS-END
+This is the file 'lambda'.
+
+
+Node-path: trunk/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: trunk/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: trunk/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: trunk/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: trunk/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: trunk/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: trunk/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: 412138bd677d64cd1c32fafbffe6245d
+Text-content-sha1: 74b75d7f2e1a0292f17d5a57c570bd89783f5d1c
+Content-length: 36
+
+PROPS-END
+This is the file 'gamma'.
+
+
+Node-path: trunk/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Content-length: 33
+
+PROPS-END
+This is the file 'mu'.
+
+
+Revision-number: 2
+Prop-content-length: 119
+Content-length: 119
+
+K 7
+svn:log
+V 18
+Some work on trunk
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:31:23.109375Z
+PROPS-END
+
+Node-path: trunk/D/gamma
+Node-kind: file
+Node-action: change
+Text-content-length: 12
+Text-content-md5: f8a6701de14ec3fcfd9f2fe595e9c9ed
+Text-content-sha1: 8b787bd9293c8b962c7a637a9fdbf627fe68610e
+Content-length: 12
+
+new content
+
+
+Revision-number: 3
+Prop-content-length: 119
+Content-length: 119
+
+K 7
+svn:log
+V 18
+Some work on trunk
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:31:42.125000Z
+PROPS-END
+
+Node-path: trunk/mu
+Node-kind: file
+Node-action: change
+Text-content-length: 12
+Text-content-md5: f8a6701de14ec3fcfd9f2fe595e9c9ed
+Text-content-sha1: 8b787bd9293c8b962c7a637a9fdbf627fe68610e
+Content-length: 12
+
+new content
+
+
+Revision-number: 4
+Prop-content-length: 130
+Content-length: 130
+
+K 7
+svn:log
+V 29
+Create branch B1 from trunk@3
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:32:11.421875Z
+PROPS-END
+
+Node-path: branches/B1
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 3
+Node-copyfrom-path: trunk
+
+
+Revision-number: 5
+Prop-content-length: 119
+Content-length: 119
+
+K 7
+svn:log
+V 18
+More work on trunk
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:33:01.734375Z
+PROPS-END
+
+Node-path: trunk/B/E/alpha
+Node-action: delete
+
+
+Revision-number: 6
+Prop-content-length: 119
+Content-length: 119
+
+K 7
+svn:log
+V 18
+More work on trunk
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:34:02.734375Z
+PROPS-END
+
+Node-path: trunk/B/E/new_alpha
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 5
+Node-copyfrom-path: trunk/B/E/beta
+Text-copy-source-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-copy-source-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+
+
+Node-path: trunk/B/E/beta
+Node-action: delete
+
+
+Revision-number: 7
+Prop-content-length: 138
+Content-length: 138
+
+K 7
+svn:log
+V 37
+Create another branch B2 from trunk@6
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:34:28.500000Z
+PROPS-END
+
+Node-path: branches/B2
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 6
+Node-copyfrom-path: trunk
+
+
+Revision-number: 8
+Prop-content-length: 119
+Content-length: 119
+
+K 7
+svn:log
+V 18
+More work on trunk
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:38:53.468750Z
+PROPS-END
+
+Node-path: trunk/B/E/new_alpha
+Node-kind: file
+Node-action: change
+Text-content-length: 61
+Text-content-md5: ac5f7c1c890095cafdb4e2fa0ff2680b
+Text-content-sha1: dc90ed6c9f5254772c7b17f5e710a7c342623390
+Content-length: 61
+
+This is the file 'beta'.
+this is the new alpha based on beta
+
+
+Revision-number: 9
+Prop-content-length: 119
+Content-length: 119
+
+K 7
+svn:log
+V 18
+More work on trunk
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:39:10.625000Z
+PROPS-END
+
+Node-path: trunk/D/H/chi
+Node-kind: file
+Node-action: change
+Text-content-length: 12
+Text-content-md5: f8a6701de14ec3fcfd9f2fe595e9c9ed
+Text-content-sha1: 8b787bd9293c8b962c7a637a9fdbf627fe68610e
+Content-length: 12
+
+new content
+
+
+Revision-number: 10
+Prop-content-length: 135
+Content-length: 135
+
+K 7
+svn:log
+V 34
+Merge r6 from trunk to branches/B1
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:40:49.015625Z
+PROPS-END
+
+Node-path: branches/B1
+Node-kind: dir
+Node-action: change
+Prop-content-length: 42
+Content-length: 42
+
+K 13
+svn:mergeinfo
+V 8
+/trunk:6
+PROPS-END
+
+
+Node-path: branches/B1/B/E/new_alpha
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 6
+Node-copyfrom-path: trunk/B/E/new_alpha
+Text-copy-source-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-copy-source-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+
+
+Node-path: branches/B1/B/E/beta
+Node-action: delete
+
+
+Revision-number: 11
+Prop-content-length: 135
+Content-length: 135
+
+K 7
+svn:log
+V 34
+Merge r9 from trunk to branches/B2
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:53:44.156250Z
+PROPS-END
+
+Node-path: branches/B2
+Node-kind: dir
+Node-action: change
+Prop-content-length: 42
+Content-length: 42
+
+K 13
+svn:mergeinfo
+V 8
+/trunk:9
+PROPS-END
+
+
+Node-path: branches/B2/D/H/chi
+Node-kind: file
+Node-action: change
+Text-content-length: 12
+Text-content-md5: f8a6701de14ec3fcfd9f2fe595e9c9ed
+Text-content-sha1: 8b787bd9293c8b962c7a637a9fdbf627fe68610e
+Content-length: 12
+
+new content
+
+
+Revision-number: 12
+Prop-content-length: 111
+Content-length: 111
+
+K 7
+svn:log
+V 10
+Work on B2
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:55:17.390625Z
+PROPS-END
+
+Node-path: branches/B2/D/H/chi
+Node-kind: file
+Node-action: change
+Text-content-length: 10
+Text-content-md5: 7fb893eb43ac0ef015b2b95b88628b8c
+Text-content-sha1: a54c7be3471b59cb5be150b1f679c1d2895b00e0
+Content-length: 10
+
+B2 tweaks
+
+
+Revision-number: 13
+Prop-content-length: 132
+Content-length: 132
+
+K 7
+svn:log
+V 31
+Merge r11 and r12 from B2 to B1
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:55:51.703125Z
+PROPS-END
+
+Node-path: branches/B1
+Node-kind: dir
+Node-action: change
+Prop-content-length: 64
+Content-length: 64
+
+K 13
+svn:mergeinfo
+V 29
+/branches/B2:11-12
+/trunk:6,9
+PROPS-END
+
+
+Node-path: branches/B1/D/H/chi
+Node-kind: file
+Node-action: change
+Text-content-length: 10
+Text-content-md5: 7fb893eb43ac0ef015b2b95b88628b8c
+Text-content-sha1: a54c7be3471b59cb5be150b1f679c1d2895b00e0
+Content-length: 10
+
+B2 tweaks
+
+
+Revision-number: 14
+Prop-content-length: 143
+Content-length: 143
+
+K 7
+svn:log
+V 42
+Merge r5 from trunk/B/E to branches/B1/B/E
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T15:10:22.234375Z
+PROPS-END
+
+Node-path: branches/B1/B/E
+Node-kind: dir
+Node-action: change
+Prop-content-length: 74
+Content-length: 74
+
+K 13
+svn:mergeinfo
+V 39
+/branches/B2/B/E:11-12
+/trunk/B/E:5-6,9
+PROPS-END
+
+
+Node-path: branches/B1/B/E/alpha
+Node-action: delete
+
+
+Revision-number: 15
+Prop-content-length: 143
+Content-length: 143
+
+K 7
+svn:log
+V 42
+Merge r8 from trunk/B/E to branches/B1/B/E
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-24T15:00:17.203125Z
+PROPS-END
+
+Node-path: branches/B1/B/E
+Node-kind: dir
+Node-action: change
+Prop-content-length: 76
+Content-length: 76
+
+K 13
+svn:mergeinfo
+V 41
+/branches/B2/B/E:11-12
+/trunk/B/E:5-6,8-9
+PROPS-END
+
+
+Node-path: branches/B1/B/E/new_alpha
+Node-kind: file
+Node-action: change
+Text-content-length: 60
+Text-content-md5: b1738c908160291bb40ef9d1b8c89e82
+Text-content-sha1: ad6df4488978b8e8eade283028ab753791073a76
+Content-length: 60
+
+This is the file 'beta'.
+this is the new alpha based on beta
+
diff --git a/subversion/tests/cmdline/svnadmin_tests_data/normalization_check.dump b/subversion/tests/cmdline/svnadmin_tests_data/normalization_check.dump
new file mode 100644
index 0000000..32ae006
--- /dev/null
+++ b/subversion/tests/cmdline/svnadmin_tests_data/normalization_check.dump
@@ -0,0 +1,259 @@
+SVN-fs-dump-format-version: 2
+
+UUID: bf695de5-cd61-4024-8cb3-a12d299c7c62
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2013-11-24T02:29:36.942478Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 126
+Content-length: 126
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2013-11-24T02:34:17.982927Z
+K 7
+svn:log
+V 24
+Denormalized tree import
+PROPS-END
+
+Node-path: A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/EÌ
+Node-kind: dir
+Node-action: add
+Prop-content-length: 47
+Content-length: 47
+
+K 13
+svn:mergeinfo
+V 12
+/Q/Ã¥lpha:69
+PROPS-END
+
+
+Node-path: A/EÌ/Ã¥lpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/îöta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 128
+Content-length: 128
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2013-11-24T02:34:39.316466Z
+K 7
+svn:log
+V 26
+Modified denormalized file
+PROPS-END
+
+Node-path: A/îöta
+Node-kind: file
+Node-action: change
+Text-content-length: 9
+Text-content-md5: d2508118d0d39e198d1129d87d692d59
+Text-content-sha1: e2fb5f2139d086ded2cb600d5a91a196e76bf020
+Content-length: 9
+
+modified
+
+
+Revision-number: 3
+Prop-content-length: 126
+Content-length: 126
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2013-11-24T02:35:09.976189Z
+K 7
+svn:log
+V 24
+Modified normalized file
+PROPS-END
+
+Node-path: A/EÌ/Ã¥lpha
+Node-kind: file
+Node-action: change
+Text-content-length: 9
+Text-content-md5: d2508118d0d39e198d1129d87d692d59
+Text-content-sha1: e2fb5f2139d086ded2cb600d5a91a196e76bf020
+Content-length: 9
+
+modified
+
+
+Revision-number: 4
+Prop-content-length: 124
+Content-length: 124
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2013-11-24T02:36:11.674695Z
+K 7
+svn:log
+V 22
+Created name collision
+PROPS-END
+
+Node-path: A/EÌ/aÌŠlpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 5
+Prop-content-length: 125
+Content-length: 125
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2013-11-24T02:36:52.182891Z
+K 7
+svn:log
+V 23
+Modified colliding file
+PROPS-END
+
+Node-path: A/EÌ/aÌŠlpha
+Node-kind: file
+Node-action: change
+Text-content-length: 9
+Text-content-md5: d2508118d0d39e198d1129d87d692d59
+Text-content-sha1: e2fb5f2139d086ded2cb600d5a91a196e76bf020
+Content-length: 9
+
+modified
+
+
+Revision-number: 6
+Prop-content-length: 127
+Content-length: 127
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2013-11-24T18:04:43.128158Z
+K 7
+svn:log
+V 25
+Update mergeinfo on A/EÌ
+PROPS-END
+
+Node-path: A/EÌ
+Node-kind: dir
+Node-action: change
+Prop-content-length: 61
+Content-length: 61
+
+K 13
+svn:mergeinfo
+V 26
+/Q/aÌŠlpha:71
+/Q/Ã¥lpha:69
+PROPS-END
+
+
+Revision-number: 7
+Prop-content-length: 130
+Content-length: 130
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2013-11-24T18:04:51.128158Z
+K 7
+svn:log
+V 25
+Update mergeinfo on A/EÌ
+PROPS-END
+
+Node-path: A/EÌ
+Node-kind: dir
+Node-action: change
+Prop-content-length: 64
+Content-length: 64
+
+K 13
+svn:mergeinfo
+V 29
+/Q/aÌŠlpha:71
+/Q/Ã¥lpha:69,71
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnadmin_tests_data/skeleton_repos.dump b/subversion/tests/cmdline/svnadmin_tests_data/skeleton_repos.dump
new file mode 100644
index 0000000..cdadb0d
--- /dev/null
+++ b/subversion/tests/cmdline/svnadmin_tests_data/skeleton_repos.dump
@@ -0,0 +1,207 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 45cfc32e-e360-9043-8e66-614bff171639
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2010-03-24T16:31:48.968750Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 118
+Content-length: 118
+
+K 7
+svn:log
+V 17
+Initial setup...
+
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-24T16:35:09.828125Z
+PROPS-END
+
+Node-path: Projects
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 118
+Content-length: 118
+
+K 7
+svn:log
+V 17
+Initial setup...
+
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-24T16:36:28.781250Z
+PROPS-END
+
+Node-path: README
+Node-kind: file
+Node-action: add
+Prop-content-length: 40
+Text-content-length: 48
+Text-content-md5: 02d086e41b03058c5f1af6282c1f483f
+Text-content-sha1: cc67e4dd7cd8ca83095c8b95f65b6698b39cb263
+Content-length: 88
+
+K 13
+svn:eol-style
+V 6
+native
+PROPS-END
+Valuable information will go here...
+...someday.
+
+Revision-number: 3
+Prop-content-length: 118
+Content-length: 118
+
+K 7
+svn:log
+V 17
+Initial setup...
+
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-24T16:37:10.937500Z
+PROPS-END
+
+Node-path: Projects/Project-X
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 4
+Prop-content-length: 118
+Content-length: 118
+
+K 7
+svn:log
+V 17
+Initial setup...
+
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-24T16:37:38.156250Z
+PROPS-END
+
+Node-path: Projects/Project-Y
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 5
+Prop-content-length: 118
+Content-length: 118
+
+K 7
+svn:log
+V 17
+Initial setup...
+
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-24T16:38:11.046875Z
+PROPS-END
+
+Node-path: Projects/Project-Z
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 6
+Prop-content-length: 118
+Content-length: 118
+
+K 7
+svn:log
+V 17
+Initial setup...
+
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-24T16:39:04.062500Z
+PROPS-END
+
+Node-path: Projects/docs
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: Projects/docs/README
+Node-kind: file
+Node-action: add
+Prop-content-length: 40
+Text-content-length: 48
+Text-content-md5: 02d086e41b03058c5f1af6282c1f483f
+Text-content-sha1: cc67e4dd7cd8ca83095c8b95f65b6698b39cb263
+Content-length: 88
+
+K 13
+svn:eol-style
+V 6
+native
+PROPS-END
+Valuable information will go here...
+...someday.
+
diff --git a/subversion/tests/cmdline/svnauthz_tests.py b/subversion/tests/cmdline/svnauthz_tests.py
new file mode 100755
index 0000000..fd1de40
--- /dev/null
+++ b/subversion/tests/cmdline/svnauthz_tests.py
@@ -0,0 +1,926 @@
+#!/usr/bin/env python
+#
+# svnauthz_tests.py: testing the 'svnauthz' tool.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os.path
+import tempfile
+
+# Our testing module
+import svntest
+from svntest import wc
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+# Run svnauthz commands on commit
+hook_template = """import sys,os,subprocess
+svnauthz_bin=%s
+
+fp = open(os.path.join(sys.argv[1], 'hooks.log'), 'wb')
+def output_command(fp, cmd, opt):
+ command = [svnauthz_bin, cmd, '-t', sys.argv[2], sys.argv[1]] + opt
+ process = subprocess.Popen(command, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False, bufsize=-1)
+ (output, errors) = process.communicate()
+ status = process.returncode
+ fp.write(output)
+ fp.write(errors)
+ fp.write(("Exit %%d\\n" %% status).encode())
+ return status
+
+for (svnauthz_cmd, svnauthz_opt) in %s:
+ output_command(fp, svnauthz_cmd, svnauthz_opt.split())
+fp.close()"""
+
+#----------------------------------------------------------------------
+def verify_logfile(logfilename, expected_data, delete_log=True):
+ if os.path.exists(logfilename):
+ fp = open(logfilename)
+ else:
+ raise svntest.verify.SVNUnexpectedOutput("hook logfile %s not found"\
+ % logfilename)
+
+ actual_data = fp.readlines()
+ fp.close()
+ if delete_log:
+ os.unlink(logfilename)
+ svntest.verify.compare_and_display_lines('wrong hook logfile content',
+ 'HOOKLOG',
+ expected_data, actual_data)
+
+#----------------------------------------------------------------------
+
+# Note we don't test various different validation failures, the
+# validation is actually just done when the file is loaded and
+# the library tests for the config file parser and the authz
+# parser already validate various failures that return errors.
+
+def svnauthz_validate_file_test(sbox):
+ "test 'svnauthz validate' on files"
+
+ # build an authz file
+ (authz_fd, authz_path) = tempfile.mkstemp()
+ authz_content = "[/]\n* = rw\n"
+ svntest.main.file_write(authz_path, authz_content)
+
+ # Valid authz file
+ svntest.actions.run_and_verify_svnauthz(None, None,
+ 0, False, "validate", authz_path)
+
+ # Invalid authz file, expect exit code 1, we found the file loaded it
+ # but found an error
+ svntest.main.file_write(authz_path, 'x\n')
+ svntest.actions.run_and_verify_svnauthz(None, None,
+ 1, False, "validate", authz_path)
+
+ # Non-existent authz file
+ # exit code 2, operational error since we can't test the file.
+ os.close(authz_fd)
+ os.remove(authz_path)
+ svntest.actions.run_and_verify_svnauthz(None,
+ None, 2, False, "validate",
+ authz_path)
+
+@SkipUnless(svntest.main.is_ra_type_file)
+def svnauthz_validate_repo_test(sbox):
+ "test 'svnauthz validate' on urls"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ authz_content = "[/]\n* = rw\n"
+
+ # build an authz file and commit it to the repo
+ authz_path = os.path.join(wc_dir, 'A', 'authz')
+ svntest.main.file_write(authz_path, authz_content)
+ svntest.main.run_svn(None, 'add', authz_path)
+ expected_output = wc.State(wc_dir, {'A/authz' : Item(verb='Adding')})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/authz' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Valid authz url (file stored in repo)
+ authz_url = repo_url + '/A/authz'
+ svntest.actions.run_and_verify_svnauthz(None, None,
+ 0, False, "validate", authz_url)
+
+ # Invalid authz url (again use the iota file in the repo)
+ # expect exit code 1, we found the file loaded it but found an error
+ iota_url = repo_url + '/iota'
+ svntest.actions.run_and_verify_svnauthz(None, None,
+ 1, False, "validate", iota_url)
+
+ # Non-existent authz url
+ # exit code 2, operational error since we can't test the file.
+ svntest.actions.run_and_verify_svnauthz(None,
+ None, 2, False, "validate",
+ repo_url + "/zilch")
+
+def svnauthz_validate_txn_test(sbox):
+ "test 'svnauthz validate --transaction'"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_dir = sbox.repo_dir
+
+ logfilepath = os.path.join(repo_dir, 'hooks.log')
+ pre_commit_hook = svntest.main.get_pre_commit_hook_path(repo_dir)
+ hook_instance = hook_template % (repr(svntest.main.svnauthz_binary),
+ repr([('validate', 'A/authz')]))
+ svntest.main.create_python_hook_script(pre_commit_hook, hook_instance)
+
+ # Create an authz file
+ authz_content = "[/]\n* = rw\n"
+ authz_path = os.path.join(wc_dir, 'A/authz')
+ svntest.main.file_write(authz_path, authz_content)
+ svntest.main.run_svn(None, 'add', authz_path)
+
+ # commit a valid authz file, and check the hook's logfile
+ expected_output = wc.State(wc_dir, {'A/authz' : Item(verb='Adding')})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/authz' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ expected_data = ['Exit 0\n']
+ verify_logfile(logfilepath, expected_data)
+
+ # Add an invalid line to the authz file.
+ svntest.main.file_append(authz_path, 'x')
+ expected_output = wc.State(wc_dir, {'A/authz' : Item(verb='Sending')})
+ expected_status.tweak('A/authz', status=' ', wc_rev=3)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ expected_data = svntest.verify.RegexOutput(".*?Error parsing authz file: '.*?'",
+ match_all=False)
+ verify_logfile(logfilepath, expected_data, delete_log=False)
+ # Check the logfile that our Exit was 1 too
+ expected_data = svntest.verify.ExpectedOutput("Exit 1\n", match_all=False)
+ verify_logfile(logfilepath, expected_data)
+
+ # Validate a file that doesn't exist and make sure we're exiting with 2.
+ hook_instance = hook_template % (repr(svntest.main.svnauthz_binary),
+ repr([('validate', 'zilch')]))
+ svntest.main.create_python_hook_script(pre_commit_hook, hook_instance)
+ svntest.main.file_append(authz_path, 'x')
+ expected_status.tweak('A/authz', status=' ', wc_rev=4)
+ if svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status):
+ raise svntest.Failure
+ expected_data = svntest.verify.ExpectedOutput("Exit 2\n", match_all=False)
+ verify_logfile(logfilepath, expected_data)
+
+def svnauthz_accessof_file_test(sbox):
+ "test 'svnauthz accessof' on files"
+
+ # build an authz file
+ (authz_fd, authz_path) = tempfile.mkstemp()
+ authz_content = "[/]\ngroucho = \ngallagher = rw\n* = r\n" + \
+ "[/bios]\n* = rw\n" + \
+ "[comedy:/jokes]\ngroucho = rw\n" + \
+ "[slapstick:/jokes]\n* =\n"
+ svntest.main.file_write(authz_path, authz_content)
+
+ # Anonymous access with no path, and no repository should be rw
+ # since it returns the highest level of access granted anywhere.
+ # So /bios being rw for everyone means this will be rw.
+ svntest.actions.run_and_verify_svnauthz(["rw\n"], None,
+ 0, False, "accessof", authz_path)
+
+ # Anonymous access on /jokes should be r, no repo so won't match
+ # the slapstick:/jokes section.
+ svntest.actions.run_and_verify_svnauthz(["r\n"], None, 0, False, "accessof",
+ authz_path, "--path", "/jokes")
+
+ # Anonymous access on /jokes on slapstick repo should be no
+ svntest.actions.run_and_verify_svnauthz(["no\n"], None, 0, False, "accessof",
+ authz_path, "--path", "/jokes",
+ "--repository", "slapstick")
+
+ # User access with no path, and no repository should be rw
+ # since it returns the h ighest level of access anywhere.
+ # So /bios being rw for everyone means this will be rw.
+ svntest.actions.run_and_verify_svnauthz(["rw\n"], None,
+ 0, False, "accessof", authz_path,
+ "--username", "groucho")
+
+ # User groucho specified on /jokes with no repo, will not match any of the
+ # repo specific sections, so is r since everyone has read access.
+ svntest.actions.run_and_verify_svnauthz(["r\n"], None,
+ 0, False, "accessof", authz_path,
+ "--path", "/jokes", "--username",
+ "groucho")
+
+ # User groucho specified on /jokes with the repo comedy will be rw
+ svntest.actions.run_and_verify_svnauthz(["rw\n"], None, 0, False, "accessof",
+ authz_path, "--path", "/jokes",
+ "--username", "groucho",
+ "--repository", "comedy")
+
+ os.close(authz_fd)
+ os.remove(authz_path)
+
+@SkipUnless(svntest.main.is_ra_type_file)
+def svnauthz_accessof_repo_test(sbox):
+ "test 'svnauthz accessof' on urls"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ authz_content = "[/]\ngroucho = \ngallagher = rw\n* = r\n" + \
+ "[/bios]\n* = rw\n" + \
+ "[comedy:/jokes]\ngroucho = rw\n" + \
+ "[slapstick:/jokes]\n* =\n"
+
+ # build an authz file and commit it to the repo
+ authz_path = os.path.join(wc_dir, 'A', 'authz')
+ svntest.main.file_write(authz_path, authz_content)
+ svntest.main.run_svn(None, 'add', authz_path)
+ expected_output = wc.State(wc_dir, {'A/authz' : Item(verb='Adding')})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/authz' : Item(status=' ', wc_rev=2),
+ })
+ if svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status):
+ raise svntest.Failure
+
+ # Anonymous access with no path, and no repository should be rw
+ # since it returns the highest level of access granted anywhere.
+ # So /bios being rw for everyone means this will be rw.
+ authz_url = repo_url + "/A/authz"
+ svntest.actions.run_and_verify_svnauthz(["rw\n"], None,
+ 0, False, "accessof", authz_url)
+
+ # Anonymous access on /jokes should be r, no repo so won't match
+ # the slapstick:/jokes section.
+ svntest.actions.run_and_verify_svnauthz(["r\n"], None, 0, False, "accessof",
+ authz_url, "--path", "/jokes")
+
+ # Anonymous access on /jokes on slapstick repo should be no
+ svntest.actions.run_and_verify_svnauthz(["no\n"], None, 0, False, "accessof",
+ authz_url, "--path", "/jokes",
+ "--repository", "slapstick")
+
+ # User access with no path, and no repository should be rw
+ # since it returns the h ighest level of access anywhere.
+ # So /bios being rw for everyone means this will be rw.
+ svntest.actions.run_and_verify_svnauthz(["rw\n"], None,
+ 0, False, "accessof", authz_url,
+ "--username", "groucho")
+
+ # User groucho specified on /jokes with no repo, will not match any of the
+ # repo specific sections, so is r since everyone has read access.
+ svntest.actions.run_and_verify_svnauthz(["r\n"], None,
+ 0, False, "accessof", authz_url,
+ "--path", "/jokes", "--username",
+ "groucho")
+
+ # User groucho specified on /jokes with the repo comedy will be rw
+ svntest.actions.run_and_verify_svnauthz(["rw\n"], None, 0, False, "accessof",
+ authz_url, "--path", "/jokes",
+ "--username", "groucho",
+ "--repository", "comedy")
+
+def svnauthz_accessof_groups_file_test(sbox):
+ "test 'svnauthz accessof --groups-file' on files"
+
+ # build an authz file
+ (authz_fd, authz_path) = tempfile.mkstemp()
+ authz_content = "[/]\n@musicians = rw\n@comedians = \n" + \
+ "[comedy:/jokes]\n@musicians = \n@comedians = r\n"
+ svntest.main.file_write(authz_path, authz_content)
+
+ # build a groups file
+ (groups_fd, groups_path) = tempfile.mkstemp()
+ groups_content = "[groups]\nmusicians=stafford\ncomedians=groucho\n"
+ svntest.main.file_write(groups_path, groups_content)
+
+ # Anonymous access with no path, and no repository should be no
+ # since it returns the highest level of access granted anywhere.
+ svntest.actions.run_and_verify_svnauthz(["no\n"], None,
+ 0, False, "accessof", authz_path,
+ "--groups-file", groups_path)
+
+ # User stafford (@musicians) access with no path, and no repository should
+ # be no since it returns the highest level of access granted anywhere.
+ svntest.actions.run_and_verify_svnauthz(["rw\n"], None,
+ 0, False, "accessof", authz_path,
+ "--groups-file", groups_path,
+ "--username", "stafford")
+
+ # User groucho (@comedians) access with no path, and no repository should
+ # be no since it returns the highest level of access granted anywhere.
+ svntest.actions.run_and_verify_svnauthz(["no\n"], None,
+ 0, False, "accessof", authz_path,
+ "--groups-file", groups_path,
+ "--username", "groucho")
+
+ # Anonymous access specified on /jokes with the repo comedy will be no.
+ svntest.actions.run_and_verify_svnauthz(["no\n"], None, 0, False,
+ "accessof", authz_path,
+ "--groups-file", groups_path,
+ "--path", "jokes",
+ "--repository", "comedy")
+
+ # User stafford (@musicians) specified on /jokes with the repo comedy
+ # will be no.
+ svntest.actions.run_and_verify_svnauthz(["no\n"], None,
+ 0, False, "accessof", authz_path,
+ "--groups-file", groups_path,
+ "--path", "jokes",
+ "--repository", "comedy",
+ "--username", "stafford")
+
+ # User groucho (@comedians) specified on /jokes with the repo
+ # comedy will be r.
+ svntest.actions.run_and_verify_svnauthz(["r\n"], None,
+ 0, False, "accessof", authz_path,
+ "--groups-file", groups_path,
+ "--path", "jokes",
+ "--repository", "comedy",
+ "--username", "groucho")
+
+ os.close(authz_fd)
+ os.remove(authz_path)
+ os.close(groups_fd)
+ os.remove(groups_path)
+
+@SkipUnless(svntest.main.is_ra_type_file)
+def svnauthz_accessof_groups_repo_test(sbox):
+ "test 'svnauthz accessof --groups-file' on urls"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ authz_content = "[/]\n@musicians = rw\n@comedians = \n" + \
+ "[comedy:/jokes]\n@musicians = \n@comedians = r\n"
+
+ groups_content = "[groups]\nmusicians=stafford\ncomedians=groucho\n"
+
+ # build authz and groups files and commit them to the repo
+ authz_path = os.path.join(wc_dir, 'A', 'authz')
+ groups_path = os.path.join(wc_dir, 'A', 'groups')
+ svntest.main.file_write(authz_path, authz_content)
+ svntest.main.file_write(groups_path, groups_content)
+ svntest.main.run_svn(None, 'add', authz_path, groups_path)
+ expected_output = wc.State(wc_dir, {
+ 'A/authz' : Item(verb='Adding'),
+ 'A/groups' : Item(verb='Adding'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/authz' : Item(status=' ', wc_rev=2),
+ 'A/groups' : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Anonymous access with no path, and no repository should be no
+ # since it returns the highest level of access granted anywhere.
+ authz_url = repo_url + "/A/authz"
+ groups_url = repo_url + "/A/groups"
+ svntest.actions.run_and_verify_svnauthz(["no\n"], None,
+ 0, False, "accessof", authz_url,
+ "--groups-file", groups_url)
+
+ # User stafford (@musicians) access with no path, and no repository should
+ # be no since it returns the highest level of access granted anywhere.
+ svntest.actions.run_and_verify_svnauthz(["rw\n"], None,
+ 0, False, "accessof", authz_url,
+ "--groups-file", groups_url,
+ "--username", "stafford")
+
+ # User groucho (@comedians) access with no path, and no repository should
+ # be no since it returns the highest level of access granted anywhere.
+ svntest.actions.run_and_verify_svnauthz(["no\n"], None,
+ 0, False, "accessof", authz_url,
+ "--groups-file", groups_url,
+ "--username", "groucho")
+
+ # Anonymous access specified on /jokes with the repo comedy will be no.
+ svntest.actions.run_and_verify_svnauthz(["no\n"], None, 0, False,
+ "accessof", authz_url,
+ "--groups-file", groups_url,
+ "--path", "jokes",
+ "--repository", "comedy")
+
+ # User stafford (@musicians) specified on /jokes with the repo comedy
+ # will be no.
+ svntest.actions.run_and_verify_svnauthz(["no\n"], None,
+ 0, False, "accessof", authz_url,
+ "--groups-file", groups_url,
+ "--path", "jokes",
+ "--repository", "comedy",
+ "--username", "stafford")
+
+ # User groucho (@comedians) specified on /jokes with the repo
+ # comedy will be r.
+ svntest.actions.run_and_verify_svnauthz(["r\n"], None,
+ 0, False, "accessof", authz_url,
+ "--groups-file", groups_url,
+ "--path", "jokes",
+ "--repository", "comedy",
+ "--username", "groucho")
+
+def svnauthz_accessof_is_file_test(sbox):
+ "test 'svnauthz accessof --is' on files"
+
+ # build an authz file
+ (authz_fd, authz_path) = tempfile.mkstemp()
+ authz_content = "[/]\ngroucho = \ngallagher = rw\n* = r\n" + \
+ "[/bios]\n* = rw\n" + \
+ "[comedy:/jokes]\ngroucho = rw\n" + \
+ "[slapstick:/jokes]\n* =\n"
+ svntest.main.file_write(authz_path, authz_content)
+
+ # Test an invalid --is option, should get an error message and exit code
+ # of 2.
+ expected_output = svntest.verify.RegexOutput(
+ ".*'x' is not a valid argument for --is", match_all=False
+ )
+ svntest.actions.run_and_verify_svnauthz(None,
+ expected_output, 2, False,
+ "accessof", authz_path, "--is", "x")
+
+ # Anonymous access with no path, and no repository should be rw
+ # since it returns the highest level of access granted anywhere.
+ # So /bios being rw for everyone means this will be rw.
+ # Test --is rw returns 0.
+ svntest.actions.run_and_verify_svnauthz(None,
+ None, 0, False, "accessof",
+ authz_path, "--is", "rw")
+ # Test --is r returns 3.
+ svntest.actions.run_and_verify_svnauthz(None,
+ None, 3, False, "accessof",
+ authz_path, "--is", "r")
+ # Test --is no returns 3.
+ svntest.actions.run_and_verify_svnauthz(None,
+ None, 3, False, "accessof",
+ authz_path, "--is", "no")
+
+ # Anonymous access on /jokes should be r, no repo so won't match
+ # the slapstick:/jokes section.
+ # Test --is r returns 0.
+ svntest.actions.run_and_verify_svnauthz(None, None, 0, False, "accessof",
+ authz_path, "--path", "/jokes",
+ "--is", "r")
+ # Test --is rw returns 3.
+ svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof",
+ authz_path, "--path", "/jokes",
+ "--is", "rw")
+ # Test --is no returns 3.
+ svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof",
+ authz_path, "--path", "/jokes",
+ "--is", "no")
+
+ # Anonymous access on /jokes on slapstick repo should be no
+ # Test --is no returns 0.
+ svntest.actions.run_and_verify_svnauthz(None, None, 0, False, "accessof",
+ authz_path, "--path", "/jokes",
+ "--repository", "slapstick",
+ "--is", "no")
+ # Test --is rw returns 3.
+ svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof",
+ authz_path, "--path", "/jokes",
+ "--repository", "slapstick",
+ "--is", "rw")
+ # Test --is r returns 3.
+ svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof",
+ authz_path, "--path", "/jokes",
+ "--repository", "slapstick",
+ "--is", "r")
+
+ # User access with no path, and no repository should be rw
+  # since it returns the highest level of access anywhere.
+ # So /bios being rw for everyone means this will be rw.
+ # Test --is rw returns 0.
+ svntest.actions.run_and_verify_svnauthz(None, None,
+ 0, False, "accessof", authz_path,
+ "--username", "groucho", "--is",
+ "rw")
+ # Test --is r returns 3.
+ svntest.actions.run_and_verify_svnauthz(None, None,
+ 3, False, "accessof", authz_path,
+ "--username", "groucho", "--is",
+ "r")
+ # Test --is no returns 3.
+ svntest.actions.run_and_verify_svnauthz(None, None,
+ 3, False, "accessof", authz_path,
+ "--username", "groucho", "--is",
+ "no")
+
+ # User groucho specified on /jokes with no repo, will not match any of the
+ # repo specific sections, so is r since everyone has read access.
+ # Test --is r returns 0.
+ svntest.actions.run_and_verify_svnauthz(None,
+ None, 0, False, "accessof",
+ authz_path, "--path", "/jokes",
+ "--username", "groucho", "--is", "r")
+ # Test --is rw returns 3.
+ svntest.actions.run_and_verify_svnauthz(None,
+ None, 3, False, "accessof",
+ authz_path, "--path", "/jokes",
+ "--username", "groucho",
+ "--is", "rw")
+ # Test --is no returns 3.
+ svntest.actions.run_and_verify_svnauthz(None,
+ None, 3, False, "accessof",
+ authz_path, "--path", "/jokes",
+ "--username", "groucho",
+ "--is", "no")
+
+ # User groucho specified on /jokes with the repo comedy will be rw
+ # Test --is rw returns 0.
+ svntest.actions.run_and_verify_svnauthz(None, None, 0, False, "accessof",
+ authz_path, "--path", "/jokes",
+ "--username", "groucho",
+ "--repository", "comedy", "--is",
+ "rw")
+ # Test --is r returns 3.
+ svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof",
+ authz_path, "--path", "/jokes",
+ "--username", "groucho",
+ "--repository", "comedy", "--is",
+ "r")
+ # Test --is no returns 3.
+ svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof",
+ authz_path, "--path", "/jokes",
+ "--username", "groucho",
+ "--repository", "comedy", "--is",
+ "no")
+
+ # Add an invalid line to the authz file
+ svntest.main.file_append(authz_path, "x\n")
+ # Check that --is returns 1 when the syntax is invalid with a file..
+ expected_out = svntest.verify.RegexOutput(
+ ".*Error while parsing authz file:",
+ match_all=False
+ )
+ svntest.actions.run_and_verify_svnauthz(None, expected_out, 1, False,
+ "accessof", authz_path, "--path",
+ "/jokes", "--username", "groucho",
+ "--repository", "comedy", "--is",
+ "rw")
+
+ os.close(authz_fd)
+ os.remove(authz_path)
+
+@SkipUnless(svntest.main.is_ra_type_file)
+def svnauthz_accessof_is_repo_test(sbox):
+ "test 'svnauthz accessof --is' on files and urls"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ authz_content = "[/]\ngroucho = \ngallagher = rw\n* = r\n" + \
+ "[/bios]\n* = rw\n" + \
+ "[comedy:/jokes]\ngroucho = rw\n" + \
+ "[slapstick:/jokes]\n* =\n"
+
+ # build an authz file and commit it to the repo
+ authz_path = os.path.join(wc_dir, 'A', 'authz')
+ svntest.main.file_write(authz_path, authz_content)
+ svntest.main.run_svn(None, 'add', authz_path)
+ expected_output = wc.State(wc_dir, {'A/authz' : Item(verb='Adding')})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/authz' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Test an invalid --is option, should get an error message and exit code
+ # of 2.
+ authz_url = repo_url + "/A/authz"
+ expected_output = svntest.verify.RegexOutput(
+ ".*'x' is not a valid argument for --is", match_all=False
+ )
+ svntest.actions.run_and_verify_svnauthz(None,
+ expected_output, 2, False,
+ "accessof", authz_url, "--is", "x")
+
+ # Anonymous access with no path, and no repository should be rw
+ # since it returns the highest level of access granted anywhere.
+ # So /bios being rw for everyone means this will be rw.
+ # Test --is rw returns 0.
+ svntest.actions.run_and_verify_svnauthz(None,
+ None, 0, False, "accessof",
+ authz_url, "--is", "rw")
+ # Test --is r returns 3.
+ svntest.actions.run_and_verify_svnauthz(None,
+ None, 3, False, "accessof",
+ authz_url, "--is", "r")
+ # Test --is no returns 3.
+ svntest.actions.run_and_verify_svnauthz(None,
+ None, 3, False, "accessof",
+ authz_url, "--is", "no")
+
+ # Anonymous access on /jokes should be r, no repo so won't match
+ # the slapstick:/jokes section.
+ # Test --is r returns 0.
+ svntest.actions.run_and_verify_svnauthz(None, None, 0, False, "accessof",
+ authz_url, "--path", "/jokes",
+ "--is", "r")
+ # Test --is rw returns 3.
+ svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof",
+ authz_url, "--path", "/jokes",
+ "--is", "rw")
+ # Test --is no returns 3.
+ svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof",
+ authz_url, "--path", "/jokes",
+ "--is", "no")
+
+ # Anonymous access on /jokes on slapstick repo should be no
+ # Test --is no returns 0.
+ svntest.actions.run_and_verify_svnauthz(None, None, 0, False, "accessof",
+ authz_url, "--path", "/jokes",
+ "--repository", "slapstick",
+ "--is", "no")
+ # Test --is rw returns 3.
+ svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof",
+ authz_url, "--path", "/jokes",
+ "--repository", "slapstick",
+ "--is", "rw")
+ # Test --is r returns 3.
+ svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof",
+ authz_url, "--path", "/jokes",
+ "--repository", "slapstick",
+ "--is", "r")
+
+ # User access with no path, and no repository should be rw
+  # since it returns the highest level of access anywhere.
+ # So /bios being rw for everyone means this will be rw.
+ # Test --is rw returns 0.
+ svntest.actions.run_and_verify_svnauthz(None, None,
+ 0, False, "accessof", authz_url,
+ "--username", "groucho", "--is",
+ "rw")
+ # Test --is r returns 3.
+ svntest.actions.run_and_verify_svnauthz(None, None,
+ 3, False, "accessof", authz_url,
+ "--username", "groucho", "--is",
+ "r")
+ # Test --is no returns 3.
+ svntest.actions.run_and_verify_svnauthz(None, None,
+ 3, False, "accessof", authz_url,
+ "--username", "groucho", "--is",
+ "no")
+
+ # User groucho specified on /jokes with no repo, will not match any of the
+ # repo specific sections, so is r since everyone has read access.
+ # Test --is r returns 0.
+ svntest.actions.run_and_verify_svnauthz(None,
+ None, 0, False, "accessof",
+ authz_url, "--path", "/jokes",
+ "--username", "groucho", "--is", "r")
+ # Test --is rw returns 3.
+ svntest.actions.run_and_verify_svnauthz(None,
+ None, 3, False, "accessof",
+ authz_url, "--path", "/jokes",
+ "--username", "groucho",
+ "--is", "rw")
+ # Test --is no returns 3.
+ svntest.actions.run_and_verify_svnauthz(None,
+ None, 3, False, "accessof",
+ authz_url, "--path", "/jokes",
+ "--username", "groucho",
+ "--is", "no")
+
+ # User groucho specified on /jokes with the repo comedy will be rw
+ # Test --is rw returns 0.
+ svntest.actions.run_and_verify_svnauthz(None, None, 0, False, "accessof",
+ authz_url, "--path", "/jokes",
+ "--username", "groucho",
+ "--repository", "comedy", "--is",
+ "rw")
+ # Test --is r returns 3.
+ svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof",
+ authz_url, "--path", "/jokes",
+ "--username", "groucho",
+ "--repository", "comedy", "--is",
+ "r")
+ # Test --is no returns 3.
+ svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof",
+ authz_url, "--path", "/jokes",
+ "--username", "groucho",
+ "--repository", "comedy", "--is",
+ "no")
+
+ # Add an invalid line to the authz file
+ svntest.main.file_append(authz_path, "x\n")
+ expected_output = wc.State(wc_dir, {'A/authz' : Item(verb='Sending')})
+ expected_status.tweak('A/authz', wc_rev=3)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Check that --is returns 1 when the syntax is invalid with a url.
+ expected_out = svntest.verify.RegexOutput(
+ ".*Error while parsing authz file:",
+ match_all=False
+ )
+ svntest.actions.run_and_verify_svnauthz(None, expected_out, 1, False,
+ "accessof", authz_url, "--path",
+ "/jokes", "--username", "groucho",
+ "--repository", "comedy", "--is",
+ "rw")
+
+def svnauthz_accessof_txn_test(sbox):
+ "test 'svnauthz accessof --transaction'"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_dir = sbox.repo_dir
+
+ logfilepath = os.path.join(repo_dir, 'hooks.log')
+ pre_commit_hook = svntest.main.get_pre_commit_hook_path(repo_dir)
+ hook_instance = hook_template % (repr(svntest.main.svnauthz_binary),
+ repr([('accessof',
+ '--is rw A/authz')]))
+ svntest.main.create_python_hook_script(pre_commit_hook, hook_instance)
+
+ # Create an authz file
+ authz_content = "[/]\n* = rw\n"
+ authz_path = os.path.join(wc_dir, 'A/authz')
+ svntest.main.file_write(authz_path, authz_content)
+ svntest.main.run_svn(None, 'add', authz_path)
+
+ # Only really testing the exit value code paths.
+
+ # commit a valid authz file, and run --is rw which is true.
+ # Should get an exit of 0.
+ expected_output = wc.State(wc_dir, {'A/authz' : Item(verb='Adding')})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/authz' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ expected_data = ['Exit 0\n']
+ verify_logfile(logfilepath, expected_data)
+
+ # commit a valid authz file, and run --is r which is false
+ # Should get an exit of 3.
+ hook_instance = hook_template % (repr(svntest.main.svnauthz_binary),
+ repr([('accessof',
+ '--is r A/authz')]))
+ svntest.main.create_python_hook_script(pre_commit_hook, hook_instance)
+ expected_output = wc.State(wc_dir, {'A/authz' : Item(verb='Sending')})
+ expected_status.tweak('A/authz', status=' ', wc_rev=3)
+ svntest.main.file_append(authz_path, "groucho = r\n")
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ expected_data = svntest.verify.ExpectedOutput('Exit 3\n', match_all=False)
+ verify_logfile(logfilepath, expected_data)
+
+ # break the authz file with a non-existent group and check for an exit 1.
+ expected_status.tweak('A/authz', status=' ', wc_rev=4)
+ svntest.main.file_append(authz_path, "@friends = rw\n")
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ expected_data = svntest.verify.ExpectedOutput('Exit 1\n', match_all=False)
+ verify_logfile(logfilepath, expected_data)
+
+  # remove the authz file entirely and check for an exit 2.
+ expected_output = wc.State(wc_dir, {'A/authz' : Item(verb='Deleting')})
+ expected_status.remove('A/authz')
+ svntest.main.run_svn(None, 'rm', authz_path)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ expected_data = svntest.verify.ExpectedOutput('Exit 2\n', match_all=False)
+ verify_logfile(logfilepath, expected_data)
+
+def svnauthz_compat_mode_file_test(sbox):
+ "test 'svnauthz-validate' compatibility mode file"
+
+
+ # Create an authz file
+ (authz_fd, authz_path) = tempfile.mkstemp()
+ authz_content = "[/]\n* = rw\n"
+ svntest.main.file_write(authz_path, authz_content)
+
+ # Check a valid file.
+ svntest.actions.run_and_verify_svnauthz(None, None, 0, True,
+ authz_path)
+
+ # Check an invalid file.
+ svntest.main.file_append(authz_path, "x\n")
+ svntest.actions.run_and_verify_svnauthz(None, None, 1, True,
+ authz_path)
+
+ # Remove the file.
+ os.close(authz_fd)
+ os.remove(authz_path)
+
+ # Check a non-existent file.
+ svntest.actions.run_and_verify_svnauthz(
+ None, None, 2, True,
+ authz_path
+ )
+
+
+@SkipUnless(svntest.main.is_ra_type_file)
+def svnauthz_compat_mode_repo_test(sbox):
+ "test 'svnauthz-validate' compatibility mode url"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # Create an authz file
+ authz_content = "[/]\n* = rw\n"
+ authz_path = os.path.join(wc_dir, 'A/authz')
+ svntest.main.file_write(authz_path, authz_content)
+ authz_url = repo_url + '/A/authz'
+
+ # Commit the file and check a URL
+ svntest.main.run_svn(None, 'add', authz_path)
+ expected_output = wc.State(wc_dir, {'A/authz' : Item(verb='Adding')})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/authz' : Item(status=' ', wc_rev=2),
+ })
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ svntest.actions.run_and_verify_svnauthz(None, None, 0, True,
+ authz_url)
+
+ # Check an invalid url.
+ svntest.main.file_append(authz_path, "x\n")
+ expected_output = wc.State(wc_dir, {'A/authz' : Item(verb='Sending')})
+ expected_status.tweak('A/authz', status=' ', wc_rev=3)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ svntest.actions.run_and_verify_svnauthz(None, None, 1, True,
+ authz_path)
+
+ # Check a non-existent url.
+ # Exit code really should be 2 since this is an operational error.
+ svntest.actions.run_and_verify_svnauthz(
+ None, None, 2, True,
+ repo_url + "/zilch"
+ )
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ svnauthz_validate_file_test,
+ svnauthz_validate_repo_test,
+ svnauthz_validate_txn_test,
+ svnauthz_accessof_file_test,
+ svnauthz_accessof_repo_test,
+ svnauthz_accessof_groups_file_test,
+ svnauthz_accessof_groups_repo_test,
+ svnauthz_accessof_is_file_test,
+ svnauthz_accessof_is_repo_test,
+ svnauthz_accessof_txn_test,
+ svnauthz_compat_mode_file_test,
+ svnauthz_compat_mode_repo_test,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/svndumpfilter_tests.py b/subversion/tests/cmdline/svndumpfilter_tests.py
new file mode 100755
index 0000000..7ee09a4
--- /dev/null
+++ b/subversion/tests/cmdline/svndumpfilter_tests.py
@@ -0,0 +1,788 @@
+#!/usr/bin/env python
+#
+# svndumpfilter_tests.py: testing the 'svndumpfilter' tool.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os
+import sys
+import tempfile
+
+# Our testing module
+import svntest
+from svntest.verify import SVNExpectedStdout, SVNExpectedStderr
+
+# Get some helper routines
+from svnadmin_tests import load_and_verify_dumpstream, load_dumpstream
+from svntest.main import run_svn, run_svnadmin
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+
+######################################################################
+# Helper routines
+
+
+def filter_and_return_output(dump, bufsize=0, *varargs):
+ """Filter the array of lines passed in 'dump' and return the output
+ and errput"""
+
+ if isinstance(dump, str):
+ dump = [ dump ]
+
+ # Does the caller want the stderr?
+ if '-q' in varargs or '--quiet' in varargs:
+ expected_errput = None # Stderr with -q or --quiet is a real error!
+ else:
+ expected_errput = svntest.verify.AnyOutput
+ ## TODO: Should we handle exit_code?
+ exit_code, output, errput = svntest.main.run_command_stdin(
+ svntest.main.svndumpfilter_binary, expected_errput, bufsize, True,
+ dump, *varargs)
+
+ # Since we call svntest.main.run_command_stdin() in binary mode,
+ # normalize the stderr line endings on Windows ourselves.
+ if sys.platform == 'win32':
+ errput = map(lambda x : x.replace('\r\n', '\n'), errput)
+
+ return output, errput
+
+
+######################################################################
+# Tests
+
+@Issue(2982)
+def reflect_dropped_renumbered_revs(sbox):
+ "reflect dropped renumbered revs in svn:mergeinfo"
+
+ ## See http://subversion.tigris.org/issues/show_bug.cgi?id=2982. ##
+
+ # Test svndumpfilter with include option
+ sbox.build(empty=True)
+ dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svndumpfilter_tests_data',
+ 'with_merges.dump')
+ dumpfile = svntest.actions.load_dumpfile(dumpfile_location)
+
+ filtered_out, filtered_err = filter_and_return_output(
+ dumpfile, 0, "include",
+ "trunk", "branch1",
+ "--skip-missing-merge-sources",
+ "--drop-empty-revs",
+ "--renumber-revs", "--quiet")
+
+ load_dumpstream(sbox, filtered_out, "--ignore-uuid")
+
+ # Verify the svn:mergeinfo properties
+ url = sbox.repo_url
+ expected_output = svntest.verify.UnorderedOutput([
+ url + "/trunk - /branch1:4-5\n",
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'propget', 'svn:mergeinfo', '-R',
+ sbox.repo_url)
+
+
+ # Test svndumpfilter with exclude option
+ sbox.build(empty=True)
+ filtered_out, filtered_err = filter_and_return_output(
+ dumpfile, 0, "exclude", "branch1",
+ "--skip-missing-merge-sources",
+ "--drop-empty-revs",
+ "--renumber-revs", "--quiet")
+
+ load_dumpstream(sbox, filtered_out, "--ignore-uuid")
+
+ # Verify the svn:mergeinfo properties
+ expected_output = svntest.verify.UnorderedOutput([
+ url + "/trunk - \n",
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'propget', 'svn:mergeinfo', '-R',
+ sbox.repo_url)
+
+@Issue(3181)
+def svndumpfilter_loses_mergeinfo(sbox):
+ "svndumpfilter loses mergeinfo"
+ #svndumpfilter loses mergeinfo if invoked without --renumber-revs
+
+ ## See http://subversion.tigris.org/issues/show_bug.cgi?id=3181. ##
+
+ sbox.build(empty=True)
+ dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svndumpfilter_tests_data',
+ 'with_merges.dump')
+ dumpfile = svntest.actions.load_dumpfile(dumpfile_location)
+
+ filtered_out, filtered_err = filter_and_return_output(dumpfile, 0, "include",
+ "trunk", "branch1",
+ "--quiet")
+ load_dumpstream(sbox, filtered_out)
+
+ # Verify the svn:mergeinfo properties
+ url = sbox.repo_url
+ expected_output = svntest.verify.UnorderedOutput([
+ url + "/trunk - /branch1:4-8\n",
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'propget', 'svn:mergeinfo', '-R',
+ sbox.repo_url)
+
+
+def _simple_dumpfilter_test(sbox, dumpfile, *dumpargs):
+ """Run svndumpfilter with arguments DUMPARGS, taking input from DUMPFILE.
+ Check that the output consists of the standard Greek tree excluding
+ all paths that start with 'A/B/E', 'A/D/G' or 'A/D/H'."""
+ wc_dir = sbox.wc_dir
+
+ filtered_output, filtered_err = filter_and_return_output(dumpfile, 0,
+ '--quiet',
+ *dumpargs)
+
+ # Setup our expectations
+ load_dumpstream(sbox, filtered_output, '--ignore-uuid')
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/E/alpha')
+ expected_disk.remove('A/B/E/beta')
+ expected_disk.remove('A/B/E')
+ expected_disk.remove('A/D/H/chi')
+ expected_disk.remove('A/D/H/psi')
+ expected_disk.remove('A/D/H/omega')
+ expected_disk.remove('A/D/H')
+ expected_disk.remove('A/D/G/pi')
+ expected_disk.remove('A/D/G/rho')
+ expected_disk.remove('A/D/G/tau')
+ expected_disk.remove('A/D/G')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(status='A '),
+ 'A/B' : Item(status='A '),
+ 'A/B/lambda' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ 'A/mu' : Item(status='A '),
+ 'A/C' : Item(status='A '),
+ 'A/D' : Item(status='A '),
+ 'A/D/gamma' : Item(status='A '),
+ 'iota' : Item(status='A '),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/B/E/alpha')
+ expected_status.remove('A/B/E/beta')
+ expected_status.remove('A/B/E')
+ expected_status.remove('A/D/H/chi')
+ expected_status.remove('A/D/H/psi')
+ expected_status.remove('A/D/H/omega')
+ expected_status.remove('A/D/H')
+ expected_status.remove('A/D/G/pi')
+ expected_status.remove('A/D/G/rho')
+ expected_status.remove('A/D/G/tau')
+ expected_status.remove('A/D/G')
+
+ # Check that our paths really were excluded
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+
+@Issue(2697)
+def dumpfilter_with_targets(sbox):
+ "svndumpfilter --targets blah"
+ ## See http://subversion.tigris.org/issues/show_bug.cgi?id=2697. ##
+
+ sbox.build(empty=True)
+
+ dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svndumpfilter_tests_data',
+ 'greek_tree.dump')
+ dumpfile = svntest.actions.load_dumpfile(dumpfile_location)
+
+ (fd, targets_file) = tempfile.mkstemp(dir=svntest.main.temp_dir)
+ try:
+ targets = open(targets_file, 'w')
+ targets.write('/A/D/H\n')
+ targets.write('/A/D/G\n')
+ targets.close()
+ _simple_dumpfilter_test(sbox, dumpfile,
+ 'exclude', '/A/B/E', '--targets', targets_file)
+ finally:
+ os.close(fd)
+ os.remove(targets_file)
+
+
+def dumpfilter_with_patterns(sbox):
+ "svndumpfilter --pattern PATH_PREFIX"
+
+ sbox.build(empty=True)
+
+ dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svndumpfilter_tests_data',
+ 'greek_tree.dump')
+ dumpfile = svntest.actions.load_dumpfile(dumpfile_location)
+ _simple_dumpfilter_test(sbox, dumpfile,
+ 'exclude', '--pattern', '/A/D/[GH]*', '/A/[B]/E*')
+
+#----------------------------------------------------------------------
+# More testing for issue #3020 'Reflect dropped/renumbered revisions in
+# svn:mergeinfo data during svnadmin load'
+#
+# Specifically, test that svndumpfilter, when used with the
+# --skip-missing-merge-sources option, removes mergeinfo that refers to
+# revisions that are older than the oldest revision in the dump stream.
+@Issue(3020)
+def filter_mergeinfo_revs_outside_of_dump_stream(sbox):
+ "filter mergeinfo revs outside of dump stream"
+
+ sbox.build(empty=True)
+
+ # Load a partial dump into an existing repository.
+ #
+ # Picture == 1k words:
+ #
+ # The dump file we filter in this test, 'mergeinfo_included_partial.dump', is
+ # a dump of r6:HEAD of the following repos:
+ #
+ # __________________________________________
+ # | |
+ # | ____________________________|_____
+ # | | | |
+ # trunk---r2---r3-----r5---r6-------r8---r9---------------> | |
+ # r1 | | | | | |
+ # initial | | | |______ | |
+ # import copy | copy | merge merge
+ # | | | merge (r5) (r8)
+ # | | | (r9) | |
+ # | | | | | |
+ # | | V V | |
+ # | | branches/B2-------r11---r12----> | |
+ # | | r7 |____| | |
+ # | | | | |
+ # | merge |___ | |
+ # | (r6) | | |
+ # | |_________________ | | |
+ # | | merge | |
+ # | | (r11-12) | |
+ # | | | | |
+ # V V V | |
+ # branches/B1-------------------r10--------r13--> | |
+ # r4 | |
+ # | V V
+ # branches/B1/B/E------------------------------r14---r15->
+ #
+ #
+ # The mergeinfo on the complete repos would look like this:
+ #
+ # Properties on 'branches/B1':
+ # svn:mergeinfo
+ # /branches/B2:11-12
+ # /trunk:6,9
+ # Properties on 'branches/B1/B/E':
+ # svn:mergeinfo
+ # /branches/B2/B/E:11-12
+ # /trunk/B/E:5-6,8-9
+ # Properties on 'branches/B2':
+ # svn:mergeinfo
+ # /trunk:9
+ #
+  # We will run the partial dump through svndumpfilter using the
+  # --skip-missing-merge-sources option which should strip out any revisions < 6.
+ # Then we'll load the filtered result into an empty repository. This
+ # should offset the incoming mergeinfo by -5. In addition, any mergeinfo
+ # referring to the initial revision in the dump file (r6) should be
+ # removed because the change it refers to (r5:6) is not wholly within the
+ # dumpfile. The resulting mergeinfo should look like this:
+ #
+ # Properties on 'branches/B1':
+ # svn:mergeinfo
+ # /branches/B2:6-7
+ # /trunk:4
+ # Properties on 'branches/B1/B/E':
+ # svn:mergeinfo
+ # /branches/B2/B/E:6-7
+ # /trunk/B/E:3-4
+ # Properties on 'branches/B2':
+ # svn:mergeinfo
+ # /trunk:4
+ partial_dump = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svndumpfilter_tests_data',
+ 'mergeinfo_included_partial.dump')
+ partial_dump_contents = svntest.actions.load_dumpfile(partial_dump)
+ filtered_dumpfile2, filtered_out = filter_and_return_output(
+ partial_dump_contents,
+ 8192, # Set a sufficiently large bufsize to avoid a deadlock
+ "include", "trunk", "branches",
+ "--skip-missing-merge-sources",
+ "--quiet")
+ load_dumpstream(sbox, filtered_dumpfile2, '--ignore-uuid')
+ # Check the resulting mergeinfo.
+ url = sbox.repo_url + "/branches"
+ expected_output = svntest.verify.UnorderedOutput([
+ url + "/B1 - /branches/B2:6-7\n",
+ "/trunk:4\n",
+ url + "/B2 - /trunk:4\n",
+ url + "/B1/B/E - /branches/B2/B/E:6-7\n",
+ "/trunk/B/E:3-4\n"])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'propget', 'svn:mergeinfo', '-R',
+ sbox.repo_url)
+
+ # Blow away the current repos, create an empty one in its place, and
+ # then load this skeleton repos into the empty target:
+ #
+ # Projects/ (Added r1)
+ # README (Added r2)
+ # Project-X (Added r3)
+ # Project-Y (Added r4)
+ # Project-Z (Added r5)
+ # docs/ (Added r6)
+ # README (Added r6).
+ sbox.build(empty=True)
+ skeleton_location = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svnadmin_tests_data',
+ 'skeleton_repos.dump')
+ skeleton_dumpfile = svntest.actions.load_dumpfile(skeleton_location)
+ load_dumpstream(sbox, skeleton_dumpfile, '--ignore-uuid')
+ partial_dump2 = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svndumpfilter_tests_data',
+ 'mergeinfo_included_partial.dump')
+ partial_dump_contents2 = svntest.actions.load_dumpfile(partial_dump2)
+ # Now use the partial dump file we used above, but this time exclude
+ # the B2 branch. Load the filtered dump into the /Projects/Project-X
+ # subtree of the skeleton repos.
+ filtered_dumpfile2, filtered_err = filter_and_return_output(
+ partial_dump_contents2,
+ 8192, # Set a sufficiently large bufsize to avoid a deadlock
+ "exclude", "branches/B2",
+ "--skip-missing-merge-sources",
+ "--drop-empty-revs",
+ "--renumber-revs")
+
+ # Starting with the same expectation we had when loading into an empty
+ # repository, adjust each revision by +6 to account for the six revision
+ # already present in the target repos, that gives:
+ #
+ # Properties on 'branches/B1':
+ # svn:mergeinfo
+ # /branches/B2:12-13
+ # /trunk:10
+ # Properties on 'branches/B1/B/E':
+ # svn:mergeinfo
+ # /branches/B2/B/E:12-13
+ # /trunk/B/E:9-10
+ # Properties on 'branches/B2':
+ # svn:mergeinfo
+ # /trunk:10
+ #
+ # ...But /branches/B2 has been filtered out, so all references to
+ # that branch should be gone, leaving:
+ #
+ # Properties on 'branches/B1':
+ # svn:mergeinfo
+ # /trunk:10
+ # Properties on 'branches/B1/B/E':
+ # svn:mergeinfo
+ # /trunk/B/E:9-10
+ #
+ # ...But wait, there's more! Because we use the '--drop-empty-revs'
+  # option, when filtering out 'branches/B2' all the revisions that affect
+ # only that branch should be dropped (i.e. original revs r7, r11, and r12).
+  # In and of itself that has no effect, but we also specify the
+ # '--renumber-revs' option, so when r7 is dropped, r8 should map to r7,
+ # r9 to r8, and r10 to r9 (and so on). That should finally leave us with:
+ #
+ # Properties on 'branches/B1':
+ # svn:mergeinfo
+ # /trunk:9
+ # Properties on 'branches/B1/B/E':
+ # svn:mergeinfo
+ # /trunk/B/E:8-9
+ #
+ # This test currently fails with this mergeinfo:
+ #
+ #
+ #
+ #
+ # Check that all the blather above really happens. First does
+ # svndumpfilter report what we expect to stderr?
+ expected_err = [
+ "Excluding (and dropping empty revisions for) prefixes:\n",
+ " '/branches/B2'\n",
+ "\n",
+ "Revision 6 committed as 6.\n",
+ "Revision 7 skipped.\n", # <-- DROP!
+ "Revision 8 committed as 7.\n",
+ "Revision 9 committed as 8.\n",
+ "Revision 10 committed as 9.\n",
+ "Revision 11 skipped.\n", # <-- DROP!
+ "Revision 12 skipped.\n", # <-- DROP!
+ "Revision 13 committed as 10.\n",
+ "Revision 14 committed as 11.\n",
+ "Revision 15 committed as 12.\n",
+ "\n",
+ "Dropped 3 revisions.\n",
+ "\n",
+ "Revisions renumbered as follows:\n",
+ " 15 => 12\n",
+ " 14 => 11\n",
+ " 13 => 10\n",
+ " 12 => (dropped)\n", # <-- DROP!
+ " 11 => (dropped)\n", # <-- DROP!
+ " 10 => 9\n",
+ " 9 => 8\n",
+ " 8 => 7\n",
+ " 7 => (dropped)\n", # <-- DROP!
+ " 6 => 6\n",
+ "\n",
+ "Dropped 2 nodes:\n",
+ " '/branches/B2'\n",
+ " '/branches/B2/D/H/chi'\n",
+ "\n"]
+ svntest.verify.verify_outputs(
+ "Actual svndumpfilter stderr does not agree with expected stderr",
+ None, filtered_err, None, expected_err)
+
+ # Now actually load the filtered dump into the skeleton repository
+ # and then check the resulting mergeinfo.
+ load_dumpstream(sbox, filtered_dumpfile2,
+ '--parent-dir', '/Projects/Project-X', '--ignore-uuid')
+
+ url = sbox.repo_url + "/Projects/Project-X/branches"
+ expected_output = svntest.verify.UnorderedOutput([
+ url + "/B1 - /Projects/Project-X/trunk:9\n",
+ url + "/B1/B/E - /Projects/Project-X/trunk/B/E:8-9\n"])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'propget', 'svn:mergeinfo', '-R',
+ sbox.repo_url)
+
+#----------------------------------------------------------------------
+# More testing for issue #3020 'Reflect dropped/renumbered revisions in
+# svn:mergeinfo data during svnadmin load'
+#
+# Using svndumpfilter with the --drop-empty-revs option, but without the
+# --renumber-revs option, can create a dump with non-contiguous revisions.
+# Such dumps should not interfere with the correct remapping of mergeinfo
+# source revisions.
+@Issue(3020)
+def dropped_but_not_renumbered_empty_revs(sbox):
+ "mergeinfo maps correctly when dropping revs"
+
+ sbox.build(empty=True)
+
+ # The dump file mergeinfo_included_full.dump represents this repository:
+ #
+ #
+ # __________________________________________
+ # | |
+ # | ____________________________|_____
+ # | | | |
+ # trunk---r2---r3-----r5---r6-------r8---r9---------------> | |
+ # r1 | | | | | |
+ # initial | | | |______ | |
+ # import copy | copy | merge merge
+ # | | | merge (r5) (r8)
+ # | | | (r9) | |
+ # | | | | | |
+ # | | V V | |
+ # | | branches/B2-------r11---r12----> | |
+ # | | r7 |____| | |
+ # | | | | |
+ # | merge |___ | |
+ # | (r6) | | |
+ # | |_________________ | | |
+ # | | merge | |
+ # | | (r11-12) | |
+ # | | | | |
+ # V V V | |
+ # branches/B1-------------------r10--------r13--> | |
+ # r4 | |
+ # | V V
+ # branches/B1/B/E------------------------------r14---r15->
+ #
+ #
+ # The mergeinfo on mergeinfo_included_full.dump is:
+ #
+ # Properties on 'branches/B1':
+ # svn:mergeinfo
+ # /branches/B2:11-12
+ # /trunk:6,9
+ # Properties on 'branches/B1/B/E':
+ # svn:mergeinfo
+ # /branches/B2/B/E:11-12
+ # /trunk/B/E:5-6,8-9
+ # Properties on 'branches/B2':
+ # svn:mergeinfo
+ # /trunk:9
+ #
+ # Use svndumpfilter to filter mergeinfo_included_full.dump, excluding
+ # branches/B2, while dropping, but not renumbering, empty revisions.
+ #
+ # Load the filtered dump into an empty repository. Since we are excluding
+ # /branches/B2 and dropping empty revs, revisions 7, 11, and 12 won't be
+ # included in the filtered dump.
+ full_dump = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svnadmin_tests_data',
+ 'mergeinfo_included_full.dump')
+ full_dump_contents = svntest.actions.load_dumpfile(full_dump)
+ filtered_dumpfile, filtered_out = filter_and_return_output(
+ full_dump_contents,
+ 16384, # Set a sufficiently large bufsize to avoid a deadlock
+ "exclude", "branches/B2",
+ "--skip-missing-merge-sources", "--drop-empty-revs")
+
+ # Now load the filtered dump into an empty repository.
+ load_dumpstream(sbox, filtered_dumpfile, '--ignore-uuid')
+
+ # The mergeinfo in the newly loaded repos should have no references to the
+ # dropped branch and the remaining merge source revs should be remapped to
+ # reflect the fact that the loaded repository no longer has any empty
+ # revisions:
+ #
+ # Properties on 'branches/B1':
+ # svn:mergeinfo
+ # /trunk:6,8
+ # ^
+ # With r7 dropped, r9 in the incoming
+ # dump becomes r8 in the loaded repos.
+ #
+ # Properties on 'branches/B1/B/E':
+ # svn:mergeinfo
+ # /trunk/B/E:5-8
+ # ^
+ # With r7 dropped, r8 and r9 in the incoming
+ # dump becomes r7 and r8 in the loaded repos.
+
+ # Check the resulting mergeinfo.
+ url = sbox.repo_url + "/branches"
+ expected_output = svntest.verify.UnorderedOutput([
+ url + "/B1 - /trunk:6,8\n",
+ url + "/B1/B/E - /trunk/B/E:5-8\n"])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'propget', 'svn:mergeinfo', '-R',
+ sbox.repo_url)
+
+#----------------------------------------------------------------------
+def match_empty_prefix(sbox):
+ "svndumpfilter with an empty prefix"
+
+ dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svndumpfilter_tests_data',
+ 'greek_tree.dump')
+ dumpfile = svntest.actions.load_dumpfile(dumpfile_location)
+
+ def test(sbox, dumpfile, *dumpargs):
+ """Run svndumpfilter with DUMPFILE as the input lines, load
+ the result and check it matches EXPECTED_DISK, EXPECTED_OUTPUT,
+ EXPECTED_STATUS."""
+
+ # Filter the Greek tree dump
+ filtered_output, filtered_err = filter_and_return_output(dumpfile, 0,
+ '--quiet',
+ *dumpargs)
+ if filtered_err:
+ raise verify.UnexpectedStderr(filtered_err)
+
+ # Load the filtered dump into a repo and check the result
+ sbox.build(empty=True)
+ load_dumpstream(sbox, filtered_output, '--ignore-uuid')
+ svntest.actions.run_and_verify_update(sbox.wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+ # Test excluding everything
+ expected_disk = svntest.wc.State(sbox.wc_dir, {})
+ expected_output = svntest.wc.State(sbox.wc_dir, {})
+ expected_status = svntest.wc.State(sbox.wc_dir, {
+ '': Item(status=' ', wc_rev=1) })
+
+ test(sbox, dumpfile, 'exclude', '')
+
+ # Test including everything
+ expected_disk = svntest.main.greek_state.copy()
+ expected_output = svntest.main.greek_state.copy().tweak(status='A ')
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+
+ test(sbox, dumpfile, 'include', '', '/A/D/G')
+
+ # Note: We also ought to test the '--pattern' option, including or
+ # excluding a pattern of '*'. However, passing a wildcard parameter
+ # is troublesome on Windows: it may be expanded, depending on whether
+ # the svndumpfilter executable was linked with 'setargv.obj', and there
+ # doesn't seem to be a consistent way to quote such an argument to
+ # prevent expansion.
+
+@Issue(2760)
+def accepts_deltas(sbox):
+ "accepts deltas in the input"
+ # Accept format v3 (as created by 'svnadmin --deltas' or svnrdump).
+
+ sbox.build(empty=True)
+ dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svndumpfilter_tests_data',
+ 'simple_v3.dump')
+ dump_in = svntest.actions.load_dumpfile(dumpfile_location)
+
+ dump_out, err = filter_and_return_output(dump_in, 0, "include",
+ "trunk", "--quiet")
+
+ expected_revs = [
+ svntest.wc.State('', {
+ 'trunk' : svntest.wc.StateItem(props={'soup': 'No soup for you!'}),
+ 'trunk/foo' : svntest.wc.StateItem("This is file 'foo'.\n"),
+ }),
+ svntest.wc.State('', {
+ 'trunk' : svntest.wc.StateItem(props={'soup': 'No soup for you!'}),
+ 'trunk/foo' : svntest.wc.StateItem("This is file 'foo'.\n"),
+ }),
+ svntest.wc.State('', {
+ 'trunk' : svntest.wc.StateItem(props={'story': 'Yada yada yada...'}),
+ 'trunk/foo' : svntest.wc.StateItem("This is file 'foo'.\n"),
+ }),
+ ]
+
+ load_and_verify_dumpstream(sbox, [], [], expected_revs, True, dump_out,
+ '--ignore-uuid')
+
+
+
+@Issue(4234)
+def dumpfilter_targets_expect_leading_slash_prefixes(sbox):
+ "dumpfilter targets expect leading '/' in prefixes"
+ ## See http://subversion.tigris.org/issues/show_bug.cgi?id=4234. ##
+
+ sbox.build(empty=True)
+
+ dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svndumpfilter_tests_data',
+ 'greek_tree.dump')
+ dumpfile = svntest.actions.load_dumpfile(dumpfile_location)
+
+ (fd, targets_file) = tempfile.mkstemp(dir=svntest.main.temp_dir)
+ try:
+ targets = open(targets_file, 'w')
+
+ # Removing the leading slash in path prefixes should work.
+ targets.write('A/D/H\n')
+ targets.write('A/D/G\n')
+ targets.close()
+ _simple_dumpfilter_test(sbox, dumpfile,
+ 'exclude', '/A/B/E', '--targets', targets_file)
+ finally:
+ os.close(fd)
+ os.remove(targets_file)
+
+@Issue(3681)
+def drop_all_empty_revisions(sbox):
+ "drop all empty revisions except revision 0"
+
+ dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svndumpfilter_tests_data',
+ 'empty_revisions.dump')
+ dump_contents = svntest.actions.load_dumpfile(dumpfile_location)
+
+ filtered_dumpfile, filtered_err = filter_and_return_output(
+ dump_contents,
+ 8192, # Set a sufficiently large bufsize to avoid a deadlock
+ "include", "branch1",
+ "--drop-all-empty-revs")
+
+ expected_err = [
+ "Including (and dropping empty revisions for) prefixes:\n",
+ " '/branch1'\n",
+ "\n",
+ "Revision 0 committed as 0.\n",
+ "Revision 1 skipped.\n",
+ "Revision 2 committed as 2.\n",
+ "Revision 3 skipped.\n",
+ "\n",
+ "Dropped 2 revisions.\n",
+ "\n"]
+
+ svntest.verify.verify_outputs(
+ "Actual svndumpfilter stderr does not agree with expected stderr",
+ None, filtered_err, None, expected_err)
+
+ # Test with --renumber-revs option.
+ filtered_dumpfile, filtered_err = filter_and_return_output(
+ dump_contents,
+ 8192, # Set a sufficiently large bufsize to avoid a deadlock
+ "include", "branch1",
+ "--drop-all-empty-revs",
+ "--renumber-revs")
+
+ expected_err = [
+ "Including (and dropping empty revisions for) prefixes:\n",
+ " '/branch1'\n",
+ "\n",
+ "Revision 0 committed as 0.\n",
+ "Revision 1 skipped.\n",
+ "Revision 2 committed as 1.\n",
+ "Revision 3 skipped.\n",
+ "\n",
+ "Dropped 2 revisions.\n",
+ "\n",
+ "Revisions renumbered as follows:\n",
+ " 3 => (dropped)\n",
+ " 2 => 1\n",
+ " 1 => (dropped)\n",
+ " 0 => 0\n",
+ "\n"]
+
+ svntest.verify.verify_outputs(
+ "Actual svndumpfilter stderr does not agree with expected stderr",
+ None, filtered_err, None, expected_err)
+
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ reflect_dropped_renumbered_revs,
+ svndumpfilter_loses_mergeinfo,
+ dumpfilter_with_targets,
+ dumpfilter_with_patterns,
+ filter_mergeinfo_revs_outside_of_dump_stream,
+ dropped_but_not_renumbered_empty_revs,
+ match_empty_prefix,
+ accepts_deltas,
+ dumpfilter_targets_expect_leading_slash_prefixes,
+ drop_all_empty_revisions,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/svndumpfilter_tests_data/empty_revisions.dump b/subversion/tests/cmdline/svndumpfilter_tests_data/empty_revisions.dump
new file mode 100644
index 0000000..bc1df6c
--- /dev/null
+++ b/subversion/tests/cmdline/svndumpfilter_tests_data/empty_revisions.dump
@@ -0,0 +1,94 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 40278d28-80c2-4ce3-9606-68ce4b659d51
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2012-06-24T14:02:12.037632Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 105
+Content-length: 105
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2012-06-24T14:02:13.264066Z
+K 7
+svn:log
+V 4
+init
+PROPS-END
+
+Revision-number: 2
+Prop-content-length: 115
+Content-length: 115
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2012-06-24T14:02:14.070370Z
+K 7
+svn:log
+V 13
+make a branch
+PROPS-END
+
+Node-path: branch1
+Node-kind: dir
+Node-action: add
+Prop-content-length: 41
+Content-length: 41
+
+K 4
+soup
+V 16
+No soup for you!
+PROPS-END
+
+
+Node-path: branch1/foo
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 20
+Text-content-md5: 6f2d0469e1b4e16adf755b7e18f09d8a
+Text-content-sha1: 3df9ea3dfa67b8dea7968ecfd30e726285a2b383
+Content-length: 30
+
+PROPS-END
+This is file 'foo'.
+
+
+Revision-number: 3
+Prop-content-length: 112
+Content-length: 112
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2012-06-24T14:02:15.135672Z
+K 7
+svn:log
+V 10
+prop delta
+PROPS-END
+
diff --git a/subversion/tests/cmdline/svndumpfilter_tests_data/greek_tree.dump b/subversion/tests/cmdline/svndumpfilter_tests_data/greek_tree.dump
new file mode 100644
index 0000000..e9d3915
--- /dev/null
+++ b/subversion/tests/cmdline/svndumpfilter_tests_data/greek_tree.dump
@@ -0,0 +1,248 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 1308cb08-a008-421e-ba63-a96934a572e8
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2008-07-28T22:36:31.457777Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 129
+Content-length: 129
+
+K 7
+svn:log
+V 27
+Log message for revision 1.
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2008-07-28T22:36:31.512108Z
+PROPS-END
+
+Node-path: A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: d1fa4a3ced98961674a441930a51f2d3
+Content-length: 36
+
+PROPS-END
+This is the file 'alpha'.
+
+
+Node-path: A/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 67c756078f24f946f6ec2d00d02f50e1
+Content-length: 35
+
+PROPS-END
+This is the file 'beta'.
+
+
+Node-path: A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 27
+Text-content-md5: 911c7a8d869b8c1e566f57da54d889c6
+Content-length: 37
+
+PROPS-END
+This is the file 'lambda'.
+
+
+Node-path: A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: A/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: A/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: A/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: A/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: A/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: 412138bd677d64cd1c32fafbffe6245d
+Content-length: 36
+
+PROPS-END
+This is the file 'gamma'.
+
+
+Node-path: A/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Content-length: 33
+
+PROPS-END
+This is the file 'mu'.
+
+
+Node-path: iota
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Content-length: 35
+
+PROPS-END
+This is the file 'iota'.
+
+
diff --git a/subversion/tests/cmdline/svndumpfilter_tests_data/mergeinfo_included_partial.dump b/subversion/tests/cmdline/svndumpfilter_tests_data/mergeinfo_included_partial.dump
new file mode 100644
index 0000000..e570f82
--- /dev/null
+++ b/subversion/tests/cmdline/svndumpfilter_tests_data/mergeinfo_included_partial.dump
@@ -0,0 +1,769 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 4fdb8097-d6b7-af4b-b818-c79c3d7082dc
+
+Revision-number: 6
+Prop-content-length: 119
+Content-length: 119
+
+K 7
+svn:log
+V 18
+More work on trunk
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:34:02.734375Z
+PROPS-END
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 27
+Text-content-md5: 911c7a8d869b8c1e566f57da54d889c6
+Text-content-sha1: 784a9298366863da2b65ebf82b4e1123755a2421
+Content-length: 37
+
+PROPS-END
+This is the file 'lambda'.
+
+
+Node-path: trunk/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/B/E/new_alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-content-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+Content-length: 35
+
+PROPS-END
+This is the file 'beta'.
+
+
+Node-path: trunk/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 12
+Text-content-md5: f8a6701de14ec3fcfd9f2fe595e9c9ed
+Text-content-sha1: 8b787bd9293c8b962c7a637a9fdbf627fe68610e
+Content-length: 22
+
+PROPS-END
+new content
+
+
+Node-path: trunk/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 12
+Text-content-md5: f8a6701de14ec3fcfd9f2fe595e9c9ed
+Text-content-sha1: 8b787bd9293c8b962c7a637a9fdbf627fe68610e
+Content-length: 22
+
+PROPS-END
+new content
+
+
+Node-path: trunk/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: trunk/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: trunk/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: trunk/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: trunk/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: trunk/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: branches/B1
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: branches/B1/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: branches/B1/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 27
+Text-content-md5: 911c7a8d869b8c1e566f57da54d889c6
+Text-content-sha1: 784a9298366863da2b65ebf82b4e1123755a2421
+Content-length: 37
+
+PROPS-END
+This is the file 'lambda'.
+
+
+Node-path: branches/B1/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: branches/B1/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: d1fa4a3ced98961674a441930a51f2d3
+Text-content-sha1: b347d1da69df9a6a70433ceeaa0d46c8483e8c03
+Content-length: 36
+
+PROPS-END
+This is the file 'alpha'.
+
+
+Node-path: branches/B1/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-content-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+Content-length: 35
+
+PROPS-END
+This is the file 'beta'.
+
+
+Node-path: branches/B1/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: branches/B1/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 12
+Text-content-md5: f8a6701de14ec3fcfd9f2fe595e9c9ed
+Text-content-sha1: 8b787bd9293c8b962c7a637a9fdbf627fe68610e
+Content-length: 22
+
+PROPS-END
+new content
+
+
+Node-path: branches/B1/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: branches/B1/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: branches/B1/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 12
+Text-content-md5: f8a6701de14ec3fcfd9f2fe595e9c9ed
+Text-content-sha1: 8b787bd9293c8b962c7a637a9fdbf627fe68610e
+Content-length: 22
+
+PROPS-END
+new content
+
+
+Node-path: branches/B1/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: branches/B1/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: branches/B1/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: branches/B1/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: branches/B1/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: branches/B1/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: branches/B1/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: branches/B1/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Revision-number: 7
+Prop-content-length: 138
+Content-length: 138
+
+K 7
+svn:log
+V 37
+Create another branch B2 from trunk@6
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:34:28.500000Z
+PROPS-END
+
+Node-path: branches/B2
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 6
+Node-copyfrom-path: trunk
+
+
+Revision-number: 8
+Prop-content-length: 119
+Content-length: 119
+
+K 7
+svn:log
+V 18
+More work on trunk
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:38:53.468750Z
+PROPS-END
+
+Node-path: trunk/B/E/new_alpha
+Node-kind: file
+Node-action: change
+Text-content-length: 61
+Text-content-md5: ac5f7c1c890095cafdb4e2fa0ff2680b
+Text-content-sha1: dc90ed6c9f5254772c7b17f5e710a7c342623390
+Content-length: 61
+
+This is the file 'beta'.
+this is the new alpha based on beta
+
+
+Revision-number: 9
+Prop-content-length: 119
+Content-length: 119
+
+K 7
+svn:log
+V 18
+More work on trunk
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:39:10.625000Z
+PROPS-END
+
+Node-path: trunk/D/H/chi
+Node-kind: file
+Node-action: change
+Text-content-length: 12
+Text-content-md5: f8a6701de14ec3fcfd9f2fe595e9c9ed
+Text-content-sha1: 8b787bd9293c8b962c7a637a9fdbf627fe68610e
+Content-length: 12
+
+new content
+
+
+Revision-number: 10
+Prop-content-length: 135
+Content-length: 135
+
+K 7
+svn:log
+V 34
+Merge r6 from trunk to branches/B1
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:40:49.015625Z
+PROPS-END
+
+Node-path: branches/B1
+Node-kind: dir
+Node-action: change
+Prop-content-length: 42
+Content-length: 42
+
+K 13
+svn:mergeinfo
+V 8
+/trunk:6
+PROPS-END
+
+
+Node-path: branches/B1/B/E/new_alpha
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 6
+Node-copyfrom-path: trunk/B/E/new_alpha
+Text-copy-source-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-copy-source-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+
+
+Node-path: branches/B1/B/E/beta
+Node-action: delete
+
+
+Revision-number: 11
+Prop-content-length: 135
+Content-length: 135
+
+K 7
+svn:log
+V 34
+Merge r9 from trunk to branches/B2
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:53:44.156250Z
+PROPS-END
+
+Node-path: branches/B2
+Node-kind: dir
+Node-action: change
+Prop-content-length: 42
+Content-length: 42
+
+K 13
+svn:mergeinfo
+V 8
+/trunk:9
+PROPS-END
+
+
+Node-path: branches/B2/D/H/chi
+Node-kind: file
+Node-action: change
+Text-content-length: 12
+Text-content-md5: f8a6701de14ec3fcfd9f2fe595e9c9ed
+Text-content-sha1: 8b787bd9293c8b962c7a637a9fdbf627fe68610e
+Content-length: 12
+
+new content
+
+
+Revision-number: 12
+Prop-content-length: 111
+Content-length: 111
+
+K 7
+svn:log
+V 10
+Work on B2
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:55:17.390625Z
+PROPS-END
+
+Node-path: branches/B2/D/H/chi
+Node-kind: file
+Node-action: change
+Text-content-length: 10
+Text-content-md5: 7fb893eb43ac0ef015b2b95b88628b8c
+Text-content-sha1: a54c7be3471b59cb5be150b1f679c1d2895b00e0
+Content-length: 10
+
+B2 tweaks
+
+
+Revision-number: 13
+Prop-content-length: 132
+Content-length: 132
+
+K 7
+svn:log
+V 31
+Merge r11 and r12 from B2 to B1
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T14:55:51.703125Z
+PROPS-END
+
+Node-path: branches/B1
+Node-kind: dir
+Node-action: change
+Prop-content-length: 64
+Content-length: 64
+
+K 13
+svn:mergeinfo
+V 29
+/branches/B2:11-12
+/trunk:6,9
+PROPS-END
+
+
+Node-path: branches/B1/D/H/chi
+Node-kind: file
+Node-action: change
+Text-content-length: 10
+Text-content-md5: 7fb893eb43ac0ef015b2b95b88628b8c
+Text-content-sha1: a54c7be3471b59cb5be150b1f679c1d2895b00e0
+Content-length: 10
+
+B2 tweaks
+
+
+Revision-number: 14
+Prop-content-length: 143
+Content-length: 143
+
+K 7
+svn:log
+V 42
+Merge r5 from trunk/B/E to branches/B1/B/E
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-22T15:10:22.234375Z
+PROPS-END
+
+Node-path: branches/B1/B/E
+Node-kind: dir
+Node-action: change
+Prop-content-length: 74
+Content-length: 74
+
+K 13
+svn:mergeinfo
+V 39
+/branches/B2/B/E:11-12
+/trunk/B/E:5-6,9
+PROPS-END
+
+
+Node-path: branches/B1/B/E/alpha
+Node-action: delete
+
+
+Revision-number: 15
+Prop-content-length: 143
+Content-length: 143
+
+K 7
+svn:log
+V 42
+Merge r8 from trunk/B/E to branches/B1/B/E
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-03-24T15:00:17.203125Z
+PROPS-END
+
+Node-path: branches/B1/B/E
+Node-kind: dir
+Node-action: change
+Prop-content-length: 76
+Content-length: 76
+
+K 13
+svn:mergeinfo
+V 41
+/branches/B2/B/E:11-12
+/trunk/B/E:5-6,8-9
+PROPS-END
+
+
+Node-path: branches/B1/B/E/new_alpha
+Node-kind: file
+Node-action: change
+Text-content-length: 60
+Text-content-md5: b1738c908160291bb40ef9d1b8c89e82
+Text-content-sha1: ad6df4488978b8e8eade283028ab753791073a76
+Content-length: 60
+
+This is the file 'beta'.
+this is the new alpha based on beta
+
diff --git a/subversion/tests/cmdline/svndumpfilter_tests_data/simple_v3.dump b/subversion/tests/cmdline/svndumpfilter_tests_data/simple_v3.dump
new file mode 100644
index 0000000..d145f74
--- /dev/null
+++ b/subversion/tests/cmdline/svndumpfilter_tests_data/simple_v3.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svndumpfilter_tests_data/with_merges.dump b/subversion/tests/cmdline/svndumpfilter_tests_data/with_merges.dump
new file mode 100644
index 0000000..471e88c
--- /dev/null
+++ b/subversion/tests/cmdline/svndumpfilter_tests_data/with_merges.dump
@@ -0,0 +1,346 @@
+SVN-fs-dump-format-version: 2
+
+UUID: a1b7b7ba-941c-4386-9e40-393dd6d760dd
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2007-11-22T07:36:52.981952Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 117
+Content-length: 117
+
+K 7
+svn:log
+V 14
+Creating trunk
+K 10
+svn:author
+V 8
+stylesen
+K 8
+svn:date
+V 27
+2007-11-22T07:36:53.035734Z
+PROPS-END
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 119
+Content-length: 119
+
+K 7
+svn:log
+V 16
+commit all files
+K 10
+svn:author
+V 8
+stylesen
+K 8
+svn:date
+V 27
+2007-11-22T07:36:54.168549Z
+PROPS-END
+
+Node-path: trunk/1
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 15
+Text-content-md5: 88c16a56754e0f17a93d269ae74dde9b
+Content-length: 25
+
+PROPS-END
+This is file 1
+
+
+Node-path: trunk/2
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 15
+Text-content-md5: db06069ef1c9f40986ffa06db4fe8fd7
+Content-length: 25
+
+PROPS-END
+This is file 2
+
+
+Node-path: trunk/3
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 15
+Text-content-md5: 95227e10e2c33771e1c1379b17330c86
+Content-length: 25
+
+PROPS-END
+This is file 3
+
+
+Node-path: trunk/4
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 15
+Text-content-md5: 601120837709ea5ee34dadddf5289113
+Content-length: 25
+
+PROPS-END
+This is file 4
+
+
+Node-path: trunk/5
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 15
+Text-content-md5: 657c74cf3521f7760f9203ac7d9d2cdb
+Content-length: 25
+
+PROPS-END
+This is file 5
+
+
+Revision-number: 3
+Prop-content-length: 118
+Content-length: 118
+
+K 7
+svn:log
+V 15
+Creating branch
+K 10
+svn:author
+V 8
+stylesen
+K 8
+svn:date
+V 27
+2007-11-22T07:36:55.138144Z
+PROPS-END
+
+Node-path: branch1
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: trunk
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 4
+Prop-content-length: 118
+Content-length: 118
+
+K 7
+svn:log
+V 15
+Creating branch
+K 10
+svn:author
+V 8
+stylesen
+K 8
+svn:date
+V 27
+2007-11-22T07:36:55.174720Z
+PROPS-END
+
+Node-path: branch2
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 3
+Node-copyfrom-path: trunk
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 5
+Prop-content-length: 116
+Content-length: 116
+
+K 7
+svn:log
+V 13
+commit change
+K 10
+svn:author
+V 8
+stylesen
+K 8
+svn:date
+V 27
+2007-11-22T07:36:56.137189Z
+PROPS-END
+
+Node-path: branch1/1
+Node-kind: file
+Node-action: change
+Text-content-length: 39
+Text-content-md5: 4769ac569ba7b962d2d2bf95c90f80b4
+Content-length: 39
+
+This is file 1
+This is file 1 modified
+
+
+Revision-number: 6
+Prop-content-length: 116
+Content-length: 116
+
+K 7
+svn:log
+V 13
+commit change
+K 10
+svn:author
+V 8
+stylesen
+K 8
+svn:date
+V 27
+2007-11-22T07:36:57.144314Z
+PROPS-END
+
+Node-path: branch1/2
+Node-kind: file
+Node-action: change
+Text-content-length: 39
+Text-content-md5: e4f2deb441ef67de82f3aefc6aef255e
+Content-length: 39
+
+This is file 2
+This is file 2 modified
+
+
+Revision-number: 7
+Prop-content-length: 116
+Content-length: 116
+
+K 7
+svn:log
+V 13
+commit change
+K 10
+svn:author
+V 8
+stylesen
+K 8
+svn:date
+V 27
+2007-11-22T07:36:58.146184Z
+PROPS-END
+
+Node-path: branch2/1
+Node-kind: file
+Node-action: change
+Text-content-length: 39
+Text-content-md5: 4769ac569ba7b962d2d2bf95c90f80b4
+Content-length: 39
+
+This is file 1
+This is file 1 modified
+
+
+Revision-number: 8
+Prop-content-length: 116
+Content-length: 116
+
+K 7
+svn:log
+V 13
+commit change
+K 10
+svn:author
+V 8
+stylesen
+K 8
+svn:date
+V 27
+2007-11-22T07:36:59.148564Z
+PROPS-END
+
+Node-path: branch2/2
+Node-kind: file
+Node-action: change
+Text-content-length: 39
+Text-content-md5: e4f2deb441ef67de82f3aefc6aef255e
+Content-length: 39
+
+This is file 2
+This is file 2 modified
+
+
+Revision-number: 9
+Prop-content-length: 116
+Content-length: 116
+
+K 7
+svn:log
+V 13
+commit change
+K 10
+svn:author
+V 8
+stylesen
+K 8
+svn:date
+V 27
+2007-11-22T07:37:02.140253Z
+PROPS-END
+
+Node-path: trunk
+Node-kind: dir
+Node-action: change
+Prop-content-length: 47
+Content-length: 47
+
+K 13
+svn:mergeinfo
+V 12
+/branch1:4-8
+PROPS-END
+
+
+Node-path: trunk/1
+Node-kind: file
+Node-action: change
+Text-content-length: 39
+Text-content-md5: 4769ac569ba7b962d2d2bf95c90f80b4
+Content-length: 39
+
+This is file 1
+This is file 1 modified
+
+
+Node-path: trunk/2
+Node-kind: file
+Node-action: change
+Text-content-length: 39
+Text-content-md5: e4f2deb441ef67de82f3aefc6aef255e
+Content-length: 39
+
+This is file 2
+This is file 2 modified
+
+
diff --git a/subversion/tests/cmdline/svneditor.bat b/subversion/tests/cmdline/svneditor.bat
new file mode 100644
index 0000000..36e033d
--- /dev/null
+++ b/subversion/tests/cmdline/svneditor.bat
@@ -0,0 +1,26 @@
+@echo off
+rem
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one
+rem or more contributor license agreements. See the NOTICE file
+rem distributed with this work for additional information
+rem regarding copyright ownership. The ASF licenses this file
+rem to you under the Apache License, Version 2.0 (the
+rem "License"); you may not use this file except in compliance
+rem with the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing,
+rem software distributed under the License is distributed on an
+rem "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+rem KIND, either express or implied. See the License for the
+rem specific language governing permissions and limitations
+rem under the License.
+rem
+rem
+rem the svneditor.py script is expected to be in the same directory as the
+rem .bat file
+rem SVN_TEST_PYTHON set by svntest/main.py
+"%SVN_TEST_PYTHON%" "%~dp0\svneditor.py" %*
+exit %ERRORLEVEL% \ No newline at end of file
diff --git a/subversion/tests/cmdline/svneditor.py b/subversion/tests/cmdline/svneditor.py
new file mode 100755
index 0000000..7f59987
--- /dev/null
+++ b/subversion/tests/cmdline/svneditor.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+#
+# svneditor.py: a mock $SVN_EDITOR for the Subversion test suite
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+import sys
+import os
+
+def main():
+ if len(sys.argv) not in [2, 6]:
+ print("usage: svneditor.py file")
+ print(" svneditor.py base theirs mine merged wc_path")
+ print("arguments passed were: %s" % sys.argv)
+ sys.exit(1)
+
+ if len(sys.argv) == 2:
+ filename = sys.argv[1]
+ elif len(sys.argv) == 6:
+ filename = sys.argv[4]
+
+ # Read in the input file.
+ f = open(filename)
+ contents = f.read()
+ f.close()
+
+ funcname = os.environ['SVNTEST_EDITOR_FUNC']
+ func = sys.modules['__main__'].__dict__[funcname]
+
+ # Run the conversion.
+ contents = func(contents)
+
+ # Write edited version back to the file.
+ f = open(filename, 'w')
+ f.write(contents)
+ f.close()
+ return check_conflicts(contents)
+
+def check_conflicts(contents):
+ markers = ['<<<<<<<', '=======', '>>>>>>>']
+ found = 0
+ for line in contents.split('\n'):
+ for marker in markers:
+ if line.startswith(marker):
+ found = found + 1
+ return found >= 3
+
+def foo_to_bar(m):
+ return m.replace('foo', 'bar')
+
+def append_foo(m):
+ return m + 'foo\n'
+
+def identity(m):
+ return m
+
+exitcode = main()
+sys.exit(exitcode)
diff --git a/subversion/tests/cmdline/svnfsfs_tests.py b/subversion/tests/cmdline/svnfsfs_tests.py
new file mode 100755
index 0000000..e1fd73d
--- /dev/null
+++ b/subversion/tests/cmdline/svnfsfs_tests.py
@@ -0,0 +1,338 @@
+#!/usr/bin/env python
+#
+# svnfsfs_tests.py: testing the 'svnfsfs' tool.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os
+import logging
+import re
+import shutil
+import sys
+import threading
+import time
+import gzip
+
+logger = logging.getLogger()
+
+# Our testing module
+import svntest
+from svntest.verify import SVNExpectedStdout, SVNExpectedStderr
+from svntest.verify import SVNUnexpectedStderr
+from svntest.verify import UnorderedOutput
+from svntest.main import SVN_PROP_MERGEINFO
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+SkipDumpLoadCrossCheck = svntest.testcase.SkipDumpLoadCrossCheck_deco
+Item = svntest.wc.StateItem
+
+#----------------------------------------------------------------------
+
+# How we currently test 'svnfsfs' --
+#
+# 'svnfsfs stats': Run this on a greek repo, then verify that the
+# various sections are present. The section contents
+# is matched against section-specific patterns.
+#
+# 'svnfsfs dump-index': Tested implicitly by the load-index test
+#
+# 'svnfsfs load-index': Create a greek repo but set shard to 2 and pack
+# it so we can load into a packed shard with more
+# than one revision to test ordering issues etc.
+# r1 also contains a non-trival number of items such
+# that parser issues etc. have a chance to surface.
+#
+# The idea is dump the index of the pack, mess with
+# it to cover lots of UI guarantees but keep the
+# semantics of the relevant bits. Then feed it back
+# to load-index and verify that the result is still
+# a complete, consistent etc. repo.
+#
+######################################################################
+# Helper routines
+
+def patch_format(repo_dir, shard_size):
+ """Rewrite the format of the FSFS repository REPO_DIR so
+ that it would use sharding with SHARDS revisions per shard."""
+
+ format_path = os.path.join(repo_dir, "db", "format")
+ contents = open(format_path, 'rb').read()
+ processed_lines = []
+
+ for line in contents.split(b"\n"):
+ if line.startswith(b"layout "):
+ processed_lines.append(b"layout sharded %d" % shard_size)
+ else:
+ processed_lines.append(line)
+
+ new_contents = b"\n".join(processed_lines)
+ os.chmod(format_path, svntest.main.S_ALL_RW)
+ open(format_path, 'wb').write(new_contents)
+
+######################################################################
+# Tests
+
+#----------------------------------------------------------------------
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+def test_stats(sbox):
+ "stats output"
+
+ sbox.build(create_wc=False)
+
+ exit_code, output, errput = \
+ svntest.actions.run_and_verify_svnfsfs(None, [], 'stats', sbox.repo_dir)
+
+ # split output into sections
+ sections = { }
+
+ last_line = ''
+ section_name = ''
+ section_contents = []
+ for line in output:
+ line = line.rstrip()
+ if line != '':
+
+ # If the first character is not a space, then LINE is a section header
+ if line[0] == ' ':
+ section_contents.append(line)
+ else:
+
+ # Store previous section
+ if section_name != '':
+ sections[section_name] = section_contents
+
+ # Is the output formatted nicely?
+ if last_line != '':
+ logger.warn("Error: no empty line before section '" + line + "'")
+ raise svntest.Failure
+
+ # start new section
+ section_name = line
+ section_contents = []
+
+ last_line = line
+
+ sections[section_name] = section_contents
+
+ # verify that these sections exist
+ sections_to_find = ['Reading revisions',
+ 'Global statistics:',
+ 'Noderev statistics:',
+ 'Representation statistics:',
+ 'Directory representation statistics:',
+ 'File representation statistics:',
+ 'Directory property representation statistics:',
+ 'File property representation statistics:',
+ 'Largest representations:',
+ 'Extensions by number of representations:',
+ 'Extensions by size of changed files:',
+ 'Extensions by size of representations:',
+ 'Histogram of expanded node sizes:',
+ 'Histogram of representation sizes:',
+ 'Histogram of file sizes:',
+ 'Histogram of file representation sizes:',
+ 'Histogram of file property sizes:',
+ 'Histogram of file property representation sizes:',
+ 'Histogram of directory sizes:',
+ 'Histogram of directory representation sizes:',
+ 'Histogram of directory property sizes:',
+ 'Histogram of directory property representation sizes:']
+ patterns_to_find = {
+ 'Reading revisions' : ['\s+ 0[ 0-9]*'],
+ 'Global .*' : ['.*\d+ bytes in .*\d+ revisions',
+ '.*\d+ bytes in .*\d+ changes',
+ '.*\d+ bytes in .*\d+ node revision records',
+ '.*\d+ bytes in .*\d+ representations',
+ '.*\d+ bytes expanded representation size',
+ '.*\d+ bytes with rep-sharing off' ],
+ 'Noderev .*' : ['.*\d+ bytes in .*\d+ nodes total',
+ '.*\d+ bytes in .*\d+ directory noderevs',
+ '.*\d+ bytes in .*\d+ file noderevs' ],
+ 'Representation .*' : ['.*\d+ bytes in .*\d+ representations total',
+ '.*\d+ bytes in .*\d+ directory representations',
+ '.*\d+ bytes in .*\d+ file representations',
+ '.*\d+ bytes in .*\d+ representations of added file nodes',
+ '.*\d+ bytes in .*\d+ directory property representations',
+ '.*\d+ bytes in .*\d+ file property representations',
+ '.*\d+ average delta chain length',
+ '.*\d+ bytes in header & footer overhead' ],
+ '.* representation statistics:' :
+ ['.*\d+ bytes in .*\d+ reps',
+ '.*\d+ bytes in .*\d+ shared reps',
+ '.*\d+ bytes expanded size',
+ '.*\d+ bytes expanded shared size',
+ '.*\d+ bytes with rep-sharing off',
+ '.*\d+ shared references',
+ '.*\d+ average delta chain length'],
+ 'Largest.*:' : ['.*\d+ r\d+ */\S*'],
+ 'Extensions by number .*:' :
+ ['.*\d+ \( ?\d+%\) representations'],
+ 'Extensions by size .*:' :
+ ['.*\d+ \( ?\d+%\) bytes'],
+ 'Histogram of .*:' : ['.*\d+ \.\. < \d+.*\d+ \( ?\d+%\) bytes in *\d+ \( ?\d+%\) items']
+ }
+
+ # check that the output contains all sections
+ for section_name in sections_to_find:
+ if not section_name in sections.keys():
+ logger.warn("Error: section '" + section_name + "' not found")
+ raise svntest.Failure
+
+ # check section contents
+ for section_name in sections.keys():
+ patterns = []
+
+ # find the suitable patterns for the current section
+ for pattern in patterns_to_find.keys():
+ if re.match(pattern, section_name):
+ patterns = patterns_to_find[pattern]
+ break;
+
+ if len(patterns) == 0:
+ logger.warn("Error: unexpected section '" + section_name + "' found'")
+ logger.warn(sections[section_name])
+ raise svntest.Failure
+
+ # each line in the section must match one of the patterns
+ for line in sections[section_name]:
+ found = False
+
+ for pattern in patterns:
+ if re.match(pattern, line):
+ found = True
+ break
+
+ if not found:
+ logger.warn("Error: unexpected line '" + line + "' in section '"
+ + section_name + "'")
+ logger.warn(sections[section_name])
+ raise svntest.Failure
+
+#----------------------------------------------------------------------
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+@SkipUnless(svntest.main.fs_has_pack)
+@SkipUnless(svntest.main.is_fs_log_addressing)
+def load_index_sharded(sbox):
+ "load-index in a packed repo"
+
+ # Configure two files per shard to trigger packing.
+ sbox.build(create_wc=False)
+ patch_format(sbox.repo_dir, shard_size=2)
+
+ expected_output = ["Packing revisions in shard 0...done.\n"]
+ svntest.actions.run_and_verify_svnadmin(expected_output, [],
+ "pack", sbox.repo_dir)
+
+ # Read P2L index using svnfsfs.
+ exit_code, items, errput = \
+ svntest.actions.run_and_verify_svnfsfs(None, [], "dump-index", "-r0",
+ sbox.repo_dir)
+
+ # load-index promises to deal with input that
+ #
+ # * uses the same encoding as the dump-index output
+ # * is not in ascending item offset order
+ # * contains lines with the full table header
+ # * invalid or incorrect data in the checksum column and beyond
+ # * starts with an item which does not belong to the first revision
+ # in the pack file
+ #
+ # So, let's mess with the ITEMS list to call in on these promises.
+
+ # not in ascending order
+ items.reverse()
+
+ # multiple headers (there is already one now at the bottom)
+ items.insert(0, " Start Length Type Revision Item Checksum\n")
+
+ # make columns have a variable size
+ # mess with the checksums
+ # add a junk column
+ # keep header lines as are
+ for i in range(0, len(items)):
+ if items[i].find("Start") == -1:
+ columns = items[i].split()
+ columns[5] = columns[5].replace('f','-').replace('0','9')
+ columns.append("junk")
+ items[i] = ' '.join(columns) + "\n"
+
+ # first entry shall be for rev 1, pack starts at rev 0, though
+ for i in range(0, len(items)):
+ if items[i].split()[3] == "1":
+ if i != 1:
+ items[i],items[1] = items[1],items[i]
+ break
+
+ assert(items[1].split()[3] == "1")
+
+ # The STDIN data must be binary.
+ items = svntest.main.ensure_list(map(str.encode, items))
+
+ # Reload the index
+ exit_code, output, errput = svntest.main.run_command_stdin(
+ svntest.main.svnfsfs_binary, [], 0, False, items,
+ "load-index", sbox.repo_dir)
+
+ # Run verify to see whether we broke anything.
+ expected_output = ["* Verifying metadata at revision 0 ...\n",
+ "* Verifying repository metadata ...\n",
+ "* Verified revision 0.\n",
+ "* Verified revision 1.\n"]
+ svntest.actions.run_and_verify_svnadmin(expected_output, [],
+ "verify", sbox.repo_dir)
+
+@SkipUnless(svntest.main.is_fs_type_fsfs)
+def test_stats_on_empty_repo(sbox):
+ "stats on empty repo shall not crash"
+
+ sbox.build(create_wc=False, empty=True)
+
+ exit_code, output, errput = \
+ svntest.actions.run_and_verify_svnfsfs(None, [], 'stats', sbox.repo_dir)
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ test_stats,
+ load_index_sharded,
+ test_stats_on_empty_repo,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/svnlook_tests.py b/subversion/tests/cmdline/svnlook_tests.py
new file mode 100755
index 0000000..4315168
--- /dev/null
+++ b/subversion/tests/cmdline/svnlook_tests.py
@@ -0,0 +1,748 @@
+#!/usr/bin/env python
+#
+# svnlook_tests.py: testing the 'svnlook' tool.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import re, os, logging
+
+logger = logging.getLogger()
+
+# Our testing module
+import svntest
+
+from prop_tests import binary_mime_type_on_text_file_warning
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+
+#----------------------------------------------------------------------
+
+# Convenience functions to make writing more tests easier
+
+def run_svnlook(*varargs):
+ """Run svnlook with VARARGS, returns exit code as int; stdout, stderr as
+ list of lines (including line terminators).
+ Raises Failure if any stderr messages.
+ """
+ exit_code, output, dummy_errput = svntest.main.run_command(
+ svntest.main.svnlook_binary, 0, False, *varargs)
+
+ return output
+
+
+def expect(tag, expected, got):
+ if expected != got:
+ logger.warn("When testing: %s", tag)
+ logger.warn("Expected: %s", expected)
+ logger.warn(" Got: %s", got)
+ raise svntest.Failure
+
+
+# Tests
+
+def test_misc(sbox):
+ "test miscellaneous svnlook features"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_dir = sbox.repo_dir
+
+ # Make a couple of local mods to files
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+ rho_path = os.path.join(wc_dir, 'A', 'D', 'G', 'rho')
+ svntest.main.file_append(mu_path, 'appended mu text')
+ svntest.main.file_append(rho_path, 'new appended text for rho')
+
+ # Created expected output tree for 'svn ci'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ 'A/D/G/rho' : Item(verb='Sending'),
+ })
+
+ # Create expected status tree; all local revisions should be at 1,
+ # but mu and rho should be at revision 2.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', 'A/D/G/rho', wc_rev=2)
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # give the repo a new UUID
+ uuid = b"01234567-89ab-cdef-89ab-cdef01234567"
+ svntest.main.run_command_stdin(svntest.main.svnadmin_binary, None, 0, True,
+ [b"SVN-fs-dump-format-version: 2\n",
+ b"\n",
+ b"UUID: ", uuid, b"\n",
+ ],
+ 'load', '--force-uuid', repo_dir)
+
+ expect('youngest', [ '2\n' ], run_svnlook('youngest', repo_dir))
+
+ expect('uuid', [ uuid.decode() + '\n' ], run_svnlook('uuid', repo_dir))
+
+ # it would be nice to test the author too, but the current test framework
+ # does not pull a username when testing over ra_neon or ra_svn,
+ # so the commits have an empty author.
+
+ expect('log', [ 'log msg\n' ], run_svnlook('log', repo_dir))
+
+ # check if the 'svnlook tree' output can be expanded to
+ # the 'svnlook tree --full-paths' output if demanding the whole repository
+ treelist = run_svnlook('tree', repo_dir)
+ treelistfull = run_svnlook('tree', '--full-paths', repo_dir)
+
+ path = ''
+ treelistexpand = []
+ for entry in treelist:
+ len1 = len(entry)
+ len2 = len(entry.lstrip())
+ path = path[0:2*(len1-len2)-1] + entry.strip() + '\n'
+ if path == '/\n':
+ treelistexpand.append(path)
+ else:
+ treelistexpand.append(path[1:])
+
+ treelistexpand = svntest.verify.UnorderedOutput(treelistexpand)
+ svntest.verify.compare_and_display_lines('Unexpected result from tree', '',
+ treelistexpand, treelistfull)
+
+ # check if the 'svnlook tree' output is the ending of
+ # the 'svnlook tree --full-paths' output if demanding
+ # any part of the repository
+ treelist = run_svnlook('tree', repo_dir, '/A/B')
+ treelistfull = run_svnlook('tree', '--full-paths', repo_dir, '/A/B')
+
+ path = ''
+ treelistexpand = []
+ for entry in treelist:
+ len1 = len(entry)
+ len2 = len(entry.lstrip())
+ path = path[0:2*(len1-len2)] + entry.strip() + '\n'
+ treelistexpand.append('/A/' + path)
+
+ treelistexpand = svntest.verify.UnorderedOutput(treelistexpand)
+ svntest.verify.compare_and_display_lines('Unexpected result from tree', '',
+ treelistexpand, treelistfull)
+
+ treelist = run_svnlook('tree', repo_dir, '/')
+ if treelist[0] != '/\n':
+ raise svntest.Failure
+
+ expect('propget svn:log', [ 'log msg' ],
+ run_svnlook('propget', '--revprop', repo_dir, 'svn:log'))
+
+
+ proplist = run_svnlook('proplist', '--revprop', repo_dir)
+ proplist = sorted([prop.strip() for prop in proplist])
+
+ # We cannot rely on svn:author's presence. ra_svn doesn't set it.
+ if not (proplist == [ 'svn:author', 'svn:date', 'svn:log' ]
+ or proplist == [ 'svn:date', 'svn:log' ]):
+ logger.warn("Unexpected result from proplist: %s", proplist)
+ raise svntest.Failure
+
+ prop_name = 'foo:bar-baz-quux'
+ exit_code, output, errput = svntest.main.run_svnlook('propget',
+ '--revprop', repo_dir,
+ prop_name)
+
+ expected_err = "Property '%s' not found on revision " % prop_name
+ for line in errput:
+ if line.find(expected_err) != -1:
+ break
+ else:
+ raise svntest.main.SVNUnmatchedError
+
+ exit_code, output, errput = svntest.main.run_svnlook('propget',
+ '-r1', repo_dir,
+ prop_name, '/')
+
+ expected_err = "Property '%s' not found on path '/' in revision " % prop_name
+ for line in errput:
+ if line.find(expected_err) != -1:
+ break
+ else:
+ raise svntest.main.SVNUnmatchedError
+
+#----------------------------------------------------------------------
+# Issue 1089
+@Issue(1089)
+def delete_file_in_moved_dir(sbox):
+ "delete file in moved dir"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_dir = sbox.repo_dir
+
+ # move E to E2 and delete E2/alpha
+ E_path = os.path.join(wc_dir, 'A', 'B', 'E')
+ E2_path = os.path.join(wc_dir, 'A', 'B', 'E2')
+ svntest.actions.run_and_verify_svn(None, [], 'mv', E_path, E2_path)
+ alpha_path = os.path.join(E2_path, 'alpha')
+ svntest.actions.run_and_verify_svn(None, [], 'rm', alpha_path)
+
+ # commit
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(verb='Deleting'),
+ 'A/B/E2' : Item(verb='Adding'),
+ 'A/B/E2/alpha' : Item(verb='Deleting'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta')
+ expected_status.add({
+ 'A/B/E2' : Item(status=' ', wc_rev=2),
+ 'A/B/E2/beta' : Item(status=' ', wc_rev=2),
+ })
+ ### this commit fails. the 'alpha' node is marked 'not-present' since it
+ ### is a deleted child of a move/copy. this is all well and proper.
+ ### however, during the commit, the parent node is committed down to just
+ ### the BASE node. at that point, 'alpha' has no parent in WORKING which
+ ### is a schema violation. there is a plan for committing in this kind of
+ ### situation, layed out in wc-ng-design. that needs to be implemented
+ ### in order to get this commit working again.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ exit_code, output, errput = svntest.main.run_svnlook("dirs-changed",
+ repo_dir)
+ if errput:
+ raise svntest.Failure
+
+ # Okay. No failure, but did we get the right output?
+ if len(output) != 2:
+ raise svntest.Failure
+ if not ((output[0].strip() == 'A/B/')
+ and (output[1].strip() == 'A/B/E2/')):
+ raise svntest.Failure
+
+
+#----------------------------------------------------------------------
+# Issue 1241
+@Issue(1241)
+def test_print_property_diffs(sbox):
+ "test the printing of property diffs"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_dir = sbox.repo_dir
+
+ # Add a bogus property to iota
+ iota_path = os.path.join(wc_dir, 'iota')
+ svntest.actions.run_and_verify_svn(None, [], 'propset',
+ 'bogus_prop', 'bogus_val', iota_path)
+
+ # commit the change
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg', iota_path)
+
+ # Grab the diff
+ exit_code, expected_output, err = svntest.actions.run_and_verify_svn(
+ None, [], 'diff', '-r', 'PREV', iota_path)
+
+ exit_code, output, errput = svntest.main.run_svnlook("diff", repo_dir)
+ if errput:
+ raise svntest.Failure
+
+ # Okay. No failure, but did we get the right output?
+ if len(output) != len(expected_output):
+ raise svntest.Failure
+
+ canonical_iota_path = iota_path.replace(os.path.sep, '/')
+
+ # replace wcdir/iota with iota in expected_output
+ for i in range(len(expected_output)):
+ expected_output[i] = expected_output[i].replace(canonical_iota_path,
+ 'iota')
+
+ # Check that the header filenames match.
+ if expected_output[2].split()[1] != output[2].split()[1]:
+ raise svntest.Failure
+ if expected_output[3].split()[1] != output[3].split()[1]:
+ raise svntest.Failure
+
+ svntest.verify.compare_and_display_lines('', '',
+ expected_output[4:],
+ output[4:])
+
+#----------------------------------------------------------------------
+# Check that svnlook info repairs allows inconsistent line endings in logs.
+
+def info_bad_newlines(sbox):
+ "svnlook info must allow inconsistent newlines"
+
+ dump_str = b"""SVN-fs-dump-format-version: 2
+
+UUID: dc40867b-38f6-0310-9f5f-f81aa277e06e
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2005-05-03T19:09:41.129900Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 99
+Content-length: 99
+
+K 7
+svn:log
+V 3
+\n\r\n
+K 10
+svn:author
+V 2
+pl
+K 8
+svn:date
+V 27
+2005-05-03T19:10:19.975578Z
+PROPS-END
+
+Node-path: file
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 5
+Text-content-md5: e1cbb0c3879af8347246f12c559a86b5
+Content-length: 15
+
+PROPS-END
+text
+
+
+"""
+
+ # load dumpfile with inconsistent newlines into repos.
+ svntest.actions.load_repo(sbox, dump_str=dump_str,
+ bypass_prop_validation=True)
+
+ exit_code, output, errput = svntest.main.run_svnlook("info",
+ sbox.repo_dir, "-r1")
+ if errput:
+ raise svntest.Failure
+
+# Verify 'svnlook changed' output for a copied path: a plain add by
+# default, and an 'A +' plus a copyfrom line when --copy-info is given.
+def changed_copy_info(sbox):
+  "test --copy-info flag on the changed command"
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  repo_dir = sbox.repo_dir
+
+  # Copy alpha to /A/alpha2.
+  E_path = os.path.join(wc_dir, 'A', 'B', 'E')
+  alpha_path = os.path.join(wc_dir, 'A', 'B', 'E', 'alpha')
+  alpha2_path = os.path.join(wc_dir, 'A', 'alpha2')
+  svntest.actions.run_and_verify_svn(None, [], 'cp', alpha_path,
+                                     alpha2_path)
+
+  # Commit the copy as r2.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/alpha2' : Item(verb='Adding'),
+    })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'A/alpha2' : Item(status=' ', wc_rev=2),
+    })
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status)
+
+  # Without --copy-info, the copy is reported as a plain add.
+  exit_code, output, errput = svntest.main.run_svnlook("changed", repo_dir)
+  if errput:
+    raise svntest.Failure
+
+  expect("changed without --copy-info", ["A A/alpha2\n"], output)
+
+  # With --copy-info, the add is flagged '+' and the copy source follows.
+  exit_code, output, errput = svntest.main.run_svnlook("changed",
+                                                       repo_dir, "--copy-info")
+  if errput:
+    raise svntest.Failure
+
+  expect("changed with --copy-info",
+         ["A + A/alpha2\n",
+          " (from A/B/E/alpha:r1)\n"],
+         output)
+
+#----------------------------------------------------------------------
+# Issue 2663
+@Issue(2663)
+def tree_non_recursive(sbox):
+  "test 'svnlook tree --non-recursive'"
+
+  sbox.build()
+  repo_dir = sbox.repo_dir
+
+  # Immediate children only; directories are listed with a trailing '/'.
+  expected_results_root = ('/', ' iota', ' A/')
+  expected_results_deep = ('B/', ' lambda', ' E/', ' F/')
+
+  # check the output of svnlook --non-recursive on the
+  # root of the repository
+  treelist = run_svnlook('tree', '--non-recursive', repo_dir)
+  for entry in treelist:
+    if not entry.rstrip() in expected_results_root:
+      logger.warn("Unexpected result from tree with --non-recursive:")
+      logger.warn(" entry : %s", entry.rstrip())
+      raise svntest.Failure
+  # Also catch missing entries: counts must match exactly.
+  if len(treelist) != len(expected_results_root):
+    logger.warn("Expected %i output entries, found %i",
+                len(expected_results_root), len(treelist))
+    raise svntest.Failure
+
+  # check the output of svnlook --non-recursive on a
+  # subdirectory of the repository
+  treelist = run_svnlook('tree', '--non-recursive', repo_dir, '/A/B')
+  for entry in treelist:
+    if not entry.rstrip() in expected_results_deep:
+      logger.warn("Unexpected result from tree with --non-recursive:")
+      logger.warn(" entry : %s", entry.rstrip())
+      raise svntest.Failure
+  if len(treelist) != len(expected_results_deep):
+    logger.warn("Expected %i output entries, found %i",
+                len(expected_results_deep), len(treelist))
+    raise svntest.Failure
+
+#----------------------------------------------------------------------
+def limit_history(sbox):
+  "history --limit"
+  # Two move commits on top of r1 give multiple history items for the
+  # repository root; --limit=1 must truncate the output to one item.
+  sbox.build(create_wc=False)
+  repo_url = sbox.repo_url
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'mv', '-m', 'log msg',
+                                     repo_url + "/iota", repo_url + "/iota2")
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'mv', '-m', 'log msg',
+                                     repo_url + "/A/mu", repo_url + "/iota")
+  history = run_svnlook("history", "--limit=1", sbox.repo_dir)
+  # Ignore the two lines of header, and verify expected number of items.
+  if len(history[2:]) != 1:
+    raise svntest.Failure("Output not limited to expected number of items")
+
+#----------------------------------------------------------------------
+def diff_ignore_whitespace(sbox):
+  "test 'svnlook diff -x -b' and 'svnlook diff -x -w'"
+
+  sbox.build()
+  repo_dir = sbox.repo_dir
+  wc_dir = sbox.wc_dir
+
+  # Make whitespace-only changes to mu
+  mu_path = os.path.join(wc_dir, 'A', 'mu')
+  svntest.main.file_write(mu_path, "This is the file 'mu'.\n", "wb")
+
+  # Created expected output tree for 'svn ci'
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/mu' : Item(verb='Sending'),
+    })
+
+  # Create expected status tree; all local revisions should be at 1,
+  # but mu should be at revision 2.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/mu', wc_rev=2)
+
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output,
+                                        expected_status)
+
+  # Check the output of 'svnlook diff -x --ignore-space-change' on mu.
+  # It should not print anything.
+  output = run_svnlook('diff', '-r2', '-x', '--ignore-space-change',
+                       repo_dir)
+  if output != []:
+    raise svntest.Failure
+
+  # Check the output of 'svnlook diff -x --ignore-all-space' on mu.
+  # It should not print anything.
+  output = run_svnlook('diff', '-r2', '-x', '--ignore-all-space',
+                       repo_dir)
+  if output != []:
+    raise svntest.Failure
+
+#----------------------------------------------------------------------
+def diff_ignore_eolstyle(sbox):
+  "test 'svnlook diff -x --ignore-eol-style'"
+
+  sbox.build()
+  repo_dir = sbox.repo_dir
+  wc_dir = sbox.wc_dir
+
+  # CRLF is a string that will match a CRLF sequence read from a text file.
+  # ### On Windows, we assume CRLF will be read as LF, so it's a poor test.
+  if os.name == 'nt':
+    crlf = '\n'
+  else:
+    crlf = '\r\n'
+
+  mu_path = os.path.join(wc_dir, 'A', 'mu')
+
+  rev = 1
+  # do the --ignore-eol-style test for each eol-style
+  for eol, eolchar in zip(['CRLF', 'CR', 'native', 'LF'],
+                          [crlf, '\015', '\n', '\012']):
+    # rewrite file mu and set the eol-style property.
+    svntest.main.file_write(mu_path, "This is the file 'mu'." + eolchar, 'wb')
+    svntest.main.run_svn(None, 'propset', 'svn:eol-style', eol, mu_path)
+
+    # Created expected output tree for 'svn ci'
+    expected_output = svntest.wc.State(wc_dir, {
+      'A/mu' : Item(verb='Sending'),
+      })
+
+    # Create expected status tree; all local revisions should be at
+    # revision 1, but mu should be at revision rev + 1.
+    expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+    expected_status.tweak('A/mu', wc_rev=rev + 1)
+
+    svntest.actions.run_and_verify_commit(wc_dir,
+                                          expected_output,
+                                          expected_status)
+
+    # Grab the reference diff as 'svn diff' produces it.
+    exit_code, expected_output, err = svntest.actions.run_and_verify_svn(
+      None, [],
+      'diff', '-r', 'PREV', '-x', '--ignore-eol-style', mu_path)
+
+    # The svnlook diff of the same change.
+    output = run_svnlook('diff', '-r', str(rev + 1), '-x',
+                         '--ignore-eol-style', repo_dir)
+    rev += 1
+
+    # 'svn diff' prints paths with '/' separators.
+    canonical_mu_path = mu_path.replace(os.path.sep, '/')
+
+    # replace wcdir/A/mu with A/mu in expected_output
+    for i in range(len(expected_output)):
+      expected_output[i] = expected_output[i].replace(canonical_mu_path,
+                                                      'A/mu')
+
+    # Check that the header filenames match.
+    if expected_output[2].split()[1] != output[2].split()[1]:
+      raise svntest.Failure
+    if expected_output[3].split()[1] != output[3].split()[1]:
+      raise svntest.Failure
+
+    # Everything past the header must match exactly.
+    svntest.verify.compare_and_display_lines('', '',
+                                             expected_output[4:],
+                                             output[4:])
+
+
+#----------------------------------------------------------------------
+def diff_binary(sbox):
+  "test 'svnlook diff' on binary files"
+
+  sbox.build()
+  repo_dir = sbox.repo_dir
+  wc_dir = sbox.wc_dir
+
+  # Set A/mu to a binary mime-type, tweak its text, and commit.
+  mu_path = os.path.join(wc_dir, 'A', 'mu')
+  svntest.main.file_append(mu_path, 'new appended text for mu')
+  svntest.main.run_svn(binary_mime_type_on_text_file_warning,
+                       'propset', 'svn:mime-type',
+                       'application/octet-stream', mu_path)
+  svntest.main.run_svn(None, 'ci', '-m', 'log msg', mu_path)
+
+  # Now run 'svnlook diff' and look for the "Binary files differ" message.
+  output = run_svnlook('diff', repo_dir)
+  if not "(Binary files differ)\n" in output:
+    raise svntest.Failure("No 'Binary files differ' indication in "
+                          "'svnlook diff' output.")
+
+#----------------------------------------------------------------------
+def test_filesize(sbox):
+  "test 'svnlook filesize'"
+
+  sbox.build()
+  repo_dir = sbox.repo_dir
+  wc_dir = sbox.wc_dir
+
+  # For every file in the repository, 'svnlook filesize' must agree with
+  # the byte length of the corresponding 'svnlook cat' output.
+  tree_output = run_svnlook('tree', '--full-paths', repo_dir)
+  for line in tree_output:
+    # Drop line endings
+    line = line.rstrip()
+    # Skip directories
+    if line[-1] == '/':
+      continue
+    # Run 'svnlook cat' and measure the size of the output.
+    cat_output = run_svnlook('cat', repo_dir, line)
+    cat_size = len("".join(cat_output))
+    # Run 'svnlook filesize' and compare the results with the CAT_SIZE.
+    filesize_output = run_svnlook('filesize', repo_dir, line)
+    if len(filesize_output) != 1:
+      raise svntest.Failure("'svnlook filesize' printed something other than "
+                            "a single line of output.")
+    filesize = int(filesize_output[0].strip())
+    if filesize != cat_size:
+      raise svntest.Failure("'svnlook filesize' and the counted length of "
+                            "'svnlook cat's output differ for the path "
+                            "'%s'." % (line))
+
+#----------------------------------------------------------------------
+# Read LOGFILENAME, delete it, and compare its lines with EXPECTED_DATA,
+# raising on mismatch.  Used to check what a hook script recorded.
+def verify_logfile(logfilename, expected_data):
+  if os.path.exists(logfilename):
+    fp = open(logfilename)
+  else:
+    raise svntest.verify.SVNUnexpectedOutput("hook logfile %s not found"\
+                                             % logfilename)
+
+  actual_data = fp.readlines()
+  fp.close()
+  # Remove the logfile so the next hook invocation starts clean.
+  os.unlink(logfilename)
+  svntest.verify.compare_and_display_lines('wrong hook logfile content',
+                                           'STDOUT',
+                                           expected_data, actual_data)
+
+def test_txn_flag(sbox):
+  "test 'svnlook * -t'"
+
+  sbox.build()
+  repo_dir = sbox.repo_dir
+  wc_dir = sbox.wc_dir
+  # The pre-commit hook appends svnlook output here; verify_logfile()
+  # reads and removes the file after each commit.
+  logfilepath = os.path.join(repo_dir, 'hooks.log')
+
+  # Hook template: run 'svnlook <cmd> -t <txn>' for a list of
+  # (command, options) pairs and log stdout+stderr of each run.
+  # List changed dirs and files in this transaction
+  hook_template = """import sys,os,subprocess
+svnlook_bin=%s
+
+fp = open(os.path.join(sys.argv[1], 'hooks.log'), 'wb')
+def output_command(fp, cmd, opt):
+ command = [svnlook_bin, cmd, '-t', sys.argv[2], sys.argv[1]] + opt
+ process = subprocess.Popen(command, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False, bufsize=-1)
+ (output, errors) = process.communicate()
+ status = process.returncode
+ fp.write(output)
+ fp.write(errors)
+ return status
+
+for (svnlook_cmd, svnlook_opt) in %s:
+ output_command(fp, svnlook_cmd, svnlook_opt.split())
+fp.close()"""
+  pre_commit_hook = svntest.main.get_pre_commit_hook_path(repo_dir)
+
+  # 1. svnlook 'changed' -t and 'dirs-changed' -t
+  hook_instance = hook_template % (repr(svntest.main.svnlook_binary),
+                                   repr([('changed', ''),
+                                         ('dirs-changed', '')]))
+  svntest.main.create_python_hook_script(pre_commit_hook,
+                                         hook_instance)
+
+  # Change files mu and rho
+  A_path = os.path.join(wc_dir, 'A')
+  mu_path = os.path.join(wc_dir, 'A', 'mu')
+  rho_path = os.path.join(wc_dir, 'A', 'D', 'G', 'rho')
+  svntest.main.file_append(mu_path, 'appended mu text')
+  svntest.main.file_append(rho_path, 'new appended text for rho')
+
+  # commit, and check the hook's logfile
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'ci', '-m', 'log msg', wc_dir)
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'up', wc_dir)
+
+  expected_data = [ 'U A/D/G/rho\n', 'U A/mu\n', 'A/\n', 'A/D/G/\n' ]
+  verify_logfile(logfilepath, expected_data)
+
+  # 2. svnlook 'propget' -t, 'proplist' -t
+  # 2. Change a dir and revision property
+  hook_instance = hook_template % (repr(svntest.main.svnlook_binary),
+                                   repr([('propget', 'bogus_prop /A'),
+                                         ('propget', '--revprop bogus_rev_prop'),
+                                         ('proplist', '/A'),
+                                         ('proplist', '--revprop')]))
+  svntest.main.create_python_hook_script(pre_commit_hook,
+                                         hook_instance)
+
+  svntest.actions.run_and_verify_svn(None, [], 'propset',
+                                     'bogus_prop', 'bogus_val\n', A_path)
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'ci', '-m', 'log msg', wc_dir,
+                                     '--with-revprop', 'bogus_rev_prop=bogus_rev_val\n')
+  # Now check the logfile
+  expected_data = [ 'bogus_val\n',
+                    'bogus_rev_val\n',
+                    "Properties on '/A':\n",
+                    ' bogus_prop\n',
+                    ' svn:log\n', ' svn:author\n',
+                    ' bogus_rev_prop\n',
+                    ' svn:date\n',
+                    ' svn:txn-client-compat-version\n',
+                    ' svn:txn-user-agent\n',
+                    ]
+  # Property listing order is not guaranteed, so compare unordered.
+  verify_logfile(logfilepath, svntest.verify.UnorderedOutput(expected_data))
+
+# From r1293375 until fixed in r1303856, 'svnlook changed' and 'svnlook diff'
+# produced no output on a property delete.
+def property_delete(sbox):
+  "property delete"
+
+  sbox.build()
+  repo_dir = sbox.repo_dir
+
+  # r2: add property 'foo' on A/mu; r3: delete it again.
+  sbox.simple_propset('foo', 'bar', 'A/mu')
+  sbox.simple_commit()
+  sbox.simple_propdel('foo', 'A/mu')
+  sbox.simple_commit()
+
+  # The property-only change must be reported as '_U'.
+  svntest.actions.run_and_verify_svnlook(["_U A/mu\n"], [],
+                                         'changed', repo_dir)
+
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+# (svntest numbers tests by their position in this list; the leading
+# None keeps the numbering 1-based)
+test_list = [ None,
+              test_misc,
+              delete_file_in_moved_dir,
+              test_print_property_diffs,
+              info_bad_newlines,
+              changed_copy_info,
+              tree_non_recursive,
+              limit_history,
+              diff_ignore_whitespace,
+              diff_ignore_eolstyle,
+              diff_binary,
+              test_filesize,
+              test_txn_flag,
+              property_delete,
+             ]
+
+if __name__ == '__main__':
+  svntest.main.run_tests(test_list)
+  # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/svnmover_tests.py b/subversion/tests/cmdline/svnmover_tests.py
new file mode 100755
index 0000000..bfdbb1f
--- /dev/null
+++ b/subversion/tests/cmdline/svnmover_tests.py
@@ -0,0 +1,1711 @@
+#!/usr/bin/env python
+#
+# svnmover_tests.py: tests of svnmover
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+import svntest
+import os, re
+
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Item = svntest.wc.StateItem
+
+######################################################################
+
+# Matches svnmover's commit confirmation, capturing the new revision number.
+_commit_re = re.compile('^Committed r([0-9]+)')
+# Matches a changed-path line of 'svn log -v' output, optionally with a
+# '(from PATH:REV)' copyfrom suffix.
+_log_re = re.compile('^ ([ADRM] /[^\(]+($| \(from .*:[0-9]+\)$))')
+# Matches an svnmover error line, capturing the message text.
+_err_re = re.compile('^svnmover: (.*)$')
+
+def mk_file(sbox, file_name):
+  """Make an unversioned file named FILE_NAME, with some text content,
+  in some convenient directory, and return a path to it.
+  """
+  # The sandbox's repo dir serves as an unversioned scratch area.
+  file_path = os.path.join(sbox.repo_dir, file_name)
+  svntest.main.file_append(file_path, "This is the file '" + file_name + "'.")
+  return file_path
+
+def populate_trunk(sbox, trunk):
+  """Create some files and dirs under the existing dir (relpath) TRUNK.
+  """
+  # Commits one revision; log verification is skipped (None).
+  test_svnmover(sbox.repo_url + '/' + trunk, None,
+                'put', mk_file(sbox, 'README'), 'README',
+                'mkdir lib',
+                'mkdir lib/foo',
+                'mkdir lib/foo/x',
+                'mkdir lib/foo/y',
+                'put', mk_file(sbox, 'file'), 'lib/foo/file')
+
+def initial_content_A_iota(sbox):
+  """Commit something in place of a greek tree for revision 1.
+  """
+  # Minimal stand-in: one directory 'A' and one file 'iota'.
+  test_svnmover(sbox.repo_url, None,
+                'mkdir A',
+                'put', mk_file(sbox, 'iota'), 'iota')
+
+def initial_content_ttb(sbox):
+  """Make a 'trunk' branch and 'tags' and 'branches' dirs.
+  """
+  # 'trunk' is created as a branch root; 'tags'/'branches' are plain dirs.
+  test_svnmover(sbox.repo_url, None,
+                'mkbranch trunk',
+                'mkdir tags',
+                'mkdir branches')
+
+def initial_content_projects_ttb(sbox):
+  """Make multiple project dirs, each with its own 'trunk' branch and 'tags'
+  and 'branches' dirs.
+  """
+  # Same layout as initial_content_ttb(), but nested under proj1/ and proj2/.
+  test_svnmover(sbox.repo_url, None,
+                'mkdir proj1',
+                'mkbranch proj1/trunk',
+                'mkdir proj1/tags',
+                'mkdir proj1/branches',
+                'mkdir proj2',
+                'mkbranch proj2/trunk',
+                'mkdir proj2/tags',
+                'mkdir proj2/branches')
+
+def initial_content_in_trunk(sbox):
+  """Make a trunk/tags/branches layout and then put some files and dirs
+  into the trunk branch.
+  """
+  initial_content_ttb(sbox)
+
+  # create initial state in trunk
+  # (r3)
+  populate_trunk(sbox, 'trunk')
+
+def sbox_build_svnmover(sbox, content=None):
+  """Create a sandbox repo containing one revision, with a directory 'A' and
+  a file 'iota'.
+
+  Use svnmover for every commit so as to get the branching/moving
+  metadata. This will no longer be necessary if we make 'svnmover'
+  fill in missing metadata automatically.
+  """
+  # NOTE(review): the actual layout depends on CONTENT; with the default
+  # (None) the repository is left empty.
+  sbox.build(create_wc=False, empty=True)
+  # svnmover sets revision properties, so revprop changes must be enabled.
+  svntest.actions.enable_revprop_changes(sbox.repo_dir)
+
+  # CONTENT, if given, is a callback that creates the initial revisions.
+  if content:
+    content(sbox)
+
+def test_svnmover3(sbox, relpath, expected_changes, expected_eids, *varargs):
+  """Like test_svnmover2(), but when EXPECTED_EIDS is given also run
+  'svnmover ls-br-r' and compare the resulting element-id tree with it.
+  """
+  test_svnmover2(sbox, relpath, expected_changes, *varargs)
+
+  if expected_eids:
+    exit_code, outlines, errlines = svntest.main.run_svnmover('-U',
+                                                              sbox.repo_url,
+                                                              '--ui=serial',
+                                                              'ls-br-r')
+    eid_tree = svntest.wc.State.from_eids(outlines)
+    try:
+      expected_eids.compare_and_display('eids', eid_tree)
+    except svntest.tree.SVNTreeError:
+      raise
+
+def test_svnmover2(sbox, relpath, expected_changes, *varargs):
+  """Run svnmover with the list of SVNMOVER_ARGS arguments. Verify that
+  its run results in a new commit with 'svnmover diff -c HEAD' changes
+  that match the list of EXPECTED_CHANGES (an unordered list of regexes).
+  """
+  repo_url = sbox.repo_url
+  if relpath:
+    repo_url += '/' + relpath
+
+  # Split arguments at spaces
+  varargs = ' '.join(varargs).split()
+  # First, run svnmover.
+  exit_code, outlines, errlines = svntest.main.run_svnmover('-U', repo_url,
+                                                            *varargs)
+  if exit_code or errlines:
+    raise svntest.main.SVNCommitFailure(str(errlines))
+  # Find the committed revision
+  for line in outlines:
+    m = _commit_re.match(line)
+    if m:
+      commit_rev = int(m.group(1))
+      break
+  else:
+    # No 'Committed rN' line means svnmover made no commit.
+    raise svntest.main.SVNLineUnequal(str(outlines))
+
+  # Now, run 'svnmover diff -c HEAD'
+  exit_code, outlines, errlines = svntest.main.run_svnmover('-U', sbox.repo_url,
+                                                            '--ui=paths',
+                                                            'diff',
+                                                            '.@' + str(commit_rev - 1),
+                                                            '.@' + str(commit_rev))
+  if exit_code or errlines:
+    raise svntest.main.SVNCommitFailure(str(errlines))
+
+  if expected_changes:
+    expected_changes = svntest.verify.UnorderedRegexListOutput(expected_changes)
+    outlines = [l.strip() for l in outlines]
+    svntest.verify.verify_outputs(None, outlines, None, expected_changes, None)
+
+def test_svnmover_verify_log(repo_url, expected_path_changes):
+  """Run 'svn log' on the head revision of REPO_URL and verify that its
+  changed paths equal EXPECTED_PATH_CHANGES (order-insensitively).
+  A None EXPECTED_PATH_CHANGES skips the check entirely.
+  """
+  if expected_path_changes is not None:
+    # Now, run 'svn log -vq -rHEAD'
+    changed_paths = []
+    exit_code, outlines, errlines = \
+      svntest.main.run_svn(None, 'log', '-vqrHEAD', repo_url)
+    if errlines:
+      raise svntest.Failure("Unable to verify commit with 'svn log': %s"
+                            % (str(errlines)))
+    # Collect the changed-path lines ('A /path', 'D /path', ...).
+    for line in outlines:
+      match = _log_re.match(line)
+      if match:
+        changed_paths.append(match.group(1).rstrip('\n\r'))
+
+    # Compare order-insensitively by sorting both sides.
+    expected_path_changes.sort()
+    changed_paths.sort()
+    if changed_paths != expected_path_changes:
+      raise svntest.Failure("Logged path changes differ from expectations\n"
+                            " expected: %s\n"
+                            " actual: %s" % (str(expected_path_changes),
+                                             str(changed_paths)))
+
+def test_svnmover(repo_url, expected_path_changes, *varargs):
+  """Run svnmover with the list of SVNMOVER_ARGS arguments. Verify that
+  its run results in a new commit with 'svn log -rHEAD' changed paths
+  that match the list of EXPECTED_PATH_CHANGES."""
+
+  # Split arguments at spaces
+  varargs = ' '.join(varargs).split()
+  # First, run svnmover.
+  exit_code, outlines, errlines = svntest.main.run_svnmover('-U', repo_url,
+                                                            *varargs)
+  if exit_code or errlines:
+    raise svntest.main.SVNCommitFailure(str(errlines))
+  # A successful run must report 'Committed rN' somewhere in its output.
+  if not any(map(_commit_re.match, outlines)):
+    raise svntest.main.SVNLineUnequal(str(outlines))
+
+  test_svnmover_verify_log(repo_url, expected_path_changes)
+
+def xtest_svnmover(repo_url, error_re_string, *varargs):
+  """Run svnmover with the list of VARARGS arguments. Verify that
+  its run produces an error, and that the error matches ERROR_RE_STRING
+  if that is not None.
+  """
+
+  # Split arguments at spaces
+  varargs = ' '.join(varargs).split()
+  # First, run svnmover.
+  exit_code, outlines, errlines = svntest.main.run_svnmover('-U', repo_url,
+                                                            *varargs)
+  if not exit_code:
+    raise svntest.main.Failure("Expected an error, but exit code is 0")
+  if error_re_string:
+    # Anchor the pattern loosely unless the caller already did so.
+    if not error_re_string.startswith(".*"):
+      error_re_string = ".*(" + error_re_string + ")"
+  else:
+    # No pattern given: any error output is acceptable.
+    error_re_string = ".*"
+
+  expected_err = svntest.verify.RegexOutput(error_re_string, match_all=False)
+  svntest.verify.verify_outputs(None, None, errlines, None, expected_err)
+
+def expected_ls_output(paths, subbranch_paths=[]):
+  """Return an expected output object matching the output of 'svnmover ls'
+  for the given plain PATHS and subbranch-root paths SUBBRANCH_PATHS.
+  """
+  # NOTE(review): the mutable default [] is shared across calls, but it
+  # is never modified here, so this is safe in practice.
+  expected_out = svntest.verify.UnorderedRegexListOutput(
+    [r' ' + re.escape(p) + ' *\n'
+     for p in paths] +
+    [r' ' + re.escape(p) + r' +\(branch B[0-9.]+\)' + ' *\n'
+     for p in subbranch_paths])
+  return expected_out
+
+def verify_paths_in_branch(sbox, branch_path, paths, subbranch_paths=[]):
+  """Verify that the branch in which BRANCH_PATH lies contains elements at
+  the paths PATHS and subbranch-roots at the paths SUBBRANCH_PATHS.
+  """
+  expected_out = expected_ls_output(paths, subbranch_paths)
+  svntest.actions.run_and_verify_svnmover(expected_out, None,
+                                          '-U', sbox.repo_url,
+                                          '--ui=paths',
+                                          'ls', branch_path)
+
+######################################################################
+
+# Exercise the basic svnmover subcommands (mkdir, put, cp, rm, mv) and
+# the error paths for invalid operations.
+def basic_svnmover(sbox):
+  "basic svnmover tests"
+  # a copy of svnmucc_tests 1
+
+  sbox_build_svnmover(sbox, content=initial_content_A_iota)
+
+  empty_file = os.path.join(sbox.repo_dir, 'empty')
+  svntest.main.file_append(empty_file, '')
+
+  # revision 2
+  test_svnmover(sbox.repo_url,
+                ['A /top0/foo'
+                 ], # ---------
+                'mkdir foo')
+
+  # revision 3
+  test_svnmover(sbox.repo_url,
+                ['A /top0/z.c',
+                 ], # ---------
+                'put', empty_file, 'z.c')
+
+  # revision 4
+  test_svnmover(sbox.repo_url,
+                ['A /top0/foo/z.c (from /top0/z.c:3)',
+                 'A /top0/foo/bar (from /top0/foo:3)',
+                 ], # ---------
+                'cp 3 z.c foo/z.c',
+                'cp 3 foo foo/bar')
+
+  # revision 5
+  test_svnmover(sbox.repo_url,
+                ['A /top0/zig (from /top0/foo:4)',
+                 'D /top0/zig/bar',
+                 'D /top0/foo',
+                 'A /top0/zig/zag (from /top0/foo:4)',
+                 ], # ---------
+                'cp 4 foo zig',
+                'rm zig/bar',
+                'mv foo zig/zag')
+
+  # revision 6
+  test_svnmover(sbox.repo_url,
+                ['D /top0/z.c',
+                 'A /top0/zig/zag/bar/y.c (from /top0/z.c:5)',
+                 'A /top0/zig/zag/bar/x.c (from /top0/z.c:3)',
+                 ], # ---------
+                'mv z.c zig/zag/bar/y.c',
+                'cp 3 z.c zig/zag/bar/x.c')
+
+  # revision 7
+  test_svnmover(sbox.repo_url,
+                ['D /top0/zig/zag/bar/y.c',
+                 'A /top0/zig/zag/bar/y_y.c (from /top0/zig/zag/bar/y.c:6)',
+                 'A /top0/zig/zag/bar/y+y.c (from /top0/zig/zag/bar/y.c:6)',
+                 ], # ---------
+                'mv zig/zag/bar/y.c zig/zag/bar/y_y.c',
+                'cp HEAD zig/zag/bar/y.c zig/zag/bar/y+y.c')
+
+  # revision 8
+  test_svnmover(sbox.repo_url,
+                ['D /top0/zig/zag/bar/y_y.c',
+                 'A /top0/zig/zag/bar/z_z1.c (from /top0/zig/zag/bar/y_y.c:7)',
+                 'A /top0/zig/zag/bar/z+z.c (from /top0/zig/zag/bar/y+y.c:7)',
+                 'A /top0/zig/zag/bar/z_z2.c (from /top0/zig/zag/bar/y_y.c:7)',
+                 ], #---------
+                'mv zig/zag/bar/y_y.c zig/zag/bar/z_z1.c',
+                'cp HEAD zig/zag/bar/y+y.c zig/zag/bar/z+z.c',
+                'cp HEAD zig/zag/bar/y_y.c zig/zag/bar/z_z2.c')
+
+
+  # revision 9
+  test_svnmover(sbox.repo_url,
+                ['D /top0/zig/zag',
+                 'A /top0/zig/foo (from /top0/zig/zag:8)',
+                 'D /top0/zig/foo/bar/z+z.c',
+                 'D /top0/zig/foo/bar/z_z2.c',
+                 'R /top0/zig/foo/bar/z_z1.c (from /top0/zig/zag/bar/x.c:6)',
+                 ], #---------
+                'mv zig/zag zig/foo',
+                'rm zig/foo/bar/z_z1.c',
+                'rm zig/foo/bar/z_z2.c',
+                'rm zig/foo/bar/z+z.c',
+                'cp 6 zig/zag/bar/x.c zig/foo/bar/z_z1.c')
+
+  # revision 10: replace a directory with a file copy
+  test_svnmover(sbox.repo_url,
+                ['R /top0/zig/foo/bar (from /top0/zig/z.c:9)',
+                 ], #---------
+                'rm zig/foo/bar',
+                'cp 9 zig/z.c zig/foo/bar')
+
+  # revision 11: replace a path with a copy of its own previous self
+  test_svnmover(sbox.repo_url,
+                ['R /top0/zig/foo/bar (from /top0/zig/foo/bar:9)',
+                 'D /top0/zig/foo/bar/z_z1.c',
+                 ], #---------
+                'rm zig/foo/bar',
+                'cp 9 zig/foo/bar zig/foo/bar',
+                'rm zig/foo/bar/z_z1.c')
+
+  # revision 12: 'head' (lowercase) is accepted as a revision specifier
+  test_svnmover(sbox.repo_url,
+                ['R /top0/zig/foo (from /top0/zig/foo/bar:11)',
+                 ], #---------
+                'rm zig/foo',
+                'cp head zig/foo/bar zig/foo')
+
+  # revision 13
+  test_svnmover(sbox.repo_url,
+                ['D /top0/zig',
+                 'A /top0/foo (from /top0/foo:4)',
+                 'A /top0/foo/foo (from /top0/foo:4)',
+                 'A /top0/foo/foo/foo (from /top0/foo:4)',
+                 'D /top0/foo/foo/bar',
+                 'R /top0/foo/foo/foo/bar (from /top0/foo:4)',
+                 ], #---------
+                'rm zig',
+                'cp 4 foo foo',
+                'cp 4 foo foo/foo',
+                'cp 4 foo foo/foo/foo',
+                'rm foo/foo/bar',
+                'rm foo/foo/foo/bar',
+                'cp 4 foo foo/foo/foo/bar')
+
+  # revision 14
+  test_svnmover(sbox.repo_url,
+                ['A /top0/boozle (from /top0/foo:4)',
+                 'A /top0/boozle/buz',
+                 'A /top0/boozle/buz/nuz',
+                 ], #---------
+                'cp 4 foo boozle',
+                'mkdir boozle/buz',
+                'mkdir boozle/buz/nuz')
+
+  # revision 15
+  test_svnmover(sbox.repo_url,
+                ['A /top0/boozle/buz/svnmover-test.py',
+                 'A /top0/boozle/guz (from /top0/boozle/buz:14)',
+                 'A /top0/boozle/guz/svnmover-test.py',
+                 ], #---------
+                'put', empty_file, 'boozle/buz/svnmover-test.py',
+                'cp 14 boozle/buz boozle/guz',
+                'put', empty_file, 'boozle/guz/svnmover-test.py')
+
+  # revision 16: rm + put in one commit shows up as a replace
+  test_svnmover(sbox.repo_url,
+                ['R /top0/boozle/guz/svnmover-test.py',
+                 ], #---------
+                'put', empty_file, 'boozle/buz/svnmover-test.py',
+                'rm boozle/guz/svnmover-test.py',
+                'put', empty_file, 'boozle/guz/svnmover-test.py')
+
+  # Expected missing revision error
+  xtest_svnmover(sbox.repo_url,
+                 "E205000: Syntax error parsing peg revision 'a'",
+                 #---------
+                 'cp a b')
+
+  # Expected cannot be younger error
+  xtest_svnmover(sbox.repo_url,
+                 "E160006: No such revision 42",
+                 #---------
+                 'cp 42 a b')
+
+  # Expected already exists error
+  xtest_svnmover(sbox.repo_url,
+                 "already exists .*'foo'",
+                 #---------
+                 'cp 16 A foo')
+
+  # Expected copy-child already exists error
+  xtest_svnmover(sbox.repo_url,
+                 "already exists .*'a/bar'",
+                 #---------
+                 'cp 16 foo a',
+                 'cp 16 foo/foo a/bar')
+
+  # Expected not found error
+  xtest_svnmover(sbox.repo_url,
+                 "not found .*'a@.*'",
+                 #---------
+                 'cp 16 a b')
+
+
+def nested_replaces(sbox):
+  "nested replaces"
+  # a copy of svnmucc_tests 2
+
+  sbox_build_svnmover(sbox)
+  repo_url = sbox.repo_url
+
+  # r1
+  svntest.actions.run_and_verify_svnmover(None, [],
+                                          '-U', repo_url, '-m', 'r1: create tree',
+                                          'mkdir', 'A', 'mkdir', 'A/B', 'mkdir', 'A/B/C',
+                                          'mkdir', 'M', 'mkdir', 'M/N', 'mkdir', 'M/N/O',
+                                          'mkdir', 'X', 'mkdir', 'X/Y', 'mkdir', 'X/Y/Z')
+  # r2: replace each tree root from another tree and nest further
+  # replaces inside, exercising replace-within-replace handling.
+  svntest.actions.run_and_verify_svnmover(None, [],
+                                          '-U', repo_url, '-m', 'r2: nested replaces',
+                                          *("""
+rm A rm M rm X
+cp HEAD X/Y/Z A cp HEAD A/B/C M cp HEAD M/N/O X
+cp HEAD A/B A/B cp HEAD M/N M/N cp HEAD X/Y X/Y
+rm A/B/C rm M/N/O rm X/Y/Z
+cp HEAD X A/B/C cp HEAD A M/N/O cp HEAD M X/Y/Z
+rm A/B/C/Y
+ """.split()))
+
+  # ### TODO: need a smarter run_and_verify_log() that verifies copyfrom
+  escaped = svntest.main.ensure_list(map(re.escape, [
+    ' R /top0/A (from /top0/X/Y/Z:1)',
+    ' A /top0/A/B (from /top0/A/B:1)',
+    ' R /top0/A/B/C (from /top0/X:1)',
+    ' R /top0/M (from /top0/A/B/C:1)',
+    ' A /top0/M/N (from /top0/M/N:1)',
+    ' R /top0/M/N/O (from /top0/A:1)',
+    ' R /top0/X (from /top0/M/N/O:1)',
+    ' A /top0/X/Y (from /top0/X/Y:1)',
+    ' R /top0/X/Y/Z (from /top0/M:1)',
+    ' D /top0/A/B/C/Y',
+    ]))
+  expected_output = svntest.verify.UnorderedRegexListOutput(escaped
+                      + ['^-', '^r2', '^-', '^Changed paths:',])
+  svntest.actions.run_and_verify_svn(expected_output, [],
+                                     'log', '-qvr2', repo_url)
+
+def merges(sbox):
+  "merges"
+  sbox_build_svnmover(sbox, content=initial_content_ttb)
+  repo_url = sbox.repo_url
+
+  # Create some nodes in trunk, each one named for how we will modify it.
+  # The name 'rm_no', for example, means we are going to 'rm' this node on
+  # trunk and make 'no' change on the branch.
+  # (r2)
+  svntest.actions.run_and_verify_svnmover(None, [],
+                                          '-U', repo_url,
+                                          'mkdir', 'trunk/no_no',
+                                          'mkdir', 'trunk/rm_no',
+                                          'mkdir', 'trunk/no_rm',
+                                          'mkdir', 'trunk/mv_no',
+                                          'mkdir', 'trunk/no_mv',
+                                          'mkdir', 'trunk/rm_mv',
+                                          'mkdir', 'trunk/mv_rm')
+
+  # branch (r3)
+  svntest.actions.run_and_verify_svnmover(None, [],
+                                          '-U', repo_url,
+                                          'branch', 'trunk', 'branches/br1')
+
+  # modify (r4, r5)
+  svntest.actions.run_and_verify_svnmover(None, [],
+                                          '-U', repo_url + '/trunk',
+                                          'mkdir', 'add_no',
+                                          'rm', 'rm_no',
+                                          'rm', 'rm_mv',
+                                          'mkdir', 'D1',
+                                          'mv', 'mv_no', 'D1/mv_no',
+                                          'mv', 'mv_rm', 'mv_rm_D1')
+  svntest.actions.run_and_verify_svnmover(None, [],
+                                          '-U', repo_url + '/branches/br1',
+                                          'mkdir', 'no_add',
+                                          'rm', 'no_rm',
+                                          'rm', 'mv_rm',
+                                          'mkdir', 'D2',
+                                          'mv', 'no_mv', 'D2/no_mv_B',
+                                          'mv', 'rm_mv', 'D2/rm_mv_B')
+
+  # a merge that makes no changes
+  svntest.actions.run_and_verify_svnmover(None, [],
+                                          '-U', repo_url,
+                                          'merge', 'trunk', 'branches/br1', 'trunk@4')
+
+  # a merge that makes changes with no conflict
+  svntest.actions.run_and_verify_svnmover(None, [],
+                                          '-U', repo_url,
+                                          'merge', 'branches/br1', 'trunk', 'trunk@4')
+
+  # a merge that makes changes, with conflicts
+  # (AnyOutput here: some output is expected, exact text unchecked)
+  svntest.actions.run_and_verify_svnmover(None, svntest.verify.AnyOutput,
+                                          '-U', repo_url,
+                                          'merge', 'trunk@5', 'branches/br1', 'trunk@2')
+
+
+######################################################################
+
+# Expected output of 'svnmover diff'
+
+# Helpers that build single expected lines of 'svnmover diff' output.
+# RPATH is a relpath within the branch; BRANCH_TEXT is an optional regex
+# suffix such as the '(branch B...)' annotation.
+def reported_element_del_line(rpath, branch_text=''):
+  # Element-deletion line.
+  return 'D ' + re.escape(rpath) + branch_text
+
+def reported_element_add_line(rpath, branch_text=''):
+  # Element-addition line.
+  return 'A ' + re.escape(rpath) + branch_text
+
+def reported_branch_del_line(subbranch_fullpath):
+  # Banner line for a deleted subbranch rooted at SUBBRANCH_FULLPATH.
+  return r'--- deleted branch [reB:0-9.]+ at /%s' % (re.escape(subbranch_fullpath),)
+
+def reported_branch_add_line(subbranch_fullpath):
+  # Banner line for an added subbranch rooted at SUBBRANCH_FULLPATH.
+  return r'--- added branch [rBe:0-9.]+ at /%s' % (re.escape(subbranch_fullpath),)
+
+def reported_br_params(path1, path2):
+  """Return (SUBBRANCH_RPATH, SUBBRANCH_FULLPATH).
+
+  Parameters are either (OUTER_BRANCH_FULLPATH, SUBBRANCH_RPATH) or for
+  a first-level branch (SUBBRANCH_RPATH, None). 'FULLPATH' means relpath
+  from the repo root; 'RPATH' means relpath from the outer branch.
+  """
+  if path2 is None:
+    # First-level branch: rpath and fullpath coincide.
+    subbranch_rpath = path1
+    subbranch_fullpath = path1
+  else:
+    subbranch_rpath = path2
+    subbranch_fullpath = path1 + '/' + path2
+  return subbranch_rpath, subbranch_fullpath
+
+def reported_mg_diff():
+  # Expected merge-history part of a diff; currently nothing is expected.
+  return [] #[r'--- history ...']
+
+def reported_br_diff(path1, path2=None):
+  """Return expected header lines for diff of a branch, or subtree in a branch.
+
+  PATH1 is the 'left' and PATH2 the 'right' side path. Both are full paths
+  from the repo root. If PATH2 is None, the branch ids and paths are
+  expected to be *the same* on both sides; otherwise the branch ids and/or
+  paths are expected to be *different* on each side.
+  """
+  if path2 is None:
+    return [r'--- diff branch [rBe:0-9.]+ at /%s' % (re.escape(path1),)]
+  return [r'--- diff branch [rBe:0-9.]+ at /%s : [rBe:0-9.]+ at /%s' % (
+           re.escape(path1), re.escape(path2))]
+
+def reported_del(one_path=None, paths=[], branches=[]):
+  """Return expected lines for deletion of elements (PATHS) and subbranch
+  roots (BRANCHES); ONE_PATH is shorthand for a single element path.
+  All paths are relpaths within the branch.
+  """
+  lines = []
+  if one_path is not None:
+    paths = [one_path] + paths
+  # Everything being deleted, elements and subbranch roots alike.
+  all_paths = paths + branches
+
+  for path in paths:
+    # A deletion nested inside another deleted dir is reported as 'd'.
+    if os.path.dirname(path) in all_paths:
+      code = 'd'
+    else:
+      code = 'D'
+
+    lines.append(code + ' ' + re.escape(path))
+
+  for path in branches:
+    if os.path.dirname(path) in all_paths:
+      code = 'd'
+    else:
+      code = 'D'
+
+    branch_text = r' \(branch B[0-9.]+\)'
+    lines.append(code + ' ' + re.escape(path) + branch_text)
+
+    # Each deleted subbranch also gets its own banner line.
+    lines.append(reported_branch_del_line(path))
+
+  return lines
+
+def reported_br_del(path1, path2=None):
+  """Return expected lines for deletion of a (sub)branch.
+
+  Params are (SUBBRANCH_RPATH) or (OUTER_BRANCH_FULLPATH, SUBBRANCH_RPATH).
+  """
+  subbranch_rpath, subbranch_fullpath = reported_br_params(path1, path2)
+  # One element-deletion line plus the branch banner.
+  return [reported_element_del_line(subbranch_rpath, r' \(branch B[0-9.]+\)'),
+          reported_branch_del_line(subbranch_fullpath)]
+
+def reported_add(path):
+  """Return expected lines for addition of an element.
+
+  PATH is the relpath of the element within its branch.
+  """
+  return ['A ' + re.escape(path)]
+
+def reported_br_add(path1, path2=None):
+  """Return expected lines for addition of a (sub)branch.
+
+  Params are (SUBBRANCH_RPATH) or (OUTER_BRANCH_FULLPATH, SUBBRANCH_RPATH).
+  """
+  subbranch_rpath, subbranch_fullpath = reported_br_params(path1, path2)
+  return [reported_element_add_line(subbranch_rpath, r' \(branch B[0-9.]+\)'),
+          reported_branch_add_line(subbranch_fullpath)]
+
+def reported_br_nested_add(path1, path2=None):
+  """Return expected lines for addition of a subbranch that is nested inside
+  an outer branch that is also added: there is no accompanying 'added
+  element' line.
+
+  Params are (SUBBRANCH_RPATH) or (OUTER_BRANCH_FULLPATH, SUBBRANCH_RPATH).
+  """
+  # NOTE(review): subbranch_rpath is unused here; only the banner line
+  # is expected for a nested addition.
+  subbranch_rpath, subbranch_fullpath = reported_br_params(path1, path2)
+  return [reported_branch_add_line(subbranch_fullpath)]
+
+def reported_move(path1, path2, branch_text=''):
+  """Return expected lines for a move, optionally of a (sub)branch.
+  """
+  dir1, name1 = os.path.split(path1)
+  dir2, name2 = os.path.split(path2)
+  if name1 == name2:
+    # Same basename: a pure move between directories.
+    return ['Mv ' + re.escape(path2) + branch_text
+            + r' \(moved from ' + re.escape(dir1 + '/...') + r'\)']
+  elif dir1 == dir2:
+    # Same directory: a pure rename.
+    return ['M r ' + re.escape(path2) + branch_text
+            + r' \(renamed from ' + re.escape('.../' + name1) + r'\)']
+  else:
+    # Both directory and basename changed.
+    return ['Mvr ' + re.escape(path2) + branch_text
+            + r' \(moved\+renamed from ' + re.escape(path1) + r'\)']
+
+def reported_br_move(path1, path2):
+  """Return expected lines for a move of a (sub)branch.
+  """
+  return reported_move(path1, path2, r' \(branch B[0-9.]+\)')
+
+
+######################################################################
+
+#@XFail() # There is a bug in the conversion to old-style commits:
+# in r6 'bar' is plain-added instead of copied.
+def merge_edits_with_move(sbox):
+ "merge_edits_with_move"
+ sbox_build_svnmover(sbox, content=initial_content_ttb)
+ repo_url = sbox.repo_url
+
+ # create initial state in trunk
+ # (r2)
+ test_svnmover2(sbox, '/trunk',
+ reported_br_diff('trunk') +
+ reported_add('lib') +
+ reported_add('lib/foo') +
+ reported_add('lib/foo/x') +
+ reported_add('lib/foo/y'),
+ 'mkdir lib',
+ 'mkdir lib/foo',
+ 'mkdir lib/foo/x',
+ 'mkdir lib/foo/y')
+
+ # branch (r3)
+ test_svnmover2(sbox, '',
+ reported_br_diff('') +
+ reported_br_add('branches/br1'),
+ 'branch trunk branches/br1')
+
+ # on trunk: make edits under 'foo' (r4)
+ test_svnmover2(sbox, 'trunk',
+ reported_br_diff('trunk') +
+ reported_del('lib/foo/x') +
+ reported_move('lib/foo/y', 'lib/foo/y2') +
+ reported_add('lib/foo/z'),
+ 'rm lib/foo/x',
+ 'mv lib/foo/y lib/foo/y2',
+ 'mkdir lib/foo/z')
+
+ # on branch: move/rename 'foo' (r5)
+ test_svnmover2(sbox, 'branches/br1',
+ reported_br_diff('branches/br1') +
+ reported_move('lib/foo', 'bar'),
+ 'mv lib/foo bar')
+
+ # merge the move to trunk (r6)
+ test_svnmover2(sbox, '',
+ reported_mg_diff() +
+ reported_br_diff('trunk') +
+ reported_move('lib/foo', 'bar'),
+ 'merge branches/br1@5 trunk trunk@2')
+
+ # merge the edits in trunk (excluding the merge r6) to branch (r7)
+ test_svnmover2(sbox, '',
+ reported_mg_diff() +
+ reported_br_diff('branches/br1') +
+ reported_del('bar/x') +
+ reported_move('bar/y', 'bar/y2') +
+ reported_add('bar/z'),
+ 'merge trunk@5 branches/br1 trunk@2')
+
+# Exercise simple moves (not cyclic or hierarchy-inverting):
+# - {file,dir}
+# - {rename-only,move-only,rename-and-move}
+def simple_moves_within_a_branch(sbox):
+ "simple moves within a branch"
+ sbox_build_svnmover(sbox, content=initial_content_in_trunk)
+ repo_url = sbox.repo_url
+
+ # rename only, file
+ test_svnmover2(sbox, '/trunk',
+ reported_br_diff('trunk') +
+ reported_move('README', 'README.txt'),
+ 'mv README README.txt')
+ # move only, file
+ test_svnmover2(sbox, '/trunk',
+ reported_br_diff('trunk') +
+ reported_move('README.txt', 'lib/README.txt'),
+ 'mv README.txt lib/README.txt')
+ # rename only, empty dir
+ test_svnmover2(sbox, '/trunk',
+ reported_br_diff('trunk') +
+ reported_move('lib/foo/y', 'lib/foo/y2'),
+ 'mv lib/foo/y lib/foo/y2')
+ # move only, empty dir
+ test_svnmover2(sbox, '/trunk',
+ reported_br_diff('trunk') +
+ reported_move('lib/foo/y2', 'y2'),
+ 'mv lib/foo/y2 y2')
+ # move and rename, dir with children
+ test_svnmover2(sbox, '/trunk',
+ reported_br_diff('') +
+ reported_add('subdir') +
+ reported_move('lib', 'subdir/lib2'),
+ 'mkdir subdir',
+ 'mv lib subdir/lib2',
+ )
+
+ # moves and renames together
+ # (put it all back to how it was, in one commit)
+ test_svnmover2(sbox, '/trunk',
+ reported_br_diff('') +
+ reported_move('subdir/lib2/README.txt', 'README') +
+ reported_move('subdir/lib2', 'lib') +
+ reported_move('y2', 'lib/foo/y') +
+ reported_del('subdir'),
+ 'mv subdir/lib2 lib',
+ 'rm subdir',
+ 'mv y2 lib/foo/y',
+ 'mv lib/README.txt README'
+ )
+
+# Exercise moving content from one branch to another by means of
+# 'branch-into-and-delete' (which I previously called 'branch-and-delete').
+# In this test, the elements being moved do not already exist in the target
+# branch.
+def move_to_related_branch(sbox):
+ "move to related branch"
+ sbox_build_svnmover(sbox, content=initial_content_in_trunk)
+ repo_url = sbox.repo_url
+
+ # branch
+ test_svnmover2(sbox, '',
+ reported_br_diff('') +
+ reported_br_add('branches/br1'),
+ 'branch trunk branches/br1')
+
+ # remove all elements from branch so we can try moving them there
+ test_svnmover2(sbox, '',
+ reported_br_diff('branches/br1') +
+ reported_del('README') +
+ reported_del(paths=['lib',
+ 'lib/foo',
+ 'lib/foo/file',
+ 'lib/foo/x',
+ 'lib/foo/y']),
+ 'rm branches/br1/README',
+ 'rm branches/br1/lib')
+
+ # move from trunk to branch 'br1'
+ test_svnmover2(sbox, '',
+ reported_br_diff('branches/br1') +
+ reported_br_diff('trunk') +
+ reported_del('README') +
+ reported_del(paths=['lib',
+ 'lib/foo',
+ 'lib/foo/file',
+ 'lib/foo/x',
+ 'lib/foo/y']) +
+ reported_add('README') +
+ reported_add('subdir') +
+ reported_add('subdir/lib2') +
+ reported_add('subdir/lib2/foo') +
+ reported_add('subdir/lib2/foo/file') +
+ reported_add('subdir/lib2/foo/x') +
+ reported_add('y2'),
+ # keeping same relpath
+ 'branch-into-and-delete trunk/README branches/br1/README',
+ # with a move-within-branch and rename as well
+ 'branch-into-and-delete trunk/lib/foo/y branches/br1/y2',
+ # dir with children, also renaming and moving within branch
+ 'mkdir branches/br1/subdir',
+ 'branch-into-and-delete trunk/lib branches/br1/subdir/lib2')
+
+# Exercise moving content from one branch to another by means of
+# 'branch-into-and-delete' (which I previously called 'branch-and-delete').
+# In this test, there are existing instances of the same elements in the
+# target branch, which should be overwritten.
+def move_to_related_branch_element_already_exists(sbox):
+ "move to related branch; element already exists"
+ sbox_build_svnmover(sbox, content=initial_content_in_trunk)
+ repo_url = sbox.repo_url
+
+ # branch
+ test_svnmover2(sbox, '',
+ reported_br_diff('') +
+ reported_br_add('branches/br1'),
+ 'branch trunk branches/br1')
+
+ # move to a branch where same element already exists: should overwrite
+ test_svnmover2(sbox, '',
+ reported_br_diff('trunk') +
+ reported_del('README') +
+ reported_br_diff('branches/br1') +
+ reported_move('README', 'README2'),
+ # single file: element already exists, at different relpath
+ 'branch-into-and-delete trunk/README branches/br1/README2')
+ test_svnmover2(sbox, '',
+ reported_br_diff('branches/br1') +
+ reported_move('lib', 'lib2') +
+ reported_br_diff('trunk') +
+ reported_del(paths=['lib',
+ 'lib/foo',
+ 'lib/foo/file',
+ 'lib/foo/x',
+ 'lib/foo/y']),
+ # dir: child elements already exist (at different relpaths)
+ 'mv branches/br1/lib/foo/x branches/br1/x2',
+ 'branch-into-and-delete trunk/lib branches/br1/lib2')
+
+# Exercise moving content by copy-and-delete from one branch to another.
+# In this test the branches have no elements in common.
+def move_to_unrelated_branch(sbox):
+ "move to unrelated branch"
+ sbox_build_svnmover(sbox, content=initial_content_in_trunk)
+ repo_url = sbox.repo_url
+
+ # move from trunk to a directory in the root branch
+ test_svnmover2(sbox, '',
+ reported_br_diff('') +
+ reported_br_diff('trunk') +
+ reported_del('README') +
+ reported_add('README') +
+ reported_del(paths=['lib',
+ 'lib/foo',
+ 'lib/foo/file',
+ 'lib/foo/x',
+ 'lib/foo/y']) +
+ reported_add('y2') +
+ reported_add('subdir/lib2') +
+ reported_add('subdir/lib2/foo') +
+ reported_add('subdir/lib2/foo/file') +
+ reported_add('subdir/lib2/foo/x') +
+ reported_add('subdir'),
+ # keeping same relpath
+ 'copy-and-delete trunk/README README',
+ # with a move-within-branch and rename as well
+ 'copy-and-delete trunk/lib/foo/y y2',
+ # dir with children, also renaming and moving within branch
+ 'mkdir subdir',
+ 'copy-and-delete trunk/lib subdir/lib2')
+
+# Move a whole branch within the same parent branch.
+def move_branch_within_same_parent_branch(sbox):
+ "move branch within same parent branch"
+ sbox_build_svnmover(sbox, content=initial_content_in_trunk)
+ repo_url = sbox.repo_url
+
+ # make a subbranch
+ test_svnmover2(sbox, '',
+ reported_br_diff('trunk') +
+ reported_br_add('trunk', 'sub'),
+ 'mkbranch trunk/sub')
+
+ # move trunk
+ test_svnmover2(sbox, '',
+ reported_br_diff('') +
+ reported_add('D') +
+ reported_add('D/E') +
+ reported_br_move('trunk', 'D/E/trunk2'),
+ 'mkdir D',
+ 'mkdir D/E',
+ 'mv trunk D/E/trunk2')
+
+ # move trunk and also modify it
+ test_svnmover2(sbox, '',
+ reported_br_diff('') +
+ reported_del(paths=['D',
+ 'D/E']) +
+ reported_br_move('D/E/trunk2', 'trunk') +
+ reported_br_diff('D/E/trunk2', 'trunk') +
+ reported_add('new'),
+ 'mv D/E/trunk2 trunk',
+ 'rm D',
+ 'mkdir trunk/new')
+
+ # move a subbranch of trunk
+ test_svnmover2(sbox, 'trunk',
+ reported_br_diff('trunk') +
+ reported_br_move('sub', 'sub2'),
+ 'mv sub sub2'
+ )
+
+# This tests one variant of rearranging a trunk/tags/branches structure.
+#
+# From a single set of branches (each branch containing multiple
+# more-or-less-independent projects) to a separate set of branches for
+# each project.
+#
+# +- /TRUNK/ +- proj1/
+# | +- proj1/... ___________ | +- TRUNK/...
+# | +- proj2/... ___ | +- branches/
+# | \ ____ | +- BR1/...
+# +- /branches/ \ / |
+# +- BR1/ X +- proj2/
+# +- proj1/... ____/ \______ +- TRUNK/...
+# +- proj2/... _____ +- branches/
+# \_______ +- BR1/...
+#
+# (UPPER CASE denotes a branch root.)
+#
+# This rearrangement is achieved entirely by branching from subtrees of the
+# existing branches.
+#
+def restructure_repo_ttb_projects_to_projects_ttb(sbox):
+ "restructure repo: ttb/projects to projects/ttb"
+ sbox_build_svnmover(sbox, content=initial_content_ttb)
+ repo_url = sbox.repo_url
+
+ test_svnmover2(sbox, 'trunk', None,
+ 'mkdir proj1',
+ 'mkdir proj1/lib',
+ 'mkdir proj1/lib/foo',
+ 'mkdir proj1/lib/foo/x',
+ 'mkdir proj1/lib/foo/y')
+ # branch
+ test_svnmover2(sbox, '', None,
+ 'branch', 'trunk', 'branches/br1')
+
+ # make 'proj2' (on branch, for no particular reason) (r4)
+ test_svnmover2(sbox, 'branches/br1', None,
+ 'mkdir proj2',
+ 'mkdir proj2/foo',
+ 'mkdir proj2/bar')
+
+
+ # on trunk: make edits (r5)
+ test_svnmover2(sbox, 'trunk', None,
+ 'rm proj1/lib/foo/x',
+ 'mv proj1/lib/foo/y proj1/lib/foo/y2',
+ 'mkdir proj1/lib/foo/z')
+
+ # on branch: make edits (r6)
+ test_svnmover2(sbox, 'branches/br1/proj1', None,
+ 'mv lib/foo bar')
+
+ # merge the branch to trunk (r7)
+ test_svnmover2(sbox, '',
+ reported_mg_diff() +
+ reported_br_diff('trunk') +
+ reported_move('proj1/lib/foo', 'proj1/bar') +
+ reported_add('proj2') +
+ reported_add('proj2/bar') +
+ reported_add('proj2/foo'),
+ 'merge branches/br1 trunk trunk@3')
+
+  # merge the edits in trunk (excluding the merge r7) to branch (r8)
+ test_svnmover2(sbox, '',
+ reported_mg_diff() +
+ reported_br_diff('branches/br1') +
+ reported_del('proj1/bar/x') +
+ reported_move('proj1/bar/y', 'proj1/bar/y2') +
+ reported_add('proj1/bar/z'),
+ 'merge trunk@5 branches/br1 trunk@2')
+
+ # Make the new project directories
+ test_svnmover2(sbox, '', None,
+ 'mkdir proj1',
+ 'mkdir proj1/branches',
+ 'mkdir proj2',
+ 'mkdir proj2/branches',
+ )
+ # Rearrange: {t,t,b}/{proj} => {proj}/{t,t,b}
+ test_svnmover2(sbox, '',
+ reported_br_diff('') +
+ reported_br_add('proj1/trunk') +
+ reported_br_add('proj2/trunk') +
+ reported_br_add('proj1/branches/br1') +
+ reported_br_add('proj2/branches/br1'),
+ 'branch trunk/proj1 proj1/trunk',
+ 'branch trunk/proj2 proj2/trunk',
+ 'branch branches/br1/proj1 proj1/branches/br1',
+ 'branch branches/br1/proj2 proj2/branches/br1',
+ )
+ # Delete the remaining root dir of the old trunk and branches
+ test_svnmover2(sbox, '',
+ reported_br_diff('') +
+ reported_del('branches', branches=[
+ 'branches/br1',
+ 'trunk']),
+ 'rm trunk',
+ 'rm branches',
+ )
+
+ ### It's all very well to see that the dirs and files now appear at the
+ ### right places, but what should we test to ensure the history is intact?
+
+# This tests one variant of rearranging a trunk/tags/branches structure.
+#
+# From a separate set of branches for each project to a single set of branches
+# (each branch containing multiple more-or-less-independent projects).
+#
+# +- proj1/ +- /TRUNK/
+# | +- TRUNK/... ___________ | +- proj1/...
+# | +- branches/ ___ | +- proj2/...
+# | +- BR1/... ____ / |
+# | \ / +- /branches/
+# +- proj2/ X +- BR1/
+# +- TRUNK/... ____/ \______ +- proj1/...
+# +- branches/ _______ +- proj2/...
+# +- BR1/... _____/
+#
+# (UPPER CASE denotes a branch root.)
+#
+# This
+# rearrangement is achieved entirely by branching the existing branches into
+# subtrees of the new big branches.
+#
+def restructure_repo_projects_ttb_to_ttb_projects(sbox):
+ "restructure repo: projects/ttb to ttb/projects"
+ sbox_build_svnmover(sbox, content=initial_content_projects_ttb)
+ repo_url = sbox.repo_url
+ head = 1
+
+ # populate proj1 and proj2, each with a trunk, a branch and a merge
+ for proj in ['proj1', 'proj2']:
+ # make a trunk, some trunk content, and a branch from it
+ populate_trunk(sbox, proj + '/trunk')
+ test_svnmover2(sbox, proj, None,
+ 'branch trunk branches/br1')
+ head += 2
+ trunk_old_rev = head
+
+ # make edits on trunk and on branch
+ test_svnmover2(sbox, proj + '/trunk', None,
+ 'rm lib/foo/x',
+ 'mv lib/foo/y lib/foo/y2',
+ 'mkdir lib/foo/z')
+ test_svnmover2(sbox, proj + '/branches/br1', None,
+ 'mv lib/foo bar',
+ 'rm lib')
+ head += 2
+
+ # merge trunk to branch
+ test_svnmover2(sbox, proj,
+ reported_mg_diff() +
+ reported_br_diff(proj + '/branches/br1') +
+ reported_del('bar/x') +
+ reported_move('bar/y', 'bar/y2') +
+ reported_add('bar/z'),
+ 'merge trunk branches/br1 trunk@' + str(trunk_old_rev))
+ head += 1
+
+ # Restructuring
+ # Make the new T/T/B structure
+ test_svnmover2(sbox, '', None,
+ 'mkbranch trunk',
+ 'mkdir tags',
+ 'mkdir branches',
+ 'branch trunk branches/br1',
+ )
+ # Rearrange: {proj}/{t,t,b} => {t,t,b}/{proj}
+ #
+ # This is a form of 'branching'. We want to create new branched content in
+ # the existing target branch rather than a separate new branch nested inside
+ # the existing branch. Conceptually this is a form of 'branch' or 'merge' or
+ # 'instantiate'. With the current 'svnmover' UI it is called 'branch-into'.
+ for proj in ['proj1', 'proj2']:
+ test_svnmover2(sbox, '',
+ reported_br_diff('') +
+ reported_del(branches=[proj + '/trunk']) +
+ reported_br_diff('trunk') +
+ reported_add(proj) +
+ reported_add(proj + '/README') +
+ reported_add(proj + '/lib') +
+ reported_add(proj + '/lib/foo') +
+ reported_add(proj + '/lib/foo/file') +
+ reported_add(proj + '/lib/foo/y2') +
+ reported_add(proj + '/lib/foo/z'),
+ 'branch-into', proj + '/trunk', 'trunk/' + proj,
+ 'rm', proj + '/trunk',
+ )
+ test_svnmover2(sbox, '',
+ reported_br_diff('') +
+ reported_del(branches=[proj + '/branches/br1']) +
+ reported_br_diff('branches/br1') +
+ reported_add(proj) +
+ reported_add(proj + '/README') +
+ reported_add(proj + '/bar') +
+ reported_add(proj + '/bar/file') +
+ reported_add(proj + '/bar/y2') +
+ reported_add(proj + '/bar/z'),
+ 'branch-into', proj + '/branches/br1', 'branches/br1/' + proj,
+ 'rm', proj + '/branches/br1',
+ )
+ # Remove the old project directory
+ test_svnmover2(sbox, '', None,
+ 'rm', proj)
+
+ verify_paths_in_branch(sbox, '.',
+ ['.', 'tags', 'branches'],
+ ['trunk', 'branches/br1'])
+ verify_paths_in_branch(sbox, 'trunk', [
+ '.',
+ 'proj1',
+ 'proj1/README',
+ 'proj1/lib',
+ 'proj1/lib/foo',
+ 'proj1/lib/foo/file',
+ 'proj1/lib/foo/y2',
+ 'proj1/lib/foo/z',
+ 'proj2',
+ 'proj2/README',
+ 'proj2/lib',
+ 'proj2/lib/foo',
+ 'proj2/lib/foo/file',
+ 'proj2/lib/foo/y2',
+ 'proj2/lib/foo/z',
+ ])
+
+ ### It's all very well to see that the dirs and files now appear at the
+ ### right places, but what should we test to ensure the history is intact?
+
+# Brane's example on IRC 2015-04-14
+# "e.g., in our tree, libsvn_fs_x is a branch of libsvn_fs_fs; are we still
+# allowed to merge branches/foo to trunk, and will the merge correctly reflect
+# changes in these two sub-branches, and will a subsequent merge from fs_fs to
+# fs_x produce sane results?"
+def subbranches1(sbox):
+ "subbranches1"
+ sbox_build_svnmover(sbox, content=initial_content_in_trunk)
+ repo_url = sbox.repo_url
+ head = 1
+
+ # create content in a trunk subtree 'libsvn_fs_fs'
+ test_svnmover2(sbox, 'trunk', None,
+ 'mv lib libsvn_fs_fs',
+ 'put', mk_file(sbox, 'file.c'), 'libsvn_fs_fs/file.c')
+ # branch 'trunk/libsvn_fs_fs' to 'trunk/libsvn_fs_x'
+ test_svnmover2(sbox, 'trunk', None,
+ 'branch libsvn_fs_fs libsvn_fs_x')
+ # branch 'trunk' to 'branches/foo'
+ test_svnmover2(sbox, '', None,
+ 'branch trunk branches/foo')
+
+ # make edits in 'branches/foo' and its subbranch
+ test_svnmover2(sbox, 'branches/foo', None,
+ 'mkdir docs',
+ 'mv libsvn_fs_fs/file.c libsvn_fs_fs/file2.c')
+ test_svnmover2(sbox, 'branches/foo/libsvn_fs_x', None,
+ 'mkdir reps',
+ 'mv file.c reps/file.c')
+
+ # merge 'branches/foo' to 'trunk'
+ test_svnmover2(sbox, '',
+ reported_mg_diff() +
+ reported_br_diff('trunk') +
+ reported_add('docs') +
+ reported_move('libsvn_fs_fs/file.c', 'libsvn_fs_fs/file2.c') +
+ reported_br_diff('trunk/libsvn_fs_x') +
+ reported_add('reps') +
+ reported_move('file.c', 'reps/file.c'),
+ 'merge branches/foo trunk trunk@4')
+
+ # merge 'trunk/libsvn_fs_fs' to 'trunk/libsvn_fs_x'
+ test_svnmover2(sbox, '',
+ reported_mg_diff() +
+ reported_br_diff('trunk/libsvn_fs_x') +
+ reported_move('reps/file.c', 'reps/file2.c'),
+ 'merge trunk/libsvn_fs_fs trunk/libsvn_fs_x trunk/libsvn_fs_fs@4')
+
+def merge_deleted_subbranch(sbox):
+ "merge deleted subbranch"
+ sbox_build_svnmover(sbox, content=initial_content_in_trunk)
+ repo_url = sbox.repo_url
+ head = 1
+
+ # add a subbranch in 'trunk'
+ test_svnmover2(sbox, 'trunk', None,
+ 'branch lib lib2')
+
+ yca_rev = 4
+
+ # branch 'trunk' to 'branches/foo'
+ test_svnmover2(sbox, '', None,
+ 'branch trunk branches/foo')
+ # delete a subbranch in 'trunk'
+ test_svnmover2(sbox, 'trunk', None,
+ 'rm lib2')
+
+ # merge 'trunk' to 'branches/foo'
+ #
+ # This should delete the subbranch 'lib2'
+ test_svnmover2(sbox, '',
+ reported_mg_diff() +
+ reported_br_diff('branches/foo') +
+ reported_br_del('branches/foo', 'lib2'),
+ 'merge trunk branches/foo trunk@' + str(yca_rev))
+
+def merge_added_subbranch(sbox):
+ "merge added subbranch"
+ sbox_build_svnmover(sbox, content=initial_content_in_trunk)
+ repo_url = sbox.repo_url
+ head = 1
+
+ yca_rev = 3
+
+ # branch 'trunk' to 'branches/foo'
+ test_svnmover2(sbox, '', None,
+ 'branch trunk branches/foo')
+ # add a subbranch in 'trunk'
+ test_svnmover2(sbox, 'trunk', None,
+ 'branch lib lib2')
+
+ # merge 'trunk' to 'branches/foo'
+ #
+ # This should add the subbranch 'lib2'
+ test_svnmover2(sbox, '',
+ reported_mg_diff() +
+ reported_br_diff('branches/foo') +
+ reported_br_add('branches/foo', 'lib2'),
+ 'merge trunk branches/foo trunk@' + str(yca_rev))
+
+def branch_to_subbranch_of_self(sbox):
+ "branch to subbranch of self"
+ # When branching, put the new branch inside the source subtree. This should
+ # not lead to infinite recursion.
+ # * source is a { whole branch | subtree of a branch }
+ # * target is a new path in { the source subtree |
+ # a subbranch in the source branch }
+ sbox_build_svnmover(sbox, content=initial_content_in_trunk)
+
+ # branch 'trunk' to 'trunk/foo'
+ test_svnmover2(sbox, '', None,
+ 'branch trunk trunk/foo')
+ # add another subbranch nested under that
+ test_svnmover2(sbox, 'trunk', None,
+ 'branch lib foo/lib2')
+
+ # branch 'trunk' to 'trunk/foo/lib2/x'
+ #
+ # This should not recurse infinitely
+ test_svnmover2(sbox, '',
+ reported_br_diff('trunk/foo/lib2') +
+ reported_br_add('trunk/foo/lib2', 'x') +
+ reported_br_nested_add('trunk/foo/lib2/x', 'foo') +
+ reported_br_nested_add('trunk/foo/lib2/x/foo', 'lib2'),
+ 'branch trunk trunk/foo/lib2/x')
+
+def merge_from_subbranch_to_subtree(sbox):
+ "merge from subbranch to subtree"
+ # Merge from the root of a subbranch to an instance of that same element
+ # that appears as a non-subbranch in a bigger branch (for example its
+ # 'parent' branch).
+ sbox_build_svnmover(sbox)
+
+ # Make a subtree 'C1' and a subbranch of it 'C2'
+ test_svnmover2(sbox, '', None,
+ 'mkdir A mkdir A/B1 mkdir A/B1/C1')
+ test_svnmover2(sbox, '', None,
+ 'branch A/B1/C1 A/B1/C2')
+
+ # Make a modification in 'C2'
+ test_svnmover2(sbox, '', None,
+ 'mkdir A/B1/C2/D')
+
+ # Merge 'C2' to 'C1'. The broken merge code saw the merge root element as
+ # having changed its parent-eid and name from {A/B1,'C1'} at the YCA to
+ # nil on the merge source-right, and tried to make that same change in the
+ # target.
+ test_svnmover2(sbox, '',
+ reported_mg_diff() +
+ reported_br_diff('') +
+ reported_add('A/B1/C1/D'),
+ 'merge A/B1/C2 A/B1/C1 A/B1/C1@2')
+
+def modify_payload_of_branch_root_element(sbox):
+ "modify payload of branch root element"
+ sbox_build_svnmover(sbox)
+
+ # Make a file, and branch it
+ test_svnmover2(sbox, '', None,
+ 'put ' + mk_file(sbox, 'f1') + ' f1 ' +
+ 'branch f1 f2')
+
+ # Modify the file-branch
+ test_svnmover2(sbox, '', None,
+ 'put ' + mk_file(sbox, 'f2') + ' f2')
+
+def merge_swap_abc(sbox):
+ "merge swaps A and C in A/B/C"
+ sbox_build_svnmover(sbox)
+
+ expected_eids = svntest.wc.State('', {
+ 'B0' : Item(eid=0),
+ 'B0/X' : Item(eid=1),
+ 'B0.1' : Item(eid=2),
+ 'B0.1/A' : Item(eid=3),
+ 'B0.1/A/a1' : Item(eid=4),
+ 'B0.1/A/B' : Item(eid=5),
+ 'B0.1/A/B/C' : Item(eid=6),
+ 'B0.1/A/B/C/c1' : Item(eid=7),
+ })
+ test_svnmover3(sbox, '',
+ reported_br_diff('') +
+ reported_br_add('X'),
+ expected_eids,
+ 'mkbranch X ' +
+ 'mkdir X/A ' +
+ 'mkdir X/A/a1 ' +
+ 'mkdir X/A/B ' +
+ 'mkdir X/A/B/C ' +
+ 'mkdir X/A/B/C/c1')
+
+ expected_eids.add({
+ 'B0/Y' : Item(eid=8),
+ 'B0.8' : Item(eid=2),
+ 'B0.8/A' : Item(eid=3),
+ 'B0.8/A/a1' : Item(eid=4),
+ 'B0.8/A/B' : Item(eid=5),
+ 'B0.8/A/B/C' : Item(eid=6),
+ 'B0.8/A/B/C/c1' : Item(eid=7),
+ })
+ test_svnmover3(sbox, '', None, expected_eids,
+ 'branch X Y')
+
+ expected_eids.rename({
+ 'B0.1/A/B/C' : 'B0.1/A',
+ 'B0.1/A/B' : 'B0.1/A/B',
+ 'B0.1/A' : 'B0.1/A/B/C',
+ })
+ test_svnmover3(sbox, '',
+ reported_br_diff('X') +
+ reported_move('A/B/C', 'A') +
+ reported_move('A/B', 'A/B') +
+ reported_move('A', 'A/B/C'),
+ expected_eids,
+ 'mv X/A/B/C X/C ' +
+ 'mv X/A/B X/C/B ' +
+ 'mv X/A X/C/B/C ' +
+ 'mv X/C X/A')
+
+ expected_eids.rename({
+ 'B0.8/A' : 'B0.8/A/B/C',
+ 'B0.8/A/B' : 'B0.8/A/B',
+ 'B0.8/A/B/C' : 'B0.8/A',
+ })
+ test_svnmover3(sbox, '',
+ reported_mg_diff() +
+ reported_br_diff('Y') +
+ reported_move('A/B/C', 'A') +
+ reported_move('A/B', 'A/B') +
+ reported_move('A', 'A/B/C'),
+ expected_eids,
+ 'merge X Y X@2')
+
+def move_to_related_branch_2(sbox):
+ "move to related branch 2"
+ sbox_build_svnmover(sbox)
+
+ expected_eids = svntest.wc.State('', {
+ 'B0' : Item(eid=0),
+ 'B0/X' : Item(eid=1),
+ 'B0.1' : Item(eid=2),
+ 'B0.1/A' : Item(eid=3),
+ 'B0.1/A/B' : Item(eid=4),
+ })
+ test_svnmover3(sbox, '',
+ reported_br_diff('') +
+ reported_br_add('X'),
+ expected_eids,
+ 'mkbranch X ' +
+ 'mkdir X/A ' +
+ 'mkdir X/A/B')
+
+ expected_eids.add({
+ 'B0/Y' : Item(eid=5),
+ 'B0.5' : Item(eid=2),
+ 'B0.5/A' : Item(eid=3),
+ 'B0.5/A/B' : Item(eid=4),
+ })
+ test_svnmover3(sbox, '',
+ reported_br_diff('') +
+ reported_br_add('Y'),
+ expected_eids,
+ 'branch X Y')
+
+ expected_eids.add({
+ 'B0.1/A/ax' : Item(eid=6),
+ 'B0.1/A/B/bx' : Item(eid=7),
+ 'B0.5/A/ay' : Item(eid=8),
+ 'B0.5/A/B/by' : Item(eid=9),
+ })
+ test_svnmover3(sbox, '',
+ reported_br_diff('X') +
+ reported_add('A/B/bx') +
+ reported_add('A/ax') +
+ reported_br_diff('Y') +
+ reported_add('A/B/by') +
+ reported_add('A/ay'),
+ expected_eids,
+ 'mkdir X/A/ax ' +
+ 'mkdir X/A/B/bx ' +
+ 'mkdir Y/A/ay ' +
+ 'mkdir Y/A/B/by ')
+
+ # X and Y are related, X/A/B contains X/A/B/bx, Y/A/B contains Y/A/B/by.
+ # Moving X/A/B to Y/B, i.e. from X to Y, by branch-into-and-delete,
+ # results in Y/B that contains both bx and by.
+ expected_eids.rename({'B0.1/A/B' : 'B0.5/B'})
+ expected_eids.remove('B0.5/A/B', 'B0.5/A/B/by')
+ expected_eids.add({
+ 'B0.5/B/by' : Item(eid=9),
+ })
+ test_svnmover3(sbox, '',
+ reported_br_diff('X') +
+ reported_del(paths=['A/B',
+ 'A/B/bx']) +
+ reported_br_diff('Y') +
+ reported_move('A/B', 'B') +
+ reported_add('B/bx'),
+ expected_eids,
+ 'branch-into-and-delete X/A/B Y/B')
+
+def tree_conflict_detect(sbox,
+ initial_state_cmds,
+ side1_cmds,
+ side2_cmds):
+ """Set up an initial state on one branch using INITIAL_STATE_CMDS,
+ branch it to a second branch, make changes on each branch using
+ SIDE1_CMDS and SIDE2_CMDS, merge the first branch to the second,
+ and expect a conflict."""
+ sbox_build_svnmover(sbox)
+
+ # initial state
+ test_svnmover2(sbox, '', None,
+ 'mkbranch trunk')
+ if initial_state_cmds:
+ test_svnmover2(sbox, 'trunk', None,
+ initial_state_cmds)
+ # branching
+ test_svnmover2(sbox, '', None,
+ 'branch trunk br1')
+ # conflicting changes
+ if side1_cmds:
+ test_svnmover2(sbox, 'trunk', None,
+ side1_cmds)
+ if side2_cmds:
+ test_svnmover2(sbox, 'br1', None,
+ side2_cmds)
+ # merge
+ xtest_svnmover(sbox.repo_url, 'E123456: Cannot commit because there are unresolved conflicts',
+ 'merge trunk br1 trunk@2')
+
+# A simple single-element tree conflict
+def tree_conflict_element_1(sbox):
+ "tree_conflict_element_1"
+ tree_conflict_detect(sbox,
+ 'mkdir a',
+ 'mv a b',
+ 'mv a c')
+
+# A simple name-clash tree conflict
+def tree_conflict_clash_1(sbox):
+ "tree_conflict_clash_1"
+ tree_conflict_detect(sbox,
+ 'mkdir a '
+ 'mkdir b',
+ 'mv a c',
+ 'mv b c')
+
+# A simple name-clash tree conflict
+def tree_conflict_clash_2(sbox):
+ "tree_conflict_clash_2"
+ tree_conflict_detect(sbox,
+ None,
+ 'mkdir c',
+ 'mkdir c')
+
+# A simple cycle tree conflict
+def tree_conflict_cycle_1(sbox):
+ "tree_conflict_cycle_1"
+ tree_conflict_detect(sbox,
+ 'mkdir a '
+ 'mkdir b',
+ 'mv a b/a',
+ 'mv b a/b')
+
+# A simple orphan tree conflict
+def tree_conflict_orphan_1(sbox):
+ "tree_conflict_orphan_1"
+ tree_conflict_detect(sbox,
+ 'mkdir orphan-parent',
+ 'mkdir orphan-parent/orphan',
+ 'rm orphan-parent')
+
+@XFail()
+def replace_via_rm_cp(sbox):
+ """replace by deleting and copying"""
+
+ sbox_build_svnmover(sbox)
+
+ expected_eids = svntest.wc.State('', {
+ 'B0' : Item(eid=0),
+ 'B0/X' : Item(eid=1),
+ 'B0.1' : Item(eid=2),
+ 'B0.1/A' : Item(eid=3),
+ })
+ test_svnmover3(sbox, '',
+ reported_br_diff('') +
+ reported_br_add('X'),
+ expected_eids,
+ 'mkbranch X ' +
+ 'mkdir X/A')
+
+ expected_eids.tweak('B0.1/A', eid=4)
+ test_svnmover3(sbox, '',
+ reported_br_diff('') +
+ reported_del('A') +
+ reported_add('A'),
+ expected_eids,
+ 'rm X/A ' +
+ 'cp 1 X/A X/A')
+
+ # The compatibility layer doesn't record the replace.
+ test_svnmover_verify_log(sbox.repo_url,
+ ['D /top0/X/A',
+ 'A /top0/X/A (from /top0/X/A:1)'])
+
+@XFail()
+# After making a commit, svnmover currently can't (within the same execution)
+# look up paths in the revision it just committed.
+def see_the_revision_just_committed(sbox):
+ """see the revision just committed"""
+
+ sbox_build_svnmover(sbox)
+ # Make a commit, and then check we can copy something from that committed
+ # revision.
+ test_svnmover2(sbox, '', None,
+ 'mkdir A '
+ 'commit ' # r1
+ 'cp 1 A A2 '
+ 'commit') # r2
+ # Conversely, check we cannot copy something from a revision after a newly
+ # committed revision.
+ xtest_svnmover(sbox.repo_url, 'No such revision 4',
+ 'mkdir B '
+ 'commit ' # r3
+ 'cp 4 B B2 '
+ 'commit') # r4
+
+
+@XFail()
+def simple_branch(sbox):
+ """simple branch"""
+ sbox_build_svnmover(sbox)
+
+ expected_eids = svntest.wc.State('', {
+ 'B0' : Item(eid=0),
+ 'B0/X' : Item(eid=1),
+ 'B0.1' : Item(eid=2),
+ 'B0.1/A' : Item(eid=3),
+ 'B0/Y' : Item(eid=4),
+ 'B0.4' : Item(eid=2),
+ 'B0.4/A' : Item(eid=3),
+ })
+ test_svnmover3(sbox, '',
+ reported_br_diff('') +
+ reported_br_add('X'),
+ expected_eids,
+ 'mkbranch X ' +
+ 'commit ' +
+ 'mkdir X/A ' +
+ 'commit ' +
+ 'branch X Y')
+
+ # The compatibility layer doesn't record the copy properly
+ test_svnmover_verify_log(sbox.repo_url,
+ ['A /top0/Y (from /top0/X:1)',
+ 'A /top0/Y/A (from /top0/X/A:2)'])
+
+def merge_move_into_subtree(sbox):
+ "merge move into subtree"
+ sbox_build_svnmover(sbox, content=initial_content_ttb)
+ repo_url = sbox.repo_url
+
+ # This tests the behaviour of merging a subtree. In this case, we expect
+ # each element in the union of (YCA subtree, source subtree, target subtree)
+ # to be merged. (Other behaviours -- such as merging only the elements in
+ # the intersection of those three subtrees -- could be provided in future.)
+ #
+ # This test tests a merge with no conflicts.
+
+ # create initial state in trunk
+ # (r2)
+ test_svnmover2(sbox, '/trunk',
+ reported_br_diff('trunk') +
+ reported_add('A') +
+ reported_add('B2') +
+ reported_add('B2/C2'),
+ 'mkdir A',
+ 'mkdir B2',
+ 'mkdir B2/C2')
+
+ # branch (r3)
+ test_svnmover2(sbox, '',
+ reported_br_diff('') +
+ reported_br_add('branches/br1'),
+ 'branch trunk branches/br1')
+
+ # on trunk: move B2 into subtree A (r4)
+ test_svnmover2(sbox, 'trunk',
+ reported_br_diff('trunk') +
+ reported_move('B2', 'A/B2'),
+ 'mv B2 A/B2')
+
+ # on branch: make a non-conflicting change to 'B2' (r5)
+ test_svnmover2(sbox, 'branches/br1',
+ reported_br_diff('branches/br1') +
+ reported_move('B2', 'B3'),
+ 'mv B2 B3')
+
+ # merge subtree 'A' from trunk to branch (r6)
+ # expect the move-into-subtree to be merged with the rename-outside-subtree
+ test_svnmover2(sbox, '',
+ reported_mg_diff() +
+ reported_br_diff('branches/br1') +
+ reported_move('B3', 'A/B3'),
+ 'merge trunk/A@4 branches/br1/A trunk/A@2')
+
+######################################################################
+
+test_list = [ None,
+ basic_svnmover,
+ nested_replaces,
+ merges,
+ merge_edits_with_move,
+ simple_moves_within_a_branch,
+ move_to_related_branch,
+ move_to_related_branch_element_already_exists,
+ move_to_unrelated_branch,
+ move_branch_within_same_parent_branch,
+ restructure_repo_ttb_projects_to_projects_ttb,
+ restructure_repo_projects_ttb_to_ttb_projects,
+ subbranches1,
+ merge_deleted_subbranch,
+ merge_added_subbranch,
+ branch_to_subbranch_of_self,
+ merge_from_subbranch_to_subtree,
+ modify_payload_of_branch_root_element,
+ merge_swap_abc,
+ move_to_related_branch_2,
+ tree_conflict_element_1,
+ tree_conflict_clash_1,
+ tree_conflict_clash_2,
+ tree_conflict_cycle_1,
+ tree_conflict_orphan_1,
+ replace_via_rm_cp,
+ see_the_revision_just_committed,
+ simple_branch,
+ merge_move_into_subtree,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
diff --git a/subversion/tests/cmdline/svnmucc_tests.py b/subversion/tests/cmdline/svnmucc_tests.py
new file mode 100755
index 0000000..f95c558
--- /dev/null
+++ b/subversion/tests/cmdline/svnmucc_tests.py
@@ -0,0 +1,607 @@
+#!/usr/bin/env python
+#
+# svnmucc_tests.py: tests of svnmucc
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+import svntest
+import re
+
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+
+######################################################################
+
+@Issues(3895,3953)
+def reject_bogus_mergeinfo(sbox):
+ "reject bogus mergeinfo"
+
+ sbox.build(create_wc=False)
+
+ expected_error = ".*(E200020.*Invalid revision|E175002.*PROPPATCH)"
+
+ # At present this tests the server, but if we ever make svnmucc
+ # validate the mergeinfo up front then it will only test the client
+ svntest.actions.run_and_verify_svnmucc([], expected_error,
+ 'propset', 'svn:mergeinfo', '/B:0',
+ '-m', 'log msg',
+ sbox.repo_url + '/A')
+
+_svnmucc_re = re.compile(b'^(r[0-9]+) committed by jrandom at (.*)$')
+_log_re = re.compile('^ ([ADRM] /[^\(]+($| \(from .*:[0-9]+\)$))')
+_err_re = re.compile('^svnmucc: (.*)$')
+
+def test_svnmucc(repo_url, expected_path_changes, *varargs):
+ """Run svnmucc with the list of SVNMUCC_ARGS arguments. Verify that
+ its run results in a new commit with 'svn log -rHEAD' changed paths
+ that match the list of EXPECTED_PATH_CHANGES."""
+
+ # First, run svnmucc.
+ exit_code, outlines, errlines = svntest.main.run_svnmucc('-U', repo_url,
+ *varargs)
+ if errlines:
+ raise svntest.main.SVNCommitFailure(str(errlines))
+ if len(outlines) != 1 or not _svnmucc_re.match(outlines[0]):
+ raise svntest.main.SVNLineUnequal(str(outlines))
+
+ # Now, run 'svn log -vq -rHEAD'
+ changed_paths = []
+ exit_code, outlines, errlines = \
+ svntest.main.run_svn(None, 'log', '-vqrHEAD', repo_url)
+ if errlines:
+ raise svntest.Failure("Unable to verify commit with 'svn log': %s"
+ % (str(errlines)))
+ for line in outlines:
+ match = _log_re.match(line)
+ if match:
+ changed_paths.append(match.group(1).rstrip('\n\r'))
+
+ expected_path_changes.sort()
+ changed_paths.sort()
+ if changed_paths != expected_path_changes:
+ raise svntest.Failure("Logged path changes differ from expectations\n"
+ " expected: %s\n"
+ " actual: %s" % (str(expected_path_changes),
+ str(changed_paths)))
+
+def xtest_svnmucc(repo_url, expected_errors, *varargs):
+ """Run svnmucc with the list of SVNMUCC_ARGS arguments. Verify that
+ its run results match the list of EXPECTED_ERRORS."""
+
+ # First, run svnmucc.
+ exit_code, outlines, errlines = svntest.main.run_svnmucc('-U', repo_url,
+ *varargs)
+ errors = []
+ for line in errlines:
+ match = _err_re.match(line)
+ if match:
+ errors.append(line.rstrip('\n\r'))
+ if errors != expected_errors:
+ raise svntest.main.SVNUnmatchedError(str(errors))
+
+
+def basic_svnmucc(sbox):
+ "basic svnmucc tests"
+
+ sbox.build()
+ empty_file = sbox.ospath('empty')
+ file = sbox.ospath('file')
+ svntest.main.file_append(empty_file, '')
+ svntest.main.file_append(file, 'file')
+
+ # revision 2
+ test_svnmucc(sbox.repo_url,
+ ['A /foo'
+ ], # ---------
+ '-m', 'log msg',
+ 'mkdir', 'foo')
+
+ # revision 3
+ test_svnmucc(sbox.repo_url,
+ ['A /z.c',
+ ], # ---------
+ '-m', 'log msg',
+ 'put', empty_file, 'z.c')
+
+ # revision 4
+ test_svnmucc(sbox.repo_url,
+ ['A /foo/z.c (from /z.c:3)',
+ 'A /foo/bar (from /foo:3)',
+ ], # ---------
+ '-m', 'log msg',
+ 'cp', '3', 'z.c', 'foo/z.c',
+ 'cp', '3', 'foo', 'foo/bar')
+
+ # revision 5
+ test_svnmucc(sbox.repo_url,
+ ['A /zig (from /foo:4)',
+ 'D /zig/bar',
+ 'D /foo',
+ 'A /zig/zag (from /foo:4)',
+ ], # ---------
+ '-m', 'log msg',
+ 'cp', '4', 'foo', 'zig',
+ 'rm', 'zig/bar',
+ 'mv', 'foo', 'zig/zag')
+
+ # revision 6
+ test_svnmucc(sbox.repo_url,
+ ['D /z.c',
+ 'A /zig/zag/bar/y.c (from /z.c:5)',
+ 'A /zig/zag/bar/x.c (from /z.c:3)',
+ ], # ---------
+ '-m', 'log msg',
+ 'mv', 'z.c', 'zig/zag/bar/y.c',
+ 'cp', '3', 'z.c', 'zig/zag/bar/x.c')
+
+ # revision 7
+ test_svnmucc(sbox.repo_url,
+ ['D /zig/zag/bar/y.c',
+ 'A /zig/zag/bar/y y.c (from /zig/zag/bar/y.c:6)',
+ 'A /zig/zag/bar/y%20y.c (from /zig/zag/bar/y.c:6)',
+ ], # ---------
+ '-m', 'log msg',
+ 'mv', 'zig/zag/bar/y.c', 'zig/zag/bar/y%20y.c',
+ 'cp', 'HEAD', 'zig/zag/bar/y.c', 'zig/zag/bar/y%2520y.c')
+
+ # revision 8
+ test_svnmucc(sbox.repo_url,
+ ['D /zig/zag/bar/y y.c',
+ 'A /zig/zag/bar/z z1.c (from /zig/zag/bar/y y.c:7)',
+ 'A /zig/zag/bar/z%20z.c (from /zig/zag/bar/y%20y.c:7)',
+ 'A /zig/zag/bar/z z2.c (from /zig/zag/bar/y y.c:7)',
+ ], #---------
+ '-m', 'log msg',
+ 'mv', 'zig/zag/bar/y%20y.c', 'zig/zag/bar/z z1.c',
+ 'cp', 'HEAD', 'zig/zag/bar/y%2520y.c', 'zig/zag/bar/z%2520z.c',
+ 'cp', 'HEAD', 'zig/zag/bar/y y.c', 'zig/zag/bar/z z2.c')
+
+
+ # revision 9
+ test_svnmucc(sbox.repo_url,
+ ['D /zig/zag',
+ 'A /zig/foo (from /zig/zag:8)',
+ 'D /zig/foo/bar/z%20z.c',
+ 'D /zig/foo/bar/z z2.c',
+ 'R /zig/foo/bar/z z1.c (from /zig/zag/bar/x.c:6)',
+ ], #---------
+ '-m', 'log msg',
+ 'mv', 'zig/zag', 'zig/foo',
+ 'rm', 'zig/foo/bar/z z1.c',
+ 'rm', 'zig/foo/bar/z%20z2.c',
+ 'rm', 'zig/foo/bar/z%2520z.c',
+ 'cp', '6', 'zig/zag/bar/x.c', 'zig/foo/bar/z%20z1.c')
+
+ # revision 10
+ test_svnmucc(sbox.repo_url,
+ ['R /zig/foo/bar (from /zig/z.c:9)',
+ ], #---------
+ '-m', 'log msg',
+ 'rm', 'zig/foo/bar',
+ 'cp', '9', 'zig/z.c', 'zig/foo/bar')
+
+ # revision 11
+ test_svnmucc(sbox.repo_url,
+ ['R /zig/foo/bar (from /zig/foo/bar:9)',
+ 'D /zig/foo/bar/z z1.c',
+ ], #---------
+ '-m', 'log msg',
+ 'rm', 'zig/foo/bar',
+ 'cp', '9', 'zig/foo/bar', 'zig/foo/bar',
+ 'rm', 'zig/foo/bar/z%20z1.c')
+
+ # revision 12
+ test_svnmucc(sbox.repo_url,
+ ['R /zig/foo (from /zig/foo/bar:11)',
+ ], #---------
+ '-m', 'log msg',
+ 'rm', 'zig/foo',
+ 'cp', 'head', 'zig/foo/bar', 'zig/foo')
+
+ # revision 13
+ test_svnmucc(sbox.repo_url,
+ ['D /zig',
+ 'A /foo (from /foo:4)',
+ 'A /foo/foo (from /foo:4)',
+ 'A /foo/foo/foo (from /foo:4)',
+ 'D /foo/foo/bar',
+ 'R /foo/foo/foo/bar (from /foo:4)',
+ ], #---------
+ '-m', 'log msg',
+ 'rm', 'zig',
+ 'cp', '4', 'foo', 'foo',
+ 'cp', '4', 'foo', 'foo/foo',
+ 'cp', '4', 'foo', 'foo/foo/foo',
+ 'rm', 'foo/foo/bar',
+ 'rm', 'foo/foo/foo/bar',
+ 'cp', '4', 'foo', 'foo/foo/foo/bar')
+
+ # revision 14
+ test_svnmucc(sbox.repo_url,
+ ['A /boozle (from /foo:4)',
+ 'A /boozle/buz',
+ 'A /boozle/buz/nuz',
+ ], #---------
+ '-m', 'log msg',
+ 'cp', '4', 'foo', 'boozle',
+ 'mkdir', 'boozle/buz',
+ 'mkdir', 'boozle/buz/nuz')
+
+ # revision 15
+ test_svnmucc(sbox.repo_url,
+ ['A /boozle/buz/svnmucc-test.py',
+ 'A /boozle/guz (from /boozle/buz:14)',
+ 'A /boozle/guz/svnmucc-test.py',
+ ], #---------
+ '-m', 'log msg',
+ 'put', empty_file, 'boozle/buz/svnmucc-test.py',
+ 'cp', '14', 'boozle/buz', 'boozle/guz',
+ 'put', empty_file, 'boozle/guz/svnmucc-test.py')
+
+ # revision 16
+ test_svnmucc(sbox.repo_url,
+ ['M /boozle/buz/svnmucc-test.py',
+ 'R /boozle/guz/svnmucc-test.py',
+ ], #---------
+ '-m', 'log msg',
+ 'put', empty_file, 'boozle/buz/svnmucc-test.py',
+ 'rm', 'boozle/guz/svnmucc-test.py',
+ 'put', empty_file, 'boozle/guz/svnmucc-test.py')
+
+ # revision 17
+ test_svnmucc(sbox.repo_url,
+ ['R /foo/bar (from /foo/foo:16)'
+ ], #---------
+ '-m', 'log msg',
+ 'rm', 'foo/bar',
+ 'cp', '16', 'foo/foo', 'foo/bar',
+ 'propset', 'testprop', 'true', 'foo/bar')
+
+ # revision 18
+ test_svnmucc(sbox.repo_url,
+ ['M /foo/bar'
+ ], #---------
+ '-m', 'log msg',
+ 'propdel', 'testprop', 'foo/bar')
+
+ # revision 19
+ test_svnmucc(sbox.repo_url,
+ ['M /foo/z.c',
+ 'M /foo/foo',
+ ], #---------
+ '-m', 'log msg',
+ 'propset', 'testprop', 'true', 'foo/z.c',
+ 'propset', 'testprop', 'true', 'foo/foo')
+
+ # revision 20
+ test_svnmucc(sbox.repo_url,
+ ['M /foo/z.c',
+ 'M /foo/foo',
+ ], #---------
+ '-m', 'log msg',
+ 'propsetf', 'testprop', empty_file, 'foo/z.c',
+ 'propsetf', 'testprop', empty_file, 'foo/foo')
+
+ # revision 21
+ test_svnmucc(sbox.repo_url,
+ ['M /foo/z.c',
+ ], #---------
+ '-m', 'log msg',
+ 'propset', 'testprop', 'false', 'foo/z.c',
+ 'put', file, 'foo/z.c')
+
+ # Expected missing revision error
+ xtest_svnmucc(sbox.repo_url,
+ ["svnmucc: E200004: 'a' is not a revision"
+ ], #---------
+ '-m', 'log msg',
+ 'cp', 'a', 'b')
+
+ # Expected cannot be younger error
+ xtest_svnmucc(sbox.repo_url,
+ ['svnmucc: E160006: No such revision 42',
+ ], #---------
+ '-m', 'log msg',
+ 'cp', '42', 'a', 'b')
+
+ # Expected already exists error
+ xtest_svnmucc(sbox.repo_url,
+ ["svnmucc: E160020: Path 'foo' already exists",
+ ], #---------
+ '-m', 'log msg',
+ 'cp', '17', 'a', 'foo')
+
+ # Expected copy_src already exists error
+ xtest_svnmucc(sbox.repo_url,
+ ["svnmucc: E160020: Path 'a/bar' already exists",
+ ], #---------
+ '-m', 'log msg',
+ 'cp', '17', 'foo', 'a',
+ 'cp', '17', 'foo/foo', 'a/bar')
+
+ # Expected not found error
+ xtest_svnmucc(sbox.repo_url,
+ ["svnmucc: E160013: Path 'a' not found in revision 17",
+ ], #---------
+ '-m', 'log msg',
+ 'cp', '17', 'a', 'b')
+
+
+def propset_root_internal(sbox, target):
+ ## propset on ^/
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-m', 'log msg',
+ 'propset', 'foo', 'bar',
+ target)
+ svntest.actions.run_and_verify_svn('bar', [],
+ 'propget', '--no-newline', 'foo',
+ target)
+
+ ## propdel on ^/
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-m', 'log msg',
+ 'propdel', 'foo',
+ target)
+ svntest.actions.run_and_verify_svn([],
+ '.*W200017: Property.*not found',
+ 'propget', '--no-newline', 'foo',
+ target)
+
+@Issues(3663)
+def propset_root(sbox):
+ "propset/propdel on repos root"
+
+ sbox.build(create_wc=False)
+ propset_root_internal(sbox, sbox.repo_url)
+ propset_root_internal(sbox, sbox.repo_url + '/iota')
+
+
+def too_many_log_messages(sbox):
+ "test log message mutual exclusivity checks"
+
+ sbox.build() # would use read-only=True, but need a place to stuff msg_file
+ msg_file = sbox.ospath('svnmucc_msg')
+ svntest.main.file_append(msg_file, 'some log message')
+ err_msg = ["svnmucc: E205000: --message (-m), --file (-F), and "
+ "--with-revprop=svn:log are mutually exclusive"]
+
+ xtest_svnmucc(sbox.repo_url, err_msg,
+ '--non-interactive',
+ '-m', 'log msg',
+ '-F', msg_file,
+ 'mkdir', 'A/subdir')
+ xtest_svnmucc(sbox.repo_url, err_msg,
+ '--non-interactive',
+ '-m', 'log msg',
+ '--with-revprop', 'svn:log=proppy log message',
+ 'mkdir', 'A/subdir')
+ xtest_svnmucc(sbox.repo_url, err_msg,
+ '--non-interactive',
+ '-F', msg_file,
+ '--with-revprop', 'svn:log=proppy log message',
+ 'mkdir', 'A/subdir')
+ xtest_svnmucc(sbox.repo_url, err_msg,
+ '--non-interactive',
+ '-m', 'log msg',
+ '-F', msg_file,
+ '--with-revprop', 'svn:log=proppy log message',
+ 'mkdir', 'A/subdir')
+
+@Issues(3418)
+def no_log_msg_non_interactive(sbox):
+ "test non-interactive without a log message"
+
+ sbox.build(create_wc=False)
+ xtest_svnmucc(sbox.repo_url,
+ ["svnmucc: E205001: Cannot invoke editor to get log message "
+ "when non-interactive"
+ ], #---------
+ '--non-interactive',
+ 'mkdir', 'A/subdir')
+
+
+def nested_replaces(sbox):
+ "nested replaces"
+
+ sbox.build(create_wc=False)
+ repo_url = sbox.repo_url
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-U', repo_url, '-m', 'r2: create tree',
+ 'rm', 'A',
+ 'rm', 'iota',
+ 'mkdir', 'A', 'mkdir', 'A/B', 'mkdir', 'A/B/C',
+ 'mkdir', 'M', 'mkdir', 'M/N', 'mkdir', 'M/N/O',
+ 'mkdir', 'X', 'mkdir', 'X/Y', 'mkdir', 'X/Y/Z')
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-U', repo_url, '-m', 'r3: nested replaces',
+ *("""
+rm A rm M rm X
+cp HEAD X/Y/Z A cp HEAD A/B/C M cp HEAD M/N/O X
+cp HEAD A/B A/B cp HEAD M/N M/N cp HEAD X/Y X/Y
+rm A/B/C rm M/N/O rm X/Y/Z
+cp HEAD X A/B/C cp HEAD A M/N/O cp HEAD M X/Y/Z
+rm A/B/C/Y
+ """.split()))
+
+ # ### TODO: need a smarter run_and_verify_log() that verifies copyfrom
+ excaped = svntest.main.ensure_list(map(re.escape, [
+ ' R /A (from /X/Y/Z:2)',
+ ' A /A/B (from /A/B:2)',
+ ' R /A/B/C (from /X:2)',
+ ' R /M (from /A/B/C:2)',
+ ' A /M/N (from /M/N:2)',
+ ' R /M/N/O (from /A:2)',
+ ' R /X (from /M/N/O:2)',
+ ' A /X/Y (from /X/Y:2)',
+ ' R /X/Y/Z (from /M:2)',
+ ' D /A/B/C/Y',
+ ]))
+ expected_output = svntest.verify.UnorderedRegexListOutput(excaped
+ + ['^-', '^r3', '^-', '^Changed paths:',])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'log', '-qvr3', repo_url)
+
+
+def prohibited_deletes_and_moves(sbox):
+ "test prohibited delete and move operations"
+
+ # These action sequences were allowed in 1.8.13, but are prohibited in 1.9.x
+ # and later. Most of them probably indicate an inadvertent user mistake.
+ # See dev@, 2015-05-11, "Re: Issue 4579 / svnmucc fails to process certain
+ # deletes", <http://svn.haxx.se/dev/archive-2015-05/0038.shtml>
+
+ sbox.build(read_only = True)
+ svntest.main.file_write(sbox.ospath('file'), "New contents")
+
+ xtest_svnmucc(sbox.repo_url,
+ ["svnmucc: E200009: Can't delete node at 'iota'",
+ ], #---------
+ '-m', 'r2: modify and delete /iota',
+ 'put', sbox.ospath('file'), 'iota',
+ 'rm', 'iota')
+
+ xtest_svnmucc(sbox.repo_url,
+ ["svnmucc: E200009: Can't delete node at 'iota'",
+ ], #---------
+ '-m', 'r2: propset and delete /iota',
+ 'propset', 'prop', 'val', 'iota',
+ 'rm', 'iota')
+
+ xtest_svnmucc(sbox.repo_url,
+ ["svnmucc: E160013: Can't delete node at 'iota' as it does "
+ "not exist",
+ ], #---------
+ '-m', 'r2: delete and delete /iota',
+ 'rm', 'iota',
+ 'rm', 'iota')
+
+ # Subversion 1.8.13 used to move /iota without applying the text change.
+ xtest_svnmucc(sbox.repo_url,
+ ["svnmucc: E200009: Can't delete node at 'iota'",
+ ], #---------
+ '-m', 'r2: modify and move /iota',
+ 'put', sbox.ospath('file'), 'iota',
+ 'mv', 'iota', 'iota2')
+
+ # Subversion 1.8.13 used to move /A without applying the inner remove.
+ xtest_svnmucc(sbox.repo_url,
+ ["svnmucc: E200009: Can't delete node at 'A'",
+ ], #---------
+ '-m', 'r2: delete /A/B and move /A',
+ 'rm', 'A/B',
+ 'mv', 'A', 'A1')
+
+def svnmucc_type_errors(sbox):
+ "test type errors"
+
+ sbox.build(read_only=True)
+
+ sbox.simple_append('file', 'New contents')
+
+ xtest_svnmucc(sbox.repo_url,
+ ["svnmucc: E160016: Can't operate on 'B' "
+ "because 'A' is not a directory"],
+ '-m', '',
+ 'put', sbox.ospath('file'), 'A',
+ 'mkdir', 'A/B',
+ 'propset', 'iota', 'iota', 'iota')
+
+ xtest_svnmucc(sbox.repo_url,
+ ["svnmucc: E200009: Can't delete node at 'A'"],
+ '-m', '',
+ 'mkdir', 'A/Z',
+ 'put', sbox.ospath('file'), 'A')
+
+ xtest_svnmucc(sbox.repo_url,
+ ["svnmucc: E160020: Path 'Z' already exists"],
+ '-m', '',
+ 'mkdir', 'A/Z',
+ 'put', sbox.ospath('file'), 'A/Z')
+
+def svnmucc_propset_and_put(sbox):
+ "propset and put"
+
+ sbox.build()
+
+ sbox.simple_append('file', 'New contents')
+
+ # First in the sane order: put, then propset
+ xtest_svnmucc(sbox.repo_url,
+ [],
+ '-m', '',
+ 'put', sbox.ospath('file'), 't1',
+ 'propset', 't1', 't1', 't1')
+
+ # And now in an impossible order: propset, then put
+ xtest_svnmucc(sbox.repo_url,
+ ["svnmucc: E200009: Can't set properties at not existing 't2'"],
+ '-m', '',
+ 'propset', 't2', 't2', 't2',
+ 'put', sbox.ospath('file'), 't2')
+
+ # And if the target already exists (dir)
+ xtest_svnmucc(sbox.repo_url,
+ ["svnmucc: E200009: Can't delete node at 'A'"],
+ '-m', '',
+ 'propset', 'A', 'A', 'A',
+ 'put', sbox.ospath('file'), 'A')
+
+ # And if the target already exists (file) # fixed in r1702467
+ xtest_svnmucc(sbox.repo_url,
+ [],
+ '-m', '',
+ 'propset', 'iota', 'iota', 'iota',
+ 'put', sbox.ospath('file'), 'iota')
+
+  # Put same file twice (non-existing)
+ xtest_svnmucc(sbox.repo_url,
+ ["svnmucc: E160020: Path 't3' already exists"],
+ '-m', '',
+ 'put', sbox.ospath('file'), 't3',
+ 'put', sbox.ospath('file'), 't3')
+
+ # Put same file twice (existing)
+ xtest_svnmucc(sbox.repo_url,
+ ["svnmucc: E200009: Can't update file at 't1'"],
+ '-m', '',
+ 'put', sbox.ospath('file'), 't1',
+ 'put', sbox.ospath('file'), 't1')
+
+
+######################################################################
+
+test_list = [ None,
+ reject_bogus_mergeinfo,
+ basic_svnmucc,
+ propset_root,
+ too_many_log_messages,
+ no_log_msg_non_interactive,
+ nested_replaces,
+ prohibited_deletes_and_moves,
+ svnmucc_type_errors,
+ svnmucc_propset_and_put,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
diff --git a/subversion/tests/cmdline/svnrdump_tests.py b/subversion/tests/cmdline/svnrdump_tests.py
new file mode 100755
index 0000000..ae6a7e0
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests.py
@@ -0,0 +1,1053 @@
+#!/usr/bin/env python
+#
+# svnrdump_tests.py: Tests svnrdump's remote repository dumping capabilities.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import sys, os
+import re
+
+# Our testing module
+import svntest
+from svntest.verify import SVNUnexpectedStdout, SVNUnexpectedStderr
+from svntest.verify import SVNExpectedStderr
+from svntest.main import write_restrictive_svnserve_conf
+from svntest.main import server_has_partial_replay
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+## Mismatched headers during dumping operation
+# Text-copy-source-* and *-sha1 headers are not provided by the RA
+# layer. `svnadmin dump` is able to provide them because it works on
+# the FS layer. Also, svnrdump attaches "Prop-delta: true" with
+# everything whether it's really a delta or a new prop (delta from
+# /dev/null). This is really harmless, but `svnadmin dump` contains
+# the logic for differentiating between these two cases.
+
+mismatched_headers_re = re.compile(
+ b"Prop-delta: .*|Text-content-sha1: .*|Text-copy-source-md5: .*|" +
+ b"Text-copy-source-sha1: .*|Text-delta-base-sha1: .*"
+)
+
+######################################################################
+# Helper routines
+
+def compare_repos_dumps(sbox, other_dumpfile,
+ bypass_prop_validation=False):
+ """Compare two dumpfiles, one created from SBOX, and other given
+ by OTHER_DUMPFILE. The dumpfiles do not need to match linewise, as the
+ OTHER_DUMPFILE contents will first be loaded into a repository and then
+ re-dumped to do the match, which should generate the same dumpfile as
+ dumping SBOX."""
+
+
+ sbox_dumpfile = svntest.actions.run_and_verify_dump(sbox.repo_dir)
+
+ # Load and dump the other dumpfile (using svnadmin)
+ other_sbox = sbox.clone_dependent()
+ other_sbox.build(create_wc=False, empty=True)
+ svntest.actions.run_and_verify_load(other_sbox.repo_dir, other_dumpfile,
+ bypass_prop_validation)
+ other_dumpfile = svntest.actions.run_and_verify_dump(other_sbox.repo_dir)
+
+ ### This call kind-of assumes EXPECTED is first and ACTUAL is second.
+ svntest.verify.compare_dump_files(
+ "Dump files", "DUMP", other_dumpfile, sbox_dumpfile)
+
+def run_dump_test(sbox, dumpfile_name, expected_dumpfile_name = None,
+ subdir = None, bypass_prop_validation = False,
+ ignore_base_checksums = False, extra_options = []):
+
+ """Load a dumpfile using 'svnadmin load', dump it with 'svnrdump
+ dump' and check that the same dumpfile is produced or that
+ expected_dumpfile_name is produced if provided. Additionally, the
+ subdir argument appends itself to the URL. EXTRA_OPTIONS is an
+ array of optional additional options to pass to 'svnrdump dump'."""
+
+ # Create an empty sandbox repository
+ sbox.build(create_wc=False, empty=True)
+
+ # This directory contains all the dump files
+ svnrdump_tests_dir = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svnrdump_tests_data')
+
+ # Load the specified dump file into the sbox repository using
+ # svnadmin load
+ original_dumpfile = open(os.path.join(svnrdump_tests_dir,
+ dumpfile_name),
+ 'rb').readlines()
+ svntest.actions.run_and_verify_load(sbox.repo_dir, original_dumpfile,
+ bypass_prop_validation)
+
+ repo_url = sbox.repo_url
+ if subdir:
+ repo_url = repo_url + subdir
+
+ # Create a dump file using svnrdump
+ opts = extra_options + ['-q', 'dump', repo_url]
+ svnrdump_dumpfile = \
+ svntest.actions.run_and_verify_svnrdump(None, svntest.verify.AnyOutput,
+ [], 0, *opts)
+
+ if expected_dumpfile_name:
+ expected_dumpfile = open(os.path.join(svnrdump_tests_dir,
+ expected_dumpfile_name),
+ 'rb').readlines()
+ # Compare the output from stdout
+ if ignore_base_checksums:
+ expected_dumpfile = [l for l in expected_dumpfile
+ if not l.startswith(b'Text-delta-base-md5')]
+ svnrdump_dumpfile = [l for l in svnrdump_dumpfile
+ if not l.startswith(b'Text-delta-base-md5')]
+ expected_dumpfile = [l for l in expected_dumpfile
+ if not mismatched_headers_re.match(l)]
+ svnrdump_dumpfile = [l for l in svnrdump_dumpfile
+ if not mismatched_headers_re.match(l)]
+
+ expected_dumpfile = svntest.verify.UnorderedOutput(expected_dumpfile)
+
+ svntest.verify.compare_and_display_lines(
+ "Dump files", "DUMP", expected_dumpfile, svnrdump_dumpfile,
+ None)
+
+ else:
+ # The expected dumpfile is the result of dumping SBOX.
+ compare_repos_dumps(sbox, svnrdump_dumpfile, bypass_prop_validation)
+
+def run_load_test(sbox, dumpfile_name, expected_dumpfile_name = None,
+ expect_deltas = True):
+ """Load a dumpfile using 'svnrdump load', dump it with 'svnadmin
+ dump' and check that the same dumpfile is produced"""
+
+ # Create an empty sandbox repository
+ sbox.build(create_wc=False, empty=True)
+
+ # Create the revprop-change hook for this test
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+
+ # This directory contains all the dump files
+ svnrdump_tests_dir = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svnrdump_tests_data')
+
+ # Load the specified dump file into the sbox repository using
+ # svnrdump load
+ original_dumpfile = open(os.path.join(svnrdump_tests_dir,
+ dumpfile_name),
+ 'rb').readlines()
+
+ # Set the UUID of the sbox repository to the UUID specified in the
+ # dumpfile ### RA layer doesn't have a set_uuid functionality
+ uuid = original_dumpfile[2].split(b' ')[1][:-1].decode()
+ svntest.actions.run_and_verify_svnadmin2(None, None, 0,
+ 'setuuid', sbox.repo_dir,
+ uuid)
+
+ svntest.actions.run_and_verify_svnrdump(original_dumpfile,
+ svntest.verify.AnyOutput,
+ [], 0, 'load', sbox.repo_url)
+
+ # Re-dump the rdump-loaded repo using svnadmin dump
+ resulted_dumpfile = svntest.actions.run_and_verify_dump(sbox.repo_dir,
+ expect_deltas)
+
+ if expected_dumpfile_name:
+ expected_dumpfile = open(os.path.join(svnrdump_tests_dir,
+ expected_dumpfile_name),
+ 'rb').readlines()
+
+ # Compare the output from stdout
+ svntest.verify.compare_and_display_lines(
+ "Dump files", "DUMP", expected_dumpfile, resulted_dumpfile)
+
+ else:
+ expected_dumpfile = original_dumpfile
+ compare_repos_dumps(sbox, expected_dumpfile)
+
+######################################################################
+# Tests
+
+def basic_dump(sbox):
+ "dump: standard sbox repos"
+ sbox.build(read_only = True, create_wc = False)
+
+ out = \
+ svntest.actions.run_and_verify_svnrdump(None, svntest.verify.AnyOutput,
+ [], 0, '-q', 'dump',
+ sbox.repo_url)
+
+ if not out[0].startswith(b'SVN-fs-dump-format-version:'):
+ raise svntest.Failure('No valid output')
+
+def revision_0_dump(sbox):
+ "dump: revision zero"
+ run_dump_test(sbox, "revision-0.dump")
+
+def revision_0_load(sbox):
+ "load: revision zero"
+ run_load_test(sbox, "revision-0.dump")
+
+# skeleton.dump repository layout
+#
+# Projects/ (Added r1)
+# README (Added r2)
+# Project-X (Added r3)
+# Project-Y (Added r4)
+# Project-Z (Added r5)
+# docs/ (Added r6)
+# README (Added r6)
+
+def skeleton_dump(sbox):
+ "dump: skeleton repository"
+ run_dump_test(sbox, "skeleton.dump")
+
+def skeleton_load(sbox):
+ "load: skeleton repository"
+ run_load_test(sbox, "skeleton.dump")
+
+def sparse_propchanges_dump(sbox):
+ "dump: sparse file/dir propchanges"
+ run_dump_test(sbox, "sparse-propchanges.dump")
+
+@Issue(3902)
+def sparse_propchanges_load(sbox):
+ "load: sparse file/dir propchanges"
+ run_load_test(sbox, "sparse-propchanges.dump")
+
+def copy_and_modify_dump(sbox):
+ "dump: copy and modify"
+ run_dump_test(sbox, "copy-and-modify.dump")
+
+def copy_and_modify_load(sbox):
+ "load: copy and modify"
+ run_load_test(sbox, "copy-and-modify.dump")
+
+def no_author_dump(sbox):
+ "dump: copy revs with no svn:author revprops"
+ run_dump_test(sbox, "no-author.dump")
+
+def no_author_load(sbox):
+ "load: copy revs with no svn:author revprops"
+ run_load_test(sbox, "no-author.dump")
+
+def copy_from_previous_version_and_modify_dump(sbox):
+ "dump: copy from previous version and modify"
+ run_dump_test(sbox, "copy-from-previous-version-and-modify.dump")
+
+def copy_from_previous_version_and_modify_load(sbox):
+ "load: copy from previous version and modify"
+ run_load_test(sbox, "copy-from-previous-version-and-modify.dump")
+
+def modified_in_place_dump(sbox):
+ "dump: modified in place"
+ run_dump_test(sbox, "modified-in-place.dump")
+
+def modified_in_place_load(sbox):
+ "load: modified in place"
+ run_load_test(sbox, "modified-in-place.dump")
+
+def move_and_modify_in_the_same_revision_dump(sbox):
+ "dump: move parent & modify child file in same rev"
+ run_dump_test(sbox, "move-and-modify.dump")
+
+def move_and_modify_in_the_same_revision_load(sbox):
+ "load: move parent & modify child file in same rev"
+ run_load_test(sbox, "move-and-modify.dump")
+
+def tag_empty_trunk_dump(sbox):
+ "dump: tag empty trunk"
+ run_dump_test(sbox, "tag-empty-trunk.dump")
+
+def tag_empty_trunk_load(sbox):
+ "load: tag empty trunk"
+ run_load_test(sbox, "tag-empty-trunk.dump")
+
+def tag_trunk_with_file_dump(sbox):
+ "dump: tag trunk containing a file"
+ run_dump_test(sbox, "tag-trunk-with-file.dump")
+
+def tag_trunk_with_file_load(sbox):
+ "load: tag trunk containing a file"
+ run_load_test(sbox, "tag-trunk-with-file.dump")
+
+def tag_trunk_with_file2_dump(sbox):
+ "dump: tag trunk containing a file (#2)"
+ run_dump_test(sbox, "tag-trunk-with-file2.dump")
+
+def tag_trunk_with_file2_load(sbox):
+ "load: tag trunk containing a file (#2)"
+ run_load_test(sbox, "tag-trunk-with-file2.dump")
+
+def dir_prop_change_dump(sbox):
+ "dump: directory property changes"
+ run_dump_test(sbox, "dir-prop-change.dump")
+
+def dir_prop_change_load(sbox):
+ "load: directory property changes"
+ run_load_test(sbox, "dir-prop-change.dump")
+
+def copy_parent_modify_prop_dump(sbox):
+ "dump: copy parent and modify prop"
+ run_dump_test(sbox, "copy-parent-modify-prop.dump")
+
+def copy_parent_modify_prop_load(sbox):
+ "load: copy parent and modify prop"
+ run_load_test(sbox, "copy-parent-modify-prop.dump")
+
+def copy_revprops_dump(sbox):
+ "dump: copy revprops other than svn:*"
+ run_dump_test(sbox, "revprops.dump")
+
+def copy_revprops_load(sbox):
+ "load: copy revprops other than svn:*"
+ run_load_test(sbox, "revprops.dump")
+
+def only_trunk_dump(sbox):
+ "dump: subdirectory"
+ run_dump_test(sbox, "trunk-only.dump", subdir="/trunk",
+ expected_dumpfile_name="trunk-only.expected.dump")
+
+def only_trunk_A_with_changes_dump(sbox):
+ "dump: subdirectory with changes on root"
+ run_dump_test(sbox, "trunk-A-changes.dump", subdir="/trunk/A",
+ expected_dumpfile_name="trunk-A-changes.expected.dump")
+
+def url_encoding_dump(sbox):
+ "dump: url encoding issues"
+ run_dump_test(sbox, "url-encoding-bug.dump")
+
+def url_encoding_load(sbox):
+ "load: url encoding issues"
+ run_load_test(sbox, "url-encoding-bug.dump")
+
+def copy_bad_line_endings_dump(sbox):
+ "dump: inconsistent line endings in svn:* props"
+ run_dump_test(sbox, "copy-bad-line-endings.dump",
+ expected_dumpfile_name="copy-bad-line-endings.expected.dump",
+ bypass_prop_validation=True)
+
+@Issue(4263)
+def copy_bad_line_endings_load(sbox):
+ "load: inconsistent line endings in svn:* props"
+ run_load_test(sbox, "copy-bad-line-endings.dump",
+ expected_dumpfile_name="copy-bad-line-endings.expected.dump")
+
+def copy_bad_line_endings2_dump(sbox):
+ "dump: non-LF line endings in svn:* props"
+ run_dump_test(sbox, "copy-bad-line-endings2.dump",
+ expected_dumpfile_name="copy-bad-line-endings2.expected.dump",
+ bypass_prop_validation=True, ignore_base_checksums=True)
+
+def commit_a_copy_of_root_dump(sbox):
+ "dump: commit a copy of root"
+ run_dump_test(sbox, "repo-with-copy-of-root-dir.dump")
+
+def commit_a_copy_of_root_load(sbox):
+ "load: commit a copy of root"
+ run_load_test(sbox, "repo-with-copy-of-root-dir.dump")
+
+def descend_into_replace_dump(sbox):
+ "dump: descending into replaced dir looks in src"
+ run_dump_test(sbox, "descend-into-replace.dump", subdir='/trunk/H',
+ expected_dumpfile_name = "descend-into-replace.expected.dump")
+
+def descend_into_replace_load(sbox):
+ "load: descending into replaced dir looks in src"
+ run_load_test(sbox, "descend-into-replace.dump")
+
+@Issue(3847)
+def add_multi_prop_dump(sbox):
+ "dump: add with multiple props"
+ run_dump_test(sbox, "add-multi-prop.dump")
+
+@Issue(3844)
+def multi_prop_edit_load(sbox):
+ "load: multiple prop edits on a file"
+ run_load_test(sbox, "multi-prop-edits.dump", None, False)
+
+#----------------------------------------------------------------------
+# This test replicates svnadmin_tests.py 16 'reflect dropped renumbered
+# revs in svn:mergeinfo' but uses 'svnrdump load' in place of
+# 'svnadmin load'.
+@Issue(3890)
+def reflect_dropped_renumbered_revs(sbox):
+ "svnrdump renumbers dropped revs in mergeinfo"
+
+ # Create an empty sandbox repository
+ sbox.build(create_wc=False, empty=True)
+
+ # Create the revprop-change hook for this test
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+
+ # Load the specified dump file into the sbox repository using
+ # svnrdump load
+ dump_file = open(os.path.join(os.path.dirname(sys.argv[0]),
+ 'svnrdump_tests_data',
+ 'with_merges.dump'),
+ 'rb')
+ svnrdump_dumpfile = dump_file.readlines()
+ dump_file.close()
+
+ # svnrdump load the dump file.
+ svntest.actions.run_and_verify_svnrdump(svnrdump_dumpfile,
+ svntest.verify.AnyOutput,
+ [], 0, 'load', sbox.repo_url)
+
+ # Create the 'toplevel' directory in repository and then load the same
+ # dumpfile into that subtree.
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 10.\n'],
+ [], "mkdir", sbox.repo_url + "/toplevel",
+ "-m", "Create toplevel dir to load into")
+ svntest.actions.run_and_verify_svnrdump(svnrdump_dumpfile,
+ svntest.verify.AnyOutput,
+ [], 0, 'load',
+ sbox.repo_url + "/toplevel")
+ # Verify the svn:mergeinfo properties
+ url = sbox.repo_url
+ expected_output = svntest.verify.UnorderedOutput([
+ url + "/trunk - /branch1:4-8\n",
+ url + "/toplevel/trunk - /toplevel/branch1:14-18\n",
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'propget', 'svn:mergeinfo', '-R',
+ sbox.repo_url)
+
+#----------------------------------------------------------------------
+# Full or incremental dump-load cycles should result in the same
+# mergeinfo in the loaded repository.
+#
+# Given a repository 'SOURCE-REPOS' with mergeinfo, and a repository
+# 'TARGET-REPOS' (which may or may not be empty), either of the following
+# methods to move 'SOURCE-REPOS' to 'TARGET-REPOS' should result in
+# the same mergeinfo on 'TARGET-REPOS':
+#
+# 1) Dump -r1:HEAD from 'SOURCE-REPOS' and load it in one shot to
+# 'TARGET-REPOS'.
+#
+# 2) Dump 'SOURCE-REPOS' in a series of incremental dumps and load
+# each of them to 'TARGET-REPOS'.
+#
+# See http://subversion.tigris.org/issues/show_bug.cgi?id=3020#desc13
+#
+# This test replicates svnadmin_tests.py 20 'don't filter mergeinfo revs
+# from incremental dump' but uses 'svnrdump [dump|load]' in place of
+# 'svnadmin [dump|load]'.
+@Issue(3890)
+def dont_drop_valid_mergeinfo_during_incremental_svnrdump_loads(sbox):
+ "don't drop mergeinfo revs in incremental svnrdump"
+
+ # Create an empty repos.
+ sbox.build(empty=True)
+
+ # Create the revprop-change hook for this test
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+
+ # PART 1: Load a full dump to an empty repository.
+ #
+ # The test repository used here, 'mergeinfo_included_full.dump', is
+ # this repos:
+ # __________________________________________
+ # | |
+ # | ____________________________|_____
+ # | | | |
+ # trunk---r2---r3-----r5---r6-------r8---r9---------------> | |
+ # r1 | | | | | |
+ # initial | | | |______ | |
+ # import copy | copy | merge merge
+ # | | | merge (r5) (r8)
+ # | | | (r9) | |
+ # | | | | | |
+ # | | V V | |
+ # | | branches/B2-------r11---r12----> | |
+ # | | r7 |____| | |
+ # | | | | |
+ # | merge |___ | |
+ # | (r6) | | |
+ # | |_________________ | | |
+ # | | merge | |
+ # | | (r11-12) | |
+ # | | | | |
+ # V V V | |
+ # branches/B1-------------------r10--------r13--> | |
+ # r4 | |
+ # | V V
+ # branches/B1/B/E------------------------------r14---r15->
+ #
+ #
+ # The mergeinfo on this repos@15 is:
+ #
+ # Properties on 'branches/B1':
+ # svn:mergeinfo
+ # /branches/B2:11-12
+ # /trunk:6,9
+ # Properties on 'branches/B1/B/E':
+ # svn:mergeinfo
+ # /branches/B2/B/E:11-12
+ # /trunk/B/E:5-6,8-9
+ # Properties on 'branches/B2':
+ # svn:mergeinfo
+ # /trunk:9
+ dump_fp = open(os.path.join(os.path.dirname(sys.argv[0]),
+ 'svnrdump_tests_data',
+ 'mergeinfo_included_full.dump'),
+ 'rb')
+ dumpfile_full = dump_fp.readlines()
+ dump_fp.close()
+
+ svntest.actions.run_and_verify_svnrdump(dumpfile_full,
+ svntest.verify.AnyOutput,
+ [], 0, 'load', sbox.repo_url)
+
+ # Check that the mergeinfo is as expected.
+ url = sbox.repo_url + '/branches/'
+ expected_output = svntest.verify.UnorderedOutput([
+ url + "B1 - /branches/B2:11-12\n",
+ "/trunk:6,9\n",
+ url + "B2 - /trunk:9\n",
+ url + "B1/B/E - /branches/B2/B/E:11-12\n",
+ "/trunk/B/E:5-6,8-9\n"])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'propget', 'svn:mergeinfo', '-R',
+ sbox.repo_url)
+
+ # PART 2: Load a series of incremental dumps to an empty repository.
+ #
+ # Incrementally dump the repository into three dump files:
+ dump_file_r1_10 = sbox.get_tempname("r1-10-dump")
+ output = svntest.actions.run_and_verify_svnrdump(None,
+ svntest.verify.AnyOutput,
+ [], 0, '-q', 'dump', '-r1:10',
+ sbox.repo_url)
+ dump_fp = open(dump_file_r1_10, 'wb')
+ dump_fp.writelines(output)
+ dump_fp.close()
+
+ dump_file_r11_13 = sbox.get_tempname("r11-13-dump")
+ output = svntest.actions.run_and_verify_svnrdump(None,
+ svntest.verify.AnyOutput,
+ [], 0, '-q', 'dump',
+ '--incremental', '-r11:13',
+ sbox.repo_url)
+ dump_fp = open(dump_file_r11_13, 'wb')
+ dump_fp.writelines(output)
+ dump_fp.close()
+
+ dump_file_r14_15 = sbox.get_tempname("r14-15-dump")
+ output = svntest.actions.run_and_verify_svnrdump(None,
+ svntest.verify.AnyOutput,
+ [], 0, '-q', 'dump',
+ '--incremental', '-r14:15',
+ sbox.repo_url)
+ dump_fp = open(dump_file_r14_15, 'wb')
+ dump_fp.writelines(output)
+ dump_fp.close()
+
+ # Blow away the current repos and create an empty one in its place.
+ svntest.main.safe_rmtree(sbox.repo_dir, True) # Fix race with bdb in svnserve
+ sbox.build(empty=True)
+
+ # Create the revprop-change hook for this test
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+
+ # Load the three incremental dump files in sequence.
+ dump_fp = open(dump_file_r1_10, 'rb')
+ svntest.actions.run_and_verify_svnrdump(dump_fp.readlines(),
+ svntest.verify.AnyOutput,
+ [], 0, 'load', sbox.repo_url)
+ dump_fp.close()
+ dump_fp = open(dump_file_r11_13, 'rb')
+ svntest.actions.run_and_verify_svnrdump(dump_fp.readlines(),
+ svntest.verify.AnyOutput,
+ [], 0, 'load', sbox.repo_url)
+ dump_fp.close()
+ dump_fp = open(dump_file_r14_15, 'rb')
+ svntest.actions.run_and_verify_svnrdump(dump_fp.readlines(),
+ svntest.verify.AnyOutput,
+ [], 0, 'load', sbox.repo_url)
+ dump_fp.close()
+
+ # Check the mergeinfo, we use the same expected output as before,
+ # as it (duh!) should be exactly the same as when we loaded the
+ # repos in one shot.
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'propget', 'svn:mergeinfo', '-R',
+ sbox.repo_url)
+
+ # Now repeat the above two scenarios, but with an initially non-empty target
+ # repository. First, try the full dump-load in one shot.
+ #
+ # PART 3: Load a full dump to an non-empty repository.
+ #
+ # Reset our sandbox.
+ svntest.main.safe_rmtree(sbox.repo_dir, True) # Fix race with bdb in svnserve
+ sbox.build(empty=True)
+
+ # Create the revprop-change hook for this test
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+
+ # Load this skeleton repos into the empty target:
+ #
+ # Projects/ (Added r1)
+ # README (Added r2)
+ # Project-X (Added r3)
+ # Project-Y (Added r4)
+ # Project-Z (Added r5)
+ # docs/ (Added r6)
+ # README (Added r6)
+ dump_fp = open(os.path.join(os.path.dirname(sys.argv[0]),
+ 'svnrdump_tests_data',
+ 'skeleton.dump'),
+ 'rb')
+ dumpfile_skeleton = dump_fp.readlines()
+ dump_fp.close()
+ svntest.actions.run_and_verify_svnrdump(dumpfile_skeleton,
+ svntest.verify.AnyOutput,
+ [], 0, 'load', sbox.repo_url)
+
+ # Load 'svnadmin_tests_data/mergeinfo_included_full.dump' in one shot:
+ svntest.actions.run_and_verify_svnrdump(dumpfile_full,
+ svntest.verify.AnyOutput,
+ [], 0, 'load',
+ sbox.repo_url + '/Projects/Project-X')
+
+ # Check that the mergeinfo is as expected. This is exactly the
+ # same expected mergeinfo we previously checked, except that the
+ # revisions are all offset +6 to reflect the revions already in
+ # the skeleton target before we began loading and the leading source
+ # paths are adjusted by the --parent-dir:
+ #
+ # Properties on 'branches/B1':
+ # svn:mergeinfo
+ # /Projects/Project-X/branches/B2:17-18
+ # /Projects/Project-X/trunk:12,15
+ # Properties on 'branches/B1/B/E':
+ # svn:mergeinfo
+ # /Projects/Project-X/branches/B2/B/E:17-18
+ # /Projects/Project-X/trunk/B/E:11-12,14-15
+ # Properties on 'branches/B2':
+ # svn:mergeinfo
+ # /Projects/Project-X/trunk:15
+ url = sbox.repo_url + '/Projects/Project-X/branches/'
+ expected_output = svntest.verify.UnorderedOutput([
+ url + "B1 - /Projects/Project-X/branches/B2:17-18\n",
+ "/Projects/Project-X/trunk:12,15\n",
+ url + "B2 - /Projects/Project-X/trunk:15\n",
+ url + "B1/B/E - /Projects/Project-X/branches/B2/B/E:17-18\n",
+ "/Projects/Project-X/trunk/B/E:11-12,14-15\n"])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'propget', 'svn:mergeinfo', '-R',
+ sbox.repo_url)
+
+ # PART 4: Load a a series of incremental dumps to an non-empty repository.
+ #
+ # Reset our sandbox.
+ svntest.main.safe_rmtree(sbox.repo_dir, True) # Fix race with bdb in svnserve
+ sbox.build(empty=True)
+
+ # Create the revprop-change hook for this test
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+
+ # Load the skeleton repos into the empty target:
+ svntest.actions.run_and_verify_svnrdump(dumpfile_skeleton,
+ svntest.verify.AnyOutput,
+ [], 0, 'load', sbox.repo_url)
+
+ # Load the three incremental dump files in sequence.
+ #
+ # The first load fails the same as PART 3.
+ dump_fp = open(dump_file_r1_10, 'rb')
+ svntest.actions.run_and_verify_svnrdump(dump_fp.readlines(),
+ svntest.verify.AnyOutput,
+ [], 0, 'load',
+ sbox.repo_url + '/Projects/Project-X')
+ dump_fp.close()
+ dump_fp = open(dump_file_r11_13, 'rb')
+ svntest.actions.run_and_verify_svnrdump(dump_fp.readlines(),
+ svntest.verify.AnyOutput,
+ [], 0, 'load',
+ sbox.repo_url + '/Projects/Project-X')
+ dump_fp.close()
+ dump_fp = open(dump_file_r14_15, 'rb')
+ svntest.actions.run_and_verify_svnrdump(dump_fp.readlines(),
+ svntest.verify.AnyOutput,
+ [], 0, 'load',
+ sbox.repo_url + '/Projects/Project-X')
+ dump_fp.close()
+
+ # Check the resulting mergeinfo. We expect the exact same results
+ # as Part 3.
+ # See http://subversion.tigris.org/issues/show_bug.cgi?id=3020#desc16.
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'propget', 'svn:mergeinfo', '-R',
+ sbox.repo_url)
+
+#----------------------------------------------------------------------
+@Issue(3890)
+def svnrdump_load_partial_incremental_dump(sbox):
+ "svnrdump load partial incremental dump"
+
+ # Create an empty sandbox repository
+ sbox.build(create_wc=False, empty=True)
+
+ # Create the revprop-change hook for this test
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+
+ # Create the 'A' directory in repository and then load the partial
+ # incremental dump into the root of the repository.
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 1.\n'],
+ [], "mkdir", sbox.repo_url + "/A",
+ "-m", "Create toplevel dir to load into")
+
+ # Load the specified dump file into the sbox repository using
+ # svnrdump load
+ dump_file = open(os.path.join(os.path.dirname(sys.argv[0]),
+ 'svnrdump_tests_data',
+ 'partial_incremental.dump'),
+ 'rb')
+ svnrdump_dumpfile = dump_file.readlines()
+ dump_file.close()
+ svntest.actions.run_and_verify_svnrdump(svnrdump_dumpfile,
+ svntest.verify.AnyOutput,
+ [], 0, 'load', sbox.repo_url)
+
+
+#----------------------------------------------------------------------
+@Issue(4101)
+def range_dump(sbox):
+ "dump: using -rX:Y"
+ run_dump_test(sbox, "trunk-only.dump",
+ expected_dumpfile_name="root-range.expected.dump",
+ extra_options=['-r2:HEAD'])
+
+@Issue(4101)
+def only_trunk_range_dump(sbox):
+ "dump: subdirectory using -rX:Y"
+ run_dump_test(sbox, "trunk-only.dump", subdir="/trunk",
+ expected_dumpfile_name="trunk-only-range.expected.dump",
+ extra_options=['-r1:HEAD'])
+
+@Issue(4101)
+def only_trunk_A_range_dump(sbox):
+ "dump: deeper subdirectory using -rX:Y"
+ run_dump_test(sbox, "trunk-only.dump", subdir="/trunk/A",
+ expected_dumpfile_name="trunk-A-range.expected.dump",
+ extra_options=['-r2:HEAD'])
+
+
+#----------------------------------------------------------------------
+
+@Issue(4490)
+def load_prop_change_in_non_deltas_dump(sbox):
+ "load: prop change in non-deltas dump"
+ # 'svnrdump load' crashed when processing a node record with a non-delta
+ # properties block if the node previously had any svn:* properties.
+
+ sbox.build()
+ sbox.simple_propset('svn:eol-style', 'native', 'iota', 'A/mu', 'A/B/lambda')
+ sbox.simple_commit()
+
+ # Any prop change on a node that had an svn:* prop triggered the crash,
+ # so test an svn:* prop deletion and also some other prop changes.
+ sbox.simple_propdel('svn:eol-style', 'iota')
+ sbox.simple_propset('svn:eol-style', 'LF', 'A/mu')
+ sbox.simple_propset('p1', 'v1', 'A/B/lambda')
+ sbox.simple_commit()
+
+ # Create a non-deltas dump. Use 'svnadmin', as svnrdump doesn't have that
+ # option.
+ dump = svntest.actions.run_and_verify_dump(sbox.repo_dir, deltas=False)
+
+ # Try to load that dump.
+ sbox.build(create_wc=False, empty=True)
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+ svntest.actions.run_and_verify_svnrdump(dump,
+ [], [], 0,
+ '-q', 'load', sbox.repo_url)
+
+#----------------------------------------------------------------------
+
+@Issue(4476)
+def dump_mergeinfo_contains_r0(sbox):
+ "dump: mergeinfo that contains r0"
+ ### We pass the original dump file name as 'expected_dumpfile_name' because
+ ### run_dump_test is currently broken when we don't.
+ run_dump_test(sbox, "mergeinfo-contains-r0.dump",
+ bypass_prop_validation=True)
+
+#----------------------------------------------------------------------
+
+@XFail()
+@Issue(4476)
+def load_mergeinfo_contains_r0(sbox):
+ "load: mergeinfo that contains r0"
+ run_load_test(sbox, "mergeinfo-contains-r0.dump",
+ expected_dumpfile_name="mergeinfo-contains-r0.expected.dump")
+
+#----------------------------------------------------------------------
+
+# Regression test for issue 4551 "svnrdump load commits wrong properties,
+# or fails, on a non-deltas dumpfile". In this test, the copy source does
+# not exist and the failure mode is to error out.
+@Issue(4551)
+def load_non_deltas_copy_with_props(sbox):
+ "load non-deltas copy with props"
+ sbox.build()
+
+ # Case (1): Copies that do not replace anything: the copy target path
+ # at (new rev - 1) does not exist
+
+ # Set properties on each node to be copied
+ sbox.simple_propset('p', 'v', 'A/mu', 'A/B', 'A/B/E')
+ sbox.simple_propset('q', 'v', 'A/mu', 'A/B', 'A/B/E')
+ sbox.simple_commit()
+ sbox.simple_update() # avoid mixed-rev
+
+ # Do the copies
+ sbox.simple_copy('A/mu@2', 'A/mu_COPY')
+ sbox.simple_copy('A/B@2', 'A/B_COPY')
+ # Also add new nodes inside the copied dir, to test more code paths
+ sbox.simple_copy('A/B/E@2', 'A/B_COPY/copied')
+ sbox.simple_mkdir('A/B_COPY/added')
+ sbox.simple_copy('A/B/E@2', 'A/B_COPY/added/copied')
+ # On each copied node, delete a prop
+ sbox.simple_propdel('p', 'A/mu_COPY', 'A/B_COPY', 'A/B_COPY/E',
+ 'A/B_COPY/copied', 'A/B_COPY/added/copied')
+
+ sbox.simple_commit()
+
+ # Dump with 'svnadmin' (non-deltas mode)
+ dumpfile = svntest.actions.run_and_verify_dump(sbox.repo_dir, deltas=False)
+
+ # Load with 'svnrdump'. This used to throw an error:
+ # svnrdump: E160013: File not found: revision 2, path '/A/B_COPY'
+ new_repo_dir, new_repo_url = sbox.add_repo_path('new_repo')
+ svntest.main.create_repos(new_repo_dir)
+ svntest.actions.enable_revprop_changes(new_repo_dir)
+ svntest.actions.run_and_verify_svnrdump(dumpfile,
+ svntest.verify.AnyOutput,
+ [], 0, 'load', new_repo_url)
+
+ # Check that property 'p' really was deleted on each copied node
+ for tgt_path in ['A/mu_COPY', 'A/B_COPY', 'A/B_COPY/E',
+ 'A/B_COPY/copied', 'A/B_COPY/added/copied']:
+ tgt_url = new_repo_url + '/' + tgt_path
+ _, out, _ = svntest.main.run_svn(None, 'proplist', tgt_url)
+ expected = ["Properties on '%s':" % (tgt_url,),
+ 'q']
+ actual = map(str.strip, out)
+ svntest.verify.compare_and_display_lines(None, 'PROPS', expected, actual)
+
+# Regression test for issue 4551 "svnrdump load commits wrong properties,
+# or fails, on a non-deltas dumpfile". In this test, the copy source does
+# exist and the failure mode is to fail to delete a property.
+@Issue(4551)
+def load_non_deltas_replace_copy_with_props(sbox):
+ "load non-deltas replace&copy with props"
+ sbox.build()
+
+ # Case (2): Copies that replace something: the copy target path
+ # at (new rev - 1) exists and has no property named 'p'
+
+ # Set props on the copy source nodes (a file, a dir, a child of the dir)
+ sbox.simple_propset('p', 'v', 'A/mu', 'A/B', 'A/B/E')
+ sbox.simple_propset('q', 'v', 'A/mu', 'A/B', 'A/B/E')
+ sbox.simple_commit()
+ sbox.simple_update() # avoid mixed-rev
+
+ # Do the copies, replacing something
+ sbox.simple_rm('A/D/gamma', 'A/C')
+ sbox.simple_copy('A/mu@2', 'A/D/gamma')
+ sbox.simple_copy('A/B@2', 'A/C')
+ # On the copy, delete a prop that wasn't present on the node that it replaced
+ sbox.simple_propdel('p', 'A/D/gamma', 'A/C', 'A/C/E')
+
+ sbox.simple_commit()
+
+ # Dump with 'svnadmin' (non-deltas mode)
+ dumpfile = svntest.actions.run_and_verify_dump(sbox.repo_dir, deltas=False)
+
+ # Load with 'svnrdump'
+ new_repo_dir, new_repo_url = sbox.add_repo_path('new_repo')
+ svntest.main.create_repos(new_repo_dir)
+ svntest.actions.enable_revprop_changes(new_repo_dir)
+ svntest.actions.run_and_verify_svnrdump(dumpfile,
+ svntest.verify.AnyOutput,
+ [], 0, 'load', new_repo_url)
+
+ # Check that property 'p' really was deleted on each copied node
+ # This used to fail, finding that property 'p' was still present
+ for tgt_path in ['A/D/gamma', 'A/C', 'A/C/E']:
+ tgt_url = new_repo_url + '/' + tgt_path
+ _, out, _ = svntest.main.run_svn(None, 'proplist', tgt_url)
+ expected = ["Properties on '%s':" % (tgt_url,),
+ 'q']
+ actual = map(str.strip, out)
+ svntest.verify.compare_and_display_lines(None, 'PROPS', expected, actual)
+
+# Regression test for issue #4552 "svnrdump writes duplicate headers for a
+# replace-with-copy". 'svnrdump dump' wrote the Node-path and Node-kind
+# headers twice for the 'delete' record of a replace-with-copy.
+@Issue(4552)
+def dump_replace_with_copy(sbox):
+ "dump replace with copy"
+ sbox.build()
+
+ # Copy file/dir, replacing something
+ sbox.simple_rm('A/D/gamma', 'A/C')
+ sbox.simple_copy('A/mu@1', 'A/D/gamma')
+ sbox.simple_copy('A/B@1', 'A/C')
+ sbox.simple_commit()
+
+ # Dump with 'svnrdump'
+ dumpfile = svntest.actions.run_and_verify_svnrdump(
+ None, svntest.verify.AnyOutput, [], 0,
+ 'dump', '--quiet', '--incremental', '-r2',
+ sbox.repo_url)
+
+ # Check the 'delete' record headers: expect this parse to fail if headers
+ # are duplicated
+ svntest.verify.DumpParser(dumpfile).parse()
+
+# Regression test for issue 4551 "svnrdump load commits wrong properties,
+# or fails, on a non-deltas dumpfile". In this test, a node's props are
+# modified, and the failure mode is that RA-serf would end up deleting
+# properties that should remain on the node.
+@Issue(4551)
+def load_non_deltas_with_props(sbox):
+ "load non-deltas with props"
+ sbox.build()
+
+ # Case (3): A node's props are modified, and at least one of its previous
+ # props remains after the modification. svnrdump made two prop mod method
+ # calls for the same property (delete, then set). RA-serf's commit editor
+ # didn't expect this and performed the deletes after the non-deletes, and
+ # so ended up deleting a property that should not be deleted.
+
+ # Set properties on each node to be modified
+ sbox.simple_propset('p', 'v', 'A/mu')
+ sbox.simple_propset('q', 'v', 'A/mu', 'A/B')
+ sbox.simple_commit()
+
+ # Do the modifications: a different kind of mod on each node
+ sbox.simple_propdel('p', 'A/mu')
+ sbox.simple_propset('q', 'v2', 'A/B')
+ sbox.simple_commit()
+
+ # Dump with 'svnadmin' (non-deltas mode)
+ dumpfile = svntest.actions.run_and_verify_dump(sbox.repo_dir, deltas=False)
+
+ # Load with 'svnrdump'
+ new_repo_dir, new_repo_url = sbox.add_repo_path('new_repo')
+ svntest.main.create_repos(new_repo_dir)
+ svntest.actions.enable_revprop_changes(new_repo_dir)
+ svntest.actions.run_and_verify_svnrdump(dumpfile,
+ svntest.verify.AnyOutput,
+ [], 0, 'load', new_repo_url)
+
+ # Check that property 'q' remains on each modified node
+ for tgt_path in ['A/mu', 'A/B']:
+ tgt_url = new_repo_url + '/' + tgt_path
+ _, out, _ = svntest.main.run_svn(None, 'proplist', tgt_url)
+ expected = ["Properties on '%s':" % (tgt_url,),
+ 'q']
+ actual = map(str.strip, out)
+ svntest.verify.compare_and_display_lines(None, 'PROPS', expected, actual)
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ basic_dump,
+ revision_0_dump,
+ revision_0_load,
+ skeleton_dump,
+ skeleton_load,
+ sparse_propchanges_dump,
+ sparse_propchanges_load,
+ copy_and_modify_dump,
+ copy_and_modify_load,
+ copy_from_previous_version_and_modify_dump,
+ copy_from_previous_version_and_modify_load,
+ modified_in_place_dump,
+ modified_in_place_load,
+ tag_empty_trunk_dump,
+ tag_empty_trunk_load,
+ tag_trunk_with_file_dump,
+ tag_trunk_with_file_load,
+ tag_trunk_with_file2_dump,
+ tag_trunk_with_file2_load,
+ dir_prop_change_dump,
+ dir_prop_change_load,
+ copy_parent_modify_prop_dump,
+ copy_parent_modify_prop_load,
+ url_encoding_dump,
+ url_encoding_load,
+ copy_revprops_dump,
+ copy_revprops_load,
+ only_trunk_dump,
+ only_trunk_A_with_changes_dump,
+ no_author_dump,
+ no_author_load,
+ move_and_modify_in_the_same_revision_dump,
+ move_and_modify_in_the_same_revision_load,
+ copy_bad_line_endings_dump,
+ copy_bad_line_endings_load,
+ copy_bad_line_endings2_dump,
+ commit_a_copy_of_root_dump,
+ commit_a_copy_of_root_load,
+ descend_into_replace_dump,
+ descend_into_replace_load,
+ add_multi_prop_dump,
+ multi_prop_edit_load,
+ reflect_dropped_renumbered_revs,
+ dont_drop_valid_mergeinfo_during_incremental_svnrdump_loads,
+ svnrdump_load_partial_incremental_dump,
+ range_dump,
+ only_trunk_range_dump,
+ only_trunk_A_range_dump,
+ load_prop_change_in_non_deltas_dump,
+ dump_mergeinfo_contains_r0,
+ load_mergeinfo_contains_r0,
+ load_non_deltas_copy_with_props,
+ load_non_deltas_replace_copy_with_props,
+ dump_replace_with_copy,
+ load_non_deltas_with_props,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/add-multi-prop.dump b/subversion/tests/cmdline/svnrdump_tests_data/add-multi-prop.dump
new file mode 100644
index 0000000..52fcf26
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/add-multi-prop.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/copy-and-modify.dump b/subversion/tests/cmdline/svnrdump_tests_data/copy-and-modify.dump
new file mode 100644
index 0000000..4907499
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/copy-and-modify.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/copy-bad-line-endings.dump b/subversion/tests/cmdline/svnrdump_tests_data/copy-bad-line-endings.dump
new file mode 100644
index 0000000..effd8a7
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/copy-bad-line-endings.dump
@@ -0,0 +1,49 @@
+SVN-fs-dump-format-version: 3
+
+UUID: 0e893bae-2d4c-452c-8616-b774d5b9104e
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2009-03-27T19:25:43.035921Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 151
+Content-length: 151
+
+K 10
+svn:author
+V 7
+smithma
+K 8
+svn:date
+V 27
+2009-03-27T19:33:51.178381Z
+K 7
+svn:log
+V 49
+added svn:ignore using CRLF to terminate lines
+
+PROPS-END
+
+Node-path:
+Node-kind: dir
+Node-action: change
+Prop-delta: true
+Prop-content-length: 55
+Content-length: 55
+
+K 10
+svn:ignore
+V 23
+target
+target-eclipse
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/copy-bad-line-endings.expected.dump b/subversion/tests/cmdline/svnrdump_tests_data/copy-bad-line-endings.expected.dump
new file mode 100644
index 0000000..37031dc
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/copy-bad-line-endings.expected.dump
@@ -0,0 +1,50 @@
+SVN-fs-dump-format-version: 3
+
+UUID: 0e893bae-2d4c-452c-8616-b774d5b9104e
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2009-03-27T19:25:43.035921Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 150
+Content-length: 150
+
+K 10
+svn:author
+V 7
+smithma
+K 8
+svn:date
+V 27
+2009-03-27T19:33:51.178381Z
+K 7
+svn:log
+V 48
+added svn:ignore using CRLF to
+ terminate lines
+
+PROPS-END
+
+Node-path:
+Node-kind: dir
+Node-action: change
+Prop-delta: true
+Prop-content-length: 54
+Content-length: 54
+
+K 10
+svn:ignore
+V 22
+target
+target-eclipse
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/copy-bad-line-endings2.dump b/subversion/tests/cmdline/svnrdump_tests_data/copy-bad-line-endings2.dump
new file mode 100644
index 0000000..fbc5df8
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/copy-bad-line-endings2.dump
@@ -0,0 +1,141 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 1676c7c8-1042-47ba-811d-d6f56ff7be54
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2010-09-14T00:20:38.518085Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 150
+Content-length: 150
+
+K 10
+svn:author
+V 9
+dtrebbien
+K 8
+svn:date
+V 27
+2010-09-14T00:22:19.558161Z
+K 7
+svn:log
+V 46
+This log message
+contains CRLF
+line endings.
+PROPS-END
+
+Node-path: TEST
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 146
+Content-length: 146
+
+K 10
+svn:author
+V 9
+dtrebbien
+K 8
+svn:date
+V 27
+2010-09-14T14:04:52.173534Z
+K 7
+svn:log
+V 42
+This log message contains CR line endings.
+PROPS-END
+
+Node-path: TEST
+Node-kind: file
+Node-action: change
+Text-content-length: 5
+Text-content-md5: d8e8fca2dc0f896fd7cb4cb0031ba249
+Text-content-sha1: 4e1243bd22c66e76c2ba9eddc1f91394e57f9f83
+Content-length: 5
+
+test
+
+
+Revision-number: 3
+Prop-content-length: 151
+Content-length: 151
+
+K 10
+svn:author
+V 9
+dtrebbien
+K 8
+svn:date
+V 27
+2010-09-14T14:16:41.718227Z
+K 7
+svn:log
+V 47
+Add `svn:ignore` using CRLF to terminate lines.
+PROPS-END
+
+Node-path:
+Node-kind: dir
+Node-action: change
+Prop-content-length: 49
+Content-length: 49
+
+K 10
+svn:ignore
+V 17
+blah.o
+blah2.o
+
+PROPS-END
+
+
+Revision-number: 4
+Prop-content-length: 153
+Content-length: 153
+
+K 10
+svn:author
+V 9
+dtrebbien
+K 8
+svn:date
+V 27
+2010-09-14T14:17:35.488565Z
+K 7
+svn:log
+V 49
+Add `x:related-to` using CRLF to terminate lines.
+PROPS-END
+
+Node-path: TEST
+Node-kind: file
+Node-action: change
+Prop-content-length: 51
+Content-length: 51
+
+K 12
+x:related-to
+V 17
+blah.o
+blah2.o
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/copy-bad-line-endings2.expected.dump b/subversion/tests/cmdline/svnrdump_tests_data/copy-bad-line-endings2.expected.dump
new file mode 100644
index 0000000..feb1842
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/copy-bad-line-endings2.expected.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/copy-from-previous-version-and-modify.dump b/subversion/tests/cmdline/svnrdump_tests_data/copy-from-previous-version-and-modify.dump
new file mode 100644
index 0000000..43f8295
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/copy-from-previous-version-and-modify.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/copy-parent-modify-prop.dump b/subversion/tests/cmdline/svnrdump_tests_data/copy-parent-modify-prop.dump
new file mode 100644
index 0000000..2fc4c19
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/copy-parent-modify-prop.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/descend-into-replace.dump b/subversion/tests/cmdline/svnrdump_tests_data/descend-into-replace.dump
new file mode 100644
index 0000000..ca8d8a0
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/descend-into-replace.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/descend-into-replace.expected.dump b/subversion/tests/cmdline/svnrdump_tests_data/descend-into-replace.expected.dump
new file mode 100644
index 0000000..d1e7b98
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/descend-into-replace.expected.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/dir-prop-change.dump b/subversion/tests/cmdline/svnrdump_tests_data/dir-prop-change.dump
new file mode 100644
index 0000000..ce2cbf5
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/dir-prop-change.dump
@@ -0,0 +1,48 @@
+SVN-fs-dump-format-version: 3
+
+UUID: b760f8ea-7608-0410-b9cc-88a00389c87b
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2005-12-21T23:12:28.152181Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 105
+Content-length: 105
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-12-21T23:12:58.931794Z
+K 7
+svn:log
+V 5
+blah
+
+PROPS-END
+
+Node-path:
+Node-kind: dir
+Node-action: change
+Prop-delta: true
+Prop-content-length: 37
+Content-length: 37
+
+K 10
+svn:ignore
+V 6
+stuff
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/mergeinfo-contains-r0.dump b/subversion/tests/cmdline/svnrdump_tests_data/mergeinfo-contains-r0.dump
new file mode 100644
index 0000000..2b7b50e
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/mergeinfo-contains-r0.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/mergeinfo-contains-r0.expected.dump b/subversion/tests/cmdline/svnrdump_tests_data/mergeinfo-contains-r0.expected.dump
new file mode 100644
index 0000000..6fdbf1c
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/mergeinfo-contains-r0.expected.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/mergeinfo_included_full.dump b/subversion/tests/cmdline/svnrdump_tests_data/mergeinfo_included_full.dump
new file mode 100644
index 0000000..7ad89e6
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/mergeinfo_included_full.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/modified-in-place.dump b/subversion/tests/cmdline/svnrdump_tests_data/modified-in-place.dump
new file mode 100644
index 0000000..1358d70
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/modified-in-place.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/move-and-modify.dump b/subversion/tests/cmdline/svnrdump_tests_data/move-and-modify.dump
new file mode 100644
index 0000000..5364bf2
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/move-and-modify.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/multi-prop-edits.dump b/subversion/tests/cmdline/svnrdump_tests_data/multi-prop-edits.dump
new file mode 100644
index 0000000..4cb3904
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/multi-prop-edits.dump
@@ -0,0 +1,298 @@
+SVN-fs-dump-format-version: 2
+
+UUID: cafefeed-babe-face-dead-beeff00dfade
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2007-06-07T15:16:48.947371Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 129
+Content-length: 129
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2007-06-07T15:16:49.020461Z
+K 7
+svn:log
+V 27
+Log message for revision 1.
+PROPS-END
+
+Node-path: A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: d1fa4a3ced98961674a441930a51f2d3
+Text-content-sha1: b347d1da69df9a6a70433ceeaa0d46c8483e8c03
+Content-length: 36
+
+PROPS-END
+This is the file 'alpha'.
+
+
+Node-path: A/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-content-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+Content-length: 35
+
+PROPS-END
+This is the file 'beta'.
+
+
+Node-path: A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 27
+Text-content-md5: 911c7a8d869b8c1e566f57da54d889c6
+Text-content-sha1: 784a9298366863da2b65ebf82b4e1123755a2421
+Content-length: 37
+
+PROPS-END
+This is the file 'lambda'.
+
+
+Node-path: A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: A/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: A/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: A/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: A/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: A/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: 412138bd677d64cd1c32fafbffe6245d
+Text-content-sha1: 74b75d7f2e1a0292f17d5a57c570bd89783f5d1c
+Content-length: 36
+
+PROPS-END
+This is the file 'gamma'.
+
+
+Node-path: A/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Content-length: 33
+
+PROPS-END
+This is the file 'mu'.
+
+
+Node-path: iota
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-content-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+Content-length: 35
+
+PROPS-END
+This is the file 'iota'.
+
+
+Revision-number: 2
+Prop-content-length: 103
+Content-length: 103
+
+K 10
+svn:author
+V 5
+Hyrum
+K 8
+svn:date
+V 27
+2011-03-25T15:13:51.205960Z
+K 7
+svn:log
+V 4
+msg
+
+PROPS-END
+
+Node-path: iota
+Node-kind: file
+Node-action: change
+Prop-content-length: 44
+Content-length: 44
+
+K 3
+bar
+V 4
+Foo
+
+K 3
+foo
+V 4
+Bar
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/no-author.dump b/subversion/tests/cmdline/svnrdump_tests_data/no-author.dump
new file mode 100644
index 0000000..c84a2d3
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/no-author.dump
@@ -0,0 +1,38 @@
+SVN-fs-dump-format-version: 3
+
+UUID: 55b1b577-cf20-0410-8f4d-bb837066729e
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2006-10-27T18:55:45.622477Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 77
+Content-length: 77
+
+K 8
+svn:date
+V 27
+2006-10-27T18:55:49.840787Z
+K 7
+svn:log
+V 4
+foo
+
+PROPS-END
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/partial_incremental.dump b/subversion/tests/cmdline/svnrdump_tests_data/partial_incremental.dump
new file mode 100644
index 0000000..2c296f8
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/partial_incremental.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/repo-with-copy-of-root-dir.dump b/subversion/tests/cmdline/svnrdump_tests_data/repo-with-copy-of-root-dir.dump
new file mode 100644
index 0000000..edbe1fb
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/repo-with-copy-of-root-dir.dump
@@ -0,0 +1,39 @@
+SVN-fs-dump-format-version: 3
+
+UUID: 7c1d1c84-647b-4ca5-9d20-057768670fee
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2009-06-29T12:22:48.566680Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 104
+Content-length: 104
+
+K 10
+svn:author
+V 6
+kamesh
+K 8
+svn:date
+V 27
+2009-06-29T12:23:55.134926Z
+K 7
+svn:log
+V 4
+wwww
+PROPS-END
+
+Node-path: full
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 0
+Node-copyfrom-path:
+
+
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/revision-0.dump b/subversion/tests/cmdline/svnrdump_tests_data/revision-0.dump
new file mode 100644
index 0000000..8f9ed01
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/revision-0.dump
@@ -0,0 +1,30 @@
+SVN-fs-dump-format-version: 3
+
+UUID: 95f5730d-843b-4fe3-8879-f46da52f2123
+
+Revision-number: 0
+Prop-content-length: 258
+Content-length: 258
+
+K 8
+svn:date
+V 27
+2003-01-08T10:33:40.549533Z
+K 26
+svn:sync-currently-copying
+V 2
+15
+K 17
+svn:sync-from-url
+V 31
+http://svn.apache.org/repos/asf
+K 18
+svn:sync-from-uuid
+V 36
+13f79535-47bb-0310-9956-ffa450edef68
+K 24
+svn:sync-last-merged-rev
+V 2
+14
+PROPS-END
+
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/revprops.dump b/subversion/tests/cmdline/svnrdump_tests_data/revprops.dump
new file mode 100644
index 0000000..6ae87c0
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/revprops.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/root-range.expected.dump b/subversion/tests/cmdline/svnrdump_tests_data/root-range.expected.dump
new file mode 100644
index 0000000..20e9e81
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/root-range.expected.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/skeleton.dump b/subversion/tests/cmdline/svnrdump_tests_data/skeleton.dump
new file mode 100644
index 0000000..08e8aa3
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/skeleton.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/sparse-propchanges.dump b/subversion/tests/cmdline/svnrdump_tests_data/sparse-propchanges.dump
new file mode 100644
index 0000000..d3d4357
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/sparse-propchanges.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/tag-empty-trunk.dump b/subversion/tests/cmdline/svnrdump_tests_data/tag-empty-trunk.dump
new file mode 100644
index 0000000..fc9a780
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/tag-empty-trunk.dump
@@ -0,0 +1,75 @@
+SVN-fs-dump-format-version: 3
+
+UUID: c38a9451-c504-0410-b613-a343b2d5fd66
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2005-11-04T23:03:50.600647Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 105
+Content-length: 105
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-04T23:04:15.060966Z
+K 7
+svn:log
+V 5
+setup
+PROPS-END
+
+Node-path: tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 106
+Content-length: 106
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-04T23:04:32.222710Z
+K 7
+svn:log
+V 6
+tag it
+PROPS-END
+
+Node-path: tags/an-empty-tag
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: trunk
+
+
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/tag-trunk-with-file.dump b/subversion/tests/cmdline/svnrdump_tests_data/tag-trunk-with-file.dump
new file mode 100644
index 0000000..9bdb921
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/tag-trunk-with-file.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/tag-trunk-with-file2.dump b/subversion/tests/cmdline/svnrdump_tests_data/tag-trunk-with-file2.dump
new file mode 100644
index 0000000..0721b96
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/tag-trunk-with-file2.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/trunk-A-changes.dump b/subversion/tests/cmdline/svnrdump_tests_data/trunk-A-changes.dump
new file mode 100644
index 0000000..794e0b8
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/trunk-A-changes.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/trunk-A-changes.expected.dump b/subversion/tests/cmdline/svnrdump_tests_data/trunk-A-changes.expected.dump
new file mode 100644
index 0000000..c95fdcc
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/trunk-A-changes.expected.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/trunk-A-range.expected.dump b/subversion/tests/cmdline/svnrdump_tests_data/trunk-A-range.expected.dump
new file mode 100644
index 0000000..70a96c5
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/trunk-A-range.expected.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/trunk-only-range.expected.dump b/subversion/tests/cmdline/svnrdump_tests_data/trunk-only-range.expected.dump
new file mode 100644
index 0000000..9ec633f
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/trunk-only-range.expected.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/trunk-only.dump b/subversion/tests/cmdline/svnrdump_tests_data/trunk-only.dump
new file mode 100644
index 0000000..55bdae1
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/trunk-only.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/trunk-only.expected.dump b/subversion/tests/cmdline/svnrdump_tests_data/trunk-only.expected.dump
new file mode 100644
index 0000000..689c780
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/trunk-only.expected.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/url-encoding-bug.dump b/subversion/tests/cmdline/svnrdump_tests_data/url-encoding-bug.dump
new file mode 100644
index 0000000..8d5cf89
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/url-encoding-bug.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnrdump_tests_data/with_merges.dump b/subversion/tests/cmdline/svnrdump_tests_data/with_merges.dump
new file mode 100644
index 0000000..7b68e71
--- /dev/null
+++ b/subversion/tests/cmdline/svnrdump_tests_data/with_merges.dump
Binary files differ
diff --git a/subversion/tests/cmdline/svnserveautocheck.sh b/subversion/tests/cmdline/svnserveautocheck.sh
new file mode 100755
index 0000000..4fb0b1b
--- /dev/null
+++ b/subversion/tests/cmdline/svnserveautocheck.sh
@@ -0,0 +1,159 @@
+#!/bin/sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+# -*- mode: shell-script; -*-
+
+# This script simplifies the preparation of the environment for a Subversion
+# client communicating with an svnserve server.
+#
+# The script runs svnserve, runs "make check", and kills the svnserve
+# afterwards. It makes sure to kill the svnserve even if the test run dies.
+#
+# This script should be run from the top level of the Subversion
+# distribution; it's easiest to just run it as "make svnserveautocheck".
+# Like "make check", you can specify further options like
+# "make svnserveautocheck FS_TYPE=bdb TESTS=subversion/tests/cmdline/basic.py".
+#
+# Other environment variables that can be passed:
+#
+# make svnserveautocheck CACHE_REVPROPS=1 # run svnserve --cache-revprops
+#
+# make svnserveautocheck BLOCK_READ=1 # run svnserve --block-read on
+#
+# make svnserveautocheck THREADED=1 # run svnserve -T
+
+PYTHON=${PYTHON:-python}
+
+SCRIPTDIR=$(dirname $0)
+SCRIPT=$(basename $0)
+
+set +e
+
+trap trap_cleanup HUP TERM INT
+
+# Ensure the server uses a known locale.
+LC_ALL=C
+export LC_ALL
+
+really_cleanup() {
+ if [ -e "$SVNSERVE_PID" ]; then
+ kill $(cat "$SVNSERVE_PID")
+ rm -f $SVNSERVE_PID
+ fi
+}
+
+trap_cleanup() {
+ really_cleanup
+ exit 1
+}
+
+say() {
+ echo "$SCRIPT: $*"
+}
+
+fail() {
+ say $*
+ exit 1
+}
+
+# Compute ABS_BUILDDIR and ABS_SRCDIR.
+if [ -x subversion/svn/svn ]; then
+ # cwd is build tree root
+ ABS_BUILDDIR=$(pwd)
+elif [ -x $SCRIPTDIR/../../svn/svn ]; then
+ # cwd is subversion/tests/cmdline/ in the build tree
+ cd $SCRIPTDIR/../../../
+ ABS_BUILDDIR=$(pwd)
+ cd - >/dev/null
+else
+ fail "Run this script from the root of Subversion's build tree!"
+fi
+# Cater for out-of-tree builds
+ABS_SRCDIR=`<$ABS_BUILDDIR/Makefile sed -ne 's/^srcdir = //p'`
+if [ ! -e $ABS_SRCDIR/subversion/include/svn_version.h ]; then
+ fail "Run this script from the root of Subversion's build tree!"
+fi
+
+# If you change this, also make sure to change the svn:ignore entry
+# for it and "make check-clean".
+SVNSERVE_PID=$ABS_BUILDDIR/subversion/tests/svnserveautocheck.pid
+
+SERVER_CMD="$ABS_BUILDDIR/subversion/svnserve/svnserve"
+
+rm -f $SVNSERVE_PID
+
+random_port() {
+ if [ -n "$BASH_VERSION" ]; then
+ echo $(($RANDOM+1024))
+ else
+ $PYTHON -c 'import random; print random.randint(1024, 2**16-1)'
+ fi
+}
+
+if type time > /dev/null ; then TIME_CMD() { time "$@"; } ; else TIME_CMD() { "$@"; } ; fi
+
+MAKE=${MAKE:-make}
+PATH="$PATH:/usr/sbin/:/usr/local/sbin/"
+
+ss > /dev/null 2>&1 || netstat > /dev/null 2>&1 || fail "unable to find ss or netstat required to find a free port"
+
+SVNSERVE_PORT=$(random_port)
+while \
+ (ss -ltn sport = :$SVNSERVE_PORT 2>&1 | grep :$SVNSERVE_PORT > /dev/null ) \
+ || \
+ (netstat -an 2>&1 | grep $SVNSERVE_PORT | grep 'LISTEN' > /dev/null ) \
+ do
+ SVNSERVE_PORT=$(random_port)
+done
+
+if [ "$THREADED" != "" ]; then
+ SVNSERVE_ARGS="-T"
+fi
+
+if [ ${CACHE_REVPROPS:+set} ]; then
+ SVNSERVE_ARGS="$SVNSERVE_ARGS --cache-revprops on"
+fi
+
+if [ ${BLOCK_READ:+set} ]; then
+ SVNSERVE_ARGS="$SVNSERVE_ARGS --block-read on"
+fi
+
+"$SERVER_CMD" -d -r "$ABS_BUILDDIR/subversion/tests/cmdline" \
+ --listen-host 127.0.0.1 \
+ --listen-port $SVNSERVE_PORT \
+ --pid-file $SVNSERVE_PID \
+ $SVNSERVE_ARGS &
+
+BASE_URL=svn://127.0.0.1:$SVNSERVE_PORT
+if [ $# = 0 ]; then
+ TIME_CMD "$MAKE" check "BASE_URL=$BASE_URL"
+ r=$?
+else
+ cd "$ABS_BUILDDIR/subversion/tests/cmdline/"
+ TEST="$1"
+ shift
+ TIME_CMD "$ABS_SRCDIR/subversion/tests/cmdline/${TEST}_tests.py" "--url=$BASE_URL" $*
+ r=$?
+ cd - > /dev/null
+fi
+
+really_cleanup
+exit $r
diff --git a/subversion/tests/cmdline/svnsync_authz_tests.py b/subversion/tests/cmdline/svnsync_authz_tests.py
new file mode 100755
index 0000000..e8b9444
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_authz_tests.py
@@ -0,0 +1,504 @@
+#!/usr/bin/env python
+#
+# svnsync_authz_tests.py: Tests SVNSync's repository mirroring
+# capabilities that need to be run serially
+# (mainly authz).
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import sys, os
+
+# Test suite-specific modules
+import locale, re
+
+# Our testing module
+import svntest
+from svntest.verify import SVNUnexpectedStdout, SVNUnexpectedStderr
+from svntest.verify import SVNExpectedStderr
+from svntest.main import write_restrictive_svnserve_conf
+from svntest.main import write_authz_file
+from svntest.main import server_has_partial_replay
+
+# Shared helpers
+from svnsync_tests import run_init, run_sync, run_test
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+#----------------------------------------------------------------------
+@Skip(svntest.main.is_ra_type_file)
+def basic_authz(sbox):
+ "verify that unreadable content is not synced"
+
+ sbox.build(create_wc = False)
+
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ dest_sbox = sbox.clone_dependent()
+ dest_sbox.build(create_wc=False, empty=True)
+
+ svntest.actions.enable_revprop_changes(dest_sbox.repo_dir)
+
+ run_init(dest_sbox.repo_url, sbox.repo_url)
+
+ src_authz = sbox.authz_name()
+ dst_authz = dest_sbox.authz_name()
+ write_authz_file(sbox, None,
+ prefixed_rules = {
+ src_authz + ':/': '* = r',
+ src_authz + ':/A/B': '* =',
+ dst_authz + ':/': '* = rw',
+ })
+
+ run_sync(dest_sbox.repo_url)
+
+ lambda_url = dest_sbox.repo_url + '/A/B/lambda'
+ iota_url = dest_sbox.repo_url + '/iota'
+
+ # this file should have been blocked by authz
+ svntest.actions.run_and_verify_svn([], svntest.verify.AnyOutput,
+ 'cat',
+ lambda_url)
+ # this file should have been synced
+ svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+ 'cat',
+ iota_url)
+
+#----------------------------------------------------------------------
+@Skip(svntest.main.is_ra_type_file)
+def copy_from_unreadable_dir(sbox):
+ "verify that copies from unreadable dirs work"
+
+ sbox.build()
+
+ B_url = sbox.repo_url + '/A/B'
+ P_url = sbox.repo_url + '/A/P'
+
+ # Set a property on the directory we're going to copy, and a file in it, to
+ # confirm that they're transmitted when we later sync the copied directory
+ svntest.actions.run_and_verify_svn(None,
+ [],
+ 'pset',
+ 'foo',
+ 'bar',
+ sbox.wc_dir + '/A/B/lambda')
+
+ svntest.actions.run_and_verify_svn(None,
+ [],
+ 'pset',
+ 'baz',
+ 'zot',
+ sbox.wc_dir + '/A/B')
+
+ svntest.actions.run_and_verify_svn(None,
+ [],
+ 'ci',
+ sbox.wc_dir + '/A/B',
+ '-m', 'log_msg')
+
+ # Now copy that directory so we'll see it in our synced copy
+ svntest.actions.run_and_verify_svn(None,
+ [],
+ 'cp',
+ B_url,
+ P_url,
+ '-m', 'Copy B to P')
+
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ dest_sbox = sbox.clone_dependent()
+ dest_sbox.build(create_wc=False, empty=True)
+
+ svntest.actions.enable_revprop_changes(dest_sbox.repo_dir)
+
+ src_authz = sbox.authz_name()
+ dst_authz = dest_sbox.authz_name()
+ write_authz_file(sbox, None,
+ prefixed_rules = {
+ src_authz + ':/': '* = r',
+ src_authz + ':/A/B': '* =',
+ dst_authz + ':/': '* = rw',
+ })
+
+ run_init(dest_sbox.repo_url, sbox.repo_url)
+
+ run_sync(dest_sbox.repo_url)
+
+ expected_out = [
+ 'Changed paths:\n',
+ ' A /A/P\n',
+ ' A /A/P/E\n',
+ ' A /A/P/E/alpha\n',
+ ' A /A/P/E/beta\n',
+ ' A /A/P/F\n',
+ ' A /A/P/lambda\n',
+ '\n',
+ '\n', # log message is stripped
+ ]
+
+ exit_code, out, err = svntest.main.run_svn(None,
+ 'log',
+ '-r', '3',
+ '-v',
+ dest_sbox.repo_url)
+
+ if err:
+ raise SVNUnexpectedStderr(err)
+
+ svntest.verify.compare_and_display_lines(None,
+ 'LOG',
+ expected_out,
+ out[2:11])
+
+ svntest.actions.run_and_verify_svn(['bar\n'],
+ [],
+ 'pget',
+ 'foo',
+ dest_sbox.repo_url + '/A/P/lambda')
+
+ svntest.actions.run_and_verify_svn(['zot\n'],
+ [],
+ 'pget',
+ 'baz',
+ dest_sbox.repo_url + '/A/P')
+
+# Issue 2705.
+@Issue(2705)
+@Skip(svntest.main.is_ra_type_file)
+def copy_with_mod_from_unreadable_dir(sbox):
+ "verify copies with mods from unreadable dirs"
+
+ sbox.build()
+
+ # Make a copy of the B directory.
+ svntest.actions.run_and_verify_svn(None,
+ [],
+ 'cp',
+ sbox.wc_dir + '/A/B',
+ sbox.wc_dir + '/A/P')
+
+ # Set a property inside the copied directory.
+ svntest.actions.run_and_verify_svn(None,
+ [],
+ 'pset',
+ 'foo',
+ 'bar',
+ sbox.wc_dir + '/A/P/lambda')
+
+ # Add a new directory and file inside the copied directory.
+ svntest.actions.run_and_verify_svn(None,
+ [],
+ 'mkdir',
+ sbox.wc_dir + '/A/P/NEW-DIR')
+
+ svntest.main.file_append(sbox.wc_dir + '/A/P/E/new-file', "bla bla")
+ svntest.main.run_svn(None, 'add', sbox.wc_dir + '/A/P/E/new-file')
+
+ # Delete a file inside the copied directory.
+ svntest.actions.run_and_verify_svn(None,
+ [],
+ 'rm',
+ sbox.wc_dir + '/A/P/E/beta')
+
+ # Commit the copy-with-modification.
+ svntest.actions.run_and_verify_svn(None,
+ [],
+ 'ci',
+ sbox.wc_dir,
+ '-m', 'log_msg')
+
+ # Lock down the source repository.
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ dest_sbox = sbox.clone_dependent()
+ dest_sbox.build(create_wc=False, empty=True)
+
+ svntest.actions.enable_revprop_changes(dest_sbox.repo_dir)
+
+ src_authz = sbox.authz_name()
+ dst_authz = dest_sbox.authz_name()
+ write_authz_file(sbox, None,
+ prefixed_rules = {
+ src_authz + ':/': '* = r',
+ src_authz + ':/A/B': '* =',
+ dst_authz + ':/': '* = rw',
+ })
+
+ run_init(dest_sbox.repo_url, sbox.repo_url)
+
+ run_sync(dest_sbox.repo_url)
+
+ expected_out = [
+ 'Changed paths:\n',
+ ' A /A/P\n',
+ ' A /A/P/E\n',
+ ' A /A/P/E/alpha\n',
+ ' A /A/P/E/new-file\n',
+ ' A /A/P/F\n',
+ ' A /A/P/NEW-DIR\n',
+ ' A /A/P/lambda\n',
+ '\n',
+ '\n', # log message is stripped
+ ]
+
+ exit_code, out, err = svntest.main.run_svn(None,
+ 'log',
+ '-r', '2',
+ '-v',
+ dest_sbox.repo_url)
+
+ if err:
+ raise SVNUnexpectedStderr(err)
+
+ svntest.verify.compare_and_display_lines(None,
+ 'LOG',
+ expected_out,
+ out[2:12])
+
+ svntest.actions.run_and_verify_svn(['bar\n'],
+ [],
+ 'pget',
+ 'foo',
+ dest_sbox.repo_url + '/A/P/lambda')
+
+# Issue 2705.
+@Issue(2705)
+@Skip(svntest.main.is_ra_type_file)
+def copy_with_mod_from_unreadable_dir_and_copy(sbox):
+ "verify copies with mods from unreadable dirs +copy"
+
+ sbox.build()
+
+ # Make a copy of the B directory.
+ svntest.actions.run_and_verify_svn(None,
+ [],
+ 'cp',
+ sbox.wc_dir + '/A/B',
+ sbox.wc_dir + '/A/P')
+
+
+ # Copy a (readable) file into the copied directory.
+ svntest.actions.run_and_verify_svn(None,
+ [],
+ 'cp',
+ sbox.wc_dir + '/A/D/gamma',
+ sbox.wc_dir + '/A/P/E')
+
+
+ # Commit the copy-with-modification.
+ svntest.actions.run_and_verify_svn(None,
+ [],
+ 'ci',
+ sbox.wc_dir,
+ '-m', 'log_msg')
+
+ # Lock down the source repository.
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ dest_sbox = sbox.clone_dependent()
+ dest_sbox.build(create_wc=False, empty=True)
+
+ svntest.actions.enable_revprop_changes(dest_sbox.repo_dir)
+
+ src_authz = sbox.authz_name()
+ dst_authz = dest_sbox.authz_name()
+ write_authz_file(sbox, None,
+ prefixed_rules = {
+ src_authz + ':/': '* = r',
+ src_authz + ':/A/B': '* =',
+ dst_authz + ':/': '* = rw',
+ })
+
+ run_init(dest_sbox.repo_url, sbox.repo_url)
+
+ run_sync(dest_sbox.repo_url)
+
+ expected_out = [
+ 'Changed paths:\n',
+ ' A /A/P\n',
+ ' A /A/P/E\n',
+ ' A /A/P/E/alpha\n',
+ ' A /A/P/E/beta\n',
+ ' A /A/P/E/gamma (from /A/D/gamma:1)\n',
+ ' A /A/P/F\n',
+ ' A /A/P/lambda\n',
+ '\n',
+ '\n', # log message is stripped
+ ]
+
+ exit_code, out, err = svntest.main.run_svn(None,
+ 'log',
+ '-r', '2',
+ '-v',
+ dest_sbox.repo_url)
+
+ if err:
+ raise SVNUnexpectedStderr(err)
+
+ svntest.verify.compare_and_display_lines(None,
+ 'LOG',
+ expected_out,
+ out[2:12])
+
+def identity_copy(sbox):
+ "copy UTF-8 svn:* props identically"
+
+ sbox.build(create_wc = False)
+
+ orig_lc_all = locale.setlocale(locale.LC_ALL)
+ other_locales = [ "English.1252", "German.1252", "French.1252",
+ "en_US.ISO-8859-1", "en_GB.ISO-8859-1", "de_DE.ISO-8859-1",
+ "en_US.ISO8859-1", "en_GB.ISO8859-1", "de_DE.ISO8859-1" ]
+ for other_locale in other_locales:
+ try:
+ locale.setlocale(locale.LC_ALL, other_locale)
+ break
+ except:
+ pass
+ if locale.setlocale(locale.LC_ALL) != other_locale:
+ raise svntest.Skip('Setting test locale failed')
+
+ try:
+ run_test(sbox, "copy-bad-encoding.expected.dump",
+ exp_dump_file_name="copy-bad-encoding.expected.dump",
+ bypass_prop_validation=True)
+ finally:
+ locale.setlocale(locale.LC_ALL, orig_lc_all)
+
+@Skip(svntest.main.is_ra_type_file)
+def specific_deny_authz(sbox):
+ "verify if specifically denied paths dont sync"
+
+ sbox.build()
+
+ dest_sbox = sbox.clone_dependent()
+ dest_sbox.build(create_wc=False, empty=True)
+
+ svntest.actions.enable_revprop_changes(dest_sbox.repo_dir)
+
+ run_init(dest_sbox.repo_url, sbox.repo_url)
+
+ svntest.main.run_svn(None, "cp",
+ os.path.join(sbox.wc_dir, "A"),
+ os.path.join(sbox.wc_dir, "A_COPY")
+ )
+ svntest.main.run_svn(None, "ci", "-mm", sbox.wc_dir)
+
+ write_restrictive_svnserve_conf(sbox.repo_dir)
+
+ # For mod_dav_svn's parent path setup we need per-repos permissions in
+ # the authz file...
+ if sbox.repo_url.startswith('http'):
+ src_authz = sbox.authz_name()
+ dst_authz = dest_sbox.authz_name()
+ write_authz_file(sbox, None,
+ prefixed_rules = {
+ src_authz + ':/': '* = r',
+ src_authz + ':/A': '* =',
+ src_authz + ':/A_COPY/B/lambda': '* =',
+ dst_authz + ':/': '* = rw',
+ })
+ # Otherwise we can just go with the permissions needed for the source
+ # repository.
+ else:
+ write_authz_file(sbox, None,
+ prefixed_rules = {
+ '/': '* = r',
+ '/A': '* =',
+ '/A_COPY/B/lambda': '* =',
+ })
+
+ run_sync(dest_sbox.repo_url)
+
+ lambda_url = dest_sbox.repo_url + '/A_COPY/B/lambda'
+
+ # this file should have been blocked by authz
+ svntest.actions.run_and_verify_svn([], svntest.verify.AnyOutput,
+ 'cat',
+ lambda_url)
+
+@Issue(4121)
+@Skip(svntest.main.is_ra_type_file)
+def copy_delete_unreadable_child(sbox):
+ "copy, then rm at-src-unreadable child"
+
+ # Prepare the source: Greek tree (r1), cp+rm (r2).
+ sbox.build(create_wc = False)
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-m', 'r2',
+ '-U', sbox.repo_url,
+ 'cp', 'HEAD', '/', 'branch',
+ 'rm', 'branch/A')
+
+ # Create the destination.
+ dest_sbox = sbox.clone_dependent()
+ dest_sbox.build(create_wc=False, empty=True)
+ svntest.actions.enable_revprop_changes(dest_sbox.repo_dir)
+
+ # Lock down the source.
+ write_restrictive_svnserve_conf(sbox.repo_dir, anon_access='read')
+ src_authz = sbox.authz_name()
+ write_authz_file(sbox, None,
+ prefixed_rules = {
+ src_authz + ':/': '* = r',
+ src_authz + ':/A': '* =',
+ })
+
+ dest_url = dest_sbox.file_protocol_repo_url()
+ run_init(dest_url, sbox.repo_url)
+ run_sync(dest_url)
+
+ # sanity check
+ svntest.actions.run_and_verify_svn(["iota\n"], [],
+ 'ls', dest_url+'/branch@2')
+
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ basic_authz,
+ copy_from_unreadable_dir,
+ copy_with_mod_from_unreadable_dir,
+ copy_with_mod_from_unreadable_dir_and_copy,
+ identity_copy,
+ specific_deny_authz,
+ copy_delete_unreadable_child,
+ ]
+serial_only = True
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list, serial_only = serial_only)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/svnsync_tests.py b/subversion/tests/cmdline/svnsync_tests.py
new file mode 100755
index 0000000..ba55fb5
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests.py
@@ -0,0 +1,635 @@
+#!/usr/bin/env python
+#
+# svnsync_tests.py: Tests SVNSync's repository mirroring capabilities.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import sys, os
+
+# Test suite-specific modules
+import re
+
+# Our testing module
+import svntest
+from svntest.verify import SVNUnexpectedStdout, SVNUnexpectedStderr
+from svntest.verify import SVNExpectedStderr
+from svntest.verify import AnyOutput
+from svntest.main import server_has_partial_replay
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+######################################################################
+# Helper routines
+
+
+def run_sync(url, source_url=None,
+ source_prop_encoding=None,
+ expected_output=AnyOutput, expected_error=[]):
+ "Synchronize the mirror repository with the master"
+ if source_url is not None:
+ args = ["synchronize", url, source_url]
+ else: # Allow testing of old source-URL-less syntax
+ args = ["synchronize", url]
+ if source_prop_encoding:
+ args.append("--source-prop-encoding")
+ args.append(source_prop_encoding)
+
+ # Normal expected output is of the form:
+ # ['Transmitting file data .......\n', # optional
+ # 'Committed revision 1.\n',
+ # 'Copied properties for revision 1.\n', ...]
+ svntest.actions.run_and_verify_svnsync(expected_output, expected_error,
+ *args)
+
+def run_copy_revprops(url, source_url,
+ source_prop_encoding=None,
+ expected_output=AnyOutput, expected_error=[]):
+ "Copy revprops to the mirror repository from the master"
+ args = ["copy-revprops", url, source_url]
+ if source_prop_encoding:
+ args.append("--source-prop-encoding")
+ args.append(source_prop_encoding)
+
+ # Normal expected output is of the form:
+ # ['Copied properties for revision 1.\n', ...]
+ svntest.actions.run_and_verify_svnsync(expected_output, expected_error,
+ *args)
+
+def run_init(dst_url, src_url, source_prop_encoding=None):
+ "Initialize the mirror repository from the master"
+ args = ["initialize", dst_url, src_url]
+ if source_prop_encoding:
+ args.append("--source-prop-encoding")
+ args.append(source_prop_encoding)
+
+ expected_output = ['Copied properties for revision 0.\n']
+ svntest.actions.run_and_verify_svnsync(expected_output, [], *args)
+
+def run_info(url, expected_output=AnyOutput, expected_error=[]):
+ "Print synchronization information of the repository"
+ # Normal expected output is of the form:
+ # ['From URL: http://....\n',
+ # 'From UUID: XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n',
+ # 'Last Merged Revision: XXX\n']
+ svntest.actions.run_and_verify_svnsync(expected_output, expected_error,
+ "info", url)
+
+
+def setup_and_sync(sbox, dump_file_contents, subdir=None,
+ bypass_prop_validation=False, source_prop_encoding=None,
+ is_src_ra_local=None, is_dest_ra_local=None):
+ """Create a repository for SBOX, load it with DUMP_FILE_CONTENTS, then create a mirror repository and sync it with SBOX. If is_src_ra_local or is_dest_ra_local is True, then run_init, run_sync, and run_copy_revprops will use the file:// scheme for the source and destination URLs. Return the mirror sandbox."""
+
+ # Create the empty master repository.
+ sbox.build(create_wc=False, empty=True)
+
+ # Load the repository from DUMP_FILE_PATH.
+ svntest.actions.run_and_verify_load(sbox.repo_dir, dump_file_contents,
+ bypass_prop_validation)
+
+ # Create the empty destination repository.
+ dest_sbox = sbox.clone_dependent()
+ dest_sbox.build(create_wc=False, empty=True)
+
+ # Setup the mirror repository. Feed it the UUID of the source repository.
+ exit_code, output, errput = svntest.main.run_svnlook("uuid", sbox.repo_dir)
+ svntest.actions.run_and_verify_svnadmin2(None, None, 0,
+ 'setuuid', dest_sbox.repo_dir,
+ output[0][:-1])
+
+ # Create the revprop-change hook for this test
+ svntest.actions.enable_revprop_changes(dest_sbox.repo_dir)
+
+ repo_url = sbox.repo_url
+ cwd = os.getcwd()
+ if is_src_ra_local:
+ repo_url = sbox.file_protocol_repo_url()
+
+ if subdir:
+ repo_url = repo_url + subdir
+
+ dest_repo_url = dest_sbox.repo_url
+ if is_dest_ra_local:
+ dest_repo_url = dest_sbox.file_protocol_repo_url()
+ run_init(dest_repo_url, repo_url, source_prop_encoding)
+
+ run_sync(dest_repo_url, repo_url,
+ source_prop_encoding=source_prop_encoding)
+ run_copy_revprops(dest_repo_url, repo_url,
+ source_prop_encoding=source_prop_encoding)
+
+ return dest_sbox
+
+def verify_mirror(dest_sbox, exp_dump_file_contents):
+ """Compare the contents of the mirror repository in DEST_SBOX with
+ EXP_DUMP_FILE_CONTENTS, by comparing the parsed dump stream content.
+
+ First remove svnsync rev-props from the DEST_SBOX repository.
+ """
+
+ # Remove some SVNSync-specific housekeeping properties from the
+ # mirror repository in preparation for the comparison dump.
+ for prop_name in ("svn:sync-from-url", "svn:sync-from-uuid",
+ "svn:sync-last-merged-rev"):
+ svntest.actions.run_and_verify_svn(
+ None, [], "propdel", "--revprop", "-r", "0",
+ prop_name, dest_sbox.repo_url)
+
+ # Create a dump file from the mirror repository.
+ dest_dump = svntest.actions.run_and_verify_dump(dest_sbox.repo_dir)
+
+ svntest.verify.compare_dump_files(
+ "Dump files", "DUMP", exp_dump_file_contents, dest_dump)
+
+def run_test(sbox, dump_file_name, subdir=None, exp_dump_file_name=None,
+ bypass_prop_validation=False, source_prop_encoding=None,
+ is_src_ra_local=None, is_dest_ra_local=None):
+
+ """Load a dump file, sync repositories, and compare contents with the original
+or another dump file."""
+
+ # This directory contains all the dump files
+ svnsync_tests_dir = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svnsync_tests_data')
+
+ # Load the specified dump file into the master repository.
+ master_dumpfile_contents = open(os.path.join(svnsync_tests_dir,
+ dump_file_name),
+ 'rb').readlines()
+
+ dest_sbox = setup_and_sync(sbox, master_dumpfile_contents, subdir,
+ bypass_prop_validation, source_prop_encoding,
+ is_src_ra_local, is_dest_ra_local)
+
+ # Compare the dump produced by the mirror repository with either the original
+ # dump file (used to create the master repository) or another specified dump
+ # file.
+ if exp_dump_file_name:
+ exp_dump_file_contents = open(os.path.join(svnsync_tests_dir,
+ exp_dump_file_name), 'rb').readlines()
+ else:
+ exp_dump_file_contents = master_dumpfile_contents
+
+ verify_mirror(dest_sbox, exp_dump_file_contents)
+
+
+
+######################################################################
+# Tests
+
+#----------------------------------------------------------------------
+
+def copy_and_modify(sbox):
+ "copy and modify"
+ run_test(sbox, "copy-and-modify.dump")
+
+#----------------------------------------------------------------------
+
+def copy_from_previous_version_and_modify(sbox):
+ "copy from previous version and modify"
+ run_test(sbox, "copy-from-previous-version-and-modify.dump")
+
+#----------------------------------------------------------------------
+
+def copy_from_previous_version(sbox):
+ "copy from previous version"
+ run_test(sbox, "copy-from-previous-version.dump")
+
+#----------------------------------------------------------------------
+
+def modified_in_place(sbox):
+ "modified in place"
+ run_test(sbox, "modified-in-place.dump")
+
+#----------------------------------------------------------------------
+
+def tag_empty_trunk(sbox):
+ "tag empty trunk"
+ run_test(sbox, "tag-empty-trunk.dump")
+
+#----------------------------------------------------------------------
+
+def tag_trunk_with_dir(sbox):
+ "tag trunk containing a sub-directory"
+ run_test(sbox, "tag-trunk-with-dir.dump")
+
+#----------------------------------------------------------------------
+
+def tag_trunk_with_file(sbox):
+ "tag trunk containing a file"
+ run_test(sbox, "tag-trunk-with-file.dump")
+
+#----------------------------------------------------------------------
+
+def tag_trunk_with_file2(sbox):
+ "tag trunk containing a file (#2)"
+ run_test(sbox, "tag-trunk-with-file2.dump")
+
+#----------------------------------------------------------------------
+
+def tag_with_modified_file(sbox):
+ "tag with a modified file"
+ run_test(sbox, "tag-with-modified-file.dump")
+
+#----------------------------------------------------------------------
+
+def dir_prop_change(sbox):
+ "directory property changes"
+ run_test(sbox, "dir-prop-change.dump")
+
+#----------------------------------------------------------------------
+
+def file_dir_file(sbox):
+ "files and dirs mixed together"
+ run_test(sbox, "file-dir-file.dump")
+
+#----------------------------------------------------------------------
+
+def copy_parent_modify_prop(sbox):
+ "copy parent and modify prop"
+ run_test(sbox, "copy-parent-modify-prop.dump")
+
+#----------------------------------------------------------------------
+
+def detect_meddling(sbox):
+ "detect non-svnsync commits in destination"
+
+ sbox.build("svnsync-meddling")
+
+ dest_sbox = sbox.clone_dependent()
+ dest_sbox.build(create_wc=False, empty=True)
+
+ # Make our own destination checkout (have to do it ourself because
+ # it is not greek).
+
+ svntest.main.safe_rmtree(dest_sbox.wc_dir)
+ svntest.actions.run_and_verify_svn(None,
+ [],
+ 'co',
+ dest_sbox.repo_url,
+ dest_sbox.wc_dir)
+
+ svntest.actions.enable_revprop_changes(dest_sbox.repo_dir)
+
+ run_init(dest_sbox.repo_url, sbox.repo_url)
+ run_sync(dest_sbox.repo_url)
+
+ svntest.actions.run_and_verify_svn(None,
+ [],
+ 'up',
+ dest_sbox.wc_dir)
+
+ # Commit some change to the destination, which should be detected by svnsync
+ svntest.main.file_append(os.path.join(dest_sbox.wc_dir, 'A', 'B', 'lambda'),
+ 'new lambda text')
+ svntest.actions.run_and_verify_svn(None,
+ [],
+ 'ci',
+ '-m', 'msg',
+ dest_sbox.wc_dir)
+
+ expected_error = r".*Destination HEAD \(2\) is not the last merged revision \(1\).*"
+ run_sync(dest_sbox.repo_url, None,
+ expected_output=[], expected_error=expected_error)
+
+def url_encoding(sbox):
+ "test url encoding issues"
+ run_test(sbox, "url-encoding-bug.dump")
+
+
+# A test for copying revisions that lack a property that already exists
+# on the destination rev as part of the commit (i.e. svn:author in this
+# case, but svn:date would also work).
+def no_author(sbox):
+ "test copying revs with no svn:author revprops"
+ run_test(sbox, "no-author.dump")
+
+def copy_revprops(sbox):
+ "test copying revprops other than svn:*"
+ run_test(sbox, "revprops.dump")
+
+@SkipUnless(server_has_partial_replay)
+def only_trunk(sbox):
+ "test syncing subdirectories"
+ run_test(sbox, "svnsync-trunk-only.dump", "/trunk",
+ "svnsync-trunk-only.expected.dump")
+
+@SkipUnless(server_has_partial_replay)
+def only_trunk_A_with_changes(sbox):
+ "test syncing subdirectories with changes on root"
+ run_test(sbox, "svnsync-trunk-A-changes.dump", "/trunk/A",
+ "svnsync-trunk-A-changes.expected.dump")
+
+# test for issue #2904
+@Issue(2904)
+def move_and_modify_in_the_same_revision(sbox):
+ "test move parent and modify child file in same rev"
+ run_test(sbox, "svnsync-move-and-modify.dump")
+
+def info_synchronized(sbox):
+ "test info cmd on a synchronized repo"
+
+ sbox.build("svnsync-info-syncd", False)
+
+ # Get the UUID of the source repository.
+ exit_code, output, errput = svntest.main.run_svnlook("uuid", sbox.repo_dir)
+ src_uuid = output[0].strip()
+
+ dest_sbox = sbox.clone_dependent()
+ dest_sbox.build(create_wc=False, empty=True)
+
+ svntest.actions.enable_revprop_changes(dest_sbox.repo_dir)
+ run_init(dest_sbox.repo_url, sbox.repo_url)
+ run_sync(dest_sbox.repo_url)
+
+ expected_out = ['Source URL: %s\n' % sbox.repo_url,
+ 'Source Repository UUID: %s\n' % src_uuid,
+ 'Last Merged Revision: 1\n',
+ ]
+ svntest.actions.run_and_verify_svnsync(expected_out, [],
+ "info", dest_sbox.repo_url)
+
+def info_not_synchronized(sbox):
+ "test info cmd on an un-synchronized repo"
+
+ sbox.build("svnsync-info-not-syncd", False)
+
+ run_info(sbox.repo_url,
+ [], ".*Repository '%s' is not initialized.*" % sbox.repo_url)
+
+#----------------------------------------------------------------------
+
+def copy_bad_line_endings(sbox):
+ "copy with inconsistent line endings in svn:* props"
+ run_test(sbox, "copy-bad-line-endings.dump",
+ exp_dump_file_name="copy-bad-line-endings.expected.dump",
+ bypass_prop_validation=True)
+
+def copy_bad_line_endings2(sbox):
+ "copy with non-LF line endings in svn:* props"
+ run_test(sbox, "copy-bad-line-endings2.dump",
+ exp_dump_file_name="copy-bad-line-endings2.expected.dump",
+ bypass_prop_validation=True)
+
+def copy_bad_encoding(sbox):
+ "copy and reencode non-UTF-8 svn:* props"
+ run_test(sbox, "copy-bad-encoding.dump",
+ exp_dump_file_name="copy-bad-encoding.expected.dump",
+ bypass_prop_validation=True, source_prop_encoding="ISO-8859-3")
+
+#----------------------------------------------------------------------
+
+def delete_svn_props(sbox):
+ "copy with svn:* prop deletions"
+ run_test(sbox, "delete-svn-props.dump")
+
+@Issue(3438)
+def commit_a_copy_of_root(sbox):
+ "commit a copy of root causes sync to fail"
+ #Testcase for issue 3438.
+ run_test(sbox, "repo-with-copy-of-root-dir.dump")
+
+
+# issue #3641 'svnsync fails to partially copy a repository'.
+# This currently fails because while replacements with history
+# within copies are handled, replacements without history inside
+# copies cause the sync to fail:
+#
+# >svnsync synchronize %TEST_REPOS_ROOT_URL%/svnsync_tests-29-1
+# %TEST_REPOS_ROOT_URL%/svnsync_tests-29/trunk/H
+# Transmitting file data ...\..\..\subversion\svnsync\main.c:1444: (apr_err=160013)
+# ..\..\..\subversion\svnsync\main.c:1391: (apr_err=160013)
+# ..\..\..\subversion\libsvn_ra\ra_loader.c:1168: (apr_err=160013)
+# ..\..\..\subversion\libsvn_delta\path_driver.c:254: (apr_err=160013)
+# ..\..\..\subversion\libsvn_repos\replay.c:480: (apr_err=160013)
+# ..\..\..\subversion\libsvn_repos\replay.c:276: (apr_err=160013)
+# ..\..\..\subversion\libsvn_repos\replay.c:290: (apr_err=160013)
+# ..\..\..\subversion\libsvn_fs_base\tree.c:1258: (apr_err=160013)
+# ..\..\..\subversion\libsvn_fs_base\tree.c:1258: (apr_err=160013)
+# ..\..\..\subversion\libsvn_fs_base\tree.c:1236: (apr_err=160013)
+# ..\..\..\subversion\libsvn_fs_base\tree.c:931: (apr_err=160013)
+# ..\..\..\subversion\libsvn_fs_base\tree.c:742: (apr_err=160013)
+# svnsync: File not found: revision 4, path '/trunk/H/Z/B/lambda'
+#
+# See also http://svn.haxx.se/dev/archive-2010-11/0411.shtml and
+#
+#
+# Note: For those who may poke around this test in the future, r3 of
+# delete-revprops.dump was created with the following svnmucc command:
+#
+# svnmucc.exe -mm cp head %ROOT_URL%/trunk/A %ROOT_URL%/trunk/H
+# rm %ROOT_URL%/trunk/H/B
+# cp head %ROOT_URL%/trunk/X %ROOT_URL%/trunk/B
+#
+# r4 was created with this svnmucc command:
+#
+# svnmucc.exe -mm cp head %ROOT_URL%/trunk/A %ROOT_URL%/trunk/H/Z
+# rm %ROOT_URL%/trunk/H/Z/B
+# mkdir %ROOT_URL%/trunk/H/Z/B
+@Issue(3641)
+def descend_into_replace(sbox):
+ "descending into replaced dir looks in src"
+ run_test(sbox, "descend-into-replace.dump", subdir='/trunk/H',
+ exp_dump_file_name = "descend-into-replace.expected.dump")
+
+# issue #3728
+@Issue(3728)
+def delete_revprops(sbox):
+ "copy-revprops with removals"
+ svnsync_tests_dir = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svnsync_tests_data')
+ initial_contents = open(os.path.join(svnsync_tests_dir,
+ "delete-revprops.dump"),
+ 'rb').readlines()
+ expected_contents = open(os.path.join(svnsync_tests_dir,
+ "delete-revprops.expected.dump"),
+ 'rb').readlines()
+
+ # Create the initial repos and mirror, and sync 'em.
+ dest_sbox = setup_and_sync(sbox, initial_contents)
+
+ # Now remove a revprop from r1 of the source, and run 'svnsync
+ # copy-revprops' to re-sync 'em.
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+ exit_code, out, err = svntest.main.run_svn(None,
+ 'pdel',
+ '-r', '1',
+ '--revprop',
+ 'issue-id',
+ sbox.repo_url)
+ if err:
+ raise SVNUnexpectedStderr(err)
+ run_copy_revprops(dest_sbox.repo_url, sbox.repo_url)
+
+ # Does the result look as we expected?
+ verify_mirror(dest_sbox, expected_contents)
+
+@Issue(3870)
+@SkipUnless(svntest.main.is_posix_os)
+def fd_leak_sync_from_serf_to_local(sbox):
+ "fd leak during sync from serf to local"
+ import resource
+ resource.setrlimit(resource.RLIMIT_NOFILE, (128, 128))
+ run_test(sbox, "largemods.dump", is_src_ra_local=None, is_dest_ra_local=True)
+
+#----------------------------------------------------------------------
+
+@Issue(4476)
+def mergeinfo_contains_r0(sbox):
+ "mergeinfo contains r0"
+
+ def make_node_record(node_name, mi):
+ """Return a dumpfile node-record for adding a (directory) node named
+ NODE_NAME with mergeinfo MI. Return it as a list of newline-terminated
+ lines.
+ """
+ headers_tmpl = """\
+Node-path: %s
+Node-kind: dir
+Node-action: add
+Prop-content-length: %d
+Content-length: %d
+"""
+ content_tmpl = """\
+K 13
+svn:mergeinfo
+V %d
+%s
+PROPS-END
+"""
+ content = content_tmpl % (len(mi), mi)
+ headers = headers_tmpl % (node_name, len(content), len(content))
+ record = (headers + '\n' + content + '\n\n').encode()
+ return record.splitlines(True)
+
+ # The test case mergeinfo (before, after) syncing, separated here with
+ # spaces instead of newlines
+ test_mi = [
+ ("", ""), # unchanged
+ ("/a:1", "/a:1"),
+ ("/a:1 /b:1*,2","/a:1 /b:1*,2"),
+ ("/:0:1", "/:0:1"), # unchanged; colon-zero in filename
+ ("/a:0", ""), # dropped entirely
+ ("/a:0*", ""),
+ ("/a:0 /b:0*", ""),
+ ("/a:1 /b:0", "/a:1"), # one kept, one dropped
+ ("/a:0 /b:1", "/b:1"),
+ ("/a:0,1 /b:1", "/a:1 /b:1"), # one kept, one changed
+ ("/a:1 /b:0,1", "/a:1 /b:1"),
+ ("/a:0,1 /b:0*,1 /c:0,2 /d:0-1 /e:0-1,3 /f:0-2 /g:0-3",
+ "/a:1 /b:1 /c:2 /d:1 /e:1,3 /f:1-2 /g:1-3"), # all changed
+ ("/a:0:0-1", "/a:0:1"), # changed; colon-zero in filename
+ ]
+
+ # Get the constant prefix for each dumpfile
+ dump_file_name = "mergeinfo-contains-r0.dump"
+ svnsync_tests_dir = os.path.join(os.path.dirname(sys.argv[0]),
+ 'svnsync_tests_data')
+ dump_in = open(os.path.join(svnsync_tests_dir, dump_file_name),
+ 'rb').readlines()
+ dump_out = list(dump_in) # duplicate the list
+
+ # Add dumpfile node records containing the test mergeinfo
+ for n, mi in enumerate(test_mi):
+ node_name = "D" + str(n)
+
+ mi_in = mi[0].replace(' ', '\n')
+ mi_out = mi[1].replace(' ', '\n')
+ dump_in.extend(make_node_record(node_name, mi_in))
+ dump_out.extend(make_node_record(node_name, mi_out))
+
+ # Run the sync
+ dest_sbox = setup_and_sync(sbox, dump_in, bypass_prop_validation=True)
+
+ # Compare the dump produced by the mirror repository with expected
+ verify_mirror(dest_sbox, dump_out)
+
+def up_to_date_sync(sbox):
+ """sync that does nothing"""
+
+ # An up-to-date mirror.
+ sbox.build(create_wc=False)
+ dest_sbox = sbox.clone_dependent()
+ dest_sbox.build(create_wc=False, empty=True)
+ svntest.actions.enable_revprop_changes(dest_sbox.repo_dir)
+ run_init(dest_sbox.repo_url, sbox.repo_url)
+ run_sync(dest_sbox.repo_url)
+
+ # Another sync should be a no-op
+ svntest.actions.run_and_verify_svnsync([], [],
+ "synchronize", dest_sbox.repo_url)
+
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ copy_and_modify,
+ copy_from_previous_version_and_modify,
+ copy_from_previous_version,
+ modified_in_place,
+ tag_empty_trunk,
+ tag_trunk_with_dir,
+ tag_trunk_with_file,
+ tag_trunk_with_file2,
+ tag_with_modified_file,
+ dir_prop_change,
+ file_dir_file,
+ copy_parent_modify_prop,
+ detect_meddling,
+ url_encoding,
+ no_author,
+ copy_revprops,
+ only_trunk,
+ only_trunk_A_with_changes,
+ move_and_modify_in_the_same_revision,
+ info_synchronized,
+ info_not_synchronized,
+ copy_bad_line_endings,
+ copy_bad_line_endings2,
+ copy_bad_encoding,
+ delete_svn_props,
+ commit_a_copy_of_root,
+ descend_into_replace,
+ delete_revprops,
+ fd_leak_sync_from_serf_to_local, # calls setrlimit
+ mergeinfo_contains_r0,
+ up_to_date_sync,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/svnsync_tests_data/copy-and-modify.dump b/subversion/tests/cmdline/svnsync_tests_data/copy-and-modify.dump
new file mode 100644
index 0000000..381fdb4
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/copy-and-modify.dump
@@ -0,0 +1,78 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 6ad9f820-0205-0410-94a2-c8cf366bb2b3
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2005-11-07T23:36:48.095832Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 112
+Content-length: 112
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-07T23:37:17.705159Z
+K 7
+svn:log
+V 11
+add foo.txt
+PROPS-END
+
+Node-path: foo.txt
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 135
+Content-length: 135
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-07T23:37:44.549695Z
+K 7
+svn:log
+V 34
+copy and change at the same time.
+
+PROPS-END
+
+Node-path: bar.txt
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: foo.txt
+Text-copy-source-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-copy-source-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Text-content-length: 9
+Text-content-md5: d2508118d0d39e198d1129d87d692d59
+Text-content-sha1: e2fb5f2139d086ded2cb600d5a91a196e76bf020
+Content-length: 9
+
+modified
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/copy-bad-encoding.dump b/subversion/tests/cmdline/svnsync_tests_data/copy-bad-encoding.dump
new file mode 100644
index 0000000..2409e9e
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/copy-bad-encoding.dump
@@ -0,0 +1,47 @@
+SVN-fs-dump-format-version: 2
+
+UUID: d90df8cd-70fe-4939-a557-177246366bb1
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2011-01-11T20:57:24.206641Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 102
+Content-length: 102
+
+K 7
+svn:log
+V 15
+¡¦§©ª«¬ÄÆÑøôûþ
+
+K 10
+svn:author
+V 6
+daniel
+K 8
+svn:date
+V 27
+2011-01-11T20:58:29.224246Z
+PROPS-END
+
+Node-path:
+Node-kind: dir
+Node-action: change
+Prop-content-length: 47
+Content-length: 47
+
+K 10
+svn:ignore
+V 15
+¡¦§©ª«¬ÄÆÑøôûþ
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/copy-bad-encoding.expected.dump b/subversion/tests/cmdline/svnsync_tests_data/copy-bad-encoding.expected.dump
new file mode 100644
index 0000000..7663918
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/copy-bad-encoding.expected.dump
@@ -0,0 +1,47 @@
+SVN-fs-dump-format-version: 2
+
+UUID: d90df8cd-70fe-4939-a557-177246366bb1
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2011-01-11T20:57:24.206641Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 130
+Content-length: 130
+
+K 10
+svn:author
+V 6
+daniel
+K 8
+svn:date
+V 27
+2011-01-11T20:58:29.224246Z
+K 7
+svn:log
+V 29
+ĦĤ§İŞĞĴÄĈÑÄôûÅ
+
+PROPS-END
+
+Node-path:
+Node-kind: dir
+Node-action: change
+Prop-content-length: 61
+Content-length: 61
+
+K 10
+svn:ignore
+V 29
+ĦĤ§İŞĞĴÄĈÑÄôûÅ
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/copy-bad-line-endings.dump b/subversion/tests/cmdline/svnsync_tests_data/copy-bad-line-endings.dump
new file mode 100644
index 0000000..01e7140
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/copy-bad-line-endings.dump
@@ -0,0 +1,48 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 0e893bae-2d4c-452c-8616-b774d5b9104e
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2009-03-27T19:25:43.035921Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 151
+Content-length: 151
+
+K 10
+svn:author
+V 7
+smithma
+K 8
+svn:date
+V 27
+2009-03-27T19:33:51.178381Z
+K 7
+svn:log
+V 49
+added svn:ignore using CRLF to terminate lines
+
+PROPS-END
+
+Node-path:
+Node-kind: dir
+Node-action: change
+Prop-content-length: 55
+Content-length: 55
+
+K 10
+svn:ignore
+V 23
+target
+target-eclipse
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/copy-bad-line-endings.expected.dump b/subversion/tests/cmdline/svnsync_tests_data/copy-bad-line-endings.expected.dump
new file mode 100644
index 0000000..5fa2896
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/copy-bad-line-endings.expected.dump
@@ -0,0 +1,49 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 0e893bae-2d4c-452c-8616-b774d5b9104e
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2009-03-27T19:25:43.035921Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 150
+Content-length: 150
+
+K 10
+svn:author
+V 7
+smithma
+K 8
+svn:date
+V 27
+2009-03-27T19:33:51.178381Z
+K 7
+svn:log
+V 48
+added svn:ignore using CRLF to
+ terminate lines
+
+PROPS-END
+
+Node-path:
+Node-kind: dir
+Node-action: change
+Prop-content-length: 54
+Content-length: 54
+
+K 10
+svn:ignore
+V 22
+target
+target-eclipse
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/copy-bad-line-endings2.dump b/subversion/tests/cmdline/svnsync_tests_data/copy-bad-line-endings2.dump
new file mode 100644
index 0000000..fbc5df8
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/copy-bad-line-endings2.dump
@@ -0,0 +1,141 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 1676c7c8-1042-47ba-811d-d6f56ff7be54
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2010-09-14T00:20:38.518085Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 150
+Content-length: 150
+
+K 10
+svn:author
+V 9
+dtrebbien
+K 8
+svn:date
+V 27
+2010-09-14T00:22:19.558161Z
+K 7
+svn:log
+V 46
+This log message
+contains CRLF
+line endings.
+PROPS-END
+
+Node-path: TEST
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 146
+Content-length: 146
+
+K 10
+svn:author
+V 9
+dtrebbien
+K 8
+svn:date
+V 27
+2010-09-14T14:04:52.173534Z
+K 7
+svn:log
+V 42
+This log message contains CR line endings.
+PROPS-END
+
+Node-path: TEST
+Node-kind: file
+Node-action: change
+Text-content-length: 5
+Text-content-md5: d8e8fca2dc0f896fd7cb4cb0031ba249
+Text-content-sha1: 4e1243bd22c66e76c2ba9eddc1f91394e57f9f83
+Content-length: 5
+
+test
+
+
+Revision-number: 3
+Prop-content-length: 151
+Content-length: 151
+
+K 10
+svn:author
+V 9
+dtrebbien
+K 8
+svn:date
+V 27
+2010-09-14T14:16:41.718227Z
+K 7
+svn:log
+V 47
+Add `svn:ignore` using CRLF to terminate lines.
+PROPS-END
+
+Node-path:
+Node-kind: dir
+Node-action: change
+Prop-content-length: 49
+Content-length: 49
+
+K 10
+svn:ignore
+V 17
+blah.o
+blah2.o
+
+PROPS-END
+
+
+Revision-number: 4
+Prop-content-length: 153
+Content-length: 153
+
+K 10
+svn:author
+V 9
+dtrebbien
+K 8
+svn:date
+V 27
+2010-09-14T14:17:35.488565Z
+K 7
+svn:log
+V 49
+Add `x:related-to` using CRLF to terminate lines.
+PROPS-END
+
+Node-path: TEST
+Node-kind: file
+Node-action: change
+Prop-content-length: 51
+Content-length: 51
+
+K 12
+x:related-to
+V 17
+blah.o
+blah2.o
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/copy-bad-line-endings2.expected.dump b/subversion/tests/cmdline/svnsync_tests_data/copy-bad-line-endings2.expected.dump
new file mode 100644
index 0000000..98de855
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/copy-bad-line-endings2.expected.dump
@@ -0,0 +1,143 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 1676c7c8-1042-47ba-811d-d6f56ff7be54
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2010-09-14T00:20:38.518085Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 148
+Content-length: 148
+
+K 10
+svn:author
+V 9
+dtrebbien
+K 8
+svn:date
+V 27
+2010-09-14T00:22:19.558161Z
+K 7
+svn:log
+V 44
+This log message
+contains CRLF
+line endings.
+PROPS-END
+
+Node-path: TEST
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 146
+Content-length: 146
+
+K 10
+svn:author
+V 9
+dtrebbien
+K 8
+svn:date
+V 27
+2010-09-14T14:04:52.173534Z
+K 7
+svn:log
+V 42
+This log message
+contains
+CR line endings.
+PROPS-END
+
+Node-path: TEST
+Node-kind: file
+Node-action: change
+Text-content-length: 5
+Text-content-md5: d8e8fca2dc0f896fd7cb4cb0031ba249
+Text-content-sha1: 4e1243bd22c66e76c2ba9eddc1f91394e57f9f83
+Content-length: 5
+
+test
+
+
+Revision-number: 3
+Prop-content-length: 151
+Content-length: 151
+
+K 10
+svn:author
+V 9
+dtrebbien
+K 8
+svn:date
+V 27
+2010-09-14T14:16:41.718227Z
+K 7
+svn:log
+V 47
+Add `svn:ignore` using CRLF to terminate lines.
+PROPS-END
+
+Node-path:
+Node-kind: dir
+Node-action: change
+Prop-content-length: 47
+Content-length: 47
+
+K 10
+svn:ignore
+V 15
+blah.o
+blah2.o
+
+PROPS-END
+
+
+Revision-number: 4
+Prop-content-length: 153
+Content-length: 153
+
+K 10
+svn:author
+V 9
+dtrebbien
+K 8
+svn:date
+V 27
+2010-09-14T14:17:35.488565Z
+K 7
+svn:log
+V 49
+Add `x:related-to` using CRLF to terminate lines.
+PROPS-END
+
+Node-path: TEST
+Node-kind: file
+Node-action: change
+Prop-content-length: 51
+Content-length: 51
+
+K 12
+x:related-to
+V 17
+blah.o
+blah2.o
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/copy-from-previous-version-and-modify.dump b/subversion/tests/cmdline/svnsync_tests_data/copy-from-previous-version-and-modify.dump
new file mode 100644
index 0000000..0369e29
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/copy-from-previous-version-and-modify.dump
@@ -0,0 +1,155 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 5bd4ad05-a105-0410-b7b7-c014cebbdc5c
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2005-11-15T21:10:49.994595Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 103
+Content-length: 103
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-15T21:10:58.608841Z
+K 7
+svn:log
+V 3
+ttb
+PROPS-END
+
+Node-path: branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 116
+Content-length: 116
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-15T21:11:19.415517Z
+K 7
+svn:log
+V 15
+first version!
+
+PROPS-END
+
+Node-path: trunk/file.txt
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 14
+Text-content-md5: 9f089b639127e2f5a79c4eda189678d6
+Text-content-sha1: b1c2f43edab746cec4857f5d4b00579922a83d92
+Content-length: 24
+
+PROPS-END
+first version
+
+
+Revision-number: 3
+Prop-content-length: 117
+Content-length: 117
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-15T21:11:29.391885Z
+K 7
+svn:log
+V 16
+second version!
+
+PROPS-END
+
+Node-path: trunk/file.txt
+Node-kind: file
+Node-action: change
+Text-content-length: 15
+Text-content-md5: 27f60b341727cb8ed1de139b0da7c173
+Text-content-sha1: b61e81f23c338df5c1dff26963f755d4226227c6
+Content-length: 15
+
+second version
+
+
+Revision-number: 4
+Prop-content-length: 122
+Content-length: 122
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-15T21:11:50.278301Z
+K 7
+svn:log
+V 21
+a copy, but modified
+
+PROPS-END
+
+Node-path: trunk/copy-of-file.txt
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: trunk/file.txt
+Text-copy-source-md5: 9f089b639127e2f5a79c4eda189678d6
+Text-copy-source-sha1: b1c2f43edab746cec4857f5d4b00579922a83d92
+Text-content-length: 28
+Text-content-md5: 7b4d0f5ac875af39d2ee3a67798f5754
+Text-content-sha1: 2040e27d431428a216382b42560dccaaa5e1b3b6
+Content-length: 28
+
+first version, but modified
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/copy-from-previous-version.dump b/subversion/tests/cmdline/svnsync_tests_data/copy-from-previous-version.dump
new file mode 100644
index 0000000..688d096
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/copy-from-previous-version.dump
@@ -0,0 +1,181 @@
+SVN-fs-dump-format-version: 2
+
+UUID: c542f1c4-a005-0410-b9aa-b3fbdc38f810
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2005-11-15T20:52:43.894708Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 104
+Content-length: 104
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-15T20:53:08.484681Z
+K 7
+svn:log
+V 4
+ttb
+
+PROPS-END
+
+Node-path: branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 119
+Content-length: 119
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-15T20:53:34.716301Z
+K 7
+svn:log
+V 18
+Add first version
+
+PROPS-END
+
+Node-path: trunk/file.txt
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 22
+Text-content-md5: bb55f6530f359710b6e52f0d5a9f544f
+Text-content-sha1: df4f5675e945ac7f4a776cb068aeb3bc5fb5fd29
+Content-length: 32
+
+PROPS-END
+first version of file
+
+
+Revision-number: 3
+Prop-content-length: 120
+Content-length: 120
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-15T20:53:48.805239Z
+K 7
+svn:log
+V 19
+Add second version
+
+PROPS-END
+
+Node-path: trunk/file.txt
+Node-kind: file
+Node-action: change
+Text-content-length: 23
+Text-content-md5: d6e0e9fdc2cb38352eca81f093110f4b
+Text-content-sha1: 64f3eeab9a2fce6b19eec365bbb181561718b999
+Content-length: 23
+
+second version of file
+
+
+Revision-number: 4
+Prop-content-length: 149
+Content-length: 149
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-15T20:56:25.247172Z
+K 7
+svn:log
+V 48
+Add a new file to create an uninvolved revision
+
+PROPS-END
+
+Node-path: trunk/foo.txt
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 5
+Prop-content-length: 131
+Content-length: 131
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-15T20:56:46.198327Z
+K 7
+svn:log
+V 30
+copy from a previous revision
+
+PROPS-END
+
+Node-path: trunk/copy-of-file.txt
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: trunk/file.txt
+Text-copy-source-md5: bb55f6530f359710b6e52f0d5a9f544f
+Text-copy-source-sha1: df4f5675e945ac7f4a776cb068aeb3bc5fb5fd29
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/copy-parent-modify-prop.dump b/subversion/tests/cmdline/svnsync_tests_data/copy-parent-modify-prop.dump
new file mode 100644
index 0000000..d12efa4
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/copy-parent-modify-prop.dump
@@ -0,0 +1,91 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 265b4915-c811-0410-a32e-b5d713837d08
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2006-04-19T12:49:15.123309Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 113
+Content-length: 113
+
+K 10
+svn:author
+V 2
+pl
+K 8
+svn:date
+V 27
+2006-04-19T12:50:29.623828Z
+K 7
+svn:log
+V 16
+add dir and file
+PROPS-END
+
+Node-path: dir
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: dir/f
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 126
+Content-length: 126
+
+K 10
+svn:author
+V 2
+pl
+K 8
+svn:date
+V 27
+2006-04-19T12:51:15.837786Z
+K 7
+svn:log
+V 29
+copy dir and modify prop of f
+PROPS-END
+
+Node-path: dir2
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: dir
+
+
+Node-path: dir2/f
+Node-kind: file
+Node-action: change
+Prop-content-length: 27
+Content-length: 27
+
+K 4
+prop
+V 3
+val
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/delete-revprops.dump b/subversion/tests/cmdline/svnsync_tests_data/delete-revprops.dump
new file mode 100644
index 0000000..f06a4df
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/delete-revprops.dump
@@ -0,0 +1,45 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 8e4a7212-d56e-11df-8ee9-37196cd04bc1
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2010-10-11T19:34:25.487865Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 136
+Content-length: 136
+
+K 8
+issue-id
+V 4
+1729
+K 10
+svn:author
+V 8
+cmpilato
+K 8
+svn:date
+V 27
+2010-10-11T19:55:50.912266Z
+K 7
+svn:log
+V 11
+Create dir1
+PROPS-END
+
+Node-path: dir1
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/delete-revprops.expected.dump b/subversion/tests/cmdline/svnsync_tests_data/delete-revprops.expected.dump
new file mode 100644
index 0000000..fc6ae70
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/delete-revprops.expected.dump
@@ -0,0 +1,41 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 8e4a7212-d56e-11df-8ee9-37196cd04bc1
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2010-10-11T19:34:25.487865Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 114
+Content-length: 114
+
+K 10
+svn:author
+V 8
+cmpilato
+K 8
+svn:date
+V 27
+2010-10-11T19:55:50.912266Z
+K 7
+svn:log
+V 11
+Create dir1
+PROPS-END
+
+Node-path: dir1
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/delete-svn-props.dump b/subversion/tests/cmdline/svnsync_tests_data/delete-svn-props.dump
new file mode 100644
index 0000000..a6cab39
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/delete-svn-props.dump
@@ -0,0 +1,76 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 776dcb32-51dc-11de-996d-d717ce91f81e
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2009-06-05T14:23:40.611796Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 137
+Content-length: 137
+
+K 10
+svn:author
+V 5
+neels
+K 8
+svn:date
+V 27
+2009-06-05T14:23:41.185914Z
+K 7
+svn:log
+V 37
+adding file with prop 'svn:eol-style'
+PROPS-END
+
+Node-path: file
+Node-kind: file
+Node-action: add
+Prop-content-length: 36
+Text-content-length: 13
+Text-content-md5: 65cea5e7a1cc0516de76d7f523f105e6
+Text-content-sha1: ecc6621aa15c7c96d81bb9931b33f0c12c93a1c5
+Content-length: 49
+
+K 13
+svn:eol-style
+V 2
+LF
+PROPS-END
+file-content
+
+
+Revision-number: 2
+Prop-content-length: 139
+Content-length: 139
+
+K 10
+svn:author
+V 5
+neels
+K 8
+svn:date
+V 27
+2009-06-05T14:23:42.140849Z
+K 7
+svn:log
+V 39
+removing prop 'svn:eol-style' from file
+PROPS-END
+
+Node-path: file
+Node-kind: file
+Node-action: change
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/descend-into-replace.dump b/subversion/tests/cmdline/svnsync_tests_data/descend-into-replace.dump
new file mode 100644
index 0000000..ed0abdc
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/descend-into-replace.dump
@@ -0,0 +1,386 @@
+SVN-fs-dump-format-version: 2
+
+UUID: b105e986-03ce-ba4b-b01a-b7a10dd0b7f5
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2008-06-14T09:27:00.265625Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 133
+Content-length: 133
+
+K 10
+svn:author
+V 6
+Daniel
+K 8
+svn:date
+V 27
+2008-06-14T09:28:38.718750Z
+K 7
+svn:log
+V 32
+Import the Greek Tree to /trunk.
+PROPS-END
+
+Node-path: branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: d1fa4a3ced98961674a441930a51f2d3
+Text-content-sha1: b347d1da69df9a6a70433ceeaa0d46c8483e8c03
+Content-length: 36
+
+PROPS-END
+This is the file 'alpha'.
+
+
+Node-path: trunk/A/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-content-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+Content-length: 35
+
+PROPS-END
+This is the file 'beta'.
+
+
+Node-path: trunk/A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 27
+Text-content-md5: 911c7a8d869b8c1e566f57da54d889c6
+Text-content-sha1: 784a9298366863da2b65ebf82b4e1123755a2421
+Content-length: 37
+
+PROPS-END
+This is the file 'lambda'.
+
+
+Node-path: trunk/A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: trunk/A/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: trunk/A/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: trunk/A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: trunk/A/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: trunk/A/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: trunk/A/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: 412138bd677d64cd1c32fafbffe6245d
+Text-content-sha1: 74b75d7f2e1a0292f17d5a57c570bd89783f5d1c
+Content-length: 36
+
+PROPS-END
+This is the file 'gamma'.
+
+
+Node-path: trunk/A/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Content-length: 33
+
+PROPS-END
+This is the file 'mu'.
+
+
+Node-path: trunk/iota
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-content-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+Content-length: 35
+
+PROPS-END
+This is the file 'iota'.
+
+
+Revision-number: 2
+Prop-content-length: 105
+Content-length: 105
+
+K 10
+svn:author
+V 6
+Daniel
+K 8
+svn:date
+V 27
+2010-07-08T21:22:47.484375Z
+K 7
+svn:log
+V 5
+add X
+PROPS-END
+
+Node-path: trunk/X
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 3
+Prop-content-length: 102
+Content-length: 102
+
+K 10
+svn:author
+V 6
+Daniel
+K 8
+svn:date
+V 27
+2010-07-08T21:25:07.187500Z
+K 7
+svn:log
+V 2
+cp
+PROPS-END
+
+Node-path: trunk/H
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: trunk/A
+
+
+Node-path: trunk/H/B
+Node-kind: dir
+Node-action: delete
+
+Node-path: trunk/H/B
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: trunk/X
+
+
+
+
+Revision-number: 4
+Prop-content-length: 103
+Content-length: 103
+
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-11-20T17:10:31.620281Z
+K 7
+svn:log
+V 3
+mmm
+PROPS-END
+
+Node-path: trunk/H/Z
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 3
+Node-copyfrom-path: trunk/A
+
+
+Node-path: trunk/H/Z/B
+Node-kind: dir
+Node-action: replace
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/descend-into-replace.expected.dump b/subversion/tests/cmdline/svnsync_tests_data/descend-into-replace.expected.dump
new file mode 100644
index 0000000..1e05dc0
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/descend-into-replace.expected.dump
@@ -0,0 +1,411 @@
+SVN-fs-dump-format-version: 2
+
+UUID: b105e986-03ce-ba4b-b01a-b7a10dd0b7f5
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2008-06-14T09:27:00.265625Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 133
+Content-length: 133
+
+K 10
+svn:author
+V 6
+Daniel
+K 8
+svn:date
+V 27
+2008-06-14T09:28:38.718750Z
+K 7
+svn:log
+V 32
+Import the Greek Tree to /trunk.
+PROPS-END
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 105
+Content-length: 105
+
+K 10
+svn:author
+V 6
+Daniel
+K 8
+svn:date
+V 27
+2010-07-08T21:22:47.484375Z
+K 7
+svn:log
+V 5
+add X
+PROPS-END
+
+Revision-number: 3
+Prop-content-length: 102
+Content-length: 102
+
+K 10
+svn:author
+V 6
+Daniel
+K 8
+svn:date
+V 27
+2010-07-08T21:25:07.187500Z
+K 7
+svn:log
+V 2
+cp
+PROPS-END
+
+Node-path: trunk/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/H/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/H/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/H/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/H/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/H/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: trunk/H/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: trunk/H/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: trunk/H/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/H/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: trunk/H/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: trunk/H/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: trunk/H/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: 412138bd677d64cd1c32fafbffe6245d
+Text-content-sha1: 74b75d7f2e1a0292f17d5a57c570bd89783f5d1c
+Content-length: 36
+
+PROPS-END
+This is the file 'gamma'.
+
+
+Node-path: trunk/H/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Content-length: 33
+
+PROPS-END
+This is the file 'mu'.
+
+
+Revision-number: 4
+Prop-content-length: 103
+Content-length: 103
+
+K 10
+svn:author
+V 6
+pburba
+K 8
+svn:date
+V 27
+2010-11-20T17:10:31.620281Z
+K 7
+svn:log
+V 3
+mmm
+PROPS-END
+
+Node-path: trunk/H/Z
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/H/Z/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/H/Z/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/H/Z/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/H/Z/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/H/Z/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: trunk/H/Z/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: trunk/H/Z/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: trunk/H/Z/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/H/Z/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: trunk/H/Z/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: trunk/H/Z/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: trunk/H/Z/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: 412138bd677d64cd1c32fafbffe6245d
+Text-content-sha1: 74b75d7f2e1a0292f17d5a57c570bd89783f5d1c
+Content-length: 36
+
+PROPS-END
+This is the file 'gamma'.
+
+
+Node-path: trunk/H/Z/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Content-length: 33
+
+PROPS-END
+This is the file 'mu'.
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/dir-prop-change.dump b/subversion/tests/cmdline/svnsync_tests_data/dir-prop-change.dump
new file mode 100644
index 0000000..458e9f3
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/dir-prop-change.dump
@@ -0,0 +1,47 @@
+SVN-fs-dump-format-version: 2
+
+UUID: b760f8ea-7608-0410-b9cc-88a00389c87b
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2005-12-21T23:12:28.152181Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 105
+Content-length: 105
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-12-21T23:12:58.931794Z
+K 7
+svn:log
+V 5
+blah
+
+PROPS-END
+
+Node-path:
+Node-kind: dir
+Node-action: change
+Prop-content-length: 37
+Content-length: 37
+
+K 10
+svn:ignore
+V 6
+stuff
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/file-dir-file.dump b/subversion/tests/cmdline/svnsync_tests_data/file-dir-file.dump
new file mode 100644
index 0000000..c9c5387
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/file-dir-file.dump
@@ -0,0 +1,77 @@
+SVN-fs-dump-format-version: 2
+
+UUID: bd4c07e3-6b11-0410-8af7-f8eca344b6da
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2006-04-14T22:49:34.922133Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 105
+Content-length: 105
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2006-04-14T22:50:10.459753Z
+K 7
+svn:log
+V 5
+stuff
+PROPS-END
+
+Node-path: a.txt
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: q
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: q/zot.txt
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: z.txt
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/largemods.dump b/subversion/tests/cmdline/svnsync_tests_data/largemods.dump
new file mode 100644
index 0000000..7d7198b
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/largemods.dump
@@ -0,0 +1,4932 @@
+SVN-fs-dump-format-version: 2
+
+UUID: aa74a681-3e22-43af-8e24-2e2eb9763713
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2011-05-04T11:26:27.741480Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 102
+Content-length: 102
+
+K 10
+svn:author
+V 5
+arwin
+K 8
+svn:date
+V 27
+2011-05-04T11:30:23.286750Z
+K 7
+svn:log
+V 3
+one
+PROPS-END
+
+Node-path: file1
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file10
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file100
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file101
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file102
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file103
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file104
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file105
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file106
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file107
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file108
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file109
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file11
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file110
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file111
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file112
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file113
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file114
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file115
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file116
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file117
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file118
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file119
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file12
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file120
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file121
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file122
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file123
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file124
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file125
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file126
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file127
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file128
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file13
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file14
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file15
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file16
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file17
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file18
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file19
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file2
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file20
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file21
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file22
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file23
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file24
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file25
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file26
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file27
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file28
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file29
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file3
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file30
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file31
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file32
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file33
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file34
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file35
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file36
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file37
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file38
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file39
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file4
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file40
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file41
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file42
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file43
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file44
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file45
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file46
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file47
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file48
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file49
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file5
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file50
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file51
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file52
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file53
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file54
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file55
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file56
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file57
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file58
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file59
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file6
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file60
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file61
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file62
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file63
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file64
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file65
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file66
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file67
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file68
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file69
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file7
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file70
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file71
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file72
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file73
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file74
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file75
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file76
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file77
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file78
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file79
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file8
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file80
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file81
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file82
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file83
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file84
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file85
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file86
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file87
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file88
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file89
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file9
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file90
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file91
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file92
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file93
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file94
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file95
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file96
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file97
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file98
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Node-path: file99
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 2
+Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3
+Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b
+Content-length: 12
+
+PROPS-END
+a
+
+
+Revision-number: 2
+Prop-content-length: 102
+Content-length: 102
+
+K 10
+svn:author
+V 5
+arwin
+K 8
+svn:date
+V 27
+2011-05-04T11:30:45.178713Z
+K 7
+svn:log
+V 3
+two
+PROPS-END
+
+Node-path: file1
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file10
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file100
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file101
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file102
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file103
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file104
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file105
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file106
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file107
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file108
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file109
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file11
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file110
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file111
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file112
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file113
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file114
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file115
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file116
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file117
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file118
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file119
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file12
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file120
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file121
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file122
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file123
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file124
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file125
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file126
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file127
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file128
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file13
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file14
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file15
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file16
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file17
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file18
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file19
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file2
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file20
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file21
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file22
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file23
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file24
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file25
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file26
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file27
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file28
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file29
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file3
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file30
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file31
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file32
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file33
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file34
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file35
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file36
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file37
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file38
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file39
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file4
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file40
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file41
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file42
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file43
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file44
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file45
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file46
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file47
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file48
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file49
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file5
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file50
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file51
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file52
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file53
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file54
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file55
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file56
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file57
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file58
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file59
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file6
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file60
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file61
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file62
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file63
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file64
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file65
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file66
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file67
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file68
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file69
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file7
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file70
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file71
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file72
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file73
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file74
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file75
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file76
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file77
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file78
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file79
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file8
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file80
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file81
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file82
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file83
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file84
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file85
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file86
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file87
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file88
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file89
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file9
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file90
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file91
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file92
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file93
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file94
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file95
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file96
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file97
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file98
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Node-path: file99
+Node-kind: file
+Node-action: change
+Text-content-length: 4
+Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb
+Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29
+Content-length: 4
+
+a
+a
+
+
+Revision-number: 3
+Prop-content-length: 104
+Content-length: 104
+
+K 10
+svn:author
+V 5
+arwin
+K 8
+svn:date
+V 27
+2011-05-04T11:30:59.056159Z
+K 7
+svn:log
+V 5
+three
+PROPS-END
+
+Node-path: file1
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file10
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file100
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file101
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file102
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file103
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file104
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file105
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file106
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file107
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file108
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file109
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file11
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file110
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file111
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file112
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file113
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file114
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file115
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file116
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file117
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file118
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file119
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file12
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file120
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file121
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file122
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file123
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file124
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file125
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file126
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file127
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file128
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file13
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file14
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file15
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file16
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file17
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file18
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file19
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file2
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file20
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file21
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file22
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file23
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file24
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file25
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file26
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file27
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file28
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file29
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file3
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file30
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file31
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file32
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file33
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file34
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file35
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file36
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file37
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file38
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file39
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file4
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file40
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file41
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file42
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file43
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file44
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file45
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file46
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file47
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file48
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file49
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file5
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file50
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file51
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file52
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file53
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file54
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file55
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file56
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file57
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file58
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file59
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file6
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file60
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file61
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file62
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file63
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file64
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file65
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file66
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file67
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file68
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file69
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file7
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file70
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file71
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file72
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file73
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file74
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file75
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file76
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file77
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file78
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file79
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file8
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file80
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file81
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file82
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file83
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file84
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file85
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file86
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file87
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file88
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file89
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file9
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file90
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file91
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file92
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file93
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file94
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file95
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file96
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file97
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file98
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
+Node-path: file99
+Node-kind: file
+Node-action: change
+Text-content-length: 6
+Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c
+Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e
+Content-length: 6
+
+a
+a
+a
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/mergeinfo-contains-r0.dump b/subversion/tests/cmdline/svnsync_tests_data/mergeinfo-contains-r0.dump
new file mode 100644
index 0000000..25d244b
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/mergeinfo-contains-r0.dump
@@ -0,0 +1,28 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 6ad9f820-0205-0410-94a2-c8cf366bb2b3
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2005-11-07T23:36:48.095832Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 84
+Content-length: 84
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2000-01-01T00:00:00.000000Z
+PROPS-END
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/modified-in-place.dump b/subversion/tests/cmdline/svnsync_tests_data/modified-in-place.dump
new file mode 100644
index 0000000..c2c0629
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/modified-in-place.dump
@@ -0,0 +1,76 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 103a2b4e-2705-0410-8a58-ca61540ba721
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2005-11-09T19:58:00.162066Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 114
+Content-length: 114
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-09T19:58:23.561942Z
+K 7
+svn:log
+V 13
+Added a file
+
+PROPS-END
+
+Node-path: foo.txt
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 15
+Text-content-md5: 26bb73556ceb32a5df30b733c5355ee5
+Text-content-sha1: 1c8c6df57252f0cc13dd2d763f4aef9affe1817c
+Content-length: 25
+
+PROPS-END
+this is a file
+
+
+Revision-number: 2
+Prop-content-length: 112
+Content-length: 112
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-09T19:58:38.412025Z
+K 7
+svn:log
+V 11
+modified it
+PROPS-END
+
+Node-path: foo.txt
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: d9bf46ecc7a555936e5115241b93258b
+Text-content-sha1: 1586ddcffca1e5f724b0eea7bb9d6b41f67c7ba6
+Content-length: 38
+
+this is a file
+now it's been modified
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/no-author.dump b/subversion/tests/cmdline/svnsync_tests_data/no-author.dump
new file mode 100644
index 0000000..d483b06
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/no-author.dump
@@ -0,0 +1,38 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 55b1b577-cf20-0410-8f4d-bb837066729e
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2006-10-27T18:55:45.622477Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 77
+Content-length: 77
+
+K 8
+svn:date
+V 27
+2006-10-27T18:55:49.840787Z
+K 7
+svn:log
+V 4
+foo
+
+PROPS-END
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/repo-with-copy-of-root-dir.dump b/subversion/tests/cmdline/svnsync_tests_data/repo-with-copy-of-root-dir.dump
new file mode 100644
index 0000000..20cb256
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/repo-with-copy-of-root-dir.dump
@@ -0,0 +1,39 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 7c1d1c84-647b-4ca5-9d20-057768670fee
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2009-06-29T12:22:48.566680Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 104
+Content-length: 104
+
+K 10
+svn:author
+V 6
+kamesh
+K 8
+svn:date
+V 27
+2009-06-29T12:23:55.134926Z
+K 7
+svn:log
+V 4
+wwww
+PROPS-END
+
+Node-path: full
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 0
+Node-copyfrom-path:
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/revprops.dump b/subversion/tests/cmdline/svnsync_tests_data/revprops.dump
new file mode 100644
index 0000000..550c70f
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/revprops.dump
@@ -0,0 +1,338 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 728f5e1e-a57c-4dd4-8b4f-ca752f5178f3
+
+Revision-number: 0
+Prop-content-length: 75
+Content-length: 75
+
+K 3
+foo
+V 6
+r0-bar
+K 8
+svn:date
+V 27
+2007-12-07T20:52:50.286894Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 132
+Content-length: 132
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2007-12-07T20:53:40.322712Z
+K 7
+svn:log
+V 0
+
+K 13
+svn:mergeinfo
+V 10
+/trunk:1:2
+PROPS-END
+
+Node-path: test.txt
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 9b43d872d923f848f999ff12f64adb67
+Text-content-sha1: c12ea8cefa562bf697f9d5e20329b695b97fcba7
+Content-length: 35
+
+PROPS-END
+This if file 'test.txt'.
+
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 124
+Content-length: 124
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2007-12-07T20:56:45.939703Z
+K 7
+svn:log
+V 26
+Import greek tree on trunk
+PROPS-END
+
+Node-path: trunk/A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 32
+Text-content-md5: 8ac35a19435b31b928de300b32e930cd
+Text-content-sha1: b3f85be4422626e7443b6ddcf60bb451dc7f0560
+Content-length: 42
+
+PROPS-END
+This is the file 'A/B/E/alpha'.
+
+
+Node-path: trunk/A/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 31
+Text-content-md5: de856b65f58804c29698ac73c77afca0
+Text-content-sha1: 9fd481a7c856d5e99c912b14df12ca436ec87ab8
+Content-length: 41
+
+PROPS-END
+This is the file 'A/B/E/beta'.
+
+
+Node-path: trunk/A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 31
+Text-content-md5: 74465fc532ca0c4a59782434fef78950
+Text-content-sha1: 4806e0b7d66e8de981613285ba82f1074419ac12
+Content-length: 41
+
+PROPS-END
+This is the file 'A/B/lambda'.
+
+
+Node-path: trunk/A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 29
+Text-content-md5: 9f6400f63c4dcd2e4f140b3eef1a323a
+Text-content-sha1: 76ca412fe5b6de13ab0ae4afa9b0c4b5722cdf41
+Content-length: 39
+
+PROPS-END
+This is the file 'A/D/G/pi'.
+
+
+Node-path: trunk/A/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: 14d1e17286af96fd468e68aea4b9148a
+Text-content-sha1: c6727fc45f4251276a957ff3e4b089ee3cc89a0e
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/G/rho'.
+
+
+Node-path: trunk/A/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: 2c41dafbd4d5735e1065b49b009ef0fd
+Text-content-sha1: 25e0fbfcac72ddc25bb5c677aea6676329cb9fac
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/G/tau'.
+
+
+Node-path: trunk/A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: 1d434602d24db956f1c481397340798f
+Text-content-sha1: 9b8aba1e49297c4842b87dca754a1692d58f8c96
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/H/chi'.
+
+
+Node-path: trunk/A/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 32
+Text-content-md5: 60561894aaf380df0d211aba60149262
+Text-content-sha1: 521fe6f78f95558a5069cbc65e75185f20a5792c
+Content-length: 42
+
+PROPS-END
+This is the file 'A/D/H/omega'.
+
+
+Node-path: trunk/A/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: f70dc430061ce3e0ddd98783ff0b451a
+Text-content-sha1: e2ca5cc8621b52accf604fbf1ef27144bdf89542
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/H/psi'.
+
+
+Node-path: trunk/A/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: f626c0bd25c0aed96c78917a653ed5c1
+Text-content-sha1: 9ca20fa69244b6352a4785b7e8dc9a76bdd61fa3
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/gamma'.
+
+
+Node-path: trunk/A/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: cbf47443741fa5692f3fe90a9e6532a6
+Text-content-sha1: 3c685125ea46ecb8b1f993f328a753fb2bd75b06
+Content-length: 35
+
+PROPS-END
+This is the file 'A/mu'.
+
+
+Node-path: trunk/iota
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-content-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+Content-length: 35
+
+PROPS-END
+This is the file 'iota'.
+
+
+Revision-number: 3
+Prop-content-length: 127
+Content-length: 127
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2007-12-07T21:12:26.232653Z
+K 7
+svn:log
+V 29
+* trunk/A/D/H/psi: extra line
+PROPS-END
+
+Node-path: trunk/A/D/H/psi
+Node-kind: file
+Node-action: change
+Text-content-length: 48
+Text-content-md5: 25969dfd7f76f630537591ab115d3188
+Text-content-sha1: 14640f53cbdfb55706234cbaa2b3d244ad56c2a7
+Content-length: 48
+
+This is the file 'A/D/H/psi'.
+Added extra line.
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/svnsync-move-and-modify.dump b/subversion/tests/cmdline/svnsync_tests_data/svnsync-move-and-modify.dump
new file mode 100644
index 0000000..e0b89ff
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/svnsync-move-and-modify.dump
@@ -0,0 +1,288 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 3f6eb9ce-fbb3-4b51-a522-daa270b3ac27
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2008-01-02T20:33:03.966127Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 115
+Content-length: 115
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2008-01-02T20:33:05.279494Z
+K 7
+svn:log
+V 17
+Add base folders.
+PROPS-END
+
+Node-path: project1
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: project1/branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: project1/tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: project1/trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 110
+Content-length: 110
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2008-01-02T20:33:06.265712Z
+K 7
+svn:log
+V 12
+Add folders.
+PROPS-END
+
+Node-path: project1/trunk/A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: project1/trunk/A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: project1/trunk/A/B/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 3
+Prop-content-length: 106
+Content-length: 106
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2008-01-02T20:33:07.283685Z
+K 7
+svn:log
+V 9
+Add file.
+PROPS-END
+
+Node-path: project1/trunk/A/B/C/file.txt
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 22
+Text-content-md5: fe0e7568a5195cc8a2d60ebf93d93c5b
+Text-content-sha1: 07788068d2ea1f14b0e77ffcd1fac64a6049ab2e
+Content-length: 32
+
+PROPS-END
+content for file.txt
+
+
+
+Revision-number: 4
+Prop-content-length: 117
+Content-length: 117
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2008-01-02T20:33:08.278330Z
+K 7
+svn:log
+V 19
+Add file file2.txt.
+PROPS-END
+
+Node-path: project1/trunk/A/B/C/file2.txt
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: f3659a1913324affa5d9ddc7451ce7a8
+Text-content-sha1: f27493e177fceaf3b7d25a5f28dcc194aebca0a9
+Content-length: 33
+
+PROPS-END
+content for file2.txt
+
+
+
+Revision-number: 5
+Prop-content-length: 127
+Content-length: 127
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2008-01-02T20:33:12.238116Z
+K 7
+svn:log
+V 29
+move project and modify file.
+PROPS-END
+
+Node-path: branches
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 4
+Node-copyfrom-path: project1/branches
+Prop-content-length: 34
+Content-length: 34
+
+K 13
+svn:mergeinfo
+V 0
+
+PROPS-END
+
+
+Node-path: project1/tags
+Node-action: delete
+
+
+Node-path: project1/trunk
+Node-action: delete
+
+
+Node-path: project1/branches
+Node-action: delete
+
+
+Node-path: tags
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 4
+Node-copyfrom-path: project1/tags
+Prop-content-length: 34
+Content-length: 34
+
+K 13
+svn:mergeinfo
+V 0
+
+PROPS-END
+
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 4
+Node-copyfrom-path: project1/trunk
+Prop-content-length: 34
+Content-length: 34
+
+K 13
+svn:mergeinfo
+V 0
+
+PROPS-END
+
+
+Node-path: trunk/A/B/C/file.txt
+Node-kind: file
+Node-action: change
+Text-content-length: 42
+Text-content-md5: c69eb9f5054332e29a5e46a80bd3980d
+Text-content-sha1: 910ccaa860667c5216776f8aa479c1177d02df6d
+Content-length: 42
+
+content for file.txt
+
+extra line of test
+
+
+
+Revision-number: 6
+Prop-content-length: 115
+Content-length: 115
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2008-01-02T20:33:14.261412Z
+K 7
+svn:log
+V 17
+removed project1.
+PROPS-END
+
+Node-path: project1
+Node-action: delete
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/svnsync-trunk-A-changes.dump b/subversion/tests/cmdline/svnsync_tests_data/svnsync-trunk-A-changes.dump
new file mode 100644
index 0000000..5e80293
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/svnsync-trunk-A-changes.dump
@@ -0,0 +1,449 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 11659000-ab01-11e1-8800-0080c8fbf679
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2007-12-07T20:52:50.286894Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 97
+Content-length: 97
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2007-12-07T20:53:40.322712Z
+K 7
+svn:log
+V 0
+
+PROPS-END
+
+Node-path: test.txt
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 9b43d872d923f848f999ff12f64adb67
+Content-length: 35
+
+PROPS-END
+This if file 'test.txt'.
+
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 124
+Content-length: 124
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2007-12-07T20:56:45.939703Z
+K 7
+svn:log
+V 26
+Import greek tree on trunk
+PROPS-END
+
+Node-path: trunk/A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 32
+Text-content-md5: 8ac35a19435b31b928de300b32e930cd
+Content-length: 42
+
+PROPS-END
+This is the file 'A/B/E/alpha'.
+
+
+Node-path: trunk/A/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 31
+Text-content-md5: de856b65f58804c29698ac73c77afca0
+Content-length: 41
+
+PROPS-END
+This is the file 'A/B/E/beta'.
+
+
+Node-path: trunk/A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 31
+Text-content-md5: 74465fc532ca0c4a59782434fef78950
+Content-length: 41
+
+PROPS-END
+This is the file 'A/B/lambda'.
+
+
+Node-path: trunk/A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 29
+Text-content-md5: 9f6400f63c4dcd2e4f140b3eef1a323a
+Content-length: 39
+
+PROPS-END
+This is the file 'A/D/G/pi'.
+
+
+Node-path: trunk/A/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: 14d1e17286af96fd468e68aea4b9148a
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/G/rho'.
+
+
+Node-path: trunk/A/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: 2c41dafbd4d5735e1065b49b009ef0fd
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/G/tau'.
+
+
+Node-path: trunk/A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: 1d434602d24db956f1c481397340798f
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/H/chi'.
+
+
+Node-path: trunk/A/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 32
+Text-content-md5: 60561894aaf380df0d211aba60149262
+Content-length: 42
+
+PROPS-END
+This is the file 'A/D/H/omega'.
+
+
+Node-path: trunk/A/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: f70dc430061ce3e0ddd98783ff0b451a
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/H/psi'.
+
+
+Node-path: trunk/A/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: f626c0bd25c0aed96c78917a653ed5c1
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/gamma'.
+
+
+Node-path: trunk/A/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: cbf47443741fa5692f3fe90a9e6532a6
+Content-length: 35
+
+PROPS-END
+This is the file 'A/mu'.
+
+
+Node-path: trunk/iota
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Content-length: 35
+
+PROPS-END
+This is the file 'iota'.
+
+
+Revision-number: 3
+Prop-content-length: 127
+Content-length: 127
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2007-12-07T21:12:26.232653Z
+K 7
+svn:log
+V 29
+* trunk/A/D/H/psi: extra line
+PROPS-END
+
+Node-path: trunk/A/D/H/psi
+Node-kind: file
+Node-action: change
+Text-content-length: 48
+Text-content-md5: 25969dfd7f76f630537591ab115d3188
+Content-length: 48
+
+This is the file 'A/D/H/psi'.
+Added extra line.
+
+
+Revision-number: 4
+Prop-content-length: 128
+Content-length: 128
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2007-12-09T21:25:05.711998Z
+K 7
+svn:log
+V 26
+make changes on A and iota
+PROPS-END
+
+Node-path: trunk/A
+Node-kind: dir
+Node-action: change
+Prop-content-length: 26
+Content-length: 26
+
+K 3
+foo
+V 3
+bar
+PROPS-END
+
+
+Node-path: trunk/iota
+Node-kind: file
+Node-action: change
+Prop-content-length: 32
+Content-length: 32
+
+K 5
+donot
+V 7
+include
+PROPS-END
+
+
+Revision-number: 5
+Prop-content-length: 116
+Content-length: 116
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2007-12-09T21:26:14.639561Z
+K 7
+svn:log
+V 14
+Renamed A to B
+PROPS-END
+
+Node-path: trunk/B
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 4
+Node-copyfrom-path: trunk/A
+
+
+Node-path: trunk/A
+Node-action: delete
+
+
+Revision-number: 6
+Prop-content-length: 116
+Content-length: 116
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2007-12-09T21:27:02.055549Z
+K 7
+svn:log
+V 14
+Moved B/D to A
+PROPS-END
+
+Node-path: trunk/A
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 5
+Node-copyfrom-path: trunk/B/D
+
+
+Node-path: trunk/B/D
+Node-action: delete
+
+
+Revision-number: 7
+Prop-content-length: 125
+Content-length: 125
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2007-12-09T21:28:07.300170Z
+K 7
+svn:log
+V 23
+Added property to trunk
+PROPS-END
+
+Node-path: trunk
+Node-kind: dir
+Node-action: change
+Prop-content-length: 31
+Content-length: 31
+
+K 6
+propon
+V 5
+trunk
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/svnsync-trunk-A-changes.expected.dump b/subversion/tests/cmdline/svnsync_tests_data/svnsync-trunk-A-changes.expected.dump
new file mode 100644
index 0000000..d2390d9
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/svnsync-trunk-A-changes.expected.dump
@@ -0,0 +1,525 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 11659000-ab01-11e1-8800-0080c8fbf679
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2007-12-07T20:52:50.286894Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 97
+Content-length: 97
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2007-12-07T20:53:40.322712Z
+K 7
+svn:log
+V 0
+
+PROPS-END
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 124
+Content-length: 124
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2007-12-07T20:56:45.939703Z
+K 7
+svn:log
+V 26
+Import greek tree on trunk
+PROPS-END
+
+Node-path: trunk/A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 32
+Text-content-md5: 8ac35a19435b31b928de300b32e930cd
+Text-content-sha1: b3f85be4422626e7443b6ddcf60bb451dc7f0560
+Content-length: 42
+
+PROPS-END
+This is the file 'A/B/E/alpha'.
+
+
+Node-path: trunk/A/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 31
+Text-content-md5: de856b65f58804c29698ac73c77afca0
+Text-content-sha1: 9fd481a7c856d5e99c912b14df12ca436ec87ab8
+Content-length: 41
+
+PROPS-END
+This is the file 'A/B/E/beta'.
+
+
+Node-path: trunk/A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 31
+Text-content-md5: 74465fc532ca0c4a59782434fef78950
+Text-content-sha1: 4806e0b7d66e8de981613285ba82f1074419ac12
+Content-length: 41
+
+PROPS-END
+This is the file 'A/B/lambda'.
+
+
+Node-path: trunk/A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 29
+Text-content-md5: 9f6400f63c4dcd2e4f140b3eef1a323a
+Text-content-sha1: 76ca412fe5b6de13ab0ae4afa9b0c4b5722cdf41
+Content-length: 39
+
+PROPS-END
+This is the file 'A/D/G/pi'.
+
+
+Node-path: trunk/A/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: 14d1e17286af96fd468e68aea4b9148a
+Text-content-sha1: c6727fc45f4251276a957ff3e4b089ee3cc89a0e
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/G/rho'.
+
+
+Node-path: trunk/A/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: 2c41dafbd4d5735e1065b49b009ef0fd
+Text-content-sha1: 25e0fbfcac72ddc25bb5c677aea6676329cb9fac
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/G/tau'.
+
+
+Node-path: trunk/A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: 1d434602d24db956f1c481397340798f
+Text-content-sha1: 9b8aba1e49297c4842b87dca754a1692d58f8c96
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/H/chi'.
+
+
+Node-path: trunk/A/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 32
+Text-content-md5: 60561894aaf380df0d211aba60149262
+Text-content-sha1: 521fe6f78f95558a5069cbc65e75185f20a5792c
+Content-length: 42
+
+PROPS-END
+This is the file 'A/D/H/omega'.
+
+
+Node-path: trunk/A/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: f70dc430061ce3e0ddd98783ff0b451a
+Text-content-sha1: e2ca5cc8621b52accf604fbf1ef27144bdf89542
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/H/psi'.
+
+
+Node-path: trunk/A/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: f626c0bd25c0aed96c78917a653ed5c1
+Text-content-sha1: 9ca20fa69244b6352a4785b7e8dc9a76bdd61fa3
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/gamma'.
+
+
+Node-path: trunk/A/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: cbf47443741fa5692f3fe90a9e6532a6
+Text-content-sha1: 3c685125ea46ecb8b1f993f328a753fb2bd75b06
+Content-length: 35
+
+PROPS-END
+This is the file 'A/mu'.
+
+
+Revision-number: 3
+Prop-content-length: 127
+Content-length: 127
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2007-12-07T21:12:26.232653Z
+K 7
+svn:log
+V 29
+* trunk/A/D/H/psi: extra line
+PROPS-END
+
+Node-path: trunk/A/D/H/psi
+Node-kind: file
+Node-action: change
+Text-content-length: 48
+Text-content-md5: 25969dfd7f76f630537591ab115d3188
+Text-content-sha1: 14640f53cbdfb55706234cbaa2b3d244ad56c2a7
+Content-length: 48
+
+This is the file 'A/D/H/psi'.
+Added extra line.
+
+
+Revision-number: 4
+Prop-content-length: 128
+Content-length: 128
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2007-12-09T21:25:05.711998Z
+K 7
+svn:log
+V 26
+make changes on A and iota
+PROPS-END
+
+Node-path: trunk/A
+Node-kind: dir
+Node-action: change
+Prop-content-length: 26
+Content-length: 26
+
+K 3
+foo
+V 3
+bar
+PROPS-END
+
+
+Revision-number: 5
+Prop-content-length: 116
+Content-length: 116
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2007-12-09T21:26:14.639561Z
+K 7
+svn:log
+V 14
+Renamed A to B
+PROPS-END
+
+Node-path: trunk/A
+Node-action: delete
+
+
+Revision-number: 6
+Prop-content-length: 116
+Content-length: 116
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2007-12-09T21:27:02.055549Z
+K 7
+svn:log
+V 14
+Moved B/D to A
+PROPS-END
+
+Node-path: trunk/A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 29
+Text-content-md5: 9f6400f63c4dcd2e4f140b3eef1a323a
+Text-content-sha1: 76ca412fe5b6de13ab0ae4afa9b0c4b5722cdf41
+Content-length: 39
+
+PROPS-END
+This is the file 'A/D/G/pi'.
+
+
+Node-path: trunk/A/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: 14d1e17286af96fd468e68aea4b9148a
+Text-content-sha1: c6727fc45f4251276a957ff3e4b089ee3cc89a0e
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/G/rho'.
+
+
+Node-path: trunk/A/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: 2c41dafbd4d5735e1065b49b009ef0fd
+Text-content-sha1: 25e0fbfcac72ddc25bb5c677aea6676329cb9fac
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/G/tau'.
+
+
+Node-path: trunk/A/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: 1d434602d24db956f1c481397340798f
+Text-content-sha1: 9b8aba1e49297c4842b87dca754a1692d58f8c96
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/H/chi'.
+
+
+Node-path: trunk/A/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 32
+Text-content-md5: 60561894aaf380df0d211aba60149262
+Text-content-sha1: 521fe6f78f95558a5069cbc65e75185f20a5792c
+Content-length: 42
+
+PROPS-END
+This is the file 'A/D/H/omega'.
+
+
+Node-path: trunk/A/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 48
+Text-content-md5: 25969dfd7f76f630537591ab115d3188
+Text-content-sha1: 14640f53cbdfb55706234cbaa2b3d244ad56c2a7
+Content-length: 58
+
+PROPS-END
+This is the file 'A/D/H/psi'.
+Added extra line.
+
+
+Node-path: trunk/A/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: f626c0bd25c0aed96c78917a653ed5c1
+Text-content-sha1: 9ca20fa69244b6352a4785b7e8dc9a76bdd61fa3
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/gamma'.
+
+
+Revision-number: 7
+Prop-content-length: 125
+Content-length: 125
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2007-12-09T21:28:07.300170Z
+K 7
+svn:log
+V 23
+Added property to trunk
+PROPS-END
+
+Node-path: trunk
+Node-kind: dir
+Node-action: change
+Prop-content-length: 31
+Content-length: 31
+
+K 6
+propon
+V 5
+trunk
+PROPS-END
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/svnsync-trunk-only.dump b/subversion/tests/cmdline/svnsync_tests_data/svnsync-trunk-only.dump
new file mode 100644
index 0000000..a4fc271
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/svnsync-trunk-only.dump
@@ -0,0 +1,316 @@
+SVN-fs-dump-format-version: 2
+
+UUID: a6952d52-ab01-11e1-b37f-0080c8fbf679
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2007-12-07T20:52:50.286894Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 97
+Content-length: 97
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2007-12-07T20:53:40.322712Z
+K 7
+svn:log
+V 0
+
+PROPS-END
+
+Node-path: test.txt
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 9b43d872d923f848f999ff12f64adb67
+Content-length: 35
+
+PROPS-END
+This if file 'test.txt'.
+
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 124
+Content-length: 124
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2007-12-07T20:56:45.939703Z
+K 7
+svn:log
+V 26
+Import greek tree on trunk
+PROPS-END
+
+Node-path: trunk/A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 32
+Text-content-md5: 8ac35a19435b31b928de300b32e930cd
+Content-length: 42
+
+PROPS-END
+This is the file 'A/B/E/alpha'.
+
+
+Node-path: trunk/A/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 31
+Text-content-md5: de856b65f58804c29698ac73c77afca0
+Content-length: 41
+
+PROPS-END
+This is the file 'A/B/E/beta'.
+
+
+Node-path: trunk/A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 31
+Text-content-md5: 74465fc532ca0c4a59782434fef78950
+Content-length: 41
+
+PROPS-END
+This is the file 'A/B/lambda'.
+
+
+Node-path: trunk/A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 29
+Text-content-md5: 9f6400f63c4dcd2e4f140b3eef1a323a
+Content-length: 39
+
+PROPS-END
+This is the file 'A/D/G/pi'.
+
+
+Node-path: trunk/A/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: 14d1e17286af96fd468e68aea4b9148a
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/G/rho'.
+
+
+Node-path: trunk/A/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: 2c41dafbd4d5735e1065b49b009ef0fd
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/G/tau'.
+
+
+Node-path: trunk/A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: 1d434602d24db956f1c481397340798f
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/H/chi'.
+
+
+Node-path: trunk/A/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 32
+Text-content-md5: 60561894aaf380df0d211aba60149262
+Content-length: 42
+
+PROPS-END
+This is the file 'A/D/H/omega'.
+
+
+Node-path: trunk/A/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: f70dc430061ce3e0ddd98783ff0b451a
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/H/psi'.
+
+
+Node-path: trunk/A/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: f626c0bd25c0aed96c78917a653ed5c1
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/gamma'.
+
+
+Node-path: trunk/A/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: cbf47443741fa5692f3fe90a9e6532a6
+Content-length: 35
+
+PROPS-END
+This is the file 'A/mu'.
+
+
+Node-path: trunk/iota
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Content-length: 35
+
+PROPS-END
+This is the file 'iota'.
+
+
+Revision-number: 3
+Prop-content-length: 127
+Content-length: 127
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2007-12-07T21:12:26.232653Z
+K 7
+svn:log
+V 29
+* trunk/A/D/H/psi: extra line
+PROPS-END
+
+Node-path: trunk/A/D/H/psi
+Node-kind: file
+Node-action: change
+Text-content-length: 48
+Text-content-md5: 25969dfd7f76f630537591ab115d3188
+Content-length: 48
+
+This is the file 'A/D/H/psi'.
+Added extra line.
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/svnsync-trunk-only.expected.dump b/subversion/tests/cmdline/svnsync_tests_data/svnsync-trunk-only.expected.dump
new file mode 100644
index 0000000..8e61c40
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/svnsync-trunk-only.expected.dump
@@ -0,0 +1,317 @@
+SVN-fs-dump-format-version: 2
+
+UUID: a6952d52-ab01-11e1-b37f-0080c8fbf679
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2007-12-07T20:52:50.286894Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 97
+Content-length: 97
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2007-12-07T20:53:40.322712Z
+K 7
+svn:log
+V 0
+
+PROPS-END
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 124
+Content-length: 124
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2007-12-07T20:56:45.939703Z
+K 7
+svn:log
+V 26
+Import greek tree on trunk
+PROPS-END
+
+Node-path: trunk/A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 32
+Text-content-md5: 8ac35a19435b31b928de300b32e930cd
+Text-content-sha1: b3f85be4422626e7443b6ddcf60bb451dc7f0560
+Content-length: 42
+
+PROPS-END
+This is the file 'A/B/E/alpha'.
+
+
+Node-path: trunk/A/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 31
+Text-content-md5: de856b65f58804c29698ac73c77afca0
+Text-content-sha1: 9fd481a7c856d5e99c912b14df12ca436ec87ab8
+Content-length: 41
+
+PROPS-END
+This is the file 'A/B/E/beta'.
+
+
+Node-path: trunk/A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 31
+Text-content-md5: 74465fc532ca0c4a59782434fef78950
+Text-content-sha1: 4806e0b7d66e8de981613285ba82f1074419ac12
+Content-length: 41
+
+PROPS-END
+This is the file 'A/B/lambda'.
+
+
+Node-path: trunk/A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 29
+Text-content-md5: 9f6400f63c4dcd2e4f140b3eef1a323a
+Text-content-sha1: 76ca412fe5b6de13ab0ae4afa9b0c4b5722cdf41
+Content-length: 39
+
+PROPS-END
+This is the file 'A/D/G/pi'.
+
+
+Node-path: trunk/A/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: 14d1e17286af96fd468e68aea4b9148a
+Text-content-sha1: c6727fc45f4251276a957ff3e4b089ee3cc89a0e
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/G/rho'.
+
+
+Node-path: trunk/A/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: 2c41dafbd4d5735e1065b49b009ef0fd
+Text-content-sha1: 25e0fbfcac72ddc25bb5c677aea6676329cb9fac
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/G/tau'.
+
+
+Node-path: trunk/A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/A/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: 1d434602d24db956f1c481397340798f
+Text-content-sha1: 9b8aba1e49297c4842b87dca754a1692d58f8c96
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/H/chi'.
+
+
+Node-path: trunk/A/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 32
+Text-content-md5: 60561894aaf380df0d211aba60149262
+Text-content-sha1: 521fe6f78f95558a5069cbc65e75185f20a5792c
+Content-length: 42
+
+PROPS-END
+This is the file 'A/D/H/omega'.
+
+
+Node-path: trunk/A/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: f70dc430061ce3e0ddd98783ff0b451a
+Text-content-sha1: e2ca5cc8621b52accf604fbf1ef27144bdf89542
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/H/psi'.
+
+
+Node-path: trunk/A/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 30
+Text-content-md5: f626c0bd25c0aed96c78917a653ed5c1
+Text-content-sha1: 9ca20fa69244b6352a4785b7e8dc9a76bdd61fa3
+Content-length: 40
+
+PROPS-END
+This is the file 'A/D/gamma'.
+
+
+Node-path: trunk/A/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: cbf47443741fa5692f3fe90a9e6532a6
+Text-content-sha1: 3c685125ea46ecb8b1f993f328a753fb2bd75b06
+Content-length: 35
+
+PROPS-END
+This is the file 'A/mu'.
+
+
+Node-path: trunk/iota
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-content-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+Content-length: 35
+
+PROPS-END
+This is the file 'iota'.
+
+
+Revision-number: 3
+Prop-content-length: 127
+Content-length: 127
+
+K 10
+svn:author
+V 3
+lgo
+K 8
+svn:date
+V 27
+2007-12-07T21:12:26.232653Z
+K 7
+svn:log
+V 29
+* trunk/A/D/H/psi: extra line
+PROPS-END
+
+Node-path: trunk/A/D/H/psi
+Node-kind: file
+Node-action: change
+Text-content-length: 48
+Text-content-md5: 25969dfd7f76f630537591ab115d3188
+Text-content-sha1: 14640f53cbdfb55706234cbaa2b3d244ad56c2a7
+Content-length: 48
+
+This is the file 'A/D/H/psi'.
+Added extra line.
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/tag-empty-trunk.dump b/subversion/tests/cmdline/svnsync_tests_data/tag-empty-trunk.dump
new file mode 100644
index 0000000..381c907
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/tag-empty-trunk.dump
@@ -0,0 +1,75 @@
+SVN-fs-dump-format-version: 2
+
+UUID: c38a9451-c504-0410-b613-a343b2d5fd66
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2005-11-04T23:03:50.600647Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 105
+Content-length: 105
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-04T23:04:15.060966Z
+K 7
+svn:log
+V 5
+setup
+PROPS-END
+
+Node-path: tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 106
+Content-length: 106
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-04T23:04:32.222710Z
+K 7
+svn:log
+V 6
+tag it
+PROPS-END
+
+Node-path: tags/an-empty-tag
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: trunk
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/tag-trunk-with-dir.dump b/subversion/tests/cmdline/svnsync_tests_data/tag-trunk-with-dir.dump
new file mode 100644
index 0000000..4c87b38
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/tag-trunk-with-dir.dump
@@ -0,0 +1,84 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 20f4d640-c604-0410-95c8-f4ec7efce8dc
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2005-11-05T00:10:44.707628Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 105
+Content-length: 105
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-05T00:11:07.194039Z
+K 7
+svn:log
+V 5
+stuff
+PROPS-END
+
+Node-path: tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk/dir
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 111
+Content-length: 111
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-05T00:11:18.910470Z
+K 7
+svn:log
+V 10
+more stuff
+PROPS-END
+
+Node-path: tags/a-tag
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: trunk
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/tag-trunk-with-file.dump b/subversion/tests/cmdline/svnsync_tests_data/tag-trunk-with-file.dump
new file mode 100644
index 0000000..00c81e6
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/tag-trunk-with-file.dump
@@ -0,0 +1,125 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 9bc56067-ff04-0410-83a8-bf961f5bc06a
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2005-11-07T20:21:44.409773Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 115
+Content-length: 115
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-07T20:22:08.601650Z
+K 7
+svn:log
+V 14
+initial layout
+PROPS-END
+
+Node-path: branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 111
+Content-length: 111
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-07T20:22:25.607033Z
+K 7
+svn:log
+V 10
+add a file
+PROPS-END
+
+Node-path: trunk/foo.txt
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 16
+Text-content-md5: 0bebeeefa6853d064099b54a81a627e8
+Text-content-sha1: b78087338d3daf51e667056c78fd8089bb51d5ec
+Content-length: 26
+
+PROPS-END
+this is a file.
+
+
+Revision-number: 3
+Prop-content-length: 113
+Content-length: 113
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-07T20:22:46.490796Z
+K 7
+svn:log
+V 12
+make a tag.
+
+PROPS-END
+
+Node-path: tags/a-tag-with-file-contents
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: trunk
+
+
+Node-path: tags/a-tag-with-file-contents/foo.txt
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: trunk/foo.txt
+Text-copy-source-md5: 0bebeeefa6853d064099b54a81a627e8
+Text-copy-source-sha1: b78087338d3daf51e667056c78fd8089bb51d5ec
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/tag-trunk-with-file2.dump b/subversion/tests/cmdline/svnsync_tests_data/tag-trunk-with-file2.dump
new file mode 100644
index 0000000..dfba96b
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/tag-trunk-with-file2.dump
@@ -0,0 +1,116 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 47c84ee7-0005-0410-843c-81123ebdd4be
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2005-11-07T22:09:05.681726Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 115
+Content-length: 115
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-07T22:09:30.282710Z
+K 7
+svn:log
+V 14
+initial layout
+PROPS-END
+
+Node-path: branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 111
+Content-length: 111
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-07T22:09:44.146564Z
+K 7
+svn:log
+V 10
+add a file
+PROPS-END
+
+Node-path: trunk/foo.txt
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 16
+Text-content-md5: 98475036dc73d318982805bf4b16e8b2
+Text-content-sha1: d7dff2b1ef48b9c20c23d7b3a08b557957cec3c9
+Content-length: 26
+
+PROPS-END
+This is a file.
+
+
+Revision-number: 3
+Prop-content-length: 113
+Content-length: 113
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-07T22:10:03.753668Z
+K 7
+svn:log
+V 12
+make a tag.
+
+PROPS-END
+
+Node-path: tags/a-tag
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: trunk
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/tag-with-modified-file.dump b/subversion/tests/cmdline/svnsync_tests_data/tag-with-modified-file.dump
new file mode 100644
index 0000000..e14cfe6
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/tag-with-modified-file.dump
@@ -0,0 +1,128 @@
+SVN-fs-dump-format-version: 2
+
+UUID: a5ac5a0e-0105-0410-a82a-edd0b5a7eba7
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2005-11-07T22:20:00.772497Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 115
+Content-length: 115
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-07T22:20:16.583093Z
+K 7
+svn:log
+V 14
+initial layout
+PROPS-END
+
+Node-path: branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 2
+Prop-content-length: 112
+Content-length: 112
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-07T22:20:39.840815Z
+K 7
+svn:log
+V 11
+add a file
+
+PROPS-END
+
+Node-path: trunk/foo.txt
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 8
+Text-content-md5: 2973ad11c859f59a44bf3ae776a346b1
+Text-content-sha1: ac657117c81fad228ccdb8c6d0861bdafca9ba3c
+Content-length: 18
+
+PROPS-END
+a file.
+
+
+Revision-number: 3
+Prop-content-length: 126
+Content-length: 126
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2005-11-07T22:21:15.332479Z
+K 7
+svn:log
+V 25
+tag with a modified file
+
+PROPS-END
+
+Node-path: tags/a-tag-with-mods
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: trunk
+
+
+Node-path: tags/a-tag-with-mods/foo.txt
+Node-kind: file
+Node-action: change
+Text-content-length: 17
+Text-content-md5: 30ec0b35cc9c19332cbab42768cf5df6
+Text-content-sha1: c7197a5efb7afcc64f713f7718f0ab1638f2098b
+Content-length: 17
+
+a modified file.
+
+
diff --git a/subversion/tests/cmdline/svnsync_tests_data/url-encoding-bug.dump b/subversion/tests/cmdline/svnsync_tests_data/url-encoding-bug.dump
new file mode 100644
index 0000000..c4b6e8f
--- /dev/null
+++ b/subversion/tests/cmdline/svnsync_tests_data/url-encoding-bug.dump
@@ -0,0 +1,107 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 182b6a41-8020-0410-bcb2-35c7cd5e7ab9
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2006-10-23T20:25:32.242666Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 101
+Content-length: 101
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2006-10-23T20:26:00.932678Z
+K 7
+svn:log
+V 1
+,
+PROPS-END
+
+Node-path: foo%20bar.txt
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 15
+Text-content-md5: e58d666df0d6cc6e87a7e48440d637e9
+Text-content-sha1: ca53b1f604b633a6bc3cf75325932596efc4717f
+Content-length: 25
+
+PROPS-END
+blah blah blah
+
+
+Revision-number: 2
+Prop-content-length: 101
+Content-length: 101
+
+K 10
+svn:author
+V 6
+rooneg
+K 8
+svn:date
+V 27
+2006-10-23T20:26:12.565554Z
+K 7
+svn:log
+V 1
+,
+PROPS-END
+
+Node-path: foo bar.txt
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: foo%20bar.txt
+Text-copy-source-md5: e58d666df0d6cc6e87a7e48440d637e9
+Text-copy-source-sha1: ca53b1f604b633a6bc3cf75325932596efc4717f
+
+
+Node-path: foo%20bar.txt
+Node-action: delete
+
+
+Revision-number: 3
+Prop-content-length: 102
+Content-length: 102
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2006-10-23T20:52:11.378819Z
+K 7
+svn:log
+V 1
+,
+PROPS-END
+
+Node-path: foo%20bar.txt
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: foo bar.txt
+Text-copy-source-md5: e58d666df0d6cc6e87a7e48440d637e9
+Text-copy-source-sha1: ca53b1f604b633a6bc3cf75325932596efc4717f
+
+
+Node-path: foo bar.txt
+Node-action: delete
+
+
diff --git a/subversion/tests/cmdline/svntest/__init__.py b/subversion/tests/cmdline/svntest/__init__.py
new file mode 100644
index 0000000..fd15fff
--- /dev/null
+++ b/subversion/tests/cmdline/svntest/__init__.py
@@ -0,0 +1,60 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# any bozos that do "from svntest import *" should die. export nothing
+# to the dumbasses.
+__all__ = [ ]
+
+import sys
+if sys.hexversion < 0x2070000:
+ sys.stderr.write('[SKIPPED] at least Python 2.7 is required\n')
+
+ # note: exiting is a bit harsh for a library module, but we really do
+ # require Python 2.7. this package isn't going to work otherwise.
+
+ # we're skipping this test, not failing, so exit with 0
+ sys.exit(0)
+
+try:
+ import sqlite3
+except ImportError:
+ sys.stderr.write('[SKIPPED] Python sqlite3 module required\n')
+ sys.exit(0)
+
+# don't export this name
+del sys
+
+class Failure(Exception):
+ 'Base class for exceptions that indicate test failure'
+ pass
+
+class Skip(Exception):
+ 'Base class for exceptions that indicate test was skipped'
+ pass
+
+# import in a specific order: things with the fewest circular imports first.
+from . import testcase
+from . import wc
+from . import verify
+from . import tree
+from . import sandbox
+from . import main
+from . import actions
+from . import factory
diff --git a/subversion/tests/cmdline/svntest/actions.py b/subversion/tests/cmdline/svntest/actions.py
new file mode 100644
index 0000000..8930b63
--- /dev/null
+++ b/subversion/tests/cmdline/svntest/actions.py
@@ -0,0 +1,2355 @@
+#
+# actions.py: routines that actually run the svn client.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.tigris.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+import os, shutil, re, sys, errno
+import difflib, pprint, logging
+import xml.parsers.expat
+from xml.dom.minidom import parseString
+if sys.version_info[0] >= 3:
+ # Python >=3.0
+ from io import StringIO
+else:
+ # Python <3.0
+ from cStringIO import StringIO
+
+import svntest
+from svntest import main, verify, tree, wc, sandbox
+from svntest import Failure
+
+logger = logging.getLogger()
+
+# (abbreviation)
+Item = svntest.wc.StateItem
+
+def _log_tree_state(msg, actual, subtree=""):
+ if subtree:
+ subtree += os.sep
+ o = StringIO()
+ o.write(msg + '\n')
+ tree.dump_tree_script(actual, subtree, stream=o)
+ logger.warn(o.getvalue())
+ o.close()
+
+def no_sleep_for_timestamps():
+ os.environ['SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_SLEEP_FOR_TIMESTAMPS'] = 'yes'
+
+def do_sleep_for_timestamps():
+ os.environ['SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_SLEEP_FOR_TIMESTAMPS'] = 'no'
+
+def no_relocate_validation():
+ os.environ['SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_RELOCATE_VALIDATION'] = 'yes'
+
+def do_relocate_validation():
+ os.environ['SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_RELOCATE_VALIDATION'] = 'no'
+
+def setup_pristine_greek_repository():
+ """Create the pristine repository and 'svn import' the greek tree"""
+
+ # these directories don't exist out of the box, so we may have to create them
+ if not os.path.exists(main.general_wc_dir):
+ os.makedirs(main.general_wc_dir)
+
+ if not os.path.exists(main.general_repo_dir):
+ os.makedirs(main.general_repo_dir) # this also creates all the intermediate dirs
+
+ # If there's no pristine repos, create one.
+ if not os.path.exists(main.pristine_greek_repos_dir):
+ if main.options.fsfs_version is not None:
+ main.unpack_greek_repos(main.pristine_greek_repos_dir)
+ else:
+ main.create_repos(main.pristine_greek_repos_dir)
+
+ # if this is dav, gives us access rights to import the greek tree.
+ if main.is_ra_type_dav():
+ authz_file = os.path.join(main.work_dir, "authz")
+ main.file_write(authz_file, "[/]\n* = rw\n")
+
+ # dump the greek tree to disk.
+ main.greek_state.write_to_disk(main.greek_dump_dir)
+
+ # import the greek tree, using l:foo/p:bar
+ ### todo: svn should not be prompting for auth info when using
+ ### repositories with no auth/auth requirements
+ _, output, _ = main.run_svn(None, 'import', '-m',
+ 'Log message for revision 1.',
+ main.greek_dump_dir,
+ main.pristine_greek_repos_url)
+
+ # verify the printed output of 'svn import'.
+ lastline = output.pop().strip()
+ match = re.search("(Committed|Imported) revision [0-9]+.", lastline)
+ if not match:
+ logger.error("import did not succeed, while creating greek repos.")
+ logger.error("The final line from 'svn import' was:")
+ logger.error(lastline)
+ sys.exit(1)
+ output_tree = wc.State.from_commit(output)
+
+ expected_output_tree = main.greek_state.copy(main.greek_dump_dir)
+ expected_output_tree.tweak(verb='Adding',
+ contents=None)
+
+ try:
+ expected_output_tree.compare_and_display('output', output_tree)
+ except tree.SVNTreeUnequal:
+ verify.display_trees("ERROR: output of import command is unexpected.",
+ "OUTPUT TREE",
+ expected_output_tree.old_tree(),
+ output_tree.old_tree())
+ sys.exit(1)
+
+ # Finally, disallow any changes to the "pristine" repos.
+ error_msg = "Don't modify the pristine repository"
+ create_failing_hook(main.pristine_greek_repos_dir, 'start-commit', error_msg)
+ create_failing_hook(main.pristine_greek_repos_dir, 'pre-lock', error_msg)
+ create_failing_hook(main.pristine_greek_repos_dir, 'pre-revprop-change', error_msg)
+
+
+######################################################################
+
+def guarantee_empty_repository(path, minor_version):
+ """Guarantee that a local svn repository exists at PATH, containing
+ nothing."""
+
+ if path == main.pristine_greek_repos_dir:
+ logger.error("attempt to overwrite the pristine repos! Aborting.")
+ sys.exit(1)
+
+ # create an empty repository at PATH.
+ main.safe_rmtree(path)
+ main.create_repos(path, minor_version)
+
+# Used by every test, so that they can run independently of one
+# another. Every time this routine is called, it recursively copies
+# the `pristine repos' to a new location.
+# Note: make sure setup_pristine_greek_repository was called once before
+# using this function.
+def guarantee_greek_repository(path, minor_version):
+ """Guarantee that a local svn repository exists at PATH, containing
+ nothing but the greek-tree at revision 1."""
+
+ if path == main.pristine_greek_repos_dir:
+ logger.error("attempt to overwrite the pristine repos! Aborting.")
+ sys.exit(1)
+
+ # copy the pristine repository to PATH.
+ main.safe_rmtree(path)
+ if (main.options.fsfs_version is not None):
+ failed = main.unpack_greek_repos(path)
+ else:
+ failed = main.copy_repos(main.pristine_greek_repos_dir,
+ path, 1, 1, minor_version)
+ if failed:
+ logger.error("copying repository failed.")
+ sys.exit(1)
+
+ # make the repos world-writeable, for mod_dav_svn's sake.
+ main.chmod_tree(path, main.S_ALL_RW, main.S_ALL_RW)
+
+ # give the repository a unique UUID
+ run_and_verify_svnadmin([], [], 'setuuid', path)
+
+def run_and_verify_atomic_ra_revprop_change(expected_stdout,
+ expected_stderr,
+ expected_exit,
+ url, revision, propname,
+ old_propval, propval,
+ want_error):
+ """Run atomic-ra-revprop-change helper and check its output and exit code.
+ Transforms OLD_PROPVAL and PROPVAL into a skel.
+ For HTTP, the default HTTP library is used."""
+
+ KEY_OLD_PROPVAL = "old_value_p"
+ KEY_NEW_PROPVAL = "value"
+
+ def skel_make_atom(word):
+ return "%d %s" % (len(word), word)
+
+ def make_proplist_skel_part(nick, val):
+ if val is None:
+ return ""
+ else:
+ return "%s %s" % (skel_make_atom(nick), skel_make_atom(val))
+
+ skel = "( %s %s )" % (make_proplist_skel_part(KEY_OLD_PROPVAL, old_propval),
+ make_proplist_skel_part(KEY_NEW_PROPVAL, propval))
+
+ exit_code, out, err = main.run_atomic_ra_revprop_change(url, revision,
+ propname, skel,
+ want_error)
+ verify.verify_outputs("Unexpected output", out, err,
+ expected_stdout, expected_stderr)
+ verify.verify_exit_code("Unexpected return code", exit_code, expected_exit)
+ return exit_code, out, err
+
+
+def run_and_verify_svnlook(expected_stdout,
+ expected_stderr, *varargs):
+ """Like run_and_verify_svnlook2, but the expected exit code is
+ assumed to be 0 if no output is expected on stderr, and 1 otherwise."""
+
+ expected_exit = 0
+ if expected_stderr is not None and expected_stderr != []:
+ expected_exit = 1
+ return run_and_verify_svnlook2(expected_stdout, expected_stderr,
+ expected_exit, *varargs)
+
+def run_and_verify_svnlook2(expected_stdout, expected_stderr,
+ expected_exit, *varargs):
+ """Run svnlook command and check its output and exit code."""
+
+ exit_code, out, err = main.run_svnlook(*varargs)
+ verify.verify_outputs("Unexpected output", out, err,
+ expected_stdout, expected_stderr)
+ verify.verify_exit_code("Unexpected return code", exit_code, expected_exit)
+ return exit_code, out, err
+
+
+def run_and_verify_svnadmin(expected_stdout,
+ expected_stderr, *varargs):
+ """Like run_and_verify_svnadmin2, but the expected exit code is
+ assumed to be 0 if no output is expected on stderr, and 1 otherwise."""
+
+ expected_exit = 0
+ if expected_stderr is not None and expected_stderr != []:
+ expected_exit = 1
+ return run_and_verify_svnadmin2(expected_stdout, expected_stderr,
+ expected_exit, *varargs)
+
+def run_and_verify_svnadmin2(expected_stdout, expected_stderr,
+ expected_exit, *varargs):
+ """Run svnadmin command and check its output and exit code."""
+
+ exit_code, out, err = main.run_svnadmin(*varargs)
+ verify.verify_outputs("Unexpected output", out, err,
+ expected_stdout, expected_stderr)
+ verify.verify_exit_code("Unexpected return code", exit_code, expected_exit)
+ return exit_code, out, err
+
+
+def run_and_verify_svnfsfs(expected_stdout,
+ expected_stderr, *varargs):
+ """Like run_and_verify_svnfsfs2, but the expected exit code is
+ assumed to be 0 if no output is expected on stderr, and 1 otherwise."""
+
+ expected_exit = 0
+ if expected_stderr is not None and expected_stderr != []:
+ expected_exit = 1
+ return run_and_verify_svnfsfs2(expected_stdout, expected_stderr,
+ expected_exit, *varargs)
+
+def run_and_verify_svnfsfs2(expected_stdout, expected_stderr,
+ expected_exit, *varargs):
+ """Run svnfsfs command and check its output and exit code."""
+
+ exit_code, out, err = main.run_svnfsfs(*varargs)
+ verify.verify_outputs("Unexpected output", out, err,
+ expected_stdout, expected_stderr)
+ verify.verify_exit_code("Unexpected return code", exit_code, expected_exit)
+ return exit_code, out, err
+
+
+def run_and_verify_svnversion(wc_dir, trail_url,
+ expected_stdout, expected_stderr, *varargs):
+ """like run_and_verify_svnversion2, but the expected exit code is
+ assumed to be 0 if no output is expected on stderr, and 1 otherwise."""
+
+ expected_exit = 0
+ if expected_stderr is not None and expected_stderr != []:
+ expected_exit = 1
+ return run_and_verify_svnversion2(wc_dir, trail_url,
+ expected_stdout, expected_stderr,
+ expected_exit, *varargs)
+
+def run_and_verify_svnversion2(wc_dir, trail_url,
+ expected_stdout, expected_stderr,
+ expected_exit, *varargs):
+ """Run svnversion command and check its output and exit code."""
+
+ if trail_url is None:
+ exit_code, out, err = main.run_svnversion(wc_dir, *varargs)
+ else:
+ exit_code, out, err = main.run_svnversion(wc_dir, trail_url, *varargs)
+
+ verify.verify_outputs("Unexpected output", out, err,
+ expected_stdout, expected_stderr)
+ verify.verify_exit_code("Unexpected return code", exit_code, expected_exit)
+ return exit_code, out, err
+
+def run_and_verify_svn(expected_stdout, expected_stderr, *varargs):
+ """like run_and_verify_svn2, but the expected exit code is assumed to
+ be 0 if no output is expected on stderr, and 1 otherwise."""
+
+ expected_exit = 0
+ if expected_stderr is not None:
+ if isinstance(expected_stderr, verify.ExpectedOutput):
+ if not expected_stderr.matches([]):
+ expected_exit = 1
+ elif expected_stderr != []:
+ expected_exit = 1
+ return run_and_verify_svn2(expected_stdout, expected_stderr,
+ expected_exit, *varargs)
+
+def run_and_verify_svn2(expected_stdout, expected_stderr,
+ expected_exit, *varargs):
+ """Invoke main.run_svn() with *VARARGS. Return exit code as int; stdout,
+ stderr as lists of lines (including line terminators). For both
+ EXPECTED_STDOUT and EXPECTED_STDERR, create an appropriate instance of
+ verify.ExpectedOutput (if necessary):
+
+ - If it is an array of strings, create a vanilla ExpectedOutput.
+
+ - If it is a single string, create a RegexOutput that must match every
+ line (for stdout) or any line (for stderr) of the expected output.
+
+ - If it is already an instance of ExpectedOutput
+ (e.g. UnorderedOutput), leave it alone.
+
+ ...and invoke compare_and_display_lines() on a label based
+ on the name of the stream being compared (e.g. STDOUT), the
+ ExpectedOutput instance, and the actual output.
+
+ If EXPECTED_STDOUT is None, do not check stdout.
+ EXPECTED_STDERR may not be None.
+
+ If output checks pass, the expected and actual codes are compared.
+
+ If a comparison fails, a Failure will be raised."""
+
+ if expected_stderr is None:
+ raise verify.SVNIncorrectDatatype("expected_stderr must not be None")
+
+ want_err = None
+ if isinstance(expected_stderr, verify.ExpectedOutput):
+ if not expected_stderr.matches([]):
+ want_err = True
+ elif expected_stderr != []:
+ want_err = True
+
+ exit_code, out, err = main.run_svn(want_err, *varargs)
+ verify.verify_outputs("Unexpected output", out, err,
+ expected_stdout, expected_stderr)
+ verify.verify_exit_code("Unexpected return code", exit_code, expected_exit)
+ return exit_code, out, err
+
+def run_and_verify_load(repo_dir, dump_file_content,
+ bypass_prop_validation = False,
+ normalize_props = False):
+ "Runs 'svnadmin load' and reports any errors."
+ if not isinstance(dump_file_content, list):
+ raise TypeError("dump_file_content argument should have list type")
+ expected_stderr = []
+ args = ()
+ if bypass_prop_validation:
+ args += ('--bypass-prop-validation',)
+ if normalize_props:
+ args += ('--normalize-props',)
+ main.run_command_stdin(
+ main.svnadmin_binary, expected_stderr, 0, True, dump_file_content,
+ 'load', '--force-uuid', '--quiet', repo_dir, *args)
+
+
+def run_and_verify_dump(repo_dir, deltas=False):
+ "Runs 'svnadmin dump' and reports any errors, returning the dump content."
+ args = ()
+ if deltas:
+ args += ('--deltas',)
+ exit_code, output, errput = run_and_verify_svnadmin(
+ verify.AnyOutput, [],
+ 'dump', '--quiet', repo_dir, *args)
+ return output
+
+
+def run_and_verify_svnrdump(dumpfile_content, expected_stdout,
+ expected_stderr, expected_exit, *varargs):
+ """Runs 'svnrdump dump|load' depending on dumpfile_content and
+ reports any errors."""
+ exit_code, output, err = main.run_svnrdump(dumpfile_content, *varargs)
+
+ # Since main.run_svnrdump() uses binary mode, normalize the stderr
+ # line endings on Windows ourselves.
+ if sys.platform == 'win32':
+ err = map(lambda x : x.replace('\r\n', '\n'), err)
+
+ # Ignore "consider upgrade" warnings to allow regression tests to pass
+ # when run against a 1.6 mod_dav_svn.
+ for index, line in enumerate(err[:]):
+ if re.search("warning: W200007", line):
+ del err[index]
+
+ verify.verify_outputs("Unexpected output", output, err,
+ expected_stdout, expected_stderr)
+ verify.verify_exit_code("Unexpected return code", exit_code, expected_exit)
+ return output
+
+
+def run_and_verify_svnmover(expected_stdout, expected_stderr,
+ *varargs):
+ """Run svnmover command and check its output"""
+
+ expected_exit = 0
+ if expected_stderr is not None and expected_stderr != []:
+ expected_exit = 1
+ return run_and_verify_svnmover2(expected_stdout, expected_stderr,
+ expected_exit, *varargs)
+
+def run_and_verify_svnmover2(expected_stdout, expected_stderr,
+ expected_exit, *varargs):
+ """Run svnmover command and check its output and exit code."""
+
+ exit_code, out, err = main.run_svnmover(*varargs)
+ verify.verify_outputs("Unexpected output", out, err,
+ expected_stdout, expected_stderr)
+ verify.verify_exit_code("Unexpected return code", exit_code, expected_exit)
+ return exit_code, out, err
+
+
+def run_and_verify_svnmucc(expected_stdout, expected_stderr,
+ *varargs):
+ """Run svnmucc command and check its output"""
+
+ expected_exit = 0
+ if expected_stderr is not None and expected_stderr != []:
+ expected_exit = 1
+ return run_and_verify_svnmucc2(expected_stdout, expected_stderr,
+ expected_exit, *varargs)
+
+def run_and_verify_svnmucc2(expected_stdout, expected_stderr,
+ expected_exit, *varargs):
+ """Run svnmucc command and check its output and exit code."""
+
+ exit_code, out, err = main.run_svnmucc(*varargs)
+ verify.verify_outputs("Unexpected output", out, err,
+ expected_stdout, expected_stderr)
+ verify.verify_exit_code("Unexpected return code", exit_code, expected_exit)
+ return exit_code, out, err
+
+
+def run_and_verify_svnsync(expected_stdout, expected_stderr,
+ *varargs):
+ """Run svnsync command and check its output"""
+
+ expected_exit = 0
+ if expected_stderr is not None and expected_stderr != []:
+ expected_exit = 1
+ return run_and_verify_svnsync2(expected_stdout, expected_stderr,
+ expected_exit, *varargs)
+
+def run_and_verify_svnsync2(expected_stdout, expected_stderr,
+ expected_exit, *varargs):
+ """Run svnmucc command and check its output and exit code."""
+
+ exit_code, out, err = main.run_svnsync(*varargs)
+
+ # Ignore "consider upgrade" warnings to allow regression tests to pass
+ # when run against a 1.6 mod_dav_svn.
+ for index, line in enumerate(err[:]):
+ if re.search("warning: W200007", line):
+ del err[index]
+
+ verify.verify_outputs("Unexpected output", out, err,
+ expected_stdout, expected_stderr)
+ verify.verify_exit_code("Unexpected return code", exit_code, expected_exit)
+ return exit_code, out, err
+
+
+def load_repo(sbox, dumpfile_path = None, dump_str = None,
+ bypass_prop_validation = False, create_wc=True,
+ normalize_props = False):
+ "Loads the dumpfile into sbox"
+ if not dump_str:
+ dump_str = open(dumpfile_path, "rb").read()
+
+ # Create a virgin repos and working copy
+ main.safe_rmtree(sbox.repo_dir, 1)
+ main.safe_rmtree(sbox.wc_dir, 1)
+ main.create_repos(sbox.repo_dir)
+
+ # Load the mergetracking dumpfile into the repos, and check it out the repo
+ run_and_verify_load(sbox.repo_dir, dump_str.splitlines(True),
+ bypass_prop_validation, normalize_props)
+ if create_wc:
+ run_and_verify_svn(None, [], "co", sbox.repo_url, sbox.wc_dir)
+
+ return dump_str
+
+def expected_noop_update_output(rev):
+ """Return an ExpectedOutput object describing what we'd expect to
+ see from an update to revision REV that was effectively a no-op (no
+ server changes transmitted)."""
+ return verify.createExpectedOutput("Updating '.*':|At revision %d."
+ % (rev),
+ "no-op update")
+
+def run_and_verify_svnauthz(expected_stdout, expected_stderr,
+ expected_exit, compat_mode, *varargs):
+ """Run svnauthz command and check its output and exit code.
+ If COMPAT_MODE is True then run the command in pre-1.8
+ compatibility mode"""
+
+ if compat_mode:
+ exit_code, out, err = main.run_svnauthz_validate(*varargs)
+ else:
+ exit_code, out, err = main.run_svnauthz(*varargs)
+
+ verify.verify_outputs("Unexpected output", out, err,
+ expected_stdout, expected_stderr)
+ verify.verify_exit_code("Unexpected return code", exit_code, expected_exit)
+ return exit_code, out, err
+
+######################################################################
+# Subversion Actions
+#
+# These are all routines that invoke 'svn' in particular ways, and
+# then verify the results by comparing expected trees with actual
+# trees.
+#
+
+
+def run_and_verify_checkout(URL, wc_dir_name, output_tree, disk_tree,
+ expected_stderr=[],
+ *args, **kw):
+ """Checkout the URL into a new directory WC_DIR_NAME. *ARGS are any
+ extra optional args to the checkout subcommand.
+
+ The subcommand output will be verified against OUTPUT_TREE, and, if
+ DISK_TREE is given, the working copy itself will be verified against
+ DISK_TREE via verify_disk (to which **KW is forwarded -- see that
+ function's doc string for the supported keywords). EXPECTED_STDERR
+ is handled as in run_and_verify_svn(). Return if successful, raise
+ on failure.
+ """
+
+ # Accept either a wc.State or an already-converted old-style tree.
+ if isinstance(output_tree, wc.State):
+ output_tree = output_tree.old_tree()
+
+ # Checkout and make a tree of the output, using l:foo/p:bar
+ ### todo: svn should not be prompting for auth info when using
+ ### repositories with no auth/auth requirements
+ exit_code, output, errput = run_and_verify_svn(None, expected_stderr,
+ 'co', URL, wc_dir_name,
+ *args)
+ actual = tree.build_tree_from_checkout(output)
+
+ # Verify actual output against expected output.
+ try:
+ tree.compare_trees("output", actual, output_tree)
+ except tree.SVNTreeUnequal:
+ _log_tree_state("ACTUAL OUTPUT TREE:", actual, wc_dir_name)
+ raise
+
+ # check_props=False here; callers wanting prop checks go through
+ # verify_disk directly.
+ if disk_tree:
+ verify_disk(wc_dir_name, disk_tree, False, **kw)
+
+def run_and_verify_export(URL, export_dir_name, output_tree, disk_tree,
+ *args):
+ """Same as run_and_verify_export2 but with KEEP_EOL_STYLE set to False."""
+
+ # Thin wrapper: the fifth positional argument is keep_eol_style.
+ run_and_verify_export2(URL, export_dir_name, output_tree, disk_tree,
+ False, *args)
+
+def run_and_verify_export2(URL, export_dir_name, output_tree, disk_tree,
+ keep_eol_style=False, *args):
+ """Export the URL into a new directory EXPORT_DIR_NAME.
+
+ The subcommand output will be verified against OUTPUT_TREE,
+ and the exported copy itself will be verified against DISK_TREE.
+
+ If KEEP_EOL_STYLE is set, don't let Python normalize the EOL when
+ reading working copy contents as text files. It has no effect on
+ binary files.
+
+ Return if successful, raise on failure.
+ """
+ assert isinstance(output_tree, wc.State)
+ assert isinstance(disk_tree, wc.State)
+
+ disk_tree = disk_tree.old_tree()
+ output_tree = output_tree.old_tree()
+
+ # Export and make a tree of the output, using l:foo/p:bar
+ ### todo: svn should not be prompting for auth info when using
+ ### repositories with no auth/auth requirements
+ exit_code, output, errput = main.run_svn(None, 'export',
+ URL, export_dir_name, *args)
+ actual = tree.build_tree_from_checkout(output)
+
+ # Verify actual output against expected output.
+ try:
+ tree.compare_trees("output", actual, output_tree)
+ except tree.SVNTreeUnequal:
+ _log_tree_state("ACTUAL OUTPUT TREE:", actual, export_dir_name)
+ raise
+
+ # Create a tree by scanning the working copy. Don't ignore
+ # the .svn directories so that we generate an error if they
+ # happen to show up.
+ actual = tree.build_tree_from_wc(export_dir_name, ignore_svn=False,
+ keep_eol_style=keep_eol_style)
+
+ # Verify expected disk against actual disk.
+ try:
+ tree.compare_trees("disk", actual, disk_tree)
+ except tree.SVNTreeUnequal:
+ _log_tree_state("ACTUAL DISK TREE:", actual, export_dir_name)
+ raise
+
+
+# run_and_verify_log_xml
+
+class LogEntry:
+ """One parsed <logentry> from 'svn log --xml' output (built by
+ LogParser), with assertion helpers used by run_and_verify_log_xml."""
+
+ def __init__(self, revision, attributes=None,
+ changed_paths=None, revprops=None):
+ # int revision number of this log entry
+ self.revision = revision
+ # dict of the <logentry> element's XML attributes
+ if attributes == None:
+ self.attributes = {}
+ else:
+ self.attributes = attributes
+ # dict: changed path -> list of {'kind': ..., 'action': ...}
+ if changed_paths == None:
+ self.changed_paths = {}
+ else:
+ self.changed_paths = changed_paths
+ # dict of revision properties (e.g. 'svn:author', 'svn:log')
+ if revprops == None:
+ self.revprops = {}
+ else:
+ self.revprops = revprops
+
+ def assert_log_attrs(self, attributes):
+ """Assert that attributes is the same as this entry's attributes
+ Raises svntest.Failure if not.
+ """
+ # On mismatch, show a readable ndiff of the two pretty-printed dicts.
+ if self.attributes != attributes:
+ raise Failure('\n' + '\n'.join(difflib.ndiff(
+ pprint.pformat(attributes).splitlines(),
+ pprint.pformat(self.attributes).splitlines())))
+
+ def assert_changed_paths(self, changed_paths):
+ """Assert that changed_paths is the same as this entry's changed_paths
+ Raises svntest.Failure if not.
+ """
+ if self.changed_paths != changed_paths:
+ raise Failure('\n' + '\n'.join(difflib.ndiff(
+ pprint.pformat(changed_paths).splitlines(),
+ pprint.pformat(self.changed_paths).splitlines())))
+
+ def assert_revprops(self, revprops):
+ """Assert that the dict revprops is the same as this entry's revprops.
+
+ Raises svntest.Failure if not.
+ """
+ if self.revprops != revprops:
+ raise Failure('\n' + '\n'.join(difflib.ndiff(
+ pprint.pformat(revprops).splitlines(),
+ pprint.pformat(self.revprops).splitlines())))
+
+class LogParser:
+ """Expat-based parser for 'svn log --xml' output.
+
+ Element handlers are dispatched by name: an element <foo> is routed
+ to methods foo_start(attrs) and foo_end() via getattr, so adding
+ support for a new element only requires defining those methods.
+ Unhandled elements raise AttributeError unless registered with
+ ignore_elements/ignore_tags."""
+
+ def parse(self, data):
+ """Return a list of LogEntrys parsed from the sequence of strings data.
+
+ This is the only method of interest to callers.
+ """
+ try:
+ for i in data:
+ self.parser.Parse(i)
+ # Final call with isfinal=True flushes the parser.
+ self.parser.Parse('', True)
+ except xml.parsers.expat.ExpatError as e:
+ raise verify.SVNUnexpectedStdout('%s\n%s\n' % (e, ''.join(data),))
+ return self.entries
+
+ def __init__(self):
+ # for expat
+ self.parser = xml.parsers.expat.ParserCreate()
+ self.parser.StartElementHandler = self.handle_start_element
+ self.parser.EndElementHandler = self.handle_end_element
+ self.parser.CharacterDataHandler = self.handle_character_data
+ # Ignore some things.
+ self.ignore_elements('log', 'paths', 'revprops')
+ self.ignore_tags('logentry_end', 'author_start', 'date_start', 'msg_start')
+ # internal state
+ self.cdata = []
+ self.property = None
+ self.kind = None
+ self.action = None
+ # the result
+ self.entries = []
+
+ def ignore(self, *args, **kwargs):
+ # Shared no-op handler; discards any accumulated character data.
+ del self.cdata[:]
+ def ignore_tags(self, *args):
+ # Install self.ignore under each given handler-method name.
+ for tag in args:
+ setattr(self, tag, self.ignore)
+ def ignore_elements(self, *args):
+ # Ignore both the start and end handlers of each element.
+ for element in args:
+ self.ignore_tags(element + '_start', element + '_end')
+
+ # expat handlers
+ def handle_start_element(self, name, attrs):
+ getattr(self, name + '_start')(attrs)
+ def handle_end_element(self, name):
+ getattr(self, name + '_end')()
+ def handle_character_data(self, data):
+ # expat may deliver cdata in several chunks; accumulate them.
+ self.cdata.append(data)
+
+ # element handler utilities
+ def use_cdata(self):
+ # Join, strip and reset the accumulated character data.
+ result = ''.join(self.cdata).strip()
+ del self.cdata[:]
+ return result
+ def svn_prop(self, name):
+ # Record accumulated cdata as revprop 'svn:NAME' on the latest entry.
+ self.entries[-1].revprops['svn:' + name] = self.use_cdata()
+
+ # element handlers
+ def logentry_start(self, attrs):
+ self.entries.append(LogEntry(int(attrs['revision']), attrs))
+ def author_end(self):
+ self.svn_prop('author')
+ def msg_end(self):
+ self.svn_prop('log')
+ def date_end(self):
+ # svn:date could be anything, so just note its presence.
+ self.cdata[:] = ['']
+ self.svn_prop('date')
+ def property_start(self, attrs):
+ self.property = attrs['name']
+ def property_end(self):
+ self.entries[-1].revprops[self.property] = self.use_cdata()
+ def path_start(self, attrs):
+ self.kind = attrs['kind']
+ self.action = attrs['action']
+ def path_end(self):
+ self.entries[-1].changed_paths[self.use_cdata()] = [{'kind': self.kind,
+ 'action': self.action}]
+
+def run_and_verify_log_xml(expected_log_attrs=None,
+ expected_paths=None, expected_revprops=None,
+ expected_stdout=None, expected_stderr=None,
+ args=[]):
+ """Call run_and_verify_svn with log --xml and args (optional) as command
+ arguments, and pass along expected_stdout, and expected_stderr.
+
+ expected_paths checking is not yet implemented.
+
+ expected_log_attrs is an optional list of dicts, compared to each revisions's
+ logentry attributes. The list must be in the same order the log entries
+ come in.
+
+ expected_revprops is an optional list of dicts, compared to each
+ revision's revprops. The list must be in the same order the log entries
+ come in. Any svn:date revprops in the dicts must be '' in order to
+ match, as the actual dates could be anything.
+
+ expected_paths and expected_revprops are ignored if expected_stdout or
+ expected_stderr is specified.
+ """
+ # NOTE(review): 'message' is computed but never used below.
+ message = ' '.join(args)
+
+ # We'll parse the output unless the caller specifies expected_stderr or
+ # expected_stdout for run_and_verify_svn.
+ parse = True
+ if expected_stderr == None:
+ expected_stderr = []
+ else:
+ parse = False
+ if expected_stdout != None:
+ parse = False
+
+ # Need -v so the XML contains the changed-path information.
+ log_args = list(args)
+ if expected_paths != None:
+ log_args.append('-v')
+
+ (exit_code, stdout, stderr) = run_and_verify_svn(
+ expected_stdout, expected_stderr,
+ 'log', '--xml', *log_args)
+ if not parse:
+ return
+
+ # Compare each parsed LogEntry against the caller's expectations,
+ # relying on the expected lists being in log-output order.
+ entries = LogParser().parse(stdout)
+ for index in range(len(entries)):
+ entry = entries[index]
+ if expected_revprops != None:
+ entry.assert_revprops(expected_revprops[index])
+ if expected_paths != None:
+ entry.assert_changed_paths(expected_paths[index])
+ if expected_log_attrs != None:
+ entry.assert_log_attrs(expected_log_attrs[index])
+
+
+def verify_update(actual_output,
+ actual_mergeinfo_output,
+ actual_elision_output,
+ wc_dir_name,
+ output_tree,
+ mergeinfo_output_tree,
+ elision_output_tree,
+ disk_tree,
+ status_tree,
+ check_props=False,
+ keep_eol_style=False,
+ extra_files=None):
+ """Verify update of WC_DIR_NAME.
+
+ The subcommand output (found in ACTUAL_OUTPUT, ACTUAL_MERGEINFO_OUTPUT,
+ and ACTUAL_ELISION_OUTPUT) will be verified against OUTPUT_TREE,
+ MERGEINFO_OUTPUT_TREE, and ELISION_OUTPUT_TREE respectively (if any of
+ these is provided, they may be None in which case a comparison is not
+ done). The working copy itself will be verified against DISK_TREE (if
+ provided), and the working copy's 'svn status' output will be verified
+ against STATUS_TREE (if provided). (This is a good way to check that
+ revision numbers were bumped.)
+
+ If KEEP_EOL_STYLE is set, don't let Python normalize the EOL when
+ reading working copy contents as text files. It has no effect on
+ binary files.
+
+ Return if successful, raise on failure.
+
+ For the comparison with DISK_TREE, EXTRA_FILES (if given) is passed to
+ verify_disk, which installs conflict-file detection handlers for
+ tree.compare_trees -- see verify_disk's doc string for details. If
+ CHECK_PROPS is set, then disk comparison will examine props."""
+
+ # Normalize every wc.State argument to the old-style tree form
+ # expected by tree.compare_trees.
+ if isinstance(actual_output, wc.State):
+ actual_output = actual_output.old_tree()
+ if isinstance(actual_mergeinfo_output, wc.State):
+ actual_mergeinfo_output = actual_mergeinfo_output.old_tree()
+ if isinstance(actual_elision_output, wc.State):
+ actual_elision_output = actual_elision_output.old_tree()
+ if isinstance(output_tree, wc.State):
+ output_tree = output_tree.old_tree()
+ if isinstance(mergeinfo_output_tree, wc.State):
+ mergeinfo_output_tree = mergeinfo_output_tree.old_tree()
+ if isinstance(elision_output_tree, wc.State):
+ elision_output_tree = elision_output_tree.old_tree()
+
+ # Verify actual output against expected output.
+ if output_tree:
+ try:
+ tree.compare_trees("output", actual_output, output_tree)
+ except tree.SVNTreeUnequal:
+ _log_tree_state("ACTUAL OUTPUT TREE:", actual_output, wc_dir_name)
+ raise
+
+ # Verify actual mergeinfo recording output against expected output.
+ if mergeinfo_output_tree:
+ try:
+ tree.compare_trees("mergeinfo_output", actual_mergeinfo_output,
+ mergeinfo_output_tree)
+ except tree.SVNTreeUnequal:
+ _log_tree_state("ACTUAL MERGEINFO OUTPUT TREE:", actual_mergeinfo_output,
+ wc_dir_name)
+ raise
+
+ # Verify actual mergeinfo elision output against expected output.
+ if elision_output_tree:
+ try:
+ tree.compare_trees("elision_output", actual_elision_output,
+ elision_output_tree)
+ except tree.SVNTreeUnequal:
+ _log_tree_state("ACTUAL ELISION OUTPUT TREE:", actual_elision_output,
+ wc_dir_name)
+ raise
+
+ # Create a tree by scanning the working copy, and verify it
+ if disk_tree:
+ verify_disk(wc_dir_name, disk_tree, check_props,
+ extra_files, keep_eol_style)
+
+ # Verify via 'status' command too, if possible.
+ if status_tree:
+ run_and_verify_status(wc_dir_name, status_tree)
+
+
+def verify_disk(wc_dir_name, disk_tree, check_props=False,
+ extra_files=None, keep_eol_style=False):
+ """Verify WC_DIR_NAME against DISK_TREE. If CHECK_PROPS is set,
+ the comparison will examine props.
+
+ If EXTRA_FILES is given, conflict-file detection handlers from
+ svntest.tree are installed and EXTRA_FILES is used as their baton.
+
+ If KEEP_EOL_STYLE is set, don't let Python normalize the EOL when
+ reading working copy contents as text files. It has no effect on
+ binary files.
+
+ Returns if successful, raises on failure."""
+
+ singleton_handler_a = None
+ # NOTE(review): the trailing comma makes this the tuple (None,), not
+ # None -- almost certainly unintended. It is harmless in practice
+ # because the baton is only consulted when singleton_handler_a is set,
+ # which also overwrites a_baton below.
+ a_baton = None,
+ singleton_handler_b = None
+ b_baton = None
+ done_a = None
+
+ if extra_files:
+ singleton_handler_a = svntest.tree.detect_conflict_files
+ done_a = svntest.tree.detect_conflict_files_done
+ a_baton = extra_files
+
+ if isinstance(disk_tree, wc.State):
+ disk_tree = disk_tree.old_tree()
+
+ actual_disk = tree.build_tree_from_wc(wc_dir_name, check_props,
+ keep_eol_style=keep_eol_style)
+ try:
+ tree.compare_trees("disk", actual_disk, disk_tree,
+ singleton_handler_a, a_baton,
+ singleton_handler_b, b_baton)
+ except tree.SVNTreeUnequal:
+ _log_tree_state("EXPECTED DISK TREE:", disk_tree)
+ _log_tree_state("ACTUAL DISK TREE:", actual_disk)
+ raise
+
+ # Let the conflict-file detector verify that every expected extra
+ # file was actually seen.
+ if done_a:
+ done_a(a_baton)
+
+
+def run_and_verify_update(wc_dir_name,
+ output_tree, disk_tree, status_tree,
+ expected_stderr=[],
+ check_props = False,
+ *args, **kw):
+ """Same as run_and_verify_update2 but with keep_eol_style set to False."""
+ # Thin wrapper: the False below is keep_eol_style.
+ run_and_verify_update2(wc_dir_name,
+ output_tree, disk_tree, status_tree,
+ expected_stderr,
+ check_props,
+ False,
+ *args, **kw)
+
+
+def run_and_verify_update2(wc_dir_name,
+ output_tree, disk_tree, status_tree,
+ expected_stderr=[],
+ check_props = False,
+ keep_eol_style = False,
+ *args, **kw):
+
+ """Update WC_DIR_NAME. *ARGS are any extra optional args to the
+ update subcommand. NOTE: If *ARGS is specified at all, explicit
+ target paths must be passed in *ARGS as well (or a default `.' will
+ be chosen by the 'svn' binary). This allows the caller to update
+ many items in a single working copy dir, but still verify the entire
+ working copy dir.
+
+ EXPECTED_STDERR is handled as in run_and_verify_svn().
+
+ If OUTPUT_TREE is not None, the subcommand output will be verified
+ against OUTPUT_TREE. If DISK_TREE is not None, the working copy
+ itself will be verified against DISK_TREE. If STATUS_TREE is not
+ None, the 'svn status' output will be verified against STATUS_TREE.
+ (This is a good way to check that revision numbers were bumped.)
+
+ If CHECK_PROPS is set, then disk comparison will examine props.
+
+ If KEEP_EOL_STYLE is set, don't let Python normalize the EOL when
+ reading working copy contents as text files. It has no effect on
+ binary files.
+
+ Return if successful, raise on failure."""
+
+ # Update and make a tree of the output.
+ if len(args) == 0:
+ args = (wc_dir_name,)
+
+ exit_code, output, errput = run_and_verify_svn(None, expected_stderr, 'up', *args)
+
+ # Delegate all tree/status verification to verify_update.
+ actual = wc.State.from_checkout(output)
+ verify_update(actual, None, None, wc_dir_name,
+ output_tree, None, None, disk_tree, status_tree,
+ check_props, keep_eol_style=keep_eol_style, **kw)
+
+
+def run_and_parse_info(*args):
+ """Run 'svn info ARGS' and parse its output into a list of dicts,
+ one dict per reported node.
+
+ Keys are the before-the-colon parts of each output line; values are
+ the after-the-colon strings, except that multi-line lock comments
+ (announced as 'Key (N lines)') become a list of N lines."""
+
+ # the returned array
+ all_infos = []
+
+ # per-target variables
+ iter_info = {}
+ prev_key = None
+ lock_comment_lines = 0
+ lock_comments = []
+
+ exit_code, output, errput = main.run_svn(None, 'info', *args)
+
+ for line in output:
+ line = line[:-1] # trim '\n'
+
+ if lock_comment_lines > 0:
+ # mop up any lock comment lines
+ lock_comments.append(line)
+ lock_comment_lines = lock_comment_lines - 1
+ if lock_comment_lines == 0:
+ iter_info[prev_key] = lock_comments
+ elif len(line) == 0:
+ # separator line between items
+ all_infos.append(iter_info)
+ iter_info = {}
+ prev_key = None
+ lock_comment_lines = 0
+ lock_comments = []
+ elif line[0].isspace():
+ # continuation line (for tree conflicts)
+ iter_info[prev_key] += line[1:]
+ else:
+ # normal line
+ key, value = line.split(':', 1)
+
+ if re.search(' \(\d+ lines?\)$', key):
+ # numbered continuation lines
+ match = re.match('^(.*) \((\d+) lines?\)$', key)
+ key = match.group(1)
+ lock_comment_lines = int(match.group(2))
+ elif len(value) > 1:
+ # normal normal line
+ # value[1:] skips the single space after the colon.
+ iter_info[key] = value[1:]
+ else:
+ ### originally added for "Tree conflict:\n" lines;
+ ### tree-conflicts output format has changed since then
+ # continuation lines are implicit (prefixed by whitespace)
+ iter_info[key] = ''
+ prev_key = key
+
+ # NOTE(review): an item not followed by a blank separator line would
+ # never be appended; presumably 'svn info' always emits a trailing
+ # blank line per node.
+ return all_infos
+
+def run_and_verify_info(expected_infos, *args):
+ """Run 'svn info' with the arguments in *ARGS and verify the results
+ against expected_infos. The latter should be a list of dicts, one dict
+ per reported node, in the order in which the 'Path' fields of the output
+ will appear after sorting them as Python strings. (The dicts in
+ EXPECTED_INFOS, however, need not have a 'Path' key.)
+
+ In the dicts, each key is the before-the-colon part of the 'svn info' output,
+ and each value is either None (meaning that the key should *not* appear in
+ the 'svn info' output) or a regex matching the output value. Output lines
+ not matching a key in the dict are ignored.
+
+ Return if successful, raise on failure."""
+
+ # Sort actual nodes by path so they line up with EXPECTED_INFOS.
+ actual_infos = run_and_parse_info(*args)
+ actual_infos.sort(key=lambda info: info['Path'])
+
+ try:
+ # zip() won't complain, so check this manually
+ if len(actual_infos) != len(expected_infos):
+ raise verify.SVNUnexpectedStdout(
+ "Expected %d infos, found %d infos"
+ % (len(expected_infos), len(actual_infos)))
+
+ for actual, expected in zip(actual_infos, expected_infos):
+ # compare dicts
+ path = actual['Path']
+ for key, value in expected.items():
+ assert ':' not in key # caller passed impossible expectations?
+ # value None means "key must be absent".
+ if value is None and key in actual:
+ raise main.SVNLineUnequal("On '%s': Found unexpected key '%s'\n Value '%s'"
+ % (path, key, actual[key]))
+ if value is not None and key not in actual:
+ raise main.SVNLineUnequal("On '%s': Expected key '%s' not found\n Expected value '%s'"
+ % (path, key, value))
+ if value is not None and not re.match(value, actual[key]):
+ raise verify.SVNUnexpectedStdout("On '%s': Values of key '%s' don't match:\n"
+ " Expected: '%s' (regex)\n"
+ " Found: '%s' (string)\n"
+ % (path, key, value, actual[key]))
+
+ except:
+ # Dump both sides for debugging, then re-raise the original error.
+ sys.stderr.write("Bad 'svn info' output:\n"
+ " Received: %s\n"
+ " Expected: %s\n"
+ % (actual_infos, expected_infos))
+ raise
+
+def run_and_verify_merge(dir, rev1, rev2, url1, url2,
+ output_tree,
+ mergeinfo_output_tree,
+ elision_output_tree,
+ disk_tree, status_tree, skip_tree,
+ expected_stderr = [],
+ check_props = False,
+ dry_run = True,
+ *args, **kw):
+ """Same as run_and_verify_merge2 but with keep_eol_style set to False. """
+
+ # Thin wrapper: the False below is keep_eol_style.
+ run_and_verify_merge2(dir, rev1, rev2, url1, url2,
+ output_tree,
+ mergeinfo_output_tree,
+ elision_output_tree,
+ disk_tree, status_tree, skip_tree,
+ expected_stderr,
+ check_props,
+ dry_run,
+ False,
+ *args, **kw)
+
+def run_and_verify_merge2(dir, rev1, rev2, url1, url2,
+ output_tree,
+ mergeinfo_output_tree,
+ elision_output_tree,
+ disk_tree, status_tree, skip_tree,
+ expected_stderr = [],
+ check_props = False,
+ dry_run = True,
+ keep_eol_style = False,
+ *args, **kw):
+ """Run 'svn merge URL1@REV1 URL2@REV2 DIR' if URL2 is not None
+ (for a three-way merge between URLs and WC).
+
+ If URL2 is None, run 'svn merge -rREV1:REV2 URL1 DIR'. If both REV1
+ and REV2 are None, leave off the '-r' argument.
+
+ The subcommand output will be verified against OUTPUT_TREE. Output
+ related to mergeinfo notifications will be verified against
+ MERGEINFO_OUTPUT_TREE if that is not None. Output related to mergeinfo
+ elision will be verified against ELISION_OUTPUT_TREE if that is not None.
+ The working copy itself will be verified against DISK_TREE. If optional
+ STATUS_TREE is given, then 'svn status' output will be compared. The
+ 'skipped' merge output will be compared to SKIP_TREE.
+
+ For the DISK_TREE verification, **KW is forwarded to verify_update
+ (which accepts e.g. EXTRA_FILES) -- see that function's doc string
+ for more details.
+
+ If CHECK_PROPS is set, then disk comparison will examine props.
+
+ If DRY_RUN is set then a --dry-run merge will be carried out first and
+ the output compared with that of the full merge.
+
+ If KEEP_EOL_STYLE is set, don't let Python normalize the EOL when
+ reading working copy contents as text files. It has no effect on
+ binary files.
+
+ Return if successful, raise on failure.
+
+ *ARGS are any extra optional args to the merge subcommand.
+ NOTE: If *ARGS is specified at all, an explicit target path must be passed
+ in *ARGS as well. This allows the caller to merge into single items inside
+ the working copy, but still verify the entire working copy dir. """
+
+ # Build the merge command line per the docstring's URL2/REV rules.
+ merge_command = [ "merge" ]
+ if url2:
+ merge_command.extend((url1 + "@" + str(rev1), url2 + "@" + str(rev2)))
+ else:
+ if not (rev1 is None and rev2 is None):
+ merge_command.append("-r" + str(rev1) + ":" + str(rev2))
+ merge_command.append(url1)
+ if len(args) == 0:
+ merge_command.append(dir)
+ merge_command = tuple(merge_command)
+
+ if dry_run:
+ # Snapshot the disk before and after the --dry-run pass; a dry run
+ # must not modify the working copy at all.
+ pre_disk = tree.build_tree_from_wc(dir, keep_eol_style=keep_eol_style)
+ dry_run_command = merge_command + ('--dry-run',)
+ dry_run_command = dry_run_command + args
+ exit_code, out_dry, err_dry = run_and_verify_svn(None, expected_stderr,
+ *dry_run_command)
+ post_disk = tree.build_tree_from_wc(dir, keep_eol_style=keep_eol_style)
+ try:
+ tree.compare_trees("disk", post_disk, pre_disk)
+ except tree.SVNTreeError:
+ logger.warn("=============================================================")
+ logger.warn("Dry-run merge altered working copy")
+ logger.warn("=============================================================")
+ raise
+
+
+ # Update and make a tree of the output.
+ merge_command = merge_command + args
+ exit_code, out, err = run_and_verify_svn(None, expected_stderr, *merge_command)
+
+ # Split the output into that related to application of the actual diff
+ # and that related to the recording of mergeinfo describing the merge.
+ merge_diff_out = []
+ mergeinfo_notification_out = []
+ mergeinfo_elision_out = []
+ mergeinfo_notifications = False
+ elision_notifications = False
+ # State machine keyed on the '--- ...' banner lines: each banner
+ # selects which bucket subsequent lines fall into.
+ for line in out:
+ if line.startswith('--- Recording'):
+ mergeinfo_notifications = True
+ elision_notifications = False
+ elif line.startswith('--- Eliding'):
+ mergeinfo_notifications = False
+ elision_notifications = True
+ elif line.startswith('--- Merging') or \
+ line.startswith('--- Reverse-merging') or \
+ line.startswith('Summary of conflicts') or \
+ line.startswith('Skipped missing target'):
+ mergeinfo_notifications = False
+ elision_notifications = False
+
+ if mergeinfo_notifications:
+ mergeinfo_notification_out.append(line)
+ elif elision_notifications:
+ mergeinfo_elision_out.append(line)
+ else:
+ merge_diff_out.append(line)
+
+ if dry_run and merge_diff_out != out_dry:
+ # Due to the way ra_serf works, it's possible that the dry-run and
+ # real merge operations did the same thing, but the output came in
+ # a different order. Let's see if maybe that's the case by comparing
+ # the outputs as unordered sets rather than as lists.
+ #
+ # This now happens for other RA layers with modern APR because the
+ # hash order now varies.
+ #
+ # The different orders of the real and dry-run merges may cause
+ # the "Merging rX through rY into" lines to be duplicated a
+ # different number of times in the two outputs. The list-set
+ # conversion removes duplicates so these differences are ignored.
+ # It also removes "U some/path" duplicate lines. Perhaps we
+ # should avoid that?
+ out_copy = set(merge_diff_out[:])
+ out_dry_copy = set(out_dry[:])
+
+ if out_copy != out_dry_copy:
+ logger.warn("=============================================================")
+ logger.warn("Merge outputs differ")
+ logger.warn("The dry-run merge output:")
+ for x in out_dry:
+ logger.warn(x)
+ logger.warn("The full merge output:")
+ for x in merge_diff_out:
+ logger.warn(x)
+ logger.warn("=============================================================")
+ raise main.SVNUnmatchedError
+
+ # Handlers for skip-tree comparison: anything in SKIP_TREE but not in
+ # the actual output is "missing"; the reverse is "extra".
+ def missing_skip(a, b):
+ logger.warn("=============================================================")
+ logger.warn("Merge failed to skip: %s", a.path)
+ logger.warn("=============================================================")
+ raise Failure
+ def extra_skip(a, b):
+ logger.warn("=============================================================")
+ logger.warn("Merge unexpectedly skipped: %s", a.path)
+ logger.warn("=============================================================")
+ raise Failure
+
+ myskiptree = tree.build_tree_from_skipped(out)
+ if isinstance(skip_tree, wc.State):
+ skip_tree = skip_tree.old_tree()
+ try:
+ tree.compare_trees("skip", myskiptree, skip_tree,
+ extra_skip, None, missing_skip, None)
+ except tree.SVNTreeUnequal:
+ _log_tree_state("ACTUAL SKIP TREE:", myskiptree, dir)
+ raise
+
+ # Verify the three output buckets plus disk/status via verify_update.
+ actual_diff = svntest.wc.State.from_checkout(merge_diff_out, False)
+ actual_mergeinfo = svntest.wc.State.from_checkout(mergeinfo_notification_out,
+ False)
+ actual_elision = svntest.wc.State.from_checkout(mergeinfo_elision_out,
+ False)
+ verify_update(actual_diff, actual_mergeinfo, actual_elision, dir,
+ output_tree, mergeinfo_output_tree, elision_output_tree,
+ disk_tree, status_tree,
+ check_props, keep_eol_style=keep_eol_style, **kw)
+
+
+def run_and_verify_patch(dir, patch_path,
+ output_tree, disk_tree, status_tree, skip_tree,
+ error_re_string=None,
+ check_props=False,
+ dry_run=True,
+ *args, **kw):
+ """Same as run_and_verify_patch2 but with KEEP_EOL_STYLE set to False."""
+
+ # Thin wrapper: the False below is keep_eol_style.
+ run_and_verify_patch2(dir, patch_path,
+ output_tree, disk_tree, status_tree, skip_tree,
+ error_re_string,
+ check_props,
+ dry_run,
+ False,
+ *args, **kw)
+
+def run_and_verify_patch2(dir, patch_path,
+ output_tree, disk_tree, status_tree, skip_tree,
+ error_re_string=None,
+ check_props=False,
+ dry_run=True,
+ keep_eol_style=False,
+ *args, **kw):
+ """Run 'svn patch patch_path DIR'.
+
+ If ERROR_RE_STRING, 'svn patch' must exit with error, and the error
+ message must match regular expression ERROR_RE_STRING.
+
+ The subcommand output will be verified against OUTPUT_TREE, and the
+ working copy itself will be verified against DISK_TREE. If optional
+ STATUS_TREE is given, then 'svn status' output will be compared.
+ The 'skipped' merge output will be compared to SKIP_TREE.
+
+ If CHECK_PROPS is set, then disk comparison will examine props.
+
+ If DRY_RUN is set then a --dry-run patch will be carried out first and
+ the output compared with that of the full patch application.
+
+ If KEEP_EOL_STYLE is set, don't let Python normalize the EOL when
+ reading working copy contents as text files. It has no effect on
+ binary files.
+
+ Returns if successful, raises on failure."""
+ patch_command = [ "patch" ]
+ patch_command.append(patch_path)
+ patch_command.append(dir)
+ patch_command = tuple(patch_command)
+
+ if dry_run:
+ # Snapshot the disk before/after; --dry-run must not touch the WC.
+ pre_disk = tree.build_tree_from_wc(dir, keep_eol_style=keep_eol_style)
+ dry_run_command = patch_command + ('--dry-run',)
+ dry_run_command = dry_run_command + args
+ exit_code, out_dry, err_dry = main.run_svn(error_re_string,
+ *dry_run_command)
+ post_disk = tree.build_tree_from_wc(dir, keep_eol_style=keep_eol_style)
+ try:
+ tree.compare_trees("disk", post_disk, pre_disk)
+ except tree.SVNTreeError:
+ logger.warn("=============================================================")
+ logger.warn("'svn patch --dry-run' altered working copy")
+ logger.warn("=============================================================")
+ raise
+
+ # Update and make a tree of the output.
+ patch_command = patch_command + args
+ exit_code, out, err = main.run_svn(True, *patch_command)
+
+ if error_re_string:
+ # At least one stderr line must match the expected error regex.
+ rm = re.compile(error_re_string)
+ match = None
+ for line in err:
+ match = rm.search(line)
+ if match:
+ break
+ if not match:
+ raise main.SVNUnmatchedError
+ elif err:
+ # No error expected, yet stderr was non-empty.
+ logger.warn("UNEXPECTED STDERR:")
+ for x in err:
+ logger.warn(x)
+ raise verify.SVNUnexpectedStderr
+
+ if dry_run and out != out_dry:
+ # APR hash order means the output order can vary, assume everything is OK
+ # if only the order changes.
+ out_dry_expected = svntest.verify.UnorderedOutput(out)
+ verify.compare_and_display_lines('dry-run patch output not as expected',
+ '', out_dry_expected, out_dry)
+
+ # Handlers for skip-tree comparison (see run_and_verify_merge2).
+ def missing_skip(a, b):
+ logger.warn("=============================================================")
+ logger.warn("'svn patch' failed to skip: %s", a.path)
+ logger.warn("=============================================================")
+ raise Failure
+ def extra_skip(a, b):
+ logger.warn("=============================================================")
+ logger.warn("'svn patch' unexpectedly skipped: %s", a.path)
+ logger.warn("=============================================================")
+ raise Failure
+
+ myskiptree = tree.build_tree_from_skipped(out)
+ if isinstance(skip_tree, wc.State):
+ skip_tree = skip_tree.old_tree()
+ tree.compare_trees("skip", myskiptree, skip_tree,
+ extra_skip, None, missing_skip, None)
+
+ mytree = tree.build_tree_from_checkout(out, 0)
+
+ # when the expected output is a list, we want a line-by-line
+ # comparison to happen instead of a tree comparison
+ if (isinstance(output_tree, list)
+ or isinstance(output_tree, verify.UnorderedOutput)):
+ verify.verify_outputs(None, out, err, output_tree, error_re_string)
+ output_tree = None
+
+ verify_update(mytree, None, None, dir,
+ output_tree, None, None, disk_tree, status_tree,
+ check_props=check_props, keep_eol_style=keep_eol_style,
+ **kw)
+
+
+def run_and_verify_mergeinfo(error_re_string = None,
+ expected_output = [],
+ *args):
+ """Run 'svn mergeinfo ARGS', and compare the result against
+ EXPECTED_OUTPUT, a list of string representations of revisions
+ expected in the output. Raise an exception if an unexpected
+ output is encountered."""
+
+ mergeinfo_command = ["mergeinfo"]
+ mergeinfo_command.extend(args)
+ exit_code, out, err = main.run_svn(error_re_string, *mergeinfo_command)
+
+ if error_re_string:
+ # Wrap the regex so it can match anywhere in the stderr line.
+ if not error_re_string.startswith(".*"):
+ error_re_string = ".*(" + error_re_string + ")"
+ expected_err = verify.RegexOutput(error_re_string, match_all=False)
+ verify.verify_outputs(None, None, err, None, expected_err)
+ return
+
+ # Drop the first character of each line (presumably the 'r' revision
+ # prefix -- TODO confirm against 'svn mergeinfo' output format) and
+ # filter out empty lines.
+ out = [_f for _f in [x.rstrip()[1:] for x in out] if _f]
+ extra_out = []
+ if out != expected_output:
+ # Compute the symmetric difference for a helpful error message.
+ exp_hash = dict.fromkeys(expected_output)
+ for rev in out:
+ if rev in exp_hash:
+ del(exp_hash[rev])
+ else:
+ extra_out.append(rev)
+ extra_exp = list(exp_hash.keys())
+ raise Exception("Unexpected 'svn mergeinfo' output:\n"
+ " expected but not found: %s\n"
+ " found but not expected: %s"
+ % (', '.join([str(x) for x in extra_exp]),
+ ', '.join([str(x) for x in extra_out])))
+
+
+def run_and_verify_switch(wc_dir_name,
+ wc_target,
+ switch_url,
+ output_tree, disk_tree, status_tree,
+ expected_stderr = [],
+ check_props = False,
+ *args, **kw):
+
+ """Switch WC_TARGET (in working copy dir WC_DIR_NAME) to SWITCH_URL.
+
+ The subcommand output will be verified against OUTPUT_TREE, and the
+ working copy itself will be verified against DISK_TREE. If optional
+ STATUS_TREE is given, then 'svn status' output will be
+ compared. (This is a good way to check that revision numbers were
+ bumped.)
+
+ If CHECK_PROPS is set, then disk comparison will examine props.
+
+ EXPECTED_STDERR is handled as in run_and_verify_svn(); **KW is
+ forwarded to verify_update.
+
+ Return if successful, raise on failure."""
+
+ # Update and make a tree of the output.
+ exit_code, output, errput = run_and_verify_svn(None, expected_stderr,
+ 'switch', switch_url,
+ wc_target, *args)
+ actual = wc.State.from_checkout(output)
+
+ verify_update(actual, None, None, wc_dir_name,
+ output_tree, None, None, disk_tree, status_tree,
+ check_props, **kw)
+
+def process_output_for_commit(output, error_re_string):
+ """Helper for run_and_verify_commit(), also used in the factory.
+
+ Strips the trailing 'Committed/Imported revision N.' line (verifying
+ the commit succeeded unless ERROR_RE_STRING says an error was
+ expected) and any 'Transmitting file data ...' line, while preserving
+ external-removal lines. Returns the remaining output lines."""
+ # Remove the final output line, and verify that the commit succeeded.
+ lastline = ""
+ rest = []
+
+ def external_removal(line):
+ return line.startswith('Removing external') \
+ or line.startswith('Removed external')
+
+ if len(output):
+ lastline = output.pop().strip()
+
+ # External-removal lines can trail the commit message; set them
+ # aside (in 'rest') and keep looking for the real final line.
+ while len(output) and external_removal(lastline):
+ rest.append(lastline)
+ lastline = output.pop().strip()
+
+ cm = re.compile("(Committed|Imported) revision [0-9]+.")
+ match = cm.search(lastline)
+ if not match and not error_re_string:
+ logger.warn("ERROR: commit did not succeed.")
+ logger.warn("The final line from 'svn ci' was:")
+ logger.warn(lastline)
+ raise main.SVNCommitFailure
+
+ # The new 'final' line in the output is either a regular line that
+ # mentions {Adding, Deleting, Sending, ...}, or it could be a line
+ # that says "Transmitting file data ...". If the latter case, we
+ # want to remove the line from the output; it should be ignored when
+ # building a tree.
+ if len(output):
+ lastline = output.pop()
+
+ tm = re.compile("Transmitting file data.+")
+ match = tm.search(lastline)
+ if not match:
+ # whoops, it was important output, put it back.
+ output.append(lastline)
+
+ # Re-attach the external-removal lines we set aside above.
+ if len(rest):
+ output.extend(rest)
+
+ return output
+
+
def run_and_verify_commit(wc_dir_name, output_tree, status_tree,
                          expected_stderr=[],
                          *args):
  """Commit and verify results within working copy WC_DIR_NAME,
  sending ARGS to the commit subcommand.

  The subcommand output will be verified against OUTPUT_TREE.  If
  optional STATUS_TREE is given, then 'svn status' output will
  be compared.  (This is a good way to check that revision numbers
  were bumped.)

  EXPECTED_STDERR is handled as in run_and_verify_svn()

  If ARGS is empty, the whole working copy WC_DIR_NAME is committed,
  and a default log message is supplied unless ARGS already carries
  -m or -F.

  Return if successful, raise on failure."""

  if isinstance(output_tree, wc.State):
    output_tree = output_tree.old_tree()

  # Commit.
  if len(args) == 0:
    args = (wc_dir_name,)
  # Supply a default log message unless the caller provided one.
  if '-m' not in args and '-F' not in args:
    args = list(args) + ['-m', 'log msg']
  exit_code, output, errput = run_and_verify_svn(None, expected_stderr,
                                                 'ci', *args)

  # Convert the output into a tree.
  output = process_output_for_commit(output, expected_stderr)
  actual = tree.build_tree_from_commit(output)

  # Verify actual output against expected output.
  if output_tree:
    try:
      tree.compare_trees("output", actual, output_tree)
    except tree.SVNTreeError:
      verify.display_trees("Output of commit is unexpected",
                           "OUTPUT TREE", output_tree, actual)
      _log_tree_state("ACTUAL OUTPUT TREE:", actual, wc_dir_name)
      raise

  # Verify via 'status' command too, if possible.
  if status_tree:
    run_and_verify_status(wc_dir_name, status_tree)
+
+
# This function always passes '-q' to the status command, which
# suppresses the printing of any unversioned or nonexistent items.
def run_and_verify_status(wc_dir_name, status_tree, no_entries=False):
  """Run 'status' on WC_DIR_NAME and compare it with the
  expected STATUS_TREE (which must be a wc.State; TypeError otherwise).
  If NO_ENTRIES is True, skip the secondary entries-based comparison.
  Returns on success, raises on failure."""

  if not isinstance(status_tree, wc.State):
    raise TypeError('wc.State tree expected')

  # '-v -u -q': verbose, contact the repository, and quiet (hide
  # unversioned/nonexistent items).
  exit_code, output, errput = main.run_svn(None, 'status', '-v', '-u', '-q',
                                           wc_dir_name)

  actual_status = svntest.wc.State.from_status(output, wc_dir=wc_dir_name)

  # Verify actual output against expected output.
  try:
    status_tree.compare_and_display('status', actual_status)
  except tree.SVNTreeError:
    _log_tree_state("ACTUAL STATUS TREE:", actual_status.old_tree(),
                    wc_dir_name)
    raise

  if no_entries:
    return

  # if we have an output State, and we can/are-allowed to create an
  # entries-based State, then compare the two.
  actual_entries = wc.State.from_entries(wc_dir_name)
  if actual_entries:
    tweaked = status_tree.copy()
    tweaked.tweak_for_entries_compare()
    try:
      tweaked.compare_and_display('entries', actual_entries)
    except tree.SVNTreeUnequal:
      ### do something more
      _log_tree_state("ACTUAL ENTRY TREE:", actual_entries.old_tree(),
                      wc_dir_name)
      raise
+
+
# A variant of previous func, but doesn't pass '-q'.  This allows us
# to verify unversioned or nonexistent items in the list.
def run_and_verify_unquiet_status(wc_dir_name, status_tree):
  """Run 'status' on WC_DIR_NAME and compare it with the
  expected STATUS_TREE (which must be a wc.State; TypeError otherwise).
  Returns on success, raises on failure."""

  if not isinstance(status_tree, wc.State):
    raise TypeError('wc.State tree expected')

  # '-v -u' but no '-q', so unversioned/nonexistent items are reported.
  exit_code, output, errput = main.run_svn(None, 'status', '-v',
                                           '-u', wc_dir_name)

  actual_status = svntest.wc.State.from_status(output, wc_dir=wc_dir_name)

  # Verify actual output against expected output.
  try:
    status_tree.compare_and_display('unquiet status', actual_status)
  except tree.SVNTreeError:
    _log_tree_state("ACTUAL STATUS TREE:",
                    actual_status.normalize().old_tree(), wc_dir_name)
    raise
+
def run_and_verify_status_xml(expected_entries = [],
                              *args):
  """ Run 'status --xml' with arguments *ARGS.  If successful the output
  is parsed into an XML document and will be verified by comparing against
  EXPECTED_ENTRIES (a dict mapping each path to a dict of expected
  wc-status/commit/author/repos-status attributes).  Raises Failure on
  any stderr output or on a mismatch.
  """
  # NOTE(review): EXPECTED_ENTRIES has a mutable default, but it is only
  # read here, so sharing the default list across calls is harmless.

  exit_code, output, errput = run_and_verify_svn(None, [],
                                                 'status', '--xml', *args)

  if len(errput) > 0:
    raise Failure

  doc = parseString(''.join(output))
  entries = doc.getElementsByTagName('entry')

  def getText(nodelist):
    # Concatenate the text children of a DOM node list.
    rc = []
    for node in nodelist:
      if node.nodeType == node.TEXT_NODE:
        rc.append(node.data)
    return ''.join(rc)

  # Build a dict of actual entries keyed on path, mirroring the shape
  # of EXPECTED_ENTRIES.
  actual_entries = {}
  for entry in entries:
    wcstatus = entry.getElementsByTagName('wc-status')[0]
    commit = entry.getElementsByTagName('commit')
    author = entry.getElementsByTagName('author')
    rstatus = entry.getElementsByTagName('repos-status')

    actual_entry = {'wcprops' : wcstatus.getAttribute('props'),
                    'wcitem' : wcstatus.getAttribute('item'),
                    }
    if wcstatus.hasAttribute('revision'):
      actual_entry['wcrev'] = wcstatus.getAttribute('revision')
    if (commit):
      actual_entry['crev'] = commit[0].getAttribute('revision')
    if (author):
      actual_entry['author'] = getText(author[0].childNodes)
    if (rstatus):
      actual_entry['rprops'] = rstatus[0].getAttribute('props')
      actual_entry['ritem'] = rstatus[0].getAttribute('item')

    actual_entries[entry.getAttribute('path')] = actual_entry

  # Show a readable diff of expected vs. actual on mismatch.
  if expected_entries != actual_entries:
    raise Failure('\n' + '\n'.join(difflib.ndiff(
      pprint.pformat(expected_entries).splitlines(),
      pprint.pformat(actual_entries).splitlines())))
+
def run_and_verify_inherited_prop_xml(path_or_url,
                                      expected_inherited_props,
                                      expected_explicit_props,
                                      propname=None,
                                      peg_rev=None,
                                      *args):
  """If PROPNAME is None, then call run_and_verify_svn with proplist -v --xml
  --show-inherited-props on PATH_OR_URL, otherwise call run_and_verify_svn
  with propget PROPNAME --xml --show-inherited-props.

  PATH_OR_URL is pegged at PEG_REV if the latter is not None.  If PEG_REV
  is none, then PATH_OR_URL is pegged at HEAD if a url.

  EXPECTED_INHERITED_PROPS is a (possibly empty) dict mapping working copy
  paths or URLs to dicts of inherited properties.  EXPECTED_EXPLICIT_PROPS is
  a (possibly empty) dict of the explicit properties expected on PATH_OR_URL.

  Returns on success, raises on failure if EXPECTED_INHERITED_PROPS or
  EXPECTED_EXPLICIT_PROPS don't match the results of proplist/propget.
  """

  # Apply the peg revision.  Note: this 'else' pairs with the outer
  # 'if peg_rev is None' -- a WC path with no explicit peg gets none.
  if peg_rev is None:
    if sandbox.is_url(path_or_url):
      path_or_url = path_or_url + '@HEAD'
  else:
    path_or_url = path_or_url + '@' + str(peg_rev)

  if (propname):
    exit_code, output, errput = svntest.actions.run_and_verify_svn(
      None, [], 'propget', propname, '--xml',
      '--show-inherited-props', path_or_url, *args)
  else:
    exit_code, output, errput = svntest.actions.run_and_verify_svn(
      None, [], 'proplist', '-v', '--xml', '--show-inherited-props',
      path_or_url, *args)

  if len(errput) > 0:
    raise Failure

  # Props inherited from within the WC are keyed on absolute paths.
  expected_iprops = {}
  for x in expected_inherited_props:
    if sandbox.is_url(x):
      expected_iprops[x] = expected_inherited_props[x]
    else:
      expected_iprops[os.path.abspath(x)] = expected_inherited_props[x]

  actual_iprops = {}
  actual_explicit_props = {}

  doc = parseString(''.join(output))
  targets = doc.getElementsByTagName('target')
  for t in targets:

    # Create actual inherited props.
    iprops = t.getElementsByTagName('inherited_property')

    if len(iprops) > 0:
      actual_iprops[t.getAttribute('path')]={}

    for i in iprops:
      actual_iprops[t.getAttribute('path')][i.getAttribute('name')] = \
        i.firstChild.nodeValue

    # Create actual explicit props.
    xprops = t.getElementsByTagName('property')

    for x in xprops:
      actual_explicit_props[x.getAttribute('name')] = x.firstChild.nodeValue

  # Compare, showing readable ndiff output on mismatch.
  if expected_explicit_props != actual_explicit_props:
    raise svntest.Failure(
      'Actual and expected explicit props do not match\n' +
      '\n'.join(difflib.ndiff(
        pprint.pformat(expected_explicit_props).splitlines(),
        pprint.pformat(actual_explicit_props).splitlines())))

  if expected_iprops != actual_iprops:
    raise svntest.Failure(
      'Actual and expected inherited props do not match\n' +
      '\n'.join(difflib.ndiff(
        pprint.pformat(expected_iprops).splitlines(),
        pprint.pformat(actual_iprops).splitlines())))
+
def run_and_verify_diff_summarize_xml(error_re_string = [],
                                      expected_prefix = None,
                                      expected_paths = [],
                                      expected_items = [],
                                      expected_props = [],
                                      expected_kinds = [],
                                      *args):
  """Run 'diff --summarize --xml' with the arguments *ARGS, which should
  contain all arguments beyond for your 'diff --summarize --xml' omitting
  said arguments.  EXPECTED_PREFIX will store a "common" path prefix
  expected to be at the beginning of each summarized path.  If
  EXPECTED_PREFIX is None, then EXPECTED_PATHS will need to be exactly
  as 'svn diff --summarize --xml' will output.  If ERROR_RE_STRING, the
  command must exit with error, and the error message must match regular
  expression ERROR_RE_STRING.

  Else if ERROR_RE_STRING is None, the subcommand output will be parsed
  into an XML document and will then be verified by comparing the parsed
  output to the contents in the EXPECTED_PATHS, EXPECTED_ITEMS,
  EXPECTED_PROPS and EXPECTED_KINDS (all indexed in parallel with
  EXPECTED_PATHS).  Returns on success, raises on failure."""
  # NOTE(review): the list defaults are only read, never mutated, so the
  # shared mutable defaults are harmless here.

  exit_code, output, errput = run_and_verify_svn(None, error_re_string,
                                                 'diff', '--summarize',
                                                 '--xml', *args)


  # Return if errors are present since they were expected
  if len(errput) > 0:
    return

  doc = parseString(''.join(output))
  paths = doc.getElementsByTagName("path")
  items = expected_items
  kinds = expected_kinds

  for path in paths:
    modified_path = path.childNodes[0].data

    # Strip the common prefix (plus its trailing separator) if given.
    if (expected_prefix
        and modified_path.find(expected_prefix) == 0):
      modified_path = modified_path.replace(expected_prefix, '')[1:].strip()

    # Workaround single-object diff
    if len(modified_path) == 0:
      modified_path = path.childNodes[0].data.split(os.sep)[-1]

    # From here on, we use '/' as path separator.
    if os.sep != "/":
      modified_path = modified_path.replace(os.sep, "/")

    if modified_path not in expected_paths:
      logger.warn("ERROR: %s not expected in the changed paths.", modified_path)
      raise Failure

    # Look up the parallel expectations for this path.
    index = expected_paths.index(modified_path)
    expected_item = items[index]
    expected_kind = kinds[index]
    expected_prop = expected_props[index]
    actual_item = path.getAttribute('item')
    actual_kind = path.getAttribute('kind')
    actual_prop = path.getAttribute('props')

    if expected_item != actual_item:
      logger.warn("ERROR: expected: %s actual: %s", expected_item, actual_item)
      raise Failure

    if expected_kind != actual_kind:
      logger.warn("ERROR: expected: %s actual: %s", expected_kind, actual_kind)
      raise Failure

    if expected_prop != actual_prop:
      logger.warn("ERROR: expected: %s actual: %s", expected_prop, actual_prop)
      raise Failure
+
def run_and_verify_diff_summarize(output_tree, *args):
  """Run 'diff --summarize' with the arguments *ARGS.

  The subcommand output will be verified against OUTPUT_TREE (a
  wc.State or an old-style tree).  Returns on success, raises on
  failure.
  """

  if isinstance(output_tree, wc.State):
    output_tree = output_tree.old_tree()

  exit_code, output, errput = main.run_svn(None, 'diff', '--summarize',
                                           *args)

  actual = tree.build_tree_from_diff_summarize(output)

  # Verify actual output against expected output.
  try:
    tree.compare_trees("output", actual, output_tree)
  except tree.SVNTreeError:
    verify.display_trees(None, 'DIFF OUTPUT TREE', output_tree, actual)
    _log_tree_state("ACTUAL DIFF OUTPUT TREE:", actual)
    raise
+
def run_and_validate_lock(path, username):
  """`svn lock' the given path and validate the contents of the lock.
  Use the given username.  This is important because locks are
  user specific.  Raises Failure if any expected lock field (token,
  owner, creation date, comment) is missing from 'svn info' output."""

  comment = "Locking path:%s." % path

  # lock the path
  run_and_verify_svn(".*locked by user", [], 'lock',
                     '--username', username,
                     '-m', comment, path)

  # Run info and check that we get the lock fields.
  exit_code, output, err = run_and_verify_svn(None, [],
                                              'info','-R',
                                              path)

  ### TODO: Leverage RegexOutput([...], match_all=True) here.
  # prepare the regexs to compare against
  token_re = re.compile(".*?Lock Token: opaquelocktoken:.*?", re.DOTALL)
  author_re = re.compile(".*?Lock Owner: %s\n.*?" % username, re.DOTALL)
  created_re = re.compile(".*?Lock Created:.*?", re.DOTALL)
  comment_re = re.compile(".*?%s\n.*?" % re.escape(comment), re.DOTALL)
  # join all output lines into one
  output = "".join(output)
  # Fail even if one regex does not match
  if ( not (token_re.match(output) and
            author_re.match(output) and
            created_re.match(output) and
            comment_re.match(output))):
    raise Failure
+
def _run_and_verify_resolve(cmd, expected_paths, *args):
  """Run "svn CMD" (where CMD is 'resolve' or 'resolved') with arguments
  ARGS, and verify that it resolves the paths in EXPECTED_PATHS and no others.
  If no ARGS are specified, use the elements of EXPECTED_PATHS as the
  arguments."""
  # TODO: verify that the status of PATHS changes accordingly.
  if len(args) == 0:
    args = expected_paths
  # Accept any one of the notification wordings produced by the
  # different conflict kinds (text, property, tree, broken move).
  expected_output = verify.AlternateOutput([
    verify.UnorderedOutput([
      "Resolved conflicted state of '" + path + "'\n" for path in
      expected_paths]),
    verify.UnorderedOutput([
      "Breaking move with source path '" + path + "'\n" for path in
      expected_paths] + [
      "Resolved conflicted state of '" + path + "'\n" for path in
      expected_paths]),
    verify.UnorderedOutput([
      "Merge conflicts in '" + path + "' marked as resolved.\n" for path in
      expected_paths]),
    verify.UnorderedRegexListOutput([
      "Conflict in property.*at '" + path + "' marked as resolved.\n" \
      for path in expected_paths]),
    verify.UnorderedOutput([
      "Tree conflict at '" + path + "' marked as resolved.\n" for path in
      expected_paths]),
    ],
    match_all=False)
  run_and_verify_svn(expected_output, [],
                     cmd, *args)
+
def run_and_verify_resolve(expected_paths, *args):
  """Run "svn resolve" with arguments ARGS, and verify that it resolves the
  paths in EXPECTED_PATHS and no others.  If no ARGS are specified, use the
  elements of EXPECTED_PATHS as the arguments."""
  # Thin wrapper; all the work is done by the shared helper.
  _run_and_verify_resolve('resolve', expected_paths, *args)
+
def run_and_verify_resolved(expected_paths, *args):
  """Run "svn resolved" (the deprecated spelling) with arguments ARGS, and
  verify that it resolves the paths in EXPECTED_PATHS and no others.  If no
  ARGS are specified, use the elements of EXPECTED_PATHS as the arguments."""
  # Thin wrapper; all the work is done by the shared helper.
  _run_and_verify_resolve('resolved', expected_paths, *args)
+
def run_and_verify_revert(expected_paths, *args):
  """Run "svn revert" with arguments ARGS, verifying that exactly the
  paths in EXPECTED_PATHS are reported as reverted (in any order).
  If no ARGS are specified, use the elements of EXPECTED_PATHS as the
  arguments."""
  if not args:
    args = expected_paths
  reverted_lines = ["Reverted '%s'\n" % path for path in expected_paths]
  run_and_verify_svn(verify.UnorderedOutput(reverted_lines), [],
                     "revert", *args)
+
+
+######################################################################
+# Other general utilities
+
+
# This allows a test to *quickly* bootstrap itself.
def make_repo_and_wc(sbox, create_wc=True, read_only=False, empty=False,
                     minor_version=None):
  """Create a fresh repository and check out a WC from it.  If EMPTY is
  True, the repository and WC will be empty and at revision 0,
  otherwise they will contain the 'Greek Tree' at revision 1.

  If READ_ONLY is False, a dedicated repository will be created, at the path
  SBOX.repo_dir.  If READ_ONLY is True, a shared pristine repository may be
  used or a dedicated repository may be created.  (Currently we use a shared
  pristine 'Greek tree' repo but we create a dedicated empty repo.)
  In either case, SBOX.repo_url is assumed to point to the repository that
  will be used.

  If CREATE_WC is True, a dedicated working copy will be checked out from
  the repository, at the path SBOX.wc_dir.

  MINOR_VERSION, if given, selects the repository format to create.

  Returns on success, raises on failure."""

  # Create or copy or reference the appropriate kind of repository:
  # if we want a non-empty, Greek repo, refer to the shared one; else
  # if we want an empty repo or a writable Greek repo, create one.
  # (We could have a shared empty repo for read-only use, but we don't.)
  if empty:
    guarantee_empty_repository(sbox.repo_dir, minor_version)
    expected_state = svntest.wc.State('', {})
  else:
    if not read_only:
      guarantee_greek_repository(sbox.repo_dir, minor_version)
    expected_state = main.greek_state

  if create_wc:
    # Generate the expected output tree.
    expected_output = expected_state.copy()
    expected_output.wc_dir = sbox.wc_dir
    expected_output.tweak(status='A ', contents=None)

    # Generate an expected wc tree.
    expected_wc = expected_state

    # Do a checkout, and verify the resulting output and disk contents.
    run_and_verify_checkout(sbox.repo_url,
                            sbox.wc_dir,
                            expected_output,
                            expected_wc)
  else:
    # just make sure the parent folder of our working copy is created
    try:
      os.mkdir(main.general_wc_dir)
    except OSError as err:
      # An existing directory is fine; re-raise anything else.
      if err.errno != errno.EEXIST:
        raise
+
# Duplicate a working copy or other dir.
def duplicate_dir(wc_name, wc_copy_name):
  """Copy the working copy WC_NAME to WC_COPY_NAME.  Overwrite any
  existing tree at that location."""

  # Remove the destination first: shutil.copytree requires that it
  # not already exist.
  main.safe_rmtree(wc_copy_name)
  shutil.copytree(wc_name, wc_copy_name)
+
+
+
def get_virginal_state(wc_dir, rev):
  "Return a virginal greek tree state for a WC and repos at revision REV."

  rev = str(rev) ### maybe switch rev to an integer?

  # copy the greek tree, shift it to the new wc_dir, insert a root elem,
  # then tweak all values
  state = main.greek_state.copy()
  state.wc_dir = wc_dir
  state.desc[''] = wc.StateItem()
  state.tweak(contents=None, status='  ', wc_rev=rev)

  return state
+
# Cheap administrative directory locking
def lock_admin_dir(wc_dir, recursive=False, work_queue=False):
  "Lock a SVN administrative directory"
  # NOTE(review): the tuple returned by open_wc_db() is never used and
  # the handle is not explicitly closed -- presumably opening the db
  # is a prerequisite for the lock tester; confirm before removing.
  db, root_path, relpath = wc.open_wc_db(wc_dir)

  svntest.main.run_wc_lock_tester(recursive, wc_dir, work_queue)
+
def set_incomplete(wc_dir, revision):
  "Make wc_dir incomplete at revision"

  # Delegates to the external wc-incomplete-tester helper binary.
  svntest.main.run_wc_incomplete_tester(wc_dir, revision)
+
def get_wc_uuid(wc_dir):
  "Return the UUID of the working copy at WC_DIR (from 'svn info')."
  return run_and_parse_info(wc_dir)[0]['Repository UUID']
+
def get_wc_base_rev(wc_dir):
  "Return the BASE revision of the working copy at WC_DIR (from 'svn info')."
  return run_and_parse_info(wc_dir)[0]['Revision']
+
def load_dumpfile(filename):
  """Return the contents of the file FILENAME, as a list of bytes lines,
  assuming that it is a Subversion dump file."""
  # Use a context manager so the handle is closed deterministically
  # instead of leaking until garbage collection (the original left the
  # file object unclosed).
  with open(filename, "rb") as dump_fp:
    return dump_fp.readlines()
+
def hook_failure_message(hook_name):
  """Return the error message that the client prints for failure of the
  specified hook HOOK_NAME.  The wording changed with Subversion 1.5."""

  # Output depends on the server version, not the repository version.
  # This gets the wrong result for modern servers with old format
  # repositories.
  if svntest.main.options.server_minor_version < 5 and not svntest.main.is_ra_type_file():
    return "'%s' hook failed with error output:\n" % hook_name

  # Map each known hook to the action it blocks; unknown hooks get the
  # generic wording.
  blocked_actions = {
    "start-commit" : "Commit",
    "pre-commit" : "Commit",
    "pre-revprop-change" : "Revprop change",
    "pre-lock" : "Lock",
    "pre-unlock" : "Unlock",
    }
  action = blocked_actions.get(hook_name)
  if action is None:
    message = "%s hook failed (exit code 1)" % (hook_name,)
  else:
    message = "%s blocked by %s hook (exit code 1)" % (action, hook_name)
  return message + " with output:\n"
+
def create_failing_hook(repo_dir, hook_name, text):
  """Create a HOOK_NAME hook in the repository at REPO_DIR that prints
  TEXT to stderr and exits with an error."""

  hook_path = os.path.join(repo_dir, 'hooks', hook_name)
  # Embed the text carefully: it might include characters like "%" and "'".
  # repr() produces a valid Python literal for any input string.
  main.create_python_hook_script(hook_path, 'import sys\n'
    'sys.stderr.write(' + repr(text) + ')\n'
    'sys.exit(1)\n')
+
def enable_revprop_changes(repo_dir):
  """Enable revprop changes in the repository at REPO_DIR by creating a
  pre-revprop-change hook script and (if appropriate) making it executable."""

  hook_path = main.get_pre_revprop_change_hook_path(repo_dir)
  # The hook simply succeeds; cmd_alternative is the Windows batch form.
  main.create_python_hook_script(hook_path, 'import sys; sys.exit(0)',
                                 cmd_alternative='@exit 0')
+
def disable_revprop_changes(repo_dir):
  """Disable revprop changes in the repository at REPO_DIR by creating a
  pre-revprop-change hook script that prints "pre-revprop-change" followed
  by its arguments, and returns an error."""

  hook_path = main.get_pre_revprop_change_hook_path(repo_dir)
  # Echoing the arguments lets tests assert on which revprop change was
  # rejected; cmd_alternative is the Windows batch form.
  main.create_python_hook_script(hook_path,
                                 'import sys\n'
                                 'sys.stderr.write("pre-revprop-change %s" %'
                                 ' " ".join(sys.argv[1:]))\n'
                                 'sys.exit(1)\n',
                                 cmd_alternative=
                                 '@echo pre-revprop-change %* 1>&2\n'
                                 '@exit 1\n')
+
def create_failing_post_commit_hook(repo_dir):
  """Create a post-commit hook script in the repository at REPO_DIR that always
  reports an error."""

  hook_path = main.get_post_commit_hook_path(repo_dir)
  # cmd_alternative is the Windows batch form of the same hook.
  main.create_python_hook_script(hook_path, 'import sys\n'
    'sys.stderr.write("Post-commit hook failed")\n'
    'sys.exit(1)\n',
    cmd_alternative=
    '@echo Post-commit hook failed 1>&2\n'
    '@exit 1\n')
+
+def _make_temp_file(contents):
+ """ Create a unique temporary file with the specified CONTENTS
+ and return its path. """
+ from tempfile import mkstemp
+ (fd, path) = mkstemp()
+ os.close(fd)
+ file = open(path, 'wb')
+ file.write(contents)
+ file.flush()
+ file.close()
+ return path
+
# set_prop can be used for properties with NULL characters which are not
# handled correctly when passed to subprocess.Popen() and values like "*"
# which are not handled correctly on Windows.
def set_prop(name, value, path, expected_re_string=None, force=None):
  """Set property NAME to VALUE (bytes or str) on PATH, adding --force
  if FORCE is true.  Values that cannot travel safely on the command
  line (bytes, leading '-', embedded NUL, or any value on Windows) are
  passed via a temporary file with 'propset -F'.  If EXPECTED_RE_STRING
  is given, verify that stderr matches it.  Raises TypeError for other
  value types."""
  if not force:
    propset = ('propset',)
  else:
    propset = ('propset', '--force')

  if isinstance(value, bytes):
    file = _make_temp_file(value)
  elif isinstance(value, str):
    if value and (value[0] == '-' or '\x00' in value or
                  sys.platform == 'win32'):
      file = _make_temp_file(value.encode())
    else:
      file = None
  else:
    raise TypeError(value)

  if file is None:
    propset += (name, value, path)
  else:
    propset += ('-F', file, name, path)

  try:
    exit_code, out, err = main.run_svn(expected_re_string, *propset)
  finally:
    # Always remove the temporary value file, even if run_svn raised.
    if file is not None:
      os.remove(file)

  if expected_re_string:
    if not expected_re_string.startswith(".*"):
      expected_re_string = ".*(" + expected_re_string + ")"
    expected_err = verify.RegexOutput(expected_re_string, match_all=False)
    verify.verify_outputs(None, None, err, None, expected_err)
+
def check_prop(name, path, exp_out, revprop=None):
  """Verify that property NAME on PATH has a value of EXP_OUT
  (compared verbatim against the binary-mode stdout lines).
  If REVPROP is not None, then it is a revision number and
  a revision property is sought."""
  if revprop is not None:
    revprop_options = ['--revprop', '-r', revprop]
  else:
    revprop_options = []
  # Not using run_svn because binary_mode must be set
  exit_code, out, err = main.run_command(main.svn_binary, None, True, 'pg',
                                         '--strict', name, path,
                                         '--config-dir',
                                         main.default_config_dir,
                                         '--username', main.wc_author,
                                         '--password', main.wc_passwd,
                                         *revprop_options)
  if out != exp_out:
    logger.warn("svn pg --strict %s output does not match expected.", name)
    logger.warn("Expected standard output: %s\n", exp_out)
    logger.warn("Actual standard output: %s\n", out)
    raise Failure
+
def fill_file_with_lines(wc_path, line_nbr, line_descrip=None,
                         append=True):
  """Change the file at WC_PATH (adding some lines), and return its
  new contents.  LINE_NBR indicates the line number at which the new
  contents should assume that it's being appended.  LINE_DESCRIP is
  something like 'This is line' (the default) or 'Conflicting line'.
  If APPEND is false, the generated text replaces the file instead."""

  if line_descrip is None:
    line_descrip = "This is line"

  # Generate three numbered lines naming the file.
  basename = os.path.basename(wc_path)
  contents = "".join("%s %r in '%s'.\n" % (line_descrip, n, basename)
                     for n in range(line_nbr, line_nbr + 3))

  # Write the new contents to the file.
  if append:
    main.file_append(wc_path, contents)
  else:
    main.file_write(wc_path, contents)

  return contents
+
def inject_conflict_into_wc(sbox, state_path, file_path,
                            expected_disk, expected_status, merged_rev):
  """Create a conflict at FILE_PATH by replacing its contents,
  committing the change, backdating it to its previous revision,
  changing its contents again, then updating it to merge in the
  previous change.  STATE_PATH is FILE_PATH's key within the expected
  trees; EXPECTED_DISK and EXPECTED_STATUS, when given, are tweaked in
  place to describe the resulting conflict at MERGED_REV."""

  wc_dir = sbox.wc_dir

  # Make a change to the file.
  contents = fill_file_with_lines(file_path, 1, "This is line", append=False)

  # Commit the changed file, first taking note of the current revision.
  prev_rev = expected_status.desc[state_path].wc_rev
  expected_output = wc.State(wc_dir, {
    state_path : wc.StateItem(verb='Sending'),
    })
  if expected_status:
    expected_status.tweak(state_path, wc_rev=merged_rev)
  run_and_verify_commit(wc_dir, expected_output, expected_status,
                        [], file_path)

  # Backdate the file.
  exit_code, output, errput = main.run_svn(None, "up", "-r", str(prev_rev),
                                           file_path)
  if expected_status:
    expected_status.tweak(state_path, wc_rev=prev_rev)

  # Make a conflicting change to the file, and backdate the file.
  conflicting_contents = fill_file_with_lines(file_path, 1, "Conflicting line",
                                              append=False)

  # Merge the previous change into the file to produce a conflict.
  if expected_disk:
    expected_disk.tweak(state_path, contents="")
  expected_output = wc.State(wc_dir, {
    state_path : wc.StateItem(status='C '),
    })
  # Record the expected conflict markers/status before running the update.
  inject_conflict_into_expected_state(state_path,
                                      expected_disk, expected_status,
                                      conflicting_contents, contents,
                                      prev_rev, merged_rev)
  exit_code, output, errput = main.run_svn(None, "up", "-r", str(merged_rev),
                                           file_path)
  if expected_status:
    expected_status.tweak(state_path, wc_rev=merged_rev)
+
def inject_conflict_into_expected_state(state_path,
                                        expected_disk, expected_status,
                                        wc_text, merged_text, prev_rev,
                                        merged_rev):
  """Update the EXPECTED_DISK and EXPECTED_STATUS trees for the
  conflict at STATE_PATH (ignored if None).  WC_TEXT, MERGED_TEXT,
  PREV_REV and MERGED_REV are used to determine the contents of the
  conflict (the text parameters should be newline-terminated)."""
  if expected_disk:
    # Append the expected conflict markers to the existing contents.
    conflict_marker = make_conflict_marker_text(wc_text, merged_text,
                                                prev_rev, merged_rev)
    existing_text = expected_disk.desc[state_path].contents or ""
    expected_disk.tweak(state_path, contents=existing_text + conflict_marker)

  if expected_status:
    expected_status.tweak(state_path, status='C ')
+
def make_conflict_marker_text(wc_text, merged_text, prev_rev, merged_rev,
                              old_text=''):
  """Return the text of a file-content conflict marker: WC_TEXT is the
  current text in the working copy, OLD_TEXT the common-ancestor text
  (empty by default), and MERGED_TEXT the conflicting text merged in.
  PREV_REV and MERGED_REV are the merge-left and merge-right revision
  numbers shown in the marker lines."""
  return ("<<<<<<< .working\n%s"
          "||||||| .merge-left.r%s\n%s"
          "=======\n%s"
          ">>>>>>> .merge-right.r%s\n"
          % (wc_text, prev_rev, old_text, merged_text, merged_rev))
+
+
def build_greek_tree_conflicts(sbox):
  """Create a working copy that has tree-conflict markings.
  After this function has been called, sbox.wc_dir is a working
  copy that has specific tree-conflict markings.

  In particular, this does two conflicting sets of edits and performs an
  update so that tree conflicts appear.

  Note that this function calls sbox.build() because it needs a clean sbox.
  So, there is no need to call sbox.build() before this.

  The conflicts are the result of an 'update' on the following changes:

                Incoming    Local

    A/D/G/pi    text-mod    del
    A/D/G/rho   del         text-mod
    A/D/G/tau   del         del

  This function is useful for testing that tree-conflicts are handled
  properly once they have appeared, e.g. that commits are blocked, that the
  info output is correct, etc.

  See also the tree-conflicts tests using deep_trees in various other
  .py files, and tree_conflict_tests.py.
  """

  sbox.build()
  wc_dir = sbox.wc_dir
  j = os.path.join
  G = j(wc_dir, 'A', 'D', 'G')
  pi = j(G, 'pi')
  rho = j(G, 'rho')
  tau = j(G, 'tau')

  # Make incoming changes and "store them away" with a commit.
  main.file_append(pi, "Incoming edit.\n")
  main.run_svn(None, 'del', rho)
  main.run_svn(None, 'del', tau)

  expected_output = wc.State(wc_dir, {
    'A/D/G/pi'          : Item(verb='Sending'),
    'A/D/G/rho'         : Item(verb='Deleting'),
    'A/D/G/tau'         : Item(verb='Deleting'),
    })
  expected_status = get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/D/G/pi', wc_rev='2')
  expected_status.remove('A/D/G/rho', 'A/D/G/tau')
  run_and_verify_commit(wc_dir, expected_output, expected_status, [],
                        '-m', 'Incoming changes.', wc_dir )

  # Update back to the pristine state ("time-warp").
  expected_output = wc.State(wc_dir, {
    'A/D/G/pi'          : Item(status='U '),
    'A/D/G/rho'         : Item(status='A '),
    'A/D/G/tau'         : Item(status='A '),
    })
  expected_disk = main.greek_state
  expected_status = get_virginal_state(wc_dir, 1)
  run_and_verify_update(wc_dir, expected_output, expected_disk,
                        expected_status, [], False,
                        '-r', '1', wc_dir)

  # Make local changes
  main.run_svn(None, 'del', pi)
  main.file_append(rho, "Local edit.\n")
  main.run_svn(None, 'del', tau)

  # Update, receiving the incoming changes on top of the local changes,
  # causing tree conflicts.  Don't check for any particular result: that is
  # the job of other tests.
  run_and_verify_svn(verify.AnyOutput, [], 'update', wc_dir)
+
+
diff --git a/subversion/tests/cmdline/svntest/deeptrees.py b/subversion/tests/cmdline/svntest/deeptrees.py
new file mode 100644
index 0000000..093ccbf
--- /dev/null
+++ b/subversion/tests/cmdline/svntest/deeptrees.py
@@ -0,0 +1,1197 @@
+#
+# deeptrees.py: routines that create specific test scenarios
+#
+# Subversion is a tool for revision control.
+# See http://subversion.tigris.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+import os, shutil, re, sys, errno
+import difflib, pprint, logging
+import xml.parsers.expat
+from xml.dom.minidom import parseString
+if sys.version_info[0] >= 3:
+ # Python >=3.0
+ from io import StringIO
+else:
+ # Python <3.0
+ from cStringIO import StringIO
+
+import svntest
+from svntest import main, verify, tree, wc, sandbox
+from svntest import Failure
+from svntest.actions import *
+
+logger = logging.getLogger()
+
+def make_deep_trees(base):
+ """Helper function for deep trees conflicts. Create a set of trees,
+ each in its own "container" dir. Any conflicts can be tested separately
+ in each container.
+ """
+ j = os.path.join
+ # Create the container dirs.
+ F = j(base, 'F')
+ D = j(base, 'D')
+ DF = j(base, 'DF')
+ DD = j(base, 'DD')
+ DDF = j(base, 'DDF')
+ DDD = j(base, 'DDD')
+ os.makedirs(F)
+ os.makedirs(j(D, 'D1'))
+ os.makedirs(j(DF, 'D1'))
+ os.makedirs(j(DD, 'D1', 'D2'))
+ os.makedirs(j(DDF, 'D1', 'D2'))
+ os.makedirs(j(DDD, 'D1', 'D2', 'D3'))
+
+ # Create their files.
+ alpha = j(F, 'alpha')
+ beta = j(DF, 'D1', 'beta')
+ gamma = j(DDF, 'D1', 'D2', 'gamma')
+ main.file_append(alpha, "This is the file 'alpha'.\n")
+ main.file_append(beta, "This is the file 'beta'.\n")
+ main.file_append(gamma, "This is the file 'gamma'.\n")
+
+
+def add_deep_trees(sbox, base_dir_name):
+ """Prepare a "deep_trees" within a given directory.
+
+ The directory <sbox.wc_dir>/<base_dir_name> is created and a deep_tree
+ is created within. The items are only added, a commit has to be
+ called separately, if needed.
+
+ <base_dir_name> will thus be a container for the set of containers
+ mentioned in make_deep_trees().
+ """
+ j = os.path.join
+ base = j(sbox.wc_dir, base_dir_name)
+ make_deep_trees(base)
+ main.run_svn(None, 'add', base)
+
+
+Item = wc.StateItem
+
+# initial deep trees state
+deep_trees_virginal_state = wc.State('', {
+ 'F' : Item(),
+ 'F/alpha' : Item("This is the file 'alpha'.\n"),
+ 'D' : Item(),
+ 'D/D1' : Item(),
+ 'DF' : Item(),
+ 'DF/D1' : Item(),
+ 'DF/D1/beta' : Item("This is the file 'beta'.\n"),
+ 'DD' : Item(),
+ 'DD/D1' : Item(),
+ 'DD/D1/D2' : Item(),
+ 'DDF' : Item(),
+ 'DDF/D1' : Item(),
+ 'DDF/D1/D2' : Item(),
+ 'DDF/D1/D2/gamma' : Item("This is the file 'gamma'.\n"),
+ 'DDD' : Item(),
+ 'DDD/D1' : Item(),
+ 'DDD/D1/D2' : Item(),
+ 'DDD/D1/D2/D3' : Item(),
+ })
+
+
+# Many actions on deep trees and their resulting states...
+
+def deep_trees_leaf_edit(base):
+ """Helper function for deep trees test cases. Append text to files,
+ create new files in empty directories, and change leaf node properties."""
+ j = os.path.join
+ F = j(base, 'F', 'alpha')
+ DF = j(base, 'DF', 'D1', 'beta')
+ DDF = j(base, 'DDF', 'D1', 'D2', 'gamma')
+ main.file_append(F, "More text for file alpha.\n")
+ main.file_append(DF, "More text for file beta.\n")
+ main.file_append(DDF, "More text for file gamma.\n")
+ run_and_verify_svn(verify.AnyOutput, [],
+ 'propset', 'prop1', '1', F, DF, DDF)
+
+ D = j(base, 'D', 'D1')
+ DD = j(base, 'DD', 'D1', 'D2')
+ DDD = j(base, 'DDD', 'D1', 'D2', 'D3')
+ run_and_verify_svn(verify.AnyOutput, [],
+ 'propset', 'prop1', '1', D, DD, DDD)
+ D = j(base, 'D', 'D1', 'delta')
+ DD = j(base, 'DD', 'D1', 'D2', 'epsilon')
+ DDD = j(base, 'DDD', 'D1', 'D2', 'D3', 'zeta')
+ main.file_append(D, "This is the file 'delta'.\n")
+ main.file_append(DD, "This is the file 'epsilon'.\n")
+ main.file_append(DDD, "This is the file 'zeta'.\n")
+ run_and_verify_svn(verify.AnyOutput, [],
+ 'add', D, DD, DDD)
+
+# deep trees state after a call to deep_trees_leaf_edit
+deep_trees_after_leaf_edit = wc.State('', {
+ 'F' : Item(),
+ 'F/alpha' : Item("This is the file 'alpha'.\nMore text for file alpha.\n"),
+ 'D' : Item(),
+ 'D/D1' : Item(),
+ 'D/D1/delta' : Item("This is the file 'delta'.\n"),
+ 'DF' : Item(),
+ 'DF/D1' : Item(),
+ 'DF/D1/beta' : Item("This is the file 'beta'.\nMore text for file beta.\n"),
+ 'DD' : Item(),
+ 'DD/D1' : Item(),
+ 'DD/D1/D2' : Item(),
+ 'DD/D1/D2/epsilon' : Item("This is the file 'epsilon'.\n"),
+ 'DDF' : Item(),
+ 'DDF/D1' : Item(),
+ 'DDF/D1/D2' : Item(),
+ 'DDF/D1/D2/gamma' : Item("This is the file 'gamma'.\nMore text for file gamma.\n"),
+ 'DDD' : Item(),
+ 'DDD/D1' : Item(),
+ 'DDD/D1/D2' : Item(),
+ 'DDD/D1/D2/D3' : Item(),
+ 'DDD/D1/D2/D3/zeta' : Item("This is the file 'zeta'.\n"),
+ })
+
+
+def deep_trees_leaf_del(base):
+ """Helper function for deep trees test cases. Delete files and empty
+ dirs."""
+ j = os.path.join
+ F = j(base, 'F', 'alpha')
+ D = j(base, 'D', 'D1')
+ DF = j(base, 'DF', 'D1', 'beta')
+ DD = j(base, 'DD', 'D1', 'D2')
+ DDF = j(base, 'DDF', 'D1', 'D2', 'gamma')
+ DDD = j(base, 'DDD', 'D1', 'D2', 'D3')
+ main.run_svn(None, 'rm', F, D, DF, DD, DDF, DDD)
+
+# deep trees state after a call to deep_trees_leaf_del
+deep_trees_after_leaf_del = wc.State('', {
+ 'F' : Item(),
+ 'D' : Item(),
+ 'DF' : Item(),
+ 'DF/D1' : Item(),
+ 'DD' : Item(),
+ 'DD/D1' : Item(),
+ 'DDF' : Item(),
+ 'DDF/D1' : Item(),
+ 'DDF/D1/D2' : Item(),
+ 'DDD' : Item(),
+ 'DDD/D1' : Item(),
+ 'DDD/D1/D2' : Item(),
+ })
+
+# deep trees state after a call to deep_trees_leaf_del with no commit
+def deep_trees_after_leaf_del_no_ci(wc_dir):
+ return deep_trees_after_leaf_del
+
+def deep_trees_tree_del(base):
+ """Helper function for deep trees test cases. Delete top-level dirs."""
+ j = os.path.join
+ F = j(base, 'F', 'alpha')
+ D = j(base, 'D', 'D1')
+ DF = j(base, 'DF', 'D1')
+ DD = j(base, 'DD', 'D1')
+ DDF = j(base, 'DDF', 'D1')
+ DDD = j(base, 'DDD', 'D1')
+ main.run_svn(None, 'rm', F, D, DF, DD, DDF, DDD)
+
+def deep_trees_rmtree(base):
+ """Helper function for deep trees test cases. Delete top-level dirs
+ with rmtree instead of svn del."""
+ j = os.path.join
+ F = j(base, 'F', 'alpha')
+ D = j(base, 'D', 'D1')
+ DF = j(base, 'DF', 'D1')
+ DD = j(base, 'DD', 'D1')
+ DDF = j(base, 'DDF', 'D1')
+ DDD = j(base, 'DDD', 'D1')
+ os.unlink(F)
+ main.safe_rmtree(D)
+ main.safe_rmtree(DF)
+ main.safe_rmtree(DD)
+ main.safe_rmtree(DDF)
+ main.safe_rmtree(DDD)
+
+# deep trees state after a call to deep_trees_tree_del
+deep_trees_after_tree_del = wc.State('', {
+ 'F' : Item(),
+ 'D' : Item(),
+ 'DF' : Item(),
+ 'DD' : Item(),
+ 'DDF' : Item(),
+ 'DDD' : Item(),
+ })
+
+# deep trees state after a call to deep_trees_tree_del with no commit
+def deep_trees_after_tree_del_no_ci(wc_dir):
+ return deep_trees_after_tree_del
+
+def deep_trees_tree_del_repos(base):
+ """Helper function for deep trees test cases. Delete top-level dirs,
+ directly in the repository."""
+ j = '/'.join
+ F = j([base, 'F', 'alpha'])
+ D = j([base, 'D', 'D1'])
+ DF = j([base, 'DF', 'D1'])
+ DD = j([base, 'DD', 'D1'])
+ DDF = j([base, 'DDF', 'D1'])
+ DDD = j([base, 'DDD', 'D1'])
+ main.run_svn(None, 'mkdir', '-m', '', F, D, DF, DD, DDF, DDD)
+
+# Expected merge/update/switch output.
+
+deep_trees_conflict_output = wc.State('', {
+ 'F/alpha' : Item(status=' ', treeconflict='C'),
+ 'D/D1' : Item(status=' ', treeconflict='C'),
+ 'DF/D1' : Item(status=' ', treeconflict='C'),
+ 'DD/D1' : Item(status=' ', treeconflict='C'),
+ 'DDF/D1' : Item(status=' ', treeconflict='C'),
+ 'DDD/D1' : Item(status=' ', treeconflict='C'),
+ })
+
+deep_trees_conflict_output_skipped = wc.State('', {
+ 'D/D1' : Item(verb='Skipped'),
+ 'F/alpha' : Item(verb='Skipped'),
+ 'DD/D1' : Item(verb='Skipped'),
+ 'DF/D1' : Item(verb='Skipped'),
+ 'DDD/D1' : Item(verb='Skipped'),
+ 'DDF/D1' : Item(verb='Skipped'),
+ })
+
+# Expected status output after merge/update/switch.
+
+deep_trees_status_local_tree_del = wc.State('', {
+ '' : Item(status=' ', wc_rev=3),
+ 'D' : Item(status=' ', wc_rev=3),
+ 'D/D1' : Item(status='D ', wc_rev=2, treeconflict='C'),
+ 'DD' : Item(status=' ', wc_rev=3),
+ 'DD/D1' : Item(status='D ', wc_rev=2, treeconflict='C'),
+ 'DD/D1/D2' : Item(status='D ', wc_rev=2),
+ 'DDD' : Item(status=' ', wc_rev=3),
+ 'DDD/D1' : Item(status='D ', wc_rev=2, treeconflict='C'),
+ 'DDD/D1/D2' : Item(status='D ', wc_rev=2),
+ 'DDD/D1/D2/D3' : Item(status='D ', wc_rev=2),
+ 'DDF' : Item(status=' ', wc_rev=3),
+ 'DDF/D1' : Item(status='D ', wc_rev=2, treeconflict='C'),
+ 'DDF/D1/D2' : Item(status='D ', wc_rev=2),
+ 'DDF/D1/D2/gamma' : Item(status='D ', wc_rev=2),
+ 'DF' : Item(status=' ', wc_rev=3),
+ 'DF/D1' : Item(status='D ', wc_rev=2, treeconflict='C'),
+ 'DF/D1/beta' : Item(status='D ', wc_rev=2),
+ 'F' : Item(status=' ', wc_rev=3),
+ 'F/alpha' : Item(status='D ', wc_rev=2, treeconflict='C'),
+ })
+
+deep_trees_status_local_leaf_edit = wc.State('', {
+ '' : Item(status=' ', wc_rev=3),
+ 'D' : Item(status=' ', wc_rev=3),
+ 'D/D1' : Item(status=' M', wc_rev=2, treeconflict='C'),
+ 'D/D1/delta' : Item(status='A ', wc_rev=0),
+ 'DD' : Item(status=' ', wc_rev=3),
+ 'DD/D1' : Item(status=' ', wc_rev=2, treeconflict='C'),
+ 'DD/D1/D2' : Item(status=' M', wc_rev=2),
+ 'DD/D1/D2/epsilon' : Item(status='A ', wc_rev=0),
+ 'DDD' : Item(status=' ', wc_rev=3),
+ 'DDD/D1' : Item(status=' ', wc_rev=2, treeconflict='C'),
+ 'DDD/D1/D2' : Item(status=' ', wc_rev=2),
+ 'DDD/D1/D2/D3' : Item(status=' M', wc_rev=2),
+ 'DDD/D1/D2/D3/zeta' : Item(status='A ', wc_rev=0),
+ 'DDF' : Item(status=' ', wc_rev=3),
+ 'DDF/D1' : Item(status=' ', wc_rev=2, treeconflict='C'),
+ 'DDF/D1/D2' : Item(status=' ', wc_rev=2),
+ 'DDF/D1/D2/gamma' : Item(status='MM', wc_rev=2),
+ 'DF' : Item(status=' ', wc_rev=3),
+ 'DF/D1' : Item(status=' ', wc_rev=2, treeconflict='C'),
+ 'DF/D1/beta' : Item(status='MM', wc_rev=2),
+ 'F' : Item(status=' ', wc_rev=3),
+ 'F/alpha' : Item(status='MM', wc_rev=2, treeconflict='C'),
+ })
+
+
+class DeepTreesTestCase:
+ """Describes one tree-conflicts test case.
+ See deep_trees_run_tests_scheme_for_update(), ..._switch(), ..._merge().
+
+ The name field is the subdirectory name in which the test should be run.
+
+ The local_action and incoming_action are the functions to run
+ to construct the local changes and incoming changes, respectively.
+ See deep_trees_leaf_edit, deep_trees_tree_del, etc.
+
+ The expected_* and error_re_string arguments are described in functions
+ run_and_verify_[update|switch|merge]
+ except expected_info, which is a dict that has path keys with values
+ that are dicts as passed to run_and_verify_info():
+ expected_info = {
+ 'F/alpha' : {
+ 'Revision' : '3',
+ 'Tree conflict' :
+ '^local delete, incoming edit upon update'
+ + ' Source left: .file.*/F/alpha@2'
+ + ' Source right: .file.*/F/alpha@3$',
+ },
+ 'DF/D1' : {
+ 'Tree conflict' :
+ '^local delete, incoming edit upon update'
+ + ' Source left: .dir.*/DF/D1@2'
+ + ' Source right: .dir.*/DF/D1@3$',
+ },
+ ...
+ }
+
+ Note: expected_skip is only used in merge, i.e. using
+ deep_trees_run_tests_scheme_for_merge.
+ """
+
+ def __init__(self, name, local_action, incoming_action,
+ expected_output = None, expected_disk = None,
+ expected_status = None, expected_skip = None,
+ error_re_string = None,
+ commit_block_string = ".*remains in conflict.*",
+ expected_info = None):
+ self.name = name
+ self.local_action = local_action
+ self.incoming_action = incoming_action
+ self.expected_output = expected_output
+ self.expected_disk = expected_disk
+ self.expected_status = expected_status
+ self.expected_skip = expected_skip
+ self.error_re_string = error_re_string
+ self.commit_block_string = commit_block_string
+ self.expected_info = expected_info
+
+
+
+def deep_trees_run_tests_scheme_for_update(sbox, greater_scheme):
+ """
+ Runs a given list of tests for conflicts occurring at an update operation.
+
+ This function wants to save time and perform a number of different
+ test cases using just a single repository and performing just one commit
+ for all test cases instead of one for each test case.
+
+ 1) Each test case is initialized in a separate subdir. Each subdir
+ again contains one set of "deep_trees", being separate container
+ dirs for different depths of trees (F, D, DF, DD, DDF, DDD).
+
+ 2) A commit is performed across all test cases and depths.
+ (our initial state, -r2)
+
+ 3) In each test case subdir (e.g. "local_tree_del_incoming_leaf_edit"),
+ its *incoming* action is performed (e.g. "deep_trees_leaf_edit"), in
+ each of the different depth trees (F, D, DF, ... DDD).
+
+ 4) A commit is performed across all test cases and depths:
+ our "incoming" state is "stored away in the repository for now",
+ -r3.
+
+ 5) All test case dirs and contained deep_trees are time-warped
+ (updated) back to -r2, the initial state containing deep_trees.
+
+ 6) In each test case subdir (e.g. "local_tree_del_incoming_leaf_edit"),
+ its *local* action is performed (e.g. "deep_trees_leaf_del"), in
+ each of the different depth trees (F, D, DF, ... DDD).
+
+ 7) An update to -r3 is performed across all test cases and depths.
+ This causes tree-conflicts between the "local" state in the working
+ copy and the "incoming" state from the repository, -r3.
+
+ 8) A commit is performed in each separate container, to verify
+ that each tree-conflict indeed blocks a commit.
+
+ The sbox parameter is just the sbox passed to a test function. No need
+ to call sbox.build(), since it is called (once) within this function.
+
+ The "table" greater_scheme models all of the different test cases
+ that should be run using a single repository.
+
+ greater_scheme is a list of DeepTreesTestCase items, which define complete
+ test setups, so that they can be performed as described above.
+ """
+
+ j = os.path.join
+
+ if not sbox.is_built():
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+
+ # 1) create directories
+
+ for test_case in greater_scheme:
+ try:
+ add_deep_trees(sbox, test_case.name)
+ except:
+ logger.warn("ERROR IN: Tests scheme for update: "
+ + "while setting up deep trees in '%s'", test_case.name)
+ raise
+
+
+ # 2) commit initial state
+
+ main.run_svn(None, 'commit', '-m', 'initial state', wc_dir)
+
+
+ # 3) apply incoming changes
+
+ for test_case in greater_scheme:
+ try:
+ test_case.incoming_action(j(sbox.wc_dir, test_case.name))
+ except:
+ logger.warn("ERROR IN: Tests scheme for update: "
+ + "while performing incoming action in '%s'", test_case.name)
+ raise
+
+
+ # 4) commit incoming changes
+
+ main.run_svn(None, 'commit', '-m', 'incoming changes', wc_dir)
+
+
+ # 5) time-warp back to -r2
+
+ main.run_svn(None, 'update', '-r2', wc_dir)
+
+
+ # 6) apply local changes
+
+ for test_case in greater_scheme:
+ try:
+ test_case.local_action(j(wc_dir, test_case.name))
+ except:
+ logger.warn("ERROR IN: Tests scheme for update: "
+ + "while performing local action in '%s'", test_case.name)
+ raise
+
+
+ # 7) update to -r3, conflicting with incoming changes.
+ # A lot of different things are expected.
+ # Do separate update operations for each test case.
+
+ for test_case in greater_scheme:
+ try:
+ base = j(wc_dir, test_case.name)
+
+ x_out = test_case.expected_output
+ if x_out != None:
+ x_out = x_out.copy()
+ x_out.wc_dir = base
+
+ x_disk = test_case.expected_disk
+
+ x_status = test_case.expected_status
+ if x_status != None:
+ x_status.copy()
+ x_status.wc_dir = base
+
+ if test_case.error_re_string == None:
+ expected_stderr = []
+ else:
+ expected_stderr = test_case.error_re_string
+
+ run_and_verify_update(base, x_out, x_disk, None,
+ expected_stderr = expected_stderr)
+ if x_status:
+ run_and_verify_unquiet_status(base, x_status)
+
+ x_info = test_case.expected_info or {}
+ for path in x_info:
+ run_and_verify_info([x_info[path]], j(base, path))
+
+ except:
+ logger.warn("ERROR IN: Tests scheme for update: "
+ + "while verifying in '%s'", test_case.name)
+ raise
+
+
+ # 8) Verify that commit fails.
+
+ for test_case in greater_scheme:
+ try:
+ base = j(wc_dir, test_case.name)
+
+ x_status = test_case.expected_status
+ if x_status != None:
+ x_status.copy()
+ x_status.wc_dir = base
+
+ run_and_verify_commit(base, None, x_status,
+ test_case.commit_block_string)
+ except:
+ logger.warn("ERROR IN: Tests scheme for update: "
+ + "while checking commit-blocking in '%s'", test_case.name)
+ raise
+
+
+
+def deep_trees_skipping_on_update(sbox, test_case, skip_paths,
+ chdir_skip_paths):
+ """
+ Create tree conflicts, then update again, expecting the existing tree
+ conflicts to be skipped.
+ SKIP_PATHS is a list of paths, relative to the "base dir", for which
+ "update" on the "base dir" should report as skipped.
+ CHDIR_SKIP_PATHS is a list of (target-path, skipped-path) pairs for which
+ an update of "target-path" (relative to the "base dir") should result in
+ "skipped-path" (relative to "target-path") being reported as skipped.
+ """
+
+ """FURTHER_ACTION is a function that will make a further modification to
+ each target, this being the modification that we expect to be skipped. The
+ function takes the "base dir" (the WC path to the test case directory) as
+ its only argument."""
+ further_action = deep_trees_tree_del_repos
+
+ j = os.path.join
+ wc_dir = sbox.wc_dir
+ base = j(wc_dir, test_case.name)
+
+ # Initialize: generate conflicts. (We do not check anything here.)
+ setup_case = DeepTreesTestCase(test_case.name,
+ test_case.local_action,
+ test_case.incoming_action,
+ None,
+ None,
+ None)
+ deep_trees_run_tests_scheme_for_update(sbox, [setup_case])
+
+ # Make a further change to each target in the repository so there is a new
+ # revision to update to. (This is r4.)
+ further_action(sbox.repo_url + '/' + test_case.name)
+
+ # Update whole working copy, expecting the nodes still in conflict to be
+ # skipped.
+
+ x_out = test_case.expected_output
+ if x_out != None:
+ x_out = x_out.copy()
+ x_out.wc_dir = base
+
+ x_disk = test_case.expected_disk
+
+ x_status = test_case.expected_status
+ if x_status != None:
+ x_status = x_status.copy()
+ x_status.wc_dir = base
+ # Account for nodes that were updated by further_action
+ x_status.tweak('', 'D', 'F', 'DD', 'DF', 'DDD', 'DDF', wc_rev=4)
+
+ if test_case.error_re_string == None:
+ expected_stderr = []
+ else:
+ expected_stderr = test_case.error_re_string
+
+ run_and_verify_update(base, x_out, x_disk, None,
+ expected_stderr = expected_stderr)
+
+ run_and_verify_unquiet_status(base, x_status)
+
+ # Try to update each in-conflict subtree. Expect a 'Skipped' output for
+ # each, and the WC status to be unchanged.
+ for path in skip_paths:
+ run_and_verify_update(j(base, path),
+ wc.State(base, {path : Item(verb='Skipped')}),
+ None, None)
+
+ run_and_verify_unquiet_status(base, x_status)
+
+ # Try to update each in-conflict subtree. Expect a 'Skipped' output for
+ # each, and the WC status to be unchanged.
+ # This time, cd to the subdir before updating it.
+ was_cwd = os.getcwd()
+ for path, skipped in chdir_skip_paths:
+ if isinstance(skipped, list):
+ expected_skip = {}
+ for p in skipped:
+ expected_skip[p] = Item(verb='Skipped')
+ else:
+ expected_skip = {skipped : Item(verb='Skipped')}
+ p = j(base, path)
+ run_and_verify_update(p,
+ wc.State(p, expected_skip),
+ None, None)
+ os.chdir(was_cwd)
+
+ run_and_verify_unquiet_status(base, x_status)
+
+ # Verify that commit still fails.
+ for path, skipped in chdir_skip_paths:
+
+ run_and_verify_commit(j(base, path), None, None,
+ test_case.commit_block_string,
+ base)
+
+ run_and_verify_unquiet_status(base, x_status)
+
+
+def deep_trees_run_tests_scheme_for_switch(sbox, greater_scheme):
+ """
+ Runs a given list of tests for conflicts occurring at a switch operation.
+
+ This function wants to save time and perform a number of different
+ test cases using just a single repository and performing just one commit
+ for all test cases instead of one for each test case.
+
+ 1) Each test case is initialized in a separate subdir. Each subdir
+ again contains two subdirs: one "local" and one "incoming" for
+ the switch operation. These contain a set of deep_trees each.
+
+ 2) A commit is performed across all test cases and depths.
+ (our initial state, -r2)
+
+ 3) In each test case subdir's incoming subdir, the
+ incoming actions are performed.
+
+ 4) A commit is performed across all test cases and depths. (-r3)
+
+ 5) In each test case subdir's local subdir, the local actions are
+ performed. They remain uncommitted in the working copy.
+
+ 6) In each test case subdir's local dir, a switch is performed to its
+ corresponding incoming dir.
+ This causes conflicts between the "local" state in the working
+ copy and the "incoming" state from the incoming subdir (still -r3).
+
+ 7) A commit is performed in each separate container, to verify
+ that each tree-conflict indeed blocks a commit.
+
+ The sbox parameter is just the sbox passed to a test function. No need
+ to call sbox.build(), since it is called (once) within this function.
+
+ The "table" greater_scheme models all of the different test cases
+ that should be run using a single repository.
+
+ greater_scheme is a list of DeepTreesTestCase items, which define complete
+ test setups, so that they can be performed as described above.
+ """
+
+ j = os.path.join
+
+ if not sbox.is_built():
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+
+ # 1) Create directories.
+
+ for test_case in greater_scheme:
+ try:
+ base = j(sbox.wc_dir, test_case.name)
+ os.makedirs(base)
+ make_deep_trees(j(base, "local"))
+ make_deep_trees(j(base, "incoming"))
+ main.run_svn(None, 'add', base)
+ except:
+ logger.warn("ERROR IN: Tests scheme for switch: "
+ + "while setting up deep trees in '%s'", test_case.name)
+ raise
+
+
+ # 2) Commit initial state (-r2).
+
+ main.run_svn(None, 'commit', '-m', 'initial state', wc_dir)
+
+
+ # 3) Apply incoming changes
+
+ for test_case in greater_scheme:
+ try:
+ test_case.incoming_action(j(sbox.wc_dir, test_case.name, "incoming"))
+ except:
+ logger.warn("ERROR IN: Tests scheme for switch: "
+ + "while performing incoming action in '%s'", test_case.name)
+ raise
+
+
+ # 4) Commit all changes (-r3).
+
+ main.run_svn(None, 'commit', '-m', 'incoming changes', wc_dir)
+
+
+ # 5) Apply local changes in their according subdirs.
+
+ for test_case in greater_scheme:
+ try:
+ test_case.local_action(j(sbox.wc_dir, test_case.name, "local"))
+ except:
+ logger.warn("ERROR IN: Tests scheme for switch: "
+ + "while performing local action in '%s'", test_case.name)
+ raise
+
+
+ # 6) switch the local dir to the incoming url, conflicting with incoming
+ # changes. A lot of different things are expected.
+ # Do separate switch operations for each test case.
+
+ for test_case in greater_scheme:
+ try:
+ local = j(wc_dir, test_case.name, "local")
+ incoming = sbox.repo_url + "/" + test_case.name + "/incoming"
+
+ x_out = test_case.expected_output
+ if x_out != None:
+ x_out = x_out.copy()
+ x_out.wc_dir = local
+
+ x_disk = test_case.expected_disk
+
+ x_status = test_case.expected_status
+ if x_status != None:
+ x_status.copy()
+ x_status.wc_dir = local
+
+ if test_case.error_re_string == None:
+ expected_stderr = []
+ else:
+ expected_stderr = test_case.error_re_string
+
+ run_and_verify_switch(local, local, incoming, x_out, x_disk, None,
+ expected_stderr, False,
+ '--ignore-ancestry')
+ run_and_verify_unquiet_status(local, x_status)
+
+ x_info = test_case.expected_info or {}
+ for path in x_info:
+ run_and_verify_info([x_info[path]], j(local, path))
+ except:
+ logger.warn("ERROR IN: Tests scheme for switch: "
+ + "while verifying in '%s'", test_case.name)
+ raise
+
+
+ # 7) Verify that commit fails.
+
+ for test_case in greater_scheme:
+ try:
+ local = j(wc_dir, test_case.name, 'local')
+
+ x_status = test_case.expected_status
+ if x_status != None:
+ x_status.copy()
+ x_status.wc_dir = local
+
+ run_and_verify_commit(local, None, x_status,
+ test_case.commit_block_string)
+ except:
+ logger.warn("ERROR IN: Tests scheme for switch: "
+ + "while checking commit-blocking in '%s'", test_case.name)
+ raise
+
+
+def deep_trees_run_tests_scheme_for_merge(sbox, greater_scheme,
+ do_commit_local_changes,
+ do_commit_conflicts=True,
+ ignore_ancestry=False):
+ """
+ Runs a given list of tests for conflicts occurring at a merge operation.
+
+ This function wants to save time and perform a number of different
+ test cases using just a single repository and performing just one commit
+ for all test cases instead of one for each test case.
+
+ 1) Each test case is initialized in a separate subdir. Each subdir
+ initially contains another subdir, called "incoming", which
+ contains a set of deep_trees.
+
+ 2) A commit is performed across all test cases and depths.
+ (a pre-initial state)
+
+ 3) In each test case subdir, the "incoming" subdir is copied to "local",
+ via the `svn copy' command. Each test case's subdir now has two sub-
+ dirs: "local" and "incoming", initial states for the merge operation.
+
+ 4) An update is performed across all test cases and depths, so that the
+ copies made in 3) are pulled into the wc.
+
+ 5) In each test case's "incoming" subdir, the incoming action is
+ performed.
+
+ 6) A commit is performed across all test cases and depths, to commit
+ the incoming changes.
+ If do_commit_local_changes is True, this becomes step 7 (swap steps).
+
+ 7) In each test case's "local" subdir, the local_action is performed.
+ If do_commit_local_changes is True, this becomes step 6 (swap steps).
+ Then, in effect, the local changes are committed as well.
+
+ 8) In each test case subdir, the "incoming" subdir is merged into the
+ "local" subdir. If ignore_ancestry is True, then the merge is done
+ with the --ignore-ancestry option, so mergeinfo is neither considered
+ nor recorded. This causes conflicts between the "local" state in the
+ working copy and the "incoming" state from the incoming subdir.
+
+ 9) If do_commit_conflicts is True, then a commit is performed in each
+ separate container, to verify that each tree-conflict indeed blocks
+ a commit.
+
+ The sbox parameter is just the sbox passed to a test function. No need
+ to call sbox.build(), since it is called (once) within this function.
+
+ The "table" greater_scheme models all of the different test cases
+ that should be run using a single repository.
+
+ greater_scheme is a list of DeepTreesTestCase items, which define complete
+ test setups, so that they can be performed as described above.
+ """
+
+ j = os.path.join
+
+ if not sbox.is_built():
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # 1) Create directories.
+ for test_case in greater_scheme:
+ try:
+ base = j(sbox.wc_dir, test_case.name)
+ os.makedirs(base)
+ make_deep_trees(j(base, "incoming"))
+ main.run_svn(None, 'add', base)
+ except:
+ logger.warn("ERROR IN: Tests scheme for merge: "
+ + "while setting up deep trees in '%s'", test_case.name)
+ raise
+
+
+ # 2) Commit pre-initial state (-r2).
+
+ main.run_svn(None, 'commit', '-m', 'pre-initial state', wc_dir)
+
+
+ # 3) Copy "incoming" to "local".
+
+ for test_case in greater_scheme:
+ try:
+ base_url = sbox.repo_url + "/" + test_case.name
+ incoming_url = base_url + "/incoming"
+ local_url = base_url + "/local"
+ main.run_svn(None, 'cp', incoming_url, local_url, '-m',
+ 'copy incoming to local')
+ except:
+ logger.warn("ERROR IN: Tests scheme for merge: "
+ + "while copying deep trees in '%s'", test_case.name)
+ raise
+
+ # 4) Update to load all of the "/local" subdirs into the working copies.
+
+ try:
+ main.run_svn(None, 'up', sbox.wc_dir)
+ except:
+ logger.warn("ERROR IN: Tests scheme for merge: "
+ + "while updating local subdirs")
+ raise
+
+
+ # 5) Perform incoming actions
+
+ for test_case in greater_scheme:
+ try:
+ test_case.incoming_action(j(sbox.wc_dir, test_case.name, "incoming"))
+ except:
+ logger.warn("ERROR IN: Tests scheme for merge: "
+ + "while performing incoming action in '%s'", test_case.name)
+ raise
+
+
+ # 6) or 7) Commit all incoming actions
+
+ if not do_commit_local_changes:
+ try:
+ main.run_svn(None, 'ci', '-m', 'Committing incoming actions',
+ sbox.wc_dir)
+ except:
+ logger.warn("ERROR IN: Tests scheme for merge: "
+ + "while committing incoming actions")
+ raise
+
+
+ # 7) or 6) Perform all local actions.
+
+ for test_case in greater_scheme:
+ try:
+ test_case.local_action(j(sbox.wc_dir, test_case.name, "local"))
+ except:
+ logger.warn("ERROR IN: Tests scheme for merge: "
+ + "while performing local action in '%s'", test_case.name)
+ raise
+
+
+ # 7) or 6) Commit all incoming and local actions
+
+ if do_commit_local_changes:
+ try:
+ main.run_svn(None, 'ci', '-m', 'Committing incoming and local actions',
+ sbox.wc_dir)
+ except:
+ logger.warn("ERROR IN: Tests scheme for merge: "
+ + "while committing incoming and local actions")
+ raise
+
+
+ # 8) Merge all "incoming" subdirs to their respective "local" subdirs.
+ # This creates conflicts between the local changes in the "local" wc
+ # subdirs and the incoming states committed in the "incoming" subdirs.
+
+ for test_case in greater_scheme:
+ try:
+ local = j(sbox.wc_dir, test_case.name, "local")
+ incoming = sbox.repo_url + "/" + test_case.name + "/incoming"
+
+ x_out = test_case.expected_output
+ if x_out != None:
+ x_out = x_out.copy()
+ x_out.wc_dir = local
+
+ x_disk = test_case.expected_disk
+
+ x_status = test_case.expected_status
+ if x_status != None:
+ x_status.copy()
+ x_status.wc_dir = local
+
+ x_skip = test_case.expected_skip
+ if x_skip != None:
+ x_skip.copy()
+ x_skip.wc_dir = local
+
+ varargs = (local,'--allow-mixed-revisions',)
+ if ignore_ancestry:
+ varargs = varargs + ('--ignore-ancestry',)
+
+ if test_case.error_re_string == None:
+ expected_stderr = []
+ else:
+ expected_stderr = test_case.error_re_string
+
+ run_and_verify_merge(local, '0', 'HEAD', incoming, None,
+ x_out, None, None, x_disk, None, x_skip,
+ expected_stderr,
+ False, False, *varargs)
+ run_and_verify_unquiet_status(local, x_status)
+ except:
+ logger.warn("ERROR IN: Tests scheme for merge: "
+ + "while verifying in '%s'", test_case.name)
+ raise
+
+
+ # 9) Verify that commit fails.
+
+ if do_commit_conflicts:
+ for test_case in greater_scheme:
+ try:
+ local = j(wc_dir, test_case.name, 'local')
+
+ x_status = test_case.expected_status
+ if x_status != None:
+ x_status.copy()
+ x_status.wc_dir = local
+
+ run_and_verify_commit(local, None, x_status,
+ test_case.commit_block_string)
+ except:
+ logger.warn("ERROR IN: Tests scheme for merge: "
+ + "while checking commit-blocking in '%s'", test_case.name)
+ raise
+
+
+### Bummer. It would be really nice to have easy access to the URL
+### member of our entries files so that switches could be tested by
+### examining the modified ancestry. But status doesn't show this
+### information. Hopefully in the future the cmdline binary will have
+### a subcommand for dumping multi-line detailed information about
+### versioned things. Until then, we'll stick with the traditional
+### verification methods.
+###
+### gjs says: we have 'svn info' now
+
+def get_routine_status_state(wc_dir):
+ """get the routine status list for WC_DIR at the completion of an
+ initial call to do_routine_switching()"""
+
+ # Construct some paths for convenience
+ ADH_path = os.path.join(wc_dir, 'A', 'D', 'H')
+ chi_path = os.path.join(ADH_path, 'chi')
+ omega_path = os.path.join(ADH_path, 'omega')
+ psi_path = os.path.join(ADH_path, 'psi')
+ pi_path = os.path.join(ADH_path, 'pi')
+ tau_path = os.path.join(ADH_path, 'tau')
+ rho_path = os.path.join(ADH_path, 'rho')
+
+ # Now generate a state
+ state = svntest.actions.get_virginal_state(wc_dir, 1)
+ state.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta', 'A/B/F', 'A/B/lambda')
+ state.add({
+ 'A/B/pi' : Item(status=' ', wc_rev=1),
+ 'A/B/tau' : Item(status=' ', wc_rev=1),
+ 'A/B/rho' : Item(status=' ', wc_rev=1),
+ })
+
+ return state
+
+#----------------------------------------------------------------------
+
+def get_routine_disk_state(wc_dir):
+ """get the routine disk list for WC_DIR at the completion of an
+ initial call to do_routine_switching()"""
+
+ disk = svntest.main.greek_state.copy()
+
+ # iota has the same contents as gamma
+ disk.tweak('iota', contents=disk.desc['A/D/gamma'].contents)
+
+ # A/B/* no longer exist, but have been replaced by copies of A/D/G/*
+ disk.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta', 'A/B/F', 'A/B/lambda')
+ disk.add({
+ 'A/B/pi' : Item("This is the file 'pi'.\n"),
+ 'A/B/rho' : Item("This is the file 'rho'.\n"),
+ 'A/B/tau' : Item("This is the file 'tau'.\n"),
+ })
+
+ return disk
+
+#----------------------------------------------------------------------
+
+def do_routine_switching(wc_dir, repo_url, verify):
+ """perform some routine switching of the working copy WC_DIR for
+ other tests to use. If VERIFY, then do a full verification of the
+ switching, else don't bother."""
+
+ ### Switch the file `iota' to `A/D/gamma'.
+
+ # Construct some paths for convenience
+ iota_path = os.path.join(wc_dir, 'iota')
+ gamma_url = repo_url + '/A/D/gamma'
+
+ if verify:
+ # Create expected output tree
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(status='U '),
+ })
+
+ # Create expected disk tree (iota will have gamma's contents)
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota',
+ contents=expected_disk.desc['A/D/gamma'].contents)
+
+ # Create expected status tree
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', switched='S')
+
+ # Do the switch and check the results in three ways.
+ svntest.actions.run_and_verify_switch(wc_dir, iota_path, gamma_url,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [],
+ False, '--ignore-ancestry')
+ else:
+ svntest.main.run_svn(None, 'switch', '--ignore-ancestry',
+ gamma_url, iota_path)
+
+ ### Switch the directory `A/B' to `A/D/G'.
+
+ # Construct some paths for convenience
+ AB_path = os.path.join(wc_dir, 'A', 'B')
+ ADG_url = repo_url + '/A/D/G'
+
+ if verify:
+ # Create expected output tree
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(status='D '),
+ 'A/B/F' : Item(status='D '),
+ 'A/B/lambda' : Item(status='D '),
+ 'A/B/pi' : Item(status='A '),
+ 'A/B/tau' : Item(status='A '),
+ 'A/B/rho' : Item(status='A '),
+ })
+
+ # Create expected disk tree (iota will have gamma's contents,
+ # A/B/* will look like A/D/G/*)
+ expected_disk = get_routine_disk_state(wc_dir)
+
+ # Create expected status
+ expected_status = get_routine_status_state(wc_dir)
+ expected_status.tweak('iota', 'A/B', switched='S')
+
+ # Do the switch and check the results in three ways.
+ svntest.actions.run_and_verify_switch(wc_dir, AB_path, ADG_url,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [],
+ False, '--ignore-ancestry')
+ else:
+ svntest.main.run_svn(None, 'switch', '--ignore-ancestry',
+ ADG_url, AB_path)
+
+
+#----------------------------------------------------------------------
+
+def commit_routine_switching(wc_dir, verify):
+ "Commit some stuff in a routinely-switched working copy."
+
+ # Make some local mods
+ iota_path = os.path.join(wc_dir, 'iota')
+ Bpi_path = os.path.join(wc_dir, 'A', 'B', 'pi')
+ Gpi_path = os.path.join(wc_dir, 'A', 'D', 'G', 'pi')
+ Z_path = os.path.join(wc_dir, 'A', 'D', 'G', 'Z')
+ zeta_path = os.path.join(wc_dir, 'A', 'D', 'G', 'Z', 'zeta')
+
+ svntest.main.file_append(iota_path, "apple")
+ svntest.main.file_append(Bpi_path, "melon")
+ svntest.main.file_append(Gpi_path, "banana")
+ os.mkdir(Z_path)
+ svntest.main.file_append(zeta_path, "This is the file 'zeta'.\n")
+ svntest.main.run_svn(None, 'add', Z_path)
+
+ # Try to commit. We expect this to fail because, if all the
+ # switching went as expected, A/B/pi and A/D/G/pi point to the
+ # same URL. We don't allow this.
+ svntest.actions.run_and_verify_commit(
+ wc_dir, None, None,
+ "svn: E195003: Cannot commit both .* as they refer to the same URL$")
+
+ # Okay, that all taken care of, let's revert the A/D/G/pi path and
+ # move along. Afterward, we should be okay to commit. (Sorry,
+ # holsta, that banana has to go...)
+ svntest.main.run_svn(None, 'revert', Gpi_path)
+
+ # Create expected output tree.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/Z' : Item(verb='Adding'),
+ 'A/D/G/Z/zeta' : Item(verb='Adding'),
+ 'iota' : Item(verb='Sending'),
+ 'A/B/pi' : Item(verb='Sending'),
+ })
+
+  # Create expected status tree.
+ expected_status = get_routine_status_state(wc_dir)
+ expected_status.tweak('iota', 'A/B', switched='S')
+ expected_status.tweak('iota', 'A/B/pi', wc_rev=2, status=' ')
+ expected_status.add({
+ 'A/D/G/Z' : Item(status=' ', wc_rev=2),
+ 'A/D/G/Z/zeta' : Item(status=' ', wc_rev=2),
+ })
+
+ # Commit should succeed
+ if verify:
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+ else:
+ svntest.main.run_svn(None,
+ 'ci', '-m', 'log msg', wc_dir)
diff --git a/subversion/tests/cmdline/svntest/err.py b/subversion/tests/cmdline/svntest/err.py
new file mode 100644
index 0000000..e5271c0
--- /dev/null
+++ b/subversion/tests/cmdline/svntest/err.py
@@ -0,0 +1,286 @@
+#!/usr/bin/env python
+### This file automatically generated by tools/dev/gen-py-error.py,
+### which see for more information
+###
+### It is versioned for convenience.
+
+APMOD_ACTIVITY_NOT_FOUND = 190002
+APMOD_BAD_BASELINE = 190003
+APMOD_CONNECTION_ABORTED = 190004
+APMOD_MALFORMED_URI = 190001
+APMOD_MISSING_PATH_TO_FS = 190000
+ASSERTION_FAIL = 235000
+ASSERTION_ONLY_TRACING_LINKS = 235001
+ATOMIC_INIT_FAILURE = 200029
+AUTHN_CREDS_NOT_SAVED = 215003
+AUTHN_CREDS_UNAVAILABLE = 215000
+AUTHN_FAILED = 215004
+AUTHN_NO_PROVIDER = 215001
+AUTHN_PROVIDERS_EXHAUSTED = 215002
+AUTHZ_INVALID_CONFIG = 220003
+AUTHZ_PARTIALLY_READABLE = 220002
+AUTHZ_ROOT_UNREADABLE = 220000
+AUTHZ_UNREADABLE = 220001
+AUTHZ_UNWRITABLE = 220004
+BAD_ATOMIC = 125015
+BAD_CHANGELIST_NAME = 125014
+BAD_CHECKSUM_KIND = 125011
+BAD_CHECKSUM_PARSE = 125012
+BAD_CONFIG_VALUE = 125009
+BAD_CONTAINING_POOL = 125000
+BAD_DATE = 125003
+BAD_FILENAME = 125001
+BAD_MIME_TYPE = 125004
+BAD_PROPERTY_VALUE = 125005
+BAD_PROP_KIND = 200008
+BAD_RELATIVE_PATH = 125007
+BAD_SERVER_SPECIFICATION = 125010
+BAD_TOKEN = 125013
+BAD_URL = 125002
+BAD_UUID = 125008
+BAD_VERSION_FILE_FORMAT = 125006
+BASE = 200000
+CANCELLED = 200015
+CEASE_INVOCATION = 200021
+CHECKSUM_MISMATCH = 200014
+CLIENT_BAD_REVISION = 195002
+CLIENT_CYCLE_DETECTED = 195019
+CLIENT_DUPLICATE_COMMIT_URL = 195003
+CLIENT_FILE_EXTERNAL_OVERWRITE_VERSIONED = 195017
+CLIENT_FORBIDDEN_BY_SERVER = 195023
+CLIENT_INVALID_EXTERNALS_DESCRIPTION = 195005
+CLIENT_INVALID_MERGEINFO_NO_MERGETRACKING = 195021
+CLIENT_INVALID_RELOCATION = 195009
+CLIENT_IS_BINARY_FILE = 195004
+CLIENT_IS_DIRECTORY = 195007
+CLIENT_MERGE_UPDATE_REQUIRED = 195020
+CLIENT_MISSING_LOCK_TOKEN = 195013
+CLIENT_MODIFIED = 195006
+CLIENT_MULTIPLE_SOURCES_DISALLOWED = 195014
+CLIENT_NOT_READY_TO_MERGE = 195016
+CLIENT_NO_LOCK_TOKEN = 195022
+CLIENT_NO_VERSIONED_PARENT = 195015
+CLIENT_PATCH_BAD_STRIP_COUNT = 195018
+CLIENT_PROPERTY_NAME = 195011
+CLIENT_RA_ACCESS_REQUIRED = 195001
+CLIENT_REVISION_AUTHOR_CONTAINS_NEWLINE = 195010
+CLIENT_REVISION_RANGE = 195008
+CLIENT_UNRELATED_RESOURCES = 195012
+CLIENT_VERSIONED_PATH_REQUIRED = 195000
+CL_ADM_DIR_RESERVED = 205003
+CL_ARG_PARSING_ERROR = 205000
+CL_BAD_LOG_MESSAGE = 205008
+CL_COMMIT_IN_ADDED_DIR = 205006
+CL_ERROR_PROCESSING_EXTERNALS = 205011
+CL_INSUFFICIENT_ARGS = 205001
+CL_LOG_MESSAGE_IS_PATHNAME = 205005
+CL_LOG_MESSAGE_IS_VERSIONED_FILE = 205004
+CL_MUTUALLY_EXCLUSIVE_ARGS = 205002
+CL_NO_EXTERNAL_EDITOR = 205007
+CL_NO_EXTERNAL_MERGE_TOOL = 205010
+CL_UNNECESSARY_LOG_MESSAGE = 205009
+DELTA_MD5_CHECKSUM_ABSENT = 200010
+DIFF_DATASOURCE_MODIFIED = 225000
+DIR_NOT_EMPTY = 200011
+ENTRY_ATTRIBUTE_INVALID = 150005
+ENTRY_EXISTS = 150002
+ENTRY_FORBIDDEN = 150006
+ENTRY_MISSING_REVISION = 150003
+ENTRY_MISSING_URL = 150004
+ENTRY_NOT_FOUND = 150000
+EXTERNAL_PROGRAM = 200012
+FS_ALREADY_EXISTS = 160020
+FS_ALREADY_OPEN = 160002
+FS_BAD_LOCK_TOKEN = 160037
+FS_BERKELEY_DB = 160029
+FS_BERKELEY_DB_DEADLOCK = 160030
+FS_CLEANUP = 160001
+FS_CONFLICT = 160024
+FS_CORRUPT = 160004
+FS_GENERAL = 160000
+FS_ID_NOT_FOUND = 160014
+FS_INCORRECT_EDITOR_COMPLETION = 160050
+FS_LOCK_EXPIRED = 160041
+FS_LOCK_OWNER_MISMATCH = 160039
+FS_MALFORMED_SKEL = 160027
+FS_NOT_DIRECTORY = 160016
+FS_NOT_FILE = 160017
+FS_NOT_FOUND = 160013
+FS_NOT_ID = 160015
+FS_NOT_MUTABLE = 160019
+FS_NOT_OPEN = 160003
+FS_NOT_REVISION_ROOT = 160023
+FS_NOT_SINGLE_PATH_COMPONENT = 160018
+FS_NOT_TXN_ROOT = 160022
+FS_NO_LOCK_TOKEN = 160038
+FS_NO_SUCH_CHECKSUM_REP = 160048
+FS_NO_SUCH_COPY = 160011
+FS_NO_SUCH_ENTRY = 160008
+FS_NO_SUCH_LOCK = 160040
+FS_NO_SUCH_NODE_ORIGIN = 160046
+FS_NO_SUCH_REPRESENTATION = 160009
+FS_NO_SUCH_REVISION = 160006
+FS_NO_SUCH_STRING = 160010
+FS_NO_SUCH_TRANSACTION = 160007
+FS_NO_USER = 160034
+FS_OUT_OF_DATE = 160042
+FS_PATH_ALREADY_LOCKED = 160035
+FS_PATH_NOT_LOCKED = 160036
+FS_PATH_SYNTAX = 160005
+FS_PROP_BASEVALUE_MISMATCH = 160049
+FS_REP_BEING_WRITTEN = 160044
+FS_REP_CHANGED = 160025
+FS_REP_NOT_MUTABLE = 160026
+FS_ROOT_DIR = 160021
+FS_TRANSACTION_DEAD = 160031
+FS_TRANSACTION_NOT_DEAD = 160032
+FS_TRANSACTION_NOT_MUTABLE = 160012
+FS_TXN_NAME_TOO_LONG = 160045
+FS_TXN_OUT_OF_DATE = 160028
+FS_UNKNOWN_FS_TYPE = 160033
+FS_UNSUPPORTED_FORMAT = 160043
+FS_UNSUPPORTED_UPGRADE = 160047
+ILLEGAL_TARGET = 200009
+INCOMPLETE_DATA = 200003
+INCORRECT_PARAMS = 200004
+INVALID_DIFF_OPTION = 200016
+IO_CORRUPT_EOL = 135002
+IO_INCONSISTENT_EOL = 135000
+IO_PIPE_FRAME_ERROR = 135004
+IO_PIPE_READ_ERROR = 135005
+IO_PIPE_WRITE_ERROR = 135007
+IO_UNIQUE_NAMES_EXHAUSTED = 135003
+IO_UNKNOWN_EOL = 135001
+IO_WRITE_ERROR = 135006
+ITER_BREAK = 200023
+MALFORMED_FILE = 200002
+MERGEINFO_PARSE_ERROR = 200020
+NODE_UNEXPECTED_KIND = 145001
+NODE_UNKNOWN_KIND = 145000
+NO_APR_MEMCACHE = 200028
+NO_AUTH_FILE_PATH = 200018
+PLUGIN_LOAD_FAILURE = 200001
+PROPERTY_NOT_FOUND = 200017
+RA_DAV_ALREADY_EXISTS = 175005
+RA_DAV_CONN_TIMEOUT = 175012
+RA_DAV_CREATING_REQUEST = 175001
+RA_DAV_FORBIDDEN = 175013
+RA_DAV_INVALID_CONFIG_VALUE = 175006
+RA_DAV_MALFORMED_DATA = 175009
+RA_DAV_OPTIONS_REQ_FAILED = 175003
+RA_DAV_PATH_NOT_FOUND = 175007
+RA_DAV_PROPPATCH_FAILED = 175008
+RA_DAV_PROPS_NOT_FOUND = 175004
+RA_DAV_RELOCATED = 175011
+RA_DAV_REQUEST_FAILED = 175002
+RA_DAV_RESPONSE_HEADER_BADNESS = 175010
+RA_DAV_SOCK_INIT = 175000
+RA_ILLEGAL_URL = 170000
+RA_LOCAL_REPOS_NOT_FOUND = 180000
+RA_LOCAL_REPOS_OPEN_FAILED = 180001
+RA_NOT_AUTHORIZED = 170001
+RA_NOT_IMPLEMENTED = 170003
+RA_NOT_LOCKED = 170007
+RA_NO_REPOS_UUID = 170005
+RA_OUT_OF_DATE = 170004
+RA_PARTIAL_REPLAY_NOT_SUPPORTED = 170008
+RA_REPOS_ROOT_URL_MISMATCH = 170010
+RA_SERF_GSSAPI_INITIALISATION_FAILED = 230002
+RA_SERF_SSL_CERT_UNTRUSTED = 230001
+RA_SERF_SSPI_INITIALISATION_FAILED = 230000
+RA_SERF_WRAPPED_ERROR = 230003
+RA_SESSION_URL_MISMATCH = 170011
+RA_SVN_BAD_VERSION = 210006
+RA_SVN_CMD_ERR = 210000
+RA_SVN_CONNECTION_CLOSED = 210002
+RA_SVN_EDIT_ABORTED = 210008
+RA_SVN_IO_ERROR = 210003
+RA_SVN_MALFORMED_DATA = 210004
+RA_SVN_NO_MECHANISMS = 210007
+RA_SVN_REPOS_NOT_FOUND = 210005
+RA_SVN_UNKNOWN_CMD = 210001
+RA_UNKNOWN_AUTH = 170002
+RA_UNSUPPORTED_ABI_VERSION = 170006
+RA_UUID_MISMATCH = 170009
+REPOS_BAD_ARGS = 165002
+REPOS_BAD_REVISION_REPORT = 165004
+REPOS_DISABLED_FEATURE = 165006
+REPOS_HOOK_FAILURE = 165001
+REPOS_LOCKED = 165000
+REPOS_NO_DATA_FOR_REPORT = 165003
+REPOS_POST_COMMIT_HOOK_FAILED = 165007
+REPOS_POST_LOCK_HOOK_FAILED = 165008
+REPOS_POST_UNLOCK_HOOK_FAILED = 165009
+REPOS_UNSUPPORTED_UPGRADE = 165010
+REPOS_UNSUPPORTED_VERSION = 165005
+RESERVED_FILENAME_SPECIFIED = 200025
+REVNUM_PARSE_FAILURE = 200022
+SQLITE_BUSY = 200033
+SQLITE_CONSTRAINT = 200035
+SQLITE_ERROR = 200030
+SQLITE_READONLY = 200031
+SQLITE_RESETTING_FOR_ROLLBACK = 200034
+SQLITE_UNSUPPORTED_SCHEMA = 200032
+STREAM_MALFORMED_DATA = 140001
+STREAM_SEEK_NOT_SUPPORTED = 140003
+STREAM_UNEXPECTED_EOF = 140000
+STREAM_UNRECOGNIZED_DATA = 140002
+SVNDIFF_BACKWARD_VIEW = 185002
+SVNDIFF_CORRUPT_WINDOW = 185001
+SVNDIFF_INVALID_COMPRESSED_DATA = 185005
+SVNDIFF_INVALID_HEADER = 185000
+SVNDIFF_INVALID_OPS = 185003
+SVNDIFF_UNEXPECTED_END = 185004
+SWIG_PY_EXCEPTION_SET = 200013
+TEST_FAILED = 200006
+TEST_SKIPPED = 200027
+UNKNOWN_CAPABILITY = 200026
+UNKNOWN_CHANGELIST = 200024
+UNSUPPORTED_FEATURE = 200007
+UNVERSIONED_RESOURCE = 200005
+VERSION_MISMATCH = 200019
+WC_BAD_ADM_LOG = 155009
+WC_BAD_ADM_LOG_START = 155020
+WC_BAD_PATH = 155022
+WC_CANNOT_DELETE_FILE_EXTERNAL = 155030
+WC_CANNOT_MOVE_FILE_EXTERNAL = 155031
+WC_CHANGELIST_MOVE = 155029
+WC_CLEANUP_REQUIRED = 155037
+WC_CONFLICT_RESOLVER_FAILURE = 155027
+WC_COPYFROM_PATH_NOT_FOUND = 155028
+WC_CORRUPT = 155016
+WC_CORRUPT_TEXT_BASE = 155017
+WC_DB_ERROR = 155032
+WC_FOUND_CONFLICT = 155015
+WC_INVALID_LOCK = 155006
+WC_INVALID_OPERATION_DEPTH = 155038
+WC_INVALID_OP_ON_CWD = 155019
+WC_INVALID_RELOCATION = 155024
+WC_INVALID_SCHEDULE = 155023
+WC_INVALID_SWITCH = 155025
+WC_LEFT_LOCAL_MOD = 155012
+WC_LOCKED = 155004
+WC_MISMATCHED_CHANGELIST = 155026
+WC_MISSING = 155033
+WC_NODE_KIND_CHANGE = 155018
+WC_NOT_FILE = 155008
+WC_NOT_LOCKED = 155005
+WC_NOT_SYMLINK = 155034
+WC_NOT_UP_TO_DATE = 155011
+WC_NOT_WORKING_COPY = 155007
+WC_OBSTRUCTED_UPDATE = 155000
+WC_PATH_ACCESS_DENIED = 155039
+WC_PATH_FOUND = 155014
+WC_PATH_NOT_FOUND = 155010
+WC_PATH_UNEXPECTED_STATUS = 155035
+WC_SCHEDULE_CONFLICT = 155013
+WC_UNSUPPORTED_FORMAT = 155021
+WC_UNWIND_EMPTY = 155002
+WC_UNWIND_MISMATCH = 155001
+WC_UNWIND_NOT_EMPTY = 155003
+WC_UPGRADE_REQUIRED = 155036
+XML_ATTRIB_NOT_FOUND = 130000
+XML_MALFORMED = 130003
+XML_MISSING_ANCESTRY = 130001
+XML_UNESCAPABLE_DATA = 130004
+XML_UNKNOWN_ENCODING = 130002
diff --git a/subversion/tests/cmdline/svntest/factory.py b/subversion/tests/cmdline/svntest/factory.py
new file mode 100644
index 0000000..32093ea
--- /dev/null
+++ b/subversion/tests/cmdline/svntest/factory.py
@@ -0,0 +1,1919 @@
+#
+# factory.py: Automatically generate a (near-)complete new cmdline test
+# from a series of shell commands.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.tigris.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+
+## HOW TO USE:
+#
+# (1) Edit the py test script you want to enhance (for example
+# cmdline/basic_tests.py), add a new test header as usual.
+# Insert a call to factory.make() into the empty test:
+#
+# def my_new_test(sbox):
+# "my new test modifies iota"
+# svntest.factory.make(sbox, """
+# echo "foo" > A/D/foo
+# svn add A/D/foo
+# svn st
+# svn ci
+# """)
+#
+# (2) Add the test to the tests list at the bottom of the py file.
+# [...]
+# some_other_test,
+# my_new_test,
+# ]
+#
+#
+# (3) Run the test, paste the output back into your new test,
+# replacing the factory call.
+#
+# $ ./foo_tests.py my_new_test
+# OR
+# $ ./foo_tests.py my_new_test > new_test.snippet
+# OR
+# $ ./foo_tests.py my_new_test >> basic_tests.py
+# Then edit (e.g.) basic_tests.py to put the script in the right place.
+#
+# Ensure that the py script (e.g. basic_tests.py) has these imports,
+# so that the composed script that you pasted back finds everything
+# that it uses:
+# import os, shutil
+# from svntest import main, wc, actions, verify
+#
+# Be aware that you have to paste the result back to the .py file.
+#
+# Be more aware that you have to read every single line and understand
+# that it makes sense. If the current behaviour is wrong, you need to
+# make the changes to expect the correct behaviour and XFail() your test.
+#
+# factory.make() just probes the current situation and writes a test that
+# PASSES any success AND ANY FAILURE THAT IT FINDS. The resulting script
+# will never fail anything (if it works correctly), not even a failure.
+#
+# ### TODO: some sort of intelligent pasting directly into the
+# right place, like looking for the factory call,
+# inserting the new test there, back-up-ing the old file.
+#
+#
+# TROUBLESHOOTING
+# If the result has a problem somewhere along the middle, you can,
+# of course, only use the first part of the output result, maybe tweak
+# something, and continue with another factory.make() at the end of that.
+#
+# Or you can first do stuff to your sbox and then call factory on it.
+# Factory will notice if the sbox has already been built and calls
+# sbox.build() only if you didn't already.
+#
+# You can also have any number of factory.make() calls scattered
+# around "real" test code.
+#
+# Note that you can pass a prev_status and prev_disk to factory, to make
+# the expected_* trees re-use a pre-existing one in the test, entirely
+# for code beauty :P (has to match the wc_dir you will be using next).
+#
+#
+# YOU ARE CORDIALLY INVITED to add/tweak/change to your needs.
+# If you want to know what's going on, look at the switch()
+# function of TestFactory below.
+#
+#
+# DETAILS
+# =======
+#
+# The input to factory.make(sbox, input) is not "real" shell-script.
+# Factory goes to great lengths to try and understand your script; it
+# parses common shell operations during tests and translates them.
+#
+# All arguments are tokenized similarly to shell, so if you need a space
+# in an argument, use quotes.
+# echo "my content" > A/new_file
+# Quote char escaping is done like this:
+# echo "my \\" content" > A/new_file
+# echo 'my \\' content' > A/new_file
+# If you use r""" echo 'my \' content' > A/new_file """ (triple quotes
+# with a leading 'r' character), you don't need to double-escape any
+# characters.
+#
+# You can either supply multiple lines, or separate the lines with ';'.
+# factory.make(sbox, 'echo foo > bar; svn add bar')
+# factory.make(sbox, 'echo foo > bar\n svn add bar')
+# factory.make(sbox, r"""
+# echo "foo\nbar" > bar
+# svn add bar
+# """)
+#
+#
+# WORKING COPY PATHS
+# - Factory will automatically build sbox.wc_dir if you didn't do so yet.
+#
+# - If you supply any path or file name, factory will prepend sbox.wc_dir
+# to it.
+# echo more >> iota
+# --> main.file_append(
+# os.path.join(sbox.wc_dir, 'iota'),
+# "more")
+# You can also do so explicitly.
+# echo more >> wc_dir/iota
+# --> main.file_append(
+# os.path.join(sbox.wc_dir, 'iota'),
+# "more")
+#
+# Factory implies the sbox.wc_dir if you fail to supply an explicit
+# working copy dir. If you want to supply one explicitly, you can
+# choose among these wildcards:
+# 'wc_dir', 'wcdir', '$WC_DIR', '$WC' -- all expanded to sbox.wc_dir
+# For example:
+# 'svn mkdir wc_dir/A/D/X'
+# But as long as you want to use only the default sbox.wc_dir, you usually
+# don't need to supply any wc_dir-wildcard:
+# 'mkdir A/X' creates the directory sbox.wc_dir/A/X
+# (Factory tries to know which arguments of the commands you supplied
+# are eligible to be path arguments. If something goes wrong here, try
+# to fix factory.py to not mistake the arg for something different.
+# You usually just need to tweak some parameters to args2svntest() to
+# achieve correct expansion.)
+#
+# - If you want to use a second (or Nth) working copy, just supply any
+# working copy wildcard with any made-up suffix, e.g. like this:
+# 'svn st wc_dir_2' or 'svn info $WC_2'
+# Factory will detect that you used another wc_dir and will automatically
+# add a corresponding directory to your sbox. The directory will initially
+# be nonexistent, so call 'mkdir', 'svn co' or 'cp' before using:
+# 'cp wc_dir wc_dir_other' -- a copy of the current WC
+# 'svn co $URL wc_dir_new' -- a clean checkout
+# 'mkdir wc_dir_empty' -- an empty directory
+# You can subsequently use any wc-dir wildcard with your suffix added.
+#
+# cp wc_dir wc_dir_2
+# echo more >> wc_dir_2/iota
+# --> wc_dir_2 = sbox.add_wc_path('2')
+#     shutil.copytree(wc_dir, wc_dir_2)
+# main.file_append(
+# os.path.join(wc_dir_2, 'iota'),
+# "more")
+#
+#
+# URLs
+# Factory currently knows only one repository, thus only one repos root.
+# The wildcards you can use for it are:
+# 'url', '$URL'
+# A URL is not inserted automatically like wc_dir, you need to supply a
+# URL wildcard.
+# Alternatively, you can use '^/' URLs. However, that is in effect a different
+# test from an explicit entire URL. The test needs to chdir to the working
+# copy in order to find which URL '^/' should expand to.
+# (currently, factory will chdir to sbox.wc_dir. It will only chdir
+# to another working copy if one of the other arguments involved a WC.
+# ### TODO add a 'cd wc_dir_2' command to select another WC as default.)
+# Example:
+# 'svn co $URL Y' -- make a new nested working copy in sbox.wc_dir/Y
+# 'svn co $URL wc_dir_2' -- create a new separate working copy
+# 'svn cp ^/A ^/X' -- do a URL copy, creating $URL/X (branch)
+#
+#
+# SOME EXAMPLES
+# These commands should work:
+#
+# - "svn <subcommand> <options>"
+# Some subcommands are parsed specially, others by a catch-all default
+# parser (cmd_svn()), see switch().
+# 'svn commit', 'svn commit --force', 'svn ci wc_dir_2'
+# 'svn copy url/A url/X'
+#
+# - "echo contents > file" (replace)
+# "echo contents >> file" (append)
+# Calls main.file_write() / main.file_append().
+# 'echo "froogle" >> A/D/G/rho' -- append to an existing file
+# 'echo "bar" > A/quux' -- create a new file
+# 'echo "fool" > wc_dir_2/me' -- manipulate other working copies
+#
+# - "mkdir <names> ..."
+# Calls os.makedirs().
+# You probably want 'svn mkdir' instead, or use 'svn add' after this.
+# 'mkdir A/D/X' -- create an unversioned directory
+# 'mkdir wc_dir_5' -- create a new, empty working copy
+#
+# - "rm <targets>"
+# Calls main.safe_rmtree().
+# You probably want to use 'svn delete' instead.
+# 'rm A/D/G'
+# 'rm wc_dir_2'
+#
+# - "mv <source> [<source2> ...] <target>"
+# Calls shutil.move()
+# You probably want to use 'svn move' instead.
+# 'mv iota A/D/' -- move sbox.wc_dir/iota to sbox.wc_dir/A/D/.
+#
+# - "cp <source> [<source2> ...] <target>"
+# Do a filesystem copy.
+# You probably want to use 'svn copy' instead.
+# 'cp wc_dir wc_dir_copy'
+# 'cp A/D/G A/X'
+#
+# IF YOU NEED ANY OTHER COMMANDS:
+# - first check if it doesn't work already. If not,
+# - add your desired commands to factory.py! :)
+# - alternatively, use a number of separate factory calls, doing what
+# you need done in "real" svntest language in-between.
+#
+# IF YOU REALLY DON'T GROK THIS:
+# - ask #svn-dev
+# - ask dev@
+# - ask neels
+
+import sys, re, os, shutil, bisect, textwrap, shlex
+
+import svntest
+from svntest import main, actions, tree
+from svntest import Failure
+
+if sys.version_info[0] >= 3:
+ # Python >=3.0
+ from io import StringIO
+else:
+ # Python <3.0
+ from cStringIO import StringIO
+
+def make(wc_dir, commands, prev_status=None, prev_disk=None, verbose=True):
+ """The Factory Invocation Function. This is typically the only one
+ called from outside this file. See top comment in factory.py.
+ Prints the resulting py script to stdout when verbose is True and
+ returns the resulting line-list containing items as:
+ [ ['pseudo-shell input line #1', ' translation\n to\n py #1'], ...]"""
+ fac = TestFactory(wc_dir, prev_status, prev_disk)
+ fac.make(commands)
+ fac.print_script()
+ return fac.lines
+
+
+
+class TestFactory:
+ """This class keeps all state around a factory.make() call."""
+
+ def __init__(self, sbox, prev_status=None, prev_disk=None):
+ self.sbox = sbox
+
+ # The input lines and their translations.
+ # Each translation usually has multiple output lines ('\n' characters).
+ self.lines = [] # [ ['in1', 'out1'], ['in2', 'out'], ...
+
+ # Any expected_status still there from a previous verification
+ self.prev_status = None
+ if prev_status:
+ self.prev_status = [None, prev_status] # svntest.wc.State
+
+ # Any expected_disk still there from a previous verification
+ self.prev_disk = None
+ if prev_disk:
+ reparented_prev_disk = svntest.wc.State(prev_disk.wc_dir, {});
+ reparented_prev_disk.add_state(sbox.wc_dir, prev_disk);
+ self.prev_disk = [None, reparented_prev_disk]
+
+ # Those command line options that expect an argument following
+ # which is not a path. (don't expand args following these)
+ self.keep_args_of = ['--depth', '--encoding', '-r',
+ '--changelist', '-m', '--message']
+
+ # A stack of $PWDs, to be able to chdir back after a chdir.
+ self.prevdirs = []
+
+ # The python variables we want to be declared at the beginning.
+ # These are path variables like "A_D = os.path.join(wc_dir, 'A', 'D')".
+ # The original wc_dir and url vars are not kept here.
+ self.vars = {}
+
+ # An optimized list kept up-to-date by variable additions
+ self.sorted_vars_by_pathlen = []
+
+ # Whether we ever used the variables 'wc_dir' and 'url' (tiny tweak)
+ self.used_wc_dir = False
+ self.used_abs_wc_dir = False
+ self.used_url = False
+
+ # The alternate working copy directories created that need to be
+ # registered with sbox (are not inside another working copy).
+ self.other_wc_dirs = {}
+
+
+ def make(self, commands):
+ "internal main function, delegates everything except final output."
+
+ # keep a spacer for init
+ self.add_line(None, None)
+
+ init = ""
+ if not self.sbox.is_built():
+ self.sbox.build()
+ init += "sbox.build()\n"
+
+
+ try:
+ # split input args
+ input_lines = commands.replace(';','\n').splitlines()
+ for str in input_lines:
+ if len(str.strip()) > 0:
+ self.add_line(str)
+
+ for i in range(len(self.lines)):
+ if self.lines[i][0] is not None:
+ # This is where everything happens:
+ self.lines[i][1] = self.switch(self.lines[i][0])
+
+ # We're done. Add a final greeting.
+ self.add_line(
+ None,
+ "Remember, this only saves you typing. Doublecheck everything.")
+
+ # -- Insert variable defs in the first line --
+ # main wc_dir and url
+ if self.used_wc_dir:
+ init += 'wc_dir = sbox.wc_dir\n'
+ if self.used_abs_wc_dir:
+ init += 'abs_wc_dir = os.path.abspath(sbox.wc_dir)\n'
+ if self.used_url:
+ init += 'url = sbox.repo_url\n'
+
+ # registration of new WC dirs
+ sorted_names = self.get_sorted_other_wc_dir_names()
+ for name in sorted_names:
+ init += name + ' = ' + self.other_wc_dirs[name][0] + '\n'
+
+ if len(init) > 0:
+ init += '\n'
+
+ # general variable definitions
+ sorted_names = self.get_sorted_var_names()
+ for name in sorted_names:
+ init += name + ' = ' + self.vars[name][0] + '\n'
+
+ # Insert at the first line, being the spacer from above
+ if len(init) > 0:
+ self.lines[0][1] = init
+
+ # This usually goes to make() below (outside this class)
+ return self.lines
+ except:
+ for line in self.lines:
+ if line[1] is not None:
+ print(line[1])
+ raise
+
+
+ def print_script(self, stream=sys.stdout):
+ "Output the resulting script of the preceding make() call"
+ if self.lines is not None:
+ for line in self.lines:
+ if line[1] is None:
+ # fall back to just that line as it was in the source
+ stripped = line[0].strip()
+ if not stripped.startswith('#'):
+ # for comments, don't say this:
+ stream.write(" # don't know how to handle:\n")
+ stream.write(" " + line[0].strip() + '\n')
+ else:
+ if line[0] is not None:
+ stream.write( wrap_each_line(line[0].strip(),
+ " # ", " # ", True) + '\n')
+ stream.write(wrap_each_line(line[1], " ", " ", False) + '\n\n')
+ else:
+ stream.write(" # empty.\n")
+ stream.flush()
+
+
+ # End of public functions.
+
+
+
+ # "Shell" command handlers:
+
+ def switch(self, line):
+ "Given one input line, delegates to the appropriate sub-functions."
+ args = shlex.split(line)
+ if len(args) < 1:
+ return ""
+ first = args[0]
+
+ # This is just an if-cascade. Feel free to change that.
+
+ if first == 'svn':
+ second = args[1]
+
+ if second == 'add':
+ return self.cmd_svn(args[1:], False, self.keep_args_of)
+
+ if second in ['changelist', 'cl']:
+ keep_count = 2
+ if '--remove' in args:
+ keep_count = 1
+ return self.cmd_svn(args[1:], False, self.keep_args_of, keep_count)
+
+ if second in ['status','stat','st']:
+ return self.cmd_svn_status(args[2:])
+
+ if second in ['commit','ci']:
+ return self.cmd_svn_commit(args[2:])
+
+ if second in ['update','up']:
+ return self.cmd_svn_update(args[2:])
+
+ if second in ['switch','sw']:
+ return self.cmd_svn_switch(args[2:])
+
+ if second in ['copy', 'cp',
+ 'move', 'mv', 'rename', 'ren']:
+ return self.cmd_svn_copy_move(args[1:])
+
+ if second in ['checkout', 'co']:
+ return self.cmd_svn_checkout(args[2:])
+
+ if second in ['propset','pset','ps']:
+ multiline_args = [arg.replace(r'\n', '\n') for arg in args[1:]]
+ return self.cmd_svn(multiline_args, False,
+ self.keep_args_of, 3)
+
+ if second in ['propget','pget','pg']:
+ return self.cmd_svn(args[1:], False,
+ self.keep_args_of, 2)
+
+ if second in ['delete','del','remove', 'rm']:
+ return self.cmd_svn(args[1:], False,
+ self.keep_args_of + ['--with-revprop'])
+
+ # NOTE that not all commands need to be listed here, since
+ # some are already adequately handled by self.cmd_svn().
+ # If you find yours is not, add another self.cmd_svn_xxx().
+ return self.cmd_svn(args[1:], False, self.keep_args_of)
+
+ if first == 'echo':
+ return self.cmd_echo(args[1:])
+
+ if first == 'mkdir':
+ return self.cmd_mkdir(args[1:])
+
+ if first == 'rm':
+ return self.cmd_rm(args[1:])
+
+ if first == 'mv':
+ return self.cmd_mv(args[1:])
+
+ if first == 'cp':
+ return self.cmd_cp(args[1:])
+
+ # if all fails, take the line verbatim
+ return None
+
+
+ def cmd_svn_standard_run(self, pyargs, runargs, do_chdir, wc):
+ "The generic invocation of svn, helper function."
+ pychdir = self.chdir(do_chdir, wc)
+
+ code, out, err = main.run_svn("Maybe", *runargs)
+
+ if code == 0 and len(err) < 1:
+ # write a test that expects success
+ pylist = self.strlist2py(out)
+ if len(out) <= 1:
+ py = "expected_stdout = " + pylist + "\n\n"
+ else:
+ py = "expected_stdout = verify.UnorderedOutput(" + pylist + ")\n\n"
+ py += pychdir
+ py += "actions.run_and_verify_svn2(expected_stdout, [], 0"
+ else:
+ # write a test that expects failure
+ pylist = self.strlist2py(err)
+ if len(err) <= 1:
+ py = "expected_stderr = " + pylist + "\n\n"
+ else:
+ py = "expected_stderr = verify.UnorderedOutput(" + pylist + ")\n\n"
+ py += pychdir
+ py += ("actions.run_and_verify_svn2([], expected_stderr, " + str(code))
+
+ if len(pyargs) > 0:
+ py += ", " + ", ".join(pyargs)
+ py += ")\n"
+ py += self.chdir_back(do_chdir)
+ return py
+
+
+ def cmd_svn(self, svnargs, append_wc_dir_if_missing = False,
+ keep_args_of = [], keep_first_count = 1,
+ drop_with_arg = []):
+ "Handles all svn calls not handled by more specific functions."
+
+ pyargs, runargs, do_chdir, targets = self.args2svntest(svnargs,
+ append_wc_dir_if_missing, keep_args_of,
+ keep_first_count, drop_with_arg)
+
+ return self.cmd_svn_standard_run(pyargs, runargs, do_chdir,
+ self.get_first_wc(targets))
+
+
+ def cmd_svn_status(self, status_args):
+ "Runs svn status, looks what happened and writes the script for it."
+ pyargs, runargs, do_chdir, targets = self.args2svntest(
+ status_args, True, self.keep_args_of, 0)
+
+ py = ""
+
+ for target in targets:
+ if not target.wc:
+ py += '# SKIPPING NON-WC ' + target.runarg + '\n'
+ continue
+
+ if '-q' in status_args:
+ pystatus = self.get_current_status(target.wc, True)
+ py += (pystatus +
+ "actions.run_and_verify_status(" + target.wc.py +
+ ", expected_status)\n")
+ else:
+ pystatus = self.get_current_status(target.wc, False)
+ py += (pystatus +
+ "actions.run_and_verify_unquiet_status(" + target.wc.py +
+ ", expected_status)\n")
+ return py
+
+
+ def cmd_svn_commit(self, commit_args):
+ "Runs svn commit, looks what happened and writes the script for it."
+ # these are the options that are followed by something that should not
+ # be parsed as a filename in the WC.
+ commit_arg_opts = [
+ "--depth",
+ "--with-revprop",
+ "--changelist",
+ # "-F", "--file", these take a file argument, don't list here.
+ # "-m", "--message", treated separately
+ ]
+
+ pyargs, runargs, do_chdir, targets = self.args2svntest(
+ commit_args, True, commit_arg_opts, 0, ['-m', '--message'])
+
+ wc = self.get_first_wc(targets)
+ pychdir = self.chdir(do_chdir, wc)
+
+ code, output, err = main.run_svn("Maybe", 'ci',
+ '-m', 'log msg',
+ *runargs)
+
+ if code == 0 and len(err) < 1:
+ # write a test that expects success
+
+ output = actions.process_output_for_commit(output)
+ actual_out = tree.build_tree_from_commit(output)
+ py = ("expected_output = " +
+ self.tree2py(actual_out, wc) + "\n\n")
+
+ pystatus = self.get_current_status(wc)
+ py += pystatus
+
+ py += pychdir
+ py += ("actions.run_and_verify_commit(" + wc.py + ", " +
+ "expected_output, expected_status, " +
+ "None")
+ else:
+ # write a test that expects error
+ py = "expected_error = " + self.strlist2py(err) + "\n\n"
+ py += pychdir
+ py += ("actions.run_and_verify_commit(" + wc.py + ", " +
+ "None, None, expected_error")
+
+ if len(pyargs) > 0:
+ py += ', ' + ', '.join(pyargs)
+ py += ")"
+ py += self.chdir_back(do_chdir)
+ return py
+
+
+ def cmd_svn_update(self, update_args):
+ "Runs svn update, looks what happened and writes the script for it."
+
+ pyargs, runargs, do_chdir, targets = self.args2svntest(
+ update_args, True, self.keep_args_of, 0)
+
+ wc = self.get_first_wc(targets)
+ pychdir = self.chdir(do_chdir, wc)
+
+ code, output, err = main.run_svn('Maybe', 'up', *runargs)
+
+ if code == 0 and len(err) < 1:
+ # write a test that expects success
+
+ actual_out = svntest.wc.State.from_checkout(output).old_tree()
+ py = ("expected_output = " +
+ self.tree2py(actual_out, wc) + "\n\n")
+
+ pydisk = self.get_current_disk(wc)
+ py += pydisk
+
+ pystatus = self.get_current_status(wc)
+ py += pystatus
+
+ py += pychdir
+ py += ("actions.run_and_verify_update(" + wc.py + ", " +
+ "expected_output, expected_disk, expected_status, " +
+ "[], False")
+ else:
+ # write a test that expects error
+ py = "expected_error = " + self.strlist2py(err) + "\n\n"
+ py += pychdir
+ py += ("actions.run_and_verify_update(" + wc.py + ", None, None, " +
+ "None, expected_error")
+
+ if len(pyargs) > 0:
+ py += ', ' + ', '.join(pyargs)
+ py += ")"
+ py += self.chdir_back(do_chdir)
+ return py
+
+
+ def cmd_svn_switch(self, switch_args):
+ "Runs svn switch, looks what happened and writes the script for it."
+
+ pyargs, runargs, do_chdir, targets = self.args2svntest(
+ switch_args, True, self.keep_args_of, 0)
+
+ # Sort out the targets. We need one URL and one wc node, in that order.
+ if len(targets) < 2:
+ raise Failure("Sorry, I'm currently enforcing two targets for svn " +
+ "switch. If you want to supply less, remove this " +
+ "check and implement whatever seems appropriate.")
+
+ wc_arg = targets[1]
+ del pyargs[wc_arg.argnr]
+ del runargs[wc_arg.argnr]
+ url_arg = targets[0]
+ del pyargs[url_arg.argnr]
+ del runargs[url_arg.argnr]
+
+ wc = wc_arg.wc
+ if not wc:
+ raise Failure("Unexpected argument ordering to factory's 'svn switch'?")
+
+ pychdir = self.chdir(do_chdir, wc)
+
+ #if '--force' in runargs:
+ # self.really_safe_rmtree(wc_arg.runarg)
+
+ code, output, err = main.run_svn('Maybe', 'sw',
+ url_arg.runarg, wc_arg.runarg,
+ *runargs)
+
+ py = ""
+
+ if code == 0 and len(err) < 1:
+ # write a test that expects success
+
+ actual_out = tree.build_tree_from_checkout(output)
+ py = ("expected_output = " +
+ self.tree2py(actual_out, wc) + "\n\n")
+
+ pydisk = self.get_current_disk(wc)
+ py += pydisk
+
+ pystatus = self.get_current_status(wc)
+ py += pystatus
+
+ py += pychdir
+ py += ("actions.run_and_verify_switch(" + wc.py + ", " +
+ wc_arg.pyarg + ", " + url_arg.pyarg + ", " +
+ "expected_output, expected_disk, expected_status, " +
+ "[], False")
+ else:
+ # write a test that expects error
+ py = "expected_error = " + self.strlist2py(err) + "\n\n"
+ py += pychdir
+ py += ("actions.run_and_verify_switch(" + wc.py + ", " +
+ wc_arg.pyarg + ", " + url_arg.pyarg + ", " +
+ "None, None, None, expected_error, False")
+
+ if len(pyargs) > 0:
+ py += ', ' + ', '.join(pyargs)
+ py += ")"
+ py += self.chdir_back(do_chdir)
+
+ return py
+
+
+ def cmd_svn_checkout(self, checkout_args):
+ "Runs svn checkout, looks what happened and writes the script for it."
+
+ pyargs, runargs, do_chdir, targets = self.args2svntest(
+ checkout_args, True, self.keep_args_of, 0)
+
+ # Sort out the targets. We need one URL and one dir, in that order.
+ if len(targets) < 2:
+ raise Failure("Sorry, I'm currently enforcing two targets for svn " +
+ "checkout. If you want to supply less, remove this " +
+ "check and implement whatever seems appropriate.")
+ # We need this separate for the call to run_and_verify_checkout()
+ # that's composed in the output script.
+ wc_arg = targets[1]
+ del pyargs[wc_arg.argnr]
+ del runargs[wc_arg.argnr]
+ url_arg = targets[0]
+ del pyargs[url_arg.argnr]
+ del runargs[url_arg.argnr]
+
+ wc = wc_arg.wc
+
+ pychdir = self.chdir(do_chdir, wc)
+
+ #if '--force' in runargs:
+ # self.really_safe_rmtree(wc_arg.runarg)
+
+ code, output, err = main.run_svn('Maybe', 'co',
+ url_arg.runarg, wc_arg.runarg,
+ *runargs)
+
+ py = ""
+
+ if code == 0 and len(err) < 1:
+ # write a test that expects success
+
+ actual_out = tree.build_tree_from_checkout(output)
+ pyout = ("expected_output = " +
+ self.tree2py(actual_out, wc) + "\n\n")
+ py += pyout
+
+ pydisk = self.get_current_disk(wc)
+ py += pydisk
+
+ py += pychdir
+
+ py += ("actions.run_and_verify_checkout(" +
+ url_arg.pyarg + ", " + wc_arg.pyarg +
+ ", expected_output, expected_disk")
+ else:
+ # write a test that expects failure
+ pylist = self.strlist2py(err)
+ if len(err) <= 1:
+ py += "expected_stderr = " + pylist + "\n\n"
+ else:
+ py += "expected_stderr = verify.UnorderedOutput(" + pylist + ")\n\n"
+ py += pychdir
+ py += ("actions.run_and_verify_svn2([], expected_stderr, " + str(code) +
+ ", " + url_arg.pyarg + ", " + wc_arg.pyarg)
+
+ # Append the remaining args
+ if len(pyargs) > 0:
+ py += ', ' + ', '.join(pyargs)
+ py += ")"
+ py += self.chdir_back(do_chdir)
+ return py
+
+
+ def cmd_svn_copy_move(self, args):
+ "Runs svn copy or move, looks what happened and writes the script for it."
+
+ pyargs, runargs, do_chdir, targets = self.args2svntest(args,
+ False, self.keep_args_of, 1)
+
+ if len(targets) == 2 and targets[1].is_url:
+ # The second argument is a URL.
+ # This needs a log message. Is one supplied?
+ has_message = False
+ for arg in runargs:
+ if arg.startswith('-m') or arg == '--message':
+ has_message = True
+ break
+ if not has_message:
+ # add one
+ runargs += [ '-m', 'copy log' ]
+ pyargs = []
+ for arg in runargs:
+ pyargs += [ self.str2svntest(arg) ]
+
+ return self.cmd_svn_standard_run(pyargs, runargs, do_chdir,
+ self.get_first_wc(targets))
+
+
+ def cmd_echo(self, echo_args):
+ "Writes a string to a file and writes the script for it."
+ # split off target
+ target_arg = None
+ replace = True
+ contents = None
+ for i in range(len(echo_args)):
+ arg = echo_args[i]
+ if arg.startswith('>'):
+ if len(arg) > 1:
+ if arg[1] == '>':
+ # it's a '>>'
+ replace = False
+ arg = arg[2:]
+ else:
+ arg = arg[1:]
+ if len(arg) > 0:
+ target_arg = arg
+
+ if target_arg is None:
+ # we need an index (i+1) to exist, and
+ # we need (i+1) to be the only existing index left in the list.
+ if i+1 != len(echo_args)-1:
+ raise Failure("don't understand: echo " + " ".join(echo_args))
+ target_arg = echo_args[i+1]
+ else:
+ # already got the target. no more indexes should exist.
+ if i != len(echo_args)-1:
+ raise Failure("don't understand: echo " + " ".join(echo_args))
+
+ contents = " ".join(echo_args[:i]) + '\n'
+
+ if target_arg is None:
+ raise Failure("echo needs a '>' pipe to a file name: echo " +
+ " ".join(echo_args))
+
+ target = self.path2svntest(target_arg)
+
+ if replace:
+ main.file_write(target.runarg, contents)
+ py = "main.file_write("
+ else:
+ main.file_append(target.runarg, contents)
+ py = "main.file_append("
+ py += target.pyarg + ", " + self.str2svntest(contents) + ")"
+
+ return py
+
+
+ def cmd_mkdir(self, mkdir_args):
+ "Makes a new directory and writes the script for it."
+ # treat all mkdirs as -p, ignore all -options.
+ out = ""
+ for arg in mkdir_args:
+ if not arg.startswith('-'):
+ target = self.path2svntest(arg)
+ # don't check for not being a url,
+ # maybe it's desired by the test or something.
+ os.makedirs(target.runarg)
+ out += "os.makedirs(" + target.pyarg + ")\n"
+ return out
+
+
+ def cmd_rm(self, rm_args):
+ "Removes a directory tree and writes the script for it."
+ # treat all removes as -rf, ignore all -options.
+ out = ""
+ for arg in rm_args:
+ if not arg.startswith('-'):
+ target = self.path2svntest(arg)
+ if os.path.isfile(target.runarg):
+ os.remove(target.runarg)
+ out += "os.remove(" + target.pyarg + ")\n"
+ else:
+ self.really_safe_rmtree(target.runarg)
+ out += "main.safe_rmtree(" + target.pyarg + ")\n"
+ return out
+
+
+ def cmd_mv(self, mv_args):
+ "Moves things in the filesystem and writes the script for it."
+ # ignore all -options.
+ out = ""
+ sources = []
+ target = None
+ for arg in mv_args:
+ if not arg.startswith('-'):
+ if target is not None:
+ sources += [target]
+ target = self.path2svntest(arg)
+
+ out = ""
+ for source in sources:
+ out += "shutil.move(" + source.pyarg + ", " + target.pyarg + ")\n"
+ shutil.move(source.runarg, target.runarg)
+
+ return out
+
+
+ def cmd_cp(self, mv_args):
+ "Copies in the filesystem and writes the script for it."
+ # ignore all -options.
+ out = ""
+ sources = []
+ target = None
+ for arg in mv_args:
+ if not arg.startswith('-'):
+ if target is not None:
+ sources += [target]
+ target = self.path2svntest(arg)
+
+ if not target:
+ raise Failure("cp needs a source and a target 'cp wc_dir wc_dir_2'")
+
+ out = ""
+ for source in sources:
+ if os.path.exists(target.runarg):
+ raise Failure("cp target exists, remove first: " + target.pyarg)
+ if os.path.isdir(source.runarg):
+ shutil.copytree(source.runarg, target.runarg)
+ out += "shutil.copytree(" + source.pyarg + ", " + target.pyarg + ")\n"
+ elif os.path.isfile(source.runarg):
+ shutil.copy2(source.runarg, target.runarg)
+ out += "shutil.copy2(" + source.pyarg + ", " + target.pyarg + ")\n"
+ else:
+ raise Failure("cp copy source does not exist: " + source.pyarg)
+
+ return out
+
+
+ # End of "shell" command handling functions.
+
+
+
+ # Internal helpers:
+
+
+ class WorkingCopy:
+ "Defines the list of info we need around a working copy."
+ def __init__(self, py, realpath, suffix):
+ self.py = py
+ self.realpath = realpath
+ self.suffix = suffix
+
+
+ class Target:
+ "Defines the list of info we need around a command line supplied target."
+ def __init__(self, pyarg, runarg, argnr, is_url=False, wc=None):
+ self.pyarg = pyarg
+ self.runarg = runarg
+ self.argnr = argnr
+ self.is_url = is_url
+ self.wc = wc
+
+
+ def add_line(self, args, translation=None):
+ "Definition of how to add a new in/out line pair to LINES."
+ self.lines += [ [args, translation] ]
+
+
+ def really_safe_rmtree(self, dir):
+ # Safety catch. We don't want to remove outside the sandbox.
+ if dir.find('svn-test-work') < 0:
+ raise Failure("Tried to remove path outside working area: " + dir)
+ main.safe_rmtree(dir)
+
+
+ def get_current_disk(self, wc):
+ "Probes the given working copy and writes an expected_disk for it."
+ actual_disk = svntest.wc.State.from_wc(wc.realpath, False, True)
+ actual_disk.wc_dir = wc.realpath
+
+ make_py, prev_disk = self.get_prev_disk(wc)
+
+ # The tests currently compare SVNTreeNode trees, so let's do that too.
+ actual_disk_tree = actual_disk.old_tree()
+ prev_disk_tree = prev_disk.old_tree()
+
+ # find out the tweaks
+ tweaks = self.diff_trees(prev_disk_tree, actual_disk_tree, wc)
+ if tweaks == 'Purge':
+ make_py = ''
+ else:
+ tweaks = self.optimize_tweaks(tweaks, actual_disk_tree, wc)
+
+ self.remember_disk(wc, actual_disk)
+
+ pydisk = make_py + self.tweaks2py(tweaks, "expected_disk", wc)
+ if len(pydisk) > 0:
+ pydisk += '\n'
+ return pydisk
+
+ def get_prev_disk(self, wc):
+ "Retrieves the last used expected_disk tree if any."
+ make_py = ""
+ # If a disk was supplied via __init__(), self.prev_disk[0] is set
+ # to None, in which case we always use it, not checking WC.
+ if self.prev_disk is None or \
+ not self.prev_disk[0] in [None, wc.realpath]:
+ disk = svntest.main.greek_state.copy()
+ disk.wc_dir = wc.realpath
+ self.remember_disk(wc, disk)
+ make_py = "expected_disk = svntest.main.greek_state.copy()\n"
+ else:
+ disk = self.prev_disk[1]
+ return make_py, disk
+
+ def remember_disk(self, wc, actual):
+ "Remembers the current disk tree for future reference."
+ self.prev_disk = [wc.realpath, actual]
+
+
+ def get_current_status(self, wc, quiet=True):
+ "Probes the given working copy and writes an expected_status for it."
+ if quiet:
+ code, output, err = main.run_svn(None, 'status', '-v', '-u', '-q',
+ wc.realpath)
+ else:
+ code, output, err = main.run_svn(None, 'status', '-v', '-u',
+ wc.realpath)
+ if code != 0 or len(err) > 0:
+ raise Failure("Hmm. `svn status' failed. What now.")
+
+ make_py, prev_status = self.get_prev_status(wc)
+
+ actual_status = svntest.wc.State.from_status(output, wc_dir=wc.realpath)
+
+ # The tests currently compare SVNTreeNode trees, so let's do that too.
+ prev_status_tree = prev_status.old_tree()
+ actual_status_tree = actual_status.old_tree()
+
+ # Get the tweaks
+ tweaks = self.diff_trees(prev_status_tree, actual_status_tree, wc)
+
+ if tweaks == 'Purge':
+ # The tree is empty (happens with invalid WC dirs)
+ make_py = "expected_status = wc.State(" + wc.py + ", {})\n"
+ tweaks = []
+ else:
+ tweaks = self.optimize_tweaks(tweaks, actual_status_tree, wc)
+
+ self.remember_status(wc, actual_status)
+
+ pystatus = make_py + self.tweaks2py(tweaks, "expected_status", wc)
+ if len(pystatus) > 0:
+ pystatus += '\n'
+
+ return pystatus
+
+ def get_prev_status(self, wc):
+ "Retrieves the last used expected_status tree if any."
+ make_py = ""
+ prev_status = None
+
+ # re-use any previous status if we are still in the same WC dir.
+ # If a status was supplied via __init__(), self.prev_status[0] is set
+ # to None, in which case we always use it, not checking WC.
+ if self.prev_status is None or \
+ not self.prev_status[0] in [None, wc.realpath]:
+ # There is no or no matching previous status. Make new one.
+ try:
+ # If it's really a WC, use its base revision
+ base_rev = actions.get_wc_base_rev(wc.realpath)
+ except:
+ # Else, just use zero. Whatever.
+ base_rev = 0
+ prev_status = actions.get_virginal_state(wc.realpath, base_rev)
+ make_py += ("expected_status = actions.get_virginal_state(" +
+ wc.py + ", " + str(base_rev) + ")\n")
+ else:
+ # We will re-use the previous expected_status.
+ prev_status = self.prev_status[1]
+ # no need to make_py anything
+
+ return make_py, prev_status
+
+ def remember_status(self, wc, actual_status):
+ "Remembers the current status tree for future reference."
+ self.prev_status = [wc.realpath, actual_status]
+
+
+ def chdir(self, do_chdir, wc):
+ "Pushes the current dir onto the dir stack, does an os.chdir()."
+ if not do_chdir:
+ return ""
+ self.prevdirs.append(os.getcwd())
+ os.chdir(wc.realpath)
+ py = ("orig_dir = os.getcwd() # Need to chdir because of '^/' args\n" +
+ "os.chdir(" + wc.py + ")\n")
+ return py
+
+ def chdir_back(self, do_chdir):
+ "Does os.chdir() back to the directory popped from the dir stack's top."
+ if not do_chdir:
+ return ""
+ # If this fails, there's a missing chdir() call:
+ os.chdir(self.prevdirs.pop())
+ return "os.chdir(orig_dir)\n"
+
+
+ def get_sorted_vars_by_pathlen(self):
+ """Compose a listing of variable names to be expanded in script output.
+ This is intended to be stored in self.sorted_vars_by_pathlen."""
+ lst = []
+
+ for dict in [self.vars, self.other_wc_dirs]:
+ for name in dict:
+ runpath = dict[name][1]
+ if not runpath:
+ continue
+ strlen = len(runpath)
+ item = (strlen, name, runpath)
+ bisect.insort(lst, item)
+
+ return lst
+
+
+ def get_sorted_var_names(self):
+ """Compose a listing of variable names to be declared.
+ This is used by TestFactory.make()."""
+ paths = []
+ urls = []
+ for name in self.vars:
+ if name.startswith('url_'):
+ bisect.insort(urls, (name.lower(), name))
+ else:
+ bisect.insort(paths, (name.lower(), name))
+ list = []
+ for path in paths:
+ list.append(path[1])
+ for url in urls:
+ list.append(url[1])
+ return list
+
+
+ def get_sorted_other_wc_dir_names(self):
+ """Compose a listing of working copies to be declared with sbox.
+ This is used by TestFactory.make()."""
+ list = []
+ for name in self.other_wc_dirs:
+ bisect.insort(list, [name.lower(), name])
+ names = []
+ for item in list:
+ names += [item[1]]
+ return names
+
+
+ def str2svntest(self, str):
+ "Like str2py(), but replaces any known paths with variable names."
+ if str is None:
+ return "None"
+
+ str = str2py(str)
+ quote = str[0]
+
+ def replace(str, path, name, quote):
+ return str.replace(path, quote + " + " + name + " + " + quote)
+
+ # We want longer paths first.
+ for var in reversed(self.sorted_vars_by_pathlen):
+ name = var[1]
+ path = var[2]
+ str = replace(str, path, name, quote)
+
+ str2 = replace(str, os.path.abspath(self.sbox.wc_dir), 'abs_wc_dir', quote)
+ if str != str2:
+ self.used_abs_wc_dir = True
+ str = str2
+
+ str2 = replace(str, self.sbox.wc_dir, 'wc_dir', quote)
+ if str != str2:
+ self.used_wc_dir = True
+ str = str2
+
+ str2 = replace(str, self.sbox.repo_url, 'url', quote)
+ if str != str2:
+ self.used_url = True
+ str = str2
+
+ # now remove trailing null-str adds:
+ # '' + url_A_C + ''
+ str = str.replace("'' + ",'').replace(" + ''",'')
+ # "" + url_A_C + ""
+ str = str.replace('"" + ',"").replace(' + ""',"")
+
+ # just a stupid check. tiny tweak. (don't declare wc_dir and url
+ # if they never appear)
+ if not self.used_wc_dir:
+ self.used_wc_dir = (re.search('\bwc_dir\b', str) is not None)
+ if not self.used_url:
+ self.used_url = str.find('url') >= 0
+
+ return str
+
+
+ def strlist2py(self, list):
+ "Given a list of strings, composes a py script that produces the same."
+ if list is None:
+ return "None"
+ if len(list) < 1:
+ return "[]"
+ if len(list) == 1:
+ return "[" + self.str2svntest(list[0]) + "]"
+
+ py = "[\n"
+ for line in list:
+ py += " " + self.str2svntest(line) + ",\n"
+ py += "]"
+ return py
+
+
+ def get_node_path(self, node, wc):
+ "Tries to return the node path relative to the given working copy."
+ path = node.get_printable_path()
+ if path.startswith(wc.realpath + os.sep):
+ path = path[len(wc.realpath + os.sep):]
+ elif path.startswith(wc.realpath):
+ path = path[len(wc.realpath):]
+ return path
+
+
+ def node2py(self, node, wc, prepend="", drop_empties=True):
+ "Creates a line like 'A/C' : Item({ ... }) for wc.State composition."
+ buf = StringIO()
+ node.print_script(buf, wc.realpath, prepend, drop_empties)
+ return buf.getvalue()
+
+
+ def tree2py(self, node, wc):
+ "Writes the wc.State definition for the given SVNTreeNode in given WC."
+ # svntest.wc.State(wc_dir, {
+ # 'A/mu' : Item(verb='Sending'),
+ # 'A/D/G/rho' : Item(verb='Sending'),
+ # })
+ buf = StringIO()
+ tree.dump_tree_script(node, stream=buf, subtree=wc.realpath,
+ wc_varname=wc.py)
+ return buf.getvalue()
+
+
+ def diff_trees(self, left, right, wc):
+ """Compares the two trees given by the SVNTreeNode instances LEFT and
+ RIGHT in the given working copy and composes an internal list of
+ tweaks necessary to make LEFT into RIGHT."""
+ if not right.children:
+ return 'Purge'
+ return self._diff_trees(left, right, wc)
+
+ def _diff_trees(self, left, right, wc):
+ "Used by self.diff_trees(). No need to call this. See there."
+ # all tweaks collected
+ tweaks = []
+
+ # the current tweak in composition
+ path = self.get_node_path(left, wc)
+ tweak = []
+
+ # node attributes
+ if ((left.contents is None) != (right.contents is None)) or \
+ (left.contents != right.contents):
+ tweak += [ ["contents", right.contents] ]
+
+ for key in left.props:
+ if key not in right.props:
+ tweak += [ [key, None] ]
+ elif left.props[key] != right.props[key]:
+ tweak += [ [key, right.props[key]] ]
+
+ for key in right.props:
+ if key not in left.props:
+ tweak += [ [key, right.props[key]] ]
+
+ for key in left.atts:
+ if key not in right.atts:
+ tweak += [ [key, None] ]
+ elif left.atts[key] != right.atts[key]:
+ tweak += [ [key, right.atts[key]] ]
+
+ for key in right.atts:
+ if key not in left.atts:
+ tweak += [ [key, right.atts[key]] ]
+
+ if len(tweak) > 0:
+ changetweak = [ 'Change', [path], tweak]
+ tweaks += [changetweak]
+
+ if left.children is not None:
+ for leftchild in left.children:
+ rightchild = None
+ if right.children is not None:
+ rightchild = tree.get_child(right, leftchild.name)
+ if rightchild is None:
+ paths = leftchild.recurse(lambda n: self.get_node_path(n, wc))
+ removetweak = [ 'Remove', paths ]
+ tweaks += [removetweak]
+
+ if right.children is not None:
+ for rightchild in right.children:
+ leftchild = None
+ if left.children is not None:
+ leftchild = tree.get_child(left, rightchild.name)
+ if leftchild is None:
+ paths_and_nodes = rightchild.recurse(
+ lambda n: [ self.get_node_path(n, wc), n ] )
+ addtweak = [ 'Add', paths_and_nodes ]
+ tweaks += [addtweak]
+ else:
+ tweaks += self._diff_trees(leftchild, rightchild, wc)
+
+ return tweaks
+
+
+ def optimize_tweaks(self, tweaks, actual_tree, wc):
+ "Given an internal list of tweaks, make them optimal by common sense."
+ if tweaks == 'Purge':
+ return tweaks
+
+ subtree = actual_tree.find_node(wc.realpath)
+ if not subtree:
+ subtree = actual_tree
+
+ remove_paths = []
+ additions = []
+ changes = []
+
+ for tweak in tweaks:
+ if tweak[0] == 'Remove':
+ remove_paths += tweak[1]
+ elif tweak[0] == 'Add':
+ additions += tweak[1]
+ else:
+ changes += [tweak]
+
+ # combine removals
+ removal = []
+ if len(remove_paths) > 0:
+ removal = [ [ 'Remove', remove_paths] ]
+
+ # combine additions
+ addition = []
+ if len(additions) > 0:
+ addition = [ [ 'Add', additions ] ]
+
+ # find those changes that should be done on all nodes at once.
+ def remove_mod(mod):
+ for change in changes:
+ if mod in change[2]:
+ change[2].remove(mod)
+
+ seen = []
+ tweak_all = []
+ for change in changes:
+ tweak = change[2]
+ for mod in tweak:
+ if mod in seen:
+ continue
+ seen += [mod]
+
+ # here we see each single "name=value" tweak in mod.
+ # Check if the actual tree had this anyway all the way through.
+ name = mod[0]
+ val = mod[1]
+
+ if name == 'contents' and val is None:
+ continue;
+
+ def check_node(node):
+ if (
+ (name == 'contents' and node.contents == val)
+ or
+ (node.props and (name in node.props) and node.props[name] == val)
+ or
+ (node.atts and (name in node.atts) and node.atts[name] == val)):
+ # has this same thing set. count on the left.
+ return [node, None]
+ return [None, node]
+ results = subtree.recurse(check_node)
+ have = []
+ havent = []
+ for result in results:
+ if result[0]:
+ have += [result[0]]
+ else:
+ havent += [result[1]]
+
+ if havent == []:
+ # ok, then, remove all tweaks that are like this, then
+ # add a generic tweak.
+ remove_mod(mod)
+ tweak_all += [mod]
+ elif len(havent) < len(have) * 3: # this is "an empirical factor"
+ remove_mod(mod)
+ tweak_all += [mod]
+ # record the *other* nodes' actual item, overwritten above
+ for node in havent:
+ name = mod[0]
+ if name == 'contents':
+ value = node.contents
+ elif name in node.props:
+ value = node.props[name]
+ elif name in node.atts:
+ value = node.atts[name]
+ else:
+ continue
+ changes += [ ['Change',
+ [self.get_node_path(node, wc)],
+ [[name, value]]
+ ]
+ ]
+
+ # combine those paths that have exactly the same changes
+ i = 0
+ j = 0
+ while i < len(changes):
+ # find other changes that are identical
+ j = i + 1
+ while j < len(changes):
+ if changes[i][2] == changes[j][2]:
+ changes[i][1] += changes[j][1]
+ del changes[j]
+ else:
+ j += 1
+ i += 1
+
+ # combine those changes that have exactly the same paths
+ i = 0
+ j = 0
+ while i < len(changes):
+ # find other paths that are identical
+ j = i + 1
+ while j < len(changes):
+ if changes[i][1] == changes[j][1]:
+ changes[i][2] += changes[j][2]
+ del changes[j]
+ else:
+ j += 1
+ i += 1
+
+ if tweak_all != []:
+ changes = [ ['Change', [], tweak_all ] ] + changes
+
+ return removal + addition + changes
+
+
+ def tweaks2py(self, tweaks, var_name, wc):
+ "Given an internal list of tweaks, write the tweak script for it."
+ py = ""
+ if tweaks is None:
+ return ""
+
+ if tweaks == 'Purge':
+ return var_name + " = wc.State(" + wc.py + ", {})\n"
+
+ for tweak in tweaks:
+ if tweak[0] == 'Remove':
+ py += var_name + ".remove("
+ paths = tweak[1]
+ py += self.str2svntest(paths[0])
+ for path in paths[1:]:
+ py += ", " + self.str2svntest(path)
+ py += ")\n"
+
+ elif tweak[0] == 'Add':
+ # add({'A/D/H/zeta' : Item(status=' ', wc_rev=9), ...})
+ py += var_name + ".add({"
+ adds = tweak[1]
+ for add in adds:
+ path = add[0]
+ node = add[1]
+ py += self.node2py(node, wc, "\n ", False)
+ py += "\n})\n"
+
+ else:
+ paths = tweak[1]
+ mods = tweak[2]
+ if mods != []:
+ py += var_name + ".tweak("
+ for path in paths:
+ py += self.str2svntest(path) + ", "
+ def mod2py(mod):
+ return mod[0] + "=" + self.str2svntest(mod[1])
+ py += mod2py(mods[0])
+ for mod in mods[1:]:
+ py += ", " + mod2py(mod)
+ py += ")\n"
+ return py
+
+
+ def path2svntest(self, path, argnr=None, do_remove_on_new_wc_path=True):
+ """Given an input argument, do one hell of a path expansion on it.
+ ARGNR is simply inserted into the resulting Target.
+ Returns a self.Target instance.
+ """
+ wc = self.WorkingCopy('wc_dir', self.sbox.wc_dir, None)
+ url = self.sbox.repo_url # do we need multiple URLs too??
+
+ pathsep = '/'
+ if path.find('/') < 0 and path.find('\\') >= 0:
+ pathsep = '\\'
+
+ is_url = False
+
+ # If you add to these, make sure you add longer ones first, to
+ # avoid e.g. '$WC_DIR' matching '$WC' first.
+ wc_dir_wildcards = ['wc_dir', 'wcdir', '$WC_DIR', '$WC']
+ url_wildcards = ['url', '$URL']
+
+ first = path.split(pathsep, 1)[0]
+ if first in wc_dir_wildcards:
+ path = path[len(first):]
+ elif first in url_wildcards:
+ path = path[len(first):]
+ is_url = True
+ else:
+ for url_scheme in ['^/', 'file:/', 'http:/', 'svn:/', 'svn+ssh:/']:
+ if path.startswith(url_scheme):
+ is_url = True
+ # keep it as it is
+ pyarg = self.str2svntest(path)
+ runarg = path
+ return self.Target(pyarg, runarg, argnr, is_url, None)
+
+ for wc_dir_wildcard in wc_dir_wildcards:
+ if first.startswith(wc_dir_wildcard):
+ # The first path element starts with "wc_dir" (or similar),
+ # but it has more attached to it. Like "wc_dir.2" or "wc_dir_other"
+ # Record a new wc dir name.
+
+ # try to figure out a nice suffix to pass to sbox.
+ # (it will create a new dir called sbox.wc_dir + '.' + suffix)
+ suffix = ''
+ if first[len(wc_dir_wildcard)] in ['.','-','_']:
+ # it's a separator already, don't duplicate the dot. (warm&fuzzy)
+ suffix = first[len(wc_dir_wildcard) + 1:]
+ if len(suffix) < 1:
+ suffix = first[len(wc_dir_wildcard):]
+
+ if len(suffix) < 1:
+ raise Failure("no suffix supplied to other-wc_dir arg")
+
+ # Streamline the var name
+ suffix = suffix.replace('.','_').replace('-','_')
+ other_wc_dir_varname = 'wc_dir_' + suffix
+
+ path = path[len(first):]
+
+ real_path = self.get_other_wc_real_path(other_wc_dir_varname,
+ suffix,
+ do_remove_on_new_wc_path)
+
+ wc = self.WorkingCopy(other_wc_dir_varname,
+ real_path, suffix)
+ # found a match, no need to loop further, but still process
+ # the path further.
+ break
+
+ if len(path) < 1 or path == pathsep:
+ if is_url:
+ self.used_url = True
+ pyarg = 'url'
+ runarg = url
+ wc = None
+ else:
+ if wc.suffix is None:
+ self.used_wc_dir = True
+ pyarg = wc.py
+ runarg = wc.realpath
+ else:
+ pathelements = split_remove_empty(path, pathsep)
+
+ # make a new variable, if necessary
+ if is_url:
+ pyarg, runarg = self.ensure_url_var(pathelements)
+ wc = None
+ else:
+ pyarg, runarg = self.ensure_path_var(wc, pathelements)
+
+ return self.Target(pyarg, runarg, argnr, is_url, wc)
+
+
+ def get_other_wc_real_path(self, varname, suffix, do_remove):
+ "Create or retrieve the path of an alternate working copy."
+ if varname in self.other_wc_dirs:
+ return self.other_wc_dirs[varname][1]
+
+ # see if there is a wc already in the sbox
+ path = self.sbox.wc_dir + '.' + suffix
+ if path in self.sbox.test_paths:
+ py = "sbox.wc_dir + '." + suffix + "'"
+ else:
+ # else, we must still create one.
+ path = self.sbox.add_wc_path(suffix, do_remove)
+ py = "sbox.add_wc_path(" + str2py(suffix)
+ if not do_remove:
+ py += ", remove=False"
+ py += ')'
+
+ value = [py, path]
+ self.other_wc_dirs[varname] = [py, path]
+ self.sorted_vars_by_pathlen = self.get_sorted_vars_by_pathlen()
+ return path
+
+
+ def define_var(self, name, value):
+ "Add a variable definition, don't allow redefinitions."
+ # see if we already have this var
+ if name in self.vars:
+ if self.vars[name] != value:
+ raise Failure("Variable name collision. Hm, fix factory.py?")
+ # ok, it's recorded correctly. Nothing needs to happen.
+ return
+
+ # a new variable needs to be recorded
+ self.vars[name] = value
+ # update the sorted list of vars for substitution by str2svntest()
+ self.sorted_vars_by_pathlen = self.get_sorted_vars_by_pathlen()
+
+
+ def ensure_path_var(self, wc, pathelements):
+ "Given a path in a working copy, make sure we have a variable for it."
+
+ # special case: if a path is '.', simply use wc_dir.
+ if pathelements == ['.']:
+ self.used_wc_dir = True
+ return wc.py, wc.realpath
+
+ name = "_".join(pathelements)
+
+ if wc.suffix is not None:
+ # This is an "other" working copy (not the default).
+ # The suffix of the wc_dir variable serves as the prefix:
+ # wc_dir_other ==> other_A_D = os.path.join(wc_dir_other, 'A', 'D')
+ name = wc.suffix + "_" + name
+ if name[0].isdigit():
+ name = "_" + name
+ else:
+ self.used_wc_dir = True
+
+ py = 'os.path.join(' + wc.py
+ if len(pathelements) > 0:
+ py += ", '" + "', '".join(pathelements) + "'"
+ py += ')'
+
+ wc_dir_real_path = wc.realpath
+ run = os.path.join(wc_dir_real_path, *pathelements)
+
+ value = [py, run]
+ self.define_var(name, value)
+
+ return name, run
+
+
+ def ensure_url_var(self, pathelements):
+ "Given a path in the test repository, ensure we have a url var for it."
+ name = "url_" + "_".join(pathelements)
+
+ joined = "/" + "/".join(pathelements)
+
+ py = 'url'
+ if len(pathelements) > 0:
+ py += " + " + str2py(joined)
+ self.used_url = True
+
+ run = self.sbox.repo_url + joined
+
+ value = [py, run]
+ self.define_var(name, value)
+
+ return name, run
+
+
+ def get_first_wc(self, target_list):
+ """In a list of Target instances, find the first one that is in a
+ working copy and return that WorkingCopy. Default to sbox.wc_dir.
+ This is useful if we need a working copy for a '^/' URL."""
+ for target in target_list:
+ if target.wc:
+ return target.wc
+ return self.WorkingCopy('wc_dir', self.sbox.wc_dir, None)
+
+
+ def args2svntest(self, args, append_wc_dir_if_missing = False,
+ keep_args_of = [], keep_first_count = 1,
+ drop_with_arg = []):
+ """Tries to be extremely intelligent at parsing command line arguments.
+ It needs to know which args are file targets that should be in a
+ working copy. File targets are magically expanded.
+
+ args: list of string tokens as passed to factory.make(), e.g.
+ ['svn', 'commit', '--force', 'wc_dir2']
+
+ append_wc_dir_if_missing: It's a switch.
+
+ keep_args_of: See TestFactory.keep_args_of (comment in __init__)
+
+ keep_first_count: Don't expand the first N non-option args. This is used
+ to preserve e.g. the token 'update' in '[svn] update wc_dir'
+ (the 'svn' is usually split off before this function is called).
+
+ drop_with_arg: list of string tokens that are commandline options with
+ following argument which we want to drop from the list of args
+ (e.g. -m message).
+ """
+
+ wc_dir = self.sbox.wc_dir
+ url = self.sbox.repo_url
+
+ target_supplied = False
+ pyargs = []
+ runargs = []
+ do_chdir = False
+ targets = []
+ wc_dirs = []
+
+ i = 0
+ while i < len(args):
+ arg = args[i]
+
+ if arg in drop_with_arg:
+ # skip this and the next arg
+ if not arg.startswith('--') and len(arg) > 2:
+ # it is a concatenated arg like -r123 instead of -r 123
+ # skip only this one. Do nothing.
+ i = i
+ else:
+ # skip this and the next arg
+ i += 1
+
+ elif arg.startswith('-'):
+ # keep this option arg verbatim.
+ pyargs += [ self.str2svntest(arg) ]
+ runargs += [ arg ]
+ # does this option expect a non-filename argument?
+ # take that verbatim as well.
+ if arg in keep_args_of:
+ i += 1
+ if i < len(args):
+ arg = args[i]
+ pyargs += [ self.str2svntest(arg) ]
+ runargs += [ arg ]
+
+ elif keep_first_count > 0:
+ # args still to be taken verbatim.
+ pyargs += [ self.str2svntest(arg) ]
+ runargs += [ arg ]
+ keep_first_count -= 1
+
+ elif arg.startswith('^/'):
+ # this is a ^/url, keep it verbatim.
+ # if we use "^/", we need to chdir(wc_dir).
+ do_chdir = True
+ pyarg = str2py(arg)
+ targets += [ self.Target(pyarg, arg, len(pyargs), True, None) ]
+ pyargs += [ pyarg ]
+ runargs += [ arg ]
+
+ else:
+ # well, then this must be a filename or url, autoexpand it.
+ target = self.path2svntest(arg, argnr=len(pyargs))
+ pyargs += [ target.pyarg ]
+ runargs += [ target.runarg ]
+ target_supplied = True
+ targets += [ target ]
+
+ i += 1
+
+ if not target_supplied and append_wc_dir_if_missing:
+ # add a simple wc_dir target
+ self.used_wc_dir = True
+ wc = self.WorkingCopy('wc_dir', wc_dir, None)
+ targets += [ self.Target('wc_dir', wc_dir, len(pyargs), False, wc) ]
+ pyargs += [ 'wc_dir' ]
+ runargs += [ wc_dir ]
+
+ return pyargs, runargs, do_chdir, targets
+
+###### END of the TestFactory class ######
+
+
+
+# Quotes-preserving text wrapping for output
+
+def find_quote_end(text, i):
+  "In string TEXT, find the end of the quote that starts at TEXT[i]"
+ # don't handle """ quotes
+ quote = text[i]
+ i += 1
+ while i < len(text):
+ if text[i] == '\\':
+ i += 1
+ elif text[i] == quote:
+ return i
+ i += 1
+ return len(text) - 1
+
+
+class MyWrapper(textwrap.TextWrapper):
+ "A textwrap.TextWrapper that doesn't break a line within quotes."
+ ### TODO regexes would be nice, maybe?
+ def _split(self, text):
+ parts = []
+ i = 0
+ start = 0
+ # This loop will break before and after each space, but keep
+ # quoted strings in one piece. Example, breaks marked '/':
+ # /(one,/ /two(blagger),/ /'three three three',)/
+ while i < len(text):
+ if text[i] in ['"', "'"]:
+ # handle """ quotes. (why, actually?)
+ if text[i:i+3] == '"""':
+ end = text[i+3:].find('"""')
+ if end >= 0:
+ i += end + 2
+ else:
+ i = len(text) - 1
+ else:
+ # handle normal quotes
+ i = find_quote_end(text, i)
+ elif text[i].isspace():
+ # split off previous section, if any
+ if start < i:
+ parts += [text[start:i]]
+ start = i
+ # split off this space
+ parts += [text[i]]
+ start = i + 1
+
+ i += 1
+
+ if start < len(text):
+ parts += [text[start:]]
+ return parts
+
+
+def wrap_each_line(str, ii, si, blw):
+  """Wrap lines to a defined width (<80 chars). Feed the lines singly to
+ MyWrapper, so that it preserves the current line endings already in there.
+ We only want to insert new wraps, not remove existing newlines."""
+ wrapper = MyWrapper(77, initial_indent=ii,
+ subsequent_indent=si)
+
+ lines = str.splitlines()
+ for i in range(0,len(lines)):
+ if lines[i] != '':
+ lines[i] = wrapper.fill(lines[i])
+ return '\n'.join(lines)
+
+
+
+# Other miscellaneous helpers
+
+def sh2str(string):
+  "un-escapes away \\x sequences"
+ if string is None:
+ return None
+ return string.decode("string-escape")
+
+
+def get_quote_style(str):
+ """find which quote is the outer one, ' or "."""
+ quote_char = None
+ at = None
+
+ found = str.find("'")
+ found2 = str.find('"')
+
+ # If found == found2, both must be -1, so nothing was found.
+ if found != found2:
+ # If a quote was found
+ if found >= 0 and found2 >= 0:
+ # If both were found, invalidate the later one
+ if found < found2:
+ found2 = -1
+ else:
+ found = -1
+ # See which one remains.
+ if found >= 0:
+ at = found + 1
+ quote_char = "'"
+ elif found2 >= 0:
+ at = found2 + 1
+ quote_char = '"'
+
+ return quote_char, at
+
+def split_remove_empty(str, sep):
+ "do a split, then remove empty elements."
+ list = str.split(sep)
+ return filter(lambda item: item and len(item) > 0, list)
+
+def str2py(str):
+ "returns the string enclosed in quotes, suitable for py scripts."
+ if str is None:
+ return "None"
+
+ # try to make a nice choice of quoting character
+ if str.find("'") >= 0:
+ return '"' + str.encode("string-escape"
+ ).replace("\\'", "'"
+ ).replace('"', '\\"') + '"'
+ else:
+ return "'" + str.encode("string-escape") + "'"
+
+ return str
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/svntest/main.py b/subversion/tests/cmdline/svntest/main.py
new file mode 100644
index 0000000..09a9722
--- /dev/null
+++ b/subversion/tests/cmdline/svntest/main.py
@@ -0,0 +1,2531 @@
+#
+# main.py: a shared, automated test suite for Subversion
+#
+# Subversion is a tool for revision control.
+# See http://subversion.tigris.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+import sys
+import os
+import shutil
+import re
+import stat
+import subprocess
+import time
+import threading
+import optparse
+import xml
+import urllib
+import logging
+import hashlib
+import zipfile
+import codecs
+
+try:
+ # Python >=3.0
+ import queue
+ from urllib.parse import quote as urllib_parse_quote
+ from urllib.parse import unquote as urllib_parse_unquote
+ from urllib.parse import urlparse
+except ImportError:
+ # Python <3.0
+ import Queue as queue
+ from urllib import quote as urllib_parse_quote
+ from urllib import unquote as urllib_parse_unquote
+ from urlparse import urlparse
+
+import svntest
+from svntest import Failure
+from svntest import Skip
+
+SVN_VER_MINOR = 10
+
+######################################################################
+#
+# HOW TO USE THIS MODULE:
+#
+# Write a new python script that
+#
+# 1) imports this 'svntest' package
+#
+# 2) contains a number of related 'test' routines. (Each test
+# routine should take no arguments, and return None on success
+# or throw a Failure exception on failure. Each test should
+# also contain a short docstring.)
+#
+# 3) places all the tests into a list that begins with None.
+#
+# 4) calls svntest.main.client_test() on the list.
+#
+# Also, your tests will probably want to use some of the common
+# routines in the 'Utilities' section below.
+#
+#####################################################################
+# Global stuff
+
+default_num_threads = 5
+
+# Don't try to use this before calling execute_tests()
+logger = None
+
+
+class SVNProcessTerminatedBySignal(Failure):
+ "Exception raised if a spawned process segfaulted, aborted, etc."
+ pass
+
+class SVNLineUnequal(Failure):
+ "Exception raised if two lines are unequal"
+ pass
+
+class SVNUnmatchedError(Failure):
+ "Exception raised if an expected error is not found"
+ pass
+
+class SVNCommitFailure(Failure):
+ "Exception raised if a commit failed"
+ pass
+
+class SVNRepositoryCopyFailure(Failure):
+ "Exception raised if unable to copy a repository"
+ pass
+
+class SVNRepositoryCreateFailure(Failure):
+ "Exception raised if unable to create a repository"
+ pass
+
+# Windows specifics
+if sys.platform == 'win32':
+ windows = True
+ file_scheme_prefix = 'file:///'
+ _exe = '.exe'
+ _bat = '.bat'
+ os.environ['SVN_DBG_STACKTRACES_TO_STDERR'] = 'y'
+else:
+ windows = False
+ file_scheme_prefix = 'file://'
+ _exe = ''
+ _bat = ''
+
+# The location of our mock svneditor script.
+if windows:
+ svneditor_script = os.path.join(sys.path[0], 'svneditor.bat')
+else:
+ svneditor_script = os.path.join(sys.path[0], 'svneditor.py')
+
+# Username and password used by the working copies
+wc_author = 'jrandom'
+wc_passwd = 'rayjandom'
+
+# Username and password used by svnrdump in dump/load cross-checks
+crosscheck_username = '__dumpster__'
+crosscheck_password = '__loadster__'
+
+# Username and password used by the working copies for "second user"
+# scenarios
+wc_author2 = 'jconstant' # use the same password as wc_author
+
+stack_trace_regexp = r'(?:.*subversion[\\//].*\.c:[0-9]*,$|.*apr_err=.*)'
+
+# Set C locale for command line programs
+os.environ['LC_ALL'] = 'C'
+
+######################################################################
+# Permission constants used with e.g. chmod() and open().
+# Define them here at a central location, so people aren't tempted to
+# use octal literals which are not portable between Python 2 and 3.
+
+S_ALL_READ = stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH
+S_ALL_WRITE = stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH
+S_ALL_EXEC = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
+
+S_ALL_RW = S_ALL_READ | S_ALL_WRITE
+S_ALL_RX = S_ALL_READ | S_ALL_EXEC
+S_ALL_RWX = S_ALL_READ | S_ALL_WRITE | S_ALL_EXEC
+
+######################################################################
+# The locations of the svn binaries.
+# Use --bin to override these defaults.
+def P(relpath,
+ head=os.path.dirname(os.path.dirname(os.path.abspath('.')))
+ ):
+ if sys.platform=='win32':
+ return os.path.join(head, relpath + '.exe')
+ else:
+ return os.path.join(head, relpath)
+svn_binary = P('svn/svn')
+svnadmin_binary = P('svnadmin/svnadmin')
+svnlook_binary = P('svnlook/svnlook')
+svnrdump_binary = P('svnrdump/svnrdump')
+svnsync_binary = P('svnsync/svnsync')
+svnversion_binary = P('svnversion/svnversion')
+svndumpfilter_binary = P('svndumpfilter/svndumpfilter')
+svnmucc_binary = P('svnmucc/svnmucc')
+svnfsfs_binary = P('svnfsfs/svnfsfs')
+entriesdump_binary = P('tests/cmdline/entries-dump')
+lock_helper_binary = P('tests/cmdline/lock-helper')
+atomic_ra_revprop_change_binary = P('tests/cmdline/atomic-ra-revprop-change')
+wc_lock_tester_binary = P('tests/libsvn_wc/wc-lock-tester')
+wc_incomplete_tester_binary = P('tests/libsvn_wc/wc-incomplete-tester')
+del P
+
+######################################################################
+# The location of svnauthz binary, relative to the only scripts that
+# import this file right now (they live in ../).
+# Use --tools to override these defaults.
+svnauthz_binary = os.path.abspath('../../../tools/server-side/svnauthz' + _exe)
+svnauthz_validate_binary = os.path.abspath(
+ '../../../tools/server-side/svnauthz-validate' + _exe
+)
+svnmover_binary = os.path.abspath('../../../tools/dev/svnmover/svnmover' + _exe)
+
+# Location to the pristine repository, will be calculated from test_area_url
+# when we know what the user specified for --url.
+pristine_greek_repos_url = None
+
+# Global variable to track all of our options
+options = None
+
+# End of command-line-set global variables.
+######################################################################
+
+# All temporary repositories and working copies are created underneath
+# this dir, so there's one point at which to mount, e.g., a ramdisk.
+work_dir = "svn-test-work"
+
+# Constant for the merge info property.
+SVN_PROP_MERGEINFO = "svn:mergeinfo"
+
+# Constant for the inheritable auto-props property.
+SVN_PROP_INHERITABLE_AUTOPROPS = "svn:auto-props"
+
+# Constant for the inheritable ignores property.
+SVN_PROP_INHERITABLE_IGNORES = "svn:global-ignores"
+
+# Where we want all the repositories and working copies to live.
+# Each test will have its own!
+general_repo_dir = os.path.join(work_dir, "repositories")
+general_wc_dir = os.path.join(work_dir, "working_copies")
+
+# temp directory in which we will create our 'pristine' local
+# repository and other scratch data. This should be removed when we
+# quit and when we startup.
+temp_dir = os.path.join(work_dir, 'local_tmp')
+
+# (derivatives of the tmp dir.)
+pristine_greek_repos_dir = os.path.join(temp_dir, "repos")
+greek_dump_dir = os.path.join(temp_dir, "greekfiles")
+default_config_dir = os.path.abspath(os.path.join(temp_dir, "config"))
+
+#
+# Our pristine greek-tree state.
+#
+# If a test wishes to create an "expected" working-copy tree, it should
+# call main.greek_state.copy(). That method will return a copy of this
+# State object which can then be edited.
+#
+_item = svntest.wc.StateItem
+greek_state = svntest.wc.State('', {
+ 'iota' : _item("This is the file 'iota'.\n"),
+ 'A' : _item(),
+ 'A/mu' : _item("This is the file 'mu'.\n"),
+ 'A/B' : _item(),
+ 'A/B/lambda' : _item("This is the file 'lambda'.\n"),
+ 'A/B/E' : _item(),
+ 'A/B/E/alpha' : _item("This is the file 'alpha'.\n"),
+ 'A/B/E/beta' : _item("This is the file 'beta'.\n"),
+ 'A/B/F' : _item(),
+ 'A/C' : _item(),
+ 'A/D' : _item(),
+ 'A/D/gamma' : _item("This is the file 'gamma'.\n"),
+ 'A/D/G' : _item(),
+ 'A/D/G/pi' : _item("This is the file 'pi'.\n"),
+ 'A/D/G/rho' : _item("This is the file 'rho'.\n"),
+ 'A/D/G/tau' : _item("This is the file 'tau'.\n"),
+ 'A/D/H' : _item(),
+ 'A/D/H/chi' : _item("This is the file 'chi'.\n"),
+ 'A/D/H/psi' : _item("This is the file 'psi'.\n"),
+ 'A/D/H/omega' : _item("This is the file 'omega'.\n"),
+ })
+
+
+######################################################################
+# Utilities shared by the tests
+def wrap_ex(func, output):
+ "Wrap a function, catch, print and ignore exceptions"
+ def w(*args, **kwds):
+ try:
+ return func(*args, **kwds)
+ except Failure as ex:
+ if ex.__class__ != Failure or ex.args:
+ ex_args = str(ex)
+ if ex_args:
+ logger.warn('EXCEPTION: %s: %s', ex.__class__.__name__, ex_args)
+ else:
+ logger.warn('EXCEPTION: %s', ex.__class__.__name__)
+ return w
+
+def setup_development_mode():
+ "Wraps functions in module actions"
+ l = [ 'run_and_verify_svn',
+ 'run_and_verify_svnversion',
+ 'run_and_verify_load',
+ 'run_and_verify_dump',
+ 'run_and_verify_checkout',
+ 'run_and_verify_export',
+ 'run_and_verify_update',
+ 'run_and_verify_merge',
+ 'run_and_verify_switch',
+ 'run_and_verify_commit',
+ 'run_and_verify_unquiet_status',
+ 'run_and_verify_status',
+ 'run_and_verify_diff_summarize',
+ 'run_and_verify_diff_summarize_xml',
+ 'run_and_validate_lock']
+
+ for func in l:
+ setattr(svntest.actions, func, wrap_ex(getattr(svntest.actions, func)))
+
+def get_admin_name():
+ "Return name of SVN administrative subdirectory."
+
+ if (windows or sys.platform == 'cygwin') \
+ and 'SVN_ASP_DOT_NET_HACK' in os.environ:
+ return '_svn'
+ else:
+ return '.svn'
+
+def wc_is_singledb(wcpath):
+ """Temporary function that checks whether a working copy directory looks
+ like it is part of a single-db working copy."""
+
+ pristine = os.path.join(wcpath, get_admin_name(), 'pristine')
+ if not os.path.exists(pristine):
+ return True
+
+ # Now we must be looking at a multi-db WC dir or the root dir of a
+ # single-DB WC. Sharded 'pristine' dir => single-db, else => multi-db.
+ for name in os.listdir(pristine):
+ if len(name) == 2:
+ return True
+ elif len(name) == 40:
+ return False
+
+ return False
+
+def get_start_commit_hook_path(repo_dir):
+ "Return the path of the start-commit-hook conf file in REPO_DIR."
+
+ return os.path.join(repo_dir, "hooks", "start-commit")
+
+def get_pre_commit_hook_path(repo_dir):
+ "Return the path of the pre-commit-hook conf file in REPO_DIR."
+
+ return os.path.join(repo_dir, "hooks", "pre-commit")
+
+def get_post_commit_hook_path(repo_dir):
+ "Return the path of the post-commit-hook conf file in REPO_DIR."
+
+ return os.path.join(repo_dir, "hooks", "post-commit")
+
+def get_pre_revprop_change_hook_path(repo_dir):
+ "Return the path of the pre-revprop-change hook script in REPO_DIR."
+
+ return os.path.join(repo_dir, "hooks", "pre-revprop-change")
+
+def get_pre_lock_hook_path(repo_dir):
+ "Return the path of the pre-lock hook script in REPO_DIR."
+
+ return os.path.join(repo_dir, "hooks", "pre-lock")
+
+def get_pre_unlock_hook_path(repo_dir):
+ "Return the path of the pre-unlock hook script in REPO_DIR."
+
+ return os.path.join(repo_dir, "hooks", "pre-unlock")
+
+def get_svnserve_conf_file_path(repo_dir):
+ "Return the path of the svnserve.conf file in REPO_DIR."
+
+ return os.path.join(repo_dir, "conf", "svnserve.conf")
+
+def get_fsfs_conf_file_path(repo_dir):
+ "Return the path of the fsfs.conf file in REPO_DIR."
+
+ return os.path.join(repo_dir, "db", "fsfs.conf")
+
+def get_fsfs_format_file_path(repo_dir):
+ "Return the path of the format file in REPO_DIR."
+
+ return os.path.join(repo_dir, "db", "format")
+
+def ensure_list(item):
+ "If ITEM is not already a list, convert it to a list."
+ if isinstance(item, list):
+ return item
+ elif isinstance(item, bytes) or isinstance(item, str):
+ return [ item ]
+ else:
+ return list(item)
+
+def filter_dbg(lines, binary = False):
+ if binary:
+ excluded = filter(lambda line: line.startswith(b'DBG:'), lines)
+ excluded = map(bytes.decode, excluded)
+ included = filter(lambda line: not line.startswith(b'DBG:'), lines)
+ else:
+ excluded = filter(lambda line: line.startswith('DBG:'), lines)
+ included = filter(lambda line: not line.startswith('DBG:'), lines)
+
+ sys.stdout.write(''.join(excluded))
+ return ensure_list(included)
+
+# Run any binary, logging the command line and return code
+def run_command(command, error_expected, binary_mode=False, *varargs):
+ """Run COMMAND with VARARGS. Return exit code as int; stdout, stderr
+ as lists of lines (including line terminators). See run_command_stdin()
+ for details. If ERROR_EXPECTED is None, any stderr output will be
+ printed and any stderr output or a non-zero exit code will raise an
+ exception."""
+
+ return run_command_stdin(command, error_expected, 0, binary_mode,
+ None, *varargs)
+
+# Frequently used constants:
+# If any of these relative path strings show up in a server response,
+# then we can assume that the on-disk repository path was leaked to the
+# client. Having these here as constants means we don't need to construct
+# them over and over again.
+_repos_diskpath1 = os.path.join('cmdline', 'svn-test-work', 'repositories')
+_repos_diskpath2 = os.path.join('cmdline', 'svn-test-work', 'local_tmp',
+ 'repos')
+_repos_diskpath1_bytes = _repos_diskpath1.encode()
+_repos_diskpath2_bytes = _repos_diskpath2.encode()
+
+# A regular expression that matches arguments that are trivially safe
+# to pass on a command line without quoting on any supported operating
+# system:
+_safe_arg_re = re.compile(r'^[A-Za-z\d\.\_\/\-\:\@]+$')
+
+def _quote_arg(arg):
+ """Quote ARG for a command line.
+
+ Return a quoted version of the string ARG, or just ARG if it contains
+ only universally harmless characters.
+
+ WARNING: This function cannot handle arbitrary command-line
+ arguments: it is just good enough for what we need here."""
+
+ arg = str(arg)
+ if _safe_arg_re.match(arg):
+ return arg
+
+ if windows:
+ # Note: subprocess.list2cmdline is Windows-specific.
+ return subprocess.list2cmdline([arg])
+ else:
+ # Quoting suitable for most Unix shells.
+ return "'" + arg.replace("'", "'\\''") + "'"
+
+def open_pipe(command, bufsize=-1, stdin=None, stdout=None, stderr=None):
+ """Opens a subprocess.Popen pipe to COMMAND using STDIN,
+ STDOUT, and STDERR. BUFSIZE is passed to subprocess.Popen's
+ argument of the same name.
+
+ Returns (infile, outfile, errfile, waiter); waiter
+ should be passed to wait_on_pipe."""
+ command = [str(x) for x in command]
+
+ # Always run python scripts under the same Python executable as used
+ # for the test suite.
+ if command[0].endswith('.py'):
+ command.insert(0, sys.executable)
+
+ command_string = command[0] + ' ' + ' '.join(map(_quote_arg, command[1:]))
+
+ if not stdin:
+ stdin = subprocess.PIPE
+ if not stdout:
+ stdout = subprocess.PIPE
+ if not stderr:
+ stderr = subprocess.PIPE
+
+ p = subprocess.Popen(command,
+ bufsize,
+ stdin=stdin,
+ stdout=stdout,
+ stderr=stderr,
+ close_fds=not windows)
+ return p.stdin, p.stdout, p.stderr, (p, command_string)
+
+def wait_on_pipe(waiter, binary_mode, stdin=None):
+ """WAITER is (KID, COMMAND_STRING). Wait for KID (opened with open_pipe)
+ to finish, dying if it does. If KID fails, create an error message
+ containing any stdout and stderr from the kid. Show COMMAND_STRING in
+ diagnostic messages. Normalize Windows line endings of stdout and stderr
+ if not BINARY_MODE. Return KID's exit code as int; stdout, stderr as
+ lists of lines (including line terminators)."""
+ if waiter is None:
+ return
+
+ kid, command_string = waiter
+ stdout, stderr = kid.communicate(stdin)
+ exit_code = kid.returncode
+
+ # We always expect STDERR to be strings, not byte-arrays.
+ if not isinstance(stderr, str):
+ stderr = stderr.decode("utf-8")
+ if not binary_mode:
+ if not isinstance(stdout, str):
+ stdout = stdout.decode("utf-8")
+
+ # Normalize Windows line endings if in text mode.
+ if windows:
+ stdout = stdout.replace('\r\n', '\n')
+ stderr = stderr.replace('\r\n', '\n')
+
+ # Convert output strings to lists.
+ stdout_lines = stdout.splitlines(True)
+ stderr_lines = stderr.splitlines(True)
+
+ if exit_code < 0:
+ if not windows:
+ exit_signal = os.WTERMSIG(-exit_code)
+ else:
+ exit_signal = exit_code
+
+ if stdout_lines is not None:
+ logger.info("".join(stdout_lines))
+ if stderr_lines is not None:
+ logger.warning("".join(stderr_lines))
+ # show the whole path to make it easier to start a debugger
+ logger.warning("CMD: %s terminated by signal %d"
+ % (command_string, exit_signal))
+ raise SVNProcessTerminatedBySignal
+ else:
+ if exit_code:
+ logger.info("CMD: %s exited with %d" % (command_string, exit_code))
+ return stdout_lines, stderr_lines, exit_code
+
+def spawn_process(command, bufsize=-1, binary_mode=False, stdin_lines=None,
+ *varargs):
+ """Run any binary, supplying input text, logging the command line.
+
+ BUFSIZE dictates the pipe buffer size used in communication with the
+ subprocess: quoting from subprocess.Popen(), "0 means unbuffered,
+ 1 means line buffered, any other positive value means use a buffer of
+ (approximately) that size. A negative bufsize means to use the system
+ default, which usually means fully buffered."
+
+ Normalize Windows line endings of stdout and stderr if not BINARY_MODE.
+ Return exit code as int; stdout, stderr as lists of lines (including
+ line terminators).
+ """
+ if stdin_lines and not isinstance(stdin_lines, list):
+ raise TypeError("stdin_lines should have list type")
+
+ # Log the command line
+ if not command.endswith('.py'):
+ logger.info('CMD: %s %s' % (os.path.basename(command),
+ ' '.join([_quote_arg(x) for x in varargs])))
+
+ infile, outfile, errfile, kid = open_pipe([command] + list(varargs), bufsize)
+
+ if stdin_lines:
+ for x in stdin_lines:
+ infile.write(x)
+
+ stdout_lines, stderr_lines, exit_code = wait_on_pipe(kid, binary_mode)
+ infile.close()
+
+ outfile.close()
+ errfile.close()
+
+ return exit_code, stdout_lines, stderr_lines
+
+def run_command_stdin(command, error_expected, bufsize=-1, binary_mode=False,
+ stdin_lines=None, *varargs):
+ """Run COMMAND with VARARGS; input STDIN_LINES (a list of strings
+ which should include newline characters) to program via stdin - this
+ should not be very large, as if the program outputs more than the OS
+ is willing to buffer, this will deadlock, with both Python and
+ COMMAND waiting to write to each other for ever. For tests where this
+ is a problem, setting BUFSIZE to a sufficiently large value will prevent
+ the deadlock, see spawn_process().
+ Normalize Windows line endings of stdout and stderr if not BINARY_MODE.
+ Return exit code as int; stdout, stderr as lists of lines (including
+ line terminators).
+ If ERROR_EXPECTED is None, any stderr output will be printed and any
+ stderr output or a non-zero exit code will raise an exception."""
+
+ start = time.time()
+
+ exit_code, stdout_lines, stderr_lines = spawn_process(command,
+ bufsize,
+ binary_mode,
+ stdin_lines,
+ *varargs)
+
+ def _line_contains_repos_diskpath(line):
+ # ### Note: this assumes that either svn-test-work isn't a symlink,
+ # ### or the diskpath isn't realpath()'d somewhere on the way from
+ # ### the server's configuration and the client's stderr. We could
+ # ### check for both the symlinked path and the realpath.
+ if isinstance(line, str):
+ return _repos_diskpath1 in line or _repos_diskpath2 in line
+ else:
+ return _repos_diskpath1_bytes in line or _repos_diskpath2_bytes in line
+
+ for lines, name in [[stdout_lines, "stdout"], [stderr_lines, "stderr"]]:
+ if is_ra_type_file() or 'svnadmin' in command or 'svnlook' in command:
+ break
+ # Does the server leak the repository on-disk path?
+ # (prop_tests-12 installs a hook script that does that intentionally)
+ if any(map(_line_contains_repos_diskpath, lines)) \
+ and not any(map(lambda arg: 'prop_tests-12' in arg, varargs)):
+ raise Failure("Repository diskpath in %s: %r" % (name, lines))
+
+ valgrind_diagnostic = False
+ # A valgrind diagnostic will raise a failure if the command is
+ # expected to run without error. When an error is expected any
+ # subsequent error pattern matching is usually lenient and will not
+ # detect the diagnostic so make sure a failure is raised here.
+ if error_expected and stderr_lines:
+ if any(map(lambda arg: re.match('==[0-9]+==', arg), stderr_lines)):
+ valgrind_diagnostic = True
+
+ stop = time.time()
+ logger.info('<TIME = %.6f>' % (stop - start))
+ for x in stdout_lines:
+ logger.info(x.rstrip())
+ for x in stderr_lines:
+ logger.info(x.rstrip())
+
+ if (((not error_expected) and ((stderr_lines) or (exit_code != 0)))
+ or valgrind_diagnostic):
+ for x in stderr_lines:
+ logger.warning(x.rstrip())
+ if len(varargs) <= 5:
+ brief_command = ' '.join((command,) + varargs)
+ else:
+ brief_command = ' '.join(((command,) + varargs)[:4]) + ' ...'
+ raise Failure('Command failed: "' + brief_command +
+ '"; exit code ' + str(exit_code))
+
+ return exit_code, \
+ filter_dbg(stdout_lines, binary_mode), \
+ stderr_lines
+
+def create_config_dir(cfgdir, config_contents=None, server_contents=None,
+ ssl_cert=None, ssl_url=None, http_proxy=None,
+ exclusive_wc_locks=None):
+ "Create config directories and files"
+
+ # config file names
+ cfgfile_cfg = os.path.join(cfgdir, 'config')
+ cfgfile_srv = os.path.join(cfgdir, 'servers')
+
+ # create the directory
+ if not os.path.isdir(cfgdir):
+ os.makedirs(cfgdir)
+
+ # define default config file contents if none provided
+ if config_contents is None:
+ config_contents = """
+#
+[auth]
+password-stores =
+
+[miscellany]
+interactive-conflicts = false
+"""
+ if exclusive_wc_locks:
+ config_contents += """
+[working-copy]
+exclusive-locking = true
+"""
+ # define default server file contents if none provided
+ if server_contents is None:
+ http_library_str = ""
+ if options.http_library:
+ http_library_str = "http-library=%s" % (options.http_library)
+ http_proxy_str = ""
+ http_proxy_username_str = ""
+ http_proxy_password_str = ""
+ if options.http_proxy:
+ http_proxy_parsed = urlparse("//" + options.http_proxy)
+ http_proxy_str = "http-proxy-host=%s\n" % (http_proxy_parsed.hostname) + \
+ "http-proxy-port=%d" % (http_proxy_parsed.port or 80)
+ if options.http_proxy_username:
+ http_proxy_username_str = "http-proxy-username=%s" % \
+ (options.http_proxy_username)
+ if options.http_proxy_password:
+ http_proxy_password_str = "http-proxy-password=%s" % \
+ (options.http_proxy_password)
+
+ server_contents = """
+#
+[global]
+%s
+%s
+%s
+%s
+store-plaintext-passwords=yes
+store-passwords=yes
+""" % (http_library_str, http_proxy_str, http_proxy_username_str,
+ http_proxy_password_str)
+
+ file_write(cfgfile_cfg, config_contents)
+ file_write(cfgfile_srv, server_contents)
+
+ if (ssl_cert and ssl_url):
+ trust_ssl_cert(cfgdir, ssl_cert, ssl_url)
+ elif cfgdir != default_config_dir:
+ copy_trust(cfgdir, default_config_dir)
+
+
+def trust_ssl_cert(cfgdir, ssl_cert, ssl_url):
+ """Setup config dir to trust the given ssl_cert for the given ssl_url
+ """
+
+ cert_rep = ''
+ fp = open(ssl_cert, 'r')
+ for line in fp.readlines()[1:-1]:
+ cert_rep = cert_rep + line.strip()
+
+ parsed_url = urlparse(ssl_url)
+ netloc_url = '%s://%s' % (parsed_url.scheme, parsed_url.netloc)
+ ssl_dir = os.path.join(cfgdir, 'auth', 'svn.ssl.server')
+ if not os.path.isdir(ssl_dir):
+ os.makedirs(ssl_dir)
+ md5_name = hashlib.md5(netloc_url).hexdigest()
+ md5_file = os.path.join(ssl_dir, md5_name)
+ md5_file_contents = """K 10
+ascii_cert
+V %d
+%s
+K 8
+failures
+V 1
+8
+K 15
+svn:realmstring
+V %d
+%s
+END
+""" % (len(cert_rep), cert_rep, len(netloc_url), netloc_url)
+ file_write(md5_file, md5_file_contents, mode='wb')
+
+def copy_trust(dst_cfgdir, src_cfgdir):
+ """Copy svn.ssl.server files from one config dir to another.
+ """
+
+ src_ssl_dir = os.path.join(src_cfgdir, 'auth', 'svn.ssl.server')
+ dst_ssl_dir = os.path.join(dst_cfgdir, 'auth', 'svn.ssl.server')
+ if not os.path.isdir(dst_ssl_dir):
+ os.makedirs(dst_ssl_dir)
+ for f in os.listdir(src_ssl_dir):
+ shutil.copy(os.path.join(src_ssl_dir, f), os.path.join(dst_ssl_dir, f))
+
+def _with_config_dir(args):
+ if '--config-dir' in args:
+ return args
+ else:
+ return args + ('--config-dir', default_config_dir)
+
+class svnrdump_crosscheck_authentication:
+ pass
+
+def _with_auth(args):
+ assert '--password' not in args
+ if svnrdump_crosscheck_authentication in args:
+ args = filter(lambda x: x is not svnrdump_crosscheck_authentication, args)
+ auth_username = crosscheck_username
+ auth_password = crosscheck_password
+ else:
+ auth_username = wc_author
+ auth_password = wc_passwd
+
+ args = args + ('--password', auth_password,
+ '--no-auth-cache' )
+ if '--username' in args:
+ return args
+ else:
+ return args + ('--username', auth_username )
+
# For running subversion and returning the output
def run_svn(error_expected, *varargs):
  """Run svn with VARARGS; return exit code as int; stdout, stderr as
  lists of lines (including line terminators).  If ERROR_EXPECTED is
  None, any stderr output will be printed and any stderr output or a
  non-zero exit code will raise an exception.  If
  you're just checking that something does/doesn't come out of
  stdout/stderr, you might want to use actions.run_and_verify_svn()."""
  # Config-dir and authentication options are appended automatically.
  return run_command(svn_binary, error_expected, False,
                     *(_with_auth(_with_config_dir(varargs))))
+
# For running svnadmin.  Ignores the output.
def run_svnadmin(*varargs):
  """Run svnadmin with VARARGS, returns exit code as int; stdout, stderr as
  list of lines (including line terminators)."""

  # Dump output is a raw repository stream; read it in binary mode so it
  # is not mangled by newline translation.
  use_binary = ('dump' in varargs) or ('dump-revprops' in varargs)

  exit_code, stdout_lines, stderr_lines = \
                       run_command(svnadmin_binary, 1, use_binary, *varargs)

  if use_binary and sys.platform == 'win32':
    # Callers don't expect binary output on stderr
    stderr_lines = [x.replace('\r', '') for x in stderr_lines]

  return exit_code, stdout_lines, stderr_lines
+
# For running svnlook.  Ignores the output.
def run_svnlook(*varargs):
  """Run svnlook with VARARGS, returns exit code as int; stdout, stderr as
  list of lines (including line terminators)."""
  return run_command(svnlook_binary, 1, False, *varargs)

def run_svnrdump(stdin_input, *varargs):
  """Run svnrdump with VARARGS, returns exit code as int; stdout, stderr as
  list of lines (including line terminators).  Use binary mode for output."""
  # Dump streams are binary; config-dir/auth options are appended
  # automatically as for run_svn().
  if stdin_input:
    return run_command_stdin(svnrdump_binary, 1, 1, True, stdin_input,
                             *(_with_auth(_with_config_dir(varargs))))
  else:
    return run_command(svnrdump_binary, 1, True,
                       *(_with_auth(_with_config_dir(varargs))))

def run_svnsync(*varargs):
  """Run svnsync with VARARGS, returns exit code as int; stdout, stderr as
  list of lines (including line terminators)."""
  return run_command(svnsync_binary, 1, False,
                     *(_with_auth(_with_config_dir(varargs))))

def run_svnversion(*varargs):
  """Run svnversion with VARARGS, returns exit code as int; stdout, stderr
  as list of lines (including line terminators)."""
  return run_command(svnversion_binary, 1, False, *varargs)

def run_svnmover(*varargs):
  """Run svnmover with VARARGS, returns exit code as int; stdout, stderr as
  list of lines (including line terminators)."""
  return run_command(svnmover_binary, 1, False,
                     *(_with_auth(_with_config_dir(varargs))))

def run_svnmucc(*varargs):
  """Run svnmucc with VARARGS, returns exit code as int; stdout, stderr as
  list of lines (including line terminators).  Use binary mode for output."""
  return run_command(svnmucc_binary, 1, True,
                     *(_with_auth(_with_config_dir(varargs))))

def run_svnauthz(*varargs):
  """Run svnauthz with VARARGS, returns exit code as int; stdout, stderr
  as list of lines (including line terminators)."""
  return run_command(svnauthz_binary, 1, False, *varargs)

def run_svnauthz_validate(*varargs):
  """Run svnauthz-validate with VARARGS, returns exit code as int; stdout,
  stderr as list of lines (including line terminators)."""
  return run_command(svnauthz_validate_binary, 1, False, *varargs)

def run_svnfsfs(*varargs):
  """Run svnfsfs with VARARGS, returns exit code as int; stdout, stderr
  as list of lines (including line terminators)."""
  return run_command(svnfsfs_binary, 1, False, *varargs)

def run_lock_helper(repo, path, user, seconds):
  """Run lock-helper to lock path in repo by username for seconds"""

  return run_command(lock_helper_binary, 1, False, repo, path, user, seconds)
+
def run_entriesdump(path):
  """Run the entries-dump helper, returning a dict of Entry objects."""
  # use spawn_process rather than run_command to avoid copying all the data
  # to stdout in verbose mode.
  exit_code, stdout_lines, stderr_lines = spawn_process(entriesdump_binary,
                                                        0, False, None, path)
  if exit_code or stderr_lines:
    ### report on this? or continue to just skip it?
    return None

  class Entry(object):
    pass
  entries = { }
  # The helper's stdout is Python source which populates 'entries' with
  # Entry instances; execute it in this local scope.
  exec(''.join(filter_dbg(stdout_lines)))
  return entries
+
def run_entriesdump_subdirs(path):
  """Run the entries-dump helper, returning a list of directory names."""
  # use spawn_process rather than run_command to avoid copying all the data
  # to stdout in verbose mode.
  exit_code, stdout_lines, stderr_lines = spawn_process(entriesdump_binary,
                                                        0, False, None,
                                                        '--subdirs', path)
  # Use a list comprehension instead of map(): on Python 3, map() returns
  # a lazy iterator, while this function is documented to return a list.
  return [line.strip() for line in filter_dbg(stdout_lines)]
+
def run_entriesdump_tree(path):
  """Run the entries-dump helper, returning a dict of a dict of Entry objects."""
  # use spawn_process rather than run_command to avoid copying all the data
  # to stdout in verbose mode.
  exit_code, stdout_lines, stderr_lines = spawn_process(entriesdump_binary,
                                                        0, False, None,
                                                        '--tree-dump', path)
  if exit_code or stderr_lines:
    ### report on this? or continue to just skip it?
    return None

  class Entry(object):
    pass
  dirs = { }
  # The helper's stdout is Python source which populates 'dirs'; execute
  # it in this local scope.
  exec(''.join(filter_dbg(stdout_lines)))
  return dirs

def run_atomic_ra_revprop_change(url, revision, propname, skel, want_error):
  """Run the atomic-ra-revprop-change helper, returning its exit code, stdout,
  and stderr.  For HTTP, default HTTP library is used."""
  # use spawn_process rather than run_command to avoid copying all the data
  # to stdout in verbose mode.
  #exit_code, stdout_lines, stderr_lines = spawn_process(entriesdump_binary,
  #                                                      0, False, None, path)

  # This passes HTTP_LIBRARY in addition to our params.
  return run_command(atomic_ra_revprop_change_binary, True, False,
                     url, revision, propname, skel,
                     want_error and 1 or 0, default_config_dir)

def run_wc_lock_tester(recursive, path, work_queue=False):
  "Run the wc-lock obtainer tool, returning its exit code, stdout and stderr"
  # Select the lock mode flag understood by the helper binary:
  # -w (work queue), -r (recursive), or -1 (single target).
  if work_queue:
    option = "-w"
  elif recursive:
    option = "-r"
  else:
    option = "-1"
  return run_command(wc_lock_tester_binary, False, False, option, path)

def run_wc_incomplete_tester(wc_dir, revision):
  "Run the wc-incomplete tool, returning its exit code, stdout and stderr"
  return run_command(wc_incomplete_tester_binary, False, False,
                     wc_dir, revision)
+
def youngest(repos_path):
  "run 'svnlook youngest' on REPOS_PATH, returns revision as int"
  # error_expected=None -- per the run_svn/run_command contract, stderr
  # output or a non-zero exit code raises inside run_command itself.
  exit_code, stdout_lines, stderr_lines = run_command(svnlook_binary, None, False,
                                                      'youngest', repos_path)
  if exit_code or stderr_lines:
    raise Failure("Unexpected failure of 'svnlook youngest':\n%s" % stderr_lines)
  if len(stdout_lines) != 1:
    # 'svnlook youngest' prints exactly one line: the revision number.
    raise Failure("Wrong output from 'svnlook youngest':\n%s" % stdout_lines)
  return int(stdout_lines[0].rstrip())
+
# Chmod recursively on a whole subtree
def chmod_tree(path, mode, mask):
  """For each node in the OS filesystem tree PATH, subtract MASK from its
  permissions and add MODE to them.  Symlinks are left untouched."""
  for parent, subdirs, subfiles in os.walk(path):
    for entry in subdirs + subfiles:
      full_path = os.path.join(parent, entry)
      if os.path.islink(full_path):
        continue
      current = os.stat(full_path)[stat.ST_MODE]
      os.chmod(full_path, (current & ~mask) | mode)
+
# For clearing away working copies
def safe_rmtree(dirname, retry=0):
  """Remove the tree at DIRNAME, making it writable first.
  If DIRNAME is a symlink, only remove the symlink, not its target."""
  def force_remove(target):
    # Grant read+write throughout the tree first so removal cannot fail
    # on read-only nodes, then delete it.
    chmod_tree(target, S_ALL_RW, S_ALL_RW)
    shutil.rmtree(target)

  if os.path.islink(dirname):
    os.unlink(dirname)
    return

  if not os.path.exists(dirname):
    return

  if not retry:
    force_remove(dirname)
    return

  # Retry with increasing delays; the final attempt may raise.
  for delay in (0.5, 1, 2, 4):
    try:
      force_remove(dirname)
      return
    except:
      time.sleep(delay)
  force_remove(dirname)
+
# For creating new files, and making local mods to existing files.
def file_write(path, contents, mode='w'):
  """Write the CONTENTS to the file at PATH, opening file using MODE,
  which is (w)rite by default."""

  if sys.version_info < (3, 0):
    # Python 2 file objects accept str in either text or binary mode.
    open(path, mode).write(contents)
    return

  # Python 3: coerce CONTENTS to the type required by MODE -- bytes for
  # binary files, str (written as utf-8) for text files.
  binary_mode = 'b' in mode
  if binary_mode and isinstance(contents, str):
    contents = contents.encode()
  elif not binary_mode and not isinstance(contents, str):
    contents = contents.decode("utf-8")

  if isinstance(contents, str):
    codecs.open(path, mode, "utf-8").write(contents)
  else:
    open(path, mode).write(contents)
+
# For making local mods to files
def file_append(path, new_text):
  "Append NEW_TEXT to file at PATH"
  # Thin wrapper over file_write() with append ('a') mode.
  file_write(path, new_text, 'a')

# Append in binary mode
def file_append_binary(path, new_text):
  "Append NEW_TEXT to file at PATH in binary mode"
  # Thin wrapper over file_write() with binary append ('ab') mode.
  file_write(path, new_text, 'ab')
+
# For replacing parts of contents in an existing file, with new content.
def file_substitute(path, contents, new_contents):
  """Replace the CONTENTS in the file at PATH using the NEW_CONTENTS"""
  with open(path, 'r') as source:
    text = source.read()
  with open(path, 'w') as target:
    target.write(text.replace(contents, new_contents))
+
# For setting up authz, hooks and making other tweaks to created repos
def _post_create_repos(path, minor_version = None):
  """Set default access right configurations for svnserve and mod_dav,
  install hooks and perform other various tweaks according to the test
  options in the SVN repository at PATH."""

  # Require authentication to write to the repos, for ra_svn testing.
  file_write(get_svnserve_conf_file_path(path),
             "[general]\nauth-access = write\n")
  if options.enable_sasl:
    file_append(get_svnserve_conf_file_path(path),
                "realm = svntest\n[sasl]\nuse-sasl = true\n")
  else:
    file_append(get_svnserve_conf_file_path(path), "password-db = passwd\n")
    # This actually creates TWO [users] sections in the file (one of them is
    # uncommented in `svnadmin create`'s template), so we exercise the .ini
    # files reading code's handling of duplicates, too. :-)
    users = ("[users]\n"
             "jrandom = rayjandom\n"
             "jconstant = rayjandom\n")
    if tests_verify_dump_load_cross_check():
      # Insert a user for the dump/load cross-check.
      users += (crosscheck_username + " = " + crosscheck_password + "\n")
    file_append(os.path.join(path, "conf", "passwd"), users)

  if options.fs_type is None or options.fs_type == 'fsfs':
    # fsfs.conf file
    if (minor_version is None or minor_version >= 6):
      confpath = get_fsfs_conf_file_path(path)
      if options.config_file is not None:
        shutil.copy(options.config_file, confpath)

      # BUGFIX: parenthesize the 'or' chain.  Previously 'and' bound
      # tighter than 'or', so the os.path.exists() guard only applied to
      # the fsfs_dir_deltification condition and a missing conf file
      # crashed the open() below when memcached_server or
      # fsfs_compression was set.
      if (options.memcached_server is not None or
          options.fsfs_compression is not None or
          options.fsfs_dir_deltification is not None) and \
         os.path.exists(confpath):
        with open(confpath, 'r') as conffile:
          newlines = []
          for line in conffile.readlines():
            if line.startswith('# compression ') and \
               options.fsfs_compression is not None:
              line = 'compression = %s\n' % options.fsfs_compression
            if line.startswith('# enable-dir-deltification ') and \
               options.fsfs_dir_deltification is not None:
              line = 'enable-dir-deltification = %s\n' % \
                     options.fsfs_dir_deltification
            # append() rather than '+= line': '+=' with a str operand
            # extends the list character-by-character.
            newlines.append(line)
            if options.memcached_server is not None and \
               line == '[memcached-servers]\n':
              newlines.append('key = %s\n' % options.memcached_server)
        with open(confpath, 'w') as conffile:
          conffile.writelines(newlines)

    # format file
    if options.fsfs_sharding is not None:
      def transform_line(line):
        # Rewrite the 'layout' line to the requested sharding scheme.
        if line.startswith('layout '):
          if options.fsfs_sharding > 0:
            line = 'layout sharded %d' % options.fsfs_sharding
          else:
            line = 'layout linear'
        return line

      # read it
      format_file_path = get_fsfs_format_file_path(path)
      # NOTE(review): the file is opened in binary mode but split with a
      # str separator below -- this only works on Python 2; confirm
      # before running the sharding option under Python 3.
      contents = open(format_file_path, 'rb').read()

      # tweak it
      new_contents = "".join([transform_line(line) + "\n"
                              for line in contents.split("\n")])
      if new_contents[-1] == "\n":
        # we don't currently allow empty lines (\n\n) in the format file.
        new_contents = new_contents[:-1]

      # replace it
      os.chmod(format_file_path, S_ALL_RW)
      file_write(format_file_path, new_contents, 'wb')

    # post-commit
    # Note that some tests (currently only commit_tests) create their own
    # post-commit hooks, which would override this one. :-(
    if options.fsfs_packing and minor_version >= 6:
      # some tests chdir.
      abs_path = os.path.abspath(path)
      create_python_hook_script(get_post_commit_hook_path(abs_path),
                                "import subprocess\n"
                                "import sys\n"
                                "command = %s\n"
                                "sys.exit(subprocess.Popen(command).wait())\n"
                                % repr([svnadmin_binary, 'pack', abs_path]))

  # make the repos world-writeable, for mod_dav_svn's sake.
  chmod_tree(path, S_ALL_RW, S_ALL_RW)
+
def _unpack_precooked_repos(path, template):
  # Locate the pre-cooked repository zip TEMPLATE under the 'templates'
  # directory (a sibling of the tests tree) and extract it into PATH.
  testdir = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
  repozip = os.path.join(os.path.dirname(testdir), "templates", template)
  zipfile.ZipFile(repozip, 'r').extractall(path)

# For creating new, pre-cooked greek repositories
def unpack_greek_repos(path):
  """Unpack a pre-cooked greek-tree repository of the configured FSFS
  format version into PATH and apply the standard post-create tweaks."""
  template = "greek-fsfs-v%d.zip" % options.fsfs_version
  _unpack_precooked_repos(path, template)
  _post_create_repos(path, options.server_minor_version)
+
# For creating blank new repositories
def create_repos(path, minor_version = None):
  """Create a brand-new SVN repository at PATH.  If PATH does not yet
  exist, create it."""

  if not os.path.exists(path):
    os.makedirs(path) # this creates all the intermediate dirs, if necessary

  if options.fsfs_version is None:
    # Normal path: run 'svnadmin create' with compatibility options.
    if options.fs_type == "bdb":
      opts = ("--bdb-txn-nosync",)
    else:
      opts = ()
    # Clamp the requested compatibility version to the server's.
    if minor_version is None or minor_version > options.server_minor_version:
      minor_version = options.server_minor_version
    opts += ("--compatible-version=1.%d" % (minor_version),)
    if options.fs_type is not None:
      opts += ("--fs-type=" + options.fs_type,)
    exit_code, stdout, stderr = run_command(svnadmin_binary, 1, False,
                                            "create", path, *opts)
  else:
    # Copy a pre-cooked FSFS repository
    assert options.fs_type == "fsfs"
    template = "empty-fsfs-v%d.zip" % options.fsfs_version
    _unpack_precooked_repos(path, template)
    exit_code, stdout, stderr = run_command(svnadmin_binary, 1, False,
                                            "setuuid", path)

  # Skip tests if we can't create the repository.
  if stderr:
    # Triage stderr: the single back-end deprecation warning for non-FSFS
    # backends is tolerated, everything else is fatal.
    stderr_lines = 0
    not_using_fsfs_backend = (options.fs_type != "fsfs")
    backend_deprecation_warning = False
    for line in stderr:
      stderr_lines += 1
      if line.find('Unknown FS type') != -1:
        raise Skip
      if not_using_fsfs_backend:
        if 0 < line.find('repository back-end is deprecated, consider using'):
          backend_deprecation_warning = True

    # Creating BDB repositories will cause svnadmin to print a warning
    # which should be ignored.
    if (stderr_lines == 1
        and not_using_fsfs_backend
        and backend_deprecation_warning):
      pass
    else:
      # If the FS type is known and we noticed more than just the
      # BDB-specific warning, assume the repos couldn't be created
      # (e.g. due to a missing 'svnadmin' binary).
      raise SVNRepositoryCreateFailure("".join(stderr).rstrip())

  # Configure the new repository.
  _post_create_repos(path, minor_version)
+
# For copying a repository
def copy_repos(src_path, dst_path, head_revision, ignore_uuid = 1,
               minor_version = None):
  "Copy the repository SRC_PATH, with head revision HEAD_REVISION, to DST_PATH"

  # Save any previous value of SVN_DBG_QUIET
  saved_quiet = os.environ.get('SVN_DBG_QUIET')
  os.environ['SVN_DBG_QUIET'] = 'y'

  # Do an svnadmin dump|svnadmin load cycle. Print a fake pipe command so that
  # the displayed CMDs can be run by hand
  create_repos(dst_path, minor_version)
  dump_args = ['dump', src_path]
  load_args = ['load', dst_path]

  if ignore_uuid:
    load_args = load_args + ['--ignore-uuid']

  logger.info('CMD: %s %s | %s %s' %
              (os.path.basename(svnadmin_binary), ' '.join(dump_args),
               os.path.basename(svnadmin_binary), ' '.join(load_args)))
  start = time.time()

  # Run the dump and load as an actual pipeline: the load child's stdin
  # is attached directly to the dump child's stdout.
  dump_in, dump_out, dump_err, dump_kid = open_pipe(
    [svnadmin_binary] + dump_args)
  load_in, load_out, load_err, load_kid = open_pipe(
    [svnadmin_binary] + load_args,
    stdin=dump_out) # Attached to dump_kid

  load_stdout, load_stderr, load_exit_code = wait_on_pipe(load_kid, True)
  dump_stdout, dump_stderr, dump_exit_code = wait_on_pipe(dump_kid, True)

  dump_in.close()
  dump_out.close()
  dump_err.close()
  #load_in is dump_out so it's already closed.
  load_out.close()
  load_err.close()

  stop = time.time()
  logger.info('<TIME = %.6f>' % (stop - start))

  # Restore the caller's SVN_DBG_QUIET setting.
  if saved_quiet is None:
    del os.environ['SVN_DBG_QUIET']
  else:
    os.environ['SVN_DBG_QUIET'] = saved_quiet

  # 'svnadmin dump' reports progress on stderr; verify we saw exactly one
  # "Dumped revision N" line per revision, in order, starting at 0.
  dump_re = re.compile(r'^\* Dumped revision (\d+)\.\r?$')
  expect_revision = 0
  dump_failed = False
  for dump_line in dump_stderr:
    match = dump_re.match(dump_line)
    if not match or match.group(1) != str(expect_revision):
      logger.warn('ERROR: dump failed: %s', dump_line.strip())
      dump_failed = True
    else:
      expect_revision += 1
  if dump_failed:
    raise SVNRepositoryCopyFailure
  if expect_revision != head_revision + 1:
    logger.warn('ERROR: dump failed; did not see revision %s', head_revision)
    raise SVNRepositoryCopyFailure

  # The load output is binary, hence the bytes pattern; check that each
  # committed revision arrives in order, starting at 1.
  load_re = re.compile(b'^------- Committed revision (\\d+) >>>\\r?$')
  expect_revision = 1
  for load_line in filter_dbg(load_stdout, True):
    match = load_re.match(load_line)
    if match:
      if match.group(1).decode() != str(expect_revision):
        logger.warn('ERROR: load failed: %s', load_line.strip())
        raise SVNRepositoryCopyFailure
      expect_revision += 1
  if expect_revision != head_revision + 1:
    logger.warn('ERROR: load failed; did not see revision %s', head_revision)
    raise SVNRepositoryCopyFailure
+
+
def canonicalize_url(input):
  "Canonicalize the url, if the scheme is unknown, returns intact input"

  match = re.match(r"^((file://)|((svn|svn\+ssh|http|https)(://)))", input)
  if not match:
    return input
  scheme = match.group(1)
  rest = input[len(scheme):]
  # Collapse every run of slashes after the scheme into a single slash.
  return scheme + re.sub(r'//*', '/', rest)
+
+
def create_python_hook_script(hook_path, hook_script_code,
                              cmd_alternative=None):
  """Create a Python hook script at HOOK_PATH with the specified
  HOOK_SCRIPT_CODE.  On Windows, CMD_ALTERNATIVE (if given) is written
  verbatim as a .bat file instead of wrapping the Python script."""

  if windows:
    if cmd_alternative is not None:
      # Caller supplied a native batch implementation; write it as-is.
      file_write("%s.bat" % hook_path,
                 cmd_alternative)
    else:
      # Use an absolute path since the working directory is not guaranteed
      hook_path = os.path.abspath(hook_path)
      # Fill the python file.
      file_write("%s.py" % hook_path, hook_script_code)
      # Fill the batch wrapper file.
      file_write("%s.bat" % hook_path,
                 "@\"%s\" %s.py %%*\n" % (sys.executable, hook_path))
  else:
    # For all other platforms
    file_write(hook_path, "#!%s\n%s" % (sys.executable, hook_script_code))
    os.chmod(hook_path, S_ALL_RW | stat.S_IXUSR)
+
def create_http_connection(url, debuglevel=9):
  """Create an http(s) connection to the host specified by URL.
  Set the debugging level (the amount of debugging output printed when
  working with this connection) to DEBUGLEVEL. By default, all debugging
  output is printed. """

  if sys.version_info < (3, 0):
    # Python <3.0
    import httplib
  else:
    # Python >=3.0
    import http.client as httplib

  loc = urlparse(url)
  if loc.scheme == 'http':
    conn = httplib.HTTPConnection(loc.hostname, loc.port)
  else:
    try:
      # Disable certificate verification -- the test certs are
      # self-signed.  ssl.create_default_context() is new in python 2.6+.
      import ssl
      ctx = ssl.create_default_context()
      ctx.check_hostname = False
      ctx.verify_mode = ssl.CERT_NONE
      conn = httplib.HTTPSConnection(loc.hostname, loc.port, context=ctx)
    except:
      conn = httplib.HTTPSConnection(loc.hostname, loc.port)
  conn.set_debuglevel(debuglevel)
  return conn
+
def write_restrictive_svnserve_conf(repo_dir, anon_access="none"):
  "Create a restrictive authz file ( no anynomous access )."

  fp = open(get_svnserve_conf_file_path(repo_dir), 'w')
  fp.write("[general]\nanon-access = %s\nauth-access = write\n"
           "authz-db = authz\n" % anon_access)
  if options.enable_sasl:
    # SASL handles authentication; no password database needed.
    fp.write("realm = svntest\n[sasl]\nuse-sasl = true\n");
  else:
    fp.write("password-db = passwd\n")
  fp.close()

def write_restrictive_svnserve_conf_with_groups(repo_dir,
                                                anon_access="none"):
  "Create a restrictive configuration with groups stored in a separate file."

  # Identical to write_restrictive_svnserve_conf() plus 'groups-db'.
  fp = open(get_svnserve_conf_file_path(repo_dir), 'w')
  fp.write("[general]\nanon-access = %s\nauth-access = write\n"
           "authz-db = authz\ngroups-db = groups\n" % anon_access)
  if options.enable_sasl:
    fp.write("realm = svntest\n[sasl]\nuse-sasl = true\n");
  else:
    fp.write("password-db = passwd\n")
  fp.close()
+
# Warning: because mod_dav_svn uses one shared authz file for all
# repositories, you *cannot* use write_authz_file in any test that
# might be run in parallel.
#
# write_authz_file can *only* be used in test suites which disable
# parallel execution at the bottom like so
#   if __name__ == '__main__':
#     svntest.main.run_tests(test_list, serial_only = True)
def write_authz_file(sbox, rules, sections=None, prefixed_rules=None):
  """Write an authz file to SBOX, appropriate for the RA method used,
with authorizations rules RULES mapping paths to strings containing
the rules. You can add sections SECTIONS (ex. groups, aliases...) with
an appropriate list of mappings.
"""
  fp = open(sbox.authz_file, 'w')

  # When the sandbox repository is read only its name will be different from
  # the repository name.
  repo_name = os.path.basename(sbox.repo_dir.rstrip('/'))

  # Over HTTP the shared authz file qualifies paths with the repo name.
  if sbox.repo_url.startswith("http"):
    default_prefix = repo_name + ":"
  else:
    default_prefix = ""

  if sections:
    for p, r in sections.items():
      fp.write("[%s]\n%s\n" % (p, r))

  if not prefixed_rules:
    prefixed_rules = dict()

  # RULES are merged into PREFIXED_RULES after prefixing their paths.
  if rules:
    for p, r in rules.items():
      prefixed_rules[default_prefix + p] = r

  for p, r in prefixed_rules.items():
    fp.write("[%s]\n%s\n" % (p, r))
    if tests_verify_dump_load_cross_check():
      # Insert an ACE that lets the dump/load cross-check bypass
      # authz restrictions.
      fp.write(crosscheck_username + " = rw\n")

  if tests_verify_dump_load_cross_check() and '/' not in prefixed_rules:
    # We need a repository-root ACE for the dump/load cross-check
    fp.write("[/]\n" + crosscheck_username + " = rw\n")

  fp.close()
+
# See the warning about parallel test execution in write_authz_file
# method description.
def write_groups_file(sbox, groups):
  """Write a groups file to SBOX, appropriate for the RA method used,
with group contents set to GROUPS (a dict mapping group names to
member lists)."""
  fp = open(sbox.groups_file, 'w')
  fp.write("[groups]\n")
  if groups:
    for p, r in groups.items():
      fp.write("%s = %s\n" % (p, r))
  fp.close()

def use_editor(func):
  # Point the svn client at the test-suite editor script and record (via
  # the environment) which editor function the script should execute.
  os.environ['SVN_EDITOR'] = svneditor_script
  os.environ['SVN_MERGE'] = svneditor_script
  os.environ['SVNTEST_EDITOR_FUNC'] = func
  os.environ['SVN_TEST_PYTHON'] = sys.executable
+
def mergeinfo_notify_line(revstart, revend, target=None):
  """Return an expected output line that describes the beginning of a
  mergeinfo recording notification on revisions REVSTART through REVEND."""
  # An explicit TARGET is matched literally; otherwise match any path.
  target_re = re.escape(target) if target else ".+"
  if revend is None:
    # Single revision; a negative REVSTART denotes a reverse merge.
    if revstart < 0:
      return "--- Recording mergeinfo for reverse merge of r%ld into '%s':\n" \
             % (abs(revstart), target_re)
    return "--- Recording mergeinfo for merge of r%ld into '%s':\n" \
           % (revstart, target_re)
  if revstart < revend:
    return "--- Recording mergeinfo for merge of r%ld through r%ld into '%s':\n" \
           % (revstart, revend, target_re)
  return "--- Recording mergeinfo for reverse merge of r%ld through " \
         "r%ld into '%s':\n" % (revstart, revend, target_re)
+
def merge_notify_line(revstart=None, revend=None, same_URL=True,
                      foreign=False, target=None):
  """Return an expected output line that describes the beginning of a
  merge operation on revisions REVSTART through REVEND.  Omit both
  REVSTART and REVEND for the case where the left and right sides of
  the merge are from different URLs."""
  from_foreign_phrase = r"\(from foreign repository\) " if foreign else ""
  target_re = re.escape(target) if target else ".+"
  if not same_URL or (revstart is None and revend is None):
    # The left and right sides of the merge are from different URLs.
    return "--- Merging differences between %srepository URLs into '%s':\n" \
           % ("foreign " if foreign else "", target_re)
  if revend is None:
    if revstart < 0:
      return "--- Reverse-merging %sr%ld into '%s':\n" \
             % (from_foreign_phrase, abs(revstart), target_re)
    return "--- Merging %sr%ld into '%s':\n" \
           % (from_foreign_phrase, revstart, target_re)
  if revstart > revend:
    return "--- Reverse-merging %sr%ld through r%ld into '%s':\n" \
           % (from_foreign_phrase, revstart, revend, target_re)
  return "--- Merging %sr%ld through r%ld into '%s':\n" \
         % (from_foreign_phrase, revstart, revend, target_re)
+
def summary_of_conflicts(text_conflicts=0,
                         prop_conflicts=0,
                         tree_conflicts=0,
                         text_resolved=0,
                         prop_resolved=0,
                         tree_resolved=0,
                         skipped_paths=0,
                         as_regex=False):
  """Return a list of lines corresponding to the summary of conflicts and
  skipped paths that is printed by merge and update and switch.  If all
  parameters are zero, return an empty list.
  """
  lines = []
  if (text_conflicts or prop_conflicts or tree_conflicts
      or text_resolved or prop_resolved or tree_resolved
      or skipped_paths):
    lines.append("Summary of conflicts:\n")
  if text_conflicts or text_resolved:
    if text_resolved == 0:
      lines.append("  Text conflicts: %d\n" % text_conflicts)
    else:
      lines.append("  Text conflicts: %d remaining (and %d already resolved)\n"
                   % (text_conflicts, text_resolved))
  if prop_conflicts or prop_resolved:
    if prop_resolved == 0:
      lines.append("  Property conflicts: %d\n" % prop_conflicts)
    else:
      lines.append("  Property conflicts: %d remaining (and %d already resolved)\n"
                   % (prop_conflicts, prop_resolved))
  if tree_conflicts or tree_resolved:
    if tree_resolved == 0:
      lines.append("  Tree conflicts: %d\n" % tree_conflicts)
    else:
      lines.append("  Tree conflicts: %d remaining (and %d already resolved)\n"
                   % (tree_conflicts, tree_resolved))
  if skipped_paths:
    lines.append("  Skipped paths: %d\n" % skipped_paths)

  if as_regex:
    # Use a list comprehension instead of map(): on Python 3 map() returns
    # a lazy iterator, while this function is documented to return a list.
    lines = [re.escape(line) for line in lines]
  return lines
+
+
def make_log_msg():
  "Conjure up a log message based on the calling test."

  # Walk up the call stack looking for the test function that (directly
  # or indirectly) called us.
  # NOTE(review): sys._getframe(idx) raises ValueError if idx exceeds the
  # stack depth; this assumes a *_tests.py test frame is found within 99
  # levels -- confirm for shallow call stacks.
  for idx in range(1, 100):
    frame = sys._getframe(idx)

    # If this frame isn't from a function in *_tests.py, then skip it.
    filename = frame.f_code.co_filename
    if not filename.endswith('_tests.py'):
      continue

    # There should be a test_list in this module.
    test_list = frame.f_globals.get('test_list')
    if test_list is None:
      continue

    # If the function is not in the test_list, then skip it.
    func_name = frame.f_code.co_name
    func_ob = frame.f_globals.get(func_name)
    if func_ob not in test_list:
      continue

    # Make the log message look like a line from a traceback.
    # Well...close. We use single quotes to avoid interfering with the
    # double-quote quoting performed on Windows
    return "File '%s', line %d, in %s" % (filename, frame.f_lineno, func_name)
+
+
+######################################################################
+# Functions which check the test configuration
+# (useful for conditional XFails)
+
def tests_use_prepackaged_repository():
  # True when tests unpack a pre-cooked FSFS template repository instead
  # of running 'svnadmin create'.
  return options.fsfs_version is not None

def tests_verify_dump_load_cross_check():
  # True when every test repository is additionally dump/load round-tripped.
  return options.dump_load_cross_check

def is_ra_type_dav():
  return options.test_area_url.startswith('http')

def is_ra_type_dav_neon():
  """Return True iff running tests over RA-Neon.
  CAUTION: Result is only valid if svn was built to support both."""
  return options.test_area_url.startswith('http') and \
    (options.http_library == "neon")

def is_ra_type_dav_serf():
  """Return True iff running tests over RA-Serf.
  CAUTION: Result is only valid if svn was built to support both."""
  return options.test_area_url.startswith('http') and \
    (options.http_library == "serf")

def is_ra_type_svn():
  """Return True iff running tests over RA-svn."""
  return options.test_area_url.startswith('svn')

def is_ra_type_file():
  """Return True iff running tests over RA-local."""
  return options.test_area_url.startswith('file')

def is_fs_type_fsfs():
  # This assumes that fsfs is the default fs implementation.
  return options.fs_type == 'fsfs' or options.fs_type is None

def is_fs_type_fsx():
  return options.fs_type == 'fsx'

def is_fs_type_bdb():
  return options.fs_type == 'bdb'

def is_fs_log_addressing():
  # FSX, or FSFS at server minor version 9 (1.9) and later.
  return is_fs_type_fsx() or \
    (is_fs_type_fsfs() and options.server_minor_version >= 9)

def fs_has_sha1():
  # SHA-1 availability tracks rep-sharing support.
  return fs_has_rep_sharing()

def fs_has_rep_sharing():
  return options.server_minor_version >= 6

def fs_has_pack():
  return is_fs_type_fsx() or \
    (is_fs_type_fsfs() and options.server_minor_version >= 6)

def fs_has_unique_freeze():
  return (is_fs_type_fsfs() and options.server_minor_version >= 9
          or is_fs_type_bdb())

def is_os_windows():
  return os.name == 'nt'

def is_windows_type_dav():
  return is_os_windows() and is_ra_type_dav()

def is_posix_os():
  return os.name == 'posix'

def is_os_darwin():
  return sys.platform == 'darwin'

def is_fs_case_insensitive():
  # Assumes the default filesystem of the platform is in use.
  return (is_os_darwin() or is_os_windows())

def is_threaded_python():
  return True
+
def server_has_mergeinfo():
  # Mergeinfo support requires server 1.5 or later.
  return options.server_minor_version >= 5

# The following predicates simply report the corresponding capability
# flag of the configured server.
def server_has_revprop_commit():
  return options.server_caps.has_revprop_commit

def server_authz_has_aliases():
  return options.server_caps.authz_has_aliases

def server_gets_client_capabilities():
  return options.server_caps.gets_client_capabilities

def server_has_partial_replay():
  return options.server_caps.has_partial_replay

def server_enforces_UTF8_fspaths_in_verify():
  return options.server_caps.enforces_UTF8_fspaths_in_verify

def server_enforces_date_syntax():
  return options.server_caps.enforces_date_syntax

def server_has_atomic_revprop():
  return options.server_caps.has_atomic_revprop

def server_has_reverse_get_file_revs():
  return options.server_caps.has_reverse_get_file_revs

def is_plaintext_password_storage_disabled():
  # Scan 'svn --version' output: the warning below is printed iff
  # plaintext password storage is enabled.
  try:
    predicate = re.compile("^WARNING: Plaintext password storage is enabled!")
    code, out, err = run_svn(False, "--version")
    for line in out:
      if predicate.match(line):
        return False
  except:
    # NOTE(review): any failure to run svn is silently reported as
    # "disabled" -- confirm this is the intended default.
    return False
  return True
+
+
# https://issues.apache.org/bugzilla/show_bug.cgi?id=56480
# https://issues.apache.org/bugzilla/show_bug.cgi?id=55397
# httpd releases whose mod_dav mishandles quoted URLs (see bugs above).
__mod_dav_url_quoting_broken_versions = frozenset([
  '2.2.27',
  '2.2.26',
  '2.2.25',
  '2.4.9',
  '2.4.8',
  '2.4.7',
  '2.4.6',
  '2.4.5',
])
def is_mod_dav_url_quoting_broken():
  # Returns None (not False) when not running over DAV or when the
  # configured httpd version is whitelisted.
  if is_ra_type_dav() and options.httpd_version != options.httpd_whitelist:
    return (options.httpd_version in __mod_dav_url_quoting_broken_versions)
  return None

def is_httpd_authz_provider_enabled():
  # True for httpd version 2.3 or newer; None when not running over DAV.
  if is_ra_type_dav():
    v = options.httpd_version.split('.')
    return (v[0] == '2' and int(v[1]) >= 3) or int(v[0]) > 2
  return None
+
+######################################################################
+
+
class TestSpawningThread(threading.Thread):
  """A thread that runs test cases in their own processes.
  Receives test numbers to run from the queue, and saves results into
  the results field."""
  def __init__(self, queue, progress_func, tests_total):
    threading.Thread.__init__(self)
    # Shared queue of test indexes waiting to be run.
    self.queue = queue
    # List of (index, exit_code, stdout_lines, stderr_lines) tuples.
    self.results = []
    self.progress_func = progress_func
    self.tests_total = tests_total

  def run(self):
    # Pull test indexes until the queue is drained.
    while True:
      try:
        next_index = self.queue.get_nowait()
      except queue.Empty:
        # 'queue' here is the queue *module*; the instance lives in
        # self.queue.
        return

      self.run_one(next_index)

      # signal progress
      if self.progress_func:
        self.progress_func(self.tests_total - self.queue.qsize(),
                           self.tests_total)

  def run_one(self, index):
    # Re-invoke this test script for a single test index, forwarding the
    # relevant startup options so the child runs with this process's
    # configuration.
    command = os.path.abspath(sys.argv[0])

    args = []
    args.append(str(index))
    args.append('-c')
    args.append('--set-log-level=%s' % logger.getEffectiveLevel())
    # add some startup arguments from this process
    if options.fs_type:
      args.append('--fs-type=' + options.fs_type)
    if options.test_area_url:
      args.append('--url=' + options.test_area_url)
    if options.cleanup:
      args.append('--cleanup')
    if options.enable_sasl:
      args.append('--enable-sasl')
    if options.http_library:
      args.append('--http-library=' + options.http_library)
    if options.server_minor_version:
      args.append('--server-minor-version=' + str(options.server_minor_version))
    if options.mode_filter:
      args.append('--mode-filter=' + options.mode_filter)
    if options.milestone_filter:
      args.append('--milestone-filter=' + options.milestone_filter)
    if options.ssl_cert:
      args.append('--ssl-cert=' + options.ssl_cert)
    if options.http_proxy:
      args.append('--http-proxy=' + options.http_proxy)
    if options.http_proxy_username:
      args.append('--http-proxy-username=' + options.http_proxy_username)
    if options.http_proxy_password:
      args.append('--http-proxy-password=' + options.http_proxy_password)
    if options.httpd_version:
      args.append('--httpd-version=' + options.httpd_version)
    if options.httpd_whitelist:
      args.append('--httpd-whitelist=' + options.httpd_whitelist)
    if options.exclusive_wc_locks:
      args.append('--exclusive-wc-locks')
    if options.memcached_server:
      args.append('--memcached-server=' + options.memcached_server)
    if options.fsfs_sharding:
      args.append('--fsfs-sharding=' + str(options.fsfs_sharding))
    if options.fsfs_packing:
      args.append('--fsfs-packing')
    if options.fsfs_version:
      args.append('--fsfs-version=' + str(options.fsfs_version))
    if options.dump_load_cross_check:
      args.append('--dump-load-cross-check')
    if options.fsfs_compression:
      args.append('--fsfs-compression=' + options.fsfs_compression)
    if options.fsfs_dir_deltification:
      args.append('--fsfs-dir-deltification=' + options.fsfs_dir_deltification)
    if options.svn_bin:
      args.append('--bin=' + options.svn_bin)

    result, stdout_lines, stderr_lines = spawn_process(command, 0, False, None,
                                                       *args)
    self.results.append((index, result, stdout_lines, stderr_lines))
+
class TestRunner:
  """Encapsulate a single test case (predicate), including logic for
  running the test and test list output."""

  def __init__(self, func, index):
    # Wrap FUNC in a TestCase so mode/issue/description queries work
    # uniformly.  INDEX is the test's 1-based position in the test list.
    self.pred = svntest.testcase.create_test_case(func)
    self.index = index

  def list(self, milestones_dict=None):
    """Print test doc strings. MILESTONES_DICT is an optional mapping
    of issue numbers to a list containing target milestones and who
    the issue is assigned to."""
    if options.mode_filter.upper() == 'ALL' \
       or options.mode_filter.upper() == self.pred.list_mode().upper() \
       or (options.mode_filter.upper() == 'PASS' \
           and self.pred.list_mode() == ''):
      issues = []
      tail = ''
      if self.pred.issues:
        if not options.milestone_filter or milestones_dict is None:
          issues = self.pred.issues
          tail += " [%s]" % ','.join(['#%s' % str(i) for i in issues])
        else: # Limit listing by requested target milestone(s).
          filter_issues = []
          matches_filter = False

          # Get the milestones for all the issues associated with this test.
          # If any one of them matches the MILESTONE_FILTER then we'll print
          # them all.
          for issue in self.pred.issues:
            # Some safe starting assumptions.
            milestone = 'unknown'
            assigned_to = 'unknown'
            if milestones_dict:
              # Fix: dict.has_key() was removed in Python 3; the 'in'
              # operator is the exact equivalent and works in both 2 and 3.
              if str(issue) in milestones_dict:
                milestone = milestones_dict[str(issue)][0]
                assigned_to = milestones_dict[str(issue)][1]

            filter_issues.append(
              str(issue) + '(' + milestone + '/' + assigned_to + ')')
            pattern = re.compile(options.milestone_filter)
            if pattern.match(milestone):
              matches_filter = True

          # Did at least one of the associated issues meet our filter?
          if matches_filter:
            issues = filter_issues
            # Wrap the issue#/target-milestone/assigned-to string
            # to the next line and add a line break to enhance
            # readability.
            tail += "\n %s" % '\n '.join(
              ['#%s' % str(i) for i in issues])
            tail += '\n'
      # If there is no filter or this test made it through
      # the filter then print it!
      if options.milestone_filter is None or len(issues):
        if self.pred.inprogress:
          tail += " [[%s]]" % self.pred.inprogress
        else:
          print(" %3d %-5s %s%s" % (self.index,
                                    self.pred.list_mode(),
                                    self.pred.description,
                                    tail))
          sys.stdout.flush()

  def get_mode(self):
    """Return the test's list mode (e.g. '', 'XFAIL', 'SKIP')."""
    return self.pred.list_mode()

  def get_issues(self):
    """Return the issue numbers associated with this test."""
    return self.pred.issues

  def get_function_name(self):
    """Return the name of the underlying test function."""
    return self.pred.get_function_name()

  def _print_name(self, prefix):
    """Print PREFIX, the test script name, this test's number and its
    description on a single flushed line."""
    if self.pred.inprogress:
      print("%s %s %s: %s [[WIMP: %s]]" % (prefix,
                                           os.path.basename(sys.argv[0]),
                                           str(self.index),
                                           self.pred.description,
                                           self.pred.inprogress))
    else:
      print("%s %s %s: %s" % (prefix,
                              os.path.basename(sys.argv[0]),
                              str(self.index),
                              self.pred.description))
    sys.stdout.flush()

  def run(self):
    """Run self.pred and return the result. The return value is
    - 0 if the test was successful
    - 1 if it errored in a way that indicates test failure
    - 2 if the test skipped
    """
    sbox_name = self.pred.get_sandbox_name()
    if sbox_name:
      sandbox = svntest.sandbox.Sandbox(sbox_name, self.index)
    else:
      sandbox = None

    # Explicitly set this so that commands that commit but don't supply a
    # log message will fail rather than invoke an editor.
    # Tests that want to use an editor should invoke svntest.main.use_editor.
    os.environ['SVN_EDITOR'] = ''
    os.environ['SVNTEST_EDITOR_FUNC'] = ''

    if options.use_jsvn:
      # Set this SVNKit specific variable to the current test (test name plus
      # its index) being run so that SVNKit daemon could use this test name
      # for its separate log file
      os.environ['SVN_CURRENT_TEST'] = os.path.basename(sys.argv[0]) + "_" + \
                                       str(self.index)

    svntest.actions.no_sleep_for_timestamps()
    svntest.actions.do_relocate_validation()

    saved_dir = os.getcwd()
    try:
      rc = self.pred.run(sandbox)
      if rc is not None:
        # Test functions must signal failure by raising, not returning.
        self._print_name('STYLE ERROR in')
        print('Test driver returned a status code.')
        sys.exit(255)
      result = svntest.testcase.RESULT_OK
    except Skip as ex:
      result = svntest.testcase.RESULT_SKIP
    except Failure as ex:
      result = svntest.testcase.RESULT_FAIL
      msg = ''
      # We captured Failure and its subclasses. We don't want to print
      # anything for plain old Failure since that just indicates test
      # failure, rather than relevant information. However, if there
      # *is* information in the exception's arguments, then print it.
      if ex.__class__ != Failure or ex.args:
        ex_args = str(ex)
        logger.warn('CWD: %s' % os.getcwd())
        if ex_args:
          msg = 'EXCEPTION: %s: %s' % (ex.__class__.__name__, ex_args)
        else:
          msg = 'EXCEPTION: %s' % ex.__class__.__name__
        logger.warn(msg, exc_info=True)
    except KeyboardInterrupt:
      logger.error('Interrupted')
      sys.exit(0)
    except SystemExit as ex:
      # NOTE(review): '%d' assumes ex.code is an int; sys.exit(str) would
      # make this raise -- confirm callers only exit with integer codes.
      logger.error('EXCEPTION: SystemExit(%d), skipping cleanup' % ex.code)
      self._print_name(ex.code and 'FAIL: ' or 'PASS: ')
      raise
    except:
      result = svntest.testcase.RESULT_FAIL
      logger.warn('CWD: %s' % os.getcwd(), exc_info=True)

    os.chdir(saved_dir)
    exit_code, result_text, result_benignity = self.pred.results(result)
    if not (options.quiet and result_benignity):
      self._print_name(result_text)
    # Only clean up the sandbox for non-failing tests when --cleanup is on.
    if sandbox is not None and exit_code != 1 and options.cleanup:
      sandbox.cleanup_test_paths()
    return exit_code
+
+######################################################################
+# Main testing functions
+
+# These two functions each take a TEST_LIST as input. The TEST_LIST
+# should be a list of test functions; each test function should take
+# no arguments and return a 0 on success, non-zero on failure.
+# Ideally, each test should also have a short, one-line docstring (so
+# it can be displayed by the 'list' command.)
+
+# Func to run one test in the list.
def run_one_test(n, test_list, finished_tests = None):
  """Run the Nth client test in TEST_LIST and return its exit code.

  N may be negative to count from the end of TEST_LIST, so that
  './basic_tests.py -- -1' runs the last test.  Tests excluded by
  --mode-filter are not run and count as success (0).
  """
  last = len(test_list) - 1
  if n == 0 or abs(n) > last:
    print("There is no test %s.\n" % n)
    return 1
  if n < 0:
    # Translate a negative index into the equivalent positive one.
    n = n + last + 1

  wanted = options.mode_filter.upper()
  actual = TestRunner(test_list[n], n).get_mode().upper()
  selected = (wanted == 'ALL'
              or wanted == actual
              or (wanted == 'PASS' and actual == ''))
  if not selected:
    return 0
  # Run the test.
  return TestRunner(test_list[n], n).run()
+
def _internal_run_tests(test_list, testnums, parallel, srcdir, progress_func):
  """Run the tests from TEST_LIST whose indices are listed in TESTNUMS.

  If we're running the tests in parallel spawn as much parallel processes
  as requested and gather the results in a temp. buffer when a child
  process is finished.
  """

  exit_code = 0
  finished_tests = []
  tests_started = 0

  # Some of the tests use sys.argv[0] to locate their test data
  # directory. Perhaps we should just be passing srcdir to the tests?
  if srcdir:
    sys.argv[0] = os.path.join(srcdir, 'subversion', 'tests', 'cmdline',
                               sys.argv[0])

  if not parallel:
    # Serial mode: run every selected test in-process, one at a time.
    for i, testnum in enumerate(testnums):

      if run_one_test(testnum, test_list) == 1:
        exit_code = 1
      # signal progress
      if progress_func:
        progress_func(i+1, len(testnums))
  else:
    # Parallel mode: feed the test numbers through a queue to PARALLEL
    # worker threads, each of which spawns one child process per test.
    number_queue = queue.Queue()
    for num in testnums:
      number_queue.put(num)

    threads = [ TestSpawningThread(number_queue, progress_func,
                                   len(testnums)) for i in range(parallel) ]
    for t in threads:
      t.start()

    for t in threads:
      t.join()

    # list of (index, result, stdout, stderr)
    results = []
    for t in threads:
      results += t.results
    # Sorting by index restores the original test order in the output.
    results.sort()

    # all tests are finished, find out the result and print the logs.
    for (index, result, stdout_lines, stderr_lines) in results:
      if stdout_lines:
        for line in stdout_lines:
          sys.stdout.write(line)
      if stderr_lines:
        # NOTE(review): the children's stderr is echoed to *stdout*,
        # presumably to keep each test's output contiguous -- confirm.
        for line in stderr_lines:
          sys.stdout.write(line)
      if result == 1:
        exit_code = 1

  svntest.sandbox.cleanup_deferred_test_paths()
  return exit_code
+
+
class AbbreviatedFormatter(logging.Formatter):
  """Log formatter that exposes a one-character loglevel indicator.

  Use %(levelshort)s in the format string to get a single character
  representing the loglevel.
  """

  # One-letter codes for the standard loglevels.  A record carrying a
  # non-standard level raises KeyError, exactly like a plain dict lookup.
  _level_short = {
    logging.CRITICAL: 'C',
    logging.ERROR: 'E',
    logging.WARNING: 'W',
    logging.INFO: 'I',
    logging.DEBUG: 'D',
    logging.NOTSET: '-',
  }

  def format(self, record):
    """Attach RECORD.levelshort, then delegate to the base formatter."""
    shortcode = AbbreviatedFormatter._level_short[record.levelno]
    record.levelshort = shortcode
    return logging.Formatter.format(self, record)
+
def _create_parser(usage=None):
  """Return an optparse.OptionParser for our test suite, configuring the
  module-level LOGGER as a side effect."""

  global logger

  # Initialize the LOGGER global variable so the option parsing can set
  # its loglevel, as appropriate.
  logger = logging.getLogger()

  # Did some chucklehead log something before we configured it? If they
  # did, then a default handler/formatter would get installed. We want
  # to be the one to install the first (and only) handler.
  for handler in logger.handlers:
    if not isinstance(handler.formatter, AbbreviatedFormatter):
      raise Exception('Logging occurred before configuration. Some code'
                      ' path needs to be fixed. Examine the log output'
                      ' to find what/where logged something.')

  # Set a sane default log level
  if logger.getEffectiveLevel() == logging.NOTSET:
    logger.setLevel(logging.WARN)

  def set_log_level(option, opt, value, parser, level=None):
    # optparse callback shared by --verbose (fixed LEVEL) and
    # --set-log-level (symbolic name or numeric VALUE).
    if level:
      # called from --verbose
      logger.setLevel(level)
    else:
      # called from --set-log-level
      logger.setLevel(getattr(logging, value, None) or int(value))

  # Set up the parser.
  # If you add new options, consider adding them in
  #
  # .../build/run_tests.py:main()
  #
  # and handling them in
  #
  # .../build/run_tests.py:TestHarness._init_py_tests()
  #
  _default_http_library = 'serf'
  if usage is None:
    usage = 'usage: %prog [options] [<test> ...]'
  parser = optparse.OptionParser(usage=usage)
  parser.add_option('-l', '--list', action='store_true', dest='list_tests',
                    help='Print test doc strings instead of running them')
  parser.add_option('--milestone-filter', action='store', dest='milestone_filter',
                    help='Limit --list to those with target milestone specified')
  parser.add_option('-v', '--verbose', action='callback',
                    callback=set_log_level, callback_args=(logging.DEBUG, ),
                    help='Print binary command-lines (same as ' +
                    '"--set-log-level logging.DEBUG")')
  parser.add_option('-q', '--quiet', action='store_true',
                    help='Print only unexpected results (not with --verbose)')
  parser.add_option('-p', '--parallel', action='store_const',
                    const=default_num_threads, dest='parallel',
                    help='Run the tests in parallel')
  parser.add_option('--parallel-instances', action='store',
                    type='int', dest='parallel',
                    help='Run the given number of tests in parallel')
  parser.add_option('-c', action='store_true', dest='is_child_process',
                    help='Flag if we are running this python test as a ' +
                    'child process')
  parser.add_option('--mode-filter', action='store', dest='mode_filter',
                    default='ALL',
                    help='Limit tests to those with type specified (e.g. XFAIL)')
  parser.add_option('--url', action='store',
                    help='Base url to the repos (e.g. svn://localhost)')
  parser.add_option('--fs-type', action='store',
                    help='Subversion file system type (fsfs, bdb or fsx)')
  parser.add_option('--cleanup', action='store_true',
                    help='Whether to clean up')
  parser.add_option('--enable-sasl', action='store_true',
                    help='Whether to enable SASL authentication')
  parser.add_option('--bin', action='store', dest='svn_bin',
                    help='Use the svn binaries installed in this path')
  parser.add_option('--use-jsvn', action='store_true',
                    help="Use the jsvn (SVNKit based) binaries. Can be " +
                    "combined with --bin to point to a specific path")
  parser.add_option('--http-library', action='store',
                    help="Make svn use this DAV library (neon or serf) if " +
                    "it supports both, else assume it's using this " +
                    "one; the default is " + _default_http_library)
  parser.add_option('--server-minor-version', type='int', action='store',
                    help="Set the minor version for the server ('3'..'%d')."
                    % SVN_VER_MINOR)
  parser.add_option('--fsfs-packing', action='store_true',
                    help="Run 'svnadmin pack' automatically")
  parser.add_option('--fsfs-sharding', action='store', type='int',
                    help='Default shard size (for fsfs)')
  parser.add_option('--fsfs-version', type='int', action='store',
                    help='FSFS version (fsfs)')
  parser.add_option('--dump-load-cross-check', action='store_true',
                    help="After every test, run a series of dump and load " +
                    "tests with svnadmin, svnrdump and svndumpfilter " +
                    " on the testcase repositories to cross-check " +
                    " dump file compatibility.")
  parser.add_option('--config-file', action='store',
                    help="Configuration file for tests.")
  parser.add_option('--set-log-level', action='callback', type='str',
                    callback=set_log_level,
                    help="Set log level (numerically or symbolically). " +
                    "Symbolic levels are: CRITICAL, ERROR, WARNING, " +
                    "INFO, DEBUG")
  parser.add_option('--log-with-timestamps', action='store_true',
                    help="Show timestamps in test log.")
  parser.add_option('--keep-local-tmp', action='store_true',
                    help="Don't remove svn-test-work/local_tmp after test " +
                    "run is complete. Useful for debugging failures.")
  parser.add_option('--development', action='store_true',
                    help='Test development mode: provides more detailed ' +
                    'test output and ignores all exceptions in the ' +
                    'run_and_verify* functions. This option is only ' +
                    'useful during test development!')
  parser.add_option('--srcdir', action='store', dest='srcdir',
                    help='Source directory.')
  parser.add_option('--ssl-cert', action='store',
                    help='Path to SSL server certificate.')
  parser.add_option('--http-proxy', action='store',
                    help='Use the HTTP Proxy at hostname:port.')
  parser.add_option('--http-proxy-username', action='store',
                    help='Username for the HTTP Proxy.')
  parser.add_option('--http-proxy-password', action='store',
                    help='Password for the HTTP Proxy.')
  parser.add_option('--httpd-version', action='store',
                    help='Assume HTTPD is this version.')
  parser.add_option('--httpd-whitelist', action='store',
                    help='httpd whitelist version.')
  parser.add_option('--tools-bin', action='store', dest='tools_bin',
                    help='Use the svn tools installed in this path')
  parser.add_option('--exclusive-wc-locks', action='store_true',
                    help='Use sqlite exclusive locking for working copies')
  parser.add_option('--memcached-server', action='store',
                    help='Use memcached server at specified URL (FSFS only)')
  parser.add_option('--fsfs-compression', action='store', type='str',
                    help='Set compression type (for fsfs)')
  parser.add_option('--fsfs-dir-deltification', action='store', type='str',
                    help='Set directory deltification option (for fsfs)')

  # most of the defaults are None, but some are other values, set them here
  # (the default --url is a file:// URL pointing at the current directory).
  parser.set_defaults(
    server_minor_version=SVN_VER_MINOR,
    url=file_scheme_prefix + \
        svntest.wc.svn_uri_quote(
          os.path.abspath(
            os.getcwd()).replace(os.path.sep, '/')),
    http_library=_default_http_library)

  return parser
+
class ServerCaps():
  """A simple struct that contains the actual server capabilities that don't
  depend on other settings like FS versions.

  Each attribute is a bool derived solely from
  OPTIONS.server_minor_version at construction time."""

  def __init__(self, options):
    # Hoist the repeated attribute access into a local.
    minor = options.server_minor_version
    self.has_revprop_commit = minor >= 5
    self.authz_has_aliases = minor >= 5
    self.gets_client_capabilities = minor >= 5
    self.has_partial_replay = minor >= 5
    self.enforces_UTF8_fspaths_in_verify = minor >= 6
    self.enforces_date_syntax = minor >= 5
    self.has_atomic_revprop = minor >= 7
    self.has_reverse_get_file_revs = minor >= 8
+
def parse_options(arglist=sys.argv[1:], usage=None):
  """Parse the arguments in ARGLIST, set the global OPTIONS object with
  the results, and return (parser, remaining_positional_args).

  Note: the default ARGLIST is captured once at import time; that is the
  harness's long-standing behavior and is preserved here."""

  global options

  parser = _create_parser(usage)
  (options, args) = parser.parse_args(arglist)

  # Peg the actual server capabilities.
  # We tweak the server_minor_version later to accommodate FS restrictions,
  # but we don't want them to interfere with expectations towards the "pure"
  # server code.
  options.server_caps = ServerCaps(options)

  # If there are no logging handlers registered yet, then install our
  # own with our custom formatter. (anything currently installed *is*
  # our handler as tested above, in _create_parser)
  if not logger.handlers:
    # Now that we have some options, let's get the logger configured before
    # doing anything more
    if options.log_with_timestamps:
      formatter = AbbreviatedFormatter('%(levelshort)s:'
                                       ' [%(asctime)s] %(message)s',
                                       datefmt='%Y-%m-%d %H:%M:%S')
    else:
      formatter = AbbreviatedFormatter('%(levelshort)s: %(message)s')
    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(formatter)
    logger.addHandler(handler)

  # Normalize url to have no trailing slash
  if options.url:
    if options.url[-1:] == '/':
      options.test_area_url = options.url[:-1]
    else:
      options.test_area_url = options.url

  # Some sanity checking
  if options.fsfs_packing and not options.fsfs_sharding:
    parser.error("--fsfs-packing requires --fsfs-sharding")

  if options.server_minor_version not in range(3, SVN_VER_MINOR+1):
    parser.error("test harness only supports server minor versions 3-%d"
                 % SVN_VER_MINOR)

  # (a stray no-op 'pass' statement was removed here)
  return (parser, args)
+
def tweak_options_for_precooked_repos():
  """Make sure the server-minor-version matches the fsfs-version parameter
  for pre-cooked repositories.

  Exits with an error message when --fsfs-version and an explicitly
  requested --server-minor-version are incompatible."""

  global options

  # Server versions that introduced the respective FSFS formats:
  # NOTE(review): formats 5 and 8 are absent from this map -- presumably
  # no pre-cooked repositories exist for them; confirm.
  introducing_version = { 1:1, 2:4, 3:5, 4:6, 6:8, 7:9 }
  if options.fsfs_version:
    if options.fsfs_version in introducing_version:
      introduced_in = introducing_version[options.fsfs_version]
      if options.server_minor_version \
         and options.server_minor_version != introduced_in \
         and options.server_minor_version != SVN_VER_MINOR:
        # Fix: the original called parser.error() here, but no 'parser'
        # exists in this scope, so the error path raised NameError instead
        # of reporting the problem.  sys.exit(message) prints the message
        # to stderr and aborts, which is what was intended.
        sys.exit("--fsfs-version=%d requires --server-minor-version=%d" \
                 % (options.fsfs_version, introduced_in))
      options.server_minor_version = introduced_in
  # ### Add more tweaks here if and when we support pre-cooked versions
  # ### of FSFS repositories.
+
+
def run_tests(test_list, serial_only = False):
  """Main routine to run all tests in TEST_LIST.

  NOTE: this function does not return. It does a sys.exit() with the
  appropriate exit code.
  """
  exit_code = execute_tests(test_list, serial_only)
  sys.exit(exit_code)
+
def get_issue_details(issue_numbers):
  """For each issue number in ISSUE_NUMBERS query the issue
  tracker and determine what the target milestone is and
  who the issue is assigned to. Return this information
  as a dictionary mapping issue numbers (as strings) to a list
  [target_milestone, assigned_to].

  ISSUE_NUMBERS may be a single int, a single string, or an iterable of
  either.  If the tracker cannot be contacted, every requested issue maps
  to the fallback ['unknown', 'unknown']."""
  xml_url = "http://subversion.tigris.org/issues/xml.cgi?id="
  issue_dict = {}

  # Accept a bare int or string as a convenience.
  if isinstance(issue_numbers, int):
    issue_numbers = [str(issue_numbers)]
  elif isinstance(issue_numbers, str):
    issue_numbers = [issue_numbers]

  if issue_numbers is None or len(issue_numbers) == 0:
    return issue_dict

  for num in issue_numbers:
    xml_url += str(num) + ','
    # Fix: the fallback used to be the bare string 'unknown', but every
    # consumer indexes the value as [milestone, assigned_to] (and
    # 'unknown'[0] silently yields 'u'); use the same two-element shape
    # as the success path.
    issue_dict[str(num)] = ['unknown', 'unknown']

  try:
    # Parse the xml for ISSUE_NO from the issue tracker into a Document.
    # NOTE(review): urllib.urlopen is the Python 2 API; under Python 3
    # this needs urllib.request.urlopen -- confirm which interpreter (or
    # compat shim) this harness runs under.
    issue_xml_f = urllib.urlopen(xml_url)
  except:
    print("WARNING: Unable to contact issue tracker; " \
          "milestones defaulting to 'unknown'.")
    return issue_dict

  try:
    xmldoc = xml.dom.minidom.parse(issue_xml_f)
    issue_xml_f.close()

    # For each issue: Get the target milestone and who
    # the issue is assigned to.
    issue_element = xmldoc.getElementsByTagName('issue')
    for i in issue_element:
      issue_id_element = i.getElementsByTagName('issue_id')
      issue_id = issue_id_element[0].childNodes[0].nodeValue
      milestone_element = i.getElementsByTagName('target_milestone')
      milestone = milestone_element[0].childNodes[0].nodeValue
      assignment_element = i.getElementsByTagName('assigned_to')
      assignment = assignment_element[0].childNodes[0].nodeValue
      issue_dict[issue_id] = [milestone, assignment]
  except:
    print("ERROR: Unable to parse target milestones from issue tracker")
    raise

  return issue_dict
+
+
+# Main func. This is the "entry point" that all the test scripts call
+# to run their list of tests.
+#
+# This routine parses sys.argv to decide what to do.
def execute_tests(test_list, serial_only = False, test_name = None,
                  progress_func = None, test_selection = []):
  """Similar to run_tests(), but just returns the exit code, rather than
  exiting the process. This function can be used when a caller doesn't
  want the process to die."""

  # NOTE(review): test_selection=[] is a mutable default, but it is only
  # read (or rebound), never mutated in place, so it is harmless here.
  global pristine_url
  global pristine_greek_repos_url
  global svn_binary
  global svnadmin_binary
  global svnlook_binary
  global svnrdump_binary
  global svnsync_binary
  global svndumpfilter_binary
  global svnversion_binary
  global svnmover_binary
  global svnmucc_binary
  global svnauthz_binary
  global svnauthz_validate_binary
  global options

  if test_name:
    sys.argv[0] = test_name

  testnums = []

  if not options:
    # Override which tests to run from the commandline
    (parser, args) = parse_options()
    tweak_options_for_precooked_repos()
    test_selection = args
  else:
    parser = _create_parser()

  # parse the positional arguments (test nums, names)
  # Each argument may be a single number, a 'N:M'/'N-M' range, or a test
  # function name; anything else is a usage error.
  for arg in test_selection:
    appended = False
    try:
      testnums.append(int(arg))
      appended = True
    except ValueError:
      # Do nothing for now.
      appended = False

    if not appended:
      try:
        # Check if the argument is a range
        numberstrings = arg.split(':');
        if len(numberstrings) != 2:
          numberstrings = arg.split('-');
          if len(numberstrings) != 2:
            raise ValueError
        left = int(numberstrings[0])
        right = int(numberstrings[1])
        if left > right:
          raise ValueError

        for nr in range(left,right+1):
          testnums.append(nr)
        else:
          # for-else: always runs here (the loop has no 'break'), marking
          # the range as successfully consumed.
          appended = True
      except ValueError:
        appended = False

    if not appended:
      try:
        # Check if the argument is a function name, and translate
        # it to a number if possible
        for testnum in list(range(1, len(test_list))):
          test_case = TestRunner(test_list[testnum], testnum)
          if test_case.get_function_name() == str(arg).rstrip(','):
            testnums.append(testnum)
            appended = True
            break
      except ValueError:
        appended = False

    if not appended:
      parser.error("invalid test number, range of numbers, " +
                   "or function '%s'\n" % arg)

  # Calculate pristine_greek_repos_url from test_area_url.
  pristine_greek_repos_url = options.test_area_url + '/' + \
                             svntest.wc.svn_uri_quote(
                               pristine_greek_repos_dir.replace(
                                 os.path.sep, '/'))

  if options.use_jsvn:
    # Point all binary paths at the SVNKit ('jsvn') wrapper scripts.
    if options.svn_bin is None:
      options.svn_bin = ''
    svn_binary = os.path.join(options.svn_bin, 'jsvn' + _bat)
    svnadmin_binary = os.path.join(options.svn_bin, 'jsvnadmin' + _bat)
    svnlook_binary = os.path.join(options.svn_bin, 'jsvnlook' + _bat)
    svnsync_binary = os.path.join(options.svn_bin, 'jsvnsync' + _bat)
    svndumpfilter_binary = os.path.join(options.svn_bin,
                                        'jsvndumpfilter' + _bat)
    svnversion_binary = os.path.join(options.svn_bin,
                                     'jsvnversion' + _bat)
    svnmucc_binary = os.path.join(options.svn_bin, 'jsvnmucc' + _bat)
  else:
    # Native binaries; only overridden when --bin was given.
    if options.svn_bin:
      svn_binary = os.path.join(options.svn_bin, 'svn' + _exe)
      svnadmin_binary = os.path.join(options.svn_bin, 'svnadmin' + _exe)
      svnlook_binary = os.path.join(options.svn_bin, 'svnlook' + _exe)
      svnrdump_binary = os.path.join(options.svn_bin, 'svnrdump' + _exe)
      svnsync_binary = os.path.join(options.svn_bin, 'svnsync' + _exe)
      svndumpfilter_binary = os.path.join(options.svn_bin,
                                          'svndumpfilter' + _exe)
      svnversion_binary = os.path.join(options.svn_bin, 'svnversion' + _exe)
      svnmucc_binary = os.path.join(options.svn_bin, 'svnmucc' + _exe)

  if options.tools_bin:
    svnauthz_binary = os.path.join(options.tools_bin, 'svnauthz' + _exe)
    svnauthz_validate_binary = os.path.join(options.tools_bin,
                                            'svnauthz-validate' + _exe)
    svnmover_binary = os.path.join(options.tools_bin, 'svnmover' + _exe)

  ######################################################################

  # Cleanup: if a previous run crashed or interrupted the python
  # interpreter, then `temp_dir' was never removed. This can cause wonkiness.
  if not options.is_child_process:
    safe_rmtree(temp_dir, 1)

  if not testnums:
    # If no test numbers were listed explicitly, include all of them:
    testnums = list(range(1, len(test_list)))

  if options.list_tests:

    # If we want to list the target milestones, then get all the issues
    # associated with all the individual tests.
    milestones_dict = None
    if options.milestone_filter:
      issues_dict = {}
      for testnum in testnums:
        issues = TestRunner(test_list[testnum], testnum).get_issues()
        test_mode = TestRunner(test_list[testnum], testnum).get_mode().upper()
        if issues:
          for issue in issues:
            if (options.mode_filter.upper() == 'ALL' or
                options.mode_filter.upper() == test_mode or
                (options.mode_filter.upper() == 'PASS' and test_mode == '')):
              issues_dict[issue]=issue
      milestones_dict = get_issue_details(issues_dict.keys())

    # NOTE(review): "Mileston" below is a typo in the printed header;
    # deliberately left untouched in this documentation-only change.
    header = "Test # Mode Test Description\n"
    if options.milestone_filter:
      header += " Issue#(Target Mileston/Assigned To)\n"
    header += "------ ----- ----------------"

    printed_header = False
    for testnum in testnums:
      test_mode = TestRunner(test_list[testnum], testnum).get_mode().upper()
      if options.mode_filter.upper() == 'ALL' \
         or options.mode_filter.upper() == test_mode \
         or (options.mode_filter.upper() == 'PASS' and test_mode == ''):
        if not printed_header:
          print(header)
          printed_header = True
        TestRunner(test_list[testnum], testnum).list(milestones_dict)
    # We are simply listing the tests so always exit with success.
    return 0

  # don't run tests in parallel when the tests don't support it or
  # there are only a few tests to run.
  options_parallel = options.parallel
  if serial_only or len(testnums) < 2:
    options.parallel = 0

  try:
    if not options.is_child_process:
      # Build out the default configuration directory
      create_config_dir(default_config_dir,
                        ssl_cert=options.ssl_cert,
                        ssl_url=options.test_area_url,
                        http_proxy=options.http_proxy,
                        exclusive_wc_locks=options.exclusive_wc_locks)

      # Setup the pristine repository
      svntest.actions.setup_pristine_greek_repository()

    # Run the tests.
    exit_code = _internal_run_tests(test_list, testnums, options.parallel,
                                    options.srcdir, progress_func)
  finally:
    # Restore the caller-visible parallel setting even on error.
    options.parallel = options_parallel

  # Remove all scratchwork: the 'pristine' repository, greek tree, etc.
  # This ensures that an 'import' will happen the next time we run.
  if not options.is_child_process and not options.keep_local_tmp:
    try:
      safe_rmtree(temp_dir, 1)
    except:
      logger.error("ERROR: cleanup of '%s' directory failed." % temp_dir)
      exit_code = 1

  # Cleanup after ourselves.
  svntest.sandbox.cleanup_deferred_test_paths()

  # Return the appropriate exit code from the tests.
  return exit_code
diff --git a/subversion/tests/cmdline/svntest/mergetrees.py b/subversion/tests/cmdline/svntest/mergetrees.py
new file mode 100755
index 0000000..0cee3d2
--- /dev/null
+++ b/subversion/tests/cmdline/svntest/mergetrees.py
@@ -0,0 +1,507 @@
+#!/usr/bin/env python
+#
+# mergetrees.py: routines that create merge scenarios
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import shutil, sys, re, os
+import time
+
+# Our testing module
+from svntest import main, wc, verify, actions, testcase
+
+from prop_tests import binary_mime_type_on_text_file_warning
+
+# (abbreviation)
+Item = wc.StateItem
+Skip = testcase.Skip_deco
+SkipUnless = testcase.SkipUnless_deco
+XFail = testcase.XFail_deco
+Issues = testcase.Issues_deco
+Issue = testcase.Issue_deco
+Wimp = testcase.Wimp_deco
+exp_noop_up_out = actions.expected_noop_update_output
+
+from svntest.main import SVN_PROP_MERGEINFO
+
+def expected_merge_output(rev_ranges, additional_lines=[], foreign=False,
+ elides=False, two_url=False, target=None,
+ text_conflicts=0, prop_conflicts=0, tree_conflicts=0,
+ text_resolved=0, prop_resolved=0, tree_resolved=0,
+ skipped_paths=0):
+ """Generate an (inefficient) regex representing the expected merge
+ output and mergeinfo notifications from REV_RANGES and ADDITIONAL_LINES.
+
+ REV_RANGES is a list of revision ranges for which mergeinfo is being
+ recorded. Each range is of the form [start, end] (where both START and
+ END are inclusive, unlike in '-rX:Y') or the form [single_rev] (which is
+ like '-c SINGLE_REV'). If REV_RANGES is None then only the standard
+ notification for a 3-way merge is expected.
+
+ ADDITIONAL_LINES is a list of strings to match the other lines of output;
+ these are basically regular expressions except that backslashes will be
+ escaped herein. If ADDITIONAL_LINES is a single string, it is interpreted
+ the same as a list containing that string.
+
+ If ELIDES is true, add to the regex an expression representing elision
+ notification. If TWO_URL is true, tweak the regex to expect the
+ appropriate mergeinfo notification for a 3-way merge.
+
+ TARGET is the local path to the target, as it should appear in
+ notifications; if None, it is not checked.
+
+ TEXT_CONFLICTS, PROP_CONFLICTS, TREE_CONFLICTS and SKIPPED_PATHS specify
+ the number of each kind of conflict to expect.
+ """
+
+ if rev_ranges is None:
+ lines = [main.merge_notify_line(None, None, False, foreign)]
+ else:
+ lines = []
+ for rng in rev_ranges:
+ start_rev = rng[0]
+ if len(rng) > 1:
+ end_rev = rng[1]
+ else:
+ end_rev = None
+ lines += [main.merge_notify_line(start_rev, end_rev,
+ True, foreign, target)]
+ lines += [main.mergeinfo_notify_line(start_rev, end_rev, target)]
+
+ if (elides):
+ lines += ["--- Eliding mergeinfo from .*\n"]
+
+ if (two_url):
+ lines += ["--- Recording mergeinfo for merge between repository URLs .*\n"]
+
+ # Address "The Backslash Plague"
+ #
+ # If ADDITIONAL_LINES are present there are possibly paths in it with
+ # multiple components and on Windows these components are separated with
+ # '\'. These need to be escaped properly in the regexp for the match to
+ # work correctly. See http://aspn.activestate.com/ASPN/docs/ActivePython
+ # /2.2/howto/regex/regex.html#SECTION000420000000000000000.
+ if isinstance(additional_lines, str):
+ additional_lines = [additional_lines]
+ if sys.platform == 'win32':
+ additional_lines = [line.replace("\\", "\\\\") for line in additional_lines]
+ lines += additional_lines
+
+ lines += main.summary_of_conflicts(
+ text_conflicts, prop_conflicts, tree_conflicts,
+ text_resolved, prop_resolved, tree_resolved,
+ skipped_paths,
+ as_regex=True)
+
+ return "|".join(lines)
+
+def check_mergeinfo_recursively(root_path, subpaths_mergeinfo):
+ """Check that the mergeinfo properties on and under ROOT_PATH are those in
+ SUBPATHS_MERGEINFO, a {path: mergeinfo-prop-val} dictionary."""
+ expected = verify.UnorderedOutput(
+ [path + ' - ' + subpaths_mergeinfo[path] + '\n'
+ for path in subpaths_mergeinfo])
+ actions.run_and_verify_svn(expected, [],
+ 'propget', '-R', SVN_PROP_MERGEINFO,
+ root_path)
+
+######################################################################
+#----------------------------------------------------------------------
+def set_up_dir_replace(sbox):
+ """Set up the working copy for directory replace tests, creating
+ directory 'A/B/F/foo' with files 'new file' and 'new file2' within
+ it (r2), and merging 'foo' onto 'C' (r3), then deleting 'A/B/F/foo'
+ (r4)."""
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ C_path = sbox.ospath('A/C')
+ F_path = sbox.ospath('A/B/F')
+ F_url = sbox.repo_url + '/A/B/F'
+
+ foo_path = os.path.join(F_path, 'foo')
+ new_file = os.path.join(foo_path, "new file")
+ new_file2 = os.path.join(foo_path, "new file 2")
+
+ # Make directory foo in F, and add some files within it.
+ actions.run_and_verify_svn(None, [], 'mkdir', foo_path)
+ main.file_append(new_file, "Initial text in new file.\n")
+ main.file_append(new_file2, "Initial text in new file 2.\n")
+ main.run_svn(None, "add", new_file)
+ main.run_svn(None, "add", new_file2)
+
+ # Commit all the new content, creating r2.
+ expected_output = wc.State(wc_dir, {
+ 'A/B/F/foo' : Item(verb='Adding'),
+ 'A/B/F/foo/new file' : Item(verb='Adding'),
+ 'A/B/F/foo/new file 2' : Item(verb='Adding'),
+ })
+ expected_status = actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B/F/foo' : Item(status=' ', wc_rev=2),
+ 'A/B/F/foo/new file' : Item(status=' ', wc_rev=2),
+ 'A/B/F/foo/new file 2' : Item(status=' ', wc_rev=2),
+ })
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status)
+
+ # Merge foo onto C
+ expected_output = wc.State(C_path, {
+ 'foo' : Item(status='A '),
+ 'foo/new file' : Item(status='A '),
+ 'foo/new file 2' : Item(status='A '),
+ })
+ expected_mergeinfo_output = wc.State(C_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(C_path, {
+ })
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B/F:2'}),
+ 'foo' : Item(),
+ 'foo/new file' : Item("Initial text in new file.\n"),
+ 'foo/new file 2' : Item("Initial text in new file 2.\n"),
+ })
+ expected_status = wc.State(C_path, {
+ '' : Item(status=' M', wc_rev=1),
+ 'foo' : Item(status='A ', wc_rev='-', copied='+'),
+ 'foo/new file' : Item(status=' ', wc_rev='-', copied='+'),
+ 'foo/new file 2' : Item(status=' ', wc_rev='-', copied='+'),
+ })
+ expected_skip = wc.State(C_path, { })
+ actions.run_and_verify_merge(C_path, '1', '2', F_url, None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_disk,
+ expected_status,
+ expected_skip,
+ check_props=True)
+ # Commit merge of foo onto C, creating r3.
+ expected_output = wc.State(wc_dir, {
+ 'A/C' : Item(verb='Sending'),
+ 'A/C/foo' : Item(verb='Adding'),
+ })
+ expected_status = actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B/F/foo' : Item(status=' ', wc_rev=2),
+ 'A/C' : Item(status=' ', wc_rev=3),
+ 'A/B/F/foo/new file' : Item(status=' ', wc_rev=2),
+ 'A/B/F/foo/new file 2' : Item(status=' ', wc_rev=2),
+ 'A/C/foo' : Item(status=' ', wc_rev=3),
+ 'A/C/foo/new file' : Item(status=' ', wc_rev=3),
+ 'A/C/foo/new file 2' : Item(status=' ', wc_rev=3),
+
+ })
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status)
+
+ # Delete foo on F, creating r4.
+ actions.run_and_verify_svn(None, [], 'rm', foo_path)
+ expected_output = wc.State(wc_dir, {
+ 'A/B/F/foo' : Item(verb='Deleting'),
+ })
+ expected_status = actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/C' : Item(status=' ', wc_rev=3),
+ 'A/C/foo' : Item(status=' ', wc_rev=3),
+ 'A/C/foo/new file' : Item(status=' ', wc_rev=3),
+ 'A/C/foo/new file 2' : Item(status=' ', wc_rev=3),
+ })
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status)
+
+#----------------------------------------------------------------------
+def set_up_branch(sbox, branch_only = False, nbr_of_branches = 1):
+ '''Starting with standard greek tree, copy 'A' NBR_OF_BRANCHES times
+ to A_COPY, A_COPY_2, A_COPY_3, and so on. Then, unless BRANCH_ONLY is
+ true, make four modifications (setting file contents to "New content")
+ under A:
+ r(2 + NBR_OF_BRANCHES) - A/D/H/psi
+ r(3 + NBR_OF_BRANCHES) - A/D/G/rho
+ r(4 + NBR_OF_BRANCHES) - A/B/E/beta
+ r(5 + NBR_OF_BRANCHES) - A/D/H/omega
+ Return (expected_disk, expected_status).'''
+
+ # With the default parameters, the branching looks like this:
+ #
+ # A -1-----3-4-5-6--
+ # \
+ # A_COPY 2-----------
+
+ wc_dir = sbox.wc_dir
+
+ expected_status = actions.get_virginal_state(wc_dir, 1)
+ expected_disk = main.greek_state.copy()
+
+ def copy_A(dest_name, rev):
+ expected = verify.UnorderedOutput(
+ ["A " + os.path.join(wc_dir, dest_name, "B") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "B", "lambda") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "B", "E") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "B", "E", "alpha") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "B", "E", "beta") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "B", "F") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "mu") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "C") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "D") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "D", "gamma") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "D", "G") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "D", "G", "pi") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "D", "G", "rho") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "D", "G", "tau") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "D", "H") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "D", "H", "chi") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "D", "H", "omega") + "\n",
+ "A " + os.path.join(wc_dir, dest_name, "D", "H", "psi") + "\n",
+ "Checked out revision " + str(rev - 1) + ".\n",
+ "A " + os.path.join(wc_dir, dest_name) + "\n"])
+ expected_status.add({
+ dest_name + "/B" : Item(status=' ', wc_rev=rev),
+ dest_name + "/B/lambda" : Item(status=' ', wc_rev=rev),
+ dest_name + "/B/E" : Item(status=' ', wc_rev=rev),
+ dest_name + "/B/E/alpha" : Item(status=' ', wc_rev=rev),
+ dest_name + "/B/E/beta" : Item(status=' ', wc_rev=rev),
+ dest_name + "/B/F" : Item(status=' ', wc_rev=rev),
+ dest_name + "/mu" : Item(status=' ', wc_rev=rev),
+ dest_name + "/C" : Item(status=' ', wc_rev=rev),
+ dest_name + "/D" : Item(status=' ', wc_rev=rev),
+ dest_name + "/D/gamma" : Item(status=' ', wc_rev=rev),
+ dest_name + "/D/G" : Item(status=' ', wc_rev=rev),
+ dest_name + "/D/G/pi" : Item(status=' ', wc_rev=rev),
+ dest_name + "/D/G/rho" : Item(status=' ', wc_rev=rev),
+ dest_name + "/D/G/tau" : Item(status=' ', wc_rev=rev),
+ dest_name + "/D/H" : Item(status=' ', wc_rev=rev),
+ dest_name + "/D/H/chi" : Item(status=' ', wc_rev=rev),
+ dest_name + "/D/H/omega" : Item(status=' ', wc_rev=rev),
+ dest_name + "/D/H/psi" : Item(status=' ', wc_rev=rev),
+ dest_name : Item(status=' ', wc_rev=rev)})
+ expected_disk.add({
+ dest_name : Item(),
+ dest_name + '/B' : Item(),
+ dest_name + '/B/lambda' : Item("This is the file 'lambda'.\n"),
+ dest_name + '/B/E' : Item(),
+ dest_name + '/B/E/alpha' : Item("This is the file 'alpha'.\n"),
+ dest_name + '/B/E/beta' : Item("This is the file 'beta'.\n"),
+ dest_name + '/B/F' : Item(),
+ dest_name + '/mu' : Item("This is the file 'mu'.\n"),
+ dest_name + '/C' : Item(),
+ dest_name + '/D' : Item(),
+ dest_name + '/D/gamma' : Item("This is the file 'gamma'.\n"),
+ dest_name + '/D/G' : Item(),
+ dest_name + '/D/G/pi' : Item("This is the file 'pi'.\n"),
+ dest_name + '/D/G/rho' : Item("This is the file 'rho'.\n"),
+ dest_name + '/D/G/tau' : Item("This is the file 'tau'.\n"),
+ dest_name + '/D/H' : Item(),
+ dest_name + '/D/H/chi' : Item("This is the file 'chi'.\n"),
+ dest_name + '/D/H/omega' : Item("This is the file 'omega'.\n"),
+ dest_name + '/D/H/psi' : Item("This is the file 'psi'.\n"),
+ })
+
+ # Make a branch A_COPY to merge into.
+ actions.run_and_verify_svn(expected, [], 'copy',
+ sbox.repo_url + "/A",
+ os.path.join(wc_dir,
+ dest_name))
+
+ expected_output = wc.State(wc_dir, {dest_name : Item(verb='Adding')})
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status)
+ for i in range(nbr_of_branches):
+ if i == 0:
+ copy_A('A_COPY', i + 2)
+ else:
+ copy_A('A_COPY_' + str(i + 1), i + 2)
+
+ if branch_only:
+ return expected_disk, expected_status
+
+ # Make some changes under A which we'll later merge under A_COPY:
+
+ # r(nbr_of_branches + 2) - modify and commit A/D/H/psi
+ main.file_write(sbox.ospath('A/D/H/psi'),
+ "New content")
+ expected_output = wc.State(wc_dir, {'A/D/H/psi' : Item(verb='Sending')})
+ expected_status.tweak('A/D/H/psi', wc_rev=nbr_of_branches + 2)
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status)
+ expected_disk.tweak('A/D/H/psi', contents="New content")
+
+ # r(nbr_of_branches + 3) - modify and commit A/D/G/rho
+ main.file_write(sbox.ospath('A/D/G/rho'),
+ "New content")
+ expected_output = wc.State(wc_dir, {'A/D/G/rho' : Item(verb='Sending')})
+ expected_status.tweak('A/D/G/rho', wc_rev=nbr_of_branches + 3)
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status)
+ expected_disk.tweak('A/D/G/rho', contents="New content")
+
+ # r(nbr_of_branches + 4) - modify and commit A/B/E/beta
+ main.file_write(sbox.ospath('A/B/E/beta'),
+ "New content")
+ expected_output = wc.State(wc_dir, {'A/B/E/beta' : Item(verb='Sending')})
+ expected_status.tweak('A/B/E/beta', wc_rev=nbr_of_branches + 4)
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status)
+ expected_disk.tweak('A/B/E/beta', contents="New content")
+
+ # r(nbr_of_branches + 5) - modify and commit A/D/H/omega
+ main.file_write(sbox.ospath('A/D/H/omega'),
+ "New content")
+ expected_output = wc.State(wc_dir, {'A/D/H/omega' : Item(verb='Sending')})
+ expected_status.tweak('A/D/H/omega', wc_rev=nbr_of_branches + 5)
+ actions.run_and_verify_commit(wc_dir, expected_output, expected_status)
+ expected_disk.tweak('A/D/H/omega', contents="New content")
+
+ return expected_disk, expected_status
+
+#----------------------------------------------------------------------
+# Helper functions. These take local paths using '/' separators.
+
+def local_path(path):
+ "Convert a path from '/' separators to the local style."
+ return os.sep.join(path.split('/'))
+
+def svn_mkfile(path):
+ "Make and add a file with some default content, and keyword expansion."
+ path = local_path(path)
+ dirname, filename = os.path.split(path)
+ main.file_write(path, "This is the file '" + filename + "'.\n" +
+ "Last changed in '$Revision$'.\n")
+ actions.run_and_verify_svn(None, [], 'add', path)
+ actions.run_and_verify_svn(None, [], 'propset',
+ 'svn:keywords', 'Revision', path)
+
+def svn_modfile(path):
+ "Make text and property mods to a WC file."
+ path = local_path(path)
+ main.file_append(path, "An extra line.\n")
+ actions.run_and_verify_svn(None, [], 'propset',
+ 'newprop', 'v', path)
+
+def svn_copy(s_rev, path1, path2):
+ "Copy a WC path locally."
+ path1 = local_path(path1)
+ path2 = local_path(path2)
+ actions.run_and_verify_svn(None, [], 'copy', '--parents',
+ '-r', s_rev, path1, path2)
+
+def svn_merge(rev_range, source, target, lines=None, elides=[],
+              text_conflicts=0, prop_conflicts=0, tree_conflicts=0,
+              text_resolved=0, prop_resolved=0, tree_resolved=0,
+              args=[]):
+  """Merge a single change from path SOURCE to path TARGET and verify the
+  output and that there is no error. (The changes made are not verified.)
+
+  REV_RANGE is either a number (to cherry-pick that specific change) or a
+  two-element list [X,Y] to pick the revision range '-r(X-1):Y'.
+
+  LINES is a list of regular expressions to match other lines of output; if
+  LINES is 'None' then match all normal (non-conflicting) merges.
+
+  ELIDES is a list of paths on which mergeinfo elision should be reported.
+
+  TEXT_CONFLICTS, PROP_CONFLICTS and TREE_CONFLICTS specify the number of
+  each kind of conflict to expect.
+
+  ARGS are additional arguments passed to svn merge.
+  """
+
+  source = local_path(source)
+  target = local_path(target)
+  elides = [local_path(p) for p in elides]
+  if isinstance(rev_range, int):
+    mi_rev_range = [rev_range]
+    rev_arg = '-c' + str(rev_range)
+  else:
+    mi_rev_range = rev_range
+    rev_arg = '-r' + str(rev_range[0] - 1) + ':' + str(rev_range[1])
+  if lines is None:
+    lines = ["(A |D |[UG] | [UG]|[UG][UG]) " + target + ".*\n"]
+  else:
+    # Expect mergeinfo on the target; caller must supply matches for any
+    # subtree mergeinfo paths.  (Copy; appending would mutate LINES.)
+    lines = lines + [" [UG] " + target + "\n"]
+  exp_out = expected_merge_output([mi_rev_range], lines, target=target,
+                                  elides=elides,
+                                  text_conflicts=text_conflicts,
+                                  prop_conflicts=prop_conflicts,
+                                  tree_conflicts=tree_conflicts,
+                                  text_resolved=text_resolved,
+                                  prop_resolved=prop_resolved,
+                                  tree_resolved=tree_resolved)
+  actions.run_and_verify_svn(exp_out, [],
+                             'merge', rev_arg, source, target,
+                             '--accept=postpone', *args)
+
+#----------------------------------------------------------------------
+# Setup helper for issue #4056 and issue #4057 tests.
+def noninheritable_mergeinfo_test_set_up(sbox):
+ '''Starting with standard greek tree, copy 'A' to 'branch' in r2 and
+ then made a file edit to A/B/lambda in r3.
+ Return (expected_output, expected_mergeinfo_output, expected_elision_output,
+ expected_status, expected_disk, expected_skip) for a merge of
+ r3 from ^/A/B to branch/B.'''
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ lambda_path = sbox.ospath('A/B/lambda')
+ B_branch_path = sbox.ospath('branch/B')
+
+ # r2 - Branch ^/A to ^/branch.
+ main.run_svn(None, 'copy', sbox.repo_url + '/A',
+ sbox.repo_url + '/branch', '-m', 'make a branch')
+
+ # r3 - Make an edit to A/B/lambda.
+ main.file_write(lambda_path, "trunk edit.\n")
+ main.run_svn(None, 'commit', '-m', 'file edit', wc_dir)
+ main.run_svn(None, 'up', wc_dir)
+
+ expected_output = wc.State(B_branch_path, {
+ 'lambda' : Item(status='U '),
+ })
+ expected_mergeinfo_output = wc.State(B_branch_path, {
+ '' : Item(status=' U'),
+ 'lambda' : Item(status=' U'),
+ })
+ expected_elision_output = wc.State(B_branch_path, {
+ 'lambda' : Item(status=' U'),
+ })
+ expected_status = wc.State(B_branch_path, {
+ '' : Item(status=' M'),
+ 'lambda' : Item(status='M '),
+ 'E' : Item(status=' '),
+ 'E/alpha' : Item(status=' '),
+ 'E/beta' : Item(status=' '),
+ 'F' : Item(status=' '),
+ })
+ expected_status.tweak(wc_rev='3')
+ expected_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:3'}),
+ 'lambda' : Item("trunk edit.\n"),
+ 'E' : Item(),
+ 'E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'E/beta' : Item("This is the file 'beta'.\n"),
+ 'F' : Item(),
+ })
+ expected_skip = wc.State(B_branch_path, {})
+
+ return expected_output, expected_mergeinfo_output, expected_elision_output, \
+ expected_status, expected_disk, expected_skip
+
diff --git a/subversion/tests/cmdline/svntest/objects.py b/subversion/tests/cmdline/svntest/objects.py
new file mode 100644
index 0000000..97bf31e
--- /dev/null
+++ b/subversion/tests/cmdline/svntest/objects.py
@@ -0,0 +1,337 @@
+#!/usr/bin/env python
+#
+# objects.py: Objects that keep track of state during a test
+#
+# Subversion is a tool for revision control.
+# See http://subversion.tigris.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import shutil, sys, re, os, subprocess
+
+# Our testing module
+import svntest
+from svntest import actions, main, tree, verify, wc
+
+
+######################################################################
+# Helpers
+
+def local_path(path):
+ """Convert a path from internal style ('/' separators) to the local style."""
+ if path == '':
+ path = '.'
+ return os.sep.join(path.split('/'))
+
+
+def db_dump(db_dump_name, repo_path, table):
+  """Yield a human-readable representation of the rows of the BDB table
+  TABLE in the repo at REPO_PATH.  Yield one line of text at a time.
+  Calls the external program "db_dump" which is supplied with BDB."""
+  table_path = repo_path + "/db/" + table
+  process = subprocess.Popen([db_dump_name, "-p", table_path],
+                             stdout=subprocess.PIPE, universal_newlines=True)
+  # Drain stdout before wait()ing: a table large enough to fill the pipe
+  # buffer would otherwise deadlock against the blocked child process.
+  lines = process.stdout.readlines()
+  assert process.wait() == 0
+  # Strip out the header and footer; yield only the rows in between.
+  copying = False
+  for line in lines:
+    if line == "HEADER=END\n":
+      copying = True
+    elif line == "DATA=END\n":
+      break
+    elif copying:
+      yield line
+
+def pretty_print_skel(line):
+ """Return LINE modified so as to look prettier for human reading, but no
+ longer unambiguous or machine-parsable. LINE is assumed to be in the
+ "Skel" format in which some values are preceded by a decimal length. This
+ function removes the length indicator, and also replaces a zero-length
+ value with a pair of single quotes."""
+ new_line = ''
+ while line:
+ # an explicit-length atom
+ explicit_atom = re.match(r'\d+ ', line)
+ if explicit_atom:
+ n = int(explicit_atom.group())
+ line = line[explicit_atom.end():]
+ new_line += line[:n]
+ line = line[n:]
+ if n == 0:
+ new_line += "''"
+ continue
+
+ # an implicit-length atom
+ implicit_atom = re.match(r'[A-Za-z][^\s()]*', line)
+ if implicit_atom:
+ n = implicit_atom.end()
+ new_line += line[:n]
+ line = line[n:]
+ continue
+
+ # parentheses, white space or any other non-atom
+ new_line += line[:1]
+ line = line[1:]
+
+ return new_line
+
+def dump_bdb(db_dump_name, repo_path, dump_dir):
+ """Dump all the known BDB tables in the repository at REPO_PATH into a
+ single text file in DUMP_DIR. Omit any "next-key" records."""
+ dump_file = dump_dir + "/all.bdb"
+ file = open(dump_file, 'w')
+ for table in ['revisions', 'transactions', 'changes', 'copies', 'nodes',
+ 'node-origins', 'representations', 'checksum-reps', 'strings',
+ 'locks', 'lock-tokens', 'miscellaneous', 'uuids']:
+ file.write(table + ":\n")
+ next_key_line = False
+ for line in db_dump(db_dump_name, repo_path, table):
+ # Omit any 'next-key' line and the following line.
+ if next_key_line:
+ next_key_line = False
+ continue
+ if line == ' next-key\n':
+ next_key_line = True
+ continue
+ # The line isn't necessarily a skel, but pretty_print_skel() shouldn't
+ # do too much harm if it isn't.
+ file.write(pretty_print_skel(line))
+ file.write("\n")
+ file.close()
+
+def locate_db_dump():
+  """Locate a db_dump executable, preferring the newest BDB version.
+  Return its command name, or the string 'none' if none can be run."""
+  for db_dump_name in ['db4.8_dump', 'db4.7_dump', 'db4.6_dump',
+                       'db4.5_dump', 'db4.4_dump', 'db_dump']:
+    try:
+      if subprocess.Popen([db_dump_name, "-V"]).wait() == 0:
+        return db_dump_name
+    except OSError:
+      pass  # not installed under this name; try the next candidate
+  return 'none'
+
+######################################################################
+# Class SvnRepository
+
+class SvnRepository:
+ """An object of class SvnRepository represents a Subversion repository,
+ providing both a client-side view and a server-side view."""
+
+ def __init__(self, repo_url, repo_dir):
+ self.repo_url = repo_url
+ self.repo_absdir = os.path.abspath(repo_dir)
+ self.db_dump_name = locate_db_dump()
+ # This repository object's idea of its own head revision.
+ self.head_rev = 0
+
+  def dump(self, output_dir):
+    """Dump the repository into the (new) directory OUTPUT_DIR, as both
+    a raw BDB dump (if a db_dump tool was found) and an svnadmin dump."""
+    ldir = local_path(output_dir)
+    os.mkdir(ldir)
+    # Run a BDB dump on the repository.  (These section comments were
+    # bare string statements in the original; they evaluated to nothing.)
+    if self.db_dump_name != 'none':
+      dump_bdb(self.db_dump_name, self.repo_absdir, ldir)
+    # Run 'svnadmin dump': it writes progress notes to stderr and the
+    # dump data to stdout; both are captured in the file, notes first.
+    exit_code, stdout, stderr = \
+      actions.run_and_verify_svnadmin(None, None,
+                                      'dump', self.repo_absdir)
+    ldumpfile = local_path(output_dir + "/svnadmin.dump")
+    main.file_write(ldumpfile, ''.join(stderr))
+    main.file_append(ldumpfile, ''.join(stdout))
+
+ def svn_mkdirs(self, *dirs):
+ """Run 'svn mkdir' on the repository. DIRS is a list of directories to
+ make, and each directory is a path relative to the repository root,
+ neither starting nor ending with a slash."""
+ urls = [self.repo_url + '/' + dir for dir in dirs]
+ actions.run_and_verify_svn(None, [],
+ 'mkdir', '-m', 'svn_mkdirs()', '--parents',
+ *urls)
+ self.head_rev += 1
+
+
+######################################################################
+# Class SvnWC
+
+class SvnWC:
+ """An object of class SvnWC represents a WC, and provides methods for
+ operating on it. It keeps track of the state of the WC and of the
+ repository, so that the expected results of common operations are
+ automatically known.
+
+ Path arguments to class methods paths are relative to the WC dir and
+ in Subversion canonical form ('/' separators).
+ """
+
+ def __init__(self, wc_dir, repo):
+ """Initialize the object to use the existing WC at path WC_DIR and
+ the existing repository object REPO."""
+ self.wc_absdir = os.path.abspath(wc_dir)
+ # 'state' is, at all times, the 'wc.State' representation of the state
+ # of the WC, with paths relative to 'wc_absdir'.
+ #self.state = wc.State('', {})
+ initial_wc_tree = tree.build_tree_from_wc(self.wc_absdir, load_props=True)
+ self.state = initial_wc_tree.as_state()
+ self.state.add({
+ '': wc.StateItem()
+ })
+ self.repo = repo
+
+ def __str__(self):
+ return "SvnWC(head_rev=" + str(self.repo.head_rev) + ", state={" + \
+ str(self.state.desc) + \
+ "})"
+
+ def svn_mkdir(self, rpath):
+ lpath = local_path(rpath)
+ actions.run_and_verify_svn(None, [], 'mkdir', lpath)
+
+ self.state.add({
+ rpath : wc.StateItem(status='A ')
+ })
+
+# def propset(self, pname, pvalue, *rpaths):
+# "Set property 'pname' to value 'pvalue' on each path in 'rpaths'"
+# local_paths = tuple([local_path(rpath) for rpath in rpaths])
+# actions.run_and_verify_svn(None, [], 'propset', pname, pvalue,
+# *local_paths)
+
+ def svn_set_props(self, rpath, props):
+ """Change the properties of PATH to be the dictionary {name -> value} PROPS.
+ """
+ lpath = local_path(rpath)
+ #for prop in path's existing props:
+ # actions.run_and_verify_svn(None, [], 'propdel',
+ # prop, lpath)
+ for prop in props:
+ actions.run_and_verify_svn(None, [], 'propset',
+ prop, props[prop], lpath)
+ self.state.tweak(rpath, props=props)
+
+ def svn_file_create_add(self, rpath, content=None, props=None):
+ "Make and add a file with some default content, and keyword expansion."
+ lpath = local_path(rpath)
+ ldirname, filename = os.path.split(lpath)
+ if content is None:
+ # Default content
+ content = "This is the file '" + filename + "'.\n" + \
+ "Last changed in '$Revision$'.\n"
+ main.file_write(lpath, content)
+ actions.run_and_verify_svn(None, [], 'add', lpath)
+
+ self.state.add({
+ rpath : wc.StateItem(status='A ')
+ })
+ if props is None:
+ # Default props
+ props = {
+ 'svn:keywords': 'Revision'
+ }
+ self.svn_set_props(rpath, props)
+
+  def file_modify(self, rpath, content=None, props=None):
+    """Make text and/or property mods to the WC file at RPATH.
+    CONTENT, if not None, replaces the file's text; PROPS, if not None,
+    is a {name: value} dict of properties to set on it."""
+    lpath = local_path(rpath)
+    if content is not None:
+      main.file_write(lpath, content)
+      # StateItem stores file text as 'contents'; the original tweaked a
+      # bogus 'content' attribute that comparisons never consult.
+      self.state.tweak(rpath, contents=content)
+    if props is not None:
+      self.svn_set_props(rpath, props)  # was self.set_props() -- undefined
+
+  def svn_move(self, rpath1, rpath2, parents=False):
+    """Move/rename the existing WC item RPATH1 to become RPATH2.
+    RPATH2 must not already exist. If PARENTS is true, any missing parents
+    of RPATH2 will be created."""
+    lpath1 = local_path(rpath1)
+    lpath2 = local_path(rpath2)
+    args = [lpath1, lpath2]
+    if parents:
+      args += ['--parents']
+    # 'move', not 'copy': the tracked state drops RPATH1 below, so a
+    # copy would leave the WC and the recorded state out of sync.
+    actions.run_and_verify_svn(None, [], 'move', *args)
+    self.state.add({rpath2: self.state.desc[rpath1]})
+    self.state.remove(rpath1)
+
+ def svn_copy(self, rpath1, rpath2, parents=False, rev=None):
+ """Copy the existing WC item RPATH1 to become RPATH2.
+ RPATH2 must not already exist. If PARENTS is true, any missing parents
+ of RPATH2 will be created. If REV is not None, copy revision REV of
+ the node identified by WC item RPATH1."""
+ lpath1 = local_path(rpath1)
+ lpath2 = local_path(rpath2)
+ args = [lpath1, lpath2]
+ if rev is not None:
+ args += ['-r', rev]
+ if parents:
+ args += ['--parents']
+ actions.run_and_verify_svn(None, [], 'copy', *args)
+ self.state.add({
+ rpath2: self.state.desc[rpath1]
+ })
+
+ def svn_delete(self, rpath, even_if_modified=False):
+ "Delete a WC path locally."
+ lpath = local_path(rpath)
+ args = []
+ if even_if_modified:
+ args += ['--force']
+ actions.run_and_verify_svn(None, [], 'delete', lpath, *args)
+
+ def svn_commit(self, rpath='', log=''):
+ "Commit a WC path (recursively). Return the new revision number."
+ lpath = local_path(rpath)
+ actions.run_and_verify_svn(verify.AnyOutput, [],
+ 'commit', '-m', log, lpath)
+ actions.run_and_verify_update(lpath, None, None, None)
+ self.repo.head_rev += 1
+ return self.repo.head_rev
+
+  def svn_update(self, rpath='', rev='HEAD'):
+    "Update WC path RPATH to HEAD.  NOTE: REV is accepted but ignored."
+    lpath = local_path(rpath)
+    actions.run_and_verify_update(lpath, None, None, None)
+
+# def svn_merge(self, rev_spec, source, target, exp_out=None):
+# """Merge a single change from path 'source' to path 'target'.
+# SRC_CHANGE_NUM is either a number (to cherry-pick that specific change)
+# or a command-line option revision range string such as '-r10:20'."""
+# lsource = local_path(source)
+# ltarget = local_path(target)
+# if isinstance(rev_spec, int):
+# rev_spec = '-c' + str(rev_spec)
+# if exp_out is None:
+# target_re = re.escape(target)
+# exp_1 = "--- Merging r.* into '" + target_re + ".*':"
+# exp_2 = "(A |D |[UG] | [UG]|[UG][UG]) " + target_re + ".*"
+# exp_out = verify.RegexOutput(exp_1 + "|" + exp_2)
+# actions.run_and_verify_svn(exp_out, [],
+# 'merge', rev_spec, lsource, ltarget)
+
diff --git a/subversion/tests/cmdline/svntest/sandbox.py b/subversion/tests/cmdline/svntest/sandbox.py
new file mode 100644
index 0000000..b1c9861
--- /dev/null
+++ b/subversion/tests/cmdline/svntest/sandbox.py
@@ -0,0 +1,614 @@
+#
+# sandbox.py : tools for manipulating a test's working area ("a sandbox")
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+#
+
+import os
+import shutil
+import copy
+import logging
+import re
+
+import svntest
+
+logger = logging.getLogger()
+
+
+def make_mirror(sbox, source_prop_encoding=None):
+  """Make a mirror of the repository in SBOX, using svnsync, and return
+  the new (dependent) sandbox holding the mirror repository.  If
+  SOURCE_PROP_ENCODING is given, pass it to svnsync as
+  '--source-prop-encoding'."""
+  # Set up the mirror repository.
+  dest_sbox = sbox.clone_dependent()
+  dest_sbox.build(create_wc=False, empty=True)
+  # Give the mirror the same UUID as the source; output[0][:-1] strips the
+  # trailing newline from the svnlook output.
+  exit_code, output, errput = svntest.main.run_svnlook("uuid", sbox.repo_dir)
+  svntest.actions.run_and_verify_svnadmin2(None, None, 0,
+                                           'setuuid', dest_sbox.repo_dir,
+                                           output[0][:-1])
+  # svnsync must be able to set revision properties on the mirror.
+  svntest.actions.enable_revprop_changes(dest_sbox.repo_dir)
+
+  repo_url = sbox.repo_url
+  dest_repo_url = dest_sbox.repo_url
+
+  # Synchronize it.
+  args = (svntest.main.svnrdump_crosscheck_authentication,)
+  if source_prop_encoding:
+    args = args + ("--source-prop-encoding=" + source_prop_encoding,)
+  svntest.actions.run_and_verify_svnsync(svntest.verify.AnyOutput, [],
+                                         "initialize",
+                                         dest_repo_url, repo_url, *args)
+  svntest.actions.run_and_verify_svnsync(None, [],
+                                         "synchronize",
+                                         dest_repo_url, repo_url, *args)
+
+  return dest_sbox
+
+def verify_mirror(repo_url, repo_dir, expected_dumpfile):
+  """Compare the repository content at REPO_URL/REPO_DIR with that in
+  EXPECTED_DUMPFILE (a non-delta dump, given as a list of lines).
+  Raise on mismatch."""
+  # Remove some SVNSync-specific housekeeping properties from the
+  # mirror repository in preparation for the comparison dump.
+  for prop_name in ("svn:sync-from-url", "svn:sync-from-uuid",
+                    "svn:sync-last-merged-rev"):
+    svntest.actions.run_and_verify_svn(
+      None, [], "propdel", "--revprop", "-r", "0",
+      prop_name, repo_url)
+  # Create a dump file from the mirror repository.
+  dumpfile_s_n = svntest.actions.run_and_verify_dump(repo_dir)
+  # Compare the mirror's dumpfile, ignoring any expected differences:
+  # The original dumpfile in some cases lacks 'Text-content-sha1' headers;
+  # the mirror dump always has them -- ### Why?
+  svnsync_headers_always = re.compile("Text-content-sha1: ")
+  dumpfile_a_n_cmp = [l for l in expected_dumpfile
+                      if not svnsync_headers_always.match(l)]
+  dumpfile_s_n_cmp = [l for l in dumpfile_s_n
+                      if not svnsync_headers_always.match(l)]
+  svntest.verify.compare_dump_files(None, None,
+                                    dumpfile_a_n_cmp,
+                                    dumpfile_s_n_cmp)
+
+
+class Sandbox:
+ """Manages a sandbox (one or more repository/working copy pairs) for
+ a test to operate within."""
+
+ dependents = None
+ tmp_dir = None
+
+  def __init__(self, module, idx):
+    """Initialize a sandbox for test number IDX of test module MODULE.
+    The sandbox is named '<module>-<idx>'."""
+    # Paths (working copies, repositories, ...) registered for automatic
+    # removal when the sandbox is cleaned up.
+    self.test_paths = []
+
+    self._set_name("%s-%d" % (module, idx))
+    # This flag is set to True by build() and returned by is_built()
+    self._is_built = False
+
+    # Directory the test started in; verify() chdirs back here before
+    # running the dump/load cross-check.
+    self.was_cwd = os.getcwd()
+
+  def _set_name(self, name, read_only=False, empty=False):
+    """A convenience method for renaming a sandbox, useful when
+    working with multiple repositories in the same unit test.
+    If NAME is None, keep the current name but recompute the derived
+    paths.  READ_ONLY sandboxes share the central pristine Greek
+    repository; EMPTY forces a private repository even when READ_ONLY."""
+    if not name is None:
+      self.name = name
+    self.read_only = read_only
+    self.wc_dir = os.path.join(svntest.main.general_wc_dir, self.name)
+    self.add_test_path(self.wc_dir)
+    if empty or not read_only:  # use a local repo
+      self.repo_dir = os.path.join(svntest.main.general_repo_dir, self.name)
+      self.repo_url = (svntest.main.options.test_area_url + '/'
+                       + svntest.wc.svn_uri_quote(
+                         self.repo_dir.replace(os.path.sep, '/')))
+      self.add_test_path(self.repo_dir)
+    else:
+      self.repo_dir = svntest.main.pristine_greek_repos_dir
+      self.repo_url = svntest.main.pristine_greek_repos_url
+
+    # NOTE(review): for file:// URLs neither branch below runs, leaving
+    # authz_file/groups_file unset -- confirm callers only use these
+    # attributes for http:// and svn:// test runs.
+    if self.repo_url.startswith("http"):
+      self.authz_file = os.path.join(svntest.main.work_dir, "authz")
+      self.groups_file = os.path.join(svntest.main.work_dir, "groups")
+    elif self.repo_url.startswith("svn"):
+      self.authz_file = os.path.join(self.repo_dir, "conf", "authz")
+      self.groups_file = os.path.join(self.repo_dir, "conf", "groups")
+
+ def clone_dependent(self, copy_wc=False):
+ """A convenience method for creating a near-duplicate of this
+ sandbox, useful when working with multiple repositories in the
+ same unit test. If COPY_WC is true, make an exact copy of this
+ sandbox's working copy at the new sandbox's working copy
+ directory. Any necessary cleanup operations are triggered by
+ cleanup of the original sandbox."""
+
+ if not self.dependents:
+ self.dependents = []
+ clone = copy.deepcopy(self)
+ self.dependents.append(clone)
+ clone._set_name("%s-%d" % (self.name, len(self.dependents)))
+ if copy_wc:
+ self.add_test_path(clone.wc_dir)
+ shutil.copytree(self.wc_dir, clone.wc_dir, symlinks=True)
+ return clone
+
+ def build(self, name=None, create_wc=True, read_only=False, empty=False,
+ minor_version=None):
+ """Make a 'Greek Tree' repo (or refer to the central one if READ_ONLY),
+ or make an empty repo if EMPTY is true,
+ and check out a WC from it (unless CREATE_WC is false). Change the
+ sandbox's name to NAME. See actions.make_repo_and_wc() for details."""
+ self._set_name(name, read_only, empty)
+ self._ensure_authz()
+ svntest.actions.make_repo_and_wc(self, create_wc, read_only, empty,
+ minor_version)
+ self._is_built = True
+
+ def _ensure_authz(self):
+ "make sure the repository is accessible"
+
+ if self.repo_url.startswith("http"):
+ default_authz = "[/]\n* = rw\n"
+
+ if (svntest.main.options.parallel == 0
+ and (not os.path.isfile(self.authz_file)
+ or open(self.authz_file,'r').read() != default_authz)):
+
+ tmp_authz_file = os.path.join(svntest.main.work_dir, "authz-" + self.name)
+ open(tmp_authz_file, 'w').write(default_authz)
+ shutil.move(tmp_authz_file, self.authz_file)
+
+ def authz_name(self, repo_dir=None):
+ "return this sandbox's name for use in an authz file"
+ repo_dir = repo_dir or self.repo_dir
+ if self.repo_url.startswith("http"):
+ return os.path.basename(repo_dir)
+ else:
+ return repo_dir.replace('\\', '/')
+
+ def add_test_path(self, path, remove=True):
+ self.test_paths.append(path)
+ if remove:
+ svntest.main.safe_rmtree(path)
+
+ def add_repo_path(self, suffix, remove=True):
+ """Generate a path, under the general repositories directory, with
+ a name that ends in SUFFIX, e.g. suffix="2" -> ".../basic_tests.2".
+ If REMOVE is true, remove anything currently on disk at that path.
+ Remember that path so that the automatic clean-up mechanism can
+ delete it at the end of the test. Generate a repository URL to
+ refer to a repository at that path. Do not create a repository.
+ Return (REPOS-PATH, REPOS-URL)."""
+ path = (os.path.join(svntest.main.general_repo_dir, self.name)
+ + '.' + suffix)
+ url = svntest.main.options.test_area_url + \
+ '/' + svntest.wc.svn_uri_quote(
+ path.replace(os.path.sep, '/'))
+ self.add_test_path(path, remove)
+ return path, url
+
+ def add_wc_path(self, suffix, remove=True):
+ """Generate a path, under the general working copies directory, with
+ a name that ends in SUFFIX, e.g. suffix="2" -> ".../basic_tests.2".
+ If REMOVE is true, remove anything currently on disk at that path.
+ Remember that path so that the automatic clean-up mechanism can
+ delete it at the end of the test. Do not create a working copy.
+ Return the generated WC-PATH."""
+ path = self.wc_dir + '.' + suffix
+ self.add_test_path(path, remove)
+ return path
+
+ tempname_offs = 0 # Counter for get_tempname
+
+ def get_tempname(self, prefix='tmp'):
+ """Get a stable name for a temporary file that will be removed after
+ running the test"""
+
+ if not self.tmp_dir:
+ # Create an empty directory for temporary files
+ self.tmp_dir = self.add_wc_path('tmp', remove=True)
+ os.mkdir(self.tmp_dir)
+
+ self.tempname_offs = self.tempname_offs + 1
+
+ return os.path.join(self.tmp_dir, '%s-%s' % (prefix, self.tempname_offs))
+
+ def create_config_dir(self, config_contents=None, server_contents=None,
+ ssl_cert=None, ssl_url=None, http_proxy=None,
+ exclusive_wc_locks=None):
+ """Create a config directory with specified or default files.
+ Return its path.
+ """
+
+ tmp_dir = os.path.abspath(svntest.main.temp_dir)
+ config_dir = os.path.join(tmp_dir, 'config_' + self.name)
+ svntest.main.create_config_dir(config_dir, config_contents, server_contents,
+ ssl_cert, ssl_url, http_proxy,
+ exclusive_wc_locks)
+ return config_dir
+
+ def cleanup_test_paths(self):
+ "Clean up detritus from this sandbox, and any dependents."
+ if self.dependents:
+ # Recursively cleanup any dependent sandboxes.
+ for sbox in self.dependents:
+ sbox.cleanup_test_paths()
+ # cleanup all test specific working copies and repositories
+ for path in self.test_paths:
+ if not path is svntest.main.pristine_greek_repos_dir:
+ _cleanup_test_path(path)
+
+ def is_built(self):
+ "Returns True when build() has been called on this instance."
+ return self._is_built
+
+ def ospath(self, relpath, wc_dir=None):
+ """Return RELPATH converted to an OS-style path relative to the WC dir
+ of this sbox, or relative to OS-style path WC_DIR if supplied."""
+ if wc_dir is None:
+ wc_dir = self.wc_dir
+
+ if relpath == '':
+ return wc_dir
+ else:
+ return os.path.join(wc_dir, svntest.wc.to_ospath(relpath))
+
+ def ospaths(self, relpaths, wc_dir=None):
+ """Return a list of RELPATHS but with each path converted to an OS-style
+ path relative to the WC dir of this sbox, or relative to OS-style
+ path WC_DIR if supplied."""
+ return [self.ospath(rp, wc_dir) for rp in relpaths]
+
+ def path(self, relpath, wc_dir=None):
+ """Return RELPATH converted to an path relative to the WC dir
+ of this sbox, or relative to WC_DIR if supplied, but always
+ using '/' as directory separator."""
+ return self.ospath(relpath, wc_dir=wc_dir).replace(os.path.sep, '/')
+
+ def redirected_root_url(self, temporary=False):
+ """If TEMPORARY is set, return the URL which should be configured
+ to temporarily redirect to the root of this repository;
+ otherwise, return the URL which should be configured to
+ permanent redirect there. (Assumes that the sandbox is not
+ read-only.)"""
+ assert not self.read_only
+ assert self.repo_url.startswith("http")
+ parts = self.repo_url.rsplit('/', 1)
+ return '%s/REDIRECT-%s-%s' % (parts[0],
+ temporary and 'TEMP' or 'PERM',
+ parts[1])
+
+ def file_protocol_repo_url(self):
+ """get a file:// url pointing to the repository"""
+ return svntest.main.file_scheme_prefix + \
+ svntest.wc.svn_uri_quote(
+ os.path.abspath(self.repo_dir).replace(os.path.sep, '/'))
+
+ def simple_update(self, target=None, revision='HEAD'):
+ """Update the WC or TARGET.
+ TARGET is a relpath relative to the WC."""
+ if target is None:
+ target = self.wc_dir
+ else:
+ target = self.ospath(target)
+ svntest.main.run_svn(False, 'update', target, '-r', revision)
+
+ def simple_switch(self, url, target=None):
+ """Switch the WC or TARGET to URL.
+ TARGET is a relpath relative to the WC."""
+ if target is None:
+ target = self.wc_dir
+ else:
+ target = self.ospath(target)
+ svntest.main.run_svn(False, 'switch', url, target, '--ignore-ancestry')
+
+ def simple_commit(self, target=None, message=None):
+ """Commit the WC or TARGET, with a default or supplied log message.
+ Raise if the exit code is non-zero or there is output on stderr.
+ TARGET is a relpath relative to the WC."""
+ assert not self.read_only
+ if target is None:
+ target = self.wc_dir
+ else:
+ target = self.ospath(target)
+ if message is None:
+ message = svntest.main.make_log_msg()
+ svntest.actions.run_and_verify_commit(self.wc_dir, None, None, [],
+ '-m', message, target)
+
+ def simple_rm(self, *targets):
+ """Schedule TARGETS for deletion.
+ TARGETS are relpaths relative to the WC."""
+ assert len(targets) > 0
+ targets = self.ospaths(targets)
+ svntest.main.run_svn(False, 'rm', *targets)
+
+ def simple_mkdir(self, *targets):
+ """Create TARGETS as directories scheduled for addition.
+ TARGETS are relpaths relative to the WC."""
+ assert len(targets) > 0
+ targets = self.ospaths(targets)
+ svntest.main.run_svn(False, 'mkdir', *targets)
+
+ def simple_add(self, *targets):
+ """Schedule TARGETS for addition.
+ TARGETS are relpaths relative to the WC."""
+ assert len(targets) > 0
+ targets = self.ospaths(targets)
+ svntest.main.run_svn(False, 'add', *targets)
+
+ def simple_revert(self, *targets):
+ """Revert TARGETS.
+ TARGETS are relpaths relative to the WC."""
+ assert len(targets) > 0
+ targets = self.ospaths(targets)
+ svntest.main.run_svn(False, 'revert', *targets)
+
+ def simple_propset(self, name, value, *targets):
+ """Set property NAME to VALUE on TARGETS.
+ TARGETS are relpaths relative to the WC."""
+ assert len(targets) > 0
+ targets = self.ospaths(targets)
+ svntest.main.run_svn(False, 'propset', name, value, *targets)
+
+ def simple_propdel(self, name, *targets):
+ """Delete property NAME from TARGETS.
+ TARGETS are relpaths relative to the WC."""
+ assert len(targets) > 0
+ targets = self.ospaths(targets)
+ svntest.main.run_svn(False, 'propdel', name, *targets)
+
+  def simple_propget(self, name, target):
+    """Return the value of the property NAME on TARGET.
+    TARGET is a relpath relative to the WC."""
+    target = self.ospath(target)
+    exit, out, err = svntest.main.run_svn(False, 'propget',
+                                          '--strict', name, target)
+    return ''.join(out)
+
+  def simple_proplist(self, target):
+    """Return a dictionary mapping property name to property value, of the
+    properties on TARGET.
+    TARGET is a relpath relative to the WC."""
+    target = self.ospath(target)
+    exit, out, err = svntest.main.run_svn(False, 'proplist',
+                                          '--verbose', '--quiet', target)
+    # Parse output of the form:
+    #   '  name'     (two spaces, then the property name)
+    #   '    value'  (four spaces, then one line of the property value;
+    #                 multi-line values continue on further such lines)
+    props = {}
+    for line in out:
+      line = line.rstrip('\r\n')
+      # NOTE(review): assumes each line has at least 3 characters and that
+      # the first line is a property name; 'name'/'val' would be unbound on
+      # malformed output -- confirm against 'svn proplist -v -q' format.
+      if line[2] != ' ':  # property name
+        name = line[2:]
+        val = None
+      elif line.startswith('    '):  # property value
+        if val is None:
+          val = line[4:]
+        else:
+          val += '\n' + line[4:]
+        props[name] = val
+      else:
+        raise Exception("Unexpected line '" + line + "' in proplist output"
+                        + str(out))
+    return props
+
+ def simple_symlink(self, dest, target):
+ """Create a symlink TARGET pointing to DEST"""
+ if svntest.main.is_posix_os():
+ os.symlink(dest, self.ospath(target))
+ else:
+ svntest.main.file_write(self.ospath(target), "link %s" % dest)
+
+ def simple_add_symlink(self, dest, target, add=True):
+ """Create a symlink TARGET pointing to DEST and add it to subversion"""
+ self.simple_symlink(dest, target)
+ self.simple_add(target)
+ if not svntest.main.is_posix_os(): # '*' is evaluated on Windows
+ self.simple_propset('svn:special', 'X', target)
+
+ def simple_add_text(self, text, *targets):
+ """Create files containing TEXT as TARGETS"""
+ assert len(targets) > 0
+ for target in targets:
+ svntest.main.file_write(self.ospath(target), text, mode='wb')
+ self.simple_add(*targets)
+
+ def simple_copy(self, source, dest):
+ """Copy SOURCE to DEST in the WC.
+ SOURCE and DEST are relpaths relative to the WC."""
+ source = self.ospath(source)
+ dest = self.ospath(dest)
+ svntest.main.run_svn(False, 'copy', source, dest)
+
+ def simple_move(self, source, dest):
+ """Move SOURCE to DEST in the WC.
+ SOURCE and DEST are relpaths relative to the WC."""
+ source = self.ospath(source)
+ dest = self.ospath(dest)
+ svntest.main.run_svn(False, 'move', source, dest)
+
+ def simple_repo_copy(self, source, dest):
+ """Copy SOURCE to DEST in the repository, committing the result with a
+ default log message.
+ SOURCE and DEST are relpaths relative to the repo root."""
+ svntest.main.run_svn(False, 'copy', '-m', svntest.main.make_log_msg(),
+ self.repo_url + '/' + source,
+ self.repo_url + '/' + dest)
+
+ def simple_append(self, dest, contents, truncate=False):
+ """Append CONTENTS to file DEST, optionally truncating it first.
+ DEST is a relpath relative to the WC."""
+ svntest.main.file_write(self.ospath(dest), contents,
+ truncate and 'wb' or 'ab')
+
+ def simple_lock(self, *targets):
+ """Lock TARGETS in the WC.
+ TARGETS are relpaths relative to the WC."""
+ assert len(targets) > 0
+ targets = self.ospaths(targets)
+ svntest.main.run_svn(False, 'lock', *targets)
+
+ def youngest(self):
+ _, output, _ = svntest.actions.run_and_verify_svnlook(
+ svntest.verify.AnyOutput, [],
+ 'youngest', self.repo_dir)
+ youngest = int(output[0])
+ return youngest
+
+ def verify_repo(self):
+ """
+ """
+ svnrdump_headers_missing = re.compile(
+ "Text-content-sha1: .*|Text-copy-source-md5: .*|"
+ "Text-copy-source-sha1: .*|Text-delta-base-sha1: .*"
+ )
+ svnrdump_headers_always = re.compile(
+ "Prop-delta: .*"
+ )
+
+ dumpfile_a_n = svntest.actions.run_and_verify_dump(self.repo_dir,
+ deltas=False)
+ dumpfile_a_d = svntest.actions.run_and_verify_dump(self.repo_dir,
+ deltas=True)
+ dumpfile_r_d = svntest.actions.run_and_verify_svnrdump(
+ None, svntest.verify.AnyOutput, [], 0, 'dump', '-q', self.repo_url,
+ svntest.main.svnrdump_crosscheck_authentication)
+
+ # Compare the two deltas dumpfiles, ignoring expected differences
+ dumpfile_a_d_cmp = [l for l in dumpfile_a_d
+ if not svnrdump_headers_missing.match(l)
+ and not svnrdump_headers_always.match(l)]
+ dumpfile_r_d_cmp = [l for l in dumpfile_r_d
+ if not svnrdump_headers_always.match(l)]
+ # Ignore differences in number of blank lines between node records,
+ # as svnrdump puts 3 whereas svnadmin puts 2 after a replace-with-copy.
+ svntest.verify.compare_dump_files(None, None,
+ dumpfile_a_d_cmp,
+ dumpfile_r_d_cmp,
+ ignore_number_of_blank_lines=True)
+
+ # Try loading the dump files.
+ # For extra points, load each with the other tool:
+ # svnadmin dump | svnrdump load
+ # svnrdump dump | svnadmin load
+ repo_dir_a_n, repo_url_a_n = self.add_repo_path('load_a_n')
+ svntest.main.create_repos(repo_dir_a_n)
+ svntest.actions.enable_revprop_changes(repo_dir_a_n)
+ svntest.actions.run_and_verify_svnrdump(
+ dumpfile_a_n, svntest.verify.AnyOutput, [], 0, 'load', repo_url_a_n,
+ svntest.main.svnrdump_crosscheck_authentication)
+
+ repo_dir_a_d, repo_url_a_d = self.add_repo_path('load_a_d')
+ svntest.main.create_repos(repo_dir_a_d)
+ svntest.actions.enable_revprop_changes(repo_dir_a_d)
+ svntest.actions.run_and_verify_svnrdump(
+ dumpfile_a_d, svntest.verify.AnyOutput, [], 0, 'load', repo_url_a_d,
+ svntest.main.svnrdump_crosscheck_authentication)
+
+ repo_dir_r_d, repo_url_r_d = self.add_repo_path('load_r_d')
+ svntest.main.create_repos(repo_dir_r_d)
+ svntest.actions.run_and_verify_load(repo_dir_r_d, dumpfile_r_d)
+
+ # Dump the loaded repositories in the same way; expect exact equality
+ reloaded_dumpfile_a_n = svntest.actions.run_and_verify_dump(repo_dir_a_n)
+ reloaded_dumpfile_a_d = svntest.actions.run_and_verify_dump(repo_dir_a_d)
+ reloaded_dumpfile_r_d = svntest.actions.run_and_verify_dump(repo_dir_r_d)
+ svntest.verify.compare_dump_files(None, None,
+ reloaded_dumpfile_a_n,
+ reloaded_dumpfile_a_d,
+ ignore_uuid=True)
+ svntest.verify.compare_dump_files(None, None,
+ reloaded_dumpfile_a_d,
+ reloaded_dumpfile_r_d,
+ ignore_uuid=True)
+
+ # Run each dump through svndumpfilter and check for no further change.
+ for dumpfile in [dumpfile_a_n,
+ dumpfile_a_d,
+ dumpfile_r_d
+ ]:
+ ### No buffer size seems to work for update_tests-2. So skip that test?
+ ### (Its dumpfile size is ~360 KB non-delta, ~180 KB delta.)
+ if len(''.join(dumpfile)) > 100000:
+ continue
+
+ exit_code, dumpfile2, errput = svntest.main.run_command_stdin(
+ svntest.main.svndumpfilter_binary, None, -1, True,
+ dumpfile, '--quiet', 'include', '/')
+ assert not exit_code and not errput
+ # Ignore empty prop sections in the input file during comparison, as
+ # svndumpfilter strips them.
+ # Ignore differences in number of blank lines between node records,
+ # as svndumpfilter puts 3 instead of 2 after an add or delete record.
+ svntest.verify.compare_dump_files(None, None, dumpfile, dumpfile2,
+ expect_content_length_always=True,
+ ignore_empty_prop_sections=True,
+ ignore_number_of_blank_lines=True)
+
+ # Run the repository through 'svnsync' and check that this does not
+ # change the repository content. (Don't bother if it's already been
+ # created by svnsync.)
+ if "svn:sync-from-url\n" not in dumpfile_a_n:
+ dest_sbox = make_mirror(self)
+ verify_mirror(dest_sbox.repo_url, dest_sbox.repo_dir, dumpfile_a_n)
+
+ def verify(self, skip_cross_check=False):
+ """Do additional testing that should hold for any sandbox, such as
+ verifying that the repository can be dumped.
+ """
+ if (not skip_cross_check
+ and svntest.main.tests_verify_dump_load_cross_check()):
+ if self.is_built() and not self.read_only:
+ # verify that we can in fact dump the repo
+ # (except for the few tests that deliberately corrupt the repo)
+ os.chdir(self.was_cwd)
+ if os.path.exists(self.repo_dir):
+ logger.info("VERIFY: running dump/load cross-check")
+ self.verify_repo()
+ else:
+ logger.info("VERIFY: WARNING: skipping dump/load cross-check:"
+ " is-built=%s, read-only=%s"
+ % (self.is_built() and "true" or "false",
+ self.read_only and "true" or "false"))
+ pass
+
+def is_url(target):
+ return (target.startswith('^/')
+ or target.startswith('file://')
+ or target.startswith('http://')
+ or target.startswith('https://')
+ or target.startswith('svn://')
+ or target.startswith('svn+ssh://'))
+
+
+_deferred_test_paths = []
+
+def cleanup_deferred_test_paths():
+ global _deferred_test_paths
+ test_paths = _deferred_test_paths
+ _deferred_test_paths = []
+ for path in test_paths:
+ _cleanup_test_path(path, True)
+
+
+def _cleanup_test_path(path, retrying=False):
+ if retrying:
+ logger.info("CLEANUP: RETRY: %s", path)
+ else:
+ logger.info("CLEANUP: %s", path)
+
+ try:
+ svntest.main.safe_rmtree(path, retrying)
+ except:
+ logger.info("WARNING: cleanup failed, will try again later")
+ _deferred_test_paths.append(path)
diff --git a/subversion/tests/cmdline/svntest/testcase.py b/subversion/tests/cmdline/svntest/testcase.py
new file mode 100644
index 0000000..4782831
--- /dev/null
+++ b/subversion/tests/cmdline/svntest/testcase.py
@@ -0,0 +1,351 @@
+#
+# testcase.py: Control of test case execution.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.tigris.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+import os, types, sys
+
+import svntest
+
+# if somebody does a "from testcase import *", they only get these names
+__all__ = ['_XFail', '_Wimp', '_Skip', '_SkipUnless',
+ '_SkipDumpLoadCrossCheck']
+
+RESULT_OK = 'ok'
+RESULT_FAIL = 'fail'
+RESULT_SKIP = 'skip'
+
+
+class TextColors:
+ '''Some ANSI terminal constants for output color'''
+ ENDC = '\033[0;m'
+ FAILURE = '\033[1;31m'
+ SUCCESS = '\033[1;32m'
+
+ @classmethod
+ def disable(cls):
+ cls.ENDC = ''
+ cls.FAILURE = ''
+ cls.SUCCESS = ''
+
+ @classmethod
+ def success(cls, str):
+ return lambda: cls.SUCCESS + str + cls.ENDC
+
+ @classmethod
+ def failure(cls, str):
+ return lambda: cls.FAILURE + str + cls.ENDC
+
+
+if not sys.stdout.isatty() or sys.platform == 'win32':
+ TextColors.disable()
+
+
+class TestCase:
+ """A thing that can be tested. This is an abstract class with
+ several methods that need to be overridden."""
+
+ _result_map = {
+ RESULT_OK: (0, TextColors.success('PASS: '), True),
+ RESULT_FAIL: (1, TextColors.failure('FAIL: '), False),
+ RESULT_SKIP: (2, TextColors.success('SKIP: '), True),
+ }
+
+ def __init__(self, delegate=None, cond_func=lambda: True, doc=None, wip=None,
+ issues=None):
+ """Create a test case instance based on DELEGATE.
+
+ COND_FUNC is a callable that is evaluated at test run time and should
+ return a boolean value that determines how a pass or failure is
+ interpreted: see the specialized kinds of test case such as XFail and
+ Skip for details. The evaluation of COND_FUNC is deferred so that it
+ can base its decision on useful bits of information that are not
+ available at __init__ time (like the fact that we're running over a
+ particular RA layer).
+
+ DOC is ...
+
+ WIP is a string describing the reason for the work-in-progress
+ """
+ assert hasattr(cond_func, '__call__')
+
+ self._delegate = delegate
+ self._cond_func = cond_func
+ self.description = doc or delegate.description
+ self.inprogress = wip
+ self.issues = issues
+
+ def get_function_name(self):
+ """Return the name of the python function implementing the test."""
+ return self._delegate.get_function_name()
+
+ def get_sandbox_name(self):
+ """Return the name that should be used for the sandbox.
+
+ If a sandbox should not be constructed, this method returns None.
+ """
+ return self._delegate.get_sandbox_name()
+
+ def set_issues(self, issues):
+ """Set the issues associated with this test."""
+ self.issues = issues
+
+ def run(self, sandbox):
+ """Run the test within the given sandbox."""
+ return self._delegate.run(sandbox)
+
+ def list_mode(self):
+ return ''
+
+ def results(self, result):
+ # if our condition applied, then use our result map. otherwise, delegate.
+ if self._cond_func():
+ val = list(self._result_map[result])
+ val[1] = val[1]()
+ return val
+ return self._delegate.results(result)
+
+
+class FunctionTestCase(TestCase):
+ """A TestCase based on a naked Python function object.
+
+ FUNC should be a function that returns None on success and throws an
+ svntest.Failure exception on failure. It should have a brief
+ docstring describing what it does (and fulfilling certain conditions).
+ FUNC must take one argument, an Sandbox instance. (The sandbox name
+ is derived from the file name in which FUNC was defined)
+ """
+
+ def __init__(self, func, issues=None, skip_cross_check=False):
+ # it better be a function that accepts an sbox parameter and has a
+ # docstring on it.
+ assert isinstance(func, types.FunctionType)
+
+ name = func.__name__
+
+ assert func.__code__.co_argcount == 1, \
+ '%s must take an sbox argument' % name
+
+ doc = func.__doc__.strip()
+ assert doc, '%s must have a docstring' % name
+
+ # enforce stylistic guidelines for the function docstrings:
+ # - no longer than 50 characters
+ # - should not end in a period
+ # - should not be capitalized
+ assert len(doc) <= 50, \
+ "%s's docstring must be 50 characters or less" % name
+ assert doc[-1] != '.', \
+ "%s's docstring should not end in a period" % name
+ assert doc[0].lower() == doc[0], \
+ "%s's docstring should not be capitalized" % name
+
+ TestCase.__init__(self, doc=doc, issues=issues)
+ self.func = func
+ self.skip_cross_check = skip_cross_check
+
+ def get_function_name(self):
+ return self.func.__name__
+
+ def get_sandbox_name(self):
+ """Base the sandbox's name on the name of the file in which the
+ function was defined."""
+
+ filename = self.func.__code__.co_filename
+ return os.path.splitext(os.path.basename(filename))[0]
+
+ def run(self, sandbox):
+ result = self.func(sandbox)
+ sandbox.verify(skip_cross_check = self.skip_cross_check)
+ return result
+
+
+class _XFail(TestCase):
+ """A test that is expected to fail, if its condition is true."""
+
+ _result_map = {
+ RESULT_OK: (1, TextColors.failure('XPASS:'), False),
+ RESULT_FAIL: (0, TextColors.success('XFAIL:'), True),
+ RESULT_SKIP: (2, TextColors.success('SKIP: '), True),
+ }
+
+ def __init__(self, test_case, cond_func=lambda: True, wip=None,
+ issues=None):
+ """Create an XFail instance based on TEST_CASE. COND_FUNC is a
+ callable that is evaluated at test run time and should return a
+ boolean value. If COND_FUNC returns true, then TEST_CASE is
+ expected to fail (and a pass is considered an error); otherwise,
+ TEST_CASE is run normally. The evaluation of COND_FUNC is
+ deferred so that it can base its decision on useful bits of
+ information that are not available at __init__ time (like the fact
+ that we're running over a particular RA layer).
+
+ WIP is ...
+
+ ISSUES is an issue number (or a list of issue numbers) tracking this."""
+
+ TestCase.__init__(self, create_test_case(test_case), cond_func, wip=wip,
+ issues=issues)
+
+ def list_mode(self):
+ # basically, the only possible delegate is a Skip test. favor that mode.
+ return self._delegate.list_mode() or 'XFAIL'
+
+
+class _Wimp(_XFail):
+ """Like XFail, but indicates a work-in-progress: an unexpected pass
+ is not considered a test failure."""
+
+ _result_map = {
+ RESULT_OK: (0, TextColors.success('XPASS:'), True),
+ RESULT_FAIL: (0, TextColors.success('XFAIL:'), True),
+ RESULT_SKIP: (2, TextColors.success('SKIP: '), True),
+ }
+
+ def __init__(self, wip, test_case, cond_func=lambda: True):
+ _XFail.__init__(self, test_case, cond_func, wip)
+
+
+class _Skip(TestCase):
+ """A test that will be skipped if its conditional is true."""
+
+ def __init__(self, test_case, cond_func=lambda: True, issues=None):
+ """Create a Skip instance based on TEST_CASE. COND_FUNC is a
+ callable that is evaluated at test run time and should return a
+ boolean value. If COND_FUNC returns true, then TEST_CASE is
+ skipped; otherwise, TEST_CASE is run normally.
+ The evaluation of COND_FUNC is deferred so that it can base its
+ decision on useful bits of information that are not available at
+ __init__ time (like the fact that we're running over a
+ particular RA layer)."""
+
+ TestCase.__init__(self, create_test_case(test_case), cond_func,
+ issues=issues)
+
+ def list_mode(self):
+ if self._cond_func():
+ return 'SKIP'
+ return self._delegate.list_mode()
+
+ def get_sandbox_name(self):
+ if self._cond_func():
+ return None
+ return self._delegate.get_sandbox_name()
+
+ def run(self, sandbox):
+ if self._cond_func():
+ raise svntest.Skip
+ return self._delegate.run(sandbox)
+
+
+class _SkipUnless(_Skip):
+ """A test that will be skipped if its conditional is false."""
+
+ def __init__(self, test_case, cond_func):
+ _Skip.__init__(self, test_case, lambda c=cond_func: not c())
+
+
+class _SkipDumpLoadCrossCheck(TestCase):
+ """A test that will skip the post-test dump/load cross-check."""
+
+ def __init__(self, test_case, cond_func=lambda: True, wip=None,
+ issues=None):
+ TestCase.__init__(self,
+ create_test_case(test_case, skip_cross_check=True),
+ cond_func, wip=wip, issues=issues)
+
+
+def create_test_case(func, issues=None, skip_cross_check=False):
+ if isinstance(func, TestCase):
+ return func
+ else:
+ return FunctionTestCase(func, issues=issues,
+ skip_cross_check=skip_cross_check)
+
+
+# Various decorators to make declaring tests as such simpler
+def XFail_deco(cond_func = lambda: True):
+ def _second(func):
+ if isinstance(func, TestCase):
+ return _XFail(func, cond_func, issues=func.issues)
+ else:
+ return _XFail(func, cond_func)
+
+ return _second
+
+
+def Wimp_deco(wip, cond_func = lambda: True):
+ def _second(func):
+ if isinstance(func, TestCase):
+ return _Wimp(wip, func, cond_func, issues=func.issues)
+ else:
+ return _Wimp(wip, func, cond_func)
+
+ return _second
+
+
+def Skip_deco(cond_func = lambda: True):
+ def _second(func):
+ if isinstance(func, TestCase):
+ return _Skip(func, cond_func, issues=func.issues)
+ else:
+ return _Skip(func, cond_func)
+
+ return _second
+
+
+def SkipUnless_deco(cond_func):
+ def _second(func):
+ if isinstance(func, TestCase):
+ return _Skip(func, lambda c=cond_func: not c(), issues=func.issues)
+ else:
+ return _Skip(func, lambda c=cond_func: not c())
+
+ return _second
+
+
+def Issues_deco(*issues):
+ def _second(func):
+ if isinstance(func, TestCase):
+ # if the wrapped thing is already a test case, just set the issues
+ func.set_issues(issues)
+ return func
+
+ else:
+ # we need to wrap the function
+ return create_test_case(func, issues=issues)
+
+ return _second
+
+def SkipDumpLoadCrossCheck_deco(cond_func = lambda: True):
+ def _second(func):
+ if isinstance(func, TestCase):
+ return _SkipDumpLoadCrossCheck(func, cond_func, issues=func.issues)
+ else:
+ return _SkipDumpLoadCrossCheck(func, cond_func)
+
+ return _second
+
+
+# Create a singular alias, for linguistic correctness
+Issue_deco = Issues_deco
diff --git a/subversion/tests/cmdline/svntest/tree.py b/subversion/tests/cmdline/svntest/tree.py
new file mode 100644
index 0000000..6c34238
--- /dev/null
+++ b/subversion/tests/cmdline/svntest/tree.py
@@ -0,0 +1,881 @@
+#
+# tree.py: tools for comparing directory trees
+#
+# Subversion is a tool for revision control.
+# See http://subversion.tigris.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+import re
+import os
+import sys
+if sys.version_info[0] >= 3:
+ # Python >=3.0
+ from io import StringIO
+else:
+ # Python <3.0
+ from cStringIO import StringIO
+from xml.dom.minidom import parseString
+import base64
+import logging
+
+import svntest
+
+logger = logging.getLogger()
+
+# Tree Exceptions.
+
+# All tree exceptions should inherit from SVNTreeError
+class SVNTreeError(svntest.Failure):
+ "Exception raised if you screw up in the tree module."
+ pass
+
+class SVNTreeUnequal(SVNTreeError):
+ "Exception raised if two trees are unequal."
+ pass
+
+class SVNTypeMismatch(SVNTreeError):
+ "Exception raised if one node is file and other is dir"
+ pass
+
+#========================================================================
+
+# ===> Overview of our Datastructures <===
+
+# The general idea here is that many, many things can be represented by
+# a tree structure:
+
+# - a working copy's structure and contents
+# - the output of 'svn status'
+# - the output of 'svn checkout/update'
+# - the output of 'svn commit'
+
+# The idea is that a test function creates an "expected" tree of some
+# kind, and is then able to compare it to an "actual" tree that comes
+# from running the Subversion client. This is what makes a test
+# automated; if an actual and expected tree match exactly, then the test
+# has passed. (See compare_trees() below.)
+
+# The SVNTreeNode class is the fundamental data type used to build tree
+# structures. The class contains a method for "dropping" a new node
+# into an ever-growing tree structure. (See also create_from_path()).
+
+# We have four parsers in this file for the four use cases listed above:
+# each parser examines some kind of input and returns a tree of
+# SVNTreeNode objects. (See build_tree_from_checkout(),
+# build_tree_from_commit(), build_tree_from_status(), and
+# build_tree_from_wc()). These trees are the "actual" trees that result
+# from running the Subversion client.
+
+# Also necessary, of course, is a convenient way for a test to create an
+# "expected" tree. The test *could* manually construct and link a bunch
+# of SVNTreeNodes, certainly. But instead, all the tests are using the
+# build_generic_tree() routine.
+
+# build_generic_tree() takes a specially-formatted list of lists as
+# input, and returns a tree of SVNTreeNodes. The list of lists has this
+# structure:
+
+# [ ['/full/path/to/item', 'text contents', {prop-hash}, {att-hash}],
+# [...],
+# [...],
+# ... ]
+
+# You can see that each item in the list essentially defines an
+# SVNTreeNode. build_generic_tree() instantiates a SVNTreeNode for each
+# item, and then drops it into a tree by parsing each item's full path.
+
+# So a typical test routine spends most of its time preparing lists of
+# this format and sending them to build_generic_tree(), rather than
+# building the "expected" trees directly.
+
+# ### Note: in the future, we'd like to remove this extra layer of
+# ### abstraction. We'd like the SVNTreeNode class to be more
+# ### directly programmer-friendly, providing a number of accessor
+# ### routines, so that tests can construct trees directly.
+
+# The first three fields of each list-item are self-explanatory. It's
+# the fourth field, the "attribute" hash, that needs some explanation.
+# The att-hash is used to place extra information about the node itself,
+# depending on the parsing context:
+
+# - in the 'svn co/up' use-case, each line of output starts with two
+# characters from the set of (A, D, G, U, C, _) or 'Restored'. The
+#   status code is stored in an attribute named 'status'.  In the case
+# of a restored file, the word 'Restored' is stored in an attribute
+# named 'verb'.
+
+# - in the 'svn ci/im' use-case, each line of output starts with one
+# of the words (Adding, Deleting, Sending). This verb is stored in
+# an attribute named 'verb'.
+
+# - in the 'svn status' use-case (which is always run with the -v
+# (--verbose) flag), each line of output contains a working revision
+# number and a two-letter status code similar to the 'svn co/up'
+# case. This information is stored in attributes named 'wc_rev'
+# and 'status'. The repository revision is also printed, but it
+# is ignored.
+
+# - in the working-copy use-case, the att-hash is ignored.
+
+
+# Finally, one last explanation: the file 'actions.py' contains a number
+# of helper routines named 'run_and_verify_FOO'. These routines take
+# one or more "expected" trees as input, then run some svn subcommand,
+# then push the output through an appropriate parser to derive an
+# "actual" tree. Then it runs compare_trees() and raises an exception
+# on failure. This is why most tests typically end with a call to
+# run_and_verify_FOO().
+
+
+
+#========================================================================
+
+# A node in a tree.
+#
+# If CHILDREN is None, then the node is a file. Otherwise, CHILDREN
+# is a list of the nodes making up that directory's children.
+#
+# NAME is simply the name of the file or directory. CONTENTS is a
+# string that contains the file's contents (if a file), PROPS are
+# properties attached to files or dirs, and ATTS is a dictionary of
+# other metadata attached to the node.
+
+class SVNTreeNode:
+
+ def __init__(self, name, children=None, contents=None, props={}, atts={}):
+ self.name = name
+ self.children = children
+ self.contents = contents
+ self.props = props
+ self.atts = atts
+ self.path = name
+
+# TODO: Check to make sure contents and children are mutually exclusive
+
+ def add_child(self, newchild):
+ child_already_exists = 0
+ if self.children is None: # if you're a file,
+ self.children = [] # become an empty dir.
+ else:
+ for a in self.children:
+ if a.name == newchild.name:
+ child_already_exists = 1
+ break
+
+ if child_already_exists:
+ if newchild.children is None:
+ # this is the 'end' of the chain, so copy any content here.
+ a.contents = newchild.contents
+ a.props = newchild.props
+ a.atts = newchild.atts
+ a.path = os.path.join(self.path, newchild.name)
+ else:
+ # try to add dangling children to your matching node
+ for i in newchild.children:
+ a.add_child(i)
+ else:
+ self.children.append(newchild)
+ newchild.path = os.path.join(self.path, newchild.name)
+
+
+ def pprint(self, stream = sys.stdout):
+ "Pretty-print the meta data for this node to STREAM."
+ stream.write(" * Node name: %s\n" % self.name)
+ stream.write(" Path: %s\n" % self.path)
+ mime_type = self.props.get("svn:mime-type")
+ if not mime_type or mime_type.startswith("text/"):
+ if self.children is not None:
+ stream.write(" Contents: N/A (node is a directory)\n")
+ else:
+ stream.write(" Contents: %s\n" % self.contents)
+ else:
+ stream.write(" Contents: %d bytes (binary)\n" % len(self.contents))
+ stream.write(" Properties: %s\n" % self.props)
+ stream.write(" Attributes: %s\n" % self.atts)
+ ### FIXME: I'd like to be able to tell the difference between
+ ### self.children is None (file) and self.children == [] (empty
+ ### directory), but it seems that most places that construct
+ ### SVNTreeNode objects don't even try to do that. --xbc
+ ###
+ ### See issue #1611 about this problem. -kfogel
+ if self.children is not None:
+ stream.write(" Children: %s\n" % len(self.children))
+ else:
+ stream.write(" Children: None (node is probably a file)\n")
+ stream.flush()
+
+ def get_printable_path(self):
+ """Remove some occurrences of root_node_name = "__SVN_ROOT_NODE",
+ it is in the way when matching for a subtree, and looks bad."""
+ path = self.path
+ if path.startswith(root_node_name + os.sep):
+ path = path[len(root_node_name + os.sep):]
+ return path
+
+ def print_script(self, stream = sys.stdout, subtree = "", prepend="\n ",
+ drop_empties = True):
+ """Python-script-print the meta data for this node to STREAM.
+ Print only those nodes whose path string starts with the string SUBTREE,
+ and print only the part of the path string that remains after SUBTREE.
+ PREPEND is a string prepended to each node printout (does the line
+ feed if desired, don't include a comma in PREPEND).
+ If DROP_EMPTIES is true, all dir nodes that have no data set in them
+ (no props, no atts) and that have children (so they are included
+ implicitly anyway) are not printed.
+ Return 1 if this node was printed, 0 otherwise (added up by
+ dump_tree_script())"""
+
+ # figure out if this node would be obsolete to print.
+ if drop_empties and len(self.props) < 1 and len(self.atts) < 1 and \
+ self.contents is None and self.children is not None:
+ return 0
+
+ path = self.get_printable_path()
+
+ # remove the subtree path, skip this node if necessary.
+ if path.startswith(subtree):
+ path = path[len(subtree):]
+ elif path + os.sep == subtree:
+ # Many callers set subtree to 'some-path' + os.sep. Don't skip the
+ # root node in that case.
+ path = ''
+ else:
+ return 0
+
+ if path.startswith(os.sep):
+ path = path[1:]
+
+ line = prepend
+ line += "%-20s: Item(" % ("'%s'" % path.replace(os.sep, '/'))
+ comma = False
+
+ mime_type = self.props.get("svn:mime-type")
+ if not mime_type or mime_type.startswith("text/"):
+ if self.contents is not None:
+ # Escape some characters for nicer script and readability.
+ # (This is error output. I guess speed is no consideration here.)
+ line += "contents=\"%s\"" % (self.contents
+ .replace('\n','\\n')
+ .replace('"','\\"')
+ .replace('\r','\\r')
+ .replace('\t','\\t'))
+ comma = True
+ else:
+ line += 'content is binary data'
+ comma = True
+
+ if self.props:
+ if comma:
+ line += ", "
+ line += ("props=%s" % self.props)
+ comma = True
+
+ for name in self.atts:
+ if comma:
+ line += ", "
+ line += "%s='%s'" % (name, self.atts[name])
+ comma = True
+
+ line += "),"
+ stream.write("%s" % line)
+ stream.flush()
+ return 1
+
+
+ def __str__(self):
+ s = StringIO()
+ self.pprint(s)
+ return s.getvalue()
+
+
+ def __cmp__(self, other):
+ """Define a simple ordering of two nodes without regard to their full
+ path (i.e. position in the tree). This can be used for sorting the
+ children within a directory."""
+ return cmp(self.name, other.name)
+
+ def as_state(self, prefix=None):
+ """Return an svntest.wc.State instance that is equivalent to this tree."""
+ root = self
+ if self.path == root_node_name:
+ assert prefix is None
+ wc_dir = ''
+ while True:
+ if root is not self: # don't prepend ROOT_NODE_NAME
+ wc_dir = os.path.join(wc_dir, root.name)
+ if root.contents or root.props or root.atts:
+ break
+ if not root.children or len(root.children) != 1:
+ break
+ root = root.children[0]
+ state = svntest.wc.State(wc_dir, { })
+ if root.contents or root.props or root.atts:
+ state.add({'': root.as_item()})
+ prefix = wc_dir
+ else:
+ assert prefix is not None
+
+ path = self.path
+ if path.startswith(root_node_name):
+ path = path[len(root_node_name)+1:]
+ # prefix should only be set on a recursion, which means a child,
+ # which means this path better not be the same as the prefix.
+ assert path != prefix, 'not processing a child of the root'
+ l = len(prefix)
+ if l > 0:
+ assert path[:l] == prefix, \
+ '"%s" is not a prefix of "%s"' % (prefix, path)
+ # return the portion after the separator
+ path = path[l+1:].replace(os.sep, '/')
+
+ state = svntest.wc.State('', {
+ path: self.as_item()
+ })
+
+ if root.children:
+ for child in root.children:
+ state.add_state('', child.as_state(prefix))
+
+ return state
+
+ def as_item(self):
+ return svntest.wc.StateItem(self.contents,
+ self.props,
+ self.atts.get('status'),
+ self.atts.get('verb'),
+ self.atts.get('wc_rev'),
+ self.atts.get('locked'),
+ self.atts.get('copied'),
+ self.atts.get('switched'),
+ self.atts.get('writelocked'),
+ self.atts.get('treeconflict'))
+
+ def recurse(self, function):
+ results = []
+ results += [ function(self) ]
+ if self.children:
+ for child in self.children:
+ results += child.recurse(function)
+ return results
+
+ def find_node(self, path):
+ if self.get_printable_path() == path:
+ return self
+ if self.children:
+ for child in self.children:
+ result = child.find_node(path)
+ if result:
+ return result
+ return None
+
+# reserved name of the root of the tree
+root_node_name = "__SVN_ROOT_NODE"
+
+
+# helper func
+def add_elements_as_path(top_node, element_list):
+ """Add the elements in ELEMENT_LIST as if they were a single path
+ below TOP_NODE."""
+
+ # The idea of this function is to take a list like so:
+ # ['A', 'B', 'C'] and a top node, say 'Z', and generate a tree
+ # like this:
+ #
+ # Z -> A -> B -> C
+ #
+ # where 1 -> 2 means 2 is a child of 1.
+ #
+
+ prev_node = top_node
+ for i in element_list:
+ new_node = SVNTreeNode(i, None)
+ prev_node.add_child(new_node)
+ prev_node = new_node
+
+
+# Helper for compare_trees
+def compare_file_nodes(a, b):
+ """Compare two nodes, A (actual) and B (expected). Compare their names,
+ contents, properties and attributes, ignoring children. Return 0 if the
+ same, 1 otherwise."""
+ if a.name != b.name:
+ return 1
+ if a.contents != b.contents:
+ return 1
+ if a.props != b.props:
+ return 1
+ if a.atts == b.atts:
+ # No fixes necessary
+ return 0
+
+  # Fix a pre-WC-NG assumption in our testsuite
+ if (b.atts == {'status': 'A ', 'wc_rev': '0'}) \
+ and (a.atts == {'status': 'A ', 'wc_rev': '-'}):
+ return 0
+ return 1
+
+# Helper for compare_trees
+def compare_dir_nodes(a, b):
+ """Compare two nodes, A (actual) and B (expected). Compare their names,
+ properties and attributes, ignoring children. Return 0 if the
+ same, 1 otherwise."""
+ if a.name != b.name:
+ return 1
+ if (a.props != b.props):
+ return 1
+ if (a.atts == b.atts):
+ # No fixes necessary
+ return 0
+
+  # Fix a pre-WC-NG assumption in our testsuite
+ if (b.atts == {'status': 'A ', 'wc_rev': '0'}) \
+ and (a.atts == {'status': 'A ', 'wc_rev': '-'}):
+ return 0
+ return 1
+
+
+# Internal utility used by most build_tree_from_foo() routines.
+#
+# (Take the output and .add_child() it to a root node.)
+
+def create_from_path(path, contents=None, props={}, atts={}):
+ """Create and return a linked list of treenodes, given a PATH
+ representing a single entry into that tree. CONTENTS and PROPS are
+ optional arguments that will be deposited in the tail node."""
+
+ # get a list of all the names in the path
+ # each of these will be a child of the former
+ if os.sep != "/":
+ path = path.replace(os.sep, "/")
+ elements = path.split("/")
+ if len(elements) == 0:
+ ### we should raise a less generic error here. which?
+ raise SVNTreeError
+
+ root_node = None
+
+ # if this is Windows: if the path contains a drive name (X:), make it
+ # the root node.
+ if os.name == 'nt':
+ m = re.match("([a-zA-Z]:)(.+)", elements[0])
+ if m:
+ root_node = SVNTreeNode(m.group(1), None)
+ elements[0] = m.group(2)
+ add_elements_as_path(root_node, elements[0:])
+
+ if not root_node:
+ root_node = SVNTreeNode(elements[0], None)
+ add_elements_as_path(root_node, elements[1:])
+
+ # deposit contents in the very last node.
+ node = root_node
+ while True:
+ if node.children is None:
+ node.contents = contents
+ node.props = props
+ node.atts = atts
+ break
+ node = node.children[0]
+
+ return root_node
+
+
+eol_re = re.compile(r'(\r\n|\r)')
+eol_re_binary = re.compile(br'(\r\n|\r)')
+
+# helper for build_tree_from_wc()
+def get_props(paths):
+ """Return a hash of hashes of props for PATHS, using the svn client. Convert
+ each embedded end-of-line to a single LF character."""
+
+ # It's not kosher to look inside .svn/ and try to read the internal
+ # property storage format. Instead, we use 'svn proplist'. After
+ # all, this is the only way the user can retrieve them, so we're
+ # respecting the black-box paradigm.
+
+ files = {}
+ exit_code, output, errput = svntest.main.run_svn(1,
+ "proplist",
+ "--verbose",
+ "--xml",
+ *paths)
+
+ output = (line for line in output if not line.startswith('DBG:'))
+ dom = parseString(''.join(output))
+ target_nodes = dom.getElementsByTagName('target')
+ for target_node in target_nodes:
+ filename = target_node.attributes['path'].nodeValue
+ file_props = {}
+ for property_node in target_node.getElementsByTagName('property'):
+ name = property_node.attributes['name'].nodeValue
+ if property_node.hasChildNodes():
+ text_node = property_node.firstChild
+ value = text_node.nodeValue
+ else:
+ value = ''
+ try:
+ encoding = property_node.attributes['encoding'].nodeValue
+ if encoding == 'base64':
+ value = base64.b64decode(value)
+ else:
+ raise Exception("Unknown encoding '%s' for file '%s' property '%s'"
+ % (encoding, filename, name,))
+ except KeyError:
+ pass
+ # If the property value contained a CR, or if under Windows an
+ # "svn:*" property contains a newline, then the XML output
+ # contains a CR character XML-encoded as '&#13;'. The XML
+ # parser converts it back into a CR character. So again convert
+ # all end-of-line variants into a single LF:
+ if isinstance(value, str):
+ value = eol_re.sub('\n', value)
+ else:
+ value = eol_re_binary.sub(b'\n', value)
+ file_props[name] = value
+ files[filename] = file_props
+
+ dom.unlink()
+ return files
+
+
+### ridiculous function. callers should do this one line themselves.
+def get_text(path):
+ "Return a string with the textual contents of a file at PATH."
+
+ # sanity check
+ if not os.path.isfile(path):
+ return None
+
+ return open(path, 'r').read()
+
+
+def get_child(node, name):
+ """If SVNTreeNode NODE contains a child named NAME, return child;
+ else, return None. If SVNTreeNode is not a directory, exit completely."""
+ if node.children == None:
+ logger.error("Foolish call to get_child.")
+ sys.exit(1)
+ for n in node.children:
+ if name == n.name:
+ return n
+ return None
+
+
+# Helper for compare_trees
+def default_singleton_handler(node, description):
+ """Print SVNTreeNode NODE's name, describing it with the string
+ DESCRIPTION, then raise SVNTreeUnequal."""
+ logger.warn("Couldn't find node '%s' in %s tree" % (node.name, description))
+ logger.warn(str(node))
+ raise SVNTreeUnequal
+
+# A test helper function implementing the singleton_handler_a API.
+def detect_conflict_files(node, extra_files):
+ """NODE has been discovered, an extra file on disk. Verify that it
+ matches one of the regular expressions in the EXTRA_FILES list. If
+ it matches, remove the match from the list. If it doesn't match,
+ raise an exception."""
+
+ for pattern in extra_files:
+ mo = re.match(pattern, node.name)
+ if mo:
+ extra_files.pop(extra_files.index(pattern)) # delete pattern from list
+ break
+ else:
+ msg = "Encountered unexpected disk path '" + node.name + "'"
+ logger.warn(msg)
+ logger.warn(str(node))
+ raise SVNTreeUnequal(msg)
+
+def detect_conflict_files_done(extra_files):
+ """Done handler for detect_conflict_files"""
+ if len(extra_files):
+ raise SVNTreeError("Not all extra reject files have been accounted for")
+
+###########################################################################
+###########################################################################
+# EXPORTED ROUTINES ARE BELOW
+
+
+# Main tree comparison routine!
+
+def compare_trees(label,
+ a, b,
+ singleton_handler_a = None,
+ a_baton = None,
+ singleton_handler_b = None,
+ b_baton = None):
+ """Compare SVNTreeNodes A (actual) and B (expected), expressing
+ differences using FUNC_A and FUNC_B. FUNC_A and FUNC_B are
+ functions of two arguments (a SVNTreeNode and a context baton), and
+ may raise exception SVNTreeUnequal, in which case they use the
+ string LABEL to describe the error (their return value is ignored).
+ LABEL is typically "output", "disk", "status", or some other word
+ that labels the trees being compared.
+
+ If A and B are both files, then return if their contents,
+ properties, and names are all the same; else raise a SVNTreeUnequal.
+ If A is a file and B is a directory, raise a SVNTreeUnequal; same
+ vice-versa. If both are directories, then for each entry that
+ exists in both, call compare_trees on the two entries; otherwise, if
+ the entry exists only in A, invoke FUNC_A on it, and likewise for
+ B with FUNC_B."""
+
+ def display_nodes(a, b):
+ 'Display two nodes, expected and actual.'
+ o = StringIO()
+ o.write("=============================================================\n")
+ o.write("Expected '%s' and actual '%s' in %s tree are different!\n"
+ % (b.name, a.name, label))
+ o.write("=============================================================\n")
+ o.write("EXPECTED NODE TO BE:\n")
+ o.write("=============================================================\n")
+ b.pprint(o)
+ o.write("=============================================================\n")
+ o.write("ACTUAL NODE FOUND:\n")
+ o.write("=============================================================\n")
+ a.pprint(o)
+ logger.warn(o.getvalue())
+ o.close()
+
+ # Setup singleton handlers
+ if singleton_handler_a is None:
+ singleton_handler_a = default_singleton_handler
+ a_baton = "expected " + label
+ if singleton_handler_b is None:
+ singleton_handler_b = default_singleton_handler
+ b_baton = "actual " + label
+
+ try:
+ # A and B are both files.
+ if (a.children is None) and (b.children is None):
+ if compare_file_nodes(a, b):
+ display_nodes(a, b)
+ raise SVNTreeUnequal
+ # One is a file, one is a directory.
+ elif (((a.children is None) and (b.children is not None))
+ or ((a.children is not None) and (b.children is None))):
+ display_nodes(a, b)
+ raise SVNTypeMismatch
+ # They're both directories.
+ else:
+ if compare_dir_nodes(a, b):
+ display_nodes(a, b)
+ raise SVNTreeUnequal
+
+ accounted_for = []
+ # For each child of A, check and see if it's in B. If so, run
+ # compare_trees on the two children and add b's child to
+ # accounted_for. If not, run FUNC_A on the child. Next, for each
+ # child of B, check and see if it's in accounted_for. If it is,
+ # do nothing. If not, run FUNC_B on it.
+ for a_child in a.children:
+ b_child = get_child(b, a_child.name)
+ if b_child:
+ accounted_for.append(b_child)
+ compare_trees(label, a_child, b_child,
+ singleton_handler_a, a_baton,
+ singleton_handler_b, b_baton)
+ else:
+ singleton_handler_a(a_child, a_baton)
+ for b_child in b.children:
+ if b_child not in accounted_for:
+ singleton_handler_b(b_child, b_baton)
+ except SVNTypeMismatch:
+ logger.warn('Unequal Types: one Node is a file, the other is a directory')
+ raise SVNTreeUnequal
+ except IndexError:
+ logger.warn("Error: unequal number of children")
+ raise SVNTreeUnequal
+ except SVNTreeUnequal:
+ if a.name != root_node_name:
+ logger.warn("Unequal at node %s" % a.name)
+ raise
+
+
+
+# Visually show a tree's structure
+
+def _dump_tree(n,indent="",stream=sys.stdout):
+ """Print out a nice representation of the structure of the tree in
+ the SVNTreeNode N. Prefix each line with the string INDENT."""
+
+ # Code partially stolen from Dave Beazley
+ tmp_children = sorted(n.children or [], key=SVNTreeNode.get_printable_path)
+
+ if n.name == root_node_name:
+ stream.write("%s%s\n" % (indent, "ROOT"))
+ else:
+ stream.write("%s%s\n" % (indent, n.name))
+
+ indent = indent.replace("-", " ")
+ indent = indent.replace("+", " ")
+ for i in range(len(tmp_children)):
+ c = tmp_children[i]
+ if i == len(tmp_children)-1:
+ _dump_tree(c,indent + " +-- ",stream)
+ else:
+ _dump_tree(c,indent + " |-- ",stream)
+
+
+def dump_tree(n):
+ output = StringIO()
+ _dump_tree(n,stream=output)
+ logger.warn(output.getvalue())
+ output.close()
+
+
+def dump_tree_script__crawler(n, subtree="", stream=sys.stdout):
+ "Helper for dump_tree_script. See that comment."
+ count = 0
+
+ # skip printing the root node.
+ if n.name != root_node_name:
+ count += n.print_script(stream, subtree)
+
+ for child in n.children or []:
+ count += dump_tree_script__crawler(child, subtree, stream)
+
+ return count
+
+
+def dump_tree_script(n, subtree="", stream=sys.stdout, wc_varname='wc_dir'):
+ """Print out a python script representation of the structure of the tree
+ in the SVNTreeNode N. Print only those nodes whose path string starts
+ with the string SUBTREE, and print only the part of the path string
+ that remains after SUBTREE.
+ The result is printed to STREAM.
+ The WC_VARNAME is inserted in the svntest.wc.State(wc_dir,{}) call
+ that is printed out (this is used by factory.py)."""
+
+ stream.write("svntest.wc.State(" + wc_varname + ", {")
+ count = dump_tree_script__crawler(n, subtree, stream)
+ if count > 0:
+ stream.write('\n')
+ stream.write("})")
+
+
+###################################################################
+###################################################################
+# PARSERS that return trees made of SVNTreeNodes....
+
+
+###################################################################
+# Build an "expected" static tree from a list of lists
+
+
+# Create a list of lists, of the form:
+#
+# [ [path, contents, props, atts], ... ]
+#
+# and run it through this parser. PATH is a string, a path to the
+# object. CONTENTS is either a string or None, and PROPS and ATTS are
+# populated dictionaries or {}. Each CONTENTS/PROPS/ATTS will be
+# attached to the basename-node of the associated PATH.
+
+def build_generic_tree(nodelist):
+ "Given a list of lists of a specific format, return a tree."
+
+ root = SVNTreeNode(root_node_name)
+
+ for list in nodelist:
+ new_branch = create_from_path(list[0], list[1], list[2], list[3])
+ root.add_child(new_branch)
+
+ return root
+
+
+####################################################################
+# Build trees from different kinds of subcommand output.
+
+
+# Parse co/up output into a tree.
+#
+# Tree nodes will contain no contents, a 'status' att, and a
+# 'treeconflict' att.
+
+def build_tree_from_checkout(lines, include_skipped=True):
+ "Return a tree derived by parsing the output LINES from 'co' or 'up'."
+
+ return svntest.wc.State.from_checkout(lines, include_skipped).old_tree()
+
+
+# Parse ci/im output into a tree.
+#
+# Tree nodes will contain no contents, and only one 'verb' att.
+
+def build_tree_from_commit(lines):
+ "Return a tree derived by parsing the output LINES from 'ci' or 'im'."
+
+ return svntest.wc.State.from_commit(lines).old_tree()
+
+
+# Parse status output into a tree.
+#
+# Tree nodes will contain no contents, and these atts:
+#
+# 'status', 'wc_rev',
+# ... and possibly 'locked', 'copied', 'switched',
+# 'writelocked' and 'treeconflict',
+# IFF columns non-empty.
+#
+
+def build_tree_from_status(lines):
+ "Return a tree derived by parsing the output LINES from 'st -vuq'."
+
+ return svntest.wc.State.from_status(lines).old_tree()
+
+
+# Parse merge "skipped" output
+
+def build_tree_from_skipped(lines):
+
+ return svntest.wc.State.from_skipped(lines).old_tree()
+
+
+def build_tree_from_diff_summarize(lines):
+ "Build a tree from output of diff --summarize"
+
+ return svntest.wc.State.from_summarize(lines).old_tree()
+
+
+####################################################################
+# Build trees by looking at the working copy
+
+
+# The reason the 'load_props' flag is off by default is because it
+# creates a drastic slowdown -- we spawn a new 'svn proplist'
+# process for every file and dir in the working copy!
+
+
+def build_tree_from_wc(wc_path, load_props=0, ignore_svn=1, keep_eol_style=False):
+ """Takes WC_PATH as the path to a working copy. Walks the tree below
+ that path, and creates the tree based on the actual found
+ files. If IGNORE_SVN is true, then exclude SVN admin dirs from the tree.
+ If LOAD_PROPS is true, the props will be added to the tree.
+
+ If KEEP_EOL_STYLE is set, don't let Python normalize the EOL when
+ reading working copy contents as text files. It has no effect on
+ binary files.
+ """
+
+ return svntest.wc.State.from_wc(wc_path, load_props, ignore_svn,
+ keep_eol_style).old_tree()
diff --git a/subversion/tests/cmdline/svntest/verify.py b/subversion/tests/cmdline/svntest/verify.py
new file mode 100644
index 0000000..904a044
--- /dev/null
+++ b/subversion/tests/cmdline/svntest/verify.py
@@ -0,0 +1,960 @@
+#
+# verify.py: routines that handle comparison and display of expected
+# vs. actual output
+#
+# Subversion is a tool for revision control.
+# See http://subversion.tigris.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+import re, sys
+from difflib import unified_diff, ndiff
+import pprint
+import logging
+
+import svntest
+
+logger = logging.getLogger()
+
+
+######################################################################
+# Exception types
+
+# All of these exceptions derive from svntest.Failure.  The
+# SVNUnexpectedOutput subtree identifies which stream (or the exit
+# code) failed to match; SVNDumpParseError is independent of streams.
+class SVNUnexpectedOutput(svntest.Failure):
+  """Exception raised if an invocation of svn results in unexpected
+  output of any kind."""
+  pass
+
+class SVNUnexpectedStdout(SVNUnexpectedOutput):
+  """Exception raised if an invocation of svn results in unexpected
+  output on STDOUT."""
+  pass
+
+class SVNUnexpectedStderr(SVNUnexpectedOutput):
+  """Exception raised if an invocation of svn results in unexpected
+  output on STDERR."""
+  pass
+
+class SVNExpectedStdout(SVNUnexpectedOutput):
+  """Exception raised if an invocation of svn results in no output on
+  STDOUT when output was expected."""
+  pass
+
+class SVNExpectedStderr(SVNUnexpectedOutput):
+  """Exception raised if an invocation of svn results in no output on
+  STDERR when output was expected."""
+  pass
+
+class SVNUnexpectedExitCode(SVNUnexpectedOutput):
+  """Exception raised if an invocation of svn exits with a value other
+  than what was expected."""
+  pass
+
+class SVNIncorrectDatatype(SVNUnexpectedOutput):
+  """Exception raised if invalid input is passed to the
+  run_and_verify_* API"""
+  pass
+
+class SVNDumpParseError(svntest.Failure):
+  """Exception raised if parsing a dump file fails"""
+  pass
+
+
+######################################################################
+# Comparison of expected vs. actual output
+
+def createExpectedOutput(expected, output_type, match_all=True):
+  """Return EXPECTED, promoted to an ExpectedOutput instance if not
+  None.  Raise SVNIncorrectDatatype if the data type of EXPECTED is
+  not handled.
+
+  A list is matched verbatim, a string is treated as a regex, and an
+  integer is treated as an expected Subversion error code (matched as
+  '.*: E<code>:.*' against any one line).  OUTPUT_TYPE only labels the
+  error message."""
+  if isinstance(expected, list):
+    expected = ExpectedOutput(expected)
+  elif isinstance(expected, str):
+    expected = RegexOutput(expected, match_all)
+  elif isinstance(expected, int):
+    expected = RegexOutput(".*: E%d:.*" % expected, False)
+  elif expected is AnyOutput:
+    # Callers may pass the AnyOutput class object itself as shorthand.
+    expected = AnyOutput()
+  elif expected is not None and not isinstance(expected, ExpectedOutput):
+    raise SVNIncorrectDatatype("Unexpected type for '%s' data" % output_type)
+  return expected
+
+class ExpectedOutput(object):
+  """Matches an ordered list of lines.
+
+  If MATCH_ALL is True, the expected lines must match all the actual
+  lines, one-to-one, in the same order.  If MATCH_ALL is False, the
+  expected lines must match a subset of the actual lines, one-to-one,
+  in the same order, ignoring any other actual lines among the
+  matching ones.
+  """
+
+  def __init__(self, expected, match_all=True):
+    """Initialize the expected output to EXPECTED which is a string, or
+    a list of strings.
+
+    See also: svntest.verify.createExpectedOutput().
+    """
+    assert expected is not None
+    self.expected = expected
+    self.match_all = match_all
+
+  def __str__(self):
+    return str(self.expected)
+
+  def __cmp__(self, other):
+    # Direct comparison is deliberately disabled; callers must use
+    # matches() so that subclasses control the semantics.
+    raise TypeError("ExpectedOutput does not implement direct comparison; "
+                    "see the 'matches()' method")
+
+  def matches(self, actual):
+    """Return whether SELF matches ACTUAL (which may be a list
+    of newline-terminated lines, or a single string).
+    """
+    assert actual is not None
+    # Normalize both sides to lists of lines before comparing.
+    expected = self.expected
+    if not isinstance(expected, list):
+      expected = [expected]
+    if not isinstance(actual, list):
+      actual = [actual]
+
+    if self.match_all:
+      return expected == actual
+
+    # Subset match: each expected line must be found, in order, among
+    # the actual lines.  NOTE(review): an empty EXPECTED list with a
+    # non-empty ACTUAL would index past the end here; AnyOutput (which
+    # passes []) overrides matches(), so confirm no other caller does.
+    i_expected = 0
+    for actual_line in actual:
+      if expected[i_expected] == actual_line:
+        i_expected += 1
+        if i_expected == len(expected):
+          return True
+    return False
+
+  def display_differences(self, message, label, actual):
+    """Show the differences between the expected and ACTUAL lines.  Print
+    MESSAGE unless it is None, the expected lines, the ACTUAL lines,
+    and a diff, all labeled with LABEL.
+    """
+    display_lines(message, self.expected, actual, label, label)
+    display_lines_diff(self.expected, actual, label, label)
+
+
+class AnyOutput(ExpectedOutput):
+  """Matches any non-empty output.
+  """
+
+  def __init__(self):
+    # The expected-lines list is unused; matches() is fully overridden.
+    ExpectedOutput.__init__(self, [], False)
+
+  def matches(self, actual):
+    assert actual is not None
+
+    if len(actual) == 0:
+      # No actual output. No match.
+      return False
+
+    for line in actual:
+      # If any line has some text, then there is output, so we match.
+      if line:
+        return True
+
+    # We did not find a line with text. No match.
+    return False
+
+  def display_differences(self, message, label, actual):
+    # Nothing concrete was expected, so only the failure message is shown.
+    if message:
+      logger.warn(message)
+
+
+class RegexOutput(ExpectedOutput):
+  """Matches a single regular expression.
+
+  If MATCH_ALL is true, every actual line must match the RE.  If
+  MATCH_ALL is false, at least one actual line must match the RE.  In
+  any case, there must be at least one line of actual output.
+  """
+
+  def __init__(self, expected, match_all=True):
+    "EXPECTED is a regular expression string."
+    assert isinstance(expected, str) or isinstance(expected, bytes)
+    ExpectedOutput.__init__(self, expected, match_all)
+    # Pre-compile once; matches() may be called on many lines.
+    self.expected_re = re.compile(expected)
+
+  def matches(self, actual):
+    assert actual is not None
+
+    if not isinstance(actual, list):
+      actual = [actual]
+
+    # If a regex was provided assume that we require some actual output.
+    # Fail if we don't have any.
+    if len(actual) == 0:
+      return False
+
+    # re.match anchors at the start of each line only (not the end).
+    if self.match_all:
+      return all(self.expected_re.match(line) for line in actual)
+    else:
+      return any(self.expected_re.match(line) for line in actual)
+
+  def display_differences(self, message, label, actual):
+    display_lines(message, self.expected, actual, label + ' (regexp)', label)
+
+  def insert(self, index, line):
+    # NOTE(review): self.expected is a str/bytes for this class, and
+    # neither has an .insert() method -- this only works if EXPECTED was
+    # actually a list.  Confirm whether any caller reaches this method.
+    self.expected.insert(index, line)
+    self.expected_re = re.compile(self.expected)
+
+class RegexListOutput(ExpectedOutput):
+  """Matches an ordered list of regular expressions.
+
+  If MATCH_ALL is True, the expressions must match all the actual
+  lines, one-to-one, in the same order.  If MATCH_ALL is False, the
+  expressions must match a subset of the actual lines, one-to-one, in
+  the same order, ignoring any other actual lines among the matching
+  ones.
+
+  In any case, there must be at least one line of actual output.
+  """
+
+  def __init__(self, expected, match_all=True):
+    "EXPECTED is a list of regular expression strings."
+    assert isinstance(expected, list)
+    ExpectedOutput.__init__(self, expected, match_all)
+    # Compile each expression once, up front.
+    self.expected_res = [re.compile(e) for e in expected]
+
+  def matches(self, actual):
+    assert actual is not None
+    if not isinstance(actual, list):
+      actual = [actual]
+
+    if self.match_all:
+      # One-to-one: lengths must agree and each RE must match its line.
+      return (len(self.expected_res) == len(actual) and
+              all(e.match(a) for e, a in zip(self.expected_res, actual)))
+
+    # Subset match: advance through the REs as actual lines match them.
+    i_expected = 0
+    for actual_line in actual:
+      if self.expected_res[i_expected].match(actual_line):
+        i_expected += 1
+        if i_expected == len(self.expected_res):
+          return True
+    return False
+
+  def display_differences(self, message, label, actual):
+    display_lines(message, self.expected, actual, label + ' (regexp)', label)
+
+  def insert(self, index, line):
+    # Insert a new expected regex at INDEX and recompile the whole list.
+    self.expected.insert(index, line)
+    self.expected_res = [re.compile(e) for e in self.expected]
+
+
+class UnorderedOutput(ExpectedOutput):
+  """Matches an unordered list of lines.
+
+  The expected lines must match all the actual lines, one-to-one, in
+  any order.
+  """
+
+  def __init__(self, expected):
+    assert isinstance(expected, list)
+    ExpectedOutput.__init__(self, expected)
+
+  def matches(self, actual):
+    if not isinstance(actual, list):
+      actual = [actual]
+
+    # Order-insensitive, multiplicity-sensitive comparison.
+    return sorted(self.expected) == sorted(actual)
+
+  def display_differences(self, message, label, actual):
+    display_lines(message, self.expected, actual, label + ' (unordered)', label)
+    display_lines_diff(self.expected, actual, label + ' (unordered)', label)
+
+
+class UnorderedRegexListOutput(ExpectedOutput):
+  """Matches an unordered list of regular expressions.
+
+  The expressions must match all the actual lines, one-to-one, in any
+  order.
+
+  Note: This can give a false negative result (no match) when there is
+  an actual line that matches multiple expressions and a different
+  actual line that matches some but not all of those same
+  expressions.  The implementation matches each expression in turn to
+  the first unmatched actual line that it can match, and does not try
+  all the permutations when there are multiple possible matches.
+  """
+
+  def __init__(self, expected):
+    assert isinstance(expected, list)
+    ExpectedOutput.__init__(self, expected)
+
+  def matches(self, actual):
+    assert actual is not None
+    if not isinstance(actual, list):
+      actual = [actual]
+
+    if len(self.expected) != len(actual):
+      return False
+    # Greedy first-fit matching.  NOTE(review): when ACTUAL is passed in
+    # as a list, matched lines are remove()d from the caller's list.
+    for e in self.expected:
+      expect_re = re.compile(e)
+      for actual_line in actual:
+        if expect_re.match(actual_line):
+          actual.remove(actual_line)
+          break
+      else:
+        # One of the regexes was not found
+        return False
+    return True
+
+  def display_differences(self, message, label, actual):
+    display_lines(message, self.expected, actual,
+                  label + ' (regexp) (unordered)', label)
+
+
+class AlternateOutput(ExpectedOutput):
+  """Matches any one of a list of ExpectedOutput instances.
+  """
+
+  def __init__(self, expected, match_all=True):
+    "EXPECTED is a list of ExpectedOutput instances."
+    assert isinstance(expected, list) and expected != []
+    assert all(isinstance(e, ExpectedOutput) for e in expected)
+    # NOTE(review): MATCH_ALL is accepted but not forwarded to the base
+    # class; each alternative carries its own match_all instead.
+    ExpectedOutput.__init__(self, expected)
+
+  def matches(self, actual):
+    # Succeed as soon as any alternative matches.
+    assert actual is not None
+    for e in self.expected:
+      if e.matches(actual):
+        return True
+    return False
+
+  def display_differences(self, message, label, actual):
+    # For now, just display differences against the first alternative.
+    e = self.expected[0]
+    e.display_differences(message, label, actual)
+
+
+######################################################################
+# Displaying expected and actual output
+
+def display_trees(message, label, expected, actual):
+  'Print two trees, expected and actual.'
+  # Either tree may be None, in which case that side is omitted.
+  if message is not None:
+    logger.warn(message)
+  if expected is not None:
+    logger.warn('EXPECTED %s:', label)
+    svntest.tree.dump_tree(expected)
+  if actual is not None:
+    logger.warn('ACTUAL %s:', label)
+    svntest.tree.dump_tree(actual)
+
+
+def display_lines_diff(expected, actual, expected_label, actual_label):
+  """Print a unified diff between EXPECTED (labeled with EXPECTED_LABEL)
+  and ACTUAL (labeled with ACTUAL_LABEL).
+  Each of EXPECTED and ACTUAL is a string or a list of strings.
+  """
+  if not isinstance(expected, list):
+    expected = [expected]
+  if not isinstance(actual, list):
+    actual = [actual]
+  logger.warn('DIFF ' + expected_label + ':')
+  # Each diff line is prefixed with '| ' to set it off in the log.
+  for x in unified_diff(expected, actual,
+                        fromfile='EXPECTED ' + expected_label,
+                        tofile='ACTUAL ' + actual_label):
+    logger.warn('| ' + x.rstrip())
+
+def display_lines(message, expected, actual,
+                  expected_label, actual_label=None):
+  """Print MESSAGE, unless it is None, then print EXPECTED (labeled
+  with EXPECTED_LABEL) followed by ACTUAL (labeled with ACTUAL_LABEL).
+  Each of EXPECTED and ACTUAL is a string or a list of strings.
+  """
+  if message is not None:
+    logger.warn(message)
+
+  if type(expected) is str:
+    expected = [expected]
+  if type(actual) is str:
+    actual = [actual]
+  # Default the actual label to the expected one when not given.
+  if actual_label is None:
+    actual_label = expected_label
+  if expected is not None:
+    logger.warn('EXPECTED %s:', expected_label)
+    for x in expected:
+      logger.warn('| ' + x.rstrip())
+  if actual is not None:
+    logger.warn('ACTUAL %s:', actual_label)
+    for x in actual:
+      logger.warn('| ' + x.rstrip())
+
+def compare_and_display_lines(message, label, expected, actual,
+                              raisable=None):
+  """Compare two sets of output lines, and print them if they differ,
+  preceded by MESSAGE iff not None.  EXPECTED may be an instance of
+  ExpectedOutput (and if not, it is wrapped as such).  ACTUAL may be a
+  list of newline-terminated lines, or a single string.  RAISABLE is an
+  exception class, an instance of which is thrown if ACTUAL doesn't
+  match EXPECTED."""
+  if raisable is None:
+    raisable = svntest.main.SVNLineUnequal
+  ### It'd be nicer to use createExpectedOutput() here, but its
+  ### semantics don't match all current consumers of this function.
+  assert expected is not None
+  assert actual is not None
+  if not isinstance(expected, ExpectedOutput):
+    expected = ExpectedOutput(expected)
+
+  # Strip debug ("DBG:") lines from the actual output before comparing;
+  # binary output needs the byte-oriented filter variant.
+  actual = svntest.main.ensure_list(actual)
+  if len(actual) > 0:
+    is_binary = not isinstance(actual[0], str)
+    actual = svntest.main.filter_dbg(actual, is_binary)
+
+  if not expected.matches(actual):
+    expected.display_differences(message, label, actual)
+    raise raisable
+
+def verify_outputs(message, actual_stdout, actual_stderr,
+                   expected_stdout, expected_stderr, all_stdout=True):
+  """Compare and display expected vs. actual stderr and stdout lines:
+  if they don't match, print the difference (preceded by MESSAGE iff
+  not None) and raise an exception.
+
+  If EXPECTED_STDERR or EXPECTED_STDOUT is a string the string is
+  interpreted as a regular expression.  For EXPECTED_STDOUT and
+  ACTUAL_STDOUT to match, every line in ACTUAL_STDOUT must match the
+  EXPECTED_STDOUT regex, unless ALL_STDOUT is false.  For
+  EXPECTED_STDERR regexes only one line in ACTUAL_STDERR need match."""
+  expected_stderr = createExpectedOutput(expected_stderr, 'stderr', False)
+  expected_stdout = createExpectedOutput(expected_stdout, 'stdout', all_stdout)
+
+  for (actual, expected, label, raisable) in (
+      (actual_stderr, expected_stderr, 'STDERR', SVNExpectedStderr),
+      (actual_stdout, expected_stdout, 'STDOUT', SVNExpectedStdout)):
+    if expected is None:
+      continue
+
+    # The tuple's RAISABLE is overridden for every expectation type
+    # except AnyOutput, so it only signals "expected some output, got
+    # none" in that case.
+    if isinstance(expected, RegexOutput):
+      raisable = svntest.main.SVNUnmatchedError
+    elif not isinstance(expected, AnyOutput):
+      raisable = svntest.main.SVNLineUnequal
+
+    compare_and_display_lines(message, label, expected, actual, raisable)
+
+def verify_exit_code(message, actual, expected,
+                     raisable=SVNUnexpectedExitCode):
+  """Compare and display expected vs. actual exit codes:
+  if they don't match, print the difference (preceded by MESSAGE iff
+  not None) and raise an exception."""
+
+  if expected != actual:
+    # Reuse the line-display helper; exit codes are shown as strings.
+    display_lines(message, str(expected), str(actual), "Exit Code")
+    raise raisable
+
+# A simple dump file parser. While sufficient for the current
+# testsuite it doesn't cope with all valid dump files.
+class DumpParser:
+  """Parse an 'svnadmin dump' stream, given as a list of (bytes) lines,
+  into a nested dict: {'format': ..., 'uuid': ..., <revnum>: {...}}.
+
+  Parsing is line-by-line with a cursor (self.current).  If IGNORE_SHA1
+  is set, SHA1 headers are parsed but recorded as None (for back-ends
+  that do not store SHA1s).
+  """
+  def __init__(self, lines, ignore_sha1=False):
+    # Index of the next unconsumed line in self.lines.
+    self.current = 0
+    self.lines = lines
+    self.parsed = {}
+    self.ignore_sha1 = ignore_sha1
+
+  def parse_line(self, regex, required=True):
+    """Match REGEX against the current line.  On success consume the
+    line and return group(1).  On failure raise (if REQUIRED) or
+    return None, leaving the cursor in place."""
+    m = re.match(regex, self.lines[self.current])
+    if not m:
+      if required:
+        # Include up to 10 preceding lines for context in the error.
+        raise SVNDumpParseError("expected '%s' at line %d\n%s"
+                                "\nPrevious lines:\n%s"
+                                % (regex, self.current,
+                                   self.lines[self.current],
+                                   ''.join(self.lines[max(0,self.current - 10):self.current])))
+      else:
+        return None
+    self.current += 1
+    return m.group(1)
+
+  def parse_blank(self, required=True):
+    """Consume one blank line; raise/return False as for parse_line."""
+    if self.lines[self.current] != b'\n':  # Works on Windows
+      if required:
+        raise SVNDumpParseError("expected blank at line %d\n%s"
+                                % (self.current, self.lines[self.current]))
+      else:
+        return False
+    self.current += 1
+    return True
+
+  def parse_header(self, header):
+    """Consume one 'Key: value' line and return the (key, value) pair.
+    NOTE(review): the HEADER parameter is never used in the body."""
+    regex = b'([^:]*): (.*)$'
+    m = re.match(regex, self.lines[self.current])
+    if not m:
+      raise SVNDumpParseError("expected a header at line %d, but found:\n%s"
+                              % (self.current, self.lines[self.current]))
+    self.current += 1
+    return m.groups()
+
+  def parse_headers(self):
+    """Consume headers up to (but not including) the next blank line,
+    returning them as a list of (key, value) pairs."""
+    headers = []
+    while self.lines[self.current] != b'\n':
+      # NOTE(review): SELF is passed as the (unused) HEADER argument;
+      # harmless only because parse_header() ignores its parameter.
+      key, val = self.parse_header(self)
+      headers.append((key, val))
+    return headers
+
+
+  # --- One small wrapper per dump-stream header follows.  Each returns
+  # --- the captured value (bytes) or None when optional and absent.
+  def parse_boolean(self, header, required):
+    return self.parse_line(header + b': (false|true)$', required)
+
+  def parse_format(self):
+    return self.parse_line(b'SVN-fs-dump-format-version: ([0-9]+)$')
+
+  def parse_uuid(self):
+    return self.parse_line(b'UUID: ([0-9a-z-]+)$')
+
+  def parse_revision(self):
+    return self.parse_line(b'Revision-number: ([0-9]+)$')
+
+  def parse_prop_delta(self):
+    return self.parse_line(b'Prop-delta: (false|true)$', required=False)
+
+  def parse_prop_length(self, required=True):
+    return self.parse_line(b'Prop-content-length: ([0-9]+)$', required)
+
+  def parse_content_length(self, required=True):
+    return self.parse_line(b'Content-length: ([0-9]+)$', required)
+
+  def parse_path(self):
+    path = self.parse_line(b'Node-path: (.*)$', required=False)
+    return path
+
+  def parse_kind(self):
+    return self.parse_line(b'Node-kind: (.+)$', required=False)
+
+  def parse_action(self):
+    return self.parse_line(b'Node-action: ([0-9a-z-]+)$')
+
+  def parse_copyfrom_rev(self):
+    return self.parse_line(b'Node-copyfrom-rev: ([0-9]+)$', required=False)
+
+  def parse_copyfrom_path(self):
+    # The regex requires a non-empty path; an empty copyfrom path is
+    # handled separately below.
+    path = self.parse_line(b'Node-copyfrom-path: (.+)$', required=False)
+    # NOTE(review): this compares a bytes line against a str literal,
+    # which can never be equal on Python 3 -- confirm intent.
+    if not path and self.lines[self.current] == 'Node-copyfrom-path: \n':
+      self.current += 1
+      path = ''
+    return path
+
+  def parse_copy_md5(self):
+    return self.parse_line(b'Text-copy-source-md5: ([0-9a-z]+)$', required=False)
+
+  def parse_copy_sha1(self):
+    return self.parse_line(b'Text-copy-source-sha1: ([0-9a-z]+)$', required=False)
+
+  def parse_text_md5(self):
+    return self.parse_line(b'Text-content-md5: ([0-9a-z]+)$', required=False)
+
+  def parse_text_sha1(self):
+    return self.parse_line(b'Text-content-sha1: ([0-9a-z]+)$', required=False)
+
+  def parse_text_delta(self):
+    return self.parse_line(b'Text-delta: (false|true)$', required=False)
+
+  def parse_text_delta_base_md5(self):
+    return self.parse_line(b'Text-delta-base-md5: ([0-9a-f]+)$', required=False)
+
+  def parse_text_delta_base_sha1(self):
+    return self.parse_line(b'Text-delta-base-sha1: ([0-9a-f]+)$', required=False)
+
+  def parse_text_length(self):
+    return self.parse_line(b'Text-content-length: ([0-9]+)$', required=False)
+
+  def get_props(self):
+    """Consume a property block up to and including PROPS-END, and
+    return it as a dict {key: value-or-None} (None marks a 'D'elete)."""
+    props = []
+    while not re.match(b'PROPS-END$', self.lines[self.current]):
+      props.append(self.lines[self.current])
+      self.current += 1
+    self.current += 1
+
+    # Split into key/value pairs to do an unordered comparison.
+    # This parses the serialized hash under the assumption that it is valid.
+    prophash = {}
+    curprop = [0]
+    while curprop[0] < len(props):
+      # CURPROP is a one-element list so the closure can advance the
+      # shared cursor in place.
+      def read_key_or_value(curprop):
+        # klen / vlen
+        klen = int(props[curprop[0]].split()[1])
+        curprop[0] += 1
+
+        # key / value
+        key = b''
+        while len(key) != klen + 1:
+          key += props[curprop[0]]
+          curprop[0] += 1
+        key = key[:-1]
+
+        return key
+
+      if props[curprop[0]].startswith(b'K'):
+        key = read_key_or_value(curprop)
+        value = read_key_or_value(curprop)
+      elif props[curprop[0]].startswith(b'D'):
+        key = read_key_or_value(curprop)
+        value = None
+      else:
+        # NOTE(review): a bare 'raise' with no active exception is a
+        # RuntimeError at runtime; an SVNDumpParseError would be clearer.
+        raise
+      prophash[key] = value
+
+    return prophash
+
+  def get_content(self, length):
+    """Consume and return exactly LENGTH bytes of node content
+    (tolerating one trailing newline added by the dump format)."""
+    content = b''
+    while len(content) < length:
+      content += self.lines[self.current]
+      self.current += 1
+    if len(content) == length + 1:
+      content = content[:-1]
+    elif len(content) != length:
+      raise SVNDumpParseError("content length expected %d actual %d at line %d"
+                              % (length, len(content), self.current))
+    return content
+
+  def parse_one_node(self):
+    """Parse one node record (headers, optional props and content, and
+    trailing blank lines); return (action, node-dict)."""
+    node = {}
+
+    # optional 'kind' and required 'action' must be next
+    node['kind'] = self.parse_kind()
+    action = self.parse_action()
+
+    # read any remaining headers
+    headers_list = self.parse_headers()
+    headers = dict(headers_list)
+
+    # Content-length must be last, if present
+    if b'Content-length' in headers and headers_list[-1][0] != b'Content-length':
+      raise SVNDumpParseError("'Content-length' header is not last, "
+                              "in header block ending at line %d"
+                              % (self.current,))
+
+    # parse the remaining optional headers and store in specific keys in NODE
+    for key, header, regex in [
+        ('copyfrom_rev', b'Node-copyfrom-rev', b'([0-9]+)$'),
+        ('copyfrom_path', b'Node-copyfrom-path', b'(.*)$'),
+        ('copy_md5', b'Text-copy-source-md5', b'([0-9a-z]+)$'),
+        ('copy_sha1', b'Text-copy-source-sha1',b'([0-9a-z]+)$'),
+        ('prop_length', b'Prop-content-length', b'([0-9]+)$'),
+        ('text_length', b'Text-content-length', b'([0-9]+)$'),
+        ('text_md5', b'Text-content-md5', b'([0-9a-z]+)$'),
+        ('text_sha1', b'Text-content-sha1', b'([0-9a-z]+)$'),
+        ('content_length', b'Content-length', b'([0-9]+)$'),
+        ]:
+      if not header in headers:
+        node[key] = None
+        continue
+      # SHA1s are discarded when the back-end does not record them.
+      if self.ignore_sha1 and (key in ['copy_sha1', 'text_sha1']):
+        node[key] = None
+        continue
+      m = re.match(regex, headers[header])
+      if not m:
+        raise SVNDumpParseError("expected '%s' at line %d\n%s"
+                                % (regex, self.current,
+                                   self.lines[self.current]))
+      node[key] = m.group(1)
+
+    self.parse_blank()
+    if node['prop_length']:
+      node['props'] = self.get_props()
+    if node['text_length']:
+      node['content'] = self.get_content(int(node['text_length']))
+    # Hard to determine how may blanks is 'correct' (a delete that is
+    # followed by an add that is a replace and a copy has one fewer
+    # than expected but that can't be predicted until seeing the add)
+    # so allow arbitrary number
+    blanks = 0
+    while self.current < len(self.lines) and self.parse_blank(required=False):
+      blanks += 1
+    node['blanks'] = blanks
+    return action, node
+
+  def parse_all_nodes(self):
+    """Parse node records until the next non-node line; return a dict
+    {path: {action: node-dict}}."""
+    nodes = {}
+    while True:
+      if self.current >= len(self.lines):
+        break
+      path = self.parse_path()
+      if path is None:
+        break
+      if not nodes.get(path):
+        nodes[path] = {}
+      action, node = self.parse_one_node()
+      # The same path may appear once per action; twice with the same
+      # action indicates a malformed dump.
+      if nodes[path].get(action):
+        raise SVNDumpParseError("duplicate action '%s' for node '%s' at line %d"
+                                % (action, path, self.current))
+      nodes[path][action] = node
+    return nodes
+
+  def parse_one_revision(self):
+    """Parse one revision record (headers, revprops, nodes); return
+    (revision-number, revision-dict)."""
+    revision = {}
+    number = self.parse_revision()
+    revision['prop_length'] = self.parse_prop_length()
+    revision['content_length'] = self.parse_content_length()
+    self.parse_blank()
+    revision['props'] = self.get_props()
+    self.parse_blank()
+    revision['nodes'] = self.parse_all_nodes()
+    return number, revision
+
+  def parse_all_revisions(self):
+    """Parse revisions to end of input, storing each under its number
+    in self.parsed; duplicates are an error."""
+    while self.current < len(self.lines):
+      number, revision = self.parse_one_revision()
+      if self.parsed.get(number):
+        raise SVNDumpParseError("duplicate revision %d at line %d"
+                                % (number, self.current))
+      self.parsed[number] = revision
+
+  def parse(self):
+    """Parse the whole dump stream and return the result dict."""
+    self.parsed['format'] = self.parse_format()
+    self.parse_blank()
+    self.parsed['uuid'] = self.parse_uuid()
+    self.parse_blank()
+    self.parse_all_revisions()
+    return self.parsed
+
+def compare_dump_files(message, label, expected, actual,
+                       ignore_uuid=False,
+                       expect_content_length_always=False,
+                       ignore_empty_prop_sections=False,
+                       ignore_number_of_blank_lines=False):
+  """Parse two dump files EXPECTED and ACTUAL, both of which are lists
+  of lines as returned by run_and_verify_dump, and check that the same
+  revisions, nodes, properties, etc. are present in both dumps.
+  """
+  # SHA1 headers are dropped from EXPECTED when the filesystem back-end
+  # does not record SHA1s.
+  parsed_expected = DumpParser(expected, not svntest.main.fs_has_sha1()).parse()
+  parsed_actual = DumpParser(actual).parse()
+
+  if ignore_uuid:
+    parsed_expected['uuid'] = '<ignored>'
+    parsed_actual['uuid'] = '<ignored>'
+
+  # Normalize both parsed dumps per the ignore_* flags before comparing.
+  # The parsed dict also carries 'format' and 'uuid' entries, hence the
+  # membership test.  NOTE(review): node records use str keys ('nodes'),
+  # so the b'nodes' test looks like it never matches on Python 3 --
+  # confirm whether these normalizations take effect.
+  for parsed in [parsed_expected, parsed_actual]:
+    for rev_name, rev_record in parsed.items():
+      #print "Found %s" % (rev_name,)
+      if b'nodes' in rev_record:
+        #print "Found %s.%s" % (rev_name, 'nodes')
+        for path_name, path_record in rev_record['nodes'].items():
+          #print "Found %s.%s.%s" % (rev_name, 'nodes', path_name)
+          for action_name, action_record in path_record.items():
+            #print "Found %s.%s.%s.%s" % (rev_name, 'nodes', path_name, action_name)
+
+            if expect_content_length_always:
+              if action_record.get('content_length') == None:
+                #print 'Adding: %s.%s.%s.%s.%s' % (rev_name, 'nodes', path_name, action_name, 'content_length=0')
+                action_record['content_length'] = '0'
+            if ignore_empty_prop_sections:
+              # A prop length of 10 is just 'PROPS-END\n', i.e. empty.
+              if action_record.get('prop_length') == '10':
+                #print 'Removing: %s.%s.%s.%s.%s' % (rev_name, 'nodes', path_name, action_name, 'prop_length')
+                action_record['prop_length'] = None
+                del action_record['props']
+                old_content_length = int(action_record['content_length'])
+                action_record['content_length'] = str(old_content_length - 10)
+            if ignore_number_of_blank_lines:
+              action_record['blanks'] = 0
+
+  if parsed_expected != parsed_actual:
+    print('DIFF of raw dumpfiles (including expected differences)')
+    print(''.join(ndiff(expected, actual)))
+    raise svntest.Failure('DIFF of parsed dumpfiles (ignoring expected differences)\n'
+                          + '\n'.join(ndiff(
+          pprint.pformat(parsed_expected).splitlines(),
+          pprint.pformat(parsed_actual).splitlines())))
+
+##########################################################################################
+## diff verifications
+def is_absolute_url(target):
+  """Return True iff TARGET starts with one of the URL schemes
+  recognized by Subversion (file, http, https, svn, svn+ssh)."""
+  return (target.startswith('file://')
+          or target.startswith('http://')
+          or target.startswith('https://')
+          or target.startswith('svn://')
+          or target.startswith('svn+ssh://'))
+
+def make_diff_header(path, old_tag, new_tag, src_label=None, dst_label=None):
+  """Generate the expected diff header for file PATH, with its old and new
+  versions described in parentheses by OLD_TAG and NEW_TAG.  SRC_LABEL and
+  DST_LABEL are paths or urls that are added to the diff labels if we're
+  diffing against the repository or diffing two arbitrary paths.
+  Return the header as an array of newline-terminated strings."""
+  # Labels are shown with '/' separators; relative ones get a '.../'
+  # prefix, matching how 'svn diff' abbreviates them.
+  if src_label:
+    src_label = src_label.replace('\\', '/')
+    if not is_absolute_url(src_label):
+      src_label = '.../' + src_label
+    src_label = '\t(' + src_label + ')'
+  else:
+    src_label = ''
+  if dst_label:
+    dst_label = dst_label.replace('\\', '/')
+    if not is_absolute_url(dst_label):
+      dst_label = '.../' + dst_label
+    dst_label = '\t(' + dst_label + ')'
+  else:
+    dst_label = ''
+  path_as_shown = path.replace('\\', '/')
+  return [
+    "Index: " + path_as_shown + "\n",
+    "===================================================================\n",
+    "--- " + path_as_shown + src_label + "\t(" + old_tag + ")\n",
+    "+++ " + path_as_shown + dst_label + "\t(" + new_tag + ")\n",
+    ]
+
+def make_no_diff_deleted_header(path, old_tag, new_tag):
+  """Generate the expected diff header for a deleted file PATH when in
+  'no-diff-deleted' mode.  (In that mode, no further details appear after the
+  header.)  Return the header as an array of newline-terminated strings.
+
+  OLD_TAG and NEW_TAG are accepted for signature parity with
+  make_diff_header() but do not appear in this header form."""
+  path_as_shown = path.replace('\\', '/')
+  return [
+    "Index: " + path_as_shown + " (deleted)\n",
+    "===================================================================\n",
+    ]
+
+def make_git_diff_header(target_path, repos_relpath,
+                         old_tag, new_tag, add=False, src_label=None,
+                         dst_label=None, delete=False, text_changes=True,
+                         cp=False, mv=False, copyfrom_path=None,
+                         copyfrom_rev=None):
+  """ Generate the expected 'git diff' header for file TARGET_PATH.
+  REPOS_RELPATH is the location of the path relative to the repository root.
+  The old and new versions ("revision X", or "working copy") must be
+  specified in OLD_TAG and NEW_TAG.
+  SRC_LABEL and DST_LABEL are paths or urls that are added to the diff
+  labels if we're diffing against the repository.  ADD, DELETE, CP and MV
+  denotes the operations performed on the file.  COPYFROM_PATH is the source
+  of a copy or move.  Return the header as an array of newline-terminated
+  strings."""
+
+  path_as_shown = target_path.replace('\\', '/')
+  if src_label:
+    src_label = src_label.replace('\\', '/')
+    src_label = '\t(.../' + src_label + ')'
+  else:
+    src_label = ''
+  if dst_label:
+    dst_label = dst_label.replace('\\', '/')
+    dst_label = '\t(.../' + dst_label + ')'
+  else:
+    dst_label = ''
+
+  # Common "Index:" preamble; operation-specific lines are appended below.
+  output = [
+    "Index: " + path_as_shown + "\n",
+    "===================================================================\n"
+    ]
+  if add:
+    output.extend([
+      "diff --git a/" + repos_relpath + " b/" + repos_relpath + "\n",
+      "new file mode 100644\n",
+      ])
+    if text_changes:
+      output.extend([
+        "--- a/" + repos_relpath + src_label + "\t(" + old_tag + ")\n",
+        "+++ b/" + repos_relpath + dst_label + "\t(" + new_tag + ")\n"
+        ])
+  elif delete:
+    output.extend([
+      "diff --git a/" + repos_relpath + " b/" + repos_relpath + "\n",
+      "deleted file mode 100644\n",
+      ])
+    if text_changes:
+      output.extend([
+        "--- a/" + repos_relpath + src_label + "\t(" + old_tag + ")\n",
+        "+++ b/" + repos_relpath + dst_label + "\t(" + new_tag + ")\n"
+        ])
+  elif cp:
+    # A peg revision, if given, is shown as '@REV' on the copy source.
+    if copyfrom_rev:
+      copyfrom_rev = '@' + copyfrom_rev
+    else:
+      copyfrom_rev = ''
+    output.extend([
+      "diff --git a/" + copyfrom_path + " b/" + repos_relpath + "\n",
+      "copy from " + copyfrom_path + copyfrom_rev + "\n",
+      "copy to " + repos_relpath + "\n",
+      ])
+    if text_changes:
+      output.extend([
+        "--- a/" + copyfrom_path + src_label + "\t(" + old_tag + ")\n",
+        "+++ b/" + repos_relpath + "\t(" + new_tag + ")\n"
+        ])
+  elif mv:
+    output.extend([
+      "diff --git a/" + copyfrom_path + " b/" + path_as_shown + "\n",
+      "rename from " + copyfrom_path + "\n",
+      "rename to " + repos_relpath + "\n",
+      ])
+    if text_changes:
+      output.extend([
+        "--- a/" + copyfrom_path + src_label + "\t(" + old_tag + ")\n",
+        "+++ b/" + repos_relpath + "\t(" + new_tag + ")\n"
+        ])
+  else:
+    # Plain modification.
+    output.extend([
+      "diff --git a/" + repos_relpath + " b/" + repos_relpath + "\n",
+      "--- a/" + repos_relpath + src_label + "\t(" + old_tag + ")\n",
+      "+++ b/" + repos_relpath + dst_label + "\t(" + new_tag + ")\n",
+      ])
+  return output
+
+def make_diff_prop_header(path):
+  """Return a property diff sub-header, as a list of newline-terminated
+  strings."""
+  return [
+    "\n",
+    "Property changes on: " + path.replace('\\', '/') + "\n",
+    "___________________________________________________________________\n"
+    ]
+
+def make_diff_prop_val(plus_minus, pval):
+  "Return diff for prop value PVAL, with leading PLUS_MINUS (+ or -)."
+  # A value without a trailing newline gets the '\ No newline ...'
+  # marker line, mirroring 'svn diff' output.
+  if len(pval) > 0 and pval[-1] != '\n':
+    return [plus_minus + pval + "\n","\\ No newline at end of property\n"]
+  return [plus_minus + pval]
+
+def make_diff_prop_deleted(pname, pval):
+  """Return a property diff for deletion of property PNAME, old value PVAL.
+  PVAL is a single string with no embedded newlines.  Return the result
+  as a list of newline-terminated strings."""
+  return [
+    "Deleted: " + pname + "\n",
+    "## -1 +0,0 ##\n"
+    ] + make_diff_prop_val("-", pval)
+
+def make_diff_prop_added(pname, pval):
+  """Return a property diff for addition of property PNAME, new value PVAL.
+  PVAL is a single string with no embedded newlines.  Return the result
+  as a list of newline-terminated strings."""
+  return [
+    "Added: " + pname + "\n",
+    "## -0,0 +1 ##\n",
+    ] + make_diff_prop_val("+", pval)
+
+def make_diff_prop_modified(pname, pval1, pval2):
+  """Return a property diff for modification of property PNAME, old value
+  PVAL1, new value PVAL2.
+
+  PVAL is a single string with no embedded newlines.  A newline at the
+  end is significant: without it, we add an extra line saying '\ No
+  newline at end of property'.
+
+  Return the result as a list of newline-terminated strings.
+  """
+  return [
+    "Modified: " + pname + "\n",
+    "## -1 +1 ##\n",
+    ] + make_diff_prop_val("-", pval1) + make_diff_prop_val("+", pval2)
+
diff --git a/subversion/tests/cmdline/svntest/wc.py b/subversion/tests/cmdline/svntest/wc.py
new file mode 100644
index 0000000..f805dc9
--- /dev/null
+++ b/subversion/tests/cmdline/svntest/wc.py
@@ -0,0 +1,1205 @@
+#
+# wc.py: functions for interacting with a Subversion working copy
+#
+# Subversion is a tool for revision control.
+# See http://subversion.tigris.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+import os
+import sys
+import re
+import logging
+import pprint
+
+if sys.version_info[0] >= 3:
+ # Python >=3.0
+ from io import StringIO
+ from urllib.parse import quote as urllib_quote
+else:
+ # Python <3.0
+ from cStringIO import StringIO
+ from urllib import quote as urllib_quote
+
+import svntest
+
+logger = logging.getLogger()
+
+
+#
+# 'status -v' output looks like this:
+#
+# "%c%c%c%c%c%c%c %c %6s %6s %-12s %s\n"
+#
+# (Taken from 'print_status' in subversion/svn/status.c.)
+#
+# Here are the parameters. The middle number or string in parens is the
+# match.group(), followed by a brief description of the field:
+#
+# - text status (1) (single letter)
+# - prop status (1) (single letter)
+# - wc-lockedness flag (2) (single letter: "L" or " ")
+# - copied flag (3) (single letter: "+" or " ")
+# - switched flag (4) (single letter: "S", "X" or " ")
+# - repos lock status (5) (single letter: "K", "O", "B", "T", " ")
+# - tree conflict flag (6) (single letter: "C" or " ")
+#
+# [one space]
+#
+# - out-of-date flag (7) (single letter: "*" or " ")
+#
+# [three spaces]
+#
+# - working revision ('wc_rev') (either digits or "-", "?" or " ")
+#
+# [one space]
+#
+# - last-changed revision (either digits or "?" or " ")
+#
+# [one space]
+#
+# - last author (optional string of non-whitespace
+# characters)
+#
+# [spaces]
+#
+# - path ('path') (string of characters until newline)
+#
+# Working revision, last-changed revision, and last author are whitespace
+# only if the item is missing.
+#
+_re_parse_status = re.compile('^([?!MACDRUGXI_~ ][MACDRUG_ ])'
+ '([L ])'
+ '([+ ])'
+ '([SX ])'
+ '([KOBT ])'
+ '([C ]) '
+ '([* ]) +'
+ '((?P<wc_rev>\d+|-|\?) +(\d|-|\?)+ +(\S+) +)?'
+ '(?P<path>.+)$')
+
+_re_parse_status_ex = re.compile('^ ('
+ '( \> moved (from (?P<moved_from>.+)|to (?P<moved_to>.*)))'
+ '|( \> swapped places with (?P<swapped_with>.+).*)'
+ '|(\> (?P<tc>.+))'
+ ')$')
+
+_re_parse_skipped = re.compile("^(Skipped[^']*) '(.+)'( --.*)?\n")
+
+_re_parse_summarize = re.compile("^([MAD ][M ]) (.+)\n")
+
+_re_parse_checkout = re.compile('^([RMAGCUDE_ B][MAGCUDE_ ])'
+ '([B ])'
+ '([CAUD ])\s+'
+ '(.+)')
+_re_parse_co_skipped = re.compile('^(Restored|Skipped|Removed external)'
+ '\s+\'(.+)\'(( --|: ).*)?')
+_re_parse_co_restored = re.compile('^(Restored)\s+\'(.+)\'')
+
+# Lines typically have a verb followed by whitespace then a path.
+_re_parse_commit_ext = re.compile('^(([A-Za-z]+( [a-z]+)*)) \'(.+)\'( --.*)?')
+_re_parse_commit = re.compile('^(\w+( \(bin\))?)\s+(.+)')
+
+#rN: eids 0 15 branches 4
+_re_parse_eid_header = re.compile('^r(-1|[0-9]+): eids ([0-9]+) ([0-9]+) '
+ 'branches ([0-9]+)$')
+# B0.2 root-eid 3
+_re_parse_eid_branch = re.compile('^(B[0-9.]+) root-eid ([0-9]+) num-eids ([0-9]+)( from [^ ]*)?$')
+_re_parse_eid_merge_history = re.compile('merge-history: merge-ancestors ([0-9]+)')
+# e4: normal 6 C
+_re_parse_eid_ele = re.compile('^e([0-9]+): (none|normal|subbranch) '
+ '(-1|[0-9]+) (.*)$')
+
+class State:
+ """Describes an existing or expected state of a working copy.
+
+ The primary metaphor here is a dictionary of paths mapping to instances
+ of StateItem, which describe each item in a working copy.
+
+ Note: the paths should be *relative* to the root of the working copy,
+ using '/' for the separator (see to_relpath()), and the root of the
+ working copy is identified by the empty path: ''.
+ """
+
+ def __init__(self, wc_dir, desc):
+ "Create a State using the specified description."
+ assert isinstance(desc, dict)
+
+ self.wc_dir = wc_dir
+ self.desc = desc # dictionary: path -> StateItem
+
+ def add(self, more_desc):
+ "Add more state items into the State."
+ assert isinstance(more_desc, dict)
+
+ self.desc.update(more_desc)
+
+ def add_state(self, parent, state, strict=False):
+ "Import state items from a State object, reparent the items to PARENT."
+ assert isinstance(state, State)
+
+ for path, item in state.desc.items():
+ if strict:
+ path = parent + path
+ elif path == '':
+ path = parent
+ else:
+ path = parent + '/' + path
+ self.desc[path] = item
+
+ def remove(self, *paths):
+ "Remove PATHS from the state (the paths must exist)."
+ for path in paths:
+ del self.desc[to_relpath(path)]
+
+ def remove_subtree(self, *paths):
+ "Remove PATHS recursively from the state (the paths must exist)."
+ for subtree_path in paths:
+ subtree_path = to_relpath(subtree_path)
+ for path, item in svntest.main.ensure_list(self.desc.items()):
+ if path == subtree_path or path[:len(subtree_path) + 1] == subtree_path + '/':
+ del self.desc[path]
+
+ def copy(self, new_root=None):
+ """Make a deep copy of self. If NEW_ROOT is not None, then set the
+ copy's wc_dir to NEW_ROOT instead of to self's wc_dir."""
+ desc = { }
+ for path, item in self.desc.items():
+ desc[path] = item.copy()
+ if new_root is None:
+ new_root = self.wc_dir
+ return State(new_root, desc)
+
+ def tweak(self, *args, **kw):
+ """Tweak the items' values.
+
+ Each argument in ARGS is the path of a StateItem that already exists in
+ this State. Each keyword argument in KW is a modifiable property of
+ StateItem.
+
+ The general form of this method is .tweak([paths...,] key=value...). If
+ one or more paths are provided, then those items' values are
+ modified. If no paths are given, then all items are modified.
+ """
+ if args:
+ for path in args:
+ try:
+ path_ref = self.desc[to_relpath(path)]
+ except KeyError as e:
+ e.args = ["Path '%s' not present in WC state descriptor" % path]
+ raise
+ path_ref.tweak(**kw)
+ else:
+ for item in self.desc.values():
+ item.tweak(**kw)
+
+ def tweak_some(self, filter, **kw):
+ "Tweak the items for which the filter returns true."
+ for path, item in self.desc.items():
+ if list(filter(path, item)):
+ item.tweak(**kw)
+
+ def rename(self, moves):
+ """Change the path of some items.
+
+ MOVES is a dictionary mapping source path to destination
+ path. Children move with moved parents. All subtrees are moved in
+ reverse depth order to temporary storage before being moved in
+ depth order to the final location. This allows nested moves.
+
+ """
+ temp = {}
+ for src, dst in sorted(moves.items(), key=lambda pair: pair[0])[::-1]:
+ temp[src] = {}
+ for path, item in svntest.main.ensure_list(self.desc.items()):
+ if path == src or path[:len(src) + 1] == src + '/':
+ temp[src][path] = item;
+ del self.desc[path]
+ for src, dst in sorted(moves.items(), key=lambda pair: pair[1]):
+ for path, item in temp[src].items():
+ if path == src:
+ new_path = dst
+ else:
+ new_path = dst + path[len(src):]
+ self.desc[new_path] = item
+
+ def subtree(self, subtree_path):
+ """Return a State object which is a deep copy of the sub-tree
+ beneath SUBTREE_PATH (which is assumed to be rooted at the tree of
+ this State object's WC_DIR). Exclude SUBTREE_PATH itself."""
+ desc = { }
+ for path, item in self.desc.items():
+ if path[:len(subtree_path) + 1] == subtree_path + '/':
+ desc[path[len(subtree_path) + 1:]] = item.copy()
+ return State(self.wc_dir, desc)
+
+ def write_to_disk(self, target_dir):
+ """Construct a directory structure on disk, matching our state.
+
+ WARNING: any StateItem that does not have contents (.contents is None)
+ is assumed to be a directory.
+ """
+ if not os.path.exists(target_dir):
+ os.makedirs(target_dir)
+
+ for path, item in self.desc.items():
+ fullpath = os.path.join(target_dir, path)
+ if item.contents is None:
+ # a directory
+ if not os.path.exists(fullpath):
+ os.makedirs(fullpath)
+ else:
+ # a file
+
+ # ensure its directory exists
+ dirpath = os.path.dirname(fullpath)
+ if not os.path.exists(dirpath):
+ os.makedirs(dirpath)
+
+ # write out the file contents now
+ svntest.main.file_write(fullpath, item.contents, 'wb')
+
+ def normalize(self):
+ """Return a "normalized" version of self.
+
+ A normalized version has the following characteristics:
+
+ * wc_dir == ''
+ * paths use forward slashes
+ * paths are relative
+
+ If self is already normalized, then it is returned. Otherwise, a
+ new State is constructed with (shallow) references to self's
+ StateItem instances.
+
+ If the caller needs a fully disjoint State, then use .copy() on
+ the result.
+ """
+ if self.wc_dir == '':
+ return self
+
+ base = to_relpath(os.path.normpath(self.wc_dir))
+
+ desc = dict([(repos_join(base, path), item)
+ for path, item in self.desc.items()])
+
+ for path, item in desc.copy().items():
+ if item.moved_from or item.moved_to:
+ i = item.copy()
+
+ if i.moved_from:
+ i.moved_from = to_relpath(os.path.normpath(
+ repos_join(base, i.moved_from)))
+ if i.moved_to:
+ i.moved_to = to_relpath(os.path.normpath(
+ repos_join(base, i.moved_to)))
+
+ desc[path] = i
+
+ return State('', desc)
+
+ def compare(self, other):
+ """Compare this State against an OTHER State.
+
+ Three new set objects will be returned: CHANGED, UNIQUE_SELF, and
+ UNIQUE_OTHER. These contain paths of StateItems that are different
+ between SELF and OTHER, paths of items unique to SELF, and paths
+ of items that are unique to OTHER, respectively.
+ """
+ assert isinstance(other, State)
+
+ norm_self = self.normalize()
+ norm_other = other.normalize()
+
+ # fast-path the easy case
+ if norm_self == norm_other:
+ fs = frozenset()
+ return fs, fs, fs
+
+ paths_self = set(norm_self.desc.keys())
+ paths_other = set(norm_other.desc.keys())
+ changed = set()
+ for path in paths_self.intersection(paths_other):
+ if norm_self.desc[path] != norm_other.desc[path]:
+ changed.add(path)
+
+ return changed, paths_self - paths_other, paths_other - paths_self
+
+ def compare_and_display(self, label, other):
+ """Compare this State against an OTHER State, and display differences.
+
+ Information will be written to stdout, displaying any differences
+ between the two states. LABEL will be used in the display. SELF is the
+ "expected" state, and OTHER is the "actual" state.
+
+ If any changes are detected/displayed, then SVNTreeUnequal is raised.
+ """
+ norm_self = self.normalize()
+ norm_other = other.normalize()
+
+ changed, unique_self, unique_other = norm_self.compare(norm_other)
+ if not changed and not unique_self and not unique_other:
+ return
+
+ # Use the shortest path as a way to find the "root-most" affected node.
+ def _shortest_path(path_set):
+ shortest = None
+ for path in path_set:
+ if shortest is None or len(path) < len(shortest):
+ shortest = path
+ return shortest
+
+ if changed:
+ path = _shortest_path(changed)
+ display_nodes(label, path, norm_self.desc[path], norm_other.desc[path])
+ elif unique_self:
+ path = _shortest_path(unique_self)
+ default_singleton_handler('actual ' + label, path, norm_self.desc[path])
+ elif unique_other:
+ path = _shortest_path(unique_other)
+ default_singleton_handler('expected ' + label, path,
+ norm_other.desc[path])
+
+ raise svntest.tree.SVNTreeUnequal
+
+ def tweak_for_entries_compare(self):
+ for path, item in self.desc.copy().items():
+ if item.status and path in self.desc:
+ # If this is an unversioned tree-conflict, remove it.
+ # These are only in their parents' THIS_DIR, they don't have entries.
+ if item.status[0] in '!?' and item.treeconflict == 'C' and \
+ item.entry_status is None:
+ del self.desc[path]
+ # Normal externals are not stored in the parent wc, drop the root
+ # and everything in these working copies
+ elif item.status == 'X ' or item.prev_status == 'X ':
+ del self.desc[path]
+ for p, i in self.desc.copy().items():
+ if p.startswith(path + '/'):
+ del self.desc[p]
+ elif item.entry_kind == 'file':
+ # A file has no descendants in svn_wc_entry_t
+ for p, i in self.desc.copy().items():
+ if p.startswith(path + '/'):
+ del self.desc[p]
+ else:
+ # when reading the entry structures, we don't examine for text or
+ # property mods, so clear those flags. we also do not examine the
+ # filesystem, so we cannot detect missing or obstructed files.
+ if item.status[0] in 'M!~':
+ item.status = ' ' + item.status[1]
+ if item.status[1] == 'M':
+ item.status = item.status[0] + ' '
+ # under wc-ng terms, we may report a different revision than the
+ # backwards-compatible code should report. if there is a special
+ # value for compatibility, then use it.
+ if item.entry_rev is not None:
+ item.wc_rev = item.entry_rev
+ item.entry_rev = None
+ # status might vary as well, e.g. when a directory is missing
+ if item.entry_status is not None:
+ item.status = item.entry_status
+ item.entry_status = None
+ if item.entry_copied is not None:
+ item.copied = item.entry_copied
+ item.entry_copied = None
+ if item.writelocked:
+ # we don't contact the repository, so our only information is what
+ # is in the working copy. 'K' means we have one and it matches the
+ # repos. 'O' means we don't have one but the repos says the item
+ # is locked by us, elsewhere. 'T' means we have one, and the repos
+ # has one, but it is now owned by somebody else. 'B' means we have
+ # one, but the repos does not.
+ #
+ # for each case of "we have one", set the writelocked state to 'K',
+ # and clear it to None for the others. this will match what is
+ # generated when we examine our working copy state.
+ if item.writelocked in 'TB':
+ item.writelocked = 'K'
+ elif item.writelocked == 'O':
+ item.writelocked = None
+ item.moved_from = None
+ item.moved_to = None
+ if path == '':
+ item.switched = None
+ item.treeconflict = None
+
+ def old_tree(self):
+ "Return an old-style tree (for compatibility purposes)."
+ nodelist = [ ]
+ for path, item in self.desc.items():
+ nodelist.append(item.as_node_tuple(os.path.join(self.wc_dir, path)))
+
+ tree = svntest.tree.build_generic_tree(nodelist)
+ if 0:
+ check = tree.as_state()
+ if self != check:
+ logger.warn(pprint.pformat(self.desc))
+ logger.warn(pprint.pformat(check.desc))
+ # STATE -> TREE -> STATE is lossy.
+ # In many cases, TREE -> STATE -> TREE is not.
+ # Even though our conversion from a TREE has lost some information, we
+ # may be able to verify that our lesser-STATE produces the same TREE.
+ svntest.tree.compare_trees('mismatch', tree, check.old_tree())
+
+ return tree
+
+ def __str__(self):
+ return str(self.old_tree())
+
+ def __eq__(self, other):
+ if not isinstance(other, State):
+ return False
+ norm_self = self.normalize()
+ norm_other = other.normalize()
+ return norm_self.desc == norm_other.desc
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ @classmethod
+ def from_status(cls, lines, wc_dir=None):
+ """Create a State object from 'svn status' output."""
+
+ def not_space(value):
+ if value and value != ' ':
+ return value
+ return None
+
+ def parse_move(path, wc_dir):
+ if path.startswith('../'):
+ # ../ style paths are relative from the status root
+ return to_relpath(os.path.normpath(repos_join(wc_dir, path)))
+ else:
+ # Other paths are just relative from cwd
+ return to_relpath(path)
+
+ if not wc_dir:
+ wc_dir = ''
+
+ desc = { }
+ last = None
+ for line in lines:
+ if line.startswith('DBG:'):
+ continue
+
+ match = _re_parse_status.search(line)
+ if not match or match.group(10) == '-':
+
+ ex_match = _re_parse_status_ex.search(line)
+
+ if ex_match:
+ if ex_match.group('moved_from'):
+ path = to_relpath(ex_match.group('moved_from'))
+ last.tweak(moved_from = parse_move(path, wc_dir))
+ elif ex_match.group('moved_to'):
+ path = to_relpath(ex_match.group('moved_to'))
+ last.tweak(moved_to = parse_move(path, wc_dir))
+ elif ex_match.group('swapped_with'):
+ path = to_relpath(ex_match.group('swapped_with'))
+ last.tweak(moved_to = parse_move(path, wc_dir))
+ last.tweak(moved_from = parse_move(path, wc_dir))
+
+ # Parse TC description?
+
+ # ignore non-matching lines, or items that only exist on repos
+ continue
+
+ prev_status = None
+ prev_treeconflict = None
+
+ path = to_relpath(match.group('path'))
+ if path == '.':
+ path = ''
+ if path in desc:
+ prev_status = desc[path].status
+ prev_treeconflict = desc[path].treeconflict
+
+ item = StateItem(status=match.group(1),
+ locked=not_space(match.group(2)),
+ copied=not_space(match.group(3)),
+ switched=not_space(match.group(4)),
+ writelocked=not_space(match.group(5)),
+ treeconflict=not_space(match.group(6)),
+ wc_rev=not_space(match.group('wc_rev')),
+ prev_status=prev_status,
+ prev_treeconflict =prev_treeconflict
+ )
+ desc[path] = item
+ last = item
+
+ return cls('', desc)
+
+ @classmethod
+ def from_skipped(cls, lines):
+ """Create a State object from 'Skipped' lines."""
+
+ desc = { }
+ for line in lines:
+ if line.startswith('DBG:'):
+ continue
+
+ match = _re_parse_skipped.search(line)
+ if match:
+ desc[to_relpath(match.group(2))] = StateItem(
+ verb=(match.group(1).strip(':')))
+
+ return cls('', desc)
+
+ @classmethod
+ def from_summarize(cls, lines):
+ """Create a State object from 'svn diff --summarize' lines."""
+
+ desc = { }
+ for line in lines:
+ if line.startswith('DBG:'):
+ continue
+
+ match = _re_parse_summarize.search(line)
+ if match:
+ desc[to_relpath(match.group(2))] = StateItem(status=match.group(1))
+
+ return cls('', desc)
+
+ @classmethod
+ def from_checkout(cls, lines, include_skipped=True):
+ """Create a State object from 'svn checkout' lines."""
+
+ if include_skipped:
+ re_extra = _re_parse_co_skipped
+ else:
+ re_extra = _re_parse_co_restored
+
+ desc = { }
+ for line in lines:
+ if line.startswith('DBG:'):
+ continue
+
+ match = _re_parse_checkout.search(line)
+ if match:
+ if match.group(3) != ' ':
+ treeconflict = match.group(3)
+ else:
+ treeconflict = None
+ path = to_relpath(match.group(4))
+ prev_status = None
+ prev_verb = None
+ prev_treeconflict = None
+
+ if path in desc:
+ prev_status = desc[path].status
+ prev_verb = desc[path].verb
+ prev_treeconflict = desc[path].treeconflict
+
+ desc[path] = StateItem(status=match.group(1),
+ treeconflict=treeconflict,
+ prev_status=prev_status,
+ prev_verb=prev_verb,
+ prev_treeconflict=prev_treeconflict)
+ else:
+ match = re_extra.search(line)
+ if match:
+ path = to_relpath(match.group(2))
+ prev_status = None
+ prev_verb = None
+ prev_treeconflict = None
+
+ if path in desc:
+ prev_status = desc[path].status
+ prev_verb = desc[path].verb
+ prev_treeconflict = desc[path].treeconflict
+
+ desc[path] = StateItem(verb=match.group(1),
+ prev_status=prev_status,
+ prev_verb=prev_verb,
+ prev_treeconflict=prev_treeconflict)
+
+ return cls('', desc)
+
+ @classmethod
+ def from_commit(cls, lines):
+ """Create a State object from 'svn commit' lines."""
+
+ desc = { }
+ for line in lines:
+ if line.startswith('DBG:') or line.startswith('Transmitting'):
+ continue
+
+ if line.startswith('Committing transaction'):
+ continue
+
+ match = _re_parse_commit_ext.search(line)
+ if match:
+ desc[to_relpath(match.group(4))] = StateItem(verb=match.group(1))
+ continue
+
+ match = _re_parse_commit.search(line)
+ if match:
+ desc[to_relpath(match.group(3))] = StateItem(verb=match.group(1))
+
+ return cls('', desc)
+
+ @classmethod
+ def from_wc(cls, base, load_props=False, ignore_svn=True,
+ keep_eol_style=False):
+ """Create a State object from a working copy.
+
+ Walks the tree at PATH, building a State based on the actual files
+ and directories found. If LOAD_PROPS is True, then the properties
+ will be loaded for all nodes (Very Expensive!). If IGNORE_SVN is
+ True, then the .svn subdirectories will be excluded from the State.
+
+ If KEEP_EOL_STYLE is set, don't let Python normalize the EOL when
+ reading working copy contents as text files. It has no effect on
+ binary files.
+ """
+ if not base:
+ # we're going to walk the base, and the OS wants "."
+ base = '.'
+
+ desc = { }
+ dot_svn = svntest.main.get_admin_name()
+
+ for dirpath, dirs, files in os.walk(base):
+ parent = path_to_key(dirpath, base)
+ if ignore_svn and dot_svn in dirs:
+ dirs.remove(dot_svn)
+ for name in dirs + files:
+ node = os.path.join(dirpath, name)
+ if os.path.isfile(node):
+ try:
+ if keep_eol_style:
+ contents = open(node, 'r', newline='').read()
+ else:
+ contents = open(node, 'r').read()
+ except:
+ contents = open(node, 'rb').read()
+ else:
+ contents = None
+ desc[repos_join(parent, name)] = StateItem(contents=contents)
+
+ if load_props:
+ paths = [os.path.join(base, to_ospath(p)) for p in desc.keys()]
+ paths.append(base)
+ all_props = svntest.tree.get_props(paths)
+ for node, props in all_props.items():
+ if node == base:
+ desc['.'] = StateItem(props=props)
+ else:
+ if base == '.':
+ # 'svn proplist' strips './' from the paths. put it back on.
+ node = os.path.join('.', node)
+ desc[path_to_key(node, base)].props = props
+
+ return cls('', desc)
+
+ @classmethod
+ def from_entries(cls, base):
+ """Create a State object from a working copy, via the old "entries" API.
+
+ Walks the tree at PATH, building a State based on the information
+ provided by the old entries API, as accessed via the 'entries-dump'
+ program.
+ """
+ if not base:
+ # we're going to walk the base, and the OS wants "."
+ base = '.'
+
+ if os.path.isfile(base):
+ # a few tests run status on a single file. quick-and-dirty this. we
+ # really should analyze the entry (similar to below) to be general.
+ dirpath, basename = os.path.split(base)
+ entries = svntest.main.run_entriesdump(dirpath)
+ return cls('', {
+ to_relpath(base): StateItem.from_entry(entries[basename]),
+ })
+
+ desc = { }
+ dump_data = svntest.main.run_entriesdump_tree(base)
+
+ if not dump_data:
+ # Probably 'svn status' run on an actual only node
+ # ### Improve!
+ return cls('', desc)
+
+ dirent_join = repos_join
+ if len(base) == 2 and base[1:]==':' and sys.platform=='win32':
+ # We have a win32 drive-relative path... Ouch. Fix joining
+ def drive_join(a, b):
+ if len(a) == 2:
+ return a+b
+ else:
+ return repos_join(a,b)
+ dirent_join = drive_join
+
+ for parent, entries in sorted(dump_data.items()):
+
+ parent_url = entries[''].url
+
+ for name, entry in entries.items():
+ # if the entry is marked as DELETED *and* it is something other than
+ # schedule-add, then skip it. we can add a new node "over" where a
+ # DELETED node lives.
+ if entry.deleted and entry.schedule != 1:
+ continue
+ # entries that are ABSENT don't show up in status
+ if entry.absent:
+ continue
+ # entries that are User Excluded don't show up in status
+ if entry.depth == -1:
+ continue
+ if name and entry.kind == 2:
+ # stub subdirectory. leave a "missing" StateItem in here. note
+ # that we can't put the status as "! " because that gets tweaked
+ # out of our expected tree.
+ item = StateItem(status=' ', wc_rev='?')
+ desc[dirent_join(parent, name)] = item
+ continue
+ item = StateItem.from_entry(entry)
+ if name:
+ desc[dirent_join(parent, name)] = item
+ implied_url = repos_join(parent_url, svn_uri_quote(name))
+ else:
+ item._url = entry.url # attach URL to directory StateItems
+ desc[parent] = item
+
+ grandpa, this_name = repos_split(parent)
+ if grandpa in desc:
+ implied_url = repos_join(desc[grandpa]._url,
+ svn_uri_quote(this_name))
+ else:
+ implied_url = None
+
+ if implied_url and implied_url != entry.url:
+ item.switched = 'S'
+
+ if entry.file_external:
+ item.switched = 'X'
+
+ return cls('', desc)
+
+ @classmethod
+ def from_eids(cls, lines):
+
+ # Need to read all elements in a branch before we can construct
+ # the full path to an element.
+ # For the full path we use <branch-id>/<path-within-branch>.
+
+ def eid_path(eids, eid):
+ ele = eids[eid]
+ if ele[0] == '-1':
+ return ele[1]
+ parent_path = eid_path(eids, ele[0])
+ if parent_path == '':
+ return ele[1]
+ return parent_path + '/' + ele[1]
+
+ def eid_full_path(eids, eid, branch_id):
+ path = eid_path(eids, eid)
+ if path == '':
+ return branch_id
+ return branch_id + '/' + path
+
+ def add_to_desc(eids, desc, branch_id):
+ for k, v in eids.items():
+ desc[eid_full_path(eids, k, branch_id)] = StateItem(eid=k)
+
+ branch_id = None
+ eids = {}
+ desc = {}
+ for line in lines:
+
+ match = _re_parse_eid_ele.search(line)
+ if match and match.group(2) != 'none':
+ eid = match.group(1)
+ parent_eid = match.group(3)
+ path = match.group(4)
+ if path == '.':
+ path = ''
+ eids[eid] = [parent_eid, path]
+
+ match = _re_parse_eid_branch.search(line)
+ if match:
+ if branch_id:
+ add_to_desc(eids, desc, branch_id)
+ eids = {}
+ branch_id = match.group(1)
+ root_eid = match.group(2)
+
+ match = _re_parse_eid_merge_history.search(line)
+ if match:
+ ### TODO: store the merge history
+ pass
+
+ add_to_desc(eids, desc, branch_id)
+
+ return cls('', desc)
+
+
+class StateItem:
+ """Describes an individual item within a working copy.
+
+ Note that the location of this item is not specified. An external
+ mechanism, such as the State class, will provide location information
+ for each item.
+ """
+
+ def __init__(self, contents=None, props=None,
+ status=None, verb=None, wc_rev=None, entry_kind=None,
+ entry_rev=None, entry_status=None, entry_copied=None,
+ locked=None, copied=None, switched=None, writelocked=None,
+ treeconflict=None, moved_from=None, moved_to=None,
+ prev_status=None, prev_verb=None, prev_treeconflict=None,
+ eid=None):
+ # provide an empty prop dict if it wasn't provided
+ if props is None:
+ props = { }
+
+ ### keep/make these ints one day?
+ if wc_rev is not None:
+ wc_rev = str(wc_rev)
+ if eid is not None:
+ eid = str(eid)
+
+ # Any attribute can be None if not relevant, unless otherwise stated.
+
+ # A string of content (if the node is a file).
+ self.contents = contents
+ # A dictionary mapping prop name to prop value; never None.
+ self.props = props
+ # A two-character string from the first two columns of 'svn status'.
+ self.status = status
+ self.prev_status = prev_status
+ # The action word such as 'Adding' printed by commands like 'svn update'.
+ self.verb = verb
+ self.prev_verb = prev_verb
+ # The base revision number of the node in the WC, as a string.
+ self.wc_rev = wc_rev
+ # If 'file', this node is a file and as such has no svn_wc_entry_t
+ # descendants
+ self.entry_kind = None
+ # These will be set when we expect the wc_rev/status to differ from those
+ # found in the entries code.
+ self.entry_rev = entry_rev
+ self.entry_status = entry_status
+ self.entry_copied = entry_copied
+ # For the following attributes, the value is the status character of that
+ # field from 'svn status', except using value None instead of status ' '.
+ self.locked = locked
+ self.copied = copied
+ self.switched = switched
+ self.writelocked = writelocked
+ # Value 'C', 'A', 'D' or ' ', or None as an expected status meaning 'do not check'.
+ self.treeconflict = treeconflict
+ self.prev_treeconflict = prev_treeconflict
+ # Relative paths to the move locations
+ self.moved_from = moved_from
+ self.moved_to = moved_to
+ self.eid = eid
+
+ def copy(self):
+ "Make a deep copy of self."
+ new = StateItem()
+ vars(new).update(vars(self))
+ new.props = self.props.copy()
+ return new
+
+ def tweak(self, **kw):
+ for name, value in kw.items():
+ # Refine the revision args (for now) to ensure they are strings.
+ if value is not None and name == 'wc_rev':
+ value = str(value)
+ if value is not None and name == 'eid':
+ value = str(value)
+ setattr(self, name, value)
+
+ def __eq__(self, other):
+ if not isinstance(other, StateItem):
+ return False
+ v_self = dict([(k, v) for k, v in vars(self).items()
+ if not k.startswith('_') and not k.startswith('entry_')])
+ v_other = dict([(k, v) for k, v in vars(other).items()
+ if not k.startswith('_') and not k.startswith('entry_')])
+
+ if self.wc_rev == '0' and self.status == 'A ':
+ v_self['wc_rev'] = '-'
+ if other.wc_rev == '0' and other.status == 'A ':
+ v_other['wc_rev'] = '-'
+ return v_self == v_other
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def as_node_tuple(self, path):
+ atts = { }
+ if self.status is not None:
+ atts['status'] = self.status
+ if self.prev_status is not None:
+ atts['prev_status'] = self.prev_status
+ if self.verb is not None:
+ atts['verb'] = self.verb
+ if self.prev_verb is not None:
+ atts['prev_verb'] = self.prev_verb
+ if self.wc_rev is not None:
+ atts['wc_rev'] = self.wc_rev
+ if self.locked is not None:
+ atts['locked'] = self.locked
+ if self.copied is not None:
+ atts['copied'] = self.copied
+ if self.switched is not None:
+ atts['switched'] = self.switched
+ if self.writelocked is not None:
+ atts['writelocked'] = self.writelocked
+ if self.treeconflict is not None:
+ atts['treeconflict'] = self.treeconflict
+ if self.prev_treeconflict is not None:
+ atts['prev_treeconflict'] = self.prev_treeconflict
+ if self.moved_from is not None:
+ atts['moved_from'] = self.moved_from
+ if self.moved_to is not None:
+ atts['moved_to'] = self.moved_to
+ if self.eid is not None:
+ atts['eid'] = self.eid
+
+ return (os.path.normpath(path), self.contents, self.props, atts)
+
+ @classmethod
+ def from_entry(cls, entry):
+ status = ' '
+ if entry.schedule == 1: # svn_wc_schedule_add
+ status = 'A '
+ elif entry.schedule == 2: # svn_wc_schedule_delete
+ status = 'D '
+ elif entry.schedule == 3: # svn_wc_schedule_replace
+ status = 'R '
+ elif entry.conflict_old:
+ ### I'm assuming we only need to check one, rather than all conflict_*
+ status = 'C '
+
+ ### is this sufficient? guessing here w/o investigation.
+ if entry.prejfile:
+ status = status[0] + 'C'
+
+ if entry.locked:
+ locked = 'L'
+ else:
+ locked = None
+
+ if entry.copied:
+ wc_rev = '-'
+ copied = '+'
+ else:
+ if entry.revision == -1:
+ wc_rev = '?'
+ else:
+ wc_rev = entry.revision
+ copied = None
+
+ ### figure out switched
+ switched = None
+
+ if entry.lock_token:
+ writelocked = 'K'
+ else:
+ writelocked = None
+
+ return cls(status=status,
+ wc_rev=wc_rev,
+ locked=locked,
+ copied=copied,
+ switched=switched,
+ writelocked=writelocked,
+ )
+
+
+if os.sep == '/':
+ to_relpath = to_ospath = lambda path: path
+else:
+ def to_relpath(path):
+ """Return PATH but with all native path separators changed to '/'."""
+ return path.replace(os.sep, '/')
+ def to_ospath(path):
+ """Return PATH but with each '/' changed to the native path separator."""
+ return path.replace('/', os.sep)
+
+
+def path_to_key(path, base):
+ """Return the relative path that represents the absolute path PATH under
+ the absolute path BASE. PATH must be a path under BASE. The returned
+ path has '/' separators."""
+ if path == base:
+ return ''
+
+ if base.endswith(os.sep) or base.endswith('/') or base.endswith(':'):
+ # Special path format on Windows:
+ # 'C:/' Is a valid root which includes its separator ('C:/file')
+ # 'C:' is a valid root which isn't followed by a separator ('C:file')
+ #
+ # In this case, we don't need a separator between the base and the path.
+ pass
+ else:
+ # Account for a separator between the base and the relpath we're creating
+ base += os.sep
+
+ assert path.startswith(base), "'%s' is not a prefix of '%s'" % (base, path)
+ return to_relpath(path[len(base):])
+
+
+def repos_split(repos_relpath):
+ """Split a repos path into its directory and basename parts."""
+ idx = repos_relpath.rfind('/')
+ if idx == -1:
+ return '', repos_relpath
+ return repos_relpath[:idx], repos_relpath[idx+1:]
+
+
def repos_join(base, path):
  """Join two repos paths. This generally works for URLs too."""
  # Either side empty: the other side already is the full result.
  if base == '':
    return path
  if path == '':
    return base
  # Insert a '/' only when BASE doesn't already end with one.
  separator = '' if base.endswith('/') else '/'
  return base + separator + path
+
+
def svn_uri_quote(url):
  """Percent-encode URL the way Subversion does.

  svn defines a different set of "safe" characters than Python does, so
  we need to avoid escaping them.  See subr/path.c:uri_char_validity[].
  """
  return urllib_quote(url, "!$&'()*+,-./:=@_~")
+
+
+# ------------
+
def python_sqlite_can_read_wc():
  """Check if the Python builtin is capable enough to peek into wc.db"""
  # SQLite >= 3.6.18 is currently sufficient for the WC formats used by
  # Subversion 1.7-1.9.
  return svntest.sqlite3.sqlite_version_info >= (3, 6, 18)
+
def open_wc_db(local_path):
  """Open the SQLite DB for the WC path LOCAL_PATH.

  Walk upwards from LOCAL_PATH until a directory containing an
  administrative area with a 'wc.db' is found.  Return a 3-tuple
  (open DB object, WC root path, WC relpath of LOCAL_PATH).  Raise
  svntest.Failure if the filesystem root is reached without finding one.
  """
  dot_svn = svntest.main.get_admin_name()
  root_path = local_path
  relpath = ''

  while True:
    db_path = os.path.join(root_path, dot_svn, 'wc.db')
    try:
      db = svntest.sqlite3.connect(db_path)
      break
    except svntest.sqlite3.Error:
      # No usable wc.db at this level; keep walking up.  (The original
      # bare 'except:' also swallowed KeyboardInterrupt/SystemExit.)
      pass
    head, tail = os.path.split(root_path)
    if head == root_path:
      # os.path.split() made no progress: we reached the filesystem root.
      raise svntest.Failure("No DB for " + local_path)
    root_path = head
    relpath = os.path.join(tail, relpath).replace(os.path.sep, '/').rstrip('/')

  return db, root_path, relpath
+
+# ------------
+
def text_base_path(file_path):
  """Return the path to the text-base file for the versioned file
  FILE_PATH."""
  info = svntest.actions.run_and_parse_info(file_path)[0]
  checksum = info['Checksum']
  db, root_path, relpath = open_wc_db(file_path)

  # The pristine store hashes files into '.svn/pristine/<xx>/<checksum>',
  # where <xx> is the first two characters of the checksum.
  admin_dir = svntest.main.get_admin_name()
  base = os.path.join(root_path, admin_dir, 'pristine', checksum[:2], checksum)

  # SVN_WC__VERSION < 29 uses the bare checksum as the file name;
  # >= 29 appends a '.svn-base' suffix.  Accept whichever exists.
  for candidate in (base, base + ".svn-base"):
    if os.path.isfile(candidate):
      return candidate

  raise svntest.Failure("No pristine text for " + relpath)
+
def sqlite_stmt(wc_root_path, stmt):
  """Execute STMT on the SQLite wc.db in WC_ROOT_PATH and return the
  results as a list of row tuples."""
  db = open_wc_db(wc_root_path)[0]
  try:
    c = db.cursor()
    c.execute(stmt)
    return c.fetchall()
  finally:
    # open_wc_db() hands us a fresh connection each call; close it so
    # repeated queries don't accumulate open handles on wc.db.
    db.close()
+
def sqlite_exec(wc_root_path, stmt):
  """Execute STMT on the SQLite wc.db in WC_ROOT_PATH and commit the
  transaction.  Nothing is returned.

  (The original docstring claimed results were returned; it was a
  copy-paste from sqlite_stmt.)
  """
  db = open_wc_db(wc_root_path)[0]
  try:
    c = db.cursor()
    c.execute(stmt)
    db.commit()
  finally:
    # Close the per-call connection opened by open_wc_db().
    db.close()
+
+
+# ------------
+### probably toss these at some point. or major rework. or something.
+### just bootstrapping some changes for now.
+#
+
def item_to_node(path, item):
  """Convert ITEM at PATH into a single tree node for display purposes."""
  # Build a one-item generic tree, then descend through the intermediate
  # single-child directory levels down to the node for ITEM itself.
  tree = svntest.tree.build_generic_tree([item.as_node_tuple(path)])
  while tree.children:
    # Each level has exactly one child by construction.
    assert len(tree.children) == 1
    tree = tree.children[0]
  return tree
+
### yanked from tree.compare_trees()
def display_nodes(label, path, expected, actual):
  """Log a side-by-side dump of the EXPECTED and ACTUAL nodes for PATH
  in the LABEL tree, for diagnosing tree-comparison failures."""
  expected = item_to_node(path, expected)
  actual = item_to_node(path, actual)

  # Build the whole report in memory first so it reaches the log as a
  # single warn() call.
  o = StringIO()
  o.write("=============================================================\n")
  o.write("Expected '%s' and actual '%s' in %s tree are different!\n"
          % (expected.name, actual.name, label))
  o.write("=============================================================\n")
  o.write("EXPECTED NODE TO BE:\n")
  o.write("=============================================================\n")
  expected.pprint(o)
  o.write("=============================================================\n")
  o.write("ACTUAL NODE FOUND:\n")
  o.write("=============================================================\n")
  actual.pprint(o)

  logger.warn(o.getvalue())
  o.close()
+
### yanked from tree.py
def default_singleton_handler(description, path, item):
  """Report ITEM at PATH, present only in the DESCRIPTION tree, then
  abort the comparison by raising SVNTreeUnequal."""
  node = item_to_node(path, item)
  logger.warn("Couldn't find node '%s' in %s tree" % (node.name, description))
  o = StringIO()
  node.pprint(o)
  logger.warn(o.getvalue())
  o.close()
  raise svntest.tree.SVNTreeUnequal
diff --git a/subversion/tests/cmdline/svnversion_tests.py b/subversion/tests/cmdline/svnversion_tests.py
new file mode 100755
index 0000000..2ed6e46
--- /dev/null
+++ b/subversion/tests/cmdline/svnversion_tests.py
@@ -0,0 +1,402 @@
+#!/usr/bin/env python
+#
+# svnversion_tests.py: testing the 'svnversion' tool.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os.path
+import tempfile
+
+# Our testing module
+import svntest
+from svntest import wc
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+#----------------------------------------------------------------------
+
def svnversion_test(sbox):
  "test 'svnversion' on files and directories"
  sbox.build()
  wc_dir = sbox.wc_dir
  repo_url = sbox.repo_url

  # Unmodified
  svntest.actions.run_and_verify_svnversion(wc_dir, repo_url,
                                            [ "1\n" ], [])

  # Unmodified, whole wc switched (a non-matching trail URL makes
  # svnversion report 'S')
  svntest.actions.run_and_verify_svnversion(wc_dir, "some/other/url",
                                            [ "1S\n" ], [])

  mu_path = os.path.join(wc_dir, 'A', 'mu')
  svntest.main.file_append(mu_path, 'appended mu text')

  # Modified file
  svntest.actions.run_and_verify_svnversion(mu_path, repo_url + '/A/mu',
                                            [ "1M\n" ], [])

  # Text modified
  svntest.actions.run_and_verify_svnversion(wc_dir, repo_url,
                                            [ "1M\n" ], [])

  # Commit the mu change so the wc becomes mixed-revision (1:2).
  expected_output = wc.State(wc_dir, {'A/mu' : Item(verb='Sending')})
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/mu', wc_rev=2)
  if svntest.actions.run_and_verify_commit(wc_dir,
                                           expected_output, expected_status):
    raise svntest.Failure

  # Unmodified, mixed
  svntest.actions.run_and_verify_svnversion(wc_dir, repo_url,
                                            [ "1:2\n" ], [])

  svntest.actions.run_and_verify_svn(None, [],
                                     'propset', 'blue', 'azul',
                                     os.path.join(wc_dir, 'A', 'mu'))

  # Prop modified, mixed
  svntest.actions.run_and_verify_svnversion(wc_dir, repo_url,
                                            [ "1:2M\n" ], [])

  # Switch iota to A/D/gamma so part of the wc is switched.
  iota_path = os.path.join(wc_dir, 'iota')
  gamma_url = sbox.repo_url + '/A/D/gamma'
  expected_output = wc.State(wc_dir, {'iota' : Item(status='U ')})
  expected_status.tweak('A/mu', status=' M')
  expected_status.tweak('iota', switched='S', wc_rev=2)
  expected_disk = svntest.main.greek_state.copy()
  expected_disk.tweak('A/mu',
                      contents=expected_disk.desc['A/mu'].contents
                      + 'appended mu text')
  expected_disk.tweak('iota',
                      contents=expected_disk.desc['A/D/gamma'].contents)
  if svntest.actions.run_and_verify_switch(wc_dir, iota_path, gamma_url,
                                           expected_output,
                                           expected_disk,
                                           expected_status,
                                           [],
                                           False, '--ignore-ancestry'):
    raise svntest.Failure

  # Prop modified, mixed, part wc switched
  svntest.actions.run_and_verify_svnversion(wc_dir, repo_url,
                                            [ "1:2MS\n" ], [])

  # Plain (exported) directory that is a direct subdir of a versioned dir
  Q_path = os.path.join(wc_dir, 'Q')
  os.mkdir(Q_path)
  svntest.actions.run_and_verify_svnversion(Q_path, repo_url,
                                            [ "Unversioned directory\n" ], [])

  # Plain (exported) directory that is not a direct subdir of a versioned dir
  R_path = os.path.join(Q_path, 'Q')
  os.mkdir(R_path)
  svntest.actions.run_and_verify_svnversion(R_path, repo_url,
                                            [ "Unversioned directory\n" ], [])

  # Switched file
  svntest.actions.run_and_verify_svnversion(iota_path, repo_url + '/iota',
                                            [ "2S\n" ], [])

  # Unversioned file
  kappa_path = os.path.join(wc_dir, 'kappa')
  svntest.main.file_write(kappa_path, "This is the file 'kappa'.")
  svntest.actions.run_and_verify_svnversion(kappa_path, repo_url,
                                            [ "Unversioned file\n" ], [])

  # Nonexistent file or directory
  X_path = os.path.join(wc_dir, 'Q', 'X')
  svntest.actions.run_and_verify_svnversion(X_path, repo_url,
                                            None, [ "'%s' doesn't exist\n"
                                                    % os.path.abspath(X_path) ])

  # Perform a sparse checkout under the existing WC, and confirm that
  # svnversion detects it as a "partial" WC ('P' suffix).
  A_path = os.path.join(wc_dir, "A")
  A_A_path = os.path.join(A_path, "SPARSE_A")
  expected_output = wc.State(A_path, {
    "SPARSE_A" : Item(),
    "SPARSE_A/mu" : Item(status='A '),
    })
  expected_disk = wc.State("", {
    "mu" : Item(expected_disk.desc['A/mu'].contents),
    })
  svntest.actions.run_and_verify_checkout(repo_url + "/A", A_A_path,
                                          expected_output, expected_disk,
                                          [], "--depth=files")

  # Partial (sparse) checkout
  svntest.actions.run_and_verify_svnversion(A_A_path,
                                            repo_url, [ "2SP\n" ], [])
+
+
+#----------------------------------------------------------------------
+
@Issue(3816)
def ignore_externals(sbox):
  "test 'svnversion' with svn:externals"
  sbox.build()
  wc_dir = sbox.wc_dir
  repo_url = sbox.repo_url

  # Set up an external item pinned to r1 (one dir, one file).
  C_path = os.path.join(wc_dir, "A", "C")
  externals_desc = """\
ext-dir -r 1 %s/A/D/G
ext-file -r 1 %s/A/D/H/omega
""" % (repo_url, repo_url)
  (fd, tmp_f) = tempfile.mkstemp(dir=wc_dir)
  svntest.main.file_append(tmp_f, externals_desc)
  svntest.actions.run_and_verify_svn(None, [],
                                     'pset',
                                     '-F', tmp_f, 'svn:externals', C_path)
  os.close(fd)
  os.remove(tmp_f)
  expected_output = svntest.wc.State(wc_dir, {
    'A/C' : Item(verb='Sending'),
    })
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/C', wc_rev=2)
  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output,
                                        expected_status)

  # Update to get it on disk
  svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
  ext_dir_path = os.path.join(C_path, 'ext-dir')
  ext_file_path = os.path.join(C_path, 'ext-file')
  expected_infos = [
    { 'Revision' : '^1$' },
    { 'Revision' : '^1$' },
  ]
  svntest.actions.run_and_verify_info(expected_infos, ext_dir_path, ext_file_path)

  # The r1-pinned externals must not make svnversion report the wc as
  # mixed-revision or modified (issue #3816): expect plain '2'.
  svntest.actions.run_and_verify_svnversion(wc_dir, repo_url,
                                            [ "2\n" ], [])
+
+#----------------------------------------------------------------------
+
# Test for issue #3461 'excluded subtrees are not detected by svnversion'
@Issue(3461)
def svnversion_with_excluded_subtrees(sbox):
  "test 'svnversion' with excluded subtrees"
  sbox.build()
  wc_dir = sbox.wc_dir
  repo_url = sbox.repo_url

  B_path = os.path.join(wc_dir, "A", "B")
  D_path = os.path.join(wc_dir, "A", "D")
  psi_path = os.path.join(wc_dir, "A", "D", "H", "psi")

  svntest.actions.run_and_verify_svnversion(wc_dir, repo_url,
                                            [ "1\n" ], [])

  # Exclude a directory and check that svnversion detects it ('P' suffix).
  svntest.actions.run_and_verify_svn(None, [],
                                     'up', '--set-depth', 'exclude', B_path)
  svntest.actions.run_and_verify_svnversion(wc_dir, repo_url,
                                            [ "1P\n" ], [])

  # Exclude a file and check that svnversion detects it.  Target the
  # svnversion command on a subtree that does not contain the excluded
  # directory to ensure we are detecting the excluded file itself.
  svntest.actions.run_and_verify_svn(None, [],
                                     'up', '--set-depth', 'exclude', psi_path)
  svntest.actions.run_and_verify_svnversion(D_path, repo_url + '/A/D',
                                            [ "1P\n" ], [])
+
def svnversion_with_structural_changes(sbox):
  "test 'svnversion' with structural changes"
  sbox.build()
  wc_dir = sbox.wc_dir
  repo_url = sbox.repo_url

  # Test a copy: an uncommitted copy gets its own dedicated message
  # rather than a revision number.
  iota_path = os.path.join(wc_dir, 'iota')
  iota_copy_path = os.path.join(wc_dir, 'iota_copy')

  svntest.actions.run_and_verify_svn(None, [],
                                     'cp', iota_path, iota_copy_path)

  svntest.actions.run_and_verify_svnversion(iota_copy_path, repo_url +
                                            '/iota_copy',
                                            [ "Uncommitted local addition, "
                                              "copy or move\n" ],
                                            [])
  C_path = os.path.join(wc_dir, 'A', 'C')
  C_copy_path = os.path.join(wc_dir, 'C_copy')
  svntest.actions.run_and_verify_svn(None, [],
                                     'cp', C_path, C_copy_path)

  svntest.actions.run_and_verify_svnversion(C_copy_path, repo_url +
                                            '/C_copy',
                                            [ "Uncommitted local addition, "
                                              "copy or move\n" ],
                                            [])
  sbox.simple_commit()

  # Test deletion: a scheduled delete counts as a local modification ('M').
  sbox.simple_rm('iota')
  svntest.actions.run_and_verify_svnversion(sbox.ospath('iota'),
                                            repo_url + '/iota',
                                            ["1M\n"],
                                            [],
                                            )
  svntest.actions.run_and_verify_svnversion(wc_dir, repo_url,
                                            [ "1:2M\n" ], [])
+
def committed_revisions(sbox):
  "test 'svnversion --committed'"
  sbox.build()
  wc_dir = sbox.wc_dir
  repo_url = sbox.repo_url

  # Create a mixed last-changed-revision range: iota last changed in r1,
  # its copy iota2 in r2.
  sbox.simple_copy('iota', 'iota2')
  sbox.simple_commit()
  sbox.simple_update()
  svntest.actions.run_and_verify_svnversion(wc_dir, repo_url,
                                            [ "1:2\n" ], [],
                                            "--committed")
+
def non_reposroot_wc(sbox):
  "test 'svnversion' on a non-repos-root working copy"
  # Check out only the subtree A/B rather than the repository root.
  sbox.build(create_wc=False)
  wc_dir = sbox.add_wc_path('wc2')
  repo_url = sbox.repo_url + "/A/B"
  svntest.main.run_svn(None, 'checkout', repo_url, wc_dir)
  svntest.actions.run_and_verify_svnversion(wc_dir, repo_url,
                                            [ "1\n" ], [])
+
@Issue(3858)
def child_switched(sbox):
  "test svnversion output for switched children"
  sbox.build()  # was: sbox.build(read_only = True)
  wc_dir = sbox.wc_dir
  repo_url = sbox.repo_url

  # Copy A to 'branch'.
  sbox.simple_copy('A', 'branch')
  sbox.simple_commit()
  sbox.simple_update()

  ### Target is repos root and WC root.

  # No switches.
  svntest.actions.run_and_verify_svnversion(wc_dir, None,
                                            [ "2\n" ], [])

  # Switch A/B to a sibling.
  sbox.simple_switch(repo_url + '/A/D', 'A/B')

  # This should detect the switch at A/B.
  svntest.actions.run_and_verify_svnversion(wc_dir, None,
                                            [ "2S\n" ], [])

  ### Target is neither repos root nor WC root.

  # But A/B/G and its children are not switched by itself.
  svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir, 'A/B/G'),
                                            None, [ "2\n" ], [])

  # And A/B isn't switched when you look at it directly.
  svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir, 'A/B'),
                                            None, [ "2\n" ], [])

  # Switch branch/D to ^/A/D, then switch branch/D/G back to ^/branch/D/G
  # so the latter is switched relative to its parent but not the WC root.
  sbox.simple_switch(repo_url + '/A/D', 'branch/D')
  sbox.simple_switch(repo_url + '/branch/D/G', 'branch/D/G')

  # This should detect the switch at branch/D and branch/D/G.
  svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir, 'branch'),
                                            None, [ "2S\n" ], [])

  # Directly targeting the switched branch/D should still detect the switch
  # at branch/D/G even though the latter isn't switched against the root of
  # the working copy.
  svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir, 'branch',
                                                         'D'),
                                            None, [ "2S\n" ], [])

  # Switch A/B to ^/.
  sbox.simple_switch(repo_url, 'A/B')
  svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir),
                                            None, [ "2S\n" ], [])
  svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir, 'A'),
                                            None, [ "2S\n" ], [])

  ### Target is repos root but not WC root.

  svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir, 'A', 'B'),
                                            None, [ "2\n" ], [])

  # Switch A/B/A/D/G to ^/A/D/H.
  sbox.simple_switch(repo_url + '/A/D/H', 'A/B/A/D/G')
  svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir, 'A', 'B'),
                                            None, [ "2S\n" ], [])

  ### Target is not repos root but is WC root.

  # Switch the root of the working copy to ^/branch, then switch D/G to
  # ^/A/D/G.
  sbox.simple_switch(repo_url + '/branch', '.')
  sbox.simple_switch(repo_url + '/A/D/G', 'D/G')
  svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir,),
                                            None, [ "2S\n" ], [])

  ### Target is neither repos root nor WC root.

  svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir, 'D'),
                                            None, [ "2S\n" ], [])
  svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir, 'D', 'H'),
                                            None, [ "2\n" ], [])
+
+########################################################################
+# Run the tests
+
+
# list all tests here, starting with None
# (the leading None presumably keeps test numbers 1-based -- see
# svntest.main.run_tests):
test_list = [ None,
              svnversion_test,
              ignore_externals,
              svnversion_with_excluded_subtrees,
              svnversion_with_structural_changes,
              committed_revisions,
              non_reposroot_wc,
              child_switched,
            ]

if __name__ == '__main__':
  svntest.main.run_tests(test_list)
  # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/switch_tests.py b/subversion/tests/cmdline/switch_tests.py
new file mode 100755
index 0000000..bcaaf03
--- /dev/null
+++ b/subversion/tests/cmdline/switch_tests.py
@@ -0,0 +1,2929 @@
+#!/usr/bin/env python
+#
+# switch_tests.py: testing `svn switch'.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import shutil, re, os
+
+# Our testing module
+import svntest
+from svntest import verify, actions, main, deeptrees
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+from svntest.main import SVN_PROP_MERGEINFO, server_has_mergeinfo
+from svntest.deeptrees import do_routine_switching, commit_routine_switching, \
+ get_routine_disk_state, get_routine_status_state
+
+######################################################################
+# Tests
+#
+
+#----------------------------------------------------------------------
+
def routine_switching(sbox):
  "test some basic switching operations"

  sbox.build(read_only = True)

  # Setup (and verify) some switched things
  # (the final argument of 1 requests verification).
  do_routine_switching(sbox.wc_dir, sbox.repo_url, 1)
+
+
+#----------------------------------------------------------------------
+
def commit_switched_things(sbox):
  "commits after some basic switching operations"

  sbox.build()
  wc_dir = sbox.wc_dir

  # Setup some switched things (don't bother verifying)
  do_routine_switching(wc_dir, sbox.repo_url, 0)

  # Commit some stuff (and verify)
  commit_routine_switching(wc_dir, 1)
+
+
+#----------------------------------------------------------------------
+
def full_update(sbox):
  "update wc that contains switched things"

  sbox.build()
  wc_dir = sbox.wc_dir

  # Setup some switched things (don't bother verifying)
  do_routine_switching(wc_dir, sbox.repo_url, 0)

  # Copy wc_dir to a backup location
  wc_backup = sbox.add_wc_path('backup')
  svntest.actions.duplicate_dir(wc_dir, wc_backup)

  # Commit some stuff (don't bother verifying)
  commit_routine_switching(wc_backup, 0)

  # Some convenient path variables
  # (not all of these are referenced below)
  iota_path = sbox.ospath('iota')
  gamma_path = sbox.ospath('A/D/gamma')
  Bpi_path = sbox.ospath('A/B/pi')
  BZ_path = sbox.ospath('A/B/Z')
  Bzeta_path = sbox.ospath('A/B/Z/zeta')
  Gpi_path = sbox.ospath('A/D/G/pi')
  GZ_path = sbox.ospath('A/D/G/Z')
  Gzeta_path = sbox.ospath('A/D/G/Z/zeta')

  # Create expected output tree for an update of wc_backup.
  expected_output = svntest.wc.State(wc_dir, {
    'iota' : Item(status='U '),
    'A/D/gamma' : Item(status='U '),
    'A/B/pi' : Item(status='U '),
    'A/B/Z' : Item(status='A '),
    'A/B/Z/zeta' : Item(status='A '),
    'A/D/G/pi' : Item(status='U '),
    'A/D/G/Z' : Item(status='A '),
    'A/D/G/Z/zeta' : Item(status='A '),
    })

  # Create expected disk tree for the update
  expected_disk = get_routine_disk_state(wc_dir)
  expected_disk.tweak('iota', contents="This is the file 'gamma'.\napple")
  expected_disk.tweak('A/D/gamma', contents="This is the file 'gamma'.\napple")
  expected_disk.tweak('A/B/pi', contents="This is the file 'pi'.\nmelon")
  expected_disk.tweak('A/D/G/pi', contents="This is the file 'pi'.\nmelon")
  expected_disk.add({
    'A/B/Z' : Item(),
    'A/B/Z/zeta' : Item(contents="This is the file 'zeta'.\n"),
    'A/D/G/Z' : Item(),
    'A/D/G/Z/zeta' : Item(contents="This is the file 'zeta'.\n"),
    })

  # Create expected status tree for the update.
  expected_status = get_routine_status_state(wc_dir)
  expected_status.tweak(wc_rev=2)
  expected_status.add({
    'A/D/G/Z' : Item(status='  ', wc_rev=2),
    'A/D/G/Z/zeta' : Item(status='  ', wc_rev=2),
    'A/B/Z' : Item(status='  ', wc_rev=2),
    'A/B/Z/zeta' : Item(status='  ', wc_rev=2),
    })
  expected_status.tweak('iota', 'A/B', switched='S')

  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        expected_status)
+
+#----------------------------------------------------------------------
+
def full_rev_update(sbox):
  "reverse update wc that contains switched things"

  sbox.build()
  wc_dir = sbox.wc_dir

  # Setup some switched things (don't bother verifying)
  do_routine_switching(wc_dir, sbox.repo_url, 0)

  # Commit some stuff (don't bother verifying)
  commit_routine_switching(wc_dir, 0)

  # Update to HEAD (tested elsewhere)
  svntest.main.run_svn(None, 'up', wc_dir)

  # Some convenient path variables
  # (not all of these are referenced below)
  iota_path = sbox.ospath('iota')
  gamma_path = sbox.ospath('A/D/gamma')
  Bpi_path = sbox.ospath('A/B/pi')
  BZ_path = sbox.ospath('A/B/Z')
  Gpi_path = sbox.ospath('A/D/G/pi')
  GZ_path = sbox.ospath('A/D/G/Z')

  # Now, reverse update, back to the pre-commit state.
  expected_output = svntest.wc.State(wc_dir, {
    'iota' : Item(status='U '),
    'A/D/gamma' : Item(status='U '),
    'A/B/pi' : Item(status='U '),
    'A/B/Z' : Item(status='D '),
    'A/D/G/pi' : Item(status='U '),
    'A/D/G/Z' : Item(status='D '),
    })

  # Create expected disk tree
  expected_disk = get_routine_disk_state(wc_dir)

  # Create expected status
  expected_status = get_routine_status_state(wc_dir)
  expected_status.tweak('iota', 'A/B', switched='S')

  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        expected_status,
                                        [], True,
                                        '-r', '1', wc_dir)
+
+#----------------------------------------------------------------------
+
def update_switched_things(sbox):
  "update switched wc things to HEAD"

  sbox.build()
  wc_dir = sbox.wc_dir

  # Setup some switched things (don't bother verifying)
  do_routine_switching(wc_dir, sbox.repo_url, 0)

  # Copy wc_dir to a backup location
  wc_backup = sbox.add_wc_path('backup')
  svntest.actions.duplicate_dir(wc_dir, wc_backup)

  # Commit some stuff (don't bother verifying)
  commit_routine_switching(wc_backup, 0)

  # Some convenient path variables
  iota_path = sbox.ospath('iota')
  B_path = sbox.ospath('A/B')

  # Create expected output tree for an update of wc_backup.
  # Only the two switched targets (iota and A/B) are updated below.
  expected_output = svntest.wc.State(wc_dir, {
    'iota' : Item(status='U '),
    'A/B/pi' : Item(status='U '),
    'A/B/Z' : Item(status='A '),
    'A/B/Z/zeta' : Item(status='A '),
    })

  # Create expected disk tree for the update
  expected_disk = get_routine_disk_state(wc_dir)
  expected_disk.tweak('iota', contents="This is the file 'gamma'.\napple")

  expected_disk.tweak('A/B/pi', contents="This is the file 'pi'.\nmelon")
  expected_disk.add({
    'A/B/Z' : Item(),
    'A/B/Z/zeta' : Item("This is the file 'zeta'.\n"),
    })

  # Create expected status tree for the update.
  expected_status = get_routine_status_state(wc_dir)
  expected_status.tweak('iota', 'A/B', switched='S')
  expected_status.tweak('A/B', 'A/B/pi', 'A/B/rho', 'A/B/tau', 'iota',
                        wc_rev=2)
  expected_status.add({
    'A/B/Z' : Item(status='  ', wc_rev=2),
    'A/B/Z/zeta' : Item(status='  ', wc_rev=2),
    })

  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        expected_status,
                                        [], False,
                                        B_path,
                                        iota_path)
+
+
+#----------------------------------------------------------------------
+
def rev_update_switched_things(sbox):
  "reverse update switched wc things to an older rev"

  sbox.build()
  wc_dir = sbox.wc_dir

  # Setup some switched things (don't bother verifying)
  do_routine_switching(wc_dir, sbox.repo_url, 0)

  # Commit some stuff (don't bother verifying)
  commit_routine_switching(wc_dir, 0)

  # Some convenient path variables
  iota_path = sbox.ospath('iota')
  B_path = sbox.ospath('A/B')

  # Update to HEAD (tested elsewhere)
  svntest.main.run_svn(None, 'up', wc_dir)

  # Now, reverse update, back to the pre-commit state.
  # Only the switched targets (iota and A/B) are reverted to r1.
  expected_output = svntest.wc.State(wc_dir, {
    'iota' : Item(status='U '),
    'A/B/pi' : Item(status='U '),
    'A/B/Z' : Item(status='D '),
    })

  # Create expected disk tree
  expected_disk = get_routine_disk_state(wc_dir)
  expected_disk.tweak('A/D/gamma', contents="This is the file 'gamma'.\napple")
  expected_disk.tweak('A/D/G/pi', contents="This is the file 'pi'.\nmelon")
  expected_disk.add({
    'A/D/G/Z' : Item(),
    'A/D/G/Z/zeta' : Item("This is the file 'zeta'.\n"),
    })

  # Create expected status tree for the update.
  # Everything is at r2 except the reverse-updated switched targets.
  expected_status = get_routine_status_state(wc_dir)
  expected_status.tweak(wc_rev=2)
  expected_status.tweak('iota', 'A/B', switched='S')
  expected_status.tweak('A/B', 'A/B/pi', 'A/B/rho', 'A/B/tau', 'iota',
                        wc_rev=1)
  expected_status.add({
    'A/D/G/Z' : Item(status='  ', wc_rev=2),
    'A/D/G/Z/zeta' : Item(status='  ', wc_rev=2),
    })

  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        expected_status,
                                        [], True,
                                        '-r', '1',
                                        B_path,
                                        iota_path)
+
+
+#----------------------------------------------------------------------
+
def log_switched_file(sbox):
  "show logs for a switched file"

  sbox.build()
  wc_dir = sbox.wc_dir

  # Setup some switched things (don't bother verifying)
  do_routine_switching(wc_dir, sbox.repo_url, 0)

  # edit and commit switched file 'iota'
  iota_path = sbox.ospath('iota')
  svntest.main.run_svn(None, 'ps', 'x', 'x', iota_path)
  svntest.main.run_svn(None,
                       'ci', '-m',
                       'set prop on switched iota',
                       iota_path)

  # log switched file 'iota': the commit message must show up.
  exit_code, output, error = svntest.main.run_svn(None, 'log', iota_path)
  for line in output:
    if line.find("set prop on switched iota") != -1:
      break
  else:
    # for/else: reached only when the loop never hit 'break',
    # i.e. the log message was not found.
    raise svntest.Failure
+
+#----------------------------------------------------------------------
+
def delete_subdir(sbox):
  "switch that deletes a sub-directory"
  sbox.build()
  wc_dir = sbox.wc_dir

  A_path = sbox.ospath('A')
  A_url = sbox.repo_url + '/A'
  A2_url = sbox.repo_url + '/A2'
  A2_B_F_url = sbox.repo_url + '/A2/B/F'

  # r2: branch A to A2; r3: delete A2/B/F on the branch.
  svntest.actions.run_and_verify_svn(['Committing transaction...\n',
                                      'Committed revision 2.\n'], [],
                                     'cp', '-m', 'make copy', A_url, A2_url)

  svntest.actions.run_and_verify_svn(['Committing transaction...\n',
                                      'Committed revision 3.\n'], [],
                                     'rm', '-m', 'delete subdir', A2_B_F_url)

  expected_output = svntest.wc.State(wc_dir, {
    'A/B/F' : Item(status='D '),
    })
  expected_disk = svntest.main.greek_state.copy()
  expected_disk.remove('A/B/F')
  expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
  expected_status.tweak('A', switched='S')
  expected_status.remove('A/B/F')
  expected_status.tweak('', 'iota', wc_rev=1)

  # Used to fail with a 'directory not locked' error for A/B/F
  svntest.actions.run_and_verify_switch(wc_dir, A_path, A2_url,
                                        expected_output,
                                        expected_disk,
                                        expected_status,
                                        [], False,
                                        '--ignore-ancestry')
+
+#----------------------------------------------------------------------
+# Issue 1532: Switch a file to a dir: can't switch it back to the file
@XFail()
@Issue(1532)
def file_dir_file(sbox):
  "switch a file to a dir and back to the file"
  sbox.build(read_only = True)
  wc_dir = sbox.wc_dir

  file_path = sbox.ospath('iota')
  file_url = sbox.repo_url + '/iota'
  dir_url = sbox.repo_url + '/A/C'

  # Switching the file to a directory works...
  svntest.actions.run_and_verify_svn(None, [], 'switch',
                                     '--ignore-ancestry', dir_url, file_path)
  if not os.path.isdir(file_path):
    raise svntest.Failure

  # The reason the following switch currently fails is that the node
  # is determined to be a 'root', because it is switched against its parent.
  # In this specific case the switch editor is designed to be rooted on the node
  # itself instead of its ancestor.  If you would use sbox.ospath('A') for
  # file_path the switch works both ways.
  svntest.actions.run_and_verify_svn(None, [], 'switch',
                                     '--ignore-ancestry', file_url, file_path)
  if not os.path.isfile(file_path):
    raise svntest.Failure
+
+#----------------------------------------------------------------------
+# Issue 1751: "svn switch --non-recursive" does not switch existing files,
+# and generates the wrong URL for new files.
+
def nonrecursive_switching(sbox):
  "non-recursive switch"
  sbox.build()
  wc1_dir = sbox.wc_dir
  wc2_dir = os.path.join(wc1_dir, 'wc2')

  # "Trunk" will be the existing dir "A/", with existing file "mu".
  # "Branch" will be the new dir "branch/version1/", with added file "newfile".
  # "wc1" will hold the whole repository (including trunk and branch).
  # "wc2" will hold the "trunk" and then be switched to the "branch".
  # It is irrelevant that wc2 is located on disk as a sub-directory of wc1.
  trunk_url = sbox.repo_url + '/A'
  branch_url = sbox.repo_url + '/branch'
  version1_url = branch_url + '/version1'
  wc1_new_file = os.path.join(wc1_dir, 'branch', 'version1', 'newfile')
  wc2_new_file = os.path.join(wc2_dir, 'newfile')
  wc2_mu_file = os.path.join(wc2_dir, 'mu')
  wc2_B_dir = os.path.join(wc2_dir, 'B')
  wc2_C_dir = os.path.join(wc2_dir, 'C')
  wc2_D_dir = os.path.join(wc2_dir, 'D')

  # Check out the trunk as "wc2"
  svntest.main.run_svn(None, 'co', trunk_url, wc2_dir)

  # Make a branch, and add a new file, in "wc_dir" and repository
  svntest.main.run_svn(None,
                       'mkdir', '-m', '', branch_url)
  svntest.main.run_svn(None,
                       'cp', '-m', '', trunk_url, version1_url)
  svntest.main.run_svn(None,
                       'up', wc1_dir)
  svntest.main.file_append(wc1_new_file, "This is the file 'newfile'.\n")
  svntest.main.run_svn(None, 'add', wc1_new_file)
  sbox.simple_commit()

  # Try to switch "wc2" to the branch (non-recursively)
  svntest.actions.run_and_verify_svn(None, [], 'switch', '-N',
                                     '--ignore-ancestry', version1_url, wc2_dir)

  # Check the URLs of the (not switched) directories.
  expected_infos = [
    { 'URL' : '.*/A/B$' },
    { 'URL' : '.*/A/C$' },
    { 'URL' : '.*/A/D$' },
  ]
  svntest.actions.run_and_verify_info(expected_infos,
                                      wc2_B_dir, wc2_C_dir, wc2_D_dir)

  # Check the URLs of the switched files.
  # ("svn status -u" might be a better check: it fails when newfile's URL
  # is bad, and shows "S" when mu's URL is wrong.)
  # mu: not switched
  expected_infos = [
    { 'URL' : '.*/branch/version1/mu$' },
    { 'URL' : '.*/branch/version1/newfile$' }, # newfile: wrong URL
  ]
  svntest.actions.run_and_verify_info(expected_infos,
                                      wc2_mu_file, wc2_new_file)
+
+
+#----------------------------------------------------------------------
+def failed_anchor_is_target(sbox):
+  "anchor=target, try to replace a local-mod file"
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Set up a switch from dir H, containing locally-modified file 'psi',
+  # to dir G, containing a directory 'psi'. Expect a tree conflict.
+
+  # Make a directory 'G/psi' in the repository.
+  G_url = sbox.repo_url + '/A/D/G'
+  G_psi_url = G_url + '/psi'
+  svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+                                      'Committed revision 2.\n'], [],
+                                     'mkdir', '-m', 'log msg', G_psi_url)
+
+  # Modify the file 'H/psi' locally.
+  H_path = sbox.ospath('A/D/H')
+  psi_path = os.path.join(H_path, 'psi')
+  svntest.main.file_append(psi_path, "more text")
+
+  # This switch raises a tree conflict on 'psi', because of the local mods.
+  svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [],
+                                     'switch', '--ignore-ancestry',
+                                     G_url, H_path)
+
+  # After the switch, H is switched: its old files chi/omega are gone,
+  # G's files pi/tau/rho are in, and 'psi' is a replacement ('R') with a
+  # tree conflict because of the local modification.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/D/H', switched='S', wc_rev=2)
+  expected_status.tweak('A/D/H/psi', status='R ', copied='+',
+                        wc_rev='-', treeconflict='C')
+  expected_status.remove('A/D/H/chi', 'A/D/H/omega')
+  expected_status.add({
+    'A/D/H/pi'      : Item(status='  ', wc_rev=2),
+    'A/D/H/tau'     : Item(status='  ', wc_rev=2),
+    'A/D/H/rho'     : Item(status='  ', wc_rev=2),
+    })
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # There was a bug whereby the failed switch left the wrong URL in
+  # the target directory H. Check for that.
+  expected_infos = [
+      { 'URL' : '.*' + G_url + '$' },
+    ]
+  svntest.actions.run_and_verify_info(expected_infos, H_path)
+
+  # Resolve tree conflict at psi.
+  svntest.actions.run_and_verify_resolved([psi_path])
+
+  # The switch should now be complete.
+  ### Instead of "treeconflict=None" which means "don't check", we should
+  # check "treeconflict=' '" but the test suite doesn't do the right thing.
+  expected_status.tweak('A/D/H/psi', treeconflict=None)
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+# Issue #1826 - svn switch temporarily drops invalid URLs into the entries
+# files (which become not-temporary if the switch fails).
+def bad_intermediate_urls(sbox):
+  "bad intermediate urls in use"
+  # Issue #1826: a failed switch must not leave invalid intermediate URLs
+  # recorded in the working copy.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  url = sbox.repo_url
+
+  A = sbox.ospath('A')
+  A_Z = sbox.ospath('A/Z')
+  url_A_C = url + '/A/C'
+  url_A_C_A = url + '/A/C/A'
+  url_A_C_A_Z = url + '/A/C/A/Z'
+
+  # We'll be switching our working copy to (a modified) A/C in the Greek tree.
+
+  # First, make an extra subdirectory in C to match one in the root, plus
+  # another one inside of that.
+  svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+                                      'Committed revision 2.\n'], [],
+                                     'mkdir', '-m', 'log msg',
+                                     url_A_C_A, url_A_C_A_Z)
+
+  # Now, we'll drop a conflicting path under the root.
+  svntest.main.file_append(A_Z, 'Look, Mom, a ... tree conflict.')
+
+  #svntest.factory.make(sbox, """
+  #  svn switch url/A/C wc_dir
+  #  # svn info A
+  #  # check that we can recover from the tree conflict
+  #  rm A/Z
+  #  svn up
+  #  """)
+  #exit(0)
+
+  # svn switch url/A/C wc_dir
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/mu'              : Item(status='D '),
+    'A/Z'               : Item(status='  ', treeconflict='C'),
+    'A/C'               : Item(status='D '),
+    'A/B'               : Item(status='D '),
+    'A/D'               : Item(status='D '),
+    'iota'              : Item(status='D '),
+  })
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.remove('iota', 'A/B', 'A/B/E', 'A/B/E/beta', 'A/B/E/alpha',
+    'A/B/F', 'A/B/lambda', 'A/D', 'A/D/G', 'A/D/G/rho', 'A/D/G/pi',
+    'A/D/G/tau', 'A/D/H', 'A/D/H/psi', 'A/D/H/omega', 'A/D/H/chi',
+    'A/D/gamma', 'A/mu', 'A/C')
+  expected_disk.add({
+    'A/Z'               : Item(contents="Look, Mom, a ... tree conflict."),
+  })
+
+  expected_status = actions.get_virginal_state(wc_dir, 2)
+  expected_status.remove('iota', 'A/B', 'A/B/E', 'A/B/E/beta', 'A/B/E/alpha',
+    'A/B/F', 'A/B/lambda', 'A/D', 'A/D/G', 'A/D/G/rho', 'A/D/G/pi',
+    'A/D/G/tau', 'A/D/H', 'A/D/H/psi', 'A/D/H/omega', 'A/D/H/chi',
+    'A/D/gamma', 'A/mu', 'A/C')
+  expected_status.add({
+    # Obstructed node is currently turned into a delete to allow resolving.
+    'A/Z'               : Item(status='D ', treeconflict='C', wc_rev=2),
+  })
+
+  actions.run_and_verify_switch(wc_dir, wc_dir, url_A_C, expected_output,
+                                expected_disk, expected_status,
+                                [], False,
+                                '--ignore-ancestry')
+
+  # However, the URL for wc/A should now reflect ^/A/C/A, not something else.
+  expected_infos = [
+      { 'URL' : '.*/A/C/A$' },
+    ]
+  svntest.actions.run_and_verify_info(expected_infos, A)
+
+
+  # check that we can recover from the tree conflict
+  # rm A/Z
+  os.remove(A_Z)
+  svntest.main.run_svn(None, 'revert', A_Z)
+
+  # svn up
+  expected_output = svntest.wc.State(wc_dir, {
+  })
+
+  expected_disk.tweak('A/Z', contents=None)
+
+  expected_status.tweak(status='  ', wc_rev='2')
+  expected_status.tweak('A/Z', treeconflict=None)
+
+  actions.run_and_verify_update(wc_dir, expected_output, expected_disk,
+                                expected_status)
+
+
+
+
+#----------------------------------------------------------------------
+# Regression test for issue #1825: failed switch may corrupt
+# working copy
+@Issue(1825)
+def obstructed_switch(sbox):
+  "obstructed switch"
+  # Regression test for issue #1825: a switch that runs into an unversioned
+  # obstruction must raise a tree conflict instead of corrupting the WC.
+  #svntest.factory.make(sbox, """svn cp -m msgcopy url/A/B/E url/A/B/Esave
+  #                              svn rm A/B/E/alpha
+  #                              svn commit
+  #                              echo "hello" >> A/B/E/alpha
+  #                              svn switch url/A/B/Esave A/B/E
+  #                              svn status
+  #                              svn info A/B/E/alpha""")
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  url = sbox.repo_url
+
+  A_B_E = sbox.ospath('A/B/E')
+  A_B_E_alpha = sbox.ospath('A/B/E/alpha')
+  url_A_B_E = url + '/A/B/E'
+  url_A_B_Esave = url + '/A/B/Esave'
+
+  # svn cp -m msgcopy url/A/B/E url/A/B/Esave
+  expected_stdout = [
+    'Committing transaction...\n',
+    'Committed revision 2.\n',
+  ]
+
+  actions.run_and_verify_svn2(expected_stdout, [], 0, 'cp', '-m',
+                              'msgcopy', url_A_B_E, url_A_B_Esave)
+
+  # svn rm A/B/E/alpha
+  expected_stdout = ['D         ' + A_B_E_alpha + '\n']
+
+  actions.run_and_verify_svn2(expected_stdout, [], 0, 'rm',
+                              A_B_E_alpha)
+
+  # svn commit
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/B/E/alpha'       : Item(verb='Deleting'),
+  })
+
+  expected_status = actions.get_virginal_state(wc_dir, 1)
+  expected_status.remove('A/B/E/alpha')
+
+  actions.run_and_verify_commit(wc_dir, expected_output, expected_status)
+
+  # echo "hello" >> A/B/E/alpha
+  # (re-creates alpha as an unversioned file obstructing the incoming add)
+  main.file_append(A_B_E_alpha, 'hello')
+
+  # svn switch url/A/B/Esave A/B/E
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/B/E/alpha'       : Item(status='  ', treeconflict='C'),
+  })
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.tweak('A/B/E/alpha', contents='hello')
+
+  expected_status.add({
+    'A/B/E/alpha'       : Item(status='D ', treeconflict='C', wc_rev=3),
+  })
+  expected_status.tweak('A/B/E', wc_rev='3', switched='S')
+  expected_status.tweak('A/B/E/beta', wc_rev='3')
+
+  actions.run_and_verify_switch(wc_dir, A_B_E, url_A_B_Esave,
+                                expected_output, expected_disk,
+                                expected_status,
+                                [], False, '--ignore-ancestry')
+
+  # svn status
+  expected_status.add({
+    'A/B/Esave'         : Item(status='  '),
+    'A/B/Esave/beta'    : Item(status='  '),
+    'A/B/Esave/alpha'   : Item(status='  '),
+  })
+
+  actions.run_and_verify_unquiet_status(wc_dir, expected_status)
+
+  # svn info A/B/E/alpha
+  expected_stdout = verify.RegexOutput(
+    ".*local file unversioned, incoming file add upon switch",
+    match_all=False)
+  actions.run_and_verify_svn2(expected_stdout, [], 0, 'info',
+                              A_B_E_alpha)
+
+
+#----------------------------------------------------------------------
+# Issue 2353.
+def commit_mods_below_switch(sbox):
+  "commit with mods below switch"
+  # Issue 2353: committing targets both inside and outside a switched
+  # subtree used to fail when the switched dir was misclassified as a
+  # working-copy root.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  C_path = sbox.ospath('A/C')
+  B_url = sbox.repo_url + '/A/B'
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/C/E'             : Item(status='A '),
+    'A/C/E/alpha'       : Item(status='A '),
+    'A/C/E/beta'        : Item(status='A '),
+    'A/C/F'             : Item(status='A '),
+    'A/C/lambda'        : Item(status='A '),
+    })
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({
+    'A/C/E'             : Item(),
+    'A/C/E/alpha'       : Item(contents="This is the file 'alpha'.\n"),
+    'A/C/E/beta'        : Item(contents="This is the file 'beta'.\n"),
+    'A/C/F'             : Item(),
+    'A/C/lambda'        : Item(contents="This is the file 'lambda'.\n"),
+    })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/C', switched='S')
+  expected_status.add({
+    'A/C/E'             : Item(status='  ', wc_rev=1),
+    'A/C/E/alpha'       : Item(status='  ', wc_rev=1),
+    'A/C/E/beta'        : Item(status='  ', wc_rev=1),
+    'A/C/F'             : Item(status='  ', wc_rev=1),
+    'A/C/lambda'        : Item(status='  ', wc_rev=1),
+    })
+  svntest.actions.run_and_verify_switch(wc_dir, C_path, B_url,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        [],
+                                        False, '--ignore-ancestry')
+
+  # Make a property modification both on the switched dir and a sibling.
+  D_path = sbox.ospath('A/D')
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'propset', 'x', 'x', C_path, D_path)
+
+  expected_status.tweak('A/C', 'A/D', status=' M')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/C'               : Item(verb='Sending'),
+    'A/D'               : Item(verb='Sending'),
+    })
+  expected_status.tweak('A/C', 'A/D', status='  ', wc_rev=2)
+
+  # A/C erroneously classified as a wc root caused the commit to fail
+  # with "'A/C/E' is missing or not locked"
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output, expected_status,
+                                        [], C_path, D_path)
+
+#----------------------------------------------------------------------
+# Issue 2306.
+def refresh_read_only_attribute(sbox):
+  "refresh the WC file system read-only attribute "
+  # Issue 2306: switching away from a tree where svn:needs-lock is set must
+  # refresh the on-disk read-only bit of the affected files.
+
+  # This test will fail when run as root. Since that's normal
+  # behavior, just skip the test.
+  if os.name == 'posix':
+    if os.geteuid() == 0:
+      raise svntest.Skip('Test doesn\'t work as uid 0')
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Create a branch.
+  url = sbox.repo_url + '/A'
+  branch_url = sbox.repo_url + '/A-branch'
+  svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+                                      'Committed revision 2.\n'], [],
+                                     'cp', '-m', 'svn:needs-lock not set',
+                                     url, branch_url)
+
+  # Set the svn:needs-lock property on a file from the "trunk".
+  A_path = sbox.ospath('A')
+  mu_path = os.path.join(A_path, 'mu')
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'ps', 'svn:needs-lock', '1', mu_path)
+
+  # Commit the propset of svn:needs-lock.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/mu' : Item(verb='Sending'),
+    })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('A/mu', wc_rev=3)
+  svntest.actions.run_and_verify_commit(wc_dir,
+                                        expected_output, expected_status,
+                                        [], mu_path)
+
+  # The file on which svn:needs-lock was set is now expected to be read-only.
+  if os.access(mu_path, os.W_OK):
+    raise svntest.Failure("'%s' expected to be read-only after having had "
+                          "its svn:needs-lock property set" % mu_path)
+
+  # Switch to the branch with the WC state from before the propset of
+  # svn:needs-lock.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/mu' : Item(status=' U'),
+    })
+  expected_disk = svntest.main.greek_state.copy()
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+  expected_status.tweak('', wc_rev=1)
+  expected_status.tweak('iota', wc_rev=1)
+  expected_status.tweak('A', switched='S')
+  svntest.actions.run_and_verify_switch(wc_dir, A_path, branch_url,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        [],
+                                        False, '--ignore-ancestry')
+
+  # The file with we set svn:needs-lock on should now be writable, but
+  # is still read-only!
+  if not os.access(mu_path, os.W_OK):
+    raise svntest.Failure("'%s' expected to be writable after being switched "
+                          "to a branch on which its svn:needs-lock property "
+                          "is not set" % mu_path)
+
+# Check that switch can't change the repository root.
+def switch_change_repos_root(sbox):
+ "switch shouldn't allow changing repos root"
+ sbox.build()
+
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+ other_repo_url = repo_url
+
+ # Strip trailing slashes and add something bogus to that other URL.
+ while other_repo_url[-1] == '/':
+ other_repos_url = other_repo_url[:-1]
+ other_repo_url = other_repo_url + "_bogus"
+
+ other_A_url = other_repo_url + "/A"
+ A_wc_dir = sbox.ospath('A')
+
+ # Test 1: A switch that changes to a non-existing repo shouldn't work.
+ expected_err = ".*Unable to open repository.*|.*Could not open.*|"\
+ ".*Could not find.*|.*No repository found.*"
+ svntest.actions.run_and_verify_svn(None,
+ expected_err,
+ 'switch', '--ignore-ancestry',
+ other_A_url, A_wc_dir)
+
+ # Test 2: A switch that changes the repo root part of the URL shouldn't work.
+ other_repo_dir, other_repo_url = sbox.add_repo_path('other')
+ other_A_url = other_repo_url + "/A"
+
+ svntest.main.create_repos(other_repo_dir)
+ svntest.actions.run_and_verify_svn(None,
+ ".*UUID.*",
+ 'switch', '--ignore-ancestry',
+ other_A_url, A_wc_dir)
+
+ # Make sure we didn't break the WC.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+
+def forced_switch(sbox):
+  "forced switch tolerates obstructions to adds"
+  # A '--force' switch should treat unversioned on-disk items that match
+  # incoming adds as already-existing ('E'), not as errors.
+  sbox.build(read_only = True)
+
+  # Dir obstruction
+  G_path = sbox.ospath('A/B/F/G')
+  os.mkdir(G_path)
+
+  # Faux file obstructions
+  shutil.copyfile(sbox.ospath('A/D/gamma'),
+                  sbox.ospath('A/B/F/gamma'))
+  shutil.copyfile(sbox.ospath('A/D/G/tau'),
+                  sbox.ospath('A/B/F/G/tau'))
+
+  # Real file obstruction
+  pi_path = sbox.ospath('A/B/F/G/pi')
+  svntest.main.file_write(pi_path,
+                          "This is the OBSTRUCTING file 'pi'.\n")
+
+  # Non-obstructing dir and file
+  I_path = sbox.ospath('A/B/F/I')
+  os.mkdir(I_path)
+  upsilon_path = os.path.join(G_path, 'upsilon')
+  svntest.main.file_write(upsilon_path,
+                          "This is the unversioned file 'upsilon'.\n")
+
+  # Setup expected results of switch.
+  expected_output = svntest.wc.State(sbox.wc_dir, {
+    "A/B/F/gamma"   : Item(status='E '),
+    "A/B/F/G"       : Item(status='E '),
+    "A/B/F/G/pi"    : Item(status='E '),
+    "A/B/F/G/rho"   : Item(status='A '),
+    "A/B/F/G/tau"   : Item(status='E '),
+    "A/B/F/H"       : Item(status='A '),
+    "A/B/F/H/chi"   : Item(status='A '),
+    "A/B/F/H/omega" : Item(status='A '),
+    "A/B/F/H/psi"   : Item(status='A '),
+    })
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({
+    "A/B/F/gamma"     : Item("This is the file 'gamma'.\n"),
+    "A/B/F/G"         : Item(),
+    "A/B/F/G/pi"      : Item("This is the OBSTRUCTING file 'pi'.\n"),
+    "A/B/F/G/rho"     : Item("This is the file 'rho'.\n"),
+    "A/B/F/G/tau"     : Item("This is the file 'tau'.\n"),
+    "A/B/F/G/upsilon" : Item("This is the unversioned file 'upsilon'.\n"),
+    "A/B/F/H"         : Item(),
+    "A/B/F/H/chi"     : Item("This is the file 'chi'.\n"),
+    "A/B/F/H/omega"   : Item("This is the file 'omega'.\n"),
+    "A/B/F/H/psi"     : Item("This is the file 'psi'.\n"),
+    "A/B/F/I"         : Item(),
+    })
+  expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+  expected_status.tweak('A/B/F', switched='S')
+  expected_status.add({
+    "A/B/F/gamma"     : Item(status='  ', wc_rev=1),
+    "A/B/F/G"         : Item(status='  ', wc_rev=1),
+    "A/B/F/G/pi"      : Item(status='M ', wc_rev=1),
+    "A/B/F/G/rho"     : Item(status='  ', wc_rev=1),
+    "A/B/F/G/tau"     : Item(status='  ', wc_rev=1),
+    "A/B/F/H"         : Item(status='  ', wc_rev=1),
+    "A/B/F/H/chi"     : Item(status='  ', wc_rev=1),
+    "A/B/F/H/omega"   : Item(status='  ', wc_rev=1),
+    "A/B/F/H/psi"     : Item(status='  ', wc_rev=1),
+    })
+
+  # Do the switch and check the results in three ways.
+  F_path = sbox.ospath('A/B/F')
+  AD_url = sbox.repo_url + '/A/D'
+  svntest.actions.run_and_verify_switch(sbox.wc_dir, F_path, AD_url,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status, [], False,
+                                        '--force', '--ignore-ancestry')
+
+#----------------------------------------------------------------------
+def forced_switch_failures(sbox):
+  "forced switch detects tree conflicts"
+  # Three scenarios where a '--force' switch must raise a tree conflict or
+  # skip, instead of clobbering an obstruction; then verify the WC can be
+  # cleaned up and brought fully up to date.
+  # svntest.factory.make(sbox,
+  #                      """
+  #                      # Add a directory to obstruct a file.
+  #                      mkdir A/B/F/pi
+  #
+  #                      # Add a file to obstruct a directory.
+  #                      echo "The file 'H'" > A/C/H
+  #
+  #                      # Test three cases where forced switch should cause a tree conflict
+  #
+  #                      # 1) A forced switch that tries to add a file when an unversioned
+  #                      #    directory of the same name already exists.  (Currently fails)
+  #                      svn switch --force url/A/D A/C
+  #
+  #                      # 2) A forced switch that tries to add a dir when a file of the same
+  #                      #    name already exists. (Tree conflict)
+  #                      svn switch --force url/A/D/G A/B/F
+  #                      svn info A/B/F/pi
+  #
+  #                      # 3) A forced update that tries to add a directory when a versioned
+  #                      #    directory of the same name already exists.
+  #
+  #                      # Make dir A/D/H/I in repos.
+  #                      svn mkdir -m "Log message" url/A/D/H/I
+  #
+  #                      # Make A/D/G/I and co A/D/H/I into it.
+  #                      mkdir A/D/G/I
+  #                      svn co url/A/D/H/I A/D/G/I
+  #
+  #                      # Try the forced switch.  A/D/G/I obstructs the dir A/D/G/I coming
+  #                      # from the repos, causing an error.
+  #                      svn switch --force url/A/D/H A/D/G
+  #
+  #                      # Delete all three obstructions and finish the update.
+  #                      rm -rf A/D/G/I
+  #                      rm A/B/F/pi
+  #                      rm A/C/H
+  #
+  #                      # A/B/F is switched to A/D/G
+  #                      # A/C is switched to A/D
+  #                      # A/D/G is switched to A/D/H
+  #                      svn up
+  #                      """)
+  # exit(0)
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  url = sbox.repo_url
+
+  A_B_F = sbox.ospath('A/B/F')
+  A_B_F_pi = sbox.ospath('A/B/F/pi')
+  A_C = sbox.ospath('A/C')
+  A_C_H = sbox.ospath('A/C/H')
+  A_D_G = sbox.ospath('A/D/G')
+  A_D_G_I = sbox.ospath('A/D/G/I')
+  url_A_D = url + '/A/D'
+  url_A_D_G = url + '/A/D/G'
+  url_A_D_H = url + '/A/D/H'
+  url_A_D_H_I = url + '/A/D/H/I'
+
+  # Add a directory to obstruct a file.
+  # mkdir A/B/F/pi
+  os.makedirs(A_B_F_pi)
+
+  # Add a file to obstruct a directory.
+  # echo "The file 'H'" > A/C/H
+  main.file_write(A_C_H, "The file 'H'\n")
+
+  # Test three cases where forced switch should cause a tree conflict
+  # 1) A forced switch that tries to add a directory when an unversioned
+  #    file of the same name already exists.  (Currently fails)
+  # svn switch --force url/A/D A/C
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/C/G'             : Item(status='A '),
+    'A/C/G/pi'          : Item(status='A '),
+    'A/C/G/rho'         : Item(status='A '),
+    'A/C/G/tau'         : Item(status='A '),
+    'A/C/gamma'         : Item(status='A '),
+    'A/C/H'             : Item(status='  ', treeconflict='C'),
+    'A/C/H/psi'         : Item(status='  ', treeconflict='A'),
+    'A/C/H/omega'       : Item(status='  ', treeconflict='A'),
+    'A/C/H/chi'         : Item(status='  ', treeconflict='A'),
+  })
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({
+    'A/C/gamma'         : Item(contents="This is the file 'gamma'.\n"),
+    'A/C/G'             : Item(),
+    'A/C/G/pi'          : Item(contents="This is the file 'pi'.\n"),
+    'A/C/G/rho'         : Item(contents="This is the file 'rho'.\n"),
+    'A/C/G/tau'         : Item(contents="This is the file 'tau'.\n"),
+    'A/C/H'             : Item(contents="The file 'H'\n"),
+    'A/B/F/pi'          : Item(),
+  })
+
+  expected_status = actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'A/C/G'             : Item(status='  ', wc_rev='1'),
+    'A/C/G/rho'         : Item(status='  ', wc_rev='1'),
+    'A/C/G/tau'         : Item(status='  ', wc_rev='1'),
+    'A/C/G/pi'          : Item(status='  ', wc_rev='1'),
+    'A/C/H'             : Item(status='D ', treeconflict='C', wc_rev='1'),
+    'A/C/H/psi'         : Item(status='D ', wc_rev='1'),
+    'A/C/H/omega'       : Item(status='D ', wc_rev='1'),
+    'A/C/H/chi'         : Item(status='D ', wc_rev='1'),
+    'A/C/gamma'         : Item(status='  ', wc_rev='1'),
+  })
+  expected_status.tweak('A/C', switched='S')
+
+  actions.run_and_verify_switch(wc_dir, A_C, url_A_D, expected_output,
+                                expected_disk, expected_status, [], False,
+                                '--force',
+                                '--ignore-ancestry')
+
+
+  # 2) A forced switch that tries to add a file when a dir of the same
+  #    name already exists. (Tree conflict)
+  # svn switch --force url/A/D/G A/B/F
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/B/F/rho'         : Item(status='A '),
+    'A/B/F/pi'          : Item(status='  ', treeconflict='C'),
+    'A/B/F/tau'         : Item(status='A '),
+  })
+
+  expected_disk.add({
+    'A/B/F/rho'         : Item(contents="This is the file 'rho'.\n"),
+    'A/B/F/tau'         : Item(contents="This is the file 'tau'.\n"),
+  })
+
+  expected_status.add({
+    'A/B/F/tau'         : Item(status='  ', wc_rev='1'),
+    'A/B/F/pi'          : Item(status='D ', treeconflict='C', wc_rev='1'),
+    'A/B/F/rho'         : Item(status='  ', wc_rev='1'),
+  })
+  expected_status.tweak('A/B/F', switched='S')
+
+  actions.run_and_verify_switch(wc_dir, A_B_F, url_A_D_G, expected_output,
+                                expected_disk, expected_status, [], False,
+                                '--force',
+                                '--ignore-ancestry')
+
+  # svn info A/B/F/pi
+  expected_stdout = verify.ExpectedOutput(
+    'Tree conflict: local dir unversioned, incoming file add upon switch\n',
+    match_all=False)
+
+  actions.run_and_verify_svn2(expected_stdout, [], 0, 'info',
+                              A_B_F_pi)
+
+
+  # 3) A forced update that tries to add a directory when a versioned
+  #    directory of the same name already exists.
+  # Make dir A/D/H/I in repos.
+  # svn mkdir -m "Log message" url/A/D/H/I
+  expected_stdout = verify.UnorderedOutput([
+    'Committing transaction...\n',
+    'Committed revision 2.\n',
+  ])
+
+  actions.run_and_verify_svn2(expected_stdout, [], 0, 'mkdir',
+                              '-m', 'Log message', url_A_D_H_I)
+
+  # Make A/D/G/I and co A/D/H/I into it.
+  # mkdir A/D/G/I
+  os.makedirs(A_D_G_I)
+
+  # svn co url/A/D/H/I A/D/G/I
+  expected_output = svntest.wc.State(wc_dir, {})
+
+  expected_disk.add({
+    'A/D/G/I'           : Item(),
+  })
+
+  exit_code, so, se = svntest.actions.run_and_verify_svn(
+    ['Checked out revision 2.\n'], [],
+    "co", url_A_D_H_I, A_D_G_I)
+
+  # Try the forced switch.  A/D/G/I obstructs the dir A/D/G/I coming
+  # from the repos, causing an error.
+  # svn switch --force url/A/D/H A/D/G
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/D/G/chi'         : Item(status='A '),
+    'A/D/G/tau'         : Item(status='D '),
+    'A/D/G/omega'       : Item(status='A '),
+    'A/D/G/psi'         : Item(status='A '),
+    'A/D/G/I'           : Item(verb='Skipped'),
+    'A/D/G/rho'         : Item(status='D '),
+    'A/D/G/pi'          : Item(status='D '),
+  })
+
+  actions.run_and_verify_switch(wc_dir, A_D_G, url_A_D_H, expected_output,
+                                None, None, [], False,
+                                '--force', '--ignore-ancestry')
+
+  # Delete all three obstructions and finish the update.
+  # rm -rf A/D/G/I
+  main.safe_rmtree(A_D_G_I)
+
+  # rm A/B/F/pi
+  main.safe_rmtree(A_B_F_pi)
+
+  # rm A/C/H
+  os.remove(A_C_H)
+
+  # Resolve the tree conflict on A_C_H and A_B_F_pi
+  svntest.main.run_svn(None, 'resolved', A_C_H)
+  svntest.main.run_svn(None, 'revert', A_B_F_pi)
+
+  # A/B/F is switched to A/D/G
+  # A/C is switched to A/D
+  # A/D/G is switched to A/D/H
+  # svn up
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/C/H/I'           : Item(status='A '),
+    'A/D/G/I'           : Item(status='A '),
+    'A/D/H/I'           : Item(status='A '),
+  })
+
+  expected_disk.remove('A/D/G/tau', 'A/D/G/rho', 'A/D/G/pi')
+  expected_disk.add({
+    'A/D/H/I'           : Item(),
+    'A/D/G/omega'       : Item(contents="This is the file 'omega'.\n"),
+    'A/D/G/psi'         : Item(contents="This is the file 'psi'.\n"),
+    'A/D/G/chi'         : Item(contents="This is the file 'chi'.\n"),
+    'A/C/H/I'           : Item(),
+    'A/C/H/omega'       : Item(contents="This is the file 'omega'.\n"),
+    'A/C/H/psi'         : Item(contents="This is the file 'psi'.\n"),
+    'A/C/H/chi'         : Item(contents="This is the file 'chi'.\n"),
+  })
+  expected_disk.tweak('A/C/H', contents=None)
+  expected_disk.tweak('A/B/F/pi', contents="This is the file 'pi'.\n")
+
+  expected_status.remove('A/D/G/tau', 'A/D/G/rho', 'A/D/G/pi')
+  expected_status.add({
+    'A/D/G/omega'       : Item(status='  ', wc_rev='2'),
+    'A/D/G/I'           : Item(status='  ', wc_rev='2'),
+    'A/D/G/psi'         : Item(status='  ', wc_rev='2'),
+    'A/D/G/chi'         : Item(status='  ', wc_rev='2'),
+    'A/D/H/I'           : Item(status='  ', wc_rev='2'),
+    'A/C/H/psi'         : Item(status='  ', wc_rev='2'),
+    'A/C/H/omega'       : Item(status='  ', wc_rev='2'),
+    'A/C/H/chi'         : Item(status='  ', wc_rev='2'),
+    'A/C/H/I'           : Item(status='  ', wc_rev='2'),
+  })
+  expected_status.tweak(wc_rev='2', status='  ')
+  expected_status.tweak('A/B/F/pi', 'A/C/H', treeconflict=None)
+  expected_status.tweak('A/D/G', switched='S')
+
+  svntest.main.run_svn(None, 'revert', '-R', sbox.ospath('A/C/H'))
+
+  actions.run_and_verify_update(wc_dir, expected_output, expected_disk,
+                                expected_status)
+
+
+def switch_with_obstructing_local_adds(sbox):
+  "switch tolerates WC adds"
+  # Items scheduled for addition (without history) that collide with
+  # incoming adds from a switch must become tree conflicts, not errors.
+  sbox.build(read_only = True)
+
+  # Dir obstruction scheduled for addition without history.
+  G_path = sbox.ospath('A/B/F/G')
+  os.mkdir(G_path)
+
+  # File obstructions scheduled for addition without history.
+  # Contents identical to additions from switch.
+  gamma_copy_path = sbox.ospath('A/B/F/gamma')
+  shutil.copyfile(sbox.ospath('A/D/gamma'),
+                  gamma_copy_path)
+  shutil.copyfile(sbox.ospath('A/D/G/tau'),
+                  sbox.ospath('A/B/F/G/tau'))
+
+  # File obstruction scheduled for addition without history.
+  # Contents conflict with addition from switch.
+  pi_path = sbox.ospath('A/B/F/G/pi')
+  svntest.main.file_write(pi_path,
+                          "This is the OBSTRUCTING file 'pi'.\n")
+
+  # Non-obstructing dir and file scheduled for addition without history.
+  I_path = sbox.ospath('A/B/F/I')
+  os.mkdir(I_path)
+  upsilon_path = os.path.join(G_path, 'upsilon')
+  svntest.main.file_write(upsilon_path,
+                          "This is the unversioned file 'upsilon'.\n")
+
+  # Add the above obstructions.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'add', G_path, I_path,
+                                     gamma_copy_path)
+
+  # Setup expected results of switch.
+  expected_output = svntest.wc.State(sbox.wc_dir, {
+    "A/B/F/gamma"   : Item(status='  ', treeconflict='C'),
+    "A/B/F/G"       : Item(status='  ', treeconflict='C'),
+    'A/B/F/G/tau'   : Item(status='  ', treeconflict='A'),
+    'A/B/F/G/rho'   : Item(status='  ', treeconflict='A'),
+    'A/B/F/G/pi'    : Item(status='  ', treeconflict='A'),
+    "A/B/F/H"       : Item(status='A '),
+    "A/B/F/H/chi"   : Item(status='A '),
+    "A/B/F/H/omega" : Item(status='A '),
+    "A/B/F/H/psi"   : Item(status='A '),
+    })
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({
+    "A/B/F/gamma"     : Item("This is the file 'gamma'.\n"),
+    "A/B/F/G"         : Item(),
+    "A/B/F/G/pi"      : Item("This is the OBSTRUCTING file 'pi'.\n"),
+    "A/B/F/G/tau"     : Item("This is the file 'tau'.\n"),
+    "A/B/F/G/upsilon" : Item("This is the unversioned file 'upsilon'.\n"),
+    "A/B/F/H"         : Item(),
+    "A/B/F/H/chi"     : Item("This is the file 'chi'.\n"),
+    "A/B/F/H/omega"   : Item("This is the file 'omega'.\n"),
+    "A/B/F/H/psi"     : Item("This is the file 'psi'.\n"),
+    "A/B/F/I"         : Item(),
+    })
+  expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+  expected_status.tweak('A/B/F', switched='S')
+  expected_status.add({
+    'A/B/F/gamma'     : Item(status='R ', treeconflict='C', wc_rev='1'),
+    'A/B/F/G'         : Item(status='R ', treeconflict='C', wc_rev='1'),
+    'A/B/F/G/pi'      : Item(status='A ', wc_rev='-', entry_status='R ', entry_rev='1'),
+    'A/B/F/G/tau'     : Item(status='A ', wc_rev='-', entry_status='R ', entry_rev='1'),
+    'A/B/F/G/upsilon' : Item(status='A ', wc_rev='-', entry_rev='0'),
+    'A/B/F/G/rho'     : Item(status='D ', wc_rev='1'),
+    'A/B/F/H'         : Item(status='  ', wc_rev='1'),
+    'A/B/F/H/chi'     : Item(status='  ', wc_rev='1'),
+    'A/B/F/H/omega'   : Item(status='  ', wc_rev='1'),
+    'A/B/F/H/psi'     : Item(status='  ', wc_rev='1'),
+    'A/B/F/I'         : Item(status='A ', wc_rev='-', entry_rev='0'),
+    })
+
+  # Do the switch and check the results in three ways.
+  F_path = sbox.ospath('A/B/F')
+  D_url = sbox.repo_url + '/A/D'
+
+  svntest.actions.run_and_verify_switch(sbox.wc_dir, F_path, D_url,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        [], False,
+                                        '--ignore-ancestry')
+
+#----------------------------------------------------------------------
+
+def switch_scheduled_add(sbox):
+  "switch a scheduled-add file"
+  # Switching a not-yet-committed (scheduled-add) file, or a path that is
+  # not a versioned node at all, must fail with a clear error.
+  sbox.build(read_only = True)
+  wc_dir = sbox.wc_dir
+
+  file_path = sbox.ospath('stub_file')
+  switch_url = sbox.repo_url + '/iota'
+  nodo_path = sbox.ospath('nodo')
+
+  # Schedule an empty file for addition; it exists in the WC only.
+  svntest.main.file_append(file_path, "")
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'add', file_path)
+  svntest.actions.run_and_verify_svn(None,
+                                     "svn: E200007: Cannot switch '.*file' " +
+                                     "because it is not in the repository yet",
+                                     'switch', '--ignore-ancestry',
+                                     switch_url, file_path)
+
+  # 'nodo' does not exist at all: a different error (E155010) is expected.
+  svntest.actions.run_and_verify_svn(None,
+                                     "svn: E155010: The node '.*nodo' was not",
+                                     'switch', '--ignore-ancestry',
+                                     switch_url, nodo_path)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def mergeinfo_switch_elision(sbox):
+ "mergeinfo does not elide post switch"
+
+ # When a switch adds mergeinfo on a path which is identical to
+ # the mergeinfo on one of the path's subtrees, the subtree's mergeinfo
+ # should *not* elide! If it did this could result in the switch of a
+ # pristine tree producing local mods.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ lambda_path = sbox.ospath('A/B_COPY_1/lambda')
+ B_COPY_1_path = sbox.ospath('A/B_COPY_1')
+ B_COPY_2_path = sbox.ospath('A/B_COPY_2')
+ E_COPY_2_path = sbox.ospath('A/B_COPY_2/E')
+ alpha_path = sbox.ospath('A/B/E/alpha')
+ beta_path = sbox.ospath('A/B/E/beta')
+
+ # Make branches A/B_COPY_1 and A/B_COPY_2
+ expected_stdout = verify.UnorderedOutput([
+ "A " + sbox.ospath('A/B_COPY_1/lambda') + "\n",
+ "A " + sbox.ospath('A/B_COPY_1/E') + "\n",
+ "A " + sbox.ospath('A/B_COPY_1/E/alpha') + "\n",
+ "A " + sbox.ospath('A/B_COPY_1/E/beta') + "\n",
+ "A " + sbox.ospath('A/B_COPY_1/F') + "\n",
+ "Checked out revision 1.\n",
+ "A " + B_COPY_1_path + "\n",
+ ])
+ svntest.actions.run_and_verify_svn(expected_stdout, [], 'copy',
+ sbox.repo_url + "/A/B", B_COPY_1_path)
+
+ expected_stdout = verify.UnorderedOutput([
+ "A " + sbox.ospath('A/B_COPY_2/lambda') + "\n",
+ "A " + sbox.ospath('A/B_COPY_2/E') + "\n",
+ "A " + sbox.ospath('A/B_COPY_2/E/alpha') + "\n",
+ "A " + sbox.ospath('A/B_COPY_2/E/beta') + "\n",
+ "A " + sbox.ospath('A/B_COPY_2/F') + "\n",
+ "Checked out revision 1.\n",
+ "A " + B_COPY_2_path + "\n",
+ ])
+ svntest.actions.run_and_verify_svn(expected_stdout, [], 'copy',
+ sbox.repo_url + "/A/B", B_COPY_2_path)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B_COPY_1' : Item(verb='Adding'),
+ 'A/B_COPY_2' : Item(verb='Adding')
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ "A/B_COPY_1" : Item(status=' ', wc_rev=2),
+ "A/B_COPY_1/lambda" : Item(status=' ', wc_rev=2),
+ "A/B_COPY_1/E" : Item(status=' ', wc_rev=2),
+ "A/B_COPY_1/E/alpha" : Item(status=' ', wc_rev=2),
+ "A/B_COPY_1/E/beta" : Item(status=' ', wc_rev=2),
+ "A/B_COPY_1/F" : Item(status=' ', wc_rev=2),
+ "A/B_COPY_2" : Item(status=' ', wc_rev=2),
+ "A/B_COPY_2/lambda" : Item(status=' ', wc_rev=2),
+ "A/B_COPY_2/E" : Item(status=' ', wc_rev=2),
+ "A/B_COPY_2/E/alpha" : Item(status=' ', wc_rev=2),
+ "A/B_COPY_2/E/beta" : Item(status=' ', wc_rev=2),
+ "A/B_COPY_2/F" : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Make some changes under A/B
+
+ # r3 - modify and commit A/B/E/beta
+ svntest.main.file_write(beta_path, "New content")
+ expected_output = svntest.wc.State(wc_dir,
+ {'A/B/E/beta' : Item(verb='Sending')})
+ expected_status.tweak('A/B/E/beta', wc_rev=3)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # r4 - modify and commit A/B/E/alpha
+ svntest.main.file_write(alpha_path, "New content")
+ expected_output = svntest.wc.State(wc_dir,
+ {'A/B/E/alpha' : Item(verb='Sending')})
+ expected_status.tweak('A/B/E/alpha', wc_rev=4)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Merge r2:4 into A/B_COPY_1
+ expected_output = svntest.wc.State(B_COPY_1_path, {
+ 'E/alpha' : Item(status='U '),
+ 'E/beta' : Item(status='U '),
+ })
+ expected_mergeinfo_output = svntest.wc.State(B_COPY_1_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = svntest.wc.State(B_COPY_1_path, {
+ })
+ expected_merge_status = svntest.wc.State(B_COPY_1_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'lambda' : Item(status=' ', wc_rev=2),
+ 'E' : Item(status=' ', wc_rev=2),
+ 'E/alpha' : Item(status='M ', wc_rev=2),
+ 'E/beta' : Item(status='M ', wc_rev=2),
+ 'F' : Item(status=' ', wc_rev=2),
+ })
+ expected_merge_disk = svntest.wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:3-4'}),
+ 'lambda' : Item("This is the file 'lambda'.\n"),
+ 'E' : Item(),
+ 'E/alpha' : Item("New content"),
+ 'E/beta' : Item("New content"),
+ 'F' : Item(),
+ })
+ expected_skip = svntest.wc.State(B_COPY_1_path, { })
+ svntest.actions.run_and_verify_merge(B_COPY_1_path, '2', '4',
+ sbox.repo_url + '/A/B', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_merge_disk,
+ expected_merge_status,
+ expected_skip,
+ check_props=True)
+
+ # r5 - Commit the merge into A/B_COPY_1/E
+ expected_output = svntest.wc.State(
+ wc_dir,
+ {'A/B_COPY_1' : Item(verb='Sending'),
+ 'A/B_COPY_1/E/alpha' : Item(verb='Sending'),
+ 'A/B_COPY_1/E/beta' : Item(verb='Sending'),
+ })
+ expected_status.tweak('A/B_COPY_1', wc_rev=5)
+ expected_status.tweak('A/B_COPY_1/E/alpha', wc_rev=5)
+ expected_status.tweak('A/B_COPY_1/E/beta', wc_rev=5)
+ expected_status.tweak('A/B_COPY_1/lambda', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Merge r2:4 into A/B_COPY_2/E
+ expected_output = svntest.wc.State(E_COPY_2_path, {
+ 'alpha' : Item(status='U '),
+ 'beta' : Item(status='U '),
+ })
+ expected_mergeinfo_output = svntest.wc.State(E_COPY_2_path, {
+ '' : Item(status=' U'),
+ })
+ expected_elision_output = svntest.wc.State(E_COPY_2_path, {
+ })
+ expected_merge_status = svntest.wc.State(E_COPY_2_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'alpha' : Item(status='M ', wc_rev=2),
+ 'beta' : Item(status='M ', wc_rev=2),
+ })
+ expected_merge_disk = svntest.wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B/E:3-4'}),
+ 'alpha' : Item("New content"),
+ 'beta' : Item("New content"),
+ })
+ expected_skip = svntest.wc.State(E_COPY_2_path, { })
+ svntest.actions.run_and_verify_merge(E_COPY_2_path, '2', '4',
+ sbox.repo_url + '/A/B/E', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_merge_disk,
+ expected_merge_status,
+ expected_skip,
+ check_props=True)
+
+ # Switch A/B_COPY_2 to URL of A/B_COPY_1. The local mergeinfo for r1,3-4
+ # on A/B_COPY_2/E is identical to the mergeinfo added to A/B_COPY_2 as a
+ # result of the switch, but we leave the former in place.
+
+ # Setup expected results of switch.
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ "A/B_COPY_2" : Item(status=' U'),
+ "A/B_COPY_2/E/alpha" : Item(status='G '),
+ "A/B_COPY_2/E/beta" : Item(status='G '),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak("A/B/E/alpha", contents="New content")
+ expected_disk.tweak("A/B/E/beta", contents="New content")
+ expected_disk.add({
+ "A/B_COPY_1" : Item(props={SVN_PROP_MERGEINFO : '/A/B:3-4'}),
+ "A/B_COPY_1/E" : Item(),
+ "A/B_COPY_1/F" : Item(),
+ "A/B_COPY_1/lambda" : Item("This is the file 'lambda'.\n"),
+ "A/B_COPY_1/E/alpha" : Item("New content"),
+ "A/B_COPY_1/E/beta" : Item("New content"),
+ "A/B_COPY_2" : Item(props={SVN_PROP_MERGEINFO : '/A/B:3-4'}),
+ "A/B_COPY_2/E" : Item(props={SVN_PROP_MERGEINFO : '/A/B/E:3-4'}),
+ "A/B_COPY_2/F" : Item(),
+ "A/B_COPY_2/lambda" : Item("This is the file 'lambda'.\n"),
+ "A/B_COPY_2/E/alpha" : Item("New content"),
+ "A/B_COPY_2/E/beta" : Item("New content"),
+ })
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ expected_status.tweak("A/B/E/beta", wc_rev=3)
+ expected_status.tweak("A/B/E/alpha", wc_rev=4)
+ expected_status.add({
+ "A/B_COPY_1" : Item(status=' ', wc_rev=5),
+ "A/B_COPY_1/E" : Item(status=' ', wc_rev=2),
+ "A/B_COPY_1/F" : Item(status=' ', wc_rev=2),
+ "A/B_COPY_1/lambda" : Item(status=' ', wc_rev=2),
+ "A/B_COPY_1/E/alpha" : Item(status=' ', wc_rev=5),
+ "A/B_COPY_1/E/beta" : Item(status=' ', wc_rev=5),
+ "A/B_COPY_2" : Item(status=' ', wc_rev=5, switched='S'),
+ "A/B_COPY_2/E" : Item(status=' M', wc_rev=5),
+ "A/B_COPY_2/F" : Item(status=' ', wc_rev=5),
+ "A/B_COPY_2/lambda" : Item(status=' ', wc_rev=5),
+ "A/B_COPY_2/E/alpha" : Item(status=' ', wc_rev=5),
+ "A/B_COPY_2/E/beta" : Item(status=' ', wc_rev=5),
+ })
+
+ svntest.actions.run_and_verify_switch(sbox.wc_dir,
+ B_COPY_2_path,
+ sbox.repo_url + "/A/B_COPY_1",
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True,
+ '--ignore-ancestry')
+
+ # Now check a switch which reverses and earlier switch and leaves
+ # a path in an unswitched state.
+ #
+ # Switch A/B_COPY_1/lambda to iota. Use propset to give A/B_COPY/lambda
+ # the mergeinfo '/A/B/lambda:1,3-4'. Then switch A/B_COPY_1/lambda back
+ # to A/B_COPY_1/lambda. The local mergeinfo for r1,3-4 should remain on
+ # A/B_COPY_1/lambda.
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ "A/B_COPY_1/lambda" : Item(status='U '),
+ })
+ expected_disk.tweak("A/B_COPY_1/lambda",
+ contents="This is the file 'iota'.\n")
+ expected_status.tweak("A/B_COPY_1/lambda", wc_rev=5, switched='S')
+ svntest.actions.run_and_verify_switch(sbox.wc_dir,
+ lambda_path,
+ sbox.repo_url + "/iota",
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True,
+ '--ignore-ancestry')
+
+ svntest.actions.run_and_verify_svn(["property '" + SVN_PROP_MERGEINFO +
+ "' set on '" + lambda_path + "'" +
+ "\n"], [], 'ps', SVN_PROP_MERGEINFO,
+ '/A/B/lambda:3-4', lambda_path)
+
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ "A/B_COPY_1/lambda" : Item(status='U '),
+ })
+ expected_disk.tweak("A/B_COPY_1/lambda",
+ contents="This is the file 'lambda'.\n",
+ props={SVN_PROP_MERGEINFO : '/A/B/lambda:3-4'})
+ expected_status.tweak("A/B_COPY_1/lambda", switched=None, status=' M')
+ svntest.actions.run_and_verify_switch(sbox.wc_dir,
+ lambda_path,
+ sbox.repo_url + "/A/B_COPY_1/lambda",
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True,
+ '--ignore-ancestry')
+
+#----------------------------------------------------------------------
+
+def switch_with_depth(sbox):
+ "basic tests to verify switch along with depth"
+
+ sbox.build(read_only = True)
+
+ # Form some paths and URLs required
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+ AD_url = repo_url + '/A/D'
+ AB_url = repo_url + '/A/B'
+ AB_path = sbox.ospath('A/B')
+
+ # Set up expected results of 'switch --depth=empty'
+ expected_output = svntest.wc.State(wc_dir, {})
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B', switched='S')
+ expected_status.tweak('A/B/lambda', switched='S')
+ expected_status.tweak('A/B/E', switched='S')
+ expected_status.tweak('A/B/F', switched='S')
+
+ # Do 'switch --depth=empty' and check the results in three ways.
+ svntest.actions.run_and_verify_switch(wc_dir, AB_path, AD_url,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--depth', 'empty', '--ignore-ancestry')
+
+ # Set up expected results for reverting 'switch --depth=empty'
+ expected_output = svntest.wc.State(wc_dir, {})
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ svntest.actions.run_and_verify_switch(wc_dir, AB_path, AB_url,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--depth', 'empty', '--ignore-ancestry')
+
+ # Set up expected results of 'switch --depth=files'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/lambda' : Item(status='D '),
+ 'A/B/gamma' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/lambda')
+ expected_disk.add({
+ 'A/B/gamma' : Item("This is the file 'gamma'.\n")
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/B/lambda')
+ expected_status.add({
+ 'A/B/gamma' : Item(status=' ', wc_rev=1)
+ })
+ expected_status.tweak('A/B', switched='S')
+ expected_status.tweak('A/B/E', switched='S')
+ expected_status.tweak('A/B/F', switched='S')
+
+ # Do 'switch --depth=files' and check the results in three ways.
+ svntest.actions.run_and_verify_switch(wc_dir, AB_path, AD_url,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--depth', 'files', '--ignore-ancestry')
+
+ # Set up expected results for reverting 'switch --depth=files'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/gamma' : Item(status='D '),
+ 'A/B/lambda' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ svntest.actions.run_and_verify_switch(wc_dir, AB_path, AB_url,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--depth', 'files', '--ignore-ancestry')
+
+ # Putting the depth=immediates stuff in a subroutine, because we're
+ # going to run it at least twice.
+ def sw_depth_imm():
+ # Set up expected results of 'switch --depth=immediates'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/lambda' : Item(status='D '),
+ 'A/B/E' : Item(status='D '),
+ 'A/B/F' : Item(status='D '),
+ 'A/B/gamma' : Item(status='A '),
+ 'A/B/G' : Item(status='A '),
+ 'A/B/H' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/lambda', 'A/B/E/beta', 'A/B/E/alpha',
+ 'A/B/E', 'A/B/F')
+ expected_disk.add({
+ 'A/B/gamma' : Item("This is the file 'gamma'.\n"),
+ 'A/B/G' : Item(),
+ 'A/B/H' : Item(),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/B/lambda', 'A/B/E/beta', 'A/B/E/alpha',
+ 'A/B/E', 'A/B/F')
+ expected_status.add({
+ 'A/B/gamma' : Item(status=' ', wc_rev=1),
+ 'A/B/G' : Item(status=' ', wc_rev=1),
+ 'A/B/H' : Item(status=' ', wc_rev=1)
+ })
+ expected_status.tweak('A/B', switched='S')
+
+ # Do 'switch --depth=immediates' and check the results in three ways.
+ svntest.actions.run_and_verify_switch(wc_dir, AB_path, AD_url,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--depth', 'immediates',
+ '--ignore-ancestry')
+
+ sw_depth_imm()
+
+ # Set up expected results for reverting 'switch --depth=immediates'.
+ # (Reverting with default [infinite] depth, so that the result is a
+ # standard Greek Tree working copy again.)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/gamma' : Item(status='D '),
+ 'A/B/G' : Item(status='D '),
+ 'A/B/H' : Item(status='D '),
+ 'A/B/lambda' : Item(status='A '),
+ 'A/B/E' : Item(status='A '),
+ 'A/B/E/alpha' : Item(status='A '),
+ 'A/B/E/beta' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ svntest.actions.run_and_verify_switch(wc_dir, AB_path, AB_url,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--ignore-ancestry')
+
+ # Okay, repeat 'switch --depth=immediates'. (Afterwards we'll
+ # 'switch --depth=infinity', to test going all the way.)
+ sw_depth_imm()
+
+ # Set up expected results of 'switch --depth=infinity'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/gamma' : Item(status='D '),
+ 'A/B/G' : Item(status='D '),
+ 'A/B/H' : Item(status='D '),
+ 'A/B/lambda' : Item(status='A '),
+ 'A/B/E' : Item(status='A '),
+ 'A/B/E/alpha' : Item(status='A '),
+ 'A/B/E/beta' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ # Do the 'switch --depth=infinity' and check the results in three ways.
+ svntest.actions.run_and_verify_switch(wc_dir, AB_path, AB_url,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '--depth', 'infinity',
+ '--ignore-ancestry')
+
+#----------------------------------------------------------------------
+
+def switch_to_dir_with_peg_rev(sbox):
+ "switch to dir@peg where dir doesn't exist in HEAD"
+
+ # Builds r2 (add X and Y), r3 (edit tau), r4 (delete A/D/G), then
+ # switches X to the now-deleted A/D/G, pinned via peg '@3' with
+ # operative revision '-r 2'.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # prepare two dirs X and Y in rev. 2
+ X_path = sbox.ospath('X')
+ Y_path = sbox.ospath('Y')
+ svntest.main.run_svn(None, 'mkdir', X_path, Y_path)
+ sbox.simple_commit(message='log message')
+
+ # change tau in rev. 3
+ ADG_path = sbox.ospath('A/D/G')
+ tau_path = os.path.join(ADG_path, 'tau')
+ svntest.main.file_append(tau_path, "new line\n")
+ sbox.simple_commit(message='log message')
+
+ # delete A/D/G in rev. 4
+ svntest.main.run_svn(None, 'up', wc_dir)
+ svntest.main.run_svn(None, 'rm', ADG_path)
+ sbox.simple_commit(message='log message')
+
+ # Test 1: switch X to A/D/G@2
+ ADG_url = repo_url + '/A/D/G'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'X/pi' : Item(status='A '),
+ 'X/rho' : Item(status='A '),
+ 'X/tau' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ # X/tau is expected at its rev-2 text: the "new line" edit is r3,
+ # beyond the operative revision.
+ expected_disk.add({
+ 'X' : Item(),
+ 'X/pi' : Item("This is the file 'pi'.\n"),
+ 'X/rho' : Item("This is the file 'rho'.\n"),
+ 'X/tau' : Item("This is the file 'tau'.\n"),
+ 'Y' : Item(),
+ })
+ expected_disk.remove('A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau')
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+ expected_status.remove('A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau')
+ expected_status.add({
+ 'X' : Item(status=' ', wc_rev=2, switched='S'),
+ 'X/pi' : Item(status=' ', wc_rev=2),
+ 'X/rho' : Item(status=' ', wc_rev=2),
+ 'X/tau' : Item(status=' ', wc_rev=2),
+ 'Y' : Item(status=' ', wc_rev=3)
+ })
+
+ # Do the switch to rev. 2 of /A/D/G@3.
+ svntest.actions.run_and_verify_switch(wc_dir, X_path, ADG_url + '@3',
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ '-r', '2', '--ignore-ancestry')
+
+def switch_urls_with_spaces(sbox):
+ "switch file and dir to url containing spaces"
+
+ # Verifies that both a directory ('A B C' -> 'X Y Z') and a file
+ # ('bar baz bal' -> 'tau pau mau') can be switched to URLs whose
+ # path components contain spaces.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # add file and directory with spaces in their names.
+ XYZ_path = sbox.ospath('X Y Z')
+ ABC_path = sbox.ospath('A B C')
+ svntest.main.run_svn(None, 'mkdir', XYZ_path, ABC_path)
+
+ tpm_path = sbox.ospath('tau pau mau')
+ bbb_path = sbox.ospath('bar baz bal')
+ svntest.main.file_write(tpm_path, "This is the file 'tau pau mau'.\n")
+ svntest.main.file_write(bbb_path, "This is the file 'bar baz bal'.\n")
+ svntest.main.run_svn(None, 'add', tpm_path, bbb_path)
+
+ sbox.simple_commit(message='log message')
+
+ # Test 1: switch directory 'A B C' to url 'X Y Z'
+ XYZ_url = repo_url + '/X Y Z'
+ # Both directories are empty, so no item-level output is expected;
+ # only the switched flag on 'A B C' changes.
+ expected_output = svntest.wc.State(wc_dir, {
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'X Y Z' : Item(),
+ 'A B C' : Item(),
+ 'tau pau mau' : Item("This is the file 'tau pau mau'.\n"),
+ 'bar baz bal' : Item("This is the file 'bar baz bal'.\n"),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'X Y Z' : Item(status=' ', wc_rev=2),
+ 'A B C' : Item(status=' ', wc_rev=2, switched='S'),
+ 'tau pau mau' : Item(status=' ', wc_rev=2),
+ 'bar baz bal' : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_switch(wc_dir, ABC_path, XYZ_url,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [],
+ False, '--ignore-ancestry')
+
+ # Test 2: switch file 'bar baz bal' to 'tau pau mau'
+ tpm_url = repo_url + '/tau pau mau'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'bar baz bal' : Item(status='U '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'X Y Z' : Item(),
+ 'A B C' : Item(),
+ 'tau pau mau' : Item("This is the file 'tau pau mau'.\n"),
+ 'bar baz bal' : Item("This is the file 'tau pau mau'.\n"),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'X Y Z' : Item(status=' ', wc_rev=2),
+ 'A B C' : Item(status=' ', wc_rev=2, switched='S'),
+ 'tau pau mau' : Item(status=' ', wc_rev=2),
+ 'bar baz bal' : Item(status=' ', wc_rev=2, switched='S'),
+ })
+
+ svntest.actions.run_and_verify_switch(wc_dir, bbb_path, tpm_url,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [],
+ False, '--ignore-ancestry')
+
+def switch_to_dir_with_peg_rev2(sbox):
+ "switch to old rev of now renamed branch"
+
+ # A/D/G is renamed to A/D/Y in r4. Switching X to A/D/Y@HEAD with
+ # -r 2 must follow the rename's history back: the operative revision
+ # 2 precedes the rename, so X receives the old A/D/G content (note
+ # X/tau below lacks the r3 "extra line").
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # prepare dir X in rev. 2
+ X_path = sbox.ospath('X')
+ svntest.main.run_svn(None, 'mkdir', X_path)
+ sbox.simple_commit(message='log message')
+
+ # make a change in ADG in rev. 3
+ tau_path = sbox.ospath('A/D/G/tau')
+ svntest.main.file_append(tau_path, "extra line\n")
+ sbox.simple_commit(message='log message')
+
+ # Rename ADG to ADY in rev 4
+ svntest.main.run_svn(None, 'up', wc_dir)
+ ADG_path = sbox.ospath('A/D/G')
+ ADY_path = sbox.ospath('A/D/Y')
+ svntest.main.run_svn(None, 'mv', ADG_path, ADY_path)
+ sbox.simple_commit(message='log message')
+
+ # Test switch X to rev 2 of A/D/Y@HEAD
+ ADY_url = sbox.repo_url + '/A/D/Y'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'X/pi' : Item(status='A '),
+ 'X/rho' : Item(status='A '),
+ 'X/tau' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'X' : Item(),
+ 'X/pi' : Item("This is the file 'pi'.\n"),
+ 'X/rho' : Item("This is the file 'rho'.\n"),
+ 'X/tau' : Item("This is the file 'tau'.\n"),
+ 'A/D/Y' : Item(),
+ 'A/D/Y/pi' : Item("This is the file 'pi'.\n"),
+ 'A/D/Y/rho' : Item("This is the file 'rho'.\n"),
+ 'A/D/Y/tau' : Item("This is the file 'tau'.\nextra line\n"),
+ })
+ expected_disk.remove('A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+ expected_status.remove('A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau')
+ expected_status.add({
+ 'X' : Item(status=' ', wc_rev=2, switched='S'),
+ 'X/pi' : Item(status=' ', wc_rev=2),
+ 'X/rho' : Item(status=' ', wc_rev=2),
+ 'X/tau' : Item(status=' ', wc_rev=2),
+ 'A/D/Y' : Item(status=' ', wc_rev=4),
+ 'A/D/Y/pi' : Item(status=' ', wc_rev=4),
+ 'A/D/Y/rho' : Item(status=' ', wc_rev=4),
+ 'A/D/Y/tau' : Item(status=' ', wc_rev=4),
+ })
+
+ svntest.actions.run_and_verify_switch(wc_dir, X_path, ADY_url + '@HEAD',
+ expected_output,
+ expected_disk,
+ expected_status, [], False,
+ '-r', '2', '--ignore-ancestry')
+
+def switch_to_root(sbox):
+ "switch a folder to the root of its repository"
+
+ # Switches A/D/G to the repository root, so a complete nested Greek
+ # tree is expected to appear under A/D/G.
+ # NOTE(review): repo_url and AD_url below are assigned but never used
+ # in this function (sbox.repo_url is used directly instead).
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ ADG_path = sbox.ospath('A/D/G')
+
+ # Test switch /A/D/G to /
+ AD_url = sbox.repo_url + '/A/D'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/pi' : Item(status='D '),
+ 'A/D/G/rho' : Item(status='D '),
+ 'A/D/G/tau' : Item(status='D '),
+ 'A/D/G/A' : Item(status='A '),
+ 'A/D/G/A/B' : Item(status='A '),
+ 'A/D/G/A/B/lambda' : Item(status='A '),
+ 'A/D/G/A/B/E' : Item(status='A '),
+ 'A/D/G/A/B/E/alpha' : Item(status='A '),
+ 'A/D/G/A/B/E/beta' : Item(status='A '),
+ 'A/D/G/A/B/F' : Item(status='A '),
+ 'A/D/G/A/mu' : Item(status='A '),
+ 'A/D/G/A/C' : Item(status='A '),
+ 'A/D/G/A/D' : Item(status='A '),
+ 'A/D/G/A/D/gamma' : Item(status='A '),
+ 'A/D/G/A/D/G' : Item(status='A '),
+ 'A/D/G/A/D/G/pi' : Item(status='A '),
+ 'A/D/G/A/D/G/rho' : Item(status='A '),
+ 'A/D/G/A/D/G/tau' : Item(status='A '),
+ 'A/D/G/A/D/H' : Item(status='A '),
+ 'A/D/G/A/D/H/chi' : Item(status='A '),
+ 'A/D/G/A/D/H/omega' : Item(status='A '),
+ 'A/D/G/A/D/H/psi' : Item(status='A '),
+ 'A/D/G/iota' : Item(status='A '),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau')
+ # The entire Greek tree re-appears rooted at A/D/G.
+ expected_disk.add_state('A/D/G', svntest.main.greek_state.copy())
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau')
+ expected_status.add_state('A/D/G',
+ svntest.actions.get_virginal_state(wc_dir + '/A/D/G', 1))
+ expected_status.tweak('A/D/G', switched = 'S')
+ svntest.actions.run_and_verify_switch(wc_dir, ADG_path, sbox.repo_url,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [],
+ False, '--ignore-ancestry')
+
+#----------------------------------------------------------------------
+# Make sure that switch continues after deleting locally modified
+# directories, as update and merge do.
+
+@Issue(2505)
+def tolerate_local_mods(sbox):
+ "tolerate deletion of a directory with local mods"
+
+ # Issue #2505: switching A to a copy (A2) must tolerate a directory
+ # (A/L) containing an unversioned file; the expected result is a tree
+ # conflict on A/L, not a failure.
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A_path = sbox.ospath('A')
+ L_path = os.path.join(A_path, 'L')
+ LM_path = os.path.join(L_path, 'local_mod')
+ A_url = sbox.repo_url + '/A'
+ A2_url = sbox.repo_url + '/A2'
+
+ # r2: URL->URL copy of A to A2 (A2 does not contain L).
+ svntest.actions.run_and_verify_svn(['Committing transaction...\n',
+ 'Committed revision 2.\n'], [],
+ 'cp', '-m', 'make copy', A_url, A2_url)
+
+ # r3: add A/L in the working copy branch only.
+ os.mkdir(L_path)
+ svntest.main.run_svn(None, 'add', L_path)
+ sbox.simple_commit(message='Commit added folder')
+
+ # locally modified unversioned file
+ svntest.main.file_write(LM_path, 'Locally modified file.\n', 'w+')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/L' : Item(status=' ', treeconflict='C'),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/L' : Item(),
+ 'A/L/local_mod' : Item(contents='Locally modified file.\n'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+ expected_status.tweak('', 'iota', wc_rev=1)
+ expected_status.tweak('A', switched='S')
+ # A/L survives as a tree-conflicted, copied addition.
+ expected_status.add({
+ 'A/L' : Item(status='A ', copied='+', treeconflict='C', wc_rev='-')
+ })
+
+ # Used to fail with locally modified or unversioned files
+ svntest.actions.run_and_verify_switch(wc_dir, A_path, A2_url,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [],
+ False, '--ignore-ancestry')
+
+#----------------------------------------------------------------------
+
+# Detect tree conflicts among files and directories,
+# edited or deleted in a deep directory structure.
+#
+# See use cases 1-3 in notes/tree-conflicts/use-cases.txt for background.
+# Note that we do not try to track renames. The only difference from
+# the behavior of Subversion 1.4 and 1.5 is the conflicted status of the
+# parent directory.
+
+# convenience definitions
+# Short module-level aliases for the svntest.deeptrees helpers used by
+# the tree-conflict switch tests below; they keep the test bodies terse.
+leaf_edit = svntest.deeptrees.deep_trees_leaf_edit
+tree_del = svntest.deeptrees.deep_trees_tree_del
+leaf_del = svntest.deeptrees.deep_trees_leaf_del
+
+disk_after_leaf_edit = svntest.deeptrees.deep_trees_after_leaf_edit
+disk_after_leaf_del = svntest.deeptrees.deep_trees_after_leaf_del
+disk_after_tree_del = svntest.deeptrees.deep_trees_after_tree_del
+
+deep_trees_conflict_output = svntest.deeptrees.deep_trees_conflict_output
+deep_trees_conflict_output_skipped = \
+  svntest.deeptrees.deep_trees_conflict_output_skipped
+deep_trees_status_local_tree_del = \
+  svntest.deeptrees.deep_trees_status_local_tree_del
+deep_trees_status_local_leaf_edit = \
+  svntest.deeptrees.deep_trees_status_local_leaf_edit
+
+DeepTreesTestCase = svntest.deeptrees.DeepTreesTestCase
+
+# shorthand for building paths
+j = os.path.join
+
+
+def tree_conflicts_on_switch_1_1(sbox):
+ "tree conflicts 1.1: tree del, leaf edit on switch"
+
+ # Use case 1.1: the local side deletes whole subtrees while the
+ # incoming switch edits leaves inside them; each deleted root must
+ # come out tree-conflicted (see the expected_info regexes below).
+ sbox.build()
+
+ # use case 1, as in notes/tree-conflicts/use-cases.txt
+ # 1.1) local tree delete, incoming leaf edit
+
+ expected_output = deep_trees_conflict_output.copy()
+ expected_output.add({
+ 'DDD/D1/D2' : Item(status=' ', treeconflict='U'),
+ 'DDD/D1/D2/D3' : Item(status=' ', treeconflict='U'),
+ 'DDD/D1/D2/D3/zeta' : Item(status=' ', treeconflict='A'),
+ 'DD/D1/D2' : Item(status=' ', treeconflict='U'),
+ 'DD/D1/D2/epsilon' : Item(status=' ', treeconflict='A'),
+ 'DF/D1/beta' : Item(status=' ', treeconflict='U'),
+ 'D/D1/delta' : Item(status=' ', treeconflict='A'),
+ 'DDF/D1/D2' : Item(status=' ', treeconflict='U'),
+ 'DDF/D1/D2/gamma' : Item(status=' ', treeconflict='U')
+ })
+
+ expected_disk = svntest.wc.State('', {
+ 'F' : Item(),
+ 'D' : Item(),
+ 'DF' : Item(),
+ 'DD' : Item(),
+ 'DDF' : Item(),
+ 'DDD' : Item(),
+ })
+
+ # The files delta, epsilon, and zeta are incoming additions, but since
+ # they are all within locally deleted trees they should also be scheduled
+ # for deletion.
+ expected_status = deep_trees_status_local_tree_del.copy()
+ expected_status.add({
+ 'D/D1/delta' : Item(status='D '),
+ 'DD/D1/D2/epsilon' : Item(status='D '),
+ 'DDD/D1/D2/D3/zeta' : Item(status='D '),
+ })
+ expected_status.tweak('', switched='S')
+
+ # Update to the target rev.
+ expected_status.tweak(wc_rev=3)
+
+ # Regexes matched against 'svn info' output for each conflict victim.
+ expected_info = {
+ 'F/alpha' : {
+ 'Tree conflict' :
+ '^local file delete, incoming file edit upon switch'
+ + ' Source left: .file.*/F/alpha@2'
+ + ' Source right: .file.*/F/alpha@3$',
+ },
+ 'DF/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir edit upon switch'
+ + ' Source left: .dir.*/DF/D1@2'
+ + ' Source right: .dir.*/DF/D1@3$',
+ },
+ 'DDF/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir edit upon switch'
+ + ' Source left: .dir.*/DDF/D1@2'
+ + ' Source right: .dir.*/DDF/D1@3$',
+ },
+ 'D/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir edit upon switch'
+ + ' Source left: .dir.*/D/D1@2'
+ + ' Source right: .dir.*/D/D1@3$',
+ },
+ 'DD/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir edit upon switch'
+ + ' Source left: .dir.*/DD/D1@2'
+ + ' Source right: .dir.*/DD/D1@3$',
+ },
+ 'DDD/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir edit upon switch'
+ + ' Source left: .dir.*/DDD/D1@2'
+ + ' Source right: .dir.*/DDD/D1@3$',
+ },
+ }
+
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_switch(sbox,
+ [ DeepTreesTestCase("local_tree_del_incoming_leaf_edit",
+ tree_del,
+ leaf_edit,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_info = expected_info) ] )
+
+
+@Issue(3334)
+def tree_conflicts_on_switch_1_2(sbox):
+ "tree conflicts 1.2: tree del, leaf del on switch"
+
+ # Use case 1.2: local tree delete meets incoming leaf delete. See the
+ # long comment below for why the incoming deletes must really happen
+ # (issue #3334).
+ sbox.build()
+
+ # 1.2) local tree delete, incoming leaf delete
+
+ expected_output = deep_trees_conflict_output.copy()
+ expected_output.add({
+ 'DD/D1/D2' : Item(status=' ', treeconflict='D'),
+ 'DDF/D1/D2' : Item(status=' ', treeconflict='U'),
+ 'DDF/D1/D2/gamma' : Item(status=' ', treeconflict='D'),
+ 'DDD/D1/D2' : Item(status=' ', treeconflict='U'),
+ 'DDD/D1/D2/D3' : Item(status=' ', treeconflict='D'),
+ 'DF/D1/beta' : Item(status=' ', treeconflict='D'),
+ })
+
+ expected_disk = svntest.wc.State('', {
+ 'F' : Item(),
+ 'D' : Item(),
+ 'DF' : Item(),
+ 'DD' : Item(),
+ 'DDF' : Item(),
+ 'DDD' : Item(),
+ })
+
+ expected_status = deep_trees_status_local_tree_del.copy()
+
+ # Expect the incoming leaf deletes to actually occur. Even though they
+ # are within (or in the case of F/alpha and D/D1 are the same as) the
+ # trees locally scheduled for deletion we must still delete them and
+ # update the scheduled for deletion items to the target rev. Otherwise
+ # once the conflicts are resolved we still have a mixed-rev WC we can't
+ # commit without updating...which, you guessed it, raises tree conflicts
+ # again, repeat ad infinitum - see issue #3334.
+ #
+ # Update to the target rev.
+ expected_status.tweak(wc_rev=3)
+ expected_status.tweak('F/alpha',
+ 'D/D1',
+ status='! ', wc_rev=None)
+ expected_status.tweak('', switched='S')
+ # Remove the incoming deletes from status and disk.
+ expected_status.remove('DD/D1/D2',
+ 'DDD/D1/D2/D3',
+ 'DDF/D1/D2/gamma',
+ 'DF/D1/beta')
+
+ # Regexes matched against 'svn info' output for each conflict victim.
+ expected_info = {
+ 'F/alpha' : {
+ 'Tree conflict' :
+ '^local file delete, incoming file delete or move upon switch'
+ + ' Source left: .file.*/F/alpha@2'
+ + ' Source right: .none.*(/F/alpha@3)?$',
+ },
+ 'DF/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir edit upon switch'
+ + ' Source left: .dir.*/DF/D1@2'
+ + ' Source right: .dir.*/DF/D1@3$',
+ },
+ 'DDF/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir edit upon switch'
+ + ' Source left: .dir.*/DDF/D1@2'
+ + ' Source right: .dir.*/DDF/D1@3$',
+ },
+ 'D/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir delete or move upon switch'
+ + ' Source left: .dir.*/D/D1@2'
+ + ' Source right: .none.*(/D/D1@3)?$',
+ },
+ 'DD/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir edit upon switch'
+ + ' Source left: .dir.*/DD/D1@2'
+ + ' Source right: .dir.*/DD/D1@3$',
+ },
+ 'DDD/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir edit upon switch'
+ + ' Source left: .dir.*/DDD/D1@2'
+ + ' Source right: .dir.*/DDD/D1@3$',
+ },
+ }
+
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_switch(sbox,
+ [ DeepTreesTestCase("local_tree_del_incoming_leaf_del",
+ tree_del,
+ leaf_del,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_info = expected_info) ] )
+
+
+@Issue(3334)
+def tree_conflicts_on_switch_2_1(sbox):
+ "tree conflicts 2.1: leaf edit, tree del on switch"
+
+ # Use case 2.1: local leaf edit meets incoming tree delete; the
+ # locally edited subtrees are rescheduled as copies (status 'A ',
+ # copied '+', wc_rev '-').
+ # NOTE(review): unlike tests 1.1/1.2 there is no sbox.build() here --
+ # presumably deep_trees_run_tests_scheme_for_switch builds the sandbox
+ # itself; confirm against the harness.
+
+ # use case 2, as in notes/tree-conflicts/use-cases.txt
+ # 2.1) local leaf edit, incoming tree delete
+
+ expected_output = deep_trees_conflict_output
+
+ expected_disk = disk_after_leaf_edit.copy()
+
+ expected_status = deep_trees_status_local_leaf_edit.copy()
+
+ # The expectation on 'alpha' reflects partial progress on issue #3334.
+ expected_status.tweak('D/D1',
+ 'F/alpha',
+ 'DD/D1',
+ 'DF/D1',
+ 'DDD/D1',
+ 'DDF/D1',
+ status='A ', copied='+', wc_rev='-')
+ # See the status of all the paths *under* the above six subtrees. Only the
+ # roots of the added subtrees show as schedule 'A', these child paths show
+ # only that history is scheduled with the commit.
+ expected_status.tweak(
+ 'DD/D1/D2',
+ 'DDD/D1/D2',
+ 'DDD/D1/D2/D3',
+ 'DF/D1/beta',
+ 'DDF/D1/D2',
+ 'DDF/D1/D2/gamma',
+ copied='+', wc_rev='-')
+ expected_status.tweak('', switched='S')
+
+ # Regexes matched against 'svn info' output for each conflict victim.
+ expected_info = {
+ 'F/alpha' : {
+ 'Tree conflict' :
+ '^local file edit, incoming file delete or move upon switch'
+ + ' Source left: .file.*/F/alpha@2'
+ + ' Source right: .none.*(/F/alpha@3)?$',
+ },
+ 'DF/D1' : {
+ 'Tree conflict' :
+ '^local dir edit, incoming dir delete or move upon switch'
+ + ' Source left: .dir.*/DF/D1@2'
+ + ' Source right: .none.*(/DF/D1@3)?$',
+ },
+ 'DDF/D1' : {
+ 'Tree conflict' :
+ '^local dir edit, incoming dir delete or move upon switch'
+ + ' Source left: .dir.*/DDF/D1@2'
+ + ' Source right: .none.*(/DDF/D1@3)?$',
+ },
+ 'D/D1' : {
+ 'Tree conflict' :
+ '^local dir edit, incoming dir delete or move upon switch'
+ + ' Source left: .dir.*/D/D1@2'
+ + ' Source right: .none.*(/D/D1@3)?$',
+ },
+ 'DD/D1' : {
+ 'Tree conflict' :
+ '^local dir edit, incoming dir delete or move upon switch'
+ + ' Source left: .dir.*/DD/D1@2'
+ + ' Source right: .none.*(/DD/D1@3)?$',
+ },
+ 'DDD/D1' : {
+ 'Tree conflict' :
+ '^local dir edit, incoming dir delete or move upon switch'
+ + ' Source left: .dir.*/DDD/D1@2'
+ + ' Source right: .none.*(/DDD/D1@3)?$',
+ },
+ }
+
+ ### D/D1/delta is locally-added during leaf_edit. when tree_del executes,
+ ### it will delete D/D1, and the switch reschedules local D/D1 for
+ ### local-copy from its original revision. however, right now, we cannot
+ ### denote that delta is a local-add rather than a child of that D/D1 copy.
+ ### thus, it appears in the status output as a (M)odified child.
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_switch(sbox,
+ [ DeepTreesTestCase("local_leaf_edit_incoming_tree_del",
+ leaf_edit,
+ tree_del,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_info = expected_info) ] )
+
+
+def tree_conflicts_on_switch_2_2(sbox):
+ "tree conflicts 2.2: leaf del, tree del on switch"
+
+ # Use case 2.2: local leaf delete meets incoming tree delete.
+ # NOTE(review): no sbox.build() here -- presumably the deeptrees
+ # scheme runner creates the sandbox; confirm against the harness.
+
+ # 2.2) local leaf delete, incoming tree delete
+
+ ### Current behaviour fails to show conflicts when deleting
+ ### a directory tree that has modifications. (Will be solved
+ ### when dirs_same_p() is implemented)
+ expected_output = deep_trees_conflict_output
+
+ expected_disk = svntest.wc.State('', {
+ 'DDF/D1/D2' : Item(),
+ 'F' : Item(),
+ 'D' : Item(),
+ 'DF/D1' : Item(),
+ 'DD/D1' : Item(),
+ 'DDD/D1/D2' : Item(),
+ })
+
+ expected_status = svntest.deeptrees.deep_trees_virginal_state.copy()
+ expected_status.add({'' : Item(),
+ 'F/alpha' : Item()})
+ expected_status.tweak(contents=None, status=' ', wc_rev=3)
+ expected_status.tweak('', switched='S')
+
+ # Expect the incoming tree deletes and the local leaf deletes to mean
+ # that all deleted paths are *really* gone, not simply scheduled for
+ # deletion.
+ expected_status.tweak('DD/D1', 'DF/D1', 'DDF/D1', 'DDD/D1',
+ status='A ', copied='+', treeconflict='C',
+ wc_rev='-')
+ expected_status.tweak('DDF/D1/D2', 'DDD/D1/D2',
+ copied='+', wc_rev='-')
+ expected_status.tweak('DD/D1/D2', 'DF/D1/beta', 'DDD/D1/D2/D3',
+ 'DDF/D1/D2/gamma',
+ status='D ', copied='+', wc_rev='-')
+ expected_status.tweak('F/alpha', 'D/D1',
+ status='! ', treeconflict='C', wc_rev=None)
+
+ # Regexes matched against 'svn info' output for each conflict victim.
+ expected_info = {
+ 'F/alpha' : {
+ 'Tree conflict' :
+ '^local file delete, incoming file delete or move upon switch'
+ + ' Source left: .file.*/F/alpha@2'
+ + ' Source right: .none.*(/F/alpha@3)?$',
+ },
+ 'DF/D1' : {
+ 'Tree conflict' :
+ '^local dir edit, incoming dir delete or move upon switch'
+ + ' Source left: .dir.*/DF/D1@2'
+ + ' Source right: .none.*(/DF/D1@3)?$',
+ },
+ 'DDF/D1' : {
+ 'Tree conflict' :
+ '^local dir edit, incoming dir delete or move upon switch'
+ + ' Source left: .dir.*/DDF/D1@2'
+ + ' Source right: .none.*(/DDF/D1@3)?$',
+ },
+ 'D/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir delete or move upon switch'
+ + ' Source left: .dir.*/D/D1@2'
+ + ' Source right: .none.*(/D/D1@3)?$',
+ },
+ 'DD/D1' : {
+ 'Tree conflict' :
+ '^local dir edit, incoming dir delete or move upon switch'
+ + ' Source left: .dir.*/DD/D1@2'
+ + ' Source right: .none.*(/DD/D1@3)?$',
+ },
+ 'DDD/D1' : {
+ 'Tree conflict' :
+ '^local dir edit, incoming dir delete or move upon switch'
+ + ' Source left: .dir.*/DDD/D1@2'
+ + ' Source right: .none.*(/DDD/D1@3)?$',
+ },
+ }
+
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_switch(sbox,
+ [ DeepTreesTestCase("local_leaf_del_incoming_tree_del",
+ leaf_del,
+ tree_del,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_info = expected_info) ] )
+
+
+def tree_conflicts_on_switch_3(sbox):
+ "tree conflicts 3: tree del, tree del on switch"
+
+ # use case 3, as in notes/tree-conflicts/use-cases.txt
+ # local tree delete, incoming tree delete
+
+ expected_output = deep_trees_conflict_output
+
+ expected_disk = svntest.wc.State('', {
+ 'F' : Item(),
+ 'D' : Item(),
+ 'DF' : Item(),
+ 'DD' : Item(),
+ 'DDF' : Item(),
+ 'DDD' : Item(),
+ })
+
+ expected_status = deep_trees_status_local_tree_del.copy()
+ expected_status.tweak('', switched='S')
+
+ # Expect the incoming tree deletes and the local tree deletes to mean
+ # that all deleted paths are *really* gone, not simply scheduled for
+ # deletion.
+ expected_status.tweak('F/alpha',
+ 'D/D1',
+ 'DD/D1',
+ 'DF/D1',
+ 'DDD/D1',
+ 'DDF/D1',
+ status='! ', wc_rev=None)
+ # Remove from expected status and disk everything below the deleted paths.
+ expected_status.remove('DD/D1/D2',
+ 'DF/D1/beta',
+ 'DDD/D1/D2',
+ 'DDD/D1/D2/D3',
+ 'DDF/D1/D2',
+ 'DDF/D1/D2/gamma',)
+
+ expected_info = {
+ 'F/alpha' : {
+ 'Tree conflict' :
+ '^local file delete, incoming file delete or move upon switch'
+ + ' Source left: .file.*/F/alpha@2'
+ + ' Source right: .none.*(/F/alpha@3)?$',
+ },
+ 'DF/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir delete or move upon switch'
+ + ' Source left: .dir.*/DF/D1@2'
+ + ' Source right: .none.*(/DF/D1@3)?$',
+ },
+ 'DDF/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir delete or move upon switch'
+ + ' Source left: .dir.*/DDF/D1@2'
+ + ' Source right: .none.*(/DDF/D1@3)?$',
+ },
+ 'D/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir delete or move upon switch'
+ + ' Source left: .dir.*/D/D1@2'
+ + ' Source right: .none.*(/D/D1@3)?$',
+ },
+ 'DD/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir delete or move upon switch'
+ + ' Source left: .dir.*/DD/D1@2'
+ + ' Source right: .none.*(/DD/D1@3)?$',
+ },
+ 'DDD/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir delete or move upon switch'
+ + ' Source left: .dir.*/DDD/D1@2'
+ + ' Source right: .none.*(/DDD/D1@3)?$',
+ },
+ }
+
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_switch(sbox,
+ [ DeepTreesTestCase("local_tree_del_incoming_tree_del",
+ tree_del,
+ tree_del,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_info = expected_info) ] )
+
+def copy_with_switched_subdir(sbox):
+ "copy directory with switched subdir"
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ D = sbox.ospath('A/D')
+ G = os.path.join(D, 'G')
+
+ E_url = sbox.repo_url + '/A/B/E'
+ R = sbox.ospath('R')
+
+ state = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ # Verify before switching
+ svntest.actions.run_and_verify_status(wc_dir, state)
+
+ # Switch A/D/G
+ svntest.actions.run_and_verify_svn(None, [], 'switch',
+ '--ignore-ancestry', E_url, G)
+
+ state.tweak('A/D/G', switched='S')
+ state.remove('A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau')
+ state.add({
+ 'A/D/G/alpha' : Item(status=' ', wc_rev=1),
+ 'A/D/G/beta' : Item(status=' ', wc_rev=1),
+ })
+ svntest.actions.run_and_verify_status(wc_dir, state)
+
+ # And now copy A/D and everything below it to R
+ svntest.actions.run_and_verify_svn(None, [], 'cp', D, R)
+
+ state.add({
+ 'R' : Item(status='A ', copied='+', wc_rev='-'),
+ 'R/gamma' : Item(status=' ', copied='+', wc_rev='-'),
+ 'R/G/alpha' : Item(status=' ', copied='+', wc_rev='-'),
+ 'R/G/beta' : Item(status=' ', copied='+', wc_rev='-'),
+ 'R/H' : Item(status=' ', copied='+', wc_rev='-'),
+ 'R/H/chi' : Item(status=' ', copied='+', wc_rev='-'),
+ 'R/H/omega' : Item(status=' ', copied='+', wc_rev='-'),
+ 'R/H/psi' : Item(status=' ', copied='+', wc_rev='-'),
+ 'R/G' : Item(status='A ', copied='+', wc_rev='-'),
+ })
+
+ svntest.actions.run_and_verify_status(wc_dir, state)
+
+ sbox.simple_commit(message='Commit added folder')
+
+ # Additional test, it should commit to R/G/alpha.
+ svntest.main.run_svn(None, 'up', wc_dir)
+ svntest.main.file_append(sbox.ospath('R/G/alpha'), "apple")
+ sbox.simple_commit(message='Commit changed file')
+
+ # Checkout working copy to verify result
+ svntest.main.safe_rmtree(wc_dir, 1)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'checkout',
+ sbox.repo_url, wc_dir)
+
+ # Switch A/D/G again to recreate state
+ svntest.actions.run_and_verify_svn(None, [], 'switch',
+ '--ignore-ancestry', E_url, G)
+
+ # Clear the statuses
+ state.tweak(status=' ', copied=None, wc_rev='3', entry_status=None)
+ # But reset the switched state
+ state.tweak('A/D/G', switched='S')
+
+ svntest.actions.run_and_verify_status(wc_dir, state)
+
+@Issue(3871)
+def up_to_old_rev_with_subtree_switched_to_root(sbox):
+ "up to old rev with subtree switched to root"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about.
+ A_path = sbox.ospath('A')
+ branch_path = sbox.ospath('branch')
+
+ # Starting with a vanilla greek tree, create a branch of A, switch
+ # that branch to the root of the repository, then update the WC to
+ # r1.
+ svntest.actions.run_and_verify_svn(None, [], 'copy', A_path,
+ branch_path)
+ svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir,
+ '-m', 'Create a branch')
+ svntest.actions.run_and_verify_svn(None, [], 'sw', sbox.repo_url,
+ branch_path, '--ignore-ancestry')
+
+ # Now update the WC to r1.
+ svntest.actions.run_and_verify_svn(None, [], 'up', '-r1', wc_dir)
+
+def different_node_kind(sbox):
+ "switch to a different node kind"
+ sbox.build(read_only = True)
+ os.chdir(sbox.wc_dir)
+ sbox.wc_dir = ''
+
+ pristine_disk = svntest.main.greek_state
+ pristine_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ expected_disk = pristine_disk.copy()
+ expected_status = pristine_status.copy()
+
+ def switch_to_dir(sbox, rel_url, rel_path):
+ full_url = sbox.repo_url + '/' + rel_url
+ full_path = sbox.ospath(rel_path)
+ expected_disk.remove(rel_path)
+ expected_disk.add({ rel_path : pristine_disk.desc[rel_url] })
+ expected_disk.add_state(rel_path, pristine_disk.subtree(rel_url))
+ expected_status.tweak(rel_path, switched='S')
+ expected_status.add_state(rel_path, pristine_status.subtree(rel_url))
+ svntest.actions.run_and_verify_switch(sbox.wc_dir, full_path, full_url,
+ None, expected_disk, expected_status,
+ [], False,
+ '--ignore-ancestry')
+ svntest.actions.run_and_verify_svn(None, [], 'info', full_path)
+ if not os.path.isdir(full_path):
+ raise svntest.Failure
+
+ def switch_to_file(sbox, rel_url, rel_path):
+ full_url = sbox.repo_url + '/' + rel_url
+ full_path = sbox.ospath(rel_path)
+ expected_disk.remove_subtree(rel_path)
+ expected_disk.add({ rel_path : pristine_disk.desc[rel_url] })
+ expected_status.remove_subtree(rel_path)
+ expected_status.add({ rel_path : pristine_status.desc[rel_url] })
+ expected_status.tweak(rel_path, switched='S')
+ svntest.actions.run_and_verify_switch(sbox.wc_dir, full_path, full_url,
+ None, expected_disk, expected_status,
+ [], False,
+ '--ignore-ancestry')
+ svntest.actions.run_and_verify_svn(None, [], 'info', full_path)
+ if not os.path.isfile(full_path):
+ raise svntest.Failure
+
+ # Switch two files to dirs and two dirs to files.
+ # 'A/C' is an empty dir; 'A/D/G' is a non-empty dir.
+ switch_to_dir(sbox, 'A/C', 'iota')
+ switch_to_dir(sbox, 'A/D/G', 'A/D/gamma')
+ switch_to_file(sbox, 'iota', 'A/C')
+ switch_to_file(sbox, 'A/D/gamma', 'A/D/G')
+
+@Issue(3332, 3333)
+def switch_to_spaces(sbox):
+ "switch to a directory with spaces in its name"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # Paths are normalized in the command processing, so %20 is equivalent to ' '
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', repo_url + '/A',
+ repo_url + '/A%20with space',
+ '-m', '')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mv', repo_url + '/A%20with space',
+ repo_url + '/A with%20more spaces',
+ '-m', '')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+ expected_status.tweak('A', switched='S')
+ expected_status.tweak('', 'iota', wc_rev=1)
+
+ svntest.actions.run_and_verify_switch(sbox.wc_dir, sbox.ospath('A'),
+ repo_url + '/A%20with more%20spaces',
+ None, None, expected_status)
+
+def switch_across_replacement(sbox):
+ "switch across a node replacement"
+ sbox.build()
+ os.chdir(sbox.wc_dir)
+ sbox.wc_dir = ''
+
+ # replacement
+ sbox.simple_rm('A/mu')
+ sbox.simple_append('A/mu', "This is the file 'mu'.\n", truncate=True)
+ sbox.simple_add('A/mu')
+ sbox.simple_commit() # r2
+
+ # When 'switch' of a dir brings in a replacement of a child file with no
+ # textual difference and ignoring ancestry, the switch doesn't report any
+ # incoming change at all, (and so won't raise a tree conflict if there is
+ # a local mod). 'update' on the other hand does report the replacement
+ # as expected.
+
+ # This test FAILs when using a Subversion 1.0-1.7 svnserve.
+
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ 'A/mu' : Item(status='A ', prev_status='D '),
+ })
+ svntest.actions.run_and_verify_update(sbox.wc_dir,
+ expected_output, None, None,
+ [], False,
+ '-r1')
+ svntest.actions.run_and_verify_update(sbox.wc_dir,
+ expected_output, None, None,
+ [], False,
+ '-r2')
+ svntest.actions.run_and_verify_switch(sbox.wc_dir, sbox.ospath('A'), '^/A',
+ expected_output, None, None,
+ [], False,
+ '-r1')
+
+@Issue(1975)
+def switch_keywords(sbox):
+ "switch and svn:keywords"
+ sbox.build()
+ gamma_path = sbox.ospath('A/D/gamma')
+ psi_path = sbox.ospath('A/D/H/psi')
+
+ sbox.simple_propset('svn:keywords', 'URL', 'A/D/gamma')
+ svntest.main.file_write(gamma_path, "$URL$\n")
+ sbox.simple_propset('svn:keywords', 'URL', 'A/D/H/psi')
+ svntest.main.file_write(psi_path, "$URL$\n")
+ sbox.simple_commit()
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/D/gamma',
+ contents="$URL: %s/A/D/gamma $\n" % sbox.repo_url)
+ expected_disk.tweak('A/D/H/psi',
+ contents="$URL: %s/A/D/H/psi $\n" % sbox.repo_url)
+
+ svntest.actions.run_and_verify_update(sbox.wc_dir,
+ None, expected_disk, None)
+ sbox.simple_copy('A', 'A_copy')
+ sbox.simple_commit()
+ sbox.simple_update()
+
+ # Next, we're going to switch A to A_copy, and expect keywords
+ # in the switched files gamma and psi to be updated accordingly.
+
+ expected_disk.add({
+ 'A_copy/D/H/chi' : Item(contents="This is the file 'chi'.\n"),
+ 'A_copy/D/H/psi' : Item(contents="$URL: %s/A_copy/D/H/psi $\n"
+ % sbox.repo_url),
+ 'A_copy/D/H/omega' : Item(contents="This is the file 'omega'.\n"),
+ 'A_copy/D/G/pi' : Item(contents="This is the file 'pi'.\n"),
+ 'A_copy/D/G/tau' : Item(contents="This is the file 'tau'.\n"),
+ 'A_copy/D/G/rho' : Item(contents="This is the file 'rho'.\n"),
+ 'A_copy/D/gamma' : Item(contents="$URL: %s/A_copy/D/gamma $\n"
+ % sbox.repo_url),
+ 'A_copy/B/F' : Item(),
+ 'A_copy/B/E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'A_copy/B/E/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'A_copy/B/lambda' : Item(contents="This is the file 'lambda'.\n"),
+ 'A_copy/mu' : Item(contents="This is the file 'mu'.\n"),
+ 'A_copy/C' : Item(),
+ })
+
+ # update expected URL for switched gamma
+ expected_disk.tweak('A/D/gamma',
+ contents="$URL: %s/A_copy/D/gamma $\n" % sbox.repo_url)
+
+ # leave gamma unmodified, locally modify psi
+ svntest.main.file_write(psi_path, "$URL$\nnew line\n")
+ # update expected URL for switched psi
+ expected_disk.tweak('A/D/H/psi',
+ contents="$URL: %s/A_copy/D/H/psi $\nnew line\n"
+ % sbox.repo_url)
+
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 3)
+ expected_status.add({
+ 'A_copy' : Item(status=' ', wc_rev='3'),
+ 'A_copy/mu' : Item(status=' ', wc_rev='3'),
+ 'A_copy/D' : Item(status=' ', wc_rev='3'),
+ 'A_copy/D/H' : Item(status=' ', wc_rev='3'),
+ 'A_copy/D/H/psi' : Item(status=' ', wc_rev='3'),
+ 'A_copy/D/H/chi' : Item(status=' ', wc_rev='3'),
+ 'A_copy/D/H/omega' : Item(status=' ', wc_rev='3'),
+ 'A_copy/D/gamma' : Item(status=' ', wc_rev='3'),
+ 'A_copy/D/G' : Item(status=' ', wc_rev='3'),
+ 'A_copy/D/G/rho' : Item(status=' ', wc_rev='3'),
+ 'A_copy/D/G/tau' : Item(status=' ', wc_rev='3'),
+ 'A_copy/D/G/pi' : Item(status=' ', wc_rev='3'),
+ 'A_copy/B' : Item(status=' ', wc_rev='3'),
+ 'A_copy/B/E' : Item(status=' ', wc_rev='3'),
+ 'A_copy/B/E/alpha' : Item(status=' ', wc_rev='3'),
+ 'A_copy/B/E/beta' : Item(status=' ', wc_rev='3'),
+ 'A_copy/B/F' : Item(status=' ', wc_rev='3'),
+ 'A_copy/B/lambda' : Item(status=' ', wc_rev='3'),
+ 'A_copy/C' : Item(status=' ', wc_rev='3'),
+ })
+ expected_status.tweak('A', switched='S')
+ expected_status.tweak('A/D/H/psi', status='M ')
+
+ # both gamma and psi should have update URLs after the switch
+ svntest.actions.run_and_verify_switch(sbox.wc_dir, sbox.ospath('A'), '^/A_copy',
+ None, expected_disk, expected_status)
+
+@Issue(4524)
+def switch_moves(sbox):
+ "switch moves on wc checkpoint"
+
+ sbox.build()
+
+ sbox.simple_move('A/B', 'B')
+ sbox.simple_rm('A')
+
+ branch_url = sbox.repo_url + '/branch'
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', sbox.wc_dir, branch_url,
+ '-m', '')
+
+ expected_disk = svntest.wc.State('', {
+ 'B/E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'B/E/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'B/lambda' : Item(contents="This is the file 'lambda'.\n"),
+ 'B/F' : Item(),
+ 'iota' : Item(contents="This is the file 'iota'.\n"),
+ })
+
+ expected_status = svntest.wc.State(sbox.wc_dir, {
+ '' : Item(status=' ', wc_rev='2'),
+ 'B' : Item(status='R ', copied='+', treeconflict='C', wc_rev='-'),
+ 'B/lambda' : Item(status=' ', copied='+', wc_rev='-'),
+ 'B/F' : Item(status=' ', copied='+', wc_rev='-'),
+ 'B/E' : Item(status=' ', copied='+', wc_rev='-'),
+ 'B/E/beta' : Item(status=' ', copied='+', wc_rev='-'),
+ 'B/E/alpha' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A' : Item(status='! ', treeconflict='C'),
+ 'iota' : Item(status=' ', wc_rev='2'),
+ })
+
+ # In Subversion 1.8 this scenario causes an Sqlite row not found error.
+  # It would be nice if we could handle the tree conflict more intelligently,
+  # as the working copy matches the incoming change.
+ svntest.actions.run_and_verify_switch(sbox.wc_dir, sbox.ospath(''), branch_url,
+ None, expected_disk, expected_status)
+
+
+########################################################################
+# Run the tests
+
+# list all tests here, starting with None:
+test_list = [ None,
+ routine_switching,
+ commit_switched_things,
+ full_update,
+ full_rev_update,
+ update_switched_things,
+ rev_update_switched_things,
+ log_switched_file,
+ delete_subdir,
+ file_dir_file,
+ nonrecursive_switching,
+ failed_anchor_is_target,
+ bad_intermediate_urls,
+ obstructed_switch,
+ commit_mods_below_switch,
+ refresh_read_only_attribute,
+ switch_change_repos_root,
+ forced_switch,
+ forced_switch_failures,
+ switch_scheduled_add,
+ mergeinfo_switch_elision,
+ switch_with_obstructing_local_adds,
+ switch_with_depth,
+ switch_to_dir_with_peg_rev,
+ switch_urls_with_spaces,
+ switch_to_dir_with_peg_rev2,
+ switch_to_root,
+ tolerate_local_mods,
+ tree_conflicts_on_switch_1_1,
+ tree_conflicts_on_switch_1_2,
+ tree_conflicts_on_switch_2_1,
+ tree_conflicts_on_switch_2_2,
+ tree_conflicts_on_switch_3,
+ copy_with_switched_subdir,
+ up_to_old_rev_with_subtree_switched_to_root,
+ different_node_kind,
+ switch_to_spaces,
+ switch_across_replacement,
+ switch_keywords,
+ switch_moves,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/theta.bin b/subversion/tests/cmdline/theta.bin
new file mode 100644
index 0000000..95c0ce3
--- /dev/null
+++ b/subversion/tests/cmdline/theta.bin
Binary files differ
diff --git a/subversion/tests/cmdline/trans_tests.py b/subversion/tests/cmdline/trans_tests.py
new file mode 100755
index 0000000..0cab75e
--- /dev/null
+++ b/subversion/tests/cmdline/trans_tests.py
@@ -0,0 +1,978 @@
+#!/usr/bin/env python
+#
+# trans_tests.py: testing eol conversion and keyword substitution
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os, re, logging, sys
+
+logger = logging.getLogger()
+
+# Our testing module
+import svntest
+from svntest import wc
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+
+
+######################################################################
+# THINGS TO TEST
+#
+# *** Perhaps something like commit_tests.py:make_standard_slew_of_changes
+# is in order here in this file as well. ***
+#
+# status level 1:
+# enable translation, status
+# (now throw local text mods into the picture)
+#
+# commit level 1:
+# enable translation, commit
+# (now throw local text mods into the picture)
+#
+# checkout:
+# checkout stuff with translation enabled
+#
+# status level 2:
+# disable translation, status
+# change newline conversion to different style, status
+# (now throw local text mods into the picture)
+#
+# commit level 2:
+# disable translation, commit
+# change newline conversion to different style, commit
+# (now throw local text mods into the picture)
+# (now throw local text mods with tortured line endings into the picture)
+#
+# update:
+# update files from disabled translation to enabled translation
+# update files from enabled translation to disabled translation
+# update files with newline conversion style changes
+# (now throw local text mods into the picture)
+# (now throw conflicting local property mods into the picture)
+#
+####
+
+
+
+# Paths that the tests test.
+author_rev_unexp_path = ''
+author_rev_exp_path = ''
+bogus_keywords_path = ''
+embd_author_rev_unexp_path = ''
+embd_author_rev_exp_path = ''
+embd_bogus_keywords_path = ''
+
+def check_keywords(actual_kw, expected_kw, name):
+ """A Helper function to compare two keyword lists"""
+
+ if len(actual_kw) != len(expected_kw):
+ logger.warn("Keyword lists are different by size")
+ raise svntest.Failure
+
+ for i in range(0,len(actual_kw)):
+ if actual_kw[i] != expected_kw[i]:
+ logger.warn('%s item %s, Expected: %s', name, i, expected_kw[i][:-1])
+ logger.warn('%s item %s, Got: %s', name, i, actual_kw[i][:-1])
+ raise svntest.Failure
+
+def setup_working_copy(wc_dir, value_len):
+ """Setup a standard test working copy, then create (but do not add)
+ various files for testing translation."""
+
+ global author_rev_unexp_path
+ global author_rev_exp_path
+ global url_unexp_path
+ global url_exp_path
+ global id_unexp_path
+ global id_exp_path
+ global header_unexp_path
+ global header_exp_path
+ global bogus_keywords_path
+ global embd_author_rev_unexp_path
+ global embd_author_rev_exp_path
+ global embd_bogus_keywords_path
+ global fixed_length_keywords_path
+ global id_with_space_path
+ global id_exp_with_dollar_path
+
+ # NOTE: Only using author and revision keywords in tests for now,
+ # since they return predictable substitutions.
+
+ # Unexpanded, expanded, and bogus keywords; sometimes as the only
+ # content of the files, sometimes embedded in non-keyword content.
+ author_rev_unexp_path = os.path.join(wc_dir, 'author_rev_unexp')
+ author_rev_exp_path = os.path.join(wc_dir, 'author_rev_exp')
+ url_unexp_path = os.path.join(wc_dir, 'url_unexp')
+ url_exp_path = os.path.join(wc_dir, 'url_exp')
+ id_unexp_path = os.path.join(wc_dir, 'id_unexp')
+ id_exp_path = os.path.join(wc_dir, 'id_exp')
+ header_unexp_path = os.path.join(wc_dir, 'header_unexp')
+ header_exp_path = os.path.join(wc_dir, 'header_exp')
+ bogus_keywords_path = os.path.join(wc_dir, 'bogus_keywords')
+ embd_author_rev_unexp_path = os.path.join(wc_dir, 'embd_author_rev_unexp')
+ embd_author_rev_exp_path = os.path.join(wc_dir, 'embd_author_rev_exp')
+ embd_bogus_keywords_path = os.path.join(wc_dir, 'embd_bogus_keywords')
+ fixed_length_keywords_path = os.path.join(wc_dir, 'fixed_length_keywords')
+ id_with_space_path = os.path.join(wc_dir, 'id with space')
+ id_exp_with_dollar_path = os.path.join(wc_dir, 'id_exp with_$_sign')
+
+ svntest.main.file_append(author_rev_unexp_path, "$Author$\n$Rev$")
+ svntest.main.file_append(author_rev_exp_path, "$Author: blah $\n$Rev: 0 $")
+ svntest.main.file_append(url_unexp_path, "$URL$")
+ svntest.main.file_append(url_exp_path, "$URL: blah $")
+ svntest.main.file_append(id_unexp_path, "$Id$")
+ svntest.main.file_append(id_exp_path, "$Id: blah $")
+ svntest.main.file_append(header_unexp_path, "$Header$")
+ svntest.main.file_append(header_exp_path, "$Header: blah $")
+ svntest.main.file_append(bogus_keywords_path, "$Arthur$\n$Rev0$")
+ svntest.main.file_append(embd_author_rev_unexp_path,
+ "one\nfish\n$Author$ two fish\n red $Rev$\n fish")
+ svntest.main.file_append(embd_author_rev_exp_path,
+ "blue $Author: blah $ fish$Rev: 0 $\nI fish")
+ svntest.main.file_append(embd_bogus_keywords_path,
+ "you fish $Arthur$then\n we$Rev0$ \n\nchew fish")
+
+ keyword_test_targets = [
+    # User tries to shoot themselves in the foot
+ "$URL::$\n",
+ "$URL:: $\n",
+ "$URL:: $\n",
+ # Following are valid entries
+ "$URL:: $\n",
+ "$URL:: %s $\n" % (' ' * (value_len-1)),
+ "$URL:: %s $\n" % (' ' * value_len),
+ # Check we will clean the truncate marker when the value fits exactly
+ "$URL:: %s#$\n" % ('a' * value_len),
+ "$URL:: %s $\n" % (' ' * (value_len+1)),
+ # These are syntactically wrong
+ "$URL::x%s $\n" % (' ' * value_len),
+ "$URL:: %sx$\n" % (' ' * value_len),
+ "$URL::x%sx$\n" % (' ' * value_len)
+ ]
+
+ for i in keyword_test_targets:
+ svntest.main.file_append(fixed_length_keywords_path, i)
+
+ svntest.main.file_append(id_with_space_path, "$Id$")
+ svntest.main.file_append(id_exp_with_dollar_path,
+ "$Id: id_exp with_$_sign 1 2006-06-10 11:10:00Z jrandom $")
+
+
+### Helper functions for setting/removing properties
+
+# Set the property keyword for PATH. Turn on all possible keywords.
+### todo: Later, take list of keywords to set.
+def keywords_on(path):
+ svntest.actions.run_and_verify_svn(None, [], 'propset',
+ "svn:keywords",
+ "Author Rev Date URL Id Header",
+ path)
+
+# Delete property NAME from versioned PATH in the working copy.
+### todo: Later, take list of keywords to remove from the propval?
+def keywords_off(path):
+ svntest.actions.run_and_verify_svn(None, [], 'propdel',
+ "svn:keywords", path)
+
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+#----------------------------------------------------------------------
+
+### This test is known to fail when Subversion is built in very deep
+### directory structures, caused by SVN_KEYWORD_MAX_LEN being defined
+### as 255.
+def keywords_from_birth(sbox):
+ "commit new files with keywords active from birth"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ canonical_repo_url = svntest.main.canonicalize_url(sbox.repo_url)
+ if canonical_repo_url[-1:] != '/':
+ url_expand_test_data = canonical_repo_url + '/fixed_length_keywords'
+ else:
+ url_expand_test_data = canonical_repo_url + 'fixed_length_keywords'
+
+ setup_working_copy(wc_dir, len(url_expand_test_data))
+
+ # Add all the files
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'author_rev_unexp' : Item(status='A ', wc_rev=0),
+ 'author_rev_exp' : Item(status='A ', wc_rev=0),
+ 'url_unexp' : Item(status='A ', wc_rev=0),
+ 'url_exp' : Item(status='A ', wc_rev=0),
+ 'id_unexp' : Item(status='A ', wc_rev=0),
+ 'id_exp' : Item(status='A ', wc_rev=0),
+ 'header_unexp' : Item(status='A ', wc_rev=0),
+ 'header_exp' : Item(status='A ', wc_rev=0),
+ 'bogus_keywords' : Item(status='A ', wc_rev=0),
+ 'embd_author_rev_unexp' : Item(status='A ', wc_rev=0),
+ 'embd_author_rev_exp' : Item(status='A ', wc_rev=0),
+ 'embd_bogus_keywords' : Item(status='A ', wc_rev=0),
+ 'fixed_length_keywords' : Item(status='A ', wc_rev=0),
+ 'id with space' : Item(status='A ', wc_rev=0),
+ 'id_exp with_$_sign' : Item(status='A ', wc_rev=0),
+ })
+
+ svntest.main.run_svn(None, 'add', author_rev_unexp_path)
+ svntest.main.run_svn(None, 'add', author_rev_exp_path)
+ svntest.main.run_svn(None, 'add', url_unexp_path)
+ svntest.main.run_svn(None, 'add', url_exp_path)
+ svntest.main.run_svn(None, 'add', id_unexp_path)
+ svntest.main.run_svn(None, 'add', id_exp_path)
+ svntest.main.run_svn(None, 'add', header_unexp_path)
+ svntest.main.run_svn(None, 'add', header_exp_path)
+ svntest.main.run_svn(None, 'add', bogus_keywords_path)
+ svntest.main.run_svn(None, 'add', embd_author_rev_unexp_path)
+ svntest.main.run_svn(None, 'add', embd_author_rev_exp_path)
+ svntest.main.run_svn(None, 'add', embd_bogus_keywords_path)
+ svntest.main.run_svn(None, 'add', fixed_length_keywords_path)
+ svntest.main.run_svn(None, 'add', id_with_space_path)
+ svntest.main.run_svn(None, 'add', id_exp_with_dollar_path)
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Add the keyword properties.
+ keywords_on(author_rev_unexp_path)
+ keywords_on(url_unexp_path)
+ keywords_on(url_exp_path)
+ keywords_on(id_unexp_path)
+ keywords_on(id_exp_path)
+ keywords_on(header_unexp_path)
+ keywords_on(header_exp_path)
+ keywords_on(embd_author_rev_exp_path)
+ keywords_on(fixed_length_keywords_path)
+ keywords_on(id_with_space_path)
+ keywords_on(id_exp_with_dollar_path)
+
+ # Commit.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'author_rev_unexp' : Item(verb='Adding'),
+ 'author_rev_exp' : Item(verb='Adding'),
+ 'url_unexp' : Item(verb='Adding'),
+ 'url_exp' : Item(verb='Adding'),
+ 'id_unexp' : Item(verb='Adding'),
+ 'id_exp' : Item(verb='Adding'),
+ 'header_unexp' : Item(verb='Adding'),
+ 'header_exp' : Item(verb='Adding'),
+ 'bogus_keywords' : Item(verb='Adding'),
+ 'embd_author_rev_unexp' : Item(verb='Adding'),
+ 'embd_author_rev_exp' : Item(verb='Adding'),
+ 'embd_bogus_keywords' : Item(verb='Adding'),
+ 'fixed_length_keywords' : Item(verb='Adding'),
+ 'id with space' : Item(verb='Adding'),
+ 'id_exp with_$_sign' : Item(verb='Adding'),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ None)
+
+ # Make sure the unexpanded URL keyword got expanded correctly.
+ fp = open(url_unexp_path, 'r')
+ lines = fp.readlines()
+ if not ((len(lines) == 1)
+ and (re.match("\$URL: (http|https|file|svn|svn\\+ssh)://",
+ lines[0]))):
+ logger.warn("URL expansion failed for %s", url_unexp_path)
+ raise svntest.Failure
+ fp.close()
+
+ # Make sure the preexpanded URL keyword got reexpanded correctly.
+ fp = open(url_exp_path, 'r')
+ lines = fp.readlines()
+ if not ((len(lines) == 1)
+ and (re.match("\$URL: (http|https|file|svn|svn\\+ssh)://",
+ lines[0]))):
+ logger.warn("URL expansion failed for %s", url_exp_path)
+ raise svntest.Failure
+ fp.close()
+
+ # Make sure the unexpanded Id keyword got expanded correctly.
+ fp = open(id_unexp_path, 'r')
+ lines = fp.readlines()
+ if not ((len(lines) == 1)
+ and (re.match("\$Id: id_unexp", lines[0]))):
+ logger.warn("Id expansion failed for %s", id_exp_path)
+ raise svntest.Failure
+ fp.close()
+
+ # Make sure the preexpanded Id keyword got reexpanded correctly.
+ fp = open(id_exp_path, 'r')
+ lines = fp.readlines()
+ if not ((len(lines) == 1)
+ and (re.match("\$Id: id_exp", lines[0]))):
+ logger.warn("Id expansion failed for %s", id_exp_path)
+ raise svntest.Failure
+ fp.close()
+
+ # Make sure the unexpanded Header keyword got expanded correctly.
+ fp = open(header_unexp_path, 'r')
+ lines = fp.readlines()
+ if not ((len(lines) == 1)
+ and (re.match("\$Header: (https?|file|svn|svn\\+ssh)://.* jrandom",
+ lines[0]))):
+ logger.warn("Header expansion failed for %s", header_unexp_path)
+ raise svntest.Failure
+ fp.close()
+
+ # Make sure the preexpanded Header keyword got reexpanded correctly.
+ fp = open(header_exp_path, 'r')
+ lines = fp.readlines()
+ if not ((len(lines) == 1)
+ and (re.match("\$Header: (https?|file|svn|svn\\+ssh)://.* jrandom",
+ lines[0]))):
+ logger.warn("Header expansion failed for %s", header_exp_path)
+ raise svntest.Failure
+ fp.close()
+
+ # Check fixed length keywords.
+ kw_workingcopy = [
+ '$URL::$\n',
+ '$URL:: $\n',
+ '$URL:: $\n',
+ '$URL:: %s#$\n' % url_expand_test_data[0:1],
+ '$URL:: %s#$\n' % url_expand_test_data[:-1],
+ '$URL:: %s $\n' % url_expand_test_data,
+ '$URL:: %s $\n' % url_expand_test_data,
+ '$URL:: %s $\n'% url_expand_test_data,
+ '$URL::x%s $\n' % (' ' * len(url_expand_test_data)),
+ '$URL:: %sx$\n' % (' ' * len(url_expand_test_data)),
+ '$URL::x%sx$\n' % (' ' * len(url_expand_test_data))
+ ]
+
+ fp = open(fixed_length_keywords_path, 'r')
+ actual_workingcopy_kw = fp.readlines()
+ fp.close()
+ check_keywords(actual_workingcopy_kw, kw_workingcopy, "working copy")
+
+ # Check text base for fixed length keywords.
+ kw_textbase = [
+ '$URL::$\n',
+ '$URL:: $\n',
+ '$URL:: $\n',
+ '$URL:: $\n',
+ '$URL:: %s $\n' % (' ' * len(url_expand_test_data[:-1])),
+ '$URL:: %s $\n' % (' ' * len(url_expand_test_data)),
+ '$URL:: %s $\n' % (' ' * len(url_expand_test_data)),
+ '$URL:: %s $\n'% (' ' * len(url_expand_test_data)),
+ '$URL::x%s $\n' % (' ' * len(url_expand_test_data)),
+ '$URL:: %sx$\n' % (' ' * len(url_expand_test_data)),
+ '$URL::x%sx$\n' % (' ' * len(url_expand_test_data))
+ ]
+
+ fp = open(svntest.wc.text_base_path(fixed_length_keywords_path), 'r')
+ actual_textbase_kw = fp.readlines()
+ fp.close()
+ check_keywords(actual_textbase_kw, kw_textbase, "text base")
+
+ # Check the Id keyword for filename with spaces.
+ fp = open(id_with_space_path, 'r')
+ lines = fp.readlines()
+ if not ((len(lines) == 1)
+ and (re.match("\$Id: .*id with space", lines[0]))):
+ logger.warn("Id expansion failed for %s", id_with_space_path)
+ raise svntest.Failure
+ fp.close()
+
+ # Check the Id keyword for filename with_$_signs.
+ fp = open(id_exp_with_dollar_path, 'r')
+ lines = fp.readlines()
+ if not ((len(lines) == 1)
+ and (re.match("\$Id: .*id_exp with_\$_sign [^$]* jrandom \$",
+ lines[0]))):
+ logger.warn("Id expansion failed for %s", id_exp_with_dollar_path)
+
+ raise svntest.Failure
+ fp.close()
+
+#----------------------------------------------------------------------
+
+#def enable_translation(sbox):
+# "enable translation, check status, commit"
+
+ # TODO: Turn on newline conversion and/or keyword substitution for all
+ # sorts of files, with and without local mods, and verify that
+ # status shows the right stuff. The, commit those mods.
+
+#----------------------------------------------------------------------
+
+#def checkout_translated():
+# "checkout files that have translation enabled"
+
+ # TODO: Checkout a tree which contains files with translation
+ # enabled.
+
+#----------------------------------------------------------------------
+
+#def disable_translation():
+# "disable translation, check status, commit"
+
+ # TODO: Disable translation on files which have had it enabled,
+ # with and without local mods, check status, and commit.
+
+#----------------------------------------------------------------------
+
+# Regression test for bug discovered by Vladimir Prus <ghost@cs.msu.csu>.
+# This is a slight rewrite of his test, to use the run_and_verify_* API.
+# This is for issue #631.
+
+@Issue(631)
+def update_modified_with_translation(sbox):
+ "update modified file with eol-style 'native'"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Replace contents of rho and set eol translation to 'native'
+ rho_path = os.path.join(wc_dir, 'A', 'D', 'G', 'rho')
+ svntest.main.file_write(rho_path, "1\n2\n3\n4\n5\n6\n7\n8\n9\n")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'svn:eol-style', 'native',
+ rho_path)
+
+ # Create expected output and status trees of a commit.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/rho' : Item(verb='Sending'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ # rho has props
+ expected_status.tweak('A/D/G/rho', wc_rev=2, status=' ')
+
+ # Commit revision 2: it has the new rho.
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status,
+ [], rho_path)
+
+ # Change rho again
+ svntest.main.file_write(rho_path, "1\n2\n3\n4\n4.5\n5\n6\n7\n8\n9\n")
+
+ # Commit revision 3
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/D/G/rho', wc_rev=3, status=' ')
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status,
+ [], rho_path)
+
+ # Locally modify rho again.
+ svntest.main.file_write(rho_path, "1\n2\n3\n4\n4.5\n5\n6\n7\n8\n9\n10\n")
+
+ # Prepare trees for an update to rev 1.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G/rho' : Item(status='CU'),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A/D/G/rho',
+ contents="\n".join(["<<<<<<< .mine",
+ "1",
+ "2",
+ "3",
+ "4",
+ "4.5",
+ "5",
+ "6",
+ "7",
+ "8",
+ "9",
+ "10",
+ "||||||| .r3",
+ "1",
+ "2",
+ "3",
+ "4",
+ "4.5",
+ "5",
+ "6",
+ "7",
+ "8",
+ "9",
+ "=======",
+ "This is the file 'rho'.",
+ ">>>>>>> .r1",
+ ""]))
+
+ # Updating back to revision 1 should not error; the merge should
+ # work, with eol-translation turned on.
+ extra_files = ['rho.r1', 'rho.r3', 'rho.mine']
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ None,
+ [], False,
+ '-r', '1', wc_dir,
+ extra_files=extra_files)
+
+
+#----------------------------------------------------------------------
+
+# Regression test for issue #1085, whereby setting the eol-style to a
+# fixed platform-incorrect value on a file whose line endings are
+# platform-correct causes repository insanity (the eol-style prop
+# claims one line ending style, the file is in another). This test
+# assumes that this can be tested by verifying that a) new file
+# contents are transmitted to the server during commit, and b) that
+# after the commit, the file and its text-base have been changed to
+# have the new line-ending style.
+@Issue(1085)
+def eol_change_is_text_mod(sbox):
+ "committing eol-style change forces text send"
+
+ sbox.build()
+
+ wc_dir = sbox.wc_dir
+
+ # add a new file to the working copy.
+ foo_path = os.path.join(wc_dir, 'foo')
+ f = open(foo_path, 'wb')
+ if svntest.main.windows:
+ f.write(b"1\r\n2\r\n3\r\n4\r\n5\r\n6\r\n7\r\n8\r\n9\r\n")
+ else:
+ f.write(b"1\n2\n3\n4\n5\n6\n7\n8\n9\n")
+ f.close()
+
+ # commit the file
+ svntest.actions.run_and_verify_svn(None, [], 'add', foo_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg',
+ foo_path)
+
+ if svntest.main.windows:
+ svntest.actions.run_and_verify_svn(None, [], 'propset',
+ 'svn:eol-style', 'LF', foo_path)
+ else:
+ svntest.actions.run_and_verify_svn(None, [], 'propset',
+ 'svn:eol-style', 'CRLF', foo_path)
+
+ # check 1: did new contents get transmitted?
+ expected_output = ["Sending " + foo_path + "\n",
+ "Transmitting file data .done\n",
+ "Committing transaction...\n",
+ "Committed revision 3.\n"]
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'ci', '-m', 'log msg', foo_path)
+
+ # check 2: do the files have the right contents now?
+ contents = open(foo_path, 'rb').read()
+ if svntest.main.windows:
+ if contents != b"1\n2\n3\n4\n5\n6\n7\n8\n9\n":
+ raise svntest.Failure
+ else:
+ if contents != b"1\r\n2\r\n3\r\n4\r\n5\r\n6\r\n7\r\n8\r\n9\r\n":
+ raise svntest.Failure
+
+ foo_base_path = svntest.wc.text_base_path(foo_path)
+ base_contents = open(foo_base_path, 'rb').read()
+ if contents != base_contents:
+ raise svntest.Failure
+
+#----------------------------------------------------------------------
+# Regression test for issue #1151. A single file in a directory
+# didn't get keywords expanded on checkout.
+@Issue(1151)
+def keyword_expanded_on_checkout(sbox):
+ "keyword expansion for lone file in directory"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # The bug didn't occur if there were multiple files in the
+  # directory, so set up an empty directory.
+ Z_path = os.path.join(wc_dir, 'Z')
+ svntest.actions.run_and_verify_svn(None, [], 'mkdir', Z_path)
+
+ # Add the file that has the keyword to be expanded
+ url_path = os.path.join(Z_path, 'url')
+ svntest.main.file_append(url_path, "$URL$")
+ svntest.actions.run_and_verify_svn(None, [], 'add', url_path)
+ keywords_on(url_path)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ci', '-m', 'log msg', wc_dir)
+
+ other_wc_dir = sbox.add_wc_path('other')
+ other_url_path = os.path.join(other_wc_dir, 'Z', 'url')
+ svntest.actions.run_and_verify_svn(None, [], 'checkout',
+ sbox.repo_url,
+ other_wc_dir)
+
+ # Check keyword got expanded (and thus the mkdir, add, ps, commit
+ # etc. worked)
+ fp = open(other_url_path, 'r')
+ lines = fp.readlines()
+ if not ((len(lines) == 1)
+ and (re.match("\$URL: (http|https|file|svn|svn\\+ssh)://",
+ lines[0]))):
+ logger.warn("URL expansion failed for %s", other_url_path)
+ raise svntest.Failure
+ fp.close()
+
+
+#----------------------------------------------------------------------
+def cat_keyword_expansion(sbox):
+ "keyword expanded on cat"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+ lambda_path = os.path.join(wc_dir, 'A', 'B', 'lambda')
+
+ # Set up A/mu to do $Rev$ keyword expansion
+ svntest.main.file_append(mu_path , "$Rev$\n$Author$")
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'svn:keywords', 'Rev Author',
+ mu_path)
+
+ expected_output = wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+
+ # Change the author to value which will get truncated on expansion
+ full_author = "x" * 400
+ key_author = "x" * 244
+ svntest.actions.enable_revprop_changes(sbox.repo_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', '--revprop', '-r2',
+ 'svn:author', full_author,
+ sbox.wc_dir)
+ svntest.actions.run_and_verify_svn([ full_author ], [],
+ 'propget', '--revprop', '-r2',
+ 'svn:author', '--no-newline',
+ sbox.wc_dir)
+
+ # Make another commit so that the last changed revision for A/mu is
+ # not HEAD.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'bar', lambda_path)
+ expected_output = wc.State(wc_dir, {
+ 'A/B/lambda' : Item(verb='Sending'),
+ })
+ expected_status.tweak('A/B/lambda', wc_rev=3)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+
+ # At one stage the keywords were expanded to values for the requested
+  # revision, not to those of the committed revision
+ svntest.actions.run_and_verify_svn([ "This is the file 'mu'.\n",
+ "$Rev: 2 $\n",
+ "$Author: " + key_author + " $"], [],
+ 'cat', '-r', 'HEAD', mu_path)
+
+
+#----------------------------------------------------------------------
+def copy_propset_commit(sbox):
+ "copy, propset svn:eol-style, commit"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+ mu2_path = os.path.join(wc_dir, 'A', 'mu2')
+
+ # Copy and propset
+ svntest.actions.run_and_verify_svn(None, [], 'copy', mu_path, mu2_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'svn:eol-style', 'native',
+ mu2_path)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/mu2' : Item(status='A ', wc_rev='-', copied='+')
+ })
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Commit, at one stage this dumped core
+ expected_output = wc.State(wc_dir, {
+ 'A/mu2' : Item(verb='Adding'),
+ })
+ expected_status.tweak('A/mu2', status=' ', wc_rev=2, copied=None)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+
+#----------------------------------------------------------------------
+# Create a greek tree, commit a keyword into one file,
+# then commit a keyword property (i.e., turn on keywords), then
+# try to check out head somewhere else.
+# This should not cause a seg fault
+def propset_commit_checkout_nocrash(sbox):
+ "propset, commit, check out into another wc"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+
+ # Put a keyword in A/mu, commit
+ svntest.main.file_append(mu_path, "$Rev$")
+ expected_output = wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=2)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+
+ # Set property to do keyword expansion on A/mu, commit.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'svn:keywords', 'Rev', mu_path)
+ expected_output = wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', wc_rev=3)
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+
+ # Check out into another wc dir
+ other_wc_dir = sbox.add_wc_path('other')
+ mu_other_path = os.path.join(other_wc_dir, 'A', 'mu')
+
+ svntest.actions.run_and_verify_svn(None, [], 'checkout',
+ sbox.repo_url,
+ other_wc_dir)
+
+ mu_other_contents = open(mu_other_path).read()
+ if mu_other_contents != "This is the file 'mu'.\n$Rev: 3 $":
+ logger.warn("'%s' does not have the expected contents", mu_other_path)
+ raise svntest.Failure
+
+
+#----------------------------------------------------------------------
+# Add the keyword property to a file, svn revert the file
+# This should not display any error message
+def propset_revert_noerror(sbox):
+ "propset, revert"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ mu_path = os.path.join(wc_dir, 'A', 'mu')
+
+ # Set the Rev keyword for the mu file
+ # could use the keywords_on()/keywords_off() functions to
+ # set/del all svn:keywords
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'svn:keywords', 'Rev', mu_path)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/mu', status=' M')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # Revert the propset
+ svntest.actions.run_and_verify_svn(None, [], 'revert', mu_path)
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+def props_only_file_update(sbox):
+ "retranslation occurs on a props-only update"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ iota_path = os.path.join(wc_dir, 'iota')
+ content = ["This is the file 'iota'.\n",
+ "$Author$\n",
+ ]
+ content_expanded = ["This is the file 'iota'.\n",
+ "$Author: jrandom $\n",
+ ]
+
+ # Create r2 with iota's contents and svn:keywords modified
+ open(iota_path, 'w').writelines(content)
+ svntest.main.run_svn(None, 'propset', 'svn:keywords', 'Author', iota_path)
+
+ expected_output = wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', wc_rev=2)
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Create r3 that drops svn:keywords
+
+ # put the content back to its untranslated form
+ open(iota_path, 'w').writelines(content)
+
+ svntest.main.run_svn(None, 'propdel', 'svn:keywords', iota_path)
+
+ expected_status.tweak('iota', wc_rev=3)
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Now, go back to r2. iota should have the Author keyword expanded.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota', contents=''.join(content_expanded))
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ None, None, expected_status,
+ [], False,
+ wc_dir, '-r', '2')
+
+ if open(iota_path).read() != ''.join(content_expanded):
+ raise svntest.Failure("$Author$ is not expanded in 'iota'")
+
+ # Update to r3. this should retranslate iota, dropping the keyword expansion
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota', contents=''.join(content))
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ None, expected_disk, expected_status)
+
+ if open(iota_path).read() != ''.join(content):
+ raise svntest.Failure("$Author$ is not contracted in 'iota'")
+
+ # We used to leave some temporary files around. Make sure that we don't.
+ temps = os.listdir(os.path.join(wc_dir, svntest.main.get_admin_name(), 'tmp'))
+ if os.path.exists(os.path.join(wc_dir, svntest.main.get_admin_name(),
+ 'tmp', 'props')):
+ temps.remove('prop-base')
+ temps.remove('props')
+ if temps:
+ logger.warn('Temporary files leftover: %s', (', '.join(temps),))
+ raise svntest.Failure
+
+@XFail()
+@Issues(4327)
+def autoprops_inconsistent_eol(sbox):
+ "able to handle inconsistent eols on add"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ text = 'line with NL\n' + \
+ 'line with CR\r' + \
+ 'line with CRLF\r\n' + \
+ 'line with LFCR (or is that not a line? ;-)\n\r'
+
+ # Compensate for python smartness
+ if sys.platform == 'win32':
+ expected_text = text.replace('\r\n', '\n')
+ else:
+ expected_text = text
+
+ sbox.simple_add_text(text, 'add.c')
+ sbox.simple_add_text(text, 'add-force.c')
+
+ svntest.actions.run_and_verify_svn(None, '.*inconsistent newlines.*',
+ 'ps', 'svn:eol-style', 'native',
+ sbox.ospath('add.c'))
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'ps', 'svn:eol-style', 'native', '--force',
+ sbox.ospath('add.c'))
+
+ expected_disk = svntest.main.greek_state.copy()
+
+ expected_disk.add({
+ 'add-force.c' : Item(contents=expected_text),
+ 'add.c' : Item(contents=expected_text),
+ })
+
+ # Verify that both add and add-force haven't been changed
+ svntest.actions.verify_disk(wc_dir, expected_disk)
+
+ sbox.simple_propset('svn:auto-props', '*.c = svn:eol-style=native', '')
+
+
+ svntest.main.file_write(sbox.ospath('auto.c'), text, mode='wb')
+
+ expected_output = ['A %s\n' % sbox.ospath('auto.c')]
+
+ # Fails with svn: E200009: File '.*auto.c' has inconsistent newlines
+ svntest.actions.run_and_verify_svn(expected_output,
+ [], 'add', sbox.ospath('auto.c'))
+
+@XFail()
+@Issues(4327)
+def autoprops_inconsistent_mime(sbox):
+ "able to handle inconsistent mime on add"
+
+ sbox.build(read_only = True)
+
+ sbox.simple_propset('svn:auto-props',
+ '*.c = svn:eol-style=native\n'
+ 'c.* = svn:mime-type=application/octet-stream', '')
+
+ sbox.simple_append('c.iota.c', '')
+
+ expected_output = ['A %s\n' % sbox.ospath('c.iota.c')]
+
+ # Fails with svn: E200009: File '.*c.iota.c' has binary mime type property
+ svntest.actions.run_and_verify_svn(expected_output,
+ [], 'add', sbox.ospath('c.iota.c'))
+
+
+########################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ keywords_from_birth,
+ # enable_translation,
+ # checkout_translated,
+ # disable_translation,
+ update_modified_with_translation,
+ eol_change_is_text_mod,
+ keyword_expanded_on_checkout,
+ cat_keyword_expansion,
+ copy_propset_commit,
+ propset_commit_checkout_nocrash,
+ propset_revert_noerror,
+ props_only_file_update,
+ autoprops_inconsistent_eol,
+ autoprops_inconsistent_mime,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/tree_conflict_tests.py b/subversion/tests/cmdline/tree_conflict_tests.py
new file mode 100755
index 0000000..b3335b2
--- /dev/null
+++ b/subversion/tests/cmdline/tree_conflict_tests.py
@@ -0,0 +1,1544 @@
+#!/usr/bin/env python
+#
+# tree_conflict_tests.py: testing tree-conflict cases.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import sys, re, os, stat, traceback
+
+# Our testing module
+import svntest
+from svntest import main, wc, verify
+from svntest.actions import run_and_verify_svn
+from svntest.actions import run_and_verify_commit
+from svntest.actions import run_and_verify_resolved
+from svntest.actions import run_and_verify_update
+from svntest.actions import run_and_verify_status
+from svntest.actions import run_and_verify_info
+from svntest.actions import get_virginal_state
+import shutil
+import logging
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+AnyOutput = svntest.verify.AnyOutput
+RegexOutput = svntest.verify.RegexOutput
+RegexListOutput = svntest.verify.RegexListOutput
+UnorderedOutput = svntest.verify.UnorderedOutput
+AlternateOutput = svntest.verify.AlternateOutput
+
+logger = logging.getLogger()
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+
+#----------------------------------------------------------------------
+
+# The tests in this file are for cases where a tree conflict is to be raised.
+# (They do not check that conflicts are not raised in other cases.)
+
+# Note: Delete, Replace and Move are presently tested together but probably
+# will eventually need to be tested separately.
+
+# A tree conflict being raised means:
+# - the conflict is reported initially
+# - the conflict is persistently visible
+# - the conflict blocks commits until resolved
+# - the conflict blocks (some?) further merges
+# Desired:
+# - interactive conflict resolution
+
+# A "tree conflict on file P/F" means:
+# - the operation reports action code "C" on path P/F
+# - "svn status" reports status code "C" on path P/F
+# - "svn info" reports details of the conflict on path P/F
+# - "svn commit" fails if the user-requested targets include path P/F
+# - "svn merge/update/switch" fails if it tries to modify P/F in any way
+
+# A "tree conflict on dir P/D" means:
+# - the operation reports action code "C" on path P/D
+# - "svn status" reports status code "C" on path P/D
+# - "svn info" reports details of the conflict on P/D
+# - "svn commit" fails if it includes any part of the P/D sub-tree
+# - "svn merge/up/sw" fails if it modifies any part of the P/D sub-tree
+
+#----------------------------------------------------------------------
+
+# Two sets of paths. The paths to be used for the destination of a copy
+# or move must differ between the incoming change and the local mods,
+# otherwise scenarios involving a move onto a move would conflict on the
+# destination node as well as on the source, and we only want to be testing
+# one thing at a time in most tests.
+def incoming_paths(root_dir, parent_dir):
+ """Create a set of paths in which the victims of tree conflicts are
+ children of PARENT_DIR. ROOT_DIR should be a shallower directory
+ in which items "F1" and "D1" can pre-exist and be shared across
+ multiple parent dirs."""
+ return {
+ 'F1' : os.path.join(root_dir, "F1"),
+ 'F' : os.path.join(parent_dir, "F"),
+ 'F2' : os.path.join(parent_dir, "F2-in"),
+ 'F3' : os.path.join(root_dir, "F3"),
+ 'D1' : os.path.join(root_dir, "D1"),
+ 'D' : os.path.join(parent_dir, "D"),
+ 'D2' : os.path.join(parent_dir, "D2-in"),
+ }
+def localmod_paths(root_dir, parent_dir):
+ """Create a set of paths in which the victims of tree conflicts are
+ children of PARENT_DIR. ROOT_DIR should be a shallower directory
+ in which items "F1" and "D1" can pre-exist and be shared across
+ multiple parent dirs."""
+ return {
+ 'F1' : os.path.join(root_dir, "F1"),
+ 'F' : os.path.join(parent_dir, "F"),
+ 'F2' : os.path.join(parent_dir, "F2-local"),
+ 'F3' : os.path.join(root_dir, "F3"),
+ 'D1' : os.path.join(root_dir, "D1"),
+ 'D' : os.path.join(parent_dir, "D"),
+ 'D2' : os.path.join(parent_dir, "D2-local"),
+ }
+
+# Perform the action MODACTION on the WC items given by PATHS. The
+# available actions can be seen within this function.
+def modify(modaction, paths, is_init=True):
+ F1 = paths['F1'] # existing file to copy from
+ F3 = paths['F3'] # existing file to copy from
+ F = paths['F'] # target file
+ F2 = paths['F2'] # non-existing file to copy/move to
+ D1 = paths['D1'] # existing dir to copy from
+ D = paths['D'] # target dir
+ D2 = paths['D2'] # non-existing dir to copy/move to
+
+ # print " Mod: '" + modaction + "' '" + P + "'"
+
+ if modaction == 'ft': # file text-mod
+ assert os.path.exists(F)
+ main.file_append(F, "This is a text-mod of file F.\n")
+ elif modaction == 'fP': # file Prop-mod
+ assert os.path.exists(F)
+ main.run_svn(None, 'pset', 'fprop1', 'A prop set on file F.', F)
+ elif modaction == 'dP': # dir Prop-mod
+ assert os.path.exists(D)
+ main.run_svn(None, 'pset', 'dprop1', 'A prop set on dir D.', D)
+ elif modaction == 'fD': # file Delete
+ assert os.path.exists(F)
+ main.run_svn(None, 'del', F)
+ elif modaction == 'dD': # dir Delete
+ assert os.path.exists(D)
+ main.run_svn(None, 'del', D)
+ elif modaction == 'fA': # file Add (new)
+ assert os.path.exists(F)
+ main.run_svn(None, 'add', F)
+ main.run_svn(None, 'pset', 'fprop2', 'A prop of added file F.', F)
+ elif modaction == 'dA': # dir Add (new)
+ assert os.path.exists(D)
+ main.run_svn(None, 'add', D)
+ main.run_svn(None, 'pset', 'dprop2', 'A prop of added dir D.', D)
+ elif modaction == 'fC': # file Copy (from F1)
+ if is_init:
+ main.run_svn(None, 'copy', F1, F)
+ else:
+ main.run_svn(None, 'copy', F3, F)
+ elif modaction == 'dC': # dir Copy (from D1)
+ main.run_svn(None, 'copy', D1, D)
+ elif modaction == 'fM': # file Move (to F2)
+ main.run_svn(None, 'rename', F, F2)
+ elif modaction == 'dM': # dir Move (to D2)
+ main.run_svn(None, 'rename', D, D2)
+ elif modaction == 'fa': # file add (new) on disk
+ assert not os.path.exists(F)
+ main.file_write(F, "This is file F.\n")
+ elif modaction == 'da': # dir add (new) on disk
+ assert not os.path.exists(D)
+ os.mkdir(D)
+ elif modaction == 'fd': # file delete from disk
+ assert os.path.exists(F)
+ os.remove(F)
+ elif modaction == 'dd': # dir delete from disk
+ assert os.path.exists(D)
+ os.remove(D)
+ else:
+ raise Exception("unknown modaction: '" + modaction + "'")
+
+#----------------------------------------------------------------------
+
+# Lists of change scenarios
+#
+# Each scenario expresses a change in terms of the client commands
+# (including "move") that create that change. The change may exist in a
+# repository, or may be applied to a WC by an "update" or "switch" or
+# "merge", or may exist in a WC as a local modification.
+#
+# In addition, each scenario may include some local-modification actions
+# that, if performed on the WC after this change, will make the disk state
+# incompatible with the version-controlled state - e.g. by deleting a file
+# that metadata says is present or vice-versa.
+
+# File names:
+# F1 = any existing file
+# F3 = any existing file
+# F = the file-path being acted on
+# F2 = any non-existent file-path
+# D1 = any existing dir
+# D = the dir-path being acted on
+# D2 = any non-existent dir-path
+# P = the parent dir of F and of D
+
+# Format of a change scenario:
+# (
+# list of actions to create the file/directory to be changed later,
+# list of actions to make the change
+# )
+
+# Action lists to initialise the repository with a file or directory absent
+# or present, to provide the starting point from which we perform the changes
+# that are to be tested.
+absent_f = []
+absent_d = []
+create_f = ['fa','fA']
+create_d = ['da','dA']
+
+# Scenarios that start with no existing versioned item
+#
+# CREATE:
+# file-add(F) = add-new(F) or copy(F1,F)(and modify?)
+# dir-add(D) = add-new(D)(deep?) or copy(D1,D)(and modify?)
+
+f_adds = [
+ #( absent_f, ['fa','fA'] ), ### local add-without-history: not a tree conflict
+ ( absent_f, ['fC'] ),
+ ( absent_f, ['fC','ft'] ), ### Fails because update seems to assume that the
+ ### local file is unmodified (same as issue 1736?).
+ #( absent_f, ['fC','fP'] ), # don't test all combinations, just because it's slow
+]
+d_adds = [
+ #( absent_d, ['da','dA'] ), ### local add-without-history: not a tree conflict
+ ( absent_d, ['dC'] ),
+ #( absent_d, ['dC','dP'] ), # not yet
+]
+
+# Scenarios that start with an existing versioned item
+#
+# GO-AWAY: node is no longer at the path where it was.
+# file-del(F) = del(F)
+# file-move(F) = move(F,F2)
+# dir-del(D) = del(D) or move(D,D2)
+# Note: file-move(F) does not conflict with incoming edit
+#
+# REPLACE: node is no longer at the path where it was, but another node is.
+# file-rpl(F) = file-del(F) + file-add(F)
+# dir-rpl(D) = dir-del(D) + dir-add(D)
+# Note: Schedule replace-by-different-node-type is unsupported in WC.
+#
+# MODIFY:
+# file-mod(F) = text-mod(F) and/or prop-mod(F)
+# dir-mod(D) = prop-mod(D) and/or file-mod(child-F) and/or dir-mod(child-D)
+
+f_dels = [
+ ( create_f, ['fD'] ),
+]
+
+f_moves = [
+ ( create_f, ['fM'] ),
+]
+
+d_dels = [
+ ( create_d, ['dD'] ),
+]
+
+d_moves = [
+ ( create_d, ['dM'] ),
+]
+
+f_rpls = [
+ # Don't test all possible combinations, just because it's slow
+ ( create_f, ['fD','fa','fA'] ),
+ ( create_f, ['fM','fC'] ),
+]
+d_rpls = [
+ # We're not testing directory replacements yet.
+ # Don't test all possible combinations, just because it's slow
+ #( create_d, ['dD','dA'] ),
+ #( create_d, ['dM','dC'] ),
+ # Note that directory replacement differs from file replacement: the
+ # schedule-delete dir is still on disk and is re-used for the re-addition.
+]
+f_rpl_d = [
+ # File replaced by directory: not yet testable
+]
+d_rpl_f = [
+ # Directory replaced by file: not yet testable
+]
+
+f_mods = [
+ ( create_f, ['ft'] ),
+ ( create_f, ['fP'] ),
+ #( create_f, ['ft','fP'] ), # don't test all combinations, just because it's slow
+]
+d_mods = [
+ ( create_d, ['dP'] ),
+ # These test actions for operating on a child of the directory are not yet implemented:
+ #( create_d, ['f_fA'] ),
+ #( create_d, ['f_ft'] ),
+ #( create_d, ['f_fP'] ),
+ #( create_d, ['f_fD'] ),
+ #( create_d, ['d_dP'] ),
+ #( create_d, ['d_f_fA'] ),
+]
+
+#----------------------------------------------------------------------
+
+# Set up all of the given SCENARIOS in their respective unique paths.
+# This means committing their initialisation actions in r2, and then
+# committing their change actions in r3 (assuming the repos was at r1).
+# (See also the somewhat related svntest.actions.build_greek_tree_conflicts()
+# and tree-conflicts tests using deep_trees in various other .py files.)
+# SCENARIOS is a list of scenario tuples: (init_actions, change_actions).
+# WC_DIR is a local path of an existing WC.
+# BR_DIR is a nonexistent path within WC_DIR.
+# BR_DIR and any necessary parent directories will be created, and then the
+# scenario will be set up within it, and committed to the repository.
+def set_up_repos(wc_dir, br_dir, scenarios):
+
+  # Ensure the branch parent dir exists as a versioned directory.
+  if not os.path.exists(br_dir):
+    main.run_svn(None, "mkdir", "--parents", br_dir)
+
+  # create the file F1 and dir D1 which the tests regard as pre-existing
+  paths = incoming_paths(wc_dir, wc_dir) # second arg is bogus but unimportant
+  F1 = paths['F1'] # existing file to copy from
+  F3 = paths['F3'] # existing file to copy from
+  main.file_write(F1, "This is initially file F1.\n")
+  main.file_write(F3, "This is initially file F3.\n")
+  main.run_svn(None, 'add', F1, F3)
+  D1 = paths['D1'] # existing dir to copy from
+  main.run_svn(None, 'mkdir', D1)
+
+  # create the initial parent dirs, and each file or dir unless to-be-added
+  # Each scenario lives in its own subdir of BR_DIR, named after the
+  # joined action names so paths are unique per scenario.
+  for init_mods, action_mods in scenarios:
+    path = "_".join(action_mods)
+    P = os.path.join(br_dir, path) # parent of items to be tested
+    main.run_svn(None, 'mkdir', '--parents', P)
+    for modaction in init_mods:
+      modify(modaction, incoming_paths(wc_dir, P))
+  run_and_verify_svn(AnyOutput, [],
+                     'commit', '-m', 'Initial set-up.', wc_dir)
+  # Capture the revision number
+  init_rev = 2 ### hard-coded
+
+  # modify all files and dirs in their various ways
+  for _path, action_mods in scenarios:
+    path = "_".join(action_mods)
+    P = os.path.join(br_dir, path) # parent
+    for modaction in action_mods:
+      modify(modaction, incoming_paths(wc_dir, P))
+
+  # commit all the modifications
+  run_and_verify_svn(AnyOutput, [],
+                     'commit', '-m', 'Action.', wc_dir)
+  # Capture the revision number
+  changed_rev = 3 ### hard-coded
+
+  # Revisions containing the scenarios' initial state and changed state.
+  return (init_rev, changed_rev)
+
+#----------------------------------------------------------------------
+
+# Apply each of the changes in INCOMING_SCENARIOS to each of the local
+# modifications in LOCALMOD_SCENARIOS.
+# Ensure that the result in each case includes a tree conflict on the parent.
+# OPERATION = 'update' or 'switch' or 'merge'
+# If COMMIT_LOCAL_MODS is true, the LOCALMOD_SCENARIOS will be committed to
+# the target branch before applying the INCOMING_SCENARIOS.
+def ensure_tree_conflict(sbox, operation,
+                         incoming_scenarios, localmod_scenarios,
+                         commit_local_mods=False):
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Map a repo-relative path to a full repository URL.
+  def url_of(repo_relative_path):
+    return sbox.repo_url + '/' + repo_relative_path
+
+  logger.debug("")
+  logger.debug("=== Starting a set of '" + operation + "' tests.")
+
+  # Path to source branch, relative to wc_dir.
+  # Source is where the "incoming" mods are made.
+  source_br = "branch1"
+
+  logger.debug("--- Creating changes in repos")
+  source_wc_dir = os.path.join(wc_dir, source_br)
+  source_left_rev, source_right_rev = set_up_repos(wc_dir, source_wc_dir,
+                                                   incoming_scenarios)
+  head_rev = source_right_rev ### assumption
+
+  # Local mods are the outer loop because cleaning up the WC is slow
+  # ('svn revert' isn't sufficient because it leaves unversioned files)
+  for _loc_init_mods, loc_action in localmod_scenarios:
+    # Determine the branch (directory) in which local mods will be made.
+    if operation == 'update':
+      # Path to target branch (where conflicts are raised), relative to wc_dir.
+      target_br = source_br
+      target_start_rev = source_left_rev
+    else: # switch/merge
+      # Make, and work in, a "branch2" that is a copy of "branch1".
+      target_br = "branch2"
+      run_and_verify_svn(AnyOutput, [],
+                         'copy', '-r', str(source_left_rev), url_of(source_br),
+                         url_of(target_br),
+                         '-m', 'Create target branch.')
+      head_rev += 1
+      target_start_rev = head_rev
+
+    # Check out a fresh WC at the revision preceding the incoming changes.
+    main.run_svn(None, 'checkout', '-r', str(target_start_rev), sbox.repo_url,
+                 wc_dir)
+
+    saved_cwd = os.getcwd()
+    os.chdir(wc_dir)
+
+    for _inc_init_mods, inc_action in incoming_scenarios:
+      scen_name = "_".join(inc_action)
+      source_url = url_of(source_br + '/' + scen_name)
+      target_path = os.path.join(target_br, scen_name)
+
+      logger.debug("=== " + str(inc_action) + " onto " + str(loc_action))
+
+      logger.debug("--- Making local mods")
+      for modaction in loc_action:
+        modify(modaction, localmod_paths(".", target_path), is_init=False)
+      if commit_local_mods:
+        run_and_verify_svn(AnyOutput, [],
+                           'commit', target_path,
+                           '-m', 'Mods in target branch.')
+        head_rev += 1
+
+      # For update, verify the pre-condition that WC is out of date.
+      # For switch/merge, there is no such precondition.
+      if operation == 'update':
+        logger.debug("--- Trying to commit (expecting 'out-of-date' error)")
+        run_and_verify_commit(".", None, None, ".*Commit failed.*",
+                              target_path)
+
+      # NOTE: 'modaction' here is the last value left over from the
+      # "Making local mods" loop above.  'f*' modactions act on the file
+      # 'F'; anything else acts on the dir 'D'.
+      if modaction.startswith('f'):
+        victim_name = 'F'
+      else:
+        victim_name = 'D'
+      victim_path = os.path.join(target_path, victim_name)
+
+      # Perform the operation that tries to apply incoming changes to the WC.
+      # The command is expected to do something (and give some output),
+      # and it should raise a conflict but not an error.
+      expected_stdout = svntest.verify.ExpectedOutput("   C " + victim_path
+                                                      + "\n",
+                                                      match_all=False)
+      # Do the main action
+      if operation == 'update':
+        logger.debug("--- Updating")
+        run_and_verify_svn(expected_stdout, [],
+                           'update', target_path, '--accept=postpone')
+      elif operation == 'switch':
+        logger.debug("--- Switching")
+        run_and_verify_svn(expected_stdout, [],
+                           'switch', source_url, target_path)
+      elif operation == 'merge':
+        logger.debug("--- Merging")
+        run_and_verify_svn(expected_stdout, [],
+                           'merge',
+                           '--allow-mixed-revisions',
+                           '-r', str(source_left_rev) + ':' + str(source_right_rev),
+                           source_url, target_path)
+      else:
+        raise Exception("unknown operation: '" + operation + "'")
+
+      logger.debug("--- Checking that 'info' reports the conflict")
+      # The two sides of the conflict differ per operation: for update and
+      # switch the left side is the WC base; for merge it is the merge-left
+      # revision.  The right side is the incoming revision (HEAD for switch).
+      if operation == 'update' or operation == 'switch':
+        incoming_left_rev = target_start_rev
+      else:
+        incoming_left_rev = source_left_rev
+      if operation == 'update' or operation == 'merge':
+        incoming_right_rev = source_right_rev
+      else:
+        incoming_right_rev = head_rev
+      expected_info = { 'Tree conflict' : '.* upon ' + operation +
+          r'.* \((none|(file|dir).*' +
+          re.escape(victim_name + '@' + str(incoming_left_rev)) + r')' +
+          r'.* \((none|(file|dir).*' +
+          re.escape(victim_name + '@' + str(incoming_right_rev)) + r')' }
+      run_and_verify_info([expected_info], victim_path)
+
+      logger.debug("--- Trying to commit (expecting 'conflict' error)")
+      ### run_and_verify_commit() requires an "output_tree" argument, but
+      # here we get away with passing None because we know an implementation
+      # detail: namely that it's not going to look at that argument if it
+      # gets the stderr that we're expecting.
+      run_and_verify_commit(".", None, None, ".*conflict.*", victim_path)
+
+      logger.debug("--- Checking that 'status' reports the conflict")
+      expected_stdout = AlternateOutput([
+                          RegexListOutput([
+                            "^......C.* " + re.escape(victim_path) + "$",
+                            "^      >   .* upon " + operation] +
+                          svntest.main.summary_of_conflicts(tree_conflicts=1)),
+                          RegexListOutput([
+                            "^......C.* " + re.escape(victim_path) + "$",
+                            "^      >   moved to .*",
+                            "^      >   .* upon " + operation] +
+                          svntest.main.summary_of_conflicts(tree_conflicts=1))
+                        ])
+      run_and_verify_svn(expected_stdout, [],
+                         'status', victim_path)
+
+      logger.debug("--- Resolving the conflict")
+      # Make sure resolving the parent does nothing.
+      run_and_verify_resolved([], os.path.dirname(victim_path))
+      # The real resolved call.
+      run_and_verify_resolved([victim_path])
+
+      logger.debug("--- Checking that 'status' does not report a conflict")
+      exitcode, stdout, stderr = run_and_verify_svn(None, [],
+                                                    'status', victim_path)
+      for line in stdout:
+        if line[6] == 'C': # and line.endswith(victim_path + '\n'):
+          raise svntest.Failure("unexpected status C") # on victim_path
+
+      # logger.debug("--- Committing (should now succeed)")
+      # run_and_verify_svn(None, [],
+      #                    'commit', '-m', '', target_path)
+      # target_start_rev += 1
+
+      logger.debug("")
+
+    os.chdir(saved_cwd)
+
+    # Clean up the target branch and WC
+    main.run_svn(None, 'revert', '-R', wc_dir)
+    main.safe_rmtree(wc_dir)
+    if operation != 'update':
+      run_and_verify_svn(AnyOutput, [],
+                         'delete', url_of(target_br),
+                         '-m', 'Delete target branch.')
+      head_rev += 1
+
+#----------------------------------------------------------------------
+
+# Tests for update/switch affecting a file, where the incoming change
+# conflicts with a scheduled change in the WC.
+#
+# WC state: as scheduled (no obstruction)
+
+def up_sw_file_mod_onto_del(sbox):
+  "up/sw file: modify onto del/rpl"
+  # An incoming file modification onto a locally deleted/replaced file
+  # must raise a tree conflict, via 'update' and (second sandbox) 'switch'.
+  sbox2 = sbox.clone_dependent()
+  ensure_tree_conflict(sbox, 'update', f_mods,
+                       f_dels + f_rpls)
+  ensure_tree_conflict(sbox2, 'switch', f_mods,
+                       f_dels + f_rpls)
+  # Note: See UC1 in notes/tree-conflicts/use-cases.txt.
+
+def up_sw_file_del_onto_mod(sbox):
+  "up/sw file: del/rpl/mv onto modify"
+  # Results: tree-conflict on F
+  #          no other change to WC (except possibly other half of move)
+  # ### OR (see Nico's email <>):
+  # schedule-delete but leave F on disk (can only apply with
+  # text-mod; prop-mod can't be preserved in this way)
+  # An incoming delete/replace/move of a file onto local modifications
+  # must raise a tree conflict, via 'update' and (second sandbox) 'switch'.
+  sbox2 = sbox.clone_dependent()
+  ensure_tree_conflict(sbox, 'update', f_dels + f_moves + f_rpls,
+                       f_mods)
+  ensure_tree_conflict(sbox2, 'switch', f_dels + f_moves + f_rpls,
+                       f_mods)
+  # Note: See UC2 in notes/tree-conflicts/use-cases.txt.
+
+def up_sw_file_del_onto_del(sbox):
+  "up/sw file: del/rpl/mv onto del/rpl/mv"
+  # An incoming delete/replace/move of a file onto a local delete/replace
+  # must raise a tree conflict, via 'update' and (second sandbox) 'switch'.
+  sbox2 = sbox.clone_dependent()
+  ensure_tree_conflict(sbox, 'update', f_dels + f_moves + f_rpls,
+                       f_dels + f_rpls)
+  ensure_tree_conflict(sbox2, 'switch', f_dels + f_moves + f_rpls,
+                       f_dels + f_rpls)
+  # Note: See UC3 in notes/tree-conflicts/use-cases.txt.
+
+def up_sw_file_add_onto_add(sbox):
+  "up/sw file: add onto add"
+  # An incoming file add onto a locally added file must raise a tree
+  # conflict, via 'update' and (second sandbox) 'switch'.
+  sbox2 = sbox.clone_dependent()
+  ensure_tree_conflict(sbox, 'update', f_adds, f_adds)
+  ensure_tree_conflict(sbox2, 'switch', f_adds, f_adds)
+
+#----------------------------------------------------------------------
+
+# Tests for update/switch affecting a dir, where the incoming change
+# conflicts with a scheduled change in the WC.
+
+def up_sw_dir_mod_onto_del(sbox):
+  "up/sw dir: modify onto del/rpl/mv"
+  # WC state: any (D necessarily exists; children may have any state)
+  # An incoming dir modification onto a locally deleted/replaced dir
+  # must raise a tree conflict, via 'update' and (second sandbox) 'switch'.
+  sbox2 = sbox.clone_dependent()
+  ensure_tree_conflict(sbox, 'update', d_mods,
+                       d_dels + d_rpls)
+  ensure_tree_conflict(sbox2, 'switch', d_mods,
+                       d_dels + d_rpls)
+
+def up_sw_dir_del_onto_mod(sbox):
+  "up/sw dir: del/rpl/mv onto modify"
+  # WC state: any (D necessarily exists; children may have any state)
+  # An incoming delete/replace/move of a dir onto local modifications
+  # must raise a tree conflict, via 'update' and (second sandbox) 'switch'.
+  sbox2 = sbox.clone_dependent()
+  ensure_tree_conflict(sbox, 'update', d_dels + d_moves + d_rpls,
+                       d_mods)
+  ensure_tree_conflict(sbox2, 'switch', d_dels + d_moves + d_rpls,
+                       d_mods)
+
+def up_sw_dir_del_onto_del(sbox):
+  "up/sw dir: del/rpl/mv onto del/rpl/mv"
+  # WC state: any (D necessarily exists; children may have any state)
+  # An incoming delete/replace/move of a dir onto a local delete/replace
+  # must raise a tree conflict, via 'update' and (second sandbox) 'switch'.
+  sbox2 = sbox.clone_dependent()
+  ensure_tree_conflict(sbox, 'update', d_dels + d_moves + d_rpls,
+                       d_dels + d_rpls)
+  ensure_tree_conflict(sbox2, 'switch', d_dels + d_moves + d_rpls,
+                       d_dels + d_rpls)
+
+# This is currently set as XFail over ra_dav because it hits
+# issue #3314 'DAV can overwrite directories during copy'
+#
+# TRUNK@35827.DBG>svn st -v branch1
+# 2 2 jrandom branch1
+# 2 2 jrandom branch1\dC
+# A + - 2 jrandom branch1\dC\D
+#
+# TRUNK@35827.DBG>svn log -r2:HEAD branch1 -v
+# ------------------------------------------------------------------------
+# r2 | jrandom | 2009-02-12 09:26:52 -0500 (Thu, 12 Feb 2009) | 1 line
+# Changed paths:
+# A /D1
+# A /F1
+# A /branch1
+# A /branch1/dC
+#
+# Initial set-up.
+# ------------------------------------------------------------------------
+# r3 | jrandom | 2009-02-12 09:26:52 -0500 (Thu, 12 Feb 2009) | 1 line
+# Changed paths:
+# A /branch1/dC/D (from /D1:2)
+#
+# Action.
+# ------------------------------------------------------------------------
+#
+# TRUNK@35827.DBG>svn ci -m "Should be ood" branch1
+# Adding branch1\dC\D
+#
+# Committed revision 4.
+@Issue(3314)
+def up_sw_dir_add_onto_add(sbox):
+  "up/sw dir: add onto add"
+  # WC state: as scheduled (no obstruction)
+  # An incoming dir add onto a locally added dir must raise a tree
+  # conflict, via 'update' and (second sandbox) 'switch'.
+  # See the large comment above for the ra_dav / issue #3314 caveat.
+  sbox2 = sbox.clone_dependent()
+  ensure_tree_conflict(sbox, 'update', d_adds, d_adds)
+  ensure_tree_conflict(sbox2, 'switch', d_adds, d_adds)
+
+#----------------------------------------------------------------------
+
+# Tests for merge affecting a file, where the incoming change
+# conflicts with the target.
+
+def merge_file_mod_onto_not_file(sbox):
+  "merge file: modify onto not-file"
+  sbox2 = sbox.clone_dependent()
+  # Test merges where the "local mods" are committed to the target branch.
+  ensure_tree_conflict(sbox, 'merge', f_mods, f_dels + f_moves + f_rpl_d,
+                       commit_local_mods=True)
+  # Test merges where the "local mods" are uncommitted mods in the WC.
+  ensure_tree_conflict(sbox2, 'merge', f_mods, f_dels + f_moves)
+  # Note: See UC4 in notes/tree-conflicts/use-cases.txt.
+
+def merge_file_del_onto_not_same(sbox):
+  "merge file: del/rpl/mv onto not-same"
+  # Merging a file delete/replace/move onto a differing file must raise a
+  # tree conflict, with the local mods committed and (sbox2) uncommitted.
+  sbox2 = sbox.clone_dependent()
+  ensure_tree_conflict(sbox, 'merge', f_dels + f_moves + f_rpls, f_mods,
+                       commit_local_mods=True)
+  ensure_tree_conflict(sbox2, 'merge', f_dels + f_moves + f_rpls, f_mods)
+  # Note: See UC5 in notes/tree-conflicts/use-cases.txt.
+
+def merge_file_del_onto_not_file(sbox):
+  "merge file: del/rpl/mv onto not-file"
+  # Merging a file delete/replace/move onto a path that is no longer a
+  # file must raise a tree conflict, with the local mods committed and
+  # (sbox2) uncommitted.
+  sbox2 = sbox.clone_dependent()
+  ensure_tree_conflict(sbox, 'merge', f_dels + f_moves + f_rpls,
+                       f_dels + f_moves + f_rpl_d,
+                       commit_local_mods=True)
+  ensure_tree_conflict(sbox2, 'merge', f_dels + f_moves + f_rpls,
+                       f_dels + f_moves)
+  # Note: See UC6 in notes/tree-conflicts/use-cases.txt.
+
+def merge_file_add_onto_not_none(sbox):
+  "merge file: add onto not-none"
+  # Merging a file add onto an existing item must raise a tree conflict,
+  # with the local add committed and (sbox2) uncommitted.
+  sbox2 = sbox.clone_dependent()
+  ensure_tree_conflict(sbox, 'merge', f_adds, f_adds,
+                       commit_local_mods=True)
+  ensure_tree_conflict(sbox2, 'merge', f_adds, f_adds)
+  # TODO: Also test directory adds at path "F"?
+
+#----------------------------------------------------------------------
+
+# Tests for merge affecting a dir, where the incoming change
+# conflicts with the target branch.
+
+def merge_dir_mod_onto_not_dir(sbox):
+  "merge dir: modify onto not-dir"
+  # Merging a dir modification onto a path that is no longer a dir must
+  # raise a tree conflict, with the local mods committed and (sbox2)
+  # uncommitted.
+  sbox2 = sbox.clone_dependent()
+  ensure_tree_conflict(sbox, 'merge', d_mods, d_dels + d_moves + d_rpl_f,
+                       commit_local_mods=True)
+  ensure_tree_conflict(sbox2, 'merge', d_mods, d_dels + d_moves)
+
+# Test for issue #3150 'tree conflicts with directories as victims'.
+@Issue(3150)
+def merge_dir_del_onto_not_same(sbox):
+  "merge dir: del/rpl/mv onto not-same"
+  # Merging a dir delete/replace/move onto a differing dir must raise a
+  # tree conflict, with the local mods committed and (sbox2) uncommitted.
+  sbox2 = sbox.clone_dependent()
+  ensure_tree_conflict(sbox, 'merge', d_dels + d_moves + d_rpls, d_mods,
+                       commit_local_mods=True)
+  ensure_tree_conflict(sbox2, 'merge', d_dels + d_moves + d_rpls, d_mods)
+
+def merge_dir_del_onto_not_dir(sbox):
+  "merge dir: del/rpl/mv onto not-dir"
+  # Merging a dir delete/replace/move onto a path that is no longer a dir
+  # must raise a tree conflict, with the local mods committed and (sbox2)
+  # uncommitted.
+  sbox2 = sbox.clone_dependent()
+  ensure_tree_conflict(sbox, 'merge', d_dels + d_moves + d_rpls,
+                       d_dels + d_moves + d_rpl_f,
+                       commit_local_mods=True)
+  ensure_tree_conflict(sbox2, 'merge', d_dels + d_moves + d_rpls,
+                       d_dels + d_moves)
+
+def merge_dir_add_onto_not_none(sbox):
+  "merge dir: add onto not-none"
+  # Merging a dir add onto an existing item must raise a tree conflict,
+  # with the local add committed and (sbox2) uncommitted.
+  sbox2 = sbox.clone_dependent()
+  ensure_tree_conflict(sbox, 'merge', d_adds, d_adds,
+                       commit_local_mods=True)
+  ensure_tree_conflict(sbox2, 'merge', d_adds, d_adds)
+  # TODO: also try with file adds at path "D"?
+
+#----------------------------------------------------------------------
+
+@Issue(3805)
+def force_del_tc_inside(sbox):
+  "--force del on dir with TCs inside"
+
+  # A/C      <- delete with --force
+  # A +  C   A/C/dir
+  # A +  C   A/C/file
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  C = os.path.join(wc_dir, "A", "C")
+  dir = os.path.join(wc_dir, "A", "C", "dir")
+  file = os.path.join(wc_dir, "A", "C", "file")
+
+  # Add dir
+  main.run_svn(None, 'mkdir', dir)
+
+  # Add file
+  content = "This is the file 'file'.\n"
+  main.file_append(file, content)
+  main.run_svn(None, 'add', file)
+
+  # r2: commit dir and file.
+  main.run_svn(None, 'commit', '-m', 'Add dir and file', wc_dir)
+
+  # Remove dir and file in r3.
+  main.run_svn(None, 'delete', dir, file)
+  main.run_svn(None, 'commit', '-m', 'Remove dir and file', wc_dir)
+
+  # Warp back to -r2, dir and file coming back.
+  main.run_svn(None, 'update', '-r2', wc_dir)
+
+  # Set a meaningless prop on each dir and file
+  run_and_verify_svn(["property 'propname' set on '" + dir + "'\n"],
+                     [], 'ps', 'propname', 'propval', dir)
+  run_and_verify_svn(["property 'propname' set on '" + file + "'\n"],
+                     [], 'ps', 'propname', 'propval', file)
+
+  # Update WC to HEAD; tree conflicts result on dir and file
+  # because there are local mods on the props.
+  expected_output = wc.State(wc_dir, {
+    'A/C/dir' : Item(status='  ', treeconflict='C'),
+    'A/C/file' : Item(status='  ', treeconflict='C'),
+  })
+
+  expected_disk = main.greek_state.copy()
+  expected_disk.add({
+    'A/C/dir' : Item(props={'propname' : 'propval'}),
+    'A/C/file' : Item(contents=content, props={'propname' : 'propval'}),
+  })
+
+  expected_status = get_virginal_state(wc_dir, 2)
+  expected_status.tweak(wc_rev='3')
+  expected_status.add({
+    'A/C/dir' : Item(status='A ', wc_rev='-', copied='+', treeconflict='C'),
+    'A/C/file' : Item(status='A ', wc_rev='-', copied='+', treeconflict='C'),
+  })
+  run_and_verify_update(wc_dir,
+                        expected_output, expected_disk, expected_status,
+                        check_props=True)
+
+  # Delete A/C with --force, in effect disarming the tree-conflicts.
+  run_and_verify_svn(verify.UnorderedOutput(['D         ' + C + '\n',
+                                             'D         ' + dir + '\n',
+                                             'D         ' + file + '\n']),
+                     [], 'delete', C, '--force')
+
+  # Verify deletion status
+  # Note: the tree conflicts are removed because we forced the delete.
+  expected_status.tweak('A/C', status='D ')
+  expected_status.remove('A/C/dir', 'A/C/file')
+
+  run_and_verify_status(wc_dir, expected_status)
+
+  # Commit, remove the "disarmed" tree-conflict.
+  expected_output = wc.State(wc_dir, { 'A/C' : Item(verb='Deleting') })
+
+  expected_status.remove('A/C')
+
+  run_and_verify_commit(wc_dir,
+                        expected_output, expected_status)
+
+#----------------------------------------------------------------------
+
+@Issue(3805)
+def force_del_tc_is_target(sbox):
+  "--force del on tree-conflicted targets"
+  # A/C
+  # A +  C   A/C/dir    <- delete with --force
+  # A +  C   A/C/file   <- delete with --force
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  C = os.path.join(wc_dir, "A", "C")
+  dir = os.path.join(wc_dir, "A", "C", "dir")
+  file = os.path.join(wc_dir, "A", "C", "file")
+
+  # Add dir
+  main.run_svn(None, 'mkdir', dir)
+
+  # Add file
+  content = "This is the file 'file'.\n"
+  main.file_append(file, content)
+  main.run_svn(None, 'add', file)
+
+  # r2: commit dir and file.
+  main.run_svn(None, 'commit', '-m', 'Add dir and file', wc_dir)
+
+  # Remove dir and file in r3.
+  main.run_svn(None, 'delete', dir, file)
+  main.run_svn(None, 'commit', '-m', 'Remove dir and file', wc_dir)
+
+  # Warp back to -r2, dir and file coming back.
+  main.run_svn(None, 'update', '-r2', wc_dir)
+
+  # Set a meaningless prop on each dir and file
+  run_and_verify_svn(["property 'propname' set on '" + dir + "'\n"],
+                     [], 'ps', 'propname', 'propval', dir)
+  run_and_verify_svn(["property 'propname' set on '" + file + "'\n"],
+                     [], 'ps', 'propname', 'propval', file)
+
+  # Update WC to HEAD; tree conflicts result on dir and file
+  # because there are local mods on the props.
+  expected_output = wc.State(wc_dir, {
+    'A/C/dir' : Item(status='  ', treeconflict='C'),
+    'A/C/file' : Item(status='  ', treeconflict='C'),
+  })
+
+  expected_disk = main.greek_state.copy()
+  expected_disk.add({
+    'A/C/dir' : Item(props={'propname' : 'propval'}),
+    'A/C/file' : Item(contents=content, props={'propname' : 'propval'}),
+  })
+
+  expected_status = get_virginal_state(wc_dir, 2)
+  expected_status.tweak(wc_rev='3')
+  expected_status.add({
+    'A/C/dir' : Item(status='A ', wc_rev='-', copied='+', treeconflict='C'),
+    'A/C/file' : Item(status='A ', wc_rev='-', copied='+', treeconflict='C'),
+  })
+  run_and_verify_update(wc_dir,
+                        expected_output, expected_disk, expected_status,
+                        check_props=True)
+
+  # Delete nodes with --force, in effect disarming the tree-conflicts.
+  run_and_verify_svn(['D         ' + dir + '\n',
+                      'D         ' + file + '\n'],
+                     [],
+                     'delete', dir, file, '--force')
+
+  # The rm --force now removes the nodes and the tree conflicts on them
+  expected_status.remove('A/C/dir', 'A/C/file')
+  run_and_verify_status(wc_dir, expected_status)
+
+  # Commit, remove the "disarmed" tree-conflict.
+  # Nothing is left to commit, so the expected commit output is empty.
+  expected_output = wc.State(wc_dir, {})
+
+  run_and_verify_commit(wc_dir,
+                        expected_output, expected_status)
+
+#----------------------------------------------------------------------
+
+# A regression test to check that "rm --keep-local" on a tree-conflicted
+# node leaves the WC in a valid state in which simple commands such as
+# "status" do not error out. At one time the command left the WC in an
+# invalid state. (Before r989189, "rm --keep-local" used to have the effect
+# of "disarming" the conflict in the sense that "commit" would ignore the
+# conflict.)
+
+def query_absent_tree_conflicted_dir(sbox):
+  "query an unversioned tree-conflicted dir"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Some paths we'll care about
+  C_path = os.path.join(wc_dir, "A", "C")
+  C_C_path = os.path.join(wc_dir, "A", "C", "C")
+
+  # Add a directory A/C/C as r2.
+  main.run_svn(None, 'mkdir', C_C_path)
+  main.run_svn(None, 'commit', '-m', 'Add directory A/C/C', wc_dir)
+
+  # Remove that directory A/C/C as r3.
+  main.run_svn(None, 'delete', C_C_path)
+  main.run_svn(None, 'commit', '-m', 'Remove directory A/C/C', wc_dir)
+
+  # Warp back to -r2 with the directory added.
+  main.run_svn(None, 'update', '-r2', wc_dir)
+
+  # Set a meaningless prop on A/C/C
+  run_and_verify_svn(["property 'propname' set on '" + C_C_path + "'\n"],
+                     [], 'ps', 'propname', 'propval', C_C_path)
+
+  # Update WC to HEAD, a tree conflict results on A/C/C because of the
+  # working prop on A/C/C.
+  expected_output = wc.State(wc_dir, {
+    'A/C/C' : Item(status='  ', treeconflict='C'),
+  })
+  expected_disk = main.greek_state.copy()
+  expected_disk.add({'A/C/C' : Item(props={'propname' : 'propval'})})
+  expected_status = get_virginal_state(wc_dir, 1)
+  expected_status.tweak(wc_rev='3')
+  expected_status.add({'A/C/C' : Item(status='A ',
+                                      wc_rev='-',
+                                      copied='+',
+                                      treeconflict='C')})
+  run_and_verify_update(wc_dir,
+                        expected_output, expected_disk, expected_status,
+                        check_props=True)
+
+  # Delete A/C with --keep-local.
+  run_and_verify_svn(verify.UnorderedOutput(['D         ' + C_C_path + '\n',
+                                             'D         ' + C_path + '\n']),
+                     [],
+                     'delete', C_path, '--keep-local')
+
+  expected_status.tweak('A/C', status='D ')
+  expected_status.remove('A/C/C')
+  run_and_verify_status(wc_dir, expected_status)
+
+  # Try to access the absent tree-conflict as explicit target.
+  # These used to fail like this:
+  ## CMD: svn status -v -u -q
+  ## [...]
+  ## subversion/svn/status-cmd.c:248: (apr_err=155035)
+  ## subversion/svn/util.c:953: (apr_err=155035)
+  ## subversion/libsvn_client/status.c:270: (apr_err=155035)
+  ## subversion/libsvn_wc/lock.c:607: (apr_err=155035)
+  ## subversion/libsvn_wc/entries.c:1607: (apr_err=155035)
+  ## subversion/libsvn_wc/wc_db.c:3288: (apr_err=155035)
+  ## svn: Expected node '/.../tree_conflict_tests-20/A/C' to be added.
+
+  # A/C/C is now unversioned, using status:
+  expected_output = wc.State(wc_dir, {
+  })
+  run_and_verify_status(C_C_path, expected_output)
+
+  # using info:
+  run_and_verify_svn(None, ".*W155010.*The node.*was not found.*",
+                     'info', C_C_path)
+
+#----------------------------------------------------------------------
+
+@Issue(3608)
+def up_add_onto_add_revert(sbox):
+  "issue #3608: reverting an add onto add conflict"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  # A second working copy of the same repository.
+  wc2_dir = sbox.add_wc_path('wc2')
+  svntest.actions.run_and_verify_svn(None, [], 'checkout',
+                                     sbox.repo_url, wc2_dir)
+
+  file1 = os.path.join(wc_dir, 'newfile')
+  file2 = os.path.join(wc2_dir, 'newfile')
+
+  dir1 = os.path.join(wc_dir, 'NewDir')
+  dir2 = os.path.join(wc2_dir, 'NewDir')
+
+  # Copy the same items into both working copies, so the same paths are
+  # added independently on each side.
+  main.run_svn(None, 'cp', os.path.join(wc_dir, 'iota'), file1)
+  main.run_svn(None, 'cp', os.path.join(wc2_dir, 'iota'), file2)
+
+  main.run_svn(None, 'cp', os.path.join(wc_dir, 'A/C'), dir1)
+  main.run_svn(None, 'cp', os.path.join(wc2_dir, 'A/C'), dir2)
+
+  # Commit wc_dir's adds as r2.
+  sbox.simple_commit(message='Added file')
+
+  expected_disk = main.greek_state.copy()
+  expected_disk.add({
+    'newfile' : Item(contents="This is the file 'iota'.\n"),
+    'NewDir' : Item(),
+    })
+
+  # Updating wc2 raises add-onto-add tree conflicts on both paths.
+  expected_status = get_virginal_state(wc2_dir, 2)
+  expected_status.add({
+    'newfile' : Item(status='R ', copied='+', treeconflict='C', wc_rev='-'),
+    'NewDir' : Item(status='R ', copied='+', treeconflict='C', wc_rev='-'),
+    })
+
+  run_and_verify_update(wc2_dir,
+                        None, expected_disk, expected_status,
+                        check_props=True)
+
+  # Currently (r927086), this removes dir2 and file2 in a way that
+  # they don't reappear after update.
+  main.run_svn(None, 'revert', file2)
+  main.run_svn(None, 'revert', dir2)
+
+  expected_status = get_virginal_state(wc2_dir, 2)
+  expected_status.add({
+    'newfile' : Item(status='  ', wc_rev='2'),
+    'NewDir' : Item(status='  ', wc_rev='2'),
+    })
+
+  # Expected behavior is that after revert + update the tree matches
+  # the repository
+  run_and_verify_update(wc2_dir,
+                        None, expected_disk, expected_status,
+                        check_props=True)
+
+
+#----------------------------------------------------------------------
+# Regression test for issue #3525 and #3533
+#
+@Issues(3525,3533)
+def lock_update_only(sbox):
+  "lock status update shouldn't flag tree conflict"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Make a second copy of the working copy
+  wc_b = sbox.add_wc_path('_b')
+  svntest.actions.duplicate_dir(wc_dir, wc_b)
+
+  fname = 'iota'
+  file_path = os.path.join(sbox.wc_dir, fname)
+  file_path_b = os.path.join(wc_b, fname)
+
+  # Lock a file as wc_author, and schedule the file for deletion.
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', '', file_path)
+  svntest.main.run_svn(None, 'delete', file_path)
+
+  # In our other working copy, steal that lock.
+  svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock',
+                                     '-m', '', '--force', file_path)
+
+  # Now update the first working copy.  It should appear as a no-op:
+  # only the lock status changes, so no tree conflict may be flagged
+  # on the locally-deleted 'iota'.
+  expected_disk = main.greek_state.copy()
+  expected_disk.remove('iota')
+  expected_status = get_virginal_state(wc_dir, 1)
+  expected_status.tweak('iota', status='D ', writelocked='K')
+  run_and_verify_update(wc_dir,
+                        None, expected_disk, expected_status,
+                        check_props=True)
+
+
+#----------------------------------------------------------------------
+@Issue(3469)
+def at_directory_external(sbox):
+  "tree conflict at directory external"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # r2: create a directory external: ^/E -> ^/A
+  svntest.main.run_svn(None, 'ps', 'svn:externals', '^/A E', wc_dir)
+  svntest.main.run_svn(None, 'commit', '-m', 'ps', wc_dir)
+  svntest.main.run_svn(None, 'update', wc_dir)
+
+  # r3: modify ^/A/B/E/alpha
+  open(sbox.ospath('A/B/E/alpha'), 'a').write('This is still A/B/E/alpha.\n')
+  svntest.main.run_svn(None, 'commit', '-m', 'file mod', wc_dir)
+  svntest.main.run_svn(None, 'update', wc_dir)
+  merge_rev = svntest.main.youngest(sbox.repo_dir)
+
+  # r4: create ^/A/B/E/alpha2
+  open(sbox.ospath('A/B/E/alpha2'), 'a').write("This is the file 'alpha2'.\n")
+  svntest.main.run_svn(None, 'add', sbox.ospath('A/B/E/alpha2'))
+  svntest.main.run_svn(None, 'commit', '-m', 'file add', wc_dir)
+  svntest.main.run_svn(None, 'update', wc_dir)
+  merge_rev2 = svntest.main.youngest(sbox.repo_dir)
+
+  # r5: merge those changes back onto the WC root, which contains the
+  # external at E; the test checks this does not misbehave at the external.
+  svntest.main.run_svn(None, "merge", '-c', merge_rev, '^/A/B', wc_dir)
+  svntest.main.run_svn(None, "merge", '-c', merge_rev2, '^/A/B', wc_dir)
+
+#----------------------------------------------------------------------
+@Issue(3779)
+### This test currently passes on the current behaviour.
+### However in many cases it is unclear whether the current behaviour is
+### correct. Review is still required.
+def actual_only_node_behaviour(sbox):
+ "test behaviour with actual-only nodes"
+
+ sbox.build()
+ A_url = sbox.repo_url + '/A'
+ A_copy_url = sbox.repo_url + '/A_copy'
+ wc_dir = sbox.wc_dir
+ foo_path = sbox.ospath('A/foo', wc_dir)
+
+ # r2: copy ^/A -> ^/A_copy
+ sbox.simple_repo_copy('A', 'A_copy')
+
+ # r3: add a file foo on ^/A_copy branch
+ wc2_dir = sbox.add_wc_path('wc2')
+ foo2_path = sbox.ospath('foo', wc2_dir)
+ svntest.main.run_svn(None, "checkout", A_copy_url, wc2_dir)
+ svntest.main.file_write(foo2_path, "This is initially file foo.\n")
+ svntest.main.run_svn(None, "add", foo2_path)
+ svntest.main.run_svn(None, "commit", '-m', svntest.main.make_log_msg(),
+ foo2_path)
+
+ # r4: make a change to foo
+ svntest.main.file_append(foo2_path, "This is a new line in file foo.\n")
+ svntest.main.run_svn(None, "commit", '-m', svntest.main.make_log_msg(),
+ wc2_dir)
+
+ # cherry-pick r4 to ^/A -- the resulting tree conflict creates
+ # an actual-only node for 'A/foo'
+ sbox.simple_update()
+ svntest.main.run_svn(None, "merge", '-c', '4', A_copy_url,
+ os.path.join(wc_dir, 'A'))
+
+ # Attempt running various commands on foo and verify expected behavior
+
+ # add
+ expected_stdout = None
+ expected_stderr = ".*foo.*is an existing item in conflict.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "add", foo_path)
+
+ # add (with an existing obstruction of foo)
+ svntest.main.file_write(foo_path, "This is an obstruction of foo.\n")
+ expected_stdout = None
+ expected_stderr = ".*foo.*is an existing item in conflict.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "add", foo_path)
+ os.remove(foo_path) # remove obstruction
+
+ # blame (praise, annotate, ann)
+ expected_stdout = None
+ expected_stderr = ".*foo.*not found.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "blame", foo_path)
+
+ # cat
+ expected_stdout = None
+ expected_stderr = ".*foo.*not under version control.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "cat", foo_path)
+
+ # cat -rBASE
+ expected_stdout = None
+ expected_stderr = ".*foo.*not under version control.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "cat", "-r", "BASE", foo_path)
+ # changelist (cl)
+ expected_stdout = None
+ expected_stderr = ".*svn: warning: W155010: The node '.*foo' was not found."
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "changelist", "my_changelist", foo_path)
+
+ # checkout (co)
+ ### this does not error out -- needs review
+ expected_stdout = None
+ expected_stderr = []
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "checkout", A_copy_url, foo_path)
+ ### for now, ignore the fact that checkout succeeds and remove the nested
+ ### working copy so we can test more commands
+ def onerror(function, path, execinfo):
+ os.chmod(path, stat.S_IREAD | stat.S_IWRITE)
+ os.remove(path)
+ shutil.rmtree(foo_path, onerror=onerror)
+
+ # cleanup
+ expected_stdout = None
+ expected_stderr = ".*foo.*is not a working copy directory"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "cleanup", foo_path)
+ # commit (ci)
+ expected_stdout = None
+ expected_stderr = ".*foo.*remains in conflict.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "commit", foo_path)
+ # copy (cp)
+ expected_stdout = None
+ expected_stderr = ".*foo.*does not exist.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "copy", foo_path, foo_path + ".copy")
+
+ # delete (del, remove, rm)
+ expected_stdout = None
+ expected_stderr = ".*foo.*is not under version control.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "delete", foo_path)
+
+ # diff (di)
+ expected_stdout = None
+ expected_stderr = ".*E155.*foo.*was not found.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "diff", foo_path)
+ # export
+ expected_stdout = None
+ expected_stderr = ".*foo.*was not found.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "export", foo_path, sbox.get_tempname())
+ # import
+ expected_stdout = None
+ expected_stderr = ".*(foo.*does not exist|Can't stat.*foo).*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "import", '-m', svntest.main.make_log_msg(),
+ foo_path, sbox.repo_url + '/foo_imported')
+
+ # info
+ expected_info = {
+ 'Tree conflict': 'local missing or deleted or moved away, incoming file edit upon merge.*',
+ 'Name': 'foo',
+ 'Schedule': 'normal',
+ 'Node Kind': 'none',
+ 'Path': re.escape(sbox.ospath('A/foo')),
+ }
+ run_and_verify_info([expected_info], foo_path)
+
+ # list (ls)
+ expected_stdout = None
+ expected_stderr = ".*foo.*was not found.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "list", foo_path)
+
+ # lock
+ expected_stdout = None
+ expected_stderr = ".*foo.*was not found.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "lock", foo_path)
+ # log
+ expected_stdout = None
+ expected_stderr = ".*foo.*was not found.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "log", foo_path)
+ # merge
+ # note: this is intentionally a no-op merge that does not record mergeinfo
+ expected_stdout = None
+ expected_stderr = ".*foo.*was not found.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "merge", '--ignore-ancestry', '-c', '4',
+ A_copy_url + '/mu', foo_path)
+
+ # mergeinfo
+ expected_stdout = None
+ expected_stderr = ".*foo.*was not found.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "mergeinfo", A_copy_url + '/foo', foo_path)
+ # mkdir
+ expected_stdout = None
+ expected_stderr = ".*foo.*is an existing item in conflict.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "mkdir", foo_path)
+
+ # move (mv, rename, ren)
+ expected_stdout = None
+ expected_stderr = ".*foo.*does not exist.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "move", foo_path, foo_path + ".moved")
+ # patch
+ expected_stdout = None
+ expected_stderr = ".*foo.*does not exist.*"
+ patch_path = sbox.get_tempname()
+ f = open(patch_path, 'w')
+ patch_data = [
+ "--- foo (revision 2)\n"
+ "+++ foo (working copy)\n"
+ "@@ -1 +1,2 @@\n"
+ " foo\n"
+ " +foo\n"
+ ]
+ for line in patch_data:
+ f.write(line)
+ f.close()
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "patch", patch_path, sbox.ospath("A/foo"))
+
+ # propdel (pdel, pd)
+ expected_stdout = None
+ expected_stderr = ".*foo.*was not found.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "propdel", "svn:eol-style", foo_path)
+
+ # propget (pget, pg)
+ expected_stdout = None
+ expected_stderr = ".*foo.*is not under version control.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "propget", "svn:eol-style", foo_path)
+
+ # proplist (plist, pl)
+ expected_stdout = None
+ expected_stderr = ".*foo.*is not under version control.*"
+ svntest.actions.run_and_verify_svn(expected_stdout, expected_stderr,
+ "proplist", foo_path)
+
+ # propset (pset, ps)
+ expected_stdout = None
+ expected_stderr = ".*foo.*was not found.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "propset", "svn:eol-style", "native", foo_path)
+
+ # relocate
+ expected_stdout = None
+ expected_stderr = ".*foo.*was not found.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "relocate", A_copy_url + "/foo", foo_path)
+
+ # resolve
+ expected_stdout = "Tree conflict at.*foo.*marked as resolved"
+ expected_stderr = []
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "resolve", "--accept", "working", foo_path)
+
+ # revert the entire working copy and repeat the merge so we can test
+ # more commands
+ svntest.main.run_svn(None, "revert", "-R", wc_dir)
+ svntest.main.run_svn(None, "merge", '-c', '4', A_copy_url,
+ os.path.join(wc_dir, 'A'))
+
+ # revert
+ expected_stdout = "Reverted.*foo.*"
+ expected_stderr = []
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "revert", foo_path)
+
+ # revert the entire working copy and repeat the merge so we can test
+ # more commands
+ svntest.main.run_svn(None, "revert", "-R", wc_dir)
+ svntest.main.run_svn(None, "merge", '-c', '4', A_copy_url,
+ os.path.join(wc_dir, 'A'))
+
+ # revert
+ expected_stdout = "Reverted.*foo.*"
+ expected_stderr = []
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "revert", "-R", foo_path)
+
+ # revert the entire working copy and repeat the merge so we can test
+ # more commands
+ svntest.main.run_svn(None, "revert", "-R", wc_dir)
+ svntest.main.run_svn(None, "merge", '-c', '4', A_copy_url,
+ os.path.join(wc_dir, 'A'))
+
+ # status (stat, st)
+ expected_status = wc.State(foo_path, {
+ '' : Item(status='! ', treeconflict='C'),
+ })
+ run_and_verify_status(foo_path, expected_status)
+
+ # switch (sw)
+ expected_stdout = None
+ expected_stderr = ".*foo.*was not found.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "switch", A_copy_url + "/foo", foo_path)
+
+ # unlock
+ expected_stdout = None
+ expected_stderr = ".*foo.*was not found.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "unlock", foo_path)
+
+ # update (up)
+ # This doesn't skip because the update is anchored at the parent of A,
+ # the parent of A is not in conflict, and the update doesn't attempt to
+ # change foo itself.
+ expected_stdout = [
+ "Updating '" + foo_path + "':\n", "At revision 4.\n"]
+ expected_stderr = []
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "update", foo_path)
+
+ # upgrade
+ expected_stdout = None
+ expected_stderr = ".*Can't upgrade.*foo.*"
+ run_and_verify_svn(expected_stdout, expected_stderr,
+ "upgrade", foo_path)
+
+#----------------------------------------------------------------------
+# Regression test for an issue #3526 variant
+#
+@Issues(3526)
+def update_dir_with_not_present(sbox):
+ "lock status update shouldn't flag tree conflict"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ newtxt = sbox.ospath('A/B/new.txt')
+
+ main.file_write(newtxt, 'new.txt')
+ sbox.simple_add('A/B/new.txt')
+ sbox.simple_commit()
+
+ sbox.simple_move('A/B/new.txt', 'A/C/newer.txt')
+ sbox.simple_commit()
+ sbox.simple_rm('A/B')
+
+ # We can't commit this without updating (ra_svn produces its own error)
+ run_and_verify_svn(None,
+ "svn: (E155011|E160028|E170004): (Dir|Item).*B.*out of date",
+ 'ci', '-m', '', wc_dir)
+
+ # So we run update
+ run_and_verify_svn(None, [],
+ 'up', wc_dir)
+
+ # And now we can commit
+ run_and_verify_svn(None, [],
+ 'ci', '-m', '', wc_dir)
+
+def update_delete_mixed_rev(sbox):
+ "update that deletes mixed-rev"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ sbox.simple_move('A/B/E/alpha', 'A/B/E/alpha2')
+ sbox.simple_commit()
+ sbox.simple_update()
+ sbox.simple_rm('A/B')
+ sbox.simple_commit()
+ sbox.simple_update(revision=1)
+ sbox.simple_update(target='A/B/E', revision=2)
+ sbox.simple_mkdir('A/B/E2')
+
+ # Update raises a tree conflict on A/B due to local mod A/B/E2
+ expected_output = wc.State(wc_dir, {
+ 'A/B' : Item(status=' ', treeconflict='C'),
+ })
+ expected_disk = main.greek_state.copy()
+ expected_disk.add({
+ 'A/B/E2' : Item(),
+ 'A/B/E/alpha2' : Item(contents='This is the file \'alpha\'.\n'),
+ })
+ expected_disk.remove('A/B/E/alpha')
+ expected_status = get_virginal_state(wc_dir, 3)
+ expected_status.remove('A/B/E/alpha')
+ expected_status.add({
+ 'A/B/E2' : Item(status='A ', wc_rev='-'),
+ 'A/B/E/alpha2' : Item(status=' ', copied='+', wc_rev='-'),
+ })
+ expected_status.tweak('A/B',
+ status='A ', copied='+', treeconflict='C', wc_rev='-')
+ expected_status.tweak('A/B/F', 'A/B/E', 'A/B/E/beta', 'A/B/lambda',
+ copied='+', wc_rev='-')
+
+ # The entries world doesn't see a changed revision as another add
+ # while the WC-NG world does...
+ expected_status.tweak('A/B/E', status='A ', entry_status=' ')
+ run_and_verify_update(wc_dir,
+ expected_output, expected_disk, expected_status,
+ check_props=True)
+
+ # Resolving to working state should give a mixed-revision copy that
+ # gets committed as multiple copies
+ run_and_verify_resolved([sbox.ospath('A/B')], sbox.ospath('A/B'))
+ expected_output = wc.State(wc_dir, {
+ 'A/B' : Item(verb='Adding'),
+ 'A/B/E' : Item(verb='Replacing'),
+ 'A/B/E2' : Item(verb='Adding'),
+ })
+ expected_status.tweak('A/B', 'A/B/E', 'A/B/E2', 'A/B/F', 'A/B/E/alpha2',
+ 'A/B/E/beta', 'A/B/lambda',
+ status=' ', wc_rev=4, copied=None, treeconflict=None)
+ run_and_verify_commit(wc_dir,
+ expected_output, expected_status)
+
+ expected_info = {
+ 'Name': 'alpha2',
+ 'Node Kind': 'file',
+ }
+ run_and_verify_info([expected_info], sbox.repo_url + '/A/B/E/alpha2')
+
+#######################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ up_sw_file_mod_onto_del,
+ up_sw_file_del_onto_mod,
+ up_sw_file_del_onto_del,
+ up_sw_file_add_onto_add,
+ up_sw_dir_mod_onto_del,
+ up_sw_dir_del_onto_mod,
+ up_sw_dir_del_onto_del,
+ up_sw_dir_add_onto_add,
+ merge_file_mod_onto_not_file,
+ merge_file_del_onto_not_same,
+ merge_file_del_onto_not_file,
+ merge_file_add_onto_not_none,
+ merge_dir_mod_onto_not_dir,
+ merge_dir_del_onto_not_same,
+ merge_dir_del_onto_not_dir,
+ merge_dir_add_onto_not_none,
+ force_del_tc_inside,
+ force_del_tc_is_target,
+ query_absent_tree_conflicted_dir,
+ up_add_onto_add_revert,
+ lock_update_only,
+ at_directory_external,
+ actual_only_node_behaviour,
+ update_dir_with_not_present,
+ update_delete_mixed_rev,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/tree_conflict_tests.txt b/subversion/tests/cmdline/tree_conflict_tests.txt
new file mode 100644
index 0000000..62eeed5
--- /dev/null
+++ b/subversion/tests/cmdline/tree_conflict_tests.txt
@@ -0,0 +1,161 @@
+ -*- text -*-
+
+ TREE CONFLICT TESTING STRATEGY
+
+
+This document describes how we are testing the code that detects,
+reports and resolves tree conflicts. We'd like to make the testing,
+and the tree conflicts feature itself, more transparent and open to
+contributions.
+
+For tree conflicts, there already exist cmdline tests for the update,
+switch, merge, commit, status, info and revert commands. We've added
+tree_conflict_tests.py, not to replace the other tests, but rather to
+complement them by offering a generic way to create lots of
+tree-conflict detection test scenarios. The generic framework is not
+yet finished, but we think it will be useful as we extend the
+tree-conflict feature beyond its original use cases.
+
+================
+The Declarations
+================
+
+The new tree-conflict testing framework offers a compact, declarative
+format for test definitions. Elementary actions are combined into
+scenarios, scenarios are bundled into sets, and the sets are fed into
+a generic tree-conflict-maker. A scenario can be committed to the
+test repository and then applied to the working copy by an update,
+switch or merge operation. In another test, the same scenario can
+modify the working copy prior to an update or switch operation.
+
+An advantage of this abstraction is that it allows us to create
+additional tests easily through code reuse. It also helps us to see
+beyond our 6 original use cases.
+
+A disadvantage is that test failures are rather opaque, but that could
+probably be fixed with some Python wizardry.
+
+The changes that can cause tree conflicts are composed from a set of
+elementary actions, each named according to its function. For
+instance, fD signifies running 'svn delete' on a file.
+
+The first character of an action name specifies the type of the item
+acted upon. The names of the items are fixed.
+
+ f_ Item is the file 'F'
+ d_ Item is the directory 'D'
+
+The second character of an action name can specify an svn operation.
+
+ _P - Change a Property.
+ (Note: Presently just sets a particular property value, even
+ if that property already had that value.)
+
+ _A - Create the item by Adding an unversioned item
+ (and set a property on it as well).
+ (Suggestion: Don't set a property here; let the user do so
+ explicitly with '_P' when desired. We'd have to ensure that
+ a subsequent '_P' later in the same test would still cause
+ a change; presently it would not.)
+ _C - Create the item by Copying from an existing one.
+
+ _M - Move the item to a different name.
+ _D - Delete the item.
+
+Alternately, the second character can specify a non-svn filesystem
+operation.
+
+ _t Append text to the file.
+
+ _a Create the item on disk.
+
+ _d Delete the item from disk.
+
+To help detect bugs in the test scenarios, each action, except for _a,
+first asserts that the item (F or D) exists on disk.
+
+Some actions operate on 2 items. _C copies F1 to F (or D1 to D) and
+_M moves F to F2 (or D to D2). The items F, F1, D and D1 are created,
+added and committed by a generic test-setup function. F2 and D2 do not
+exist at the start of a test.
+
+The arguments for copy and move are not symmetrical because we are
+interested only in the destination of a copy and the source of a move.
+The source of a copy is uninteresting, and the destination of a move
+is the same as that of a copy.
+
+The elementary actions are combined to form "scenarios". A scenario
+is a literal Python tuple containing two lists of actions: the first
+list creates the starting point for the change to be tested, and the
+second is the actual change to be tested. For example, this
+scenario represents a simple deletion of a file:
+
+ ( ['fa','fA'], ['fD'] )
+
+=================
+Behind the Scenes
+=================
+
+How are the scenarios actually used?
+
+The generic tree-conflict-maker is ensure_tree_conflict(). This
+function applies two sets of scenarios. The "incoming" set is
+applied to the repository, and the "localmod" set is applied to the
+working copy. Each possible combination of incoming and localmod
+scenarios is tested as an independent subtest.
+
+The incoming scenarios are prepared as follows.
+
+1. Run the usual Subversion test setup, sbox.build(), which creates a
+test repository containing the "greek tree" (as revision 1) and checks
+out a working copy of it.
+
+2. For each incoming scenario, create the scenario path via 'svn
+mkdir'.
+
+3. For each incoming scenario, execute its initialisation actions which
+will typically create the file or directory that will be acted on later.
+
+4. Commit as revision 2.
+
+5. For each incoming scenario, execute its actions on the F or D in
+its scenario path.
+
+6. Commit as revision 3.
+
+Now the repository is loaded with all of the incoming scenarios. To
+run the actual subtests, each incoming scenario must be applied to
+each localmod scenario.
+
+1. Check out a fresh working copy at revision 2.
+
+2. Execute the localmod scenario's actions on the F or D in its
+scenario path.
+
+3. For each incoming scenario, run the given svn command (e.g. update)
+on the incoming scenario's path, then run 'svn status' on the same
+path. If the path is tree-conflicted, we're happy.
+
+The working copy is deleted and the steps are repeated for the next
+localmod scenario. If any failure occurs, the whole test is marked as
+a failure in the test output.
+
+Each test scenario is executed in a unique path created from the actions
+in the action list, concatenated with "_" between them. For the above
+example, that path would be simply "fD". The use of a unique path could
+allow running many of them in parallel. Currently, we run the scenarios
+one-by-one, each in a fresh working copy.
+
+==============
+Current Status
+==============
+
+The following features are sketched out in the scenario data, but not
+tested:
+
+ Obstructions
+
+ Replacement (file->file, dir->dir)
+
+ 'svn switch'
+
diff --git a/subversion/tests/cmdline/update_tests.py b/subversion/tests/cmdline/update_tests.py
new file mode 100755
index 0000000..16c7237
--- /dev/null
+++ b/subversion/tests/cmdline/update_tests.py
@@ -0,0 +1,6956 @@
+#!/usr/bin/env python
+#
+# update_tests.py: testing update cases.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import sys, re, os, subprocess
+import time
+import logging
+
+logger = logging.getLogger()
+
+# Our testing module
+import svntest
+from svntest import wc, actions, verify, deeptrees
+from svntest.mergetrees import expected_merge_output
+from svntest.mergetrees import set_up_branch
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = svntest.wc.StateItem
+exp_noop_up_out = svntest.actions.expected_noop_update_output
+
+from svntest.main import SVN_PROP_MERGEINFO, server_has_mergeinfo
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+
+
+def update_binary_file(sbox):
+ "update a locally-modified binary file"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Add a binary file to the project.
+ theta_contents = open(os.path.join(sys.path[0], "theta.bin"), 'rb').read()
+ # Write PNG file data into 'A/theta'.
+ theta_path = sbox.ospath('A/theta')
+ svntest.main.file_write(theta_path, theta_contents, 'wb')
+
+ svntest.main.run_svn(None, 'add', theta_path)
+
+ # Created expected output tree for 'svn ci'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/theta' : Item(verb='Adding (bin)'),
+ })
+
+ # Create expected status tree
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/theta' : Item(status=' ', wc_rev=2),
+ })
+
+ # Commit the new binary file, creating revision 2.
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Make a backup copy of the working copy.
+ wc_backup = sbox.add_wc_path('backup')
+ svntest.actions.duplicate_dir(wc_dir, wc_backup)
+ theta_backup_path = os.path.join(wc_backup, 'A', 'theta')
+
+ # Make a change to the binary file in the original working copy
+ svntest.main.file_append(theta_path, "revision 3 text")
+ theta_contents_r3 = theta_contents + b"revision 3 text"
+
+ # Created expected output tree for 'svn ci'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/theta' : Item(verb='Sending'),
+ })
+
+ # Create expected status tree
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/theta' : Item(status=' ', wc_rev=3),
+ })
+
+ # Commit original working copy again, creating revision 3.
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Now start working in the backup working copy:
+
+ # Make a local mod to theta
+ svntest.main.file_append(theta_backup_path, "extra theta text")
+ theta_contents_local = theta_contents + b"extra theta text"
+
+ # Create expected output tree for an update of wc_backup.
+ expected_output = svntest.wc.State(wc_backup, {
+ 'A/theta' : Item(status='C '),
+ })
+
+ # Create expected disk tree for the update --
+ # look! binary contents, and a binary property!
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/theta' : Item(theta_contents_local,
+ props={'svn:mime-type' : 'application/octet-stream'}),
+ })
+
+ # Create expected status tree for the update.
+ expected_status = svntest.actions.get_virginal_state(wc_backup, 3)
+ expected_status.add({
+ 'A/theta' : Item(status='C ', wc_rev=3),
+ })
+
+ extra_files = ['theta.r2', 'theta.r3']
+
+ # Do the update and check the results in three ways. Pass our
+ # custom singleton handler to verify the .orig file; this handler
+ # will verify the existence (and contents) of both binary files
+ # after the update finishes.
+ svntest.actions.run_and_verify_update(wc_backup,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True,
+ extra_files=extra_files)
+
+#----------------------------------------------------------------------
+
+def update_binary_file_2(sbox):
+ "update to an old revision of a binary files"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Suck up contents of a test .png file.
+ theta_contents = open(os.path.join(sys.path[0], "theta.bin"), 'rb').read()
+
+ # 102400 is svn_txdelta_window_size. We're going to make sure we
+ # have at least 102401 bytes of data in our second binary file (for
+ # no reason other than we have had problems in the past with getting
+ # svndiff data out of the repository for files > 102400 bytes).
+ # How? Well, we'll just keep doubling the binary contents of the
+ # original theta.png until we're big enough.
+ zeta_contents = theta_contents
+ while(len(zeta_contents) < 102401):
+ zeta_contents = zeta_contents + zeta_contents
+
+ # Write our two files' contents out to disk, in A/theta and A/zeta.
+ theta_path = sbox.ospath('A/theta')
+ svntest.main.file_write(theta_path, theta_contents, 'wb')
+ zeta_path = sbox.ospath('A/zeta')
+ svntest.main.file_write(zeta_path, zeta_contents, 'wb')
+
+ # Now, `svn add' those two files.
+ svntest.main.run_svn(None, 'add', theta_path, zeta_path)
+
+ # Created expected output tree for 'svn ci'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/theta' : Item(verb='Adding (bin)'),
+ 'A/zeta' : Item(verb='Adding (bin)'),
+ })
+
+ # Create expected status tree
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/theta' : Item(status=' ', wc_rev=2),
+ 'A/zeta' : Item(status=' ', wc_rev=2),
+ })
+
+ # Commit the new binary files, creating revision 2.
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Make some mods to the binary files.
+ svntest.main.file_append(theta_path, "foobar")
+ new_theta_contents = theta_contents + b"foobar"
+ svntest.main.file_append(zeta_path, "foobar")
+ new_zeta_contents = zeta_contents + b"foobar"
+
+ # Created expected output tree for 'svn ci'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/theta' : Item(verb='Sending'),
+ 'A/zeta' : Item(verb='Sending'),
+ })
+
+ # Create expected status tree
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/theta' : Item(status=' ', wc_rev=3),
+ 'A/zeta' : Item(status=' ', wc_rev=3),
+ })
+
+ # Commit original working copy again, creating revision 3.
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Create expected output tree for an update to rev 2.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/theta' : Item(status='U '),
+ 'A/zeta' : Item(status='U '),
+ })
+
+ # Create expected disk tree for the update --
+ # look! binary contents, and a binary property!
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/theta' : Item(theta_contents,
+ props={'svn:mime-type' : 'application/octet-stream'}),
+ 'A/zeta' : Item(zeta_contents,
+ props={'svn:mime-type' : 'application/octet-stream'}),
+ })
+
+ # Create expected status tree for the update.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'A/theta' : Item(status=' ', wc_rev=2),
+ 'A/zeta' : Item(status=' ', wc_rev=2),
+ })
+
+ # Do an update from revision 2 and make sure that our binary file
+ # gets reverted to its original contents.
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True,
+ '-r', '2', wc_dir)
+
+
+#----------------------------------------------------------------------
+
+@Issue(4128)
+def update_binary_file_3(sbox):
+ "update locally modified file to equal versions"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Suck up contents of a test .png file.
+ theta_contents = open(os.path.join(sys.path[0], "theta.bin"), 'rb').read()
+
+ # Write our file's contents out to disk, in A/theta.
+ theta_path = sbox.ospath('A/theta')
+ svntest.main.file_write(theta_path, theta_contents, 'wb')
+
+ # Now, `svn add' that file.
+ svntest.main.run_svn(None, 'add', theta_path)
+
+ # Created expected output tree for 'svn ci'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/theta' : Item(verb='Adding (bin)'),
+ })
+
+ # Create expected status tree
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/theta' : Item(status=' ', wc_rev=2),
+ })
+
+ # Commit the new binary file, creating revision 2.
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Make some mods to the binary files.
+ svntest.main.file_append(theta_path, "foobar")
+ new_theta_contents = theta_contents + b"foobar"
+
+ # Created expected output tree for 'svn ci'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/theta' : Item(verb='Sending'),
+ })
+
+ # Create expected status tree
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/theta' : Item(status=' ', wc_rev=3),
+ })
+
+ # Commit modified working copy, creating revision 3.
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Now we locally modify the file back to the old version.
+ svntest.main.file_write(theta_path, theta_contents, 'wb')
+
+ # Create expected output tree for an update to rev 2.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/theta' : Item(status='G '),
+ })
+
+ # Create expected disk tree for the update
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/theta' : Item(theta_contents,
+ props={'svn:mime-type' : 'application/octet-stream'}),
+ })
+
+ # Create expected status tree for the update.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'A/theta' : Item(status=' ', wc_rev=2),
+ })
+
+ # Do an update from revision 2 and make sure that our binary file
+ # gets reverted to its original contents.
+ # This used to raise a conflict.
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True,
+ '-r', '2', wc_dir)
+
+#----------------------------------------------------------------------
+
+def update_missing(sbox):
+ "update missing items (by name) in working copy"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Remove some files and dirs from the working copy.
+ mu_path = sbox.ospath('A/mu')
+ rho_path = sbox.ospath('A/D/G/rho')
+ E_path = sbox.ospath('A/B/E')
+ H_path = sbox.ospath('A/D/H')
+
+ # remove two files to verify that they get restored
+ os.remove(mu_path)
+ os.remove(rho_path)
+
+ ### FIXME I think directories work because they generate 'A'
+ ### feedback, is this the correct feedback?
+ svntest.main.safe_rmtree(E_path)
+ svntest.main.safe_rmtree(H_path)
+
+ # In single-db mode all missing items will just be restored
+ A_or_Restored = Item(verb='Restored')
+
+ # Create expected output tree for an update of the missing items by name
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Restored'),
+ 'A/D/G/rho' : Item(verb='Restored'),
+ 'A/B/E' : A_or_Restored,
+ 'A/B/E/alpha' : A_or_Restored,
+ 'A/B/E/beta' : A_or_Restored,
+ 'A/D/H' : A_or_Restored,
+ 'A/D/H/chi' : A_or_Restored,
+ 'A/D/H/omega' : A_or_Restored,
+ 'A/D/H/psi' : A_or_Restored,
+ })
+
+ # Create expected disk tree for the update.
+ expected_disk = svntest.main.greek_state.copy()
+
+ # Create expected status tree for the update.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ # Do the update and check the results in three ways.
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ mu_path, rho_path,
+ E_path, H_path)
+
+#----------------------------------------------------------------------
+
+def update_ignores_added(sbox):
+ "update should not munge adds or replaces"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Commit something so there's actually a new revision to update to.
+ rho_path = sbox.ospath('A/D/G/rho')
+ svntest.main.file_append(rho_path, "More stuff in rho.\n")
+ svntest.main.run_svn(None,
+ 'ci', '-m', 'log msg', rho_path)
+
+ # Create a new file, 'zeta', and schedule it for addition.
+ zeta_path = sbox.ospath('A/B/zeta')
+ svntest.main.file_append(zeta_path, "This is the file 'zeta'.\n")
+ svntest.main.run_svn(None, 'add', zeta_path)
+
+ # Schedule another file, say, 'gamma', for replacement.
+ gamma_path = sbox.ospath('A/D/gamma')
+ svntest.main.run_svn(None, 'delete', gamma_path)
+ svntest.main.file_append(gamma_path, "This is a new 'gamma' now.\n")
+ svntest.main.run_svn(None, 'add', gamma_path)
+
+ # Now update. "zeta at revision 0" should *not* be reported at all,
+ # so it should remain scheduled for addition at revision 0. gamma
+ # was scheduled for replacement, so it also should remain marked as
+ # such, and maintain its revision of 1.
+
+ # Create expected output tree for an update of the wc_backup.
+ expected_output = svntest.wc.State(wc_dir, { })
+
+ # Create expected disk tree for the update.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/B/zeta' : Item("This is the file 'zeta'.\n"),
+ })
+ expected_disk.tweak('A/D/gamma', contents="This is a new 'gamma' now.\n")
+ expected_disk.tweak('A/D/G/rho',
+ contents="This is the file 'rho'.\nMore stuff in rho.\n")
+
+ # Create expected status tree for the update.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+
+ # Before WC-NG we couldn't bump the wc_rev for gamma from 1 to 2 because it could
+ # be replaced with history and we couldn't store all the revision information.
+ # WC-NG just bumps the revision as it can easily store different revisions.
+ expected_status.tweak('A/D/gamma', wc_rev=2, status='R ')
+ expected_status.add({
+ 'A/B/zeta' : Item(status='A ', wc_rev=0),
+ })
+
+ # Do the update and check the results in three ways.
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+
+#----------------------------------------------------------------------
+
+def update_to_rev_zero(sbox):
+ "update to revision 0"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ iota_path = sbox.ospath('iota')
+ A_path = sbox.ospath('A')
+
+ # Create expected output tree for an update to rev 0
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(status='D '),
+ 'A' : Item(status='D '),
+ })
+
+ # Create expected disk tree for the update to rev 0
+ expected_disk = svntest.wc.State(wc_dir, { })
+
+ # Do the update and check the results.
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ None, [], False,
+ '-r', '0', wc_dir)
+
+#----------------------------------------------------------------------
+
+def receive_overlapping_same_change(sbox):
+ "overlapping identical changes should not conflict"
+
+ ### (See http://subversion.tigris.org/issues/show_bug.cgi?id=682.)
+ ###
+ ### How this test works:
+ ###
+ ### Create working copy foo, modify foo/iota. Duplicate foo,
+ ### complete with locally modified iota, to bar. Now we should
+ ### have:
+ ###
+ ### $ svn st foo
+ ### M foo/iota
+ ### $ svn st bar
+ ### M bar/iota
+ ### $
+ ###
+ ### Commit the change from foo, then update bar. The repository
+ ### change should get folded into bar/iota with no conflict, since
+ ### the two modifications are identical.
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Modify iota.
+ iota_path = sbox.ospath('iota')
+ svntest.main.file_append(iota_path, "A change to iota.\n")
+
+ # Duplicate locally modified wc, giving us the "other" wc.
+ other_wc = sbox.add_wc_path('other')
+ svntest.actions.duplicate_dir(wc_dir, other_wc)
+ other_iota_path = os.path.join(other_wc, 'iota')
+
+ # Created expected output tree for 'svn ci'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+
+ # Create expected status tree
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', wc_rev=2)
+
+ # Commit the change, creating revision 2.
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Expected output tree for update of other_wc.
+ expected_output = svntest.wc.State(other_wc, {
+ 'iota' : Item(status='G '),
+ })
+
+ # Expected disk tree for the update.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('iota',
+ contents="This is the file 'iota'.\nA change to iota.\n")
+
+ # Expected status tree for the update.
+ expected_status = svntest.actions.get_virginal_state(other_wc, 2)
+
+ # Do the update and check the results in three ways.
+ svntest.actions.run_and_verify_update(other_wc,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+#----------------------------------------------------------------------
+
def update_to_resolve_text_conflicts(sbox):
  "delete files and update to resolve text conflicts"

  # Commit mods from one working copy, make conflicting mods in a backup
  # working copy, update the backup to create text+prop conflicts, then
  # delete the conflicted files and update again: the text conflicts are
  # cleared, the property conflict on rho remains.

  sbox.build()
  wc_dir = sbox.wc_dir

  # Make a backup copy of the working copy
  wc_backup = sbox.add_wc_path('backup')
  svntest.actions.duplicate_dir(wc_dir, wc_backup)

  # Make a couple of local mods to files which will be committed
  mu_path = sbox.ospath('A/mu')
  rho_path = sbox.ospath('A/D/G/rho')
  svntest.main.file_append(mu_path, 'Original appended text for mu\n')
  svntest.main.file_append(rho_path, 'Original appended text for rho\n')
  svntest.main.run_svn(None, 'propset', 'Kubla', 'Khan', rho_path)

  # Make a couple of local mods to files which will be conflicted
  mu_path_backup = os.path.join(wc_backup, 'A', 'mu')
  rho_path_backup = os.path.join(wc_backup, 'A', 'D', 'G', 'rho')
  svntest.main.file_append(mu_path_backup,
                           'Conflicting appended text for mu\n')
  svntest.main.file_append(rho_path_backup,
                           'Conflicting appended text for rho\n')
  svntest.main.run_svn(None, 'propset', 'Kubla', 'Xanadu', rho_path_backup)

  # Created expected output tree for 'svn ci'
  expected_output = svntest.wc.State(wc_dir, {
    'A/mu' : Item(verb='Sending'),
    'A/D/G/rho' : Item(verb='Sending'),
    })

  # Create expected status tree; all local revisions should be at 1,
  # but mu and rho should be at revision 2.
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/mu', wc_rev=2)
  expected_status.tweak('A/D/G/rho', wc_rev=2, status='  ')

  # Commit.
  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status)

  # Create expected output tree for an update of the wc_backup.
  expected_output = svntest.wc.State(wc_backup, {
    'A/mu' : Item(status='C '),
    'A/D/G/rho' : Item(status='CC'),
    })

  # Create expected disk tree for the update.
  expected_disk = svntest.main.greek_state.copy()
  expected_disk.tweak('A/mu',
                      contents="\n".join(["This is the file 'mu'.",
                                          "<<<<<<< .mine",
                                          "Conflicting appended text for mu",
                                          "||||||| .r1",
                                          "=======",
                                          "Original appended text for mu",
                                          ">>>>>>> .r2",
                                          ""]))
  expected_disk.tweak('A/D/G/rho',
                      contents="\n".join(["This is the file 'rho'.",
                                          "<<<<<<< .mine",
                                          "Conflicting appended text for rho",
                                          "||||||| .r1",
                                          "=======",
                                          "Original appended text for rho",
                                          ">>>>>>> .r2",
                                          ""]))

  # Create expected status tree for the update.
  expected_status = svntest.actions.get_virginal_state(wc_backup, 2)
  expected_status.tweak('A/mu', status='C ')
  expected_status.tweak('A/D/G/rho', status='CC')

  # "Extra" files that we expect to result from the conflicts.
  # These are expressed as a list of regexps.  Raw strings keep the
  # backslash escapes intact for the regex engine (a plain '\.' is an
  # invalid escape sequence in Python 3 string literals).
  extra_files = [r'mu.*\.r1', r'mu.*\.r2', r'mu.*\.mine',
                 r'rho.*\.r1', r'rho.*\.r2', r'rho.*\.mine', r'rho.*\.prej']

  # Do the update and check the results in three ways.
  # All "extra" files are passed to detect_conflict_files().
  svntest.actions.run_and_verify_update(wc_backup,
                                        expected_output,
                                        expected_disk,
                                        expected_status,
                                        extra_files=extra_files)

  # remove the conflicting files to clear text conflict but not props conflict
  os.remove(mu_path_backup)
  os.remove(rho_path_backup)

  ### TODO: Can't get run_and_verify_update to work here :-( I get
  # the error "Unequal Types: one Node is a file, the other is a
  # directory". Use run_svn and then run_and_verify_status instead
  #
  # BUG FIX: the original unpacked stderr into the same name as stdout
  # ('stdout_lines' appeared twice in the assignment targets), so the
  # check below silently inspected stderr.  Unpack into distinct names
  # and check stderr explicitly: a successful update writes to stdout,
  # so a non-empty stderr is the real failure signal.
  exit_code, stdout_lines, stderr_lines = svntest.main.run_svn(None, 'up',
                                                               wc_backup)
  if len(stderr_lines) > 0:
    logger.warn("update 2 failed")
    raise svntest.Failure

  # Create expected status tree
  expected_status = svntest.actions.get_virginal_state(wc_backup, 2)
  expected_status.tweak('A/D/G/rho', status=' C')

  svntest.actions.run_and_verify_status(wc_backup, expected_status)
+
+#----------------------------------------------------------------------
+
def update_delete_modified_files(sbox):
  "update that deletes modified files"

  # Locally modify items, then update to a revision in which they were
  # deleted: the update must raise tree conflicts (issue #2282) instead
  # of the historical 'obstructed update' error or silent success.

  sbox.build()
  wc_dir = sbox.wc_dir

  # Delete a file
  alpha_path = sbox.ospath('A/B/E/alpha')
  svntest.actions.run_and_verify_svn(None, [],
                                     'rm', alpha_path)

  # Delete a directory containing files
  G_path = sbox.ospath('A/D/G')
  svntest.actions.run_and_verify_svn(None, [],
                                     'rm', G_path)

  # Commit
  svntest.actions.run_and_verify_svn(None, [],
                                     'ci', '-m', 'log msg', wc_dir)

  ### Update before backdating to avoid obstructed update error for G
  svntest.actions.run_and_verify_svn(None, [],
                                     'up', wc_dir)

  # Backdate to restore deleted items
  svntest.actions.run_and_verify_svn(None, [],
                                     'up', '-r', '1', wc_dir)

  # Modify the file to be deleted, and a file in the directory to be deleted
  svntest.main.file_append(alpha_path, 'appended alpha text\n')
  pi_path = os.path.join(G_path, 'pi')
  svntest.main.file_append(pi_path, 'appended pi text\n')

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/B/E/alpha', 'A/D/G/pi', status='M ')

  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Now update to 'delete' modified items -- that is, remove them from
  # version control, but leave them on disk.  It used to be we would
  # expect an 'obstructed update' error (see issue #1196), then we
  # expected success (see issue #1806), and now we expect tree conflicts
  # (see issue #2282) on the missing or unversioned items.
  expected_output = svntest.wc.State(wc_dir, {
    'A/B/E/alpha' : Item(status='  ', treeconflict='C'),
    'A/D/G' : Item(status='  ', treeconflict='C'),
    })

  expected_disk = svntest.main.greek_state.copy()
  expected_disk.tweak('A/B/E/alpha',
                      contents=\
                      "This is the file 'alpha'.\nappended alpha text\n")
  expected_disk.tweak('A/D/G/pi',
                      contents=\
                      "This is the file 'pi'.\nappended pi text\n")

  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
  # A/B/E/alpha and the subtree rooted at A/D/G had local modifications
  # prior to the update.  So there is a tree conflict and both A/B/E/alpha
  # and A/D/G remain after the update, scheduled for addition as copies of
  # themselves from r1, along with the local modifications.
  # (A redundant duplicate tweak of A/D/G/pi -- status only, immediately
  # superseded by the full tweak below -- was removed.)
  expected_status.tweak('A/B/E/alpha', status='A ', copied='+', wc_rev='-',
                        treeconflict='C')
  expected_status.tweak('A/D/G/pi', status='M ', copied='+', wc_rev='-')
  expected_status.tweak('A/D/G/rho', 'A/D/G/tau', status='  ', copied='+',
                        wc_rev='-')
  expected_status.tweak('A/D/G', status='A ', copied='+', wc_rev='-',
                        treeconflict='C')

  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        expected_status)
+
+#----------------------------------------------------------------------
+
+# Issue 847. Doing an add followed by a remove for an item in state
+# "deleted" caused the "deleted" state to get forgotten
+
def update_after_add_rm_deleted(sbox):
  "update after add/rm of deleted state"

  # Regression test for issue #847: an add followed by a forced remove
  # of an item in state "deleted" caused the "deleted" state to be
  # forgotten, so a later update failed to restore the item.

  sbox.build()
  wc_dir = sbox.wc_dir

  # Delete a file and directory from WC
  alpha_path = sbox.ospath('A/B/E/alpha')
  F_path = sbox.ospath('A/B/F')
  svntest.actions.run_and_verify_svn(None, [], 'rm', alpha_path, F_path)

  # Commit deletion
  expected_output = svntest.wc.State(wc_dir, {
    'A/B/E/alpha' : Item(verb='Deleting'),
    'A/B/F' : Item(verb='Deleting'),
    })

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.remove('A/B/E/alpha')
  expected_status.remove('A/B/F')

  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status)

  # alpha and F are now in state "deleted", next we add a new ones
  svntest.main.file_append(alpha_path, "new alpha")
  svntest.actions.run_and_verify_svn(None, [], 'add', alpha_path)

  svntest.actions.run_and_verify_svn(None, [], 'mkdir', F_path)

  # New alpha and F should be in add state A
  expected_status.add({
    'A/B/E/alpha' : Item(status='A ', wc_rev=0),
    'A/B/F' : Item(status='A ', wc_rev=0),
    })

  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Forced removal of new alpha and F must restore "deleted" state

  svntest.actions.run_and_verify_svn(None, [], 'rm', '--force',
                                     alpha_path, F_path)
  # '--force' removal must also have deleted the items from disk.
  if os.path.exists(alpha_path) or os.path.exists(F_path):
    raise svntest.Failure

  # "deleted" state is not visible in status
  expected_status.remove('A/B/E/alpha', 'A/B/F')

  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Although parent dir is already at rev 1, the "deleted" state will cause
  # alpha and F to be restored in the WC when updated to rev 1
  svntest.actions.run_and_verify_svn(None, [], 'up', '-r', '1', wc_dir)

  expected_status.add({
    'A/B/E/alpha' : Item(status='  ', wc_rev=1),
    'A/B/F' : Item(status='  ', wc_rev=1),
    })

  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+
+# Issue 1591. Updating a working copy which contains local
+# obstructions marks a directory as incomplete. Removal of the
+# obstruction and subsequent update should clear the "incomplete"
+# flag.
+
def obstructed_update_alters_wc_props(sbox):
  "obstructed update alters WC properties"

  # Regression test for issue #1591: an update over a local obstruction
  # marked the directory incomplete; removing the obstruction and
  # updating again must clear the "incomplete" flag.
  #
  # Cleanup vs. the previous revision: commented-out svntest.factory
  # scaffolding and '#print' debug lines removed, and the bare
  # 'actions.' references replaced with the fully-qualified
  # 'svntest.actions.' used everywhere else in this file.

  sbox.build()
  wc_dir = sbox.wc_dir

  # Create a new dir in the repo in prep for creating an obstruction.
  svntest.actions.run_and_verify_svn(None, [],
                                     'mkdir', '-m',
                                     'prep for obstruction',
                                     sbox.repo_url + '/A/foo')

  # Create an obstruction, a file in the WC with the same name as
  # present in a newer rev of the repo.
  obstruction_parent_path = sbox.ospath('A')
  obstruction_path = os.path.join(obstruction_parent_path, 'foo')
  svntest.main.file_append(obstruction_path, 'an obstruction')

  # Update the WC to that newer rev to trigger the obstruction.
  expected_output = svntest.wc.State(wc_dir, {
    'A/foo' : Item(status='  ', treeconflict='C'),
    })

  expected_disk = svntest.main.greek_state.copy()
  expected_disk.add({
    'A/foo' : Item(contents="an obstruction"),
    })

  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
  expected_status.add({
    'A/foo' : Item(status='D ', treeconflict='C', wc_rev=2),
    })

  svntest.actions.run_and_verify_update(wc_dir, expected_output,
                                        expected_disk, expected_status)

  # Remove the file which caused the obstruction.
  os.unlink(obstruction_path)

  # Clear the tree conflict recorded on the now-removed obstruction.
  svntest.main.run_svn(None, 'revert', obstruction_path)

  # Update the -- now unobstructed -- WC again.
  expected_output = svntest.wc.State(wc_dir, {
    })

  expected_disk = svntest.main.greek_state.copy()
  expected_disk.add({
    'A/foo' : Item(),
    })

  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
  expected_status.add({
    'A/foo' : Item(status='  ', wc_rev=2),
    })

  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        expected_status)

  # The previously obstructed resource should now be in the WC.
  if not os.path.isdir(obstruction_path):
    raise svntest.Failure
+
+#----------------------------------------------------------------------
+
+# Issue 938.
def update_replace_dir(sbox):
  "update that replaces a directory"

  # Regression test for issue #938: delete a directory (r2), re-add a
  # directory of the same name (r3), then verify that updating back to
  # r1 replaces the directory via a 'D' followed by an 'A'.

  sbox.build()
  wc_dir = sbox.wc_dir

  # Delete a directory
  F_path = sbox.ospath('A/B/F')
  svntest.actions.run_and_verify_svn(None, [], 'rm', F_path)

  # Commit deletion
  expected_output = svntest.wc.State(wc_dir, {
    'A/B/F' : Item(verb='Deleting'),
    })

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.remove('A/B/F')

  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status)

  # Add replacement directory
  svntest.actions.run_and_verify_svn(None, [], 'mkdir', F_path)

  # Commit addition
  expected_output = svntest.wc.State(wc_dir, {
    'A/B/F' : Item(verb='Adding'),
    })

  # This second commit creates r3; only A/B/F moves to that revision,
  # the rest of the working copy stays at r1.
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/B/F', wc_rev=3)

  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status)

  # Update to HEAD
  expected_output = svntest.wc.State(wc_dir, {
    })

  expected_disk = svntest.main.greek_state.copy()

  expected_status = svntest.actions.get_virginal_state(wc_dir, 3)

  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        expected_status)

  # Update to revision 1 replaces the directory
  expected_output = svntest.wc.State(wc_dir, {
    'A/B/F' : Item(status='A ', prev_status='D '),
    })
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        expected_status,
                                        [], False,
                                        '-r', '1', wc_dir)

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)

  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
+
def update_single_file(sbox):
  "update with explicit file target"

  # Check that updating a single file by name works; at one stage
  # 'svn up file' from inside the parent directory failed with a
  # parent lock error.
  # (An unused 'expected_disk = svntest.main.greek_state.copy()' local
  # was removed from this test.)

  sbox.build()
  wc_dir = sbox.wc_dir

  # Make a local mod to a file which will be committed
  mu_path = sbox.ospath('A/mu')
  svntest.main.file_append(mu_path, '\nAppended text for mu')

  # Commit, creating revision 2.
  expected_output = svntest.wc.State(wc_dir, {
    'A/mu' : Item(verb='Sending'),
    })

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/mu', wc_rev=2)

  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status)

  # At one stage 'svn up file' failed with a parent lock error
  was_cwd = os.getcwd()
  os.chdir(sbox.ospath('A'))

  ### Can't get run_and_verify_update to work having done the chdir.
  svntest.actions.run_and_verify_svn(None, [],
                                     'up', '-r', '1', 'mu')
  os.chdir(was_cwd)

  # With mu back at r1, the whole WC must look like a pristine r1 checkout.
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)

  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+#----------------------------------------------------------------------
def prop_update_on_scheduled_delete(sbox):
  "receive prop update to file scheduled for deletion"

  # Commit a property change to iota from one working copy, schedule
  # iota for deletion in a second working copy, then update the second:
  # the incoming prop change on the locally deleted file must produce a
  # tree conflict.

  sbox.build()
  wc_dir = sbox.wc_dir

  other_wc = sbox.add_wc_path('other')

  # Make the "other" working copy.
  svntest.actions.duplicate_dir(wc_dir, other_wc)

  iota_path = sbox.ospath('iota')
  other_iota_path = os.path.join(other_wc, 'iota')

  # Set a property on iota in the first working copy.
  svntest.main.run_svn(None, 'propset', 'foo', 'bar', iota_path)

  # Created expected output tree for 'svn ci'
  expected_output = svntest.wc.State(wc_dir, {
    'iota' : Item(verb='Sending'),
    })

  # Create expected status tree
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('iota', wc_rev=2)

  # Commit the change, creating revision 2.
  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status)

  # Schedule iota for deletion in the other working copy.
  svntest.main.run_svn(None, 'rm', other_iota_path)

  # Expected output tree for update of other_wc.
  expected_output = svntest.wc.State(other_wc, {
    'iota' : Item(status='  ', treeconflict='C'),
    })

  # Expected disk tree for the update.
  expected_disk = svntest.main.greek_state.copy()
  expected_disk.remove('iota')

  # Expected status tree for the update: still scheduled for deletion,
  # now also tree-conflicted.
  expected_status = svntest.actions.get_virginal_state(other_wc, 2)
  expected_status.tweak('iota', status='D ', treeconflict='C')

  # Do the update and check the results in three ways.
  svntest.actions.run_and_verify_update(other_wc,
                                        expected_output,
                                        expected_disk,
                                        expected_status)
+
+#----------------------------------------------------------------------
+
def update_receive_illegal_name(sbox):
  "bail when receive a file or dir named .svn"

  sbox.build()
  wc_dir = sbox.wc_dir

  # This tests the revision 4334 fix for issue #1068.

  legal_url = sbox.repo_url + '/A/D/G/svn'
  illegal_url = (sbox.repo_url
                 + '/A/D/G/' + svntest.main.get_admin_name())
  # Ha! The client doesn't allow us to mkdir a '.svn' but it does
  # allow us to copy to a '.svn' so ...
  svntest.actions.run_and_verify_svn(None, [],
                                     'mkdir', '-m', 'log msg',
                                     legal_url)
  svntest.actions.run_and_verify_svn(None, [],
                                     'mv', '-m', 'log msg',
                                     legal_url, illegal_url)

  # Do the update twice, both should fail. After the first failure
  # the wc will be marked "incomplete".
  for n in range(2):
    exit_code, out, err = svntest.main.run_svn(1, 'up', wc_dir)
    # for/else: the 'else' runs only when the inner loop finishes
    # without 'break', i.e. no stderr line mentioned the expected
    # "of the same name" error -- the update did not fail as required.
    for line in err:
      if line.find("of the same name") != -1:
        break
    else:
      raise svntest.Failure

  # At one stage an obstructed update in an incomplete wc would leave
  # a txn behind
  exit_code, out, err = svntest.main.run_svnadmin('lstxns', sbox.repo_dir)
  if out or err:
    raise svntest.Failure
+
+#----------------------------------------------------------------------
+
def update_deleted_missing_dir(sbox):
  "update missing dir to rev in which it is absent"

  # Delete two directories in r2, back-date the WC to r1, remove the
  # directories from disk, then update them (by name, and then the
  # whole WC) to r2, where they no longer exist.

  sbox.build()
  wc_dir = sbox.wc_dir

  E_path = sbox.ospath('A/B/E')
  H_path = sbox.ospath('A/D/H')

  # Create a new revision with directories deleted
  svntest.main.run_svn(None, 'rm', E_path)
  svntest.main.run_svn(None, 'rm', H_path)
  svntest.main.run_svn(None,
                       'ci', '-m', 'log msg', E_path, H_path)

  # Update back to the old revision
  svntest.main.run_svn(None,
                       'up', '-r', '1', wc_dir)

  # Delete the directories from disk
  svntest.main.safe_rmtree(E_path)
  svntest.main.safe_rmtree(H_path)

  # Create expected output tree for an update of the missing items by name
  expected_output = svntest.wc.State(wc_dir, {
    'A/D/H/psi' : Item(verb='Restored'),
    'A/D/H/omega' : Item(verb='Restored'),
    'A/D/H/chi' : Item(verb='Restored'),
    'A/B/E/beta' : Item(verb='Restored'),
    'A/B/E/alpha' : Item(verb='Restored'),
    # A/B/E and A/D/H are also restored, but are then overridden by the delete
    'A/B/E' : Item(status='D ', prev_verb='Restored'),
    'A/D/H' : Item(status='D ', prev_verb='Restored'),
    })

  # Create expected disk tree for the update.
  expected_disk = svntest.main.greek_state.copy()
  expected_disk.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta')
  expected_disk.remove('A/D/H', 'A/D/H/chi', 'A/D/H/omega', 'A/D/H/psi')

  # Create expected status tree for the update.
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta')
  expected_status.remove('A/D/H', 'A/D/H/chi', 'A/D/H/omega', 'A/D/H/psi')

  # Do the update, specifying the deleted paths explicitly.
  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        expected_status,
                                        [], False,
                                        "-r", "2", E_path, H_path)

  # Update back to the old revision again
  svntest.main.run_svn(None,
                       'up', '-r', '1', wc_dir)

  # This time we're updating the whole working copy
  expected_status.tweak(wc_rev=2)

  # And now we don't expect restore operations
  expected_output = svntest.wc.State(wc_dir, {
    'A/B/E' : Item(status='D '),
    'A/D/H' : Item(status='D '),
    })

  # Do the update, on the whole working copy this time
  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        expected_status,
                                        [], False,
                                        "-r", "2", wc_dir)
+
+#----------------------------------------------------------------------
+
+# Issue 919. This test was written as a regression test for "item
+# should remain 'deleted' when an update deletes a sibling".
def another_hudson_problem(sbox):
  "another \"hudson\" problem: updates that delete"

  # Regression test (issue #919 family): an update that deletes a
  # sibling must leave other 'deleted' entries intact.

  sbox.build()
  wc_dir = sbox.wc_dir

  # Delete/commit gamma thus making it 'deleted'
  gamma_path = sbox.ospath('A/D/gamma')
  svntest.main.run_svn(None, 'rm', gamma_path)

  expected_output = svntest.wc.State(wc_dir, {
    'A/D/gamma' : Item(verb='Deleting'),
    })

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.remove('A/D/gamma')

  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output,
                                        expected_status)

  # Delete directory G from the repository
  svntest.actions.run_and_verify_svn(['Committing transaction...\n',
                                      'Committed revision 3.\n'], [],
                                     'rm', '-m', 'log msg',
                                     sbox.repo_url + '/A/D/G')

  # Remove corresponding tree from working copy
  G_path = sbox.ospath('A/D/G')
  svntest.main.safe_rmtree(G_path)

  # Update missing directory to receive the delete, this should mark G
  # as 'deleted' and should not alter gamma's entry.

  # The missing items are first 'Restored' from the pristine store and
  # then deleted again by the incoming r3 change.
  expected_output = ["Updating '%s':\n" % (G_path),
                     'Restored \'' + G_path + '\'\n',
                     'Restored \'' + G_path + os.path.sep + 'pi\'\n',
                     'Restored \'' + G_path + os.path.sep + 'rho\'\n',
                     'Restored \'' + G_path + os.path.sep + 'tau\'\n',
                     'D    '+G_path+'\n',
                     'Updated to revision 3.\n',
                     ]

  # Sigh, I can't get run_and_verify_update to work (but not because
  # of issue 919 as far as I can tell)
  expected_output = svntest.verify.UnorderedOutput(expected_output)
  svntest.actions.run_and_verify_svn(expected_output, [],
                                     'up', G_path)

  # Both G and gamma should be 'deleted', update should produce no output
  expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
  expected_status.remove('A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau',
                         'A/D/gamma')

  expected_disk = svntest.main.greek_state.copy()
  expected_disk.remove('A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau',
                       'A/D/gamma')

  # NOTE(review): "" is passed where a State is usually expected --
  # presumably meaning "expect no output"; confirm against
  # run_and_verify_update's contract.
  svntest.actions.run_and_verify_update(wc_dir,
                                        "",
                                        expected_disk,
                                        expected_status)
+
+#----------------------------------------------------------------------
def update_deleted_targets(sbox):
  "explicit update of deleted=true targets"

  # Regression test for issue #2250: explicitly updating items whose
  # entries carry 'deleted=true' must not discard those entries; they
  # are needed to restore the items on a back-dated update.

  sbox.build()
  wc_dir = sbox.wc_dir

  # Delete/commit thus creating 'deleted=true' entries
  gamma_path = sbox.ospath('A/D/gamma')
  F_path = sbox.ospath('A/B/F')
  svntest.main.run_svn(None, 'rm', gamma_path, F_path)

  expected_output = svntest.wc.State(wc_dir, {
    'A/D/gamma' : Item(verb='Deleting'),
    'A/B/F' : Item(verb='Deleting'),
    })

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.remove('A/D/gamma', 'A/B/F')

  svntest.actions.run_and_verify_commit(wc_dir,
                                        expected_output,
                                        expected_status)

  # Explicit update must not remove the 'deleted=true' entries
  svntest.actions.run_and_verify_svn(exp_noop_up_out(2), [],
                                     'update', gamma_path)
  svntest.actions.run_and_verify_svn(exp_noop_up_out(2), [],
                                     'update', F_path)

  # Update to r1 to restore items, since the parent directory is already
  # at r1 this fails if the 'deleted=true' entries are missing (issue 2250)
  expected_output = svntest.wc.State(wc_dir, {
    'A/D/gamma' : Item(status='A '),
    'A/B/F' : Item(status='A '),
    })

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)

  expected_disk = svntest.main.greek_state.copy()

  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        expected_status,
                                        [], False,
                                        '-r', '1', wc_dir)
+
+
+
+#----------------------------------------------------------------------
+
def new_dir_with_spaces(sbox):
  "receive new dir with spaces in its name"

  sbox.build()
  wc_dir = sbox.wc_dir

  # Commit a directory with a space in its name straight to the
  # repository ('%20' is the URL encoding of a space).
  svntest.actions.run_and_verify_svn(['Committing transaction...\n',
                                      'Committed revision 2.\n'], [],
                                     'mkdir', '-m', 'log msg',
                                     sbox.repo_url + '/A/spacey%20dir')

  # Updating must pull in the new directory without the space in its
  # name choking the RA layer (historically ra_neon).
  expected_output = svntest.wc.State(wc_dir, {
    'A/spacey dir' : Item(status='A '),
    })

  expected_disk = svntest.main.greek_state.copy()
  expected_disk.add({
    'A/spacey dir' : Item(),
    })

  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
  expected_status.add({
    'A/spacey dir' : Item(status='  ', wc_rev=2),
    })

  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        expected_status)
+
+#----------------------------------------------------------------------
+
def non_recursive_update(sbox):
  "non-recursive update"

  # Commit changes to two files at different depths, back-date the WC,
  # then verify that a '-N' (non-recursive) update of A touches A/mu
  # but leaves the deeper A/D/G/rho at the old revision.

  sbox.build()
  wc_dir = sbox.wc_dir

  # Commit a change to A/mu and A/D/G/rho
  mu_path = sbox.ospath('A/mu')
  rho_path = sbox.ospath('A/D/G/rho')

  svntest.main.file_append(mu_path, "new")
  svntest.main.file_append(rho_path, "new")

  expected_output = svntest.wc.State(wc_dir, {
    'A/mu' : Item(verb='Sending'),
    'A/D/G/rho' : Item(verb='Sending'),
    })

  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/mu', 'A/D/G/rho', wc_rev=2)

  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status)

  # Update back to revision 1
  expected_output = svntest.wc.State(wc_dir, {
    'A/mu' : Item(status='U '),
    'A/D/G/rho' : Item(status='U '),
    })

  expected_disk = svntest.main.greek_state.copy()

  expected_status.tweak('A/mu', 'A/D/G/rho', wc_rev=1)

  svntest.actions.run_and_verify_update(wc_dir, expected_output,
                                        expected_disk, expected_status,
                                        [], False,
                                        '-r', '1', wc_dir)

  # Non-recursive update of A should change A/mu but not A/D/G/rho
  A_path = sbox.ospath('A')

  expected_output = svntest.wc.State(wc_dir, {
    'A/mu' : Item(status='U '),
    })

  # Only the update target and its immediate file child move to r2.
  expected_status.tweak('A', 'A/mu', wc_rev=2)

  expected_disk.tweak('A/mu', contents="This is the file 'mu'.\nnew")

  svntest.actions.run_and_verify_update(wc_dir, expected_output,
                                        expected_disk, expected_status,
                                        [], False,
                                        '-N', A_path)
+
+#----------------------------------------------------------------------
+
def checkout_empty_dir(sbox):
  "check out an empty dir"
  # Regression test for issue #1472: a freshly checked-out empty
  # directory must not be marked incomplete ("!" in status).
  sbox.build(create_wc = False)
  wc_dir = sbox.wc_dir

  empty_dir_url = sbox.repo_url + '/A/C'

  svntest.main.safe_rmtree(wc_dir)
  svntest.actions.run_and_verify_svn(None, [],
                                     'checkout', empty_dir_url, wc_dir)

  # A clean checkout must produce zero status output.
  svntest.actions.run_and_verify_svn([], [], 'status', wc_dir)
+
+
+#----------------------------------------------------------------------
+# Regression test for issue #919: "another ghudson bug". Basically, if
+# we fore- or back-date an item until it no longer exists, we were
+# completely removing the entry, rather than marking it 'deleted'
+# (which we now do.)
+
def update_to_deletion(sbox):
  "update target till it's gone, then get it back"

  sbox.build()
  wc_dir = sbox.wc_dir

  iota_path = sbox.ospath('iota')

  # Back-date iota to r0: the file postdates r0, so it must disappear,
  # leaving a 'deleted' marker rather than forgetting the entry.
  expected_output = svntest.wc.State(wc_dir, {
    'iota' : Item(status='D '),
    })

  expected_disk = svntest.main.greek_state.copy()
  expected_disk.remove('iota')

  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        None,
                                        [], False,
                                        '-r', '0', iota_path)

  # An update of the working-copy root must bring iota back.
  expected_output = svntest.wc.State(wc_dir, {
    'iota' : Item(status='A '),
    })

  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        svntest.main.greek_state.copy(),
                                        None)
+
+
+#----------------------------------------------------------------------
+
def update_deletion_inside_out(sbox):
  "update child before parent of a deleted tree"

  sbox.build()
  wc_dir = sbox.wc_dir

  # The deleted subtree: B is the parent, E the child we update first.
  # (The child could just as well be a file; it doesn't matter.)
  parent_dir = sbox.ospath('A/B')
  child_dir = os.path.join(parent_dir, 'E')

  # Delete the parent directory and commit, creating r2.
  svntest.actions.run_and_verify_svn(None, [],
                                     'rm', parent_dir)
  svntest.actions.run_and_verify_svn(None, [],
                                     'ci', '-m', '', wc_dir)

  # Back-date everything to r1, then pull only the child forward to r2.
  svntest.actions.run_and_verify_svn(None, [],
                                     'update', '-r', '1', wc_dir)
  svntest.actions.run_and_verify_svn(None, [],
                                     'update', '-r', '2', child_dir)

  # A plain update must now cleanly delete the remaining parent tree.
  expected_output = svntest.wc.State(wc_dir, {
    'A/B' : Item(status='D '),
    })

  expected_disk = svntest.main.greek_state.copy()
  expected_disk.remove('A/B', 'A/B/lambda', 'A/B/F',
                       'A/B/E', 'A/B/E/alpha', 'A/B/E/beta')

  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        None)
+
+
+#----------------------------------------------------------------------
+# Regression test for issue #1793, whereby 'svn up dir' would delete
+# dir if schedule-add. Yikes.
+
def update_schedule_add_dir(sbox):
  "update a schedule-add directory"

  # Regression test for issue #1793: 'svn up dir' used to delete a
  # schedule-add directory.

  sbox.build()
  wc_dir = sbox.wc_dir

  # Delete directory A/D/G in the repository via immediate commit
  G_path = sbox.ospath('A/D/G')
  G_url = sbox.repo_url + '/A/D/G'
  svntest.actions.run_and_verify_svn(None, [],
                                     'rm', G_url, '-m', 'rev 2')

  # Update the wc to HEAD (r2)
  expected_output = svntest.wc.State(wc_dir, {
    'A/D/G' : Item(status='D '),
    })

  expected_disk = svntest.main.greek_state.copy()
  expected_disk.remove('A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau')

  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
  expected_status.remove('A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau')

  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        expected_status)

  # Do a URL->wc copy, creating a new schedule-add A/D/G.
  # (Standard procedure when trying to resurrect the directory.)
  D_path = sbox.ospath('A/D')
  svntest.actions.run_and_verify_svn(None, [],
                                     'cp', G_url + '@1', D_path)

  # status should now show the dir scheduled for addition-with-history
  expected_status.add({
    'A/D/G' : Item(status='A ', copied='+', wc_rev='-'),
    'A/D/G/pi' : Item(status='  ', copied='+', wc_rev='-'),
    'A/D/G/rho' : Item(status='  ', copied='+', wc_rev='-'),
    'A/D/G/tau' : Item(status='  ', copied='+', wc_rev='-'),
    })

  svntest.actions.run_and_verify_status(wc_dir, expected_status)

  # Now update with the schedule-add dir as the target.
  svntest.actions.run_and_verify_svn(None, [], 'up', G_path)

  # The update should be a no-op, and the schedule-add directory
  # should still exist! 'svn status' shouldn't change at all.
  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+#----------------------------------------------------------------------
+# Test updating items that do not exist in the current WC rev, but do
+# exist at some future revision.
+
def update_to_future_add(sbox):
  "update target that was added in a future rev"

  # Back-date the whole WC to r0, then update individual targets (a
  # file, then a directory tree) forward to revisions in which they
  # were added "in the future" relative to the WC revision.

  sbox.build()
  wc_dir = sbox.wc_dir

  # Update the entire WC to rev 0
  # Create expected output tree for an update to rev 0
  expected_output = svntest.wc.State(wc_dir, {
    'iota' : Item(status='D '),
    'A' : Item(status='D '),
    })

  # Create expected disk tree for the update to rev 0
  expected_disk = svntest.wc.State(wc_dir, { })

  # Do the update and check the results.
  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        None,
                                        [], False,
                                        '-r', '0', wc_dir)

  # Update iota to the current HEAD.
  iota_path = sbox.ospath('iota')

  expected_output = svntest.wc.State(wc_dir, {
    'iota' : Item(status='A '),
    })

  expected_disk = svntest.wc.State('', {
    'iota' : Item("This is the file 'iota'.\n")
    })

  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        None,
                                        [], False,
                                        iota_path)

  # Now try updating the directory into the future
  A_path = sbox.ospath('A')

  # Every node of the A subtree is added by the forward update.
  expected_output = svntest.wc.State(wc_dir, {
    'A' : Item(status='A '),
    'A/mu' : Item(status='A '),
    'A/B' : Item(status='A '),
    'A/B/lambda' : Item(status='A '),
    'A/B/E' : Item(status='A '),
    'A/B/E/alpha' : Item(status='A '),
    'A/B/E/beta' : Item(status='A '),
    'A/B/F' : Item(status='A '),
    'A/C' : Item(status='A '),
    'A/D' : Item(status='A '),
    'A/D/gamma' : Item(status='A '),
    'A/D/G' : Item(status='A '),
    'A/D/G/pi' : Item(status='A '),
    'A/D/G/rho' : Item(status='A '),
    'A/D/G/tau' : Item(status='A '),
    'A/D/H' : Item(status='A '),
    'A/D/H/chi' : Item(status='A '),
    'A/D/H/psi' : Item(status='A '),
    'A/D/H/omega' : Item(status='A ')
    })

  expected_disk = svntest.main.greek_state.copy()

  svntest.actions.run_and_verify_update(wc_dir,
                                        expected_output,
                                        expected_disk,
                                        None,
                                        [], False,
                                        A_path)
+
+#----------------------------------------------------------------------
+
def update_xml_unsafe_dir(sbox):
  "update dir with xml-unsafe name"

  # Commit and then update a directory whose name contains characters
  # that are awkward for XML-based protocols (leading space, '&').
  # (An unused backup working copy that this test created but never
  # touched has been removed.)

  sbox.build()
  wc_dir = sbox.wc_dir

  # Create a versioned directory with an xml-unsafe name.
  test_path = sbox.ospath(' foo & bar')
  svntest.main.run_svn(None, 'mkdir', test_path)

  # Created expected output tree for 'svn ci'
  expected_output = wc.State(wc_dir, {
    ' foo & bar' : Item(verb='Adding'),
    })

  # Create expected status tree; all local revisions should be at 1,
  # but 'foo & bar' should be at revision 2.
  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
  expected_status.add({
    ' foo & bar' : Item(status='  ', wc_rev=2),
    })

  # Commit.
  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
                                        expected_status)

  # chdir into the funky path, and update from there.
  # NOTE(review): the working directory is not restored afterwards;
  # presumably the test harness resets the cwd between tests -- confirm.
  os.chdir(test_path)

  expected_output = wc.State('', {
    })

  expected_disk = wc.State('', {
    })

  expected_status = wc.State('', {
    '' : Item(status='  ', wc_rev=2),
    })

  svntest.actions.run_and_verify_update('', expected_output, expected_disk,
                                        expected_status)
+
+#----------------------------------------------------------------------
+# eol-style handling during update with conflicts, scenario 1:
+# when update creates a conflict on a file, make sure the file and files
+# r<left>, r<right> and .mine are in the eol-style defined for that file.
+#
+# This test for 'svn merge' can be found in merge_tests.py as
+# merge_conflict_markers_matching_eol.
+def conflict_markers_matching_eol(sbox):
+ "conflict markers should match the file's eol style"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ filecount = 1
+
+ mu_path = sbox.ospath('A/mu')
+
+ # CRLF is a string that will match a CRLF sequence read from a text file.
+ # ### On Windows, we assume CRLF will be read as LF, so it's a poor test.
+ if os.name == 'nt':
+ crlf = '\n'
+ else:
+ crlf = '\r\n'
+
+ # Strict EOL style matching breaks Windows tests at least with Python 2
+ keep_eol_style = not svntest.main.is_os_windows()
+
+ # Checkout a second working copy
+ wc_backup = sbox.add_wc_path('backup')
+ svntest.actions.run_and_verify_svn(None, [], 'checkout',
+ sbox.repo_url, wc_backup)
+
+ # set starting revision
+ cur_rev = 1
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_dir, cur_rev)
+ expected_backup_status = svntest.actions.get_virginal_state(wc_backup,
+ cur_rev)
+
+ path_backup = os.path.join(wc_backup, 'A', 'mu')
+
+ # do the test for each eol-style
+ for eol, eolchar in zip(['CRLF', 'CR', 'native', 'LF'],
+ [crlf, '\015', '\n', '\012']):
+ # rewrite file mu and set the eol-style property.
+ svntest.main.file_write(mu_path, "This is the file 'mu'."+ eolchar, 'wb')
+ svntest.main.run_svn(None, 'propset', 'svn:eol-style', eol, mu_path)
+
+ expected_disk.add({
+ 'A/mu' : Item("This is the file 'mu'." + eolchar)
+ })
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/mu' : Item(verb='Sending'),
+ })
+
+ expected_status.tweak(wc_rev = cur_rev)
+ expected_status.add({
+ 'A/mu' : Item(status=' ', wc_rev = cur_rev + 1),
+ })
+
+ # Commit the original change and note the 'base' revision number
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+ cur_rev = cur_rev + 1
+ base_rev = cur_rev
+
+ svntest.main.run_svn(None, 'update', wc_backup)
+
+ # Make a local mod to mu
+ svntest.main.file_append(mu_path,
+ 'Original appended text for mu' + eolchar)
+
+ # Commit the original change and note the 'theirs' revision number
+ svntest.main.run_svn(None, 'commit', '-m', 'test log', wc_dir)
+ cur_rev = cur_rev + 1
+ theirs_rev = cur_rev
+
+ # Make a local mod to mu, will conflict with the previous change
+ svntest.main.file_append(path_backup,
+ 'Conflicting appended text for mu' + eolchar)
+
+ # Create expected output tree for an update of the wc_backup.
+ expected_backup_output = svntest.wc.State(wc_backup, {
+ 'A/mu' : Item(status='C '),
+ })
+
+ # Create expected disk tree for the update.
+ expected_backup_disk = expected_disk.copy()
+
+ # verify content of resulting conflicted file
+ expected_backup_disk.add({
+ 'A/mu' : Item(contents= "This is the file 'mu'." + eolchar +
+ "<<<<<<< .mine" + eolchar +
+ "Conflicting appended text for mu" + eolchar +
+ "||||||| .r" + str(cur_rev - 1) + eolchar +
+ "=======" + eolchar +
+ "Original appended text for mu" + eolchar +
+ ">>>>>>> .r" + str(cur_rev) + eolchar),
+ })
+ # verify content of base(left) file
+ expected_backup_disk.add({
+ 'A/mu.r' + str(base_rev ) : Item(contents= "This is the file 'mu'." +
+ eolchar)
+ })
+ # verify content of theirs(right) file
+ expected_backup_disk.add({
+ 'A/mu.r' + str(theirs_rev ) : Item(contents= "This is the file 'mu'." +
+ eolchar +
+ "Original appended text for mu" + eolchar)
+ })
+ # verify content of mine file
+ expected_backup_disk.add({
+ 'A/mu.mine' : Item(contents= "This is the file 'mu'." +
+ eolchar +
+ "Conflicting appended text for mu" + eolchar)
+ })
+
+ # Create expected status tree for the update.
+ expected_backup_status.add({
+ 'A/mu' : Item(status=' ', wc_rev=cur_rev),
+ })
+ expected_backup_status.tweak('A/mu', status='C ')
+ expected_backup_status.tweak(wc_rev = cur_rev)
+
+ # Do the update and check the results in three ways.
+ svntest.actions.run_and_verify_update2(wc_backup,
+ expected_backup_output,
+ expected_backup_disk,
+ expected_backup_status,
+ keep_eol_style=keep_eol_style)
+
+ # cleanup for next run
+ svntest.main.run_svn(None, 'revert', '-R', wc_backup)
+ svntest.main.run_svn(None, 'update', wc_dir)
+
+# eol-style handling during update, scenario 2:
+# if part of that update is a propchange (add, change, delete) of
+# svn:eol-style, make sure the correct eol-style is applied before
+# calculating the merge (and conflicts if any)
+#
+# This test for 'svn merge' can be found in merge_tests.py as
+# merge_eolstyle_handling.
+def update_eolstyle_handling(sbox):
+  "handle eol-style propchange during update"
+
+  # Three sub-tests: (1) add svn:eol-style, (2) change it, (3) delete it —
+  # each time updating a WC that holds an uncommitted text mod to the same
+  # file; none of them may produce a conflict.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  mu_path = sbox.ospath('A/mu')
+
+  # CRLF is a string that will match a CRLF sequence read from a text file.
+  # ### On Windows, we assume CRLF will be read as LF, so it's a poor test.
+  if os.name == 'nt':
+    crlf = '\n'
+  else:
+    crlf = '\r\n'
+
+  # Strict EOL style matching breaks Windows tests at least with Python 2
+  keep_eol_style = not svntest.main.is_os_windows()
+
+  # Checkout a second working copy
+  wc_backup = sbox.add_wc_path('backup')
+  svntest.actions.run_and_verify_svn(None, [], 'checkout',
+                                     sbox.repo_url, wc_backup)
+  path_backup = os.path.join(wc_backup, 'A', 'mu')
+
+  # Test 1: add the eol-style property and commit, change mu in the second
+  # working copy and update; there should be no conflict!
+  svntest.main.run_svn(None, 'propset', 'svn:eol-style', "CRLF", mu_path)
+  svntest.main.run_svn(None,
+                       'commit', '-m', 'set eol-style property', wc_dir)
+
+  # '\012' is LF; append in binary mode so no platform translation occurs.
+  svntest.main.file_append_binary(path_backup, 'Added new line of text.\012')
+
+  expected_backup_disk = svntest.main.greek_state.copy()
+  expected_backup_disk.tweak(
+  'A/mu', contents= "This is the file 'mu'." + crlf +
+    "Added new line of text." + crlf)
+
+  # 'GU': text merged ('G') and a property updated ('U') in one update.
+  expected_backup_output = svntest.wc.State(wc_backup, {
+    'A/mu' : Item(status='GU'),
+    })
+
+  expected_backup_status = svntest.actions.get_virginal_state(wc_backup, 2)
+  expected_backup_status.tweak('A/mu', status='M ')
+
+  svntest.actions.run_and_verify_update2(wc_backup,
+                                         expected_backup_output,
+                                         expected_backup_disk,
+                                         expected_backup_status,
+                                         keep_eol_style=keep_eol_style)
+
+  # Test 2: now change the eol-style property to another value and commit,
+  # update the still changed mu in the second working copy; there should be
+  # no conflict!
+  svntest.main.run_svn(None, 'propset', 'svn:eol-style', "CR", mu_path)
+  svntest.main.run_svn(None,
+                       'commit', '-m', 'set eol-style property', wc_dir)
+
+  # '\015' is CR — the new style must be applied to both lines.
+  expected_backup_disk = svntest.main.greek_state.copy()
+  expected_backup_disk.add({
+    'A/mu' : Item(contents= "This is the file 'mu'.\015" +
+      "Added new line of text.\015")
+    })
+
+  expected_backup_output = svntest.wc.State(wc_backup, {
+    'A/mu' : Item(status='GU'),
+    })
+
+  expected_backup_status = svntest.actions.get_virginal_state(wc_backup, 3)
+  expected_backup_status.tweak('A/mu', status='M ')
+
+  svntest.actions.run_and_verify_update2(wc_backup,
+                                         expected_backup_output,
+                                         expected_backup_disk,
+                                         expected_backup_status,
+                                         keep_eol_style=keep_eol_style)
+
+  # Test 3: now delete the eol-style property and commit, update the still
+  # changed mu in the second working copy; there should be no conflict!
+  # EOL of mu should be unchanged (=CR).
+  svntest.main.run_svn(None, 'propdel', 'svn:eol-style', mu_path)
+  svntest.main.run_svn(None,
+                       'commit', '-m', 'del eol-style property', wc_dir)
+
+  expected_backup_disk = svntest.main.greek_state.copy()
+  expected_backup_disk.add({
+    'A/mu' : Item(contents= "This is the file 'mu'.\015" +
+      "Added new line of text.\015")
+    })
+
+  # Only a property change this time: ' U'.
+  expected_backup_output = svntest.wc.State(wc_backup, {
+    'A/mu' : Item(status=' U'),
+    })
+
+  expected_backup_status = svntest.actions.get_virginal_state(wc_backup, 4)
+  expected_backup_status.tweak('A/mu', status='M ')
+  svntest.actions.run_and_verify_update2(wc_backup,
+                                         expected_backup_output,
+                                         expected_backup_disk,
+                                         expected_backup_status,
+                                         keep_eol_style=keep_eol_style)
+
+# Bug in which "update" put a bogus revision number on a schedule-add file,
+# causing the wrong version of it to be committed.
+def update_copy_of_old_rev(sbox):
+  "update schedule-add copy of old rev"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  dir = sbox.ospath('A')
+  dir2 = sbox.ospath('A2')
+  file = os.path.join(dir, 'mu')
+  file2 = os.path.join(dir2, 'mu')
+  url = sbox.repo_url + '/A/mu'
+  url2 = sbox.repo_url + '/A2/mu'
+
+  # Remember the original (r1) text of the file
+  exit_code, text_r1, err = svntest.actions.run_and_verify_svn(None, [],
+                                                               'cat', '-r1',
+                                                               url)
+
+  # Commit a different version of the file (creates r2)
+  svntest.main.file_write(file, "Second revision of 'mu'\n")
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'ci', '-m', '', wc_dir)
+
+  # Copy an old revision (r1) of its directory into a new path in the WC
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'cp', '-r1', dir, dir2)
+
+  # Update. (Should do nothing, but added a bogus "revision" in "entries".)
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'up', wc_dir)
+
+  # Commit, and check that it says it's committing the right thing
+  exp_out = ['Adding         ' + dir2 + '\n',
+             'Committing transaction...\n',
+             'Committed revision 3.\n']
+  svntest.actions.run_and_verify_svn(exp_out, [],
+                                     'ci', '-m', '', wc_dir)
+
+  # Verify the committed file's content: the copy came from r1, so A2/mu
+  # must contain the r1 text, not the r2 text committed above.
+  svntest.actions.run_and_verify_svn(text_r1, [],
+                                     'cat', url2)
+
+#----------------------------------------------------------------------
+def forced_update(sbox):
+  "forced update tolerates obstructions to adds"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Make a backup copy of the working copy
+  wc_backup = sbox.add_wc_path('backup')
+  svntest.actions.duplicate_dir(wc_dir, wc_backup)
+
+  # Make a couple of local mods to files
+  mu_path = sbox.ospath('A/mu')
+  rho_path = sbox.ospath('A/D/G/rho')
+  svntest.main.file_append(mu_path, 'appended mu text')
+  svntest.main.file_append(rho_path, 'new appended text for rho')
+
+  # Add some files
+  nu_path = sbox.ospath('A/B/F/nu')
+  svntest.main.file_append(nu_path, "This is the file 'nu'\n")
+  svntest.main.run_svn(None, 'add', nu_path)
+  kappa_path = sbox.ospath('kappa')
+  svntest.main.file_append(kappa_path, "This is the file 'kappa'\n")
+  svntest.main.run_svn(None, 'add', kappa_path)
+
+  # Add a dir with two files
+  I_path = sbox.ospath('A/C/I')
+  os.mkdir(I_path)
+  svntest.main.run_svn(None, 'add', I_path)
+  upsilon_path = os.path.join(I_path, 'upsilon')
+  svntest.main.file_append(upsilon_path, "This is the file 'upsilon'\n")
+  svntest.main.run_svn(None, 'add', upsilon_path)
+  zeta_path = os.path.join(I_path, 'zeta')
+  svntest.main.file_append(zeta_path, "This is the file 'zeta'\n")
+  svntest.main.run_svn(None, 'add', zeta_path)
+
+  # Create expected output tree for 'svn ci'
+  expected_output = wc.State(wc_dir, {
+    'A/mu'          : Item(verb='Sending'),
+    'A/D/G/rho'     : Item(verb='Sending'),
+    'A/B/F/nu'      : Item(verb='Adding'),
+    'kappa'         : Item(verb='Adding'),
+    'A/C/I'         : Item(verb='Adding'),
+    'A/C/I/upsilon' : Item(verb='Adding'),
+    'A/C/I/zeta'    : Item(verb='Adding'),
+    })
+
+  # Create expected status tree.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'A/B/F/nu'      : Item(status='  ', wc_rev=2),
+    'kappa'         : Item(status='  ', wc_rev=2),
+    'A/C/I'         : Item(status='  ', wc_rev=2),
+    'A/C/I/upsilon' : Item(status='  ', wc_rev=2),
+    'A/C/I/zeta'    : Item(status='  ', wc_rev=2),
+    })
+  expected_status.tweak('A/mu', 'A/D/G/rho', wc_rev=2)
+
+  # Commit.
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  # Make a local mod to mu that will merge cleanly.
+  backup_mu_path = os.path.join(wc_backup, 'A', 'mu')
+  svntest.main.file_append(backup_mu_path, 'appended mu text')
+
+  # Create unversioned files and dir that will obstruct A/B/F/nu, kappa,
+  # A/C/I, and A/C/I/upsilon coming from repos during update.
+  # The obstructing nu has the same contents as the repos, while kappa and
+  # upsilon differ, which means the latter two should show as modified after
+  # the forced update.
+  nu_path = os.path.join(wc_backup, 'A', 'B', 'F', 'nu')
+  svntest.main.file_append(nu_path, "This is the file 'nu'\n")
+  kappa_path = os.path.join(wc_backup, 'kappa')
+  svntest.main.file_append(kappa_path,
+                           "This is the OBSTRUCTING file 'kappa'\n")
+  I_path = os.path.join(wc_backup, 'A', 'C', 'I')
+  os.mkdir(I_path)
+  upsilon_path = os.path.join(I_path, 'upsilon')
+  svntest.main.file_append(upsilon_path,
+                           "This is the OBSTRUCTING file 'upsilon'\n")
+
+  # Create expected output tree for an update of the wc_backup.
+  #
+  # - mu and rho are run of the mill update operations; merge and update
+  #   respectively.
+  #
+  # - kappa, nu, I, and upsilon all 'E'xisted as unversioned items in the WC.
+  #
+  # - While the dir I does exist, I/zeta does not so it's just an add.
+  expected_output = wc.State(wc_backup, {
+    'A/mu'          : Item(status='G '),
+    'A/D/G/rho'     : Item(status='U '),
+    'kappa'         : Item(status='E '),
+    'A/B/F/nu'      : Item(status='E '),
+    'A/C/I'         : Item(status='E '),
+    'A/C/I/upsilon' : Item(status='E '),
+    'A/C/I/zeta'    : Item(status='A '),
+    })
+
+  # Create expected disk tree for the update of the wc_backup: the
+  # obstructing files keep their on-disk contents.
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({
+    'A/B/F/nu'      : Item("This is the file 'nu'\n"),
+    'kappa'         : Item("This is the OBSTRUCTING file 'kappa'\n"),
+    'A/C/I'         : Item(),
+    'A/C/I/upsilon' : Item("This is the OBSTRUCTING file 'upsilon'\n"),
+    'A/C/I/zeta'    : Item("This is the file 'zeta'\n"),
+    })
+  expected_disk.tweak('A/mu',
+                      contents=expected_disk.desc['A/mu'].contents
+                      + 'appended mu text')
+  expected_disk.tweak('A/D/G/rho',
+                      contents=expected_disk.desc['A/D/G/rho'].contents
+                      + 'new appended text for rho')
+
+  # Create expected status tree for the update. Since the obstructing
+  # kappa and upsilon differ from the repos, they should show as modified.
+  expected_status = svntest.actions.get_virginal_state(wc_backup, 2)
+  expected_status.add({
+    'A/B/F/nu'      : Item(status='  ', wc_rev=2),
+    'A/C/I'         : Item(status='  ', wc_rev=2),
+    'A/C/I/zeta'    : Item(status='  ', wc_rev=2),
+    'kappa'         : Item(status='M ', wc_rev=2),
+    'A/C/I/upsilon' : Item(status='M ', wc_rev=2),
+    })
+
+  # Perform forced update and check the results in three ways.
+  svntest.actions.run_and_verify_update(wc_backup,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        [], False,
+                                        wc_backup, '--force')
+
+#----------------------------------------------------------------------
+def forced_update_failures(sbox):
+  "forced up fails with some types of obstructions"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Make a backup copy of the working copy
+  wc_backup = sbox.add_wc_path('backup')
+  svntest.actions.duplicate_dir(wc_dir, wc_backup)
+
+  # Add a file
+  nu_path = sbox.ospath('A/B/F/nu')
+  svntest.main.file_append(nu_path, "This is the file 'nu'\n")
+  svntest.main.run_svn(None, 'add', nu_path)
+
+  # Add a dir
+  I_path = sbox.ospath('A/C/I')
+  os.mkdir(I_path)
+  svntest.main.run_svn(None, 'add', I_path)
+
+  # Create expected output tree for 'svn ci'
+  expected_output = wc.State(wc_dir, {
+    'A/B/F/nu' : Item(verb='Adding'),
+    'A/C/I' : Item(verb='Adding'),
+    })
+
+  # Create expected status tree.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.add({
+    'A/B/F/nu' : Item(status='  ', wc_rev=2),
+    'A/C/I' : Item(status='  ', wc_rev=2),
+    })
+
+  # Commit.
+  svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                        expected_status)
+
+  # Create an unversioned dir A/B/F/nu that will obstruct the file of the
+  # same name coming from the repository. Create an unversioned file A/C/I
+  # that will obstruct the dir of the same name.
+  nu_path = os.path.join(wc_backup, 'A', 'B', 'F', 'nu')
+  os.mkdir(nu_path)
+  I_path = os.path.join(wc_backup, 'A', 'C', 'I')
+  svntest.main.file_append(I_path,
+                           "This is the file 'I'...shouldn't I be a dir?\n")
+
+  # A forced update that tries to add a file when an unversioned directory
+  # of the same name already exists should fail.
+  # (The commented-out svntest.factory lines below were used to generate
+  # this part of the test; kept for reference.)
+  #svntest.factory.make(sbox, """svn up --force $WC_DIR.backup/A/B/F""")
+  #exit(0)
+  backup_A_B_F = os.path.join(wc_backup, 'A', 'B', 'F')
+
+  # svn up --force $WC_DIR.backup/A/B/F
+  expected_output = svntest.wc.State(wc_backup, {
+    'A/B/F/nu' : Item(status='  ', treeconflict='C'),
+  })
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({
+    'A/B/F/nu' : Item(),
+    'A/C/I' :
+    Item(contents="This is the file 'I'...shouldn't I be a dir?\n"),
+  })
+
+  # NOTE(review): bare 'actions' is presumably bound to svntest.actions at
+  # the top of this file (not visible in this chunk); confirm before reuse.
+  expected_status = actions.get_virginal_state(wc_backup, 1)
+  expected_status.add({
+    'A/B/F/nu' : Item(status='D ', treeconflict='C', wc_rev='2'),
+  })
+  expected_status.tweak('A/B/F', wc_rev='2')
+
+  actions.run_and_verify_update(wc_backup, expected_output,
+                                expected_disk, expected_status,
+                                [], False,
+                                '--force', backup_A_B_F)
+
+
+  # A forced update that tries to add a directory when an unversioned file
+  # of the same name already exists should fail.
+  # svntest.factory.make(sbox, """
+  #   svn up --force wc_dir_backup/A/C
+  #   rm -rf wc_dir_backup/A/C/I wc_dir_backup/A/B/F/nu
+  #   svn up wc_dir_backup
+  #   svn up -r1 wc_dir_backup/A/C
+  #   svn co url/A/C/I wc_dir_backup/A/C/I
+  #   svn up --force wc_dir_backup/A/C
+  #   """)
+  # exit(0)
+  url = sbox.repo_url
+  wc_dir_backup = sbox.wc_dir + '.backup'
+
+  backup_A_B_F_nu = os.path.join(wc_dir_backup, 'A', 'B', 'F', 'nu')
+  backup_A_C = os.path.join(wc_dir_backup, 'A', 'C')
+  backup_A_C_I = os.path.join(wc_dir_backup, 'A', 'C', 'I')
+  url_A_C_I = url + '/A/C/I'
+
+  # svn up --force wc_dir_backup/A/C
+  expected_output = svntest.wc.State(wc_dir_backup, {
+    'A/C/I' : Item(status='  ', treeconflict='C'),
+  })
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.add({
+    'A/B/F/nu' : Item(),
+    'A/C/I' :
+    Item(contents="This is the file 'I'...shouldn't I be a dir?\n"),
+  })
+
+  expected_status = actions.get_virginal_state(wc_dir_backup, 1)
+  expected_status.add({
+    'A/C/I' : Item(status='D ', treeconflict='C', wc_rev=2),
+    'A/B/F/nu' : Item(status='D ', treeconflict='C', wc_rev=2),
+  })
+  expected_status.tweak('A/C', 'A/B/F', wc_rev='2')
+
+  actions.run_and_verify_update(wc_dir_backup, expected_output,
+                                expected_disk, expected_status,
+                                [], False,
+                                '--force', backup_A_C)
+
+  # rm -rf wc_dir_backup/A/C/I wc_dir_backup/A/B/F/nu
+  # (I is an unversioned file, nu an unversioned dir, hence the two calls)
+  os.remove(backup_A_C_I)
+  svntest.main.safe_rmtree(backup_A_B_F_nu)
+
+  # Revert the tree-conflicted victims so the next update can proceed.
+  svntest.main.run_svn(None, 'revert', backup_A_C_I, backup_A_B_F_nu)
+
+  # svn up wc_dir_backup
+  expected_output = svntest.wc.State(wc_dir_backup, {
+  })
+
+  expected_disk.tweak('A/B/F/nu', contents="This is the file 'nu'\n")
+  expected_disk.tweak('A/C/I', contents=None)
+
+  expected_status.tweak(wc_rev='2', status='  ')
+  expected_status.tweak('A/C/I', 'A/B/F/nu', treeconflict=None)
+
+  actions.run_and_verify_update(wc_dir_backup, expected_output,
+                                expected_disk, expected_status)
+
+  # svn up -r1 wc_dir_backup/A/C
+  expected_output = svntest.wc.State(wc_dir_backup, {
+    'A/C/I' : Item(status='D '),
+  })
+
+  expected_disk.remove('A/C/I')
+
+  expected_status.remove('A/C/I')
+  expected_status.tweak('A/C', wc_rev='1')
+
+  actions.run_and_verify_update(wc_dir_backup, expected_output,
+                                expected_disk, expected_status,
+                                [], False,
+                                '-r1', backup_A_C)
+
+  # svn co url/A/C/I wc_dir_backup/A/C/I
+  expected_output = svntest.wc.State(wc_dir_backup, {})
+
+  expected_disk = svntest.wc.State(wc_dir, {})
+
+  actions.run_and_verify_checkout(url_A_C_I, backup_A_C_I,
+                                  expected_output, expected_disk)
+
+  # svn up --force wc_dir_backup/A/C
+  # The obstruction is now a nested working copy, so the target is skipped.
+  expected_output = svntest.wc.State(wc_dir_backup, {
+    'A/C/I' : Item(verb='Skipped'),
+  })
+
+  actions.run_and_verify_update(wc_dir_backup, expected_output, None, None,
+                                [], False,
+                                '--force', backup_A_C)
+
+
+#----------------------------------------------------------------------
+# Test for issue #2556. The tests maps a virtual drive to a working copy
+# and tries some basic update, commit and status actions on the virtual
+# drive.
+@SkipUnless(svntest.main.is_os_windows)
+def update_wc_on_windows_drive(sbox):
+  "update wc on the root of a Windows (virtual) drive"
+
+  def find_the_next_available_drive_letter():
+    "find the first available drive"
+
+    # get the list of used drive letters, use some Windows specific function.
+    try:
+      import win32api
+
+      drives=win32api.GetLogicalDriveStrings()
+      drives=drives.split('\000')
+
+      for d in range(ord('G'), ord('Z')+1):
+        drive = chr(d)
+        if not drive + ':\\' in drives:
+          return drive
+    except ImportError:
+      # In ActiveState python x64 win32api is not available
+      for d in range(ord('G'), ord('Z')+1):
+        drive = chr(d)
+        if not os.path.isdir(drive + ':\\'):
+          return drive
+
+    # Every letter in G..Z is already taken.
+    return None
+
+  # just create an empty folder, we'll checkout later.
+  sbox.build(create_wc = False)
+  svntest.main.safe_rmtree(sbox.wc_dir)
+  os.mkdir(sbox.wc_dir)
+
+  # create a virtual drive to the working copy folder
+  drive = find_the_next_available_drive_letter()
+  if drive is None:
+    raise svntest.Skip('No drive letter available')
+
+  # 'subst X: <dir>' maps virtual drive letter X: onto the WC directory;
+  # it is deleted again in the finally block below.
+  subprocess.call(['subst', drive +':', sbox.wc_dir])
+  wc_dir = drive + ':/'
+  was_cwd = os.getcwd()
+
+  try:
+    svntest.actions.run_and_verify_svn(None, [],
+                                       'checkout',
+                                       sbox.repo_url, wc_dir)
+
+    # Make some local modifications
+    mu_path = os.path.join(wc_dir, 'A', 'mu').replace(os.sep, '/')
+    svntest.main.file_append(mu_path, '\nAppended text for mu')
+    zeta_path = os.path.join(wc_dir, 'zeta').replace(os.sep, '/')
+    svntest.main.file_append(zeta_path, "This is the file 'zeta'\n")
+    svntest.main.run_svn(None, 'add', zeta_path)
+
+    # Commit.
+    expected_output = svntest.wc.State(wc_dir, {
+      'A/mu' : Item(verb='Sending'),
+      'zeta' : Item(verb='Adding'),
+      })
+
+    expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+    expected_status.tweak('A/mu', wc_rev=2)
+    expected_status.add({
+      'zeta' : Item(status='  ', wc_rev=2),
+      })
+
+    svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                          expected_status, [],
+                                          wc_dir, zeta_path)
+
+    # Non recursive commit
+    dir1_path = os.path.join(wc_dir, 'dir1').replace(os.sep, '/')
+    os.mkdir(dir1_path)
+    svntest.main.run_svn(None, 'add', '-N', dir1_path)
+    file1_path = os.path.join(dir1_path, 'file1')
+    svntest.main.file_append(file1_path, "This is the file 'file1'\n")
+    svntest.main.run_svn(None, 'add', '-N', file1_path)
+
+    expected_output = svntest.wc.State(wc_dir, {
+      'dir1' : Item(verb='Adding'),
+      'dir1/file1' : Item(verb='Adding'),
+      })
+
+    expected_status.add({
+      'dir1' : Item(status='  ', wc_rev=3),
+      'dir1/file1' : Item(status='  ', wc_rev=3),
+      })
+
+    svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+                                          expected_status, [],
+                                          '-N',
+                                          wc_dir,
+                                          dir1_path, file1_path)
+
+    # revert to previous revision to test update
+    os.chdir(wc_dir)
+
+    expected_disk = svntest.main.greek_state.copy()
+
+    expected_output = svntest.wc.State('', {
+      'A/mu' : Item(status='U '),
+      'zeta' : Item(status='D '),
+      'dir1' : Item(status='D '),
+      })
+
+    expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+    svntest.actions.run_and_verify_update(wc_dir,
+                                          expected_output,
+                                          expected_disk,
+                                          expected_status,
+                                          [], False,
+                                          '-r', '1', wc_dir)
+
+    os.chdir(was_cwd)
+
+    # update to the latest version, but use the relative path 'X:'
+    # (no trailing separator) to exercise drive-relative targets.
+    wc_dir = drive + ":"
+
+    expected_output = svntest.wc.State(wc_dir, {
+      'A/mu' : Item(status='U '),
+      'zeta' : Item(status='A '),
+      'dir1' : Item(status='A '),
+      'dir1/file1' : Item(status='A '),
+      })
+
+    expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+    expected_status.add({
+      'dir1' : Item(status='  ', wc_rev=3),
+      'dir1/file1' : Item(status='  ', wc_rev=3),
+      'zeta' : Item(status='  ', wc_rev=3),
+      })
+
+    expected_disk.add({
+      'zeta'      : Item("This is the file 'zeta'\n"),
+      'dir1/file1': Item("This is the file 'file1'\n"),
+      })
+    expected_disk.tweak('A/mu', contents = expected_disk.desc['A/mu'].contents
+                        + '\nAppended text for mu')
+
+    # Create expected status with 'H:iota' style paths
+    expected_status_relative = svntest.wc.State('', {})
+    expected_status_relative.add_state(wc_dir, expected_status, strict=True)
+
+    svntest.actions.run_and_verify_update(wc_dir,
+                                          expected_output,
+                                          expected_disk,
+                                          expected_status_relative)
+
+  finally:
+    os.chdir(was_cwd)
+    # cleanup the virtual drive
+    subprocess.call(['subst', '/D', drive +':'])
+
+# Issue #2618: "'Checksum mismatch' error when receiving
+# update for replaced-with-history file".
+def update_wc_with_replaced_file(sbox):
+  "update wc containing a replaced-with-history file"
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Make a backup copy of the working copy.
+  wc_backup = sbox.add_wc_path('backup')
+  svntest.actions.duplicate_dir(wc_dir, wc_backup)
+
+  # we need a change in the repository: commit a mod to iota from the
+  # backup WC, creating r2 while the primary WC stays at r1.
+  iota_path = sbox.ospath('iota')
+  mu_path = sbox.ospath('A/mu')
+  iota_bu_path = os.path.join(wc_backup, 'iota')
+  svntest.main.file_append(iota_bu_path, "New line in 'iota'\n")
+  svntest.main.run_svn(None,
+                       'ci', wc_backup, '-m', 'changed file')
+
+  # First, a replacement without history: delete iota and re-add an
+  # unrelated empty file of the same name.
+  svntest.main.run_svn(None, 'rm', iota_path)
+  svntest.main.file_append(iota_path, "")
+  svntest.main.run_svn(None, 'add', iota_path)
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('iota', status='R ', wc_rev='1')
+
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Now update the wc. The local replacement is a tree conflict with
+  # the incoming edit on that deleted item.
+  expected_output = svntest.wc.State(wc_dir, {
+    'iota' : Item(status='  ', treeconflict='C'),
+    })
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+  expected_status.add({
+    'iota' : Item(status='R ', wc_rev='2', treeconflict='C'),
+    })
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.tweak('iota', contents="")
+
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status)
+
+  # Make us a working copy with a 'replace-with-history' file:
+  # revert the plain replacement and drop the WC back to r1...
+  svntest.main.run_svn(None, 'revert', iota_path)
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'iota' : Item(status='U '),
+    })
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+  expected_disk = svntest.main.greek_state.copy()
+
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        [], False,
+                                        wc_dir, '-r1')
+
+  # ...then replace iota with a copy of mu (a replacement WITH history).
+  svntest.main.run_svn(None, 'rm', iota_path)
+  svntest.main.run_svn(None, 'cp', mu_path, iota_path)
+
+  # copied='+' and wc_rev='-' mark the copied (with-history) replacement.
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('iota', status='R ', copied='+', wc_rev='-')
+
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Now update the wc. The local replacement is a tree conflict with
+  # the incoming edit on that deleted item.  Before the issue #2618 fix
+  # this update failed with a 'Checksum mismatch' error.
+  expected_output = svntest.wc.State(wc_dir, {
+    'iota' : Item(status='  ', treeconflict='C'),
+    })
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+  expected_status.add({
+    'iota' : Item(status='R ', wc_rev='-', treeconflict='C', copied='+'),
+    })
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.tweak('iota', contents="This is the file 'mu'.\n")
+
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status)
+
+#----------------------------------------------------------------------
+def update_with_obstructing_additions(sbox):
+ "update handles obstructing paths scheduled for add"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make a backup copy of the working copy
+ wc_backup = sbox.add_wc_path('backup')
+ svntest.actions.duplicate_dir(wc_dir, wc_backup)
+
+ # Add files and dirs to the repos via the first WC. Each of these
+ # will be added to the backup WC via an update:
+ #
+ # A/B/upsilon: Identical to the file scheduled for addition in
+ # the backup WC.
+ #
+ # A/C/nu: A "normal" add, won't exist in the backup WC.
+ #
+ # A/D/kappa: Textual and property conflict with the file scheduled
+ # for addition in the backup WC.
+ #
+ # A/D/epsilon: Textual conflict with the file scheduled for addition.
+ #
+ # A/D/zeta: Prop conflict with the file scheduled for addition.
+ #
+ # Three new dirs that will also be scheduled for addition:
+ # A/D/H/I: No props on either WC or REPOS.
+ # A/D/H/I/J: Prop conflict with the scheduled add.
+ # A/D/H/I/K: Same (mergeable) prop on WC and REPOS.
+ #
+ # A/D/H/I/K/xi: Identical to the file scheduled for addition in
+ # the backup WC. No props.
+ #
+ # A/D/H/I/L: A "normal" dir add, won't exist in the backup WC.
+ #
+ # A/D/H/I/J/eta: Conflicts with the file scheduled for addition in
+ # the backup WC. No props.
+ upsilon_path = sbox.ospath('A/B/upsilon')
+ svntest.main.file_append(upsilon_path, "This is the file 'upsilon'\n")
+ nu_path = sbox.ospath('A/C/nu')
+ svntest.main.file_append(nu_path, "This is the file 'nu'\n")
+ kappa_path = sbox.ospath('A/D/kappa')
+ svntest.main.file_append(kappa_path, "This is REPOS file 'kappa'\n")
+ epsilon_path = sbox.ospath('A/D/epsilon')
+ svntest.main.file_append(epsilon_path, "This is REPOS file 'epsilon'\n")
+ zeta_path = sbox.ospath('A/D/zeta')
+ svntest.main.file_append(zeta_path, "This is the file 'zeta'\n")
+ I_path = sbox.ospath('A/D/H/I')
+ os.mkdir(I_path)
+ J_path = os.path.join(I_path, 'J')
+ os.mkdir(J_path)
+ K_path = os.path.join(I_path, 'K')
+ os.mkdir(K_path)
+ L_path = os.path.join(I_path, 'L')
+ os.mkdir(L_path)
+ xi_path = os.path.join(K_path, 'xi')
+ svntest.main.file_append(xi_path, "This is the file 'xi'\n")
+ eta_path = os.path.join(J_path, 'eta')
+ svntest.main.file_append(eta_path, "This is REPOS file 'eta'\n")
+
+ svntest.main.run_svn(None, 'add', upsilon_path, nu_path,
+ kappa_path, epsilon_path, zeta_path, I_path)
+
+ # Set props that will conflict with scheduled adds.
+ svntest.main.run_svn(None, 'propset', 'propname1', 'propval-REPOS',
+ kappa_path)
+ svntest.main.run_svn(None, 'propset', 'propname1', 'propval-REPOS',
+ zeta_path)
+ svntest.main.run_svn(None, 'propset', 'propname1', 'propval-REPOS',
+ J_path)
+
+ # Set prop that will match with scheduled add.
+ svntest.main.run_svn(None, 'propset', 'propname1', 'propval-SAME',
+ epsilon_path)
+ svntest.main.run_svn(None, 'propset', 'propname1', 'propval-SAME',
+ K_path)
+
+ # Created expected output tree for 'svn ci'
+ expected_output = wc.State(wc_dir, {
+ 'A/B/upsilon' : Item(verb='Adding'),
+ 'A/C/nu' : Item(verb='Adding'),
+ 'A/D/kappa' : Item(verb='Adding'),
+ 'A/D/epsilon' : Item(verb='Adding'),
+ 'A/D/zeta' : Item(verb='Adding'),
+ 'A/D/H/I' : Item(verb='Adding'),
+ 'A/D/H/I/J' : Item(verb='Adding'),
+ 'A/D/H/I/J/eta' : Item(verb='Adding'),
+ 'A/D/H/I/K' : Item(verb='Adding'),
+ 'A/D/H/I/K/xi' : Item(verb='Adding'),
+ 'A/D/H/I/L' : Item(verb='Adding'),
+ })
+
+ # Create expected status tree.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ 'A/B/upsilon' : Item(status=' ', wc_rev=2),
+ 'A/C/nu' : Item(status=' ', wc_rev=2),
+ 'A/D/kappa' : Item(status=' ', wc_rev=2),
+ 'A/D/epsilon' : Item(status=' ', wc_rev=2),
+ 'A/D/zeta' : Item(status=' ', wc_rev=2),
+ 'A/D/H/I' : Item(status=' ', wc_rev=2),
+ 'A/D/H/I/J' : Item(status=' ', wc_rev=2),
+ 'A/D/H/I/J/eta' : Item(status=' ', wc_rev=2),
+ 'A/D/H/I/K' : Item(status=' ', wc_rev=2),
+ 'A/D/H/I/K/xi' : Item(status=' ', wc_rev=2),
+ 'A/D/H/I/L' : Item(status=' ', wc_rev=2),
+ })
+
+ # Commit.
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Create various paths scheduled for addition which will obstruct
+ # the adds coming from the repos.
+ upsilon_backup_path = os.path.join(wc_backup, 'A', 'B', 'upsilon')
+ svntest.main.file_append(upsilon_backup_path,
+ "This is the file 'upsilon'\n")
+ kappa_backup_path = os.path.join(wc_backup, 'A', 'D', 'kappa')
+ svntest.main.file_append(kappa_backup_path,
+ "This is WC file 'kappa'\n")
+ epsilon_backup_path = os.path.join(wc_backup, 'A', 'D', 'epsilon')
+ svntest.main.file_append(epsilon_backup_path,
+ "This is WC file 'epsilon'\n")
+ zeta_backup_path = os.path.join(wc_backup, 'A', 'D', 'zeta')
+ svntest.main.file_append(zeta_backup_path, "This is the file 'zeta'\n")
+ I_backup_path = os.path.join(wc_backup, 'A', 'D', 'H', 'I')
+ os.mkdir(I_backup_path)
+ J_backup_path = os.path.join(I_backup_path, 'J')
+ os.mkdir(J_backup_path)
+ K_backup_path = os.path.join(I_backup_path, 'K')
+ os.mkdir(K_backup_path)
+ xi_backup_path = os.path.join(K_backup_path, 'xi')
+ svntest.main.file_append(xi_backup_path, "This is the file 'xi'\n")
+ eta_backup_path = os.path.join(J_backup_path, 'eta')
+ svntest.main.file_append(eta_backup_path, "This is WC file 'eta'\n")
+
+ svntest.main.run_svn(None, 'add', upsilon_backup_path, kappa_backup_path,
+ epsilon_backup_path, zeta_backup_path, I_backup_path)
+
+ # Set prop that will conflict with add from repos.
+ svntest.main.run_svn(None, 'propset', 'propname1', 'propval-WC',
+ kappa_backup_path)
+ svntest.main.run_svn(None, 'propset', 'propname1', 'propval-WC',
+ zeta_backup_path)
+ svntest.main.run_svn(None, 'propset', 'propname1', 'propval-WC',
+ J_backup_path)
+
+ # Set prop that will match add from repos.
+ svntest.main.run_svn(None, 'propset', 'propname1', 'propval-SAME',
+ epsilon_backup_path)
+ svntest.main.run_svn(None, 'propset', 'propname1', 'propval-SAME',
+ K_backup_path)
+
+ # Create expected output tree for an update of the wc_backup.
+ expected_output = wc.State(wc_backup, {
+ 'A/B/upsilon' : Item(status='E '),
+ 'A/C/nu' : Item(status='A '),
+ 'A/D/H/I' : Item(status='E '),
+ 'A/D/H/I/J' : Item(status='EC'),
+ 'A/D/H/I/J/eta' : Item(status='C '),
+ 'A/D/H/I/K' : Item(status='EG'),
+ 'A/D/H/I/K/xi' : Item(status='E '),
+ 'A/D/H/I/L' : Item(status='A '),
+ 'A/D/kappa' : Item(status='CC'),
+ 'A/D/epsilon' : Item(status='CG'),
+ 'A/D/zeta' : Item(status='EC'),
+ })
+
+ # Create expected disk for update of wc_backup.
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/B/upsilon' : Item("This is the file 'upsilon'\n"),
+ 'A/C/nu' : Item("This is the file 'nu'\n"),
+ 'A/D/H/I' : Item(),
+ 'A/D/H/I/J' : Item(props={'propname1' : 'propval-WC'}),
+ 'A/D/H/I/J/eta' : Item("\n".join(["<<<<<<< .mine",
+ "This is WC file 'eta'",
+ "||||||| .r0",
+ "=======",
+ "This is REPOS file 'eta'",
+ ">>>>>>> .r2",
+ ""])),
+ 'A/D/H/I/K' : Item(props={'propname1' : 'propval-SAME'}),
+ 'A/D/H/I/K/xi' : Item("This is the file 'xi'\n"),
+ 'A/D/H/I/L' : Item(),
+ 'A/D/kappa' : Item("\n".join(["<<<<<<< .mine",
+ "This is WC file 'kappa'",
+ "||||||| .r0",
+ "=======",
+ "This is REPOS file 'kappa'",
+ ">>>>>>> .r2",
+ ""]),
+ props={'propname1' : 'propval-WC'}),
+ 'A/D/epsilon' : Item("\n".join(["<<<<<<< .mine",
+ "This is WC file 'epsilon'",
+ "||||||| .r0",
+ "=======",
+ "This is REPOS file 'epsilon'",
+ ">>>>>>> .r2",
+ ""]),
+ props={'propname1' : 'propval-SAME'}),
+ 'A/D/zeta' : Item("This is the file 'zeta'\n",
+ props={'propname1' : 'propval-WC'}),
+ })
+
+ # Create expected status tree for the update. Since the obstructing
+ # kappa and upsilon differ from the repos, they should show as modified.
+ expected_status = svntest.actions.get_virginal_state(wc_backup, 2)
+ expected_status.add({
+ 'A/B/upsilon' : Item(status=' ', wc_rev=2),
+ 'A/C/nu' : Item(status=' ', wc_rev=2),
+ 'A/D/H/I' : Item(status=' ', wc_rev=2),
+ 'A/D/H/I/J' : Item(status=' C', wc_rev=2),
+ 'A/D/H/I/J/eta' : Item(status='C ', wc_rev=2),
+ 'A/D/H/I/K' : Item(status=' ', wc_rev=2),
+ 'A/D/H/I/K/xi' : Item(status=' ', wc_rev=2),
+ 'A/D/H/I/L' : Item(status=' ', wc_rev=2),
+ 'A/D/kappa' : Item(status='CC', wc_rev=2),
+ 'A/D/epsilon' : Item(status='C ', wc_rev=2),
+ 'A/D/zeta' : Item(status=' C', wc_rev=2),
+ })
+
+ # "Extra" files that we expect to result from the conflicts.
+ extra_files = ['eta\.r0', 'eta\.r2', 'eta\.mine',
+ 'kappa\.r0', 'kappa\.r2', 'kappa\.mine',
+ 'epsilon\.r0', 'epsilon\.r2', 'epsilon\.mine',
+ 'kappa.prej', 'zeta.prej', 'dir_conflicts.prej']
+
+ # Perform forced update and check the results in three
+ # ways (including props).
+ svntest.actions.run_and_verify_update(wc_backup,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True,
+ '--adds-as-modification', wc_backup,
+ extra_files=extra_files)
+
+ # Some obstructions are still not permitted:
+ #
+ # Test that file and dir obstructions scheduled for addition *with*
+ # history fail when update tries to add the same path.
+
+ # URL to URL copy of A/D/G to A/M.
+ G_URL = sbox.repo_url + '/A/D/G'
+ M_URL = sbox.repo_url + '/A/M'
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', G_URL, M_URL, '-m', '')
+
+ # WC to WC copy of A/D/H to A/M, M now scheduled for addition with
+ # history in WC and pending addition from the repos.
+ H_path = sbox.ospath('A/D/H')
+ A_path = sbox.ospath('A')
+ M_path = sbox.ospath('A/M')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', H_path, M_path)
+
+ # URL to URL copy of A/D/H/omega to omicron.
+ omega_URL = sbox.repo_url + '/A/D/H/omega'
+ omicron_URL = sbox.repo_url + '/omicron'
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', omega_URL, omicron_URL,
+ '-m', '')
+
+ # WC to WC copy of A/D/H/chi to omicron, omicron now scheduled for
+ # addition with history in WC and pending addition from the repos.
+ chi_path = sbox.ospath('A/D/H/chi')
+ omicron_path = sbox.ospath('omicron')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', chi_path,
+ omicron_path)
+
+ # Try to update M's Parent.
+ expected_output = wc.State(A_path, {
+ 'M' : Item(status=' ', treeconflict='C'),
+ 'M/rho' : Item(status=' ', treeconflict='A'),
+ 'M/pi' : Item(status=' ', treeconflict='A'),
+ 'M/tau' : Item(status=' ', treeconflict='A'),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/B/upsilon' : Item("This is the file 'upsilon'\n"),
+ 'A/C/nu' : Item("This is the file 'nu'\n"),
+ 'A/D/H/I' : Item(),
+ 'A/D/H/I/J' : Item(),
+ 'A/D/H/I/J/eta' : Item("This is REPOS file 'eta'\n"),
+ 'A/D/H/I/K' : Item(),
+ 'A/D/H/I/K/xi' : Item("This is the file 'xi'\n"),
+ 'A/D/H/I/L' : Item(),
+ 'A/D/kappa' : Item("This is REPOS file 'kappa'\n"),
+ 'A/D/epsilon' : Item("This is REPOS file 'epsilon'\n"),
+ 'A/D/gamma' : Item("This is the file 'gamma'.\n"),
+ 'A/D/zeta' : Item("This is the file 'zeta'\n"),
+ 'A/M/I' : Item(),
+ 'A/M/I/J' : Item(),
+ 'A/M/I/J/eta' : Item("This is REPOS file 'eta'\n"),
+ 'A/M/I/K' : Item(),
+ 'A/M/I/K/xi' : Item("This is the file 'xi'\n"),
+ 'A/M/I/L' : Item(),
+ 'A/M/chi' : Item("This is the file 'chi'.\n"),
+ 'A/M/psi' : Item("This is the file 'psi'.\n"),
+ 'A/M/omega' : Item("This is the file 'omega'.\n"),
+ 'omicron' : Item("This is the file 'chi'.\n"),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 4)
+ expected_status.tweak('', 'iota', wc_rev=1)
+ expected_status.add({
+ 'A/B/upsilon' : Item(status=' ', wc_rev=4),
+ 'A/C/nu' : Item(status=' ', wc_rev=4),
+ 'A/D/kappa' : Item(status=' ', wc_rev=4),
+ 'A/D/epsilon' : Item(status=' ', wc_rev=4),
+ 'A/D/gamma' : Item(status=' ', wc_rev=4),
+ 'A/D/zeta' : Item(status=' ', wc_rev=4),
+ 'A/D/H/I' : Item(status=' ', wc_rev=4),
+ 'A/D/H/I/J' : Item(status=' ', wc_rev=4),
+ 'A/D/H/I/J/eta' : Item(status=' ', wc_rev=4),
+ 'A/D/H/I/K' : Item(status=' ', wc_rev=4),
+ 'A/D/H/I/K/xi' : Item(status=' ', wc_rev=4),
+ 'A/D/H/I/L' : Item(status=' ', wc_rev=4),
+ 'A/M' : Item(status='R ', copied='+', wc_rev='-',
+ treeconflict='C'),
+ 'A/M/I' : Item(status='A ', copied='+', wc_rev='-',
+ entry_status=' '), # New op_root
+ 'A/M/I/J' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/M/I/J/eta' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/M/I/K' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/M/I/K/xi' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/M/I/L' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/M/chi' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/M/psi' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/M/omega' : Item(status=' ', copied='+', wc_rev='-'),
+ 'omicron' : Item(status='A ', copied='+', wc_rev='-'),
+
+ # Inserted under the tree conflict
+ 'A/M/pi' : Item(status='D ', wc_rev='4'),
+ 'A/M/rho' : Item(status='D ', wc_rev='4'),
+ 'A/M/tau' : Item(status='D ', wc_rev='4'),
+ })
+
+ svntest.actions.run_and_verify_update(wc_dir, expected_output,
+ expected_disk, expected_status,
+ [], False,
+ '--adds-as-modification',
+ A_path)
+
+ # Resolve the tree conflict.
+ svntest.main.run_svn(None, 'resolve', '--accept', 'working', M_path)
+
+ # Try to update omicron's parent, non-recursively so as not to
+ # try and update M first.
+ expected_output = wc.State(wc_dir, {
+ 'omicron' : Item(status=' ', treeconflict='C'),
+ })
+
+ expected_status.tweak('', 'iota', status=' ', wc_rev=4)
+ expected_status.tweak('omicron', status='R ', copied='+', wc_rev='-',
+ treeconflict='C')
+ expected_status.tweak('A/M', treeconflict=None)
+
+ svntest.actions.run_and_verify_update(wc_dir, expected_output,
+ expected_disk, expected_status,
+ [], False,
+ wc_dir, '-N', '--adds-as-modification')
+
+ # Resolve the tree conflict.
+ svntest.main.run_svn(None, 'resolved', omicron_path)
+
+ expected_output = wc.State(wc_dir, { })
+
+ expected_status.tweak('omicron', treeconflict=None)
+
+ # Again, --force shouldn't matter.
+ svntest.actions.run_and_verify_update(wc_dir, expected_output,
+ expected_disk, expected_status,
+ [], False,
+ omicron_path, '-N', '--force')
+
+# Test for issue #2022: Update shouldn't touch conflicted files.
+def update_conflicted(sbox):
+ "update conflicted files"
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ iota_path = sbox.ospath('iota')
+ lambda_path = sbox.ospath('A/B/lambda')
+ mu_path = sbox.ospath('A/mu')
+ D_path = sbox.ospath('A/D')
+ pi_path = sbox.ospath('A/D/G/pi')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ # Make some modifications to the files and a dir, creating r2.
+ svntest.main.file_append(iota_path, 'Original appended text for iota\n')
+
+ svntest.main.run_svn(None, 'propset', 'prop', 'val', lambda_path)
+
+ svntest.main.file_append(mu_path, 'Original appended text for mu\n')
+
+ svntest.main.run_svn(None, 'propset', 'prop', 'val', mu_path)
+ svntest.main.run_svn(None, 'propset', 'prop', 'val', D_path)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ 'A/mu': Item(verb='Sending'),
+ 'A/B/lambda': Item(verb='Sending'),
+ 'A/D': Item(verb='Sending'),
+ })
+
+ expected_status.tweak('iota', 'A/mu', 'A/B/lambda', 'A/D', wc_rev=2)
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Do another change to each path that we will need later.
+ # Also, change a file below A/D in the path.
+ svntest.main.file_append(iota_path, 'Another line for iota\n')
+ svntest.main.file_append(mu_path, 'Another line for mu\n')
+ svntest.main.file_append(lambda_path, 'Another line for lambda\n')
+
+ svntest.main.run_svn(None, 'propset', 'prop', 'val2', D_path)
+
+ svntest.main.file_append(pi_path, 'Another line for pi\n')
+
+ expected_status.tweak('iota', 'A/mu', 'A/B/lambda', 'A/D', 'A/D/G/pi',
+ wc_rev=3)
+
+ expected_output.add({
+ 'A/D/G/pi': Item(verb='Sending')})
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Go back to revision 1.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(status='U '),
+ 'A/B/lambda' : Item(status='UU'),
+ 'A/mu' : Item(status='UU'),
+ 'A/D': Item(status=' U'),
+ 'A/D/G/pi': Item(status='U '),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True,
+ '-r1', wc_dir)
+
+ # Create modifications conflicting with rev 2.
+ svntest.main.file_append(iota_path, 'Conflicting appended text for iota\n')
+ svntest.main.run_svn(None, 'propset', 'prop', 'conflictval', lambda_path)
+ svntest.main.file_append(mu_path, 'Conflicting appended text for mu\n')
+ svntest.main.run_svn(None, 'propset', 'prop', 'conflictval', mu_path)
+ svntest.main.run_svn(None, 'propset', 'prop', 'conflictval', D_path)
+
+ # Update to revision 2, expecting conflicts.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota': Item(status='C '),
+ 'A/B/lambda': Item(status=' C'),
+ 'A/mu': Item(status='CC'),
+ 'A/D': Item(status=' C'),
+ })
+
+ expected_disk.tweak('iota',
+ contents="\n".join(["This is the file 'iota'.",
+ "<<<<<<< .mine",
+ "Conflicting appended text for iota",
+ "||||||| .r1",
+ "=======",
+ "Original appended text for iota",
+ ">>>>>>> .r2",
+ ""]))
+ expected_disk.tweak('A/mu',
+ contents="\n".join(["This is the file 'mu'.",
+ "<<<<<<< .mine",
+ "Conflicting appended text for mu",
+ "||||||| .r1",
+ "=======",
+ "Original appended text for mu",
+ ">>>>>>> .r2",
+ ""]),
+ props={'prop': 'conflictval'})
+ expected_disk.tweak('A/B/lambda', 'A/D', props={'prop': 'conflictval'})
+
+ expected_status.tweak(wc_rev=2)
+ expected_status.tweak('iota', status='C ')
+ expected_status.tweak('A/B/lambda', 'A/D', status=' C')
+ expected_status.tweak('A/mu', status='CC')
+
+ extra_files = [ 'iota.r1', 'iota.r2', 'iota.mine',
+ 'mu.r1', 'mu.r2', 'mu.mine', 'mu.prej',
+ 'lambda.prej',
+ 'dir_conflicts.prej']
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True,
+ '-r2', wc_dir,
+ extra_files=extra_files+[])
+
+ # Now, update to HEAD, which should skip all the conflicted files, but
+ # still update the pi file.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Skipped'),
+ 'A/B/lambda' : Item(verb='Skipped'),
+ 'A/mu' : Item(verb='Skipped'),
+ 'A/D' : Item(verb='Skipped'),
+ })
+
+ expected_status.tweak(wc_rev=3)
+ expected_status.tweak('iota', 'A/B/lambda', 'A/mu', 'A/D', wc_rev=2)
+ # We no longer update descendants of a prop-conflicted dir.
+ expected_status.tweak('A/D/G',
+ 'A/D/G/pi',
+ 'A/D/G/rho',
+ 'A/D/G/tau',
+ 'A/D/H',
+ 'A/D/H/chi',
+ 'A/D/H/omega',
+ 'A/D/H/psi',
+ 'A/D/gamma', wc_rev=2)
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True,
+ extra_files=extra_files)
+
+#----------------------------------------------------------------------
+@SkipUnless(server_has_mergeinfo)
+def mergeinfo_update_elision(sbox):
+ "mergeinfo does not elide after update"
+
+ # No mergeinfo elision is performed when doing updates. So updates may
+ # result in equivalent mergeinfo on a path and its nearest working copy
+ # parent with explicit mergeinfo. This is currently permitted and
+ # honestly we could probably do without this test(?).
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Some paths we'll care about
+ alpha_COPY_path = sbox.ospath('A/B_COPY/E/alpha')
+ alpha_path = sbox.ospath('A/B/E/alpha')
+ B_COPY_path = sbox.ospath('A/B_COPY')
+ E_COPY_path = sbox.ospath('A/B_COPY/E')
+ beta_path = sbox.ospath('A/B/E/beta')
+ lambda_path = sbox.ospath('A/B/lambda')
+
+ # Make a branch A/B_COPY
+ expected_stdout = verify.UnorderedOutput([
+ "A " + sbox.ospath('A/B_COPY/lambda') + "\n",
+ "A " + sbox.ospath('A/B_COPY/E') + "\n",
+ "A " + sbox.ospath('A/B_COPY/E/alpha') + "\n",
+ "A " + sbox.ospath('A/B_COPY/E/beta') + "\n",
+ "A " + sbox.ospath('A/B_COPY/F') + "\n",
+ "Checked out revision 1.\n",
+ "A " + B_COPY_path + "\n",
+ ])
+ svntest.actions.run_and_verify_svn(expected_stdout, [], 'copy',
+ sbox.repo_url + "/A/B", B_COPY_path)
+
+ expected_output = wc.State(wc_dir, {'A/B_COPY' : Item(verb='Adding')})
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.add({
+ "A/B_COPY" : Item(status=' ', wc_rev=2),
+ "A/B_COPY/lambda" : Item(status=' ', wc_rev=2),
+ "A/B_COPY/E" : Item(status=' ', wc_rev=2),
+ "A/B_COPY/E/alpha" : Item(status=' ', wc_rev=2),
+ "A/B_COPY/E/beta" : Item(status=' ', wc_rev=2),
+ "A/B_COPY/F" : Item(status=' ', wc_rev=2),})
+
+ svntest.actions.run_and_verify_commit(wc_dir,
+ expected_output,
+ expected_status)
+
+ # Make some changes under A/B
+
+ # r3 - modify and commit A/B/E/beta
+ svntest.main.file_write(beta_path, "New content")
+
+ expected_output = wc.State(wc_dir, {'A/B/E/beta' : Item(verb='Sending')})
+
+ expected_status.tweak('A/B/E/beta', wc_rev=3)
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # r4 - modify and commit A/B/lambda
+ svntest.main.file_write(lambda_path, "New content")
+
+ expected_output = wc.State(wc_dir, {'A/B/lambda' : Item(verb='Sending')})
+
+ expected_status.tweak('A/B/lambda', wc_rev=4)
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # r5 - modify and commit A/B/E/alpha
+ svntest.main.file_write(alpha_path, "New content")
+
+ expected_output = wc.State(wc_dir, {'A/B/E/alpha' : Item(verb='Sending')})
+
+ expected_status.tweak('A/B/E/alpha', wc_rev=5)
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Merge r2:5 into A/B_COPY
+ expected_output = wc.State(B_COPY_path, {
+ 'lambda' : Item(status='U '),
+ 'E/alpha' : Item(status='U '),
+ 'E/beta' : Item(status='U '),
+ })
+
+ expected_mergeinfo_output = wc.State(B_COPY_path, {
+ '' : Item(status=' U'),
+ })
+
+ expected_elision_output = wc.State(B_COPY_path, {
+ })
+
+ expected_merge_status = wc.State(B_COPY_path, {
+ '' : Item(status=' M', wc_rev=2),
+ 'lambda' : Item(status='M ', wc_rev=2),
+ 'E' : Item(status=' ', wc_rev=2),
+ 'E/alpha' : Item(status='M ', wc_rev=2),
+ 'E/beta' : Item(status='M ', wc_rev=2),
+ 'F' : Item(status=' ', wc_rev=2),
+ })
+
+ expected_merge_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:3-5'}),
+ 'lambda' : Item("New content"),
+ 'E' : Item(),
+ 'E/alpha' : Item("New content"),
+ 'E/beta' : Item("New content"),
+ 'F' : Item(),
+ })
+
+ expected_skip = wc.State(B_COPY_path, { })
+
+ svntest.actions.run_and_verify_merge(B_COPY_path, '2', '5',
+ sbox.repo_url + '/A/B', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_merge_disk,
+ expected_merge_status,
+ expected_skip,
+ check_props=True)
+
+ # r6 - Commit the merge
+ expected_output = wc.State(wc_dir,
+ {'A/B_COPY' : Item(verb='Sending'),
+ 'A/B_COPY/E/alpha' : Item(verb='Sending'),
+ 'A/B_COPY/E/beta' : Item(verb='Sending'),
+ 'A/B_COPY/lambda' : Item(verb='Sending')})
+
+ expected_status.tweak('A/B_COPY', wc_rev=6)
+ expected_status.tweak('A/B_COPY/E/alpha', wc_rev=6)
+ expected_status.tweak('A/B_COPY/E/beta', wc_rev=6)
+ expected_status.tweak('A/B_COPY/lambda', wc_rev=6)
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Update WC back to r5, A/B_COPY is at its pre-merge state again
+ expected_output = wc.State(wc_dir,
+ {'A/B_COPY' : Item(status=' U'),
+ 'A/B_COPY/E/alpha' : Item(status='U '),
+ 'A/B_COPY/E/beta' : Item(status='U '),
+ 'A/B_COPY/lambda' : Item(status='U '),})
+
+ expected_status.tweak(wc_rev=5)
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/B_COPY' : Item(),
+ 'A/B_COPY/lambda' : Item("This is the file 'lambda'.\n"),
+ 'A/B_COPY/E' : Item(),
+ 'A/B_COPY/E/alpha' : Item("This is the file 'alpha'.\n"),
+ 'A/B_COPY/E/beta' : Item("This is the file 'beta'.\n"),
+ 'A/B_COPY/F' : Item(),
+ })
+ expected_disk.tweak('A/B/lambda', contents="New content")
+ expected_disk.tweak('A/B/E/alpha', contents="New content")
+ expected_disk.tweak('A/B/E/beta', contents="New content")
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True,
+ '-r', '5', wc_dir)
+
+ # Merge r2:5 to A/B_COPY/E/alpha
+ expected_output = wc.State(alpha_COPY_path, {
+ 'alpha' : Item(status='U '),
+ })
+ expected_skip = wc.State(alpha_COPY_path, { })
+
+ # run_and_verify_merge doesn't support merging to a file WCPATH
+ # so use run_and_verify_svn.
+ svntest.actions.run_and_verify_svn(expected_merge_output([[3,5]],
+ ['U ' + alpha_COPY_path + '\n',
+ ' U ' + alpha_COPY_path + '\n']),
+ [], 'merge', '-r2:5',
+ sbox.repo_url + '/A/B/E/alpha',
+ alpha_COPY_path)
+
+
+ expected_alpha_status = wc.State(alpha_COPY_path, {
+ '' : Item(status='MM', wc_rev=5),
+ })
+
+ svntest.actions.run_and_verify_status(alpha_COPY_path,
+ expected_alpha_status)
+
+ svntest.actions.run_and_verify_svn(["/A/B/E/alpha:3-5\n"], [],
+ 'propget', SVN_PROP_MERGEINFO,
+ alpha_COPY_path)
+
+ # Update WC. The local mergeinfo (r3-5) on A/B_COPY/E/alpha is
+ # identical to that added to A/B_COPY by the update, but update
+ # doesn't support elision so this redundancy is permitted.
+ expected_output = wc.State(wc_dir, {
+ 'A/B_COPY/lambda' : Item(status='U '),
+ 'A/B_COPY/E/alpha' : Item(status='G '),
+ 'A/B_COPY/E/beta' : Item(status='U '),
+ 'A/B_COPY' : Item(status=' U'),
+ })
+
+ expected_disk.tweak('A/B_COPY', props={SVN_PROP_MERGEINFO : '/A/B:3-5'})
+ expected_disk.tweak('A/B_COPY/lambda', contents="New content")
+ expected_disk.tweak('A/B_COPY/E/beta', contents="New content")
+ expected_disk.tweak('A/B_COPY/E/alpha', contents="New content",
+ props={SVN_PROP_MERGEINFO : '/A/B/E/alpha:3-5'})
+
+ expected_status.tweak(wc_rev=6)
+ expected_status.tweak('A/B_COPY/E/alpha', status=' M')
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True)
+
+ # Now test that an updated target's mergeinfo can itself elide.
+ # r7 - modify and commit A/B/E/alpha
+ svntest.main.file_write(alpha_path, "More new content")
+ expected_output = wc.State(wc_dir, {
+ 'A/B/E/alpha' : Item(verb='Sending'),
+ 'A/B_COPY/E/alpha' : Item(verb='Sending')})
+ expected_status.tweak('A/B/E/alpha', 'A/B_COPY/E/alpha', status=' ',
+ wc_rev=7)
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Update A to get all paths to the same working revision.
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(7), [],
+ 'up', wc_dir)
+
+ # Merge r6:7 into A/B_COPY/E
+ expected_output = wc.State(E_COPY_path, {
+ 'alpha' : Item(status='U '),
+ })
+
+ expected_mergeinfo_output = wc.State(E_COPY_path, {
+ '' : Item(status=' G'),
+ 'alpha' : Item(status=' U'),
+ })
+
+ expected_elision_output = wc.State(E_COPY_path, {
+ 'alpha' : Item(status=' U'),
+ })
+
+ expected_merge_status = wc.State(E_COPY_path, {
+ '' : Item(status=' M', wc_rev=7),
+ 'alpha' : Item(status='MM', wc_rev=7),
+ 'beta' : Item(status=' ', wc_rev=7),
+ })
+
+ expected_merge_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B/E:3-5,7'}),
+ 'alpha' : Item("More new content"),
+ 'beta' : Item("New content"),
+ })
+
+ expected_skip = wc.State(E_COPY_path, { })
+
+ svntest.actions.run_and_verify_merge(E_COPY_path, '6', '7',
+ sbox.repo_url + '/A/B/E', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_merge_disk,
+ expected_merge_status,
+ expected_skip,
+ check_props=True)
+
+ # r8 - Commit the merge
+ svntest.actions.run_and_verify_svn(exp_noop_up_out(7),
+ [], 'update', wc_dir)
+
+ expected_output = wc.State(wc_dir,
+ {'A/B_COPY/E' : Item(verb='Sending'),
+ 'A/B_COPY/E/alpha' : Item(verb='Sending')})
+
+ expected_status.tweak(wc_rev=7)
+ expected_status.tweak('A/B_COPY/E', 'A/B_COPY/E/alpha', wc_rev=8)
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Update A/B_COPY/E back to r7
+ expected_output = wc.State(wc_dir, {
+ 'A/B_COPY/E/alpha' : Item(status='UU'),
+ 'A/B_COPY/E' : Item(status=' U'),
+ })
+
+ expected_status.tweak(wc_rev=7)
+
+ expected_disk.tweak('A/B_COPY',
+ props={SVN_PROP_MERGEINFO : '/A/B:3-5'})
+ expected_disk.tweak('A/B/E/alpha', contents="More new content")
+ expected_disk.tweak('A/B_COPY/E/alpha', contents="New content")
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True,
+ '-r', '7', E_COPY_path)
+
+ # Merge r6:7 to A/B_COPY
+ expected_output = wc.State(B_COPY_path, {
+ 'E/alpha' : Item(status='U '),
+ })
+
+ expected_mergeinfo_output = wc.State(B_COPY_path, {
+ '' : Item(status=' U'),
+ 'E/alpha' : Item(status=' U'),
+ })
+
+ expected_elision_output = wc.State(B_COPY_path, {
+ 'E/alpha' : Item(status=' U'),
+ })
+
+ expected_merge_status = wc.State(B_COPY_path, {
+ '' : Item(status=' M', wc_rev=7),
+ 'lambda' : Item(status=' ', wc_rev=7),
+ 'E' : Item(status=' ', wc_rev=7),
+ 'E/alpha' : Item(status='MM', wc_rev=7),
+ 'E/beta' : Item(status=' ', wc_rev=7),
+ 'F' : Item(status=' ', wc_rev=7),
+ })
+
+ expected_merge_disk = wc.State('', {
+ '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:3-5,7'}),
+ 'lambda' : Item("New content"),
+ 'E' : Item(),
+ 'E/alpha' : Item("More new content"),
+ 'E/beta' : Item("New content"),
+ 'F' : Item(),
+ })
+
+ expected_skip = wc.State(B_COPY_path, { })
+
+ svntest.actions.run_and_verify_merge(B_COPY_path, '6', '7',
+ sbox.repo_url + '/A/B', None,
+ expected_output,
+ expected_mergeinfo_output,
+ expected_elision_output,
+ expected_merge_disk,
+ expected_merge_status,
+ expected_skip,
+ [], True, True)
+
+ # Update just A/B_COPY/E. The mergeinfo (r3-5,7) reset on
+ # A/B_COPY/E by the update is identical to the local info on
+ # A/B_COPY, so should elide, leaving no mergeinfo on E.
+ expected_output = wc.State(wc_dir, {
+ 'A/B_COPY/E/alpha' : Item(status='GG'),
+ 'A/B_COPY/E/' : Item(status=' U'),
+ })
+
+ expected_status.tweak('A/B_COPY', status=' M', wc_rev=7)
+ expected_status.tweak('A/B_COPY/E', status=' ', wc_rev=8)
+ expected_status.tweak('A/B_COPY/E/alpha', wc_rev=8)
+ expected_status.tweak('A/B_COPY/E/beta', wc_rev=8)
+
+ expected_disk.tweak('A/B_COPY',
+ props={SVN_PROP_MERGEINFO : '/A/B:3-5,7'})
+ expected_disk.tweak('A/B_COPY/E',
+ props={SVN_PROP_MERGEINFO : '/A/B/E:3-5,7'})
+ expected_disk.tweak('A/B_COPY/E/alpha', contents="More new content",
+ props={})
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True,
+ E_COPY_path)
+
+
+#----------------------------------------------------------------------
+# Very obscure bug: Issue #2977.
+# Let's say there's a revision with
+# $ svn mv b c
+# $ svn mv a b
+# $ svn ci
+# and a later revision that modifies b. We then try a fresh checkout. If
+# the server happens to send us 'b' first, then when it later gets 'c'
+# (with a copyfrom of 'b') it might try to use the 'b' in the wc as the
+# copyfrom base. This is wrong, because 'b' was changed later; however,
+# due to a bug, the setting of svn:entry:committed-rev on 'b' is not being
+# properly seen by the client, and it chooses the wrong base. Corruption!
+#
+# Note that because this test depends on the order that the server sends
+# changes, it is very fragile; even changing the file names can avoid
+# triggering the bug.
+
+def update_copied_from_replaced_and_changed(sbox):
+ "update chooses right copyfrom for double move"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ fn1_relpath = 'A/B/E/aardvark'
+ fn2_relpath = 'A/B/E/alpha'
+ fn3_relpath = 'A/B/E/beta'
+ fn1_path = sbox.ospath(fn1_relpath)
+ fn2_path = sbox.ospath(fn2_relpath)
+ fn3_path = sbox.ospath(fn3_relpath)
+
+ # Move fn2 to fn1
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mv', fn2_path, fn1_path)
+
+ # Move fn3 to fn2
+ svntest.actions.run_and_verify_svn(None, [],
+ 'mv', fn3_path, fn2_path)
+
+ # Commit that change, creating r2.
+ expected_output = svntest.wc.State(wc_dir, {
+ fn1_relpath : Item(verb='Adding'),
+ fn2_relpath : Item(verb='Replacing'),
+ fn3_relpath : Item(verb='Deleting'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove(fn2_relpath, fn3_relpath)
+ expected_status.add({
+ fn1_relpath : Item(status=' ', wc_rev=2),
+ fn2_relpath : Item(status=' ', wc_rev=2),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Modify fn2.
+ fn2_final_contents = "I have new contents for the middle file."
+ svntest.main.file_write(fn2_path, fn2_final_contents)
+
+ # Commit the changes, creating r3.
+ expected_output = svntest.wc.State(wc_dir, {
+ fn2_relpath : Item(verb='Sending'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove(fn2_relpath, fn3_relpath)
+ expected_status.add({
+ fn1_relpath : Item(status=' ', wc_rev=2),
+ fn2_relpath : Item(status=' ', wc_rev=3),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Go back to r1.
+ expected_output = svntest.wc.State(wc_dir, {
+ fn1_relpath: Item(status='D '),
+ fn2_relpath: Item(status='A ', prev_status='D '), # D then A
+ fn3_relpath: Item(status='A '),
+ })
+
+ # Create expected disk tree for the update to rev 0
+ expected_disk = svntest.main.greek_state.copy()
+
+ # Do the update and check the results.
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ None,
+ [], False,
+ '-r', '1', wc_dir)
+
+ # And back up to 3 again.
+ expected_output = svntest.wc.State(wc_dir, {
+ fn1_relpath: Item(status='A '),
+ fn2_relpath: Item(status='A ', prev_status='D '), # D then A
+ fn3_relpath: Item(status='D '),
+ })
+
+ # Create expected disk tree for the update to rev 0
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ fn1_relpath : Item("This is the file 'alpha'.\n"),
+ })
+ expected_disk.tweak(fn2_relpath, contents=fn2_final_contents)
+ expected_disk.remove(fn3_relpath)
+
+ # reuse old expected_status, but at r3
+ expected_status.tweak(wc_rev=3)
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+#----------------------------------------------------------------------
+# Regression test: ra_neon assumes that you never delete a property on
+# a newly-added file, which is wrong if it's add-with-history.
+def update_copied_and_deleted_prop(sbox):
+ "updating a copied file with a deleted property"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ iota_path = sbox.ospath('iota')
+ iota2_path = sbox.ospath('iota2')
+
+ # Add a property on iota
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propset', 'foo', 'bar', iota_path)
+ # Commit that change, creating r2.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ })
+
+ expected_status_mixed = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status_mixed.tweak('iota', wc_rev=2)
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status_mixed)
+
+ # Copy iota to iota2 and delete the property on it.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'copy', iota_path, iota2_path)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'propdel', 'foo', iota2_path)
+
+ # Commit that change, creating r3.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota2' : Item(verb='Adding'),
+ })
+
+ expected_status_mixed.add({
+ 'iota2' : Item(status=' ', wc_rev=3),
+ })
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status_mixed)
+
+ # Update the whole wc, verifying disk as well.
+ expected_output = svntest.wc.State(wc_dir, { })
+
+ expected_disk_r3 = svntest.main.greek_state.copy()
+ expected_disk_r3.add({
+ 'iota2' : Item("This is the file 'iota'.\n"),
+ })
+ expected_disk_r3.tweak('iota', props={'foo':'bar'})
+
+ expected_status_r3 = expected_status_mixed.copy()
+ expected_status_r3.tweak(wc_rev=3)
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk_r3,
+ expected_status_r3,
+ check_props=True)
+
+ # Now go back to r2.
+ expected_output = svntest.wc.State(wc_dir, {'iota2': Item(status='D ')})
+
+ expected_disk_r2 = expected_disk_r3.copy()
+ expected_disk_r2.remove('iota2')
+
+ expected_status_r2 = expected_status_r3.copy()
+ expected_status_r2.tweak(wc_rev=2)
+ expected_status_r2.remove('iota2')
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk_r2,
+ expected_status_r2,
+ [], True,
+ "-r2", wc_dir)
+
+ # And finally, back to r3, getting an add-with-history-and-property-deleted
+ expected_output = svntest.wc.State(wc_dir, {'iota2': Item(status='A ')})
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk_r3,
+ expected_status_r3,
+ check_props=True)
+
+#----------------------------------------------------------------------
+
+def update_output_with_conflicts(rev, target, paths=None, resolved=False):
+ """Return the expected output for an update of TARGET to revision REV, in
+ which all of the PATHS are updated and conflicting.
+
+ If PATHS is None, it means [TARGET]. The output is a list of lines.
+ """
+ if paths is None:
+ paths = [target]
+
+ lines = ["Updating '%s':\n" % target]
+ for path in paths:
+ lines += ['C %s\n' % path]
+ lines += ['Updated to revision %d.\n' % rev]
+ if resolved:
+ for path in paths:
+ lines += ["Merge conflicts in '%s' marked as resolved.\n" % path]
+ lines += svntest.main.summary_of_conflicts(text_resolved=len(paths))
+ else:
+ lines += svntest.main.summary_of_conflicts(text_conflicts=len(paths))
+ return lines
+
+def update_output_with_conflicts_resolved(rev, target, paths=None):
+ """Like update_output_with_conflicts(), but where all of the conflicts are
+ resolved within the update.
+ """
+ lines = update_output_with_conflicts(rev, target, paths, resolved=True)
+ return lines
+
+#----------------------------------------------------------------------
+
+def update_accept_conflicts(sbox):
+ "update --accept automatic conflict resolution"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Make a backup copy of the working copy
+ wc_backup = sbox.add_wc_path('backup')
+ svntest.actions.duplicate_dir(wc_dir, wc_backup)
+
+ # Make a few local mods to files which will be committed
+ iota_path = sbox.ospath('iota')
+ lambda_path = sbox.ospath('A/B/lambda')
+ mu_path = sbox.ospath('A/mu')
+ alpha_path = sbox.ospath('A/B/E/alpha')
+ beta_path = sbox.ospath('A/B/E/beta')
+ pi_path = sbox.ospath('A/D/G/pi')
+ rho_path = sbox.ospath('A/D/G/rho')
+ svntest.main.file_append(lambda_path, 'Their appended text for lambda\n')
+ svntest.main.file_append(iota_path, 'Their appended text for iota\n')
+ svntest.main.file_append(mu_path, 'Their appended text for mu\n')
+ svntest.main.file_append(alpha_path, 'Their appended text for alpha\n')
+ svntest.main.file_append(beta_path, 'Their appended text for beta\n')
+ svntest.main.file_append(pi_path, 'Their appended text for pi\n')
+ svntest.main.file_append(rho_path, 'Their appended text for rho\n')
+
+ # Make a few local mods to files which will be conflicted
+ iota_path_backup = os.path.join(wc_backup, 'iota')
+ lambda_path_backup = os.path.join(wc_backup, 'A', 'B', 'lambda')
+ mu_path_backup = os.path.join(wc_backup, 'A', 'mu')
+ alpha_path_backup = os.path.join(wc_backup, 'A', 'B', 'E', 'alpha')
+ beta_path_backup = os.path.join(wc_backup, 'A', 'B', 'E', 'beta')
+ pi_path_backup = os.path.join(wc_backup, 'A', 'D', 'G', 'pi')
+ rho_path_backup = os.path.join(wc_backup, 'A', 'D', 'G', 'rho')
+ svntest.main.file_append(iota_path_backup,
+ 'My appended text for iota\n')
+ svntest.main.file_append(lambda_path_backup,
+ 'My appended text for lambda\n')
+ svntest.main.file_append(mu_path_backup,
+ 'My appended text for mu\n')
+ svntest.main.file_append(alpha_path_backup,
+ 'My appended text for alpha\n')
+ svntest.main.file_append(beta_path_backup,
+ 'My appended text for beta\n')
+ svntest.main.file_append(pi_path_backup,
+ 'My appended text for pi\n')
+ svntest.main.file_append(rho_path_backup,
+ 'My appended text for rho\n')
+
+ # Create expected output tree for 'svn ci'
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(verb='Sending'),
+ 'A/B/lambda' : Item(verb='Sending'),
+ 'A/mu' : Item(verb='Sending'),
+ 'A/B/E/alpha': Item(verb='Sending'),
+ 'A/B/E/beta': Item(verb='Sending'),
+ 'A/D/G/pi' : Item(verb='Sending'),
+ 'A/D/G/rho' : Item(verb='Sending'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('iota', wc_rev=2)
+ expected_status.tweak('A/B/lambda', wc_rev=2)
+ expected_status.tweak('A/mu', wc_rev=2)
+ expected_status.tweak('A/B/E/alpha', wc_rev=2)
+ expected_status.tweak('A/B/E/beta', wc_rev=2)
+ expected_status.tweak('A/D/G/pi', wc_rev=2)
+ expected_status.tweak('A/D/G/rho', wc_rev=2)
+
+ # Commit.
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Now we'll update each of our 7 files in wc_backup; each one will get
+ # conflicts, and we'll handle each with a different --accept option.
+
+ # Setup SVN_EDITOR and SVN_MERGE for --accept={edit,launch}.
+ svntest.main.use_editor('append_foo')
+
+ # iota: no accept option
+ # Just leave the conflicts alone, since run_and_verify_svn already uses
+ # the --non-interactive option.
+ svntest.actions.run_and_verify_svn(update_output_with_conflicts(
+ 2, iota_path_backup),
+ [],
+ 'update', iota_path_backup)
+
+ # lambda: --accept=postpone
+ # Just leave the conflicts alone.
+ svntest.actions.run_and_verify_svn(update_output_with_conflicts(
+ 2, lambda_path_backup),
+ [],
+ 'update', '--accept=postpone',
+ lambda_path_backup)
+
+ # mu: --accept=base
+ # Accept the pre-update base file.
+ svntest.actions.run_and_verify_svn(update_output_with_conflicts_resolved(
+ 2, mu_path_backup),
+ [],
+ 'update', '--accept=base',
+ mu_path_backup)
+
+ # alpha: --accept=mine
+ # Accept the user's working file.
+ svntest.actions.run_and_verify_svn(update_output_with_conflicts_resolved(
+ 2, alpha_path_backup),
+ [],
+ 'update', '--accept=mine-full',
+ alpha_path_backup)
+
+ # beta: --accept=theirs
+ # Accept their file.
+ svntest.actions.run_and_verify_svn(update_output_with_conflicts_resolved(
+ 2, beta_path_backup),
+ [],
+ 'update', '--accept=theirs-full',
+ beta_path_backup)
+
+ # pi: --accept=edit
+ # Run editor and accept the edited file. The merge tool will leave
+ # conflicts in place, so expect a message on stderr, but expect
+ # svn to exit with an exit code of 0.
+ svntest.actions.run_and_verify_svn2(update_output_with_conflicts_resolved(
+ 2, pi_path_backup),
+ "system(.*) returned.*", 0,
+ 'update', '--accept=edit',
+ '--force-interactive',
+ pi_path_backup)
+
+ # rho: --accept=launch
+ # Run the external merge tool, it should leave conflict markers in place.
+ svntest.actions.run_and_verify_svn(update_output_with_conflicts(
+ 2, rho_path_backup),
+ [],
+ 'update', '--accept=launch',
+ '--force-interactive',
+ rho_path_backup)
+
+ # Set the expected disk contents for the test
+ expected_disk = svntest.main.greek_state.copy()
+
+ expected_disk.tweak('iota', contents=("This is the file 'iota'.\n"
+ '<<<<<<< .mine\n'
+ 'My appended text for iota\n'
+ '||||||| .r1\n'
+ '=======\n'
+ 'Their appended text for iota\n'
+ '>>>>>>> .r2\n'))
+ expected_disk.tweak('A/B/lambda', contents=("This is the file 'lambda'.\n"
+ '<<<<<<< .mine\n'
+ 'My appended text for lambda\n'
+ '||||||| .r1\n'
+ '=======\n'
+ 'Their appended text for lambda\n'
+ '>>>>>>> .r2\n'))
+ expected_disk.tweak('A/mu', contents="This is the file 'mu'.\n")
+ expected_disk.tweak('A/B/E/alpha', contents=("This is the file 'alpha'.\n"
+ 'My appended text for alpha\n'))
+ expected_disk.tweak('A/B/E/beta', contents=("This is the file 'beta'.\n"
+ 'Their appended text for beta\n'))
+ expected_disk.tweak('A/D/G/pi', contents=("This is the file 'pi'.\n"
+ '<<<<<<< .mine\n'
+ 'My appended text for pi\n'
+ '||||||| .r1\n'
+ '=======\n'
+ 'Their appended text for pi\n'
+ '>>>>>>> .r2\n'
+ 'foo\n'))
+ expected_disk.tweak('A/D/G/rho', contents=("This is the file 'rho'.\n"
+ '<<<<<<< .mine\n'
+ 'My appended text for rho\n'
+ '||||||| .r1\n'
+ '=======\n'
+ 'Their appended text for rho\n'
+ '>>>>>>> .r2\n'
+ 'foo\n'))
+
+ # Set the expected extra files for the test
+ extra_files = ['iota.*\.r1', 'iota.*\.r2', 'iota.*\.mine',
+ 'lambda.*\.r1', 'lambda.*\.r2', 'lambda.*\.mine',
+ 'rho.*\.r1', 'rho.*\.r2', 'rho.*\.mine']
+
+ # Set the expected status for the test
+ expected_status = svntest.actions.get_virginal_state(wc_backup, 2)
+ expected_status.tweak('iota', 'A/B/lambda', 'A/mu',
+ 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/D/G/pi', 'A/D/G/rho', wc_rev=2)
+ expected_status.tweak('iota', status='C ')
+ expected_status.tweak('A/B/lambda', status='C ')
+ expected_status.tweak('A/mu', status='M ')
+ expected_status.tweak('A/B/E/alpha', status='M ')
+ expected_status.tweak('A/B/E/beta', status=' ')
+ expected_status.tweak('A/D/G/pi', status='M ')
+ expected_status.tweak('A/D/G/rho', status='C ')
+
+ # Set the expected output for the test
+ expected_output = wc.State(wc_backup, {})
+
+ # Do the update and check the results in three ways.
+ svntest.actions.run_and_verify_update(wc_backup,
+ expected_output,
+ expected_disk,
+ expected_status,
+ extra_files=extra_files)
+
+
+#----------------------------------------------------------------------
+
+
+def update_uuid_changed(sbox):
+ "update fails when repos uuid changed"
+
+ # read_only=False, since we don't want to run setuuid on the (shared)
+ # pristine repository.
+ sbox.build(read_only = False)
+
+ wc_dir = sbox.wc_dir
+ repo_dir = sbox.repo_dir
+
+ uuid_before = svntest.actions.get_wc_uuid(wc_dir)
+
+ # Change repository's uuid.
+ svntest.actions.run_and_verify_svnadmin(None, [],
+ 'setuuid', repo_dir)
+
+ # 'update' detected the new uuid...
+ svntest.actions.run_and_verify_svn(None, '.*UUID.*',
+ 'update', wc_dir)
+
+ # ...and didn't overwrite the old uuid.
+ uuid_after = svntest.actions.get_wc_uuid(wc_dir)
+ if uuid_before != uuid_after:
+ raise svntest.Failure
+
+
+#----------------------------------------------------------------------
+
+# Issue #1672: if an update deleting a dir prop is interrupted (by a
+# local obstruction, for example) then restarting the update will not
+# delete the prop, causing the wc to become out of sync with the
+# repository.
+def restarted_update_should_delete_dir_prop(sbox):
+ "restarted update should delete dir prop"
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ A_path = sbox.ospath('A')
+ zeta_path = os.path.join(A_path, 'zeta')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ # Commit a propset on A.
+ svntest.main.run_svn(None, 'propset', 'prop', 'val', A_path)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A': Item(verb='Sending'),
+ })
+
+ expected_status.tweak('A', wc_rev=2)
+
+ svntest.actions.run_and_verify_commit(wc_dir, expected_output,
+ expected_status)
+
+ # Create a second working copy.
+ ### Does this hack still work with wc-ng?
+ other_wc = sbox.add_wc_path('other')
+ svntest.actions.duplicate_dir(wc_dir, other_wc)
+
+ other_A_path = os.path.join(other_wc, 'A')
+ other_zeta_path = os.path.join(other_wc, 'A', 'zeta')
+
+ # In the second working copy, delete A's prop and add a new file.
+ svntest.main.run_svn(None, 'propdel', 'prop', other_A_path)
+ svntest.main.file_write(other_zeta_path, 'New file\n')
+ svntest.main.run_svn(None, 'add', other_zeta_path)
+
+ expected_output = svntest.wc.State(other_wc, {
+ 'A': Item(verb='Sending'),
+ 'A/zeta' : Item(verb='Adding'),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(other_wc, 1)
+ expected_status.tweak('A', wc_rev=3)
+ expected_status.add({
+ 'A/zeta' : Item(status=' ', wc_rev=3),
+ })
+
+ svntest.actions.run_and_verify_commit(other_wc, expected_output,
+ expected_status)
+
+ # Back in the first working copy, create an obstructing path and
+ # update. The update will flag a tree conflict.
+ svntest.main.file_write(zeta_path, 'Obstructing file\n')
+
+ #svntest.factory.make(sbox, 'svn up')
+ #exit(0)
+ # svn up
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(status=' U'),
+ 'A/zeta' : Item(status=' ', treeconflict='C'),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.add({
+ 'A/zeta' : Item(contents="Obstructing file\n"),
+ })
+
+ expected_status = actions.get_virginal_state(wc_dir, 3)
+ expected_status.add({
+ 'A/zeta' : Item(status='D ', treeconflict='C', wc_rev='3'),
+ })
+
+ actions.run_and_verify_update(wc_dir, expected_output, expected_disk,
+ expected_status)
+
+ # Now, delete the obstructing path and rerun the update.
+ os.unlink(zeta_path)
+
+ svntest.main.run_svn(None, 'revert', zeta_path)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.tweak('A', props = {})
+ expected_disk.add({
+ 'A/zeta' : Item("New file\n"),
+ })
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 3)
+ expected_status.add({
+ 'A/zeta' : Item(status=' ', wc_rev=3),
+ })
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ check_props = True)
+
+#----------------------------------------------------------------------
+
+# Detect tree conflicts among files and directories,
+# edited or deleted in a deep directory structure.
+#
+# See use cases 1-3 in notes/tree-conflicts/use-cases.txt for background.
+
+# convenience definitions
+leaf_edit = svntest.deeptrees.deep_trees_leaf_edit
+tree_del = svntest.deeptrees.deep_trees_tree_del
+leaf_del = svntest.deeptrees.deep_trees_leaf_del
+
+disk_after_leaf_edit = svntest.deeptrees.deep_trees_after_leaf_edit
+disk_after_leaf_del = svntest.deeptrees.deep_trees_after_leaf_del
+disk_after_tree_del = svntest.deeptrees.deep_trees_after_tree_del
+
+deep_trees_conflict_output = svntest.deeptrees.deep_trees_conflict_output
+deep_trees_conflict_output_skipped = \
+ svntest.deeptrees.deep_trees_conflict_output_skipped
+deep_trees_status_local_tree_del = \
+ svntest.deeptrees.deep_trees_status_local_tree_del
+deep_trees_status_local_leaf_edit = \
+ svntest.deeptrees.deep_trees_status_local_leaf_edit
+
+DeepTreesTestCase = svntest.deeptrees.DeepTreesTestCase
+
+
+def tree_conflicts_on_update_1_1(sbox):
+ "tree conflicts 1.1: tree del, leaf edit on update"
+
+ # use case 1, as in notes/tree-conflicts/use-cases.txt
+ # 1.1) local tree delete, incoming leaf edit
+
+ sbox.build()
+
+ expected_output = deep_trees_conflict_output.copy()
+ expected_output.add({
+ 'DDF/D1/D2' : Item(status=' ', treeconflict='U'),
+ 'DDF/D1/D2/gamma' : Item(status=' ', treeconflict='U'),
+ 'DD/D1/D2' : Item(status=' ', treeconflict='U'),
+ 'DD/D1/D2/epsilon' : Item(status=' ', treeconflict='A'),
+ 'DDD/D1/D2' : Item(status=' ', treeconflict='U'),
+ 'DDD/D1/D2/D3' : Item(status=' ', treeconflict='U'),
+ 'DDD/D1/D2/D3/zeta' : Item(status=' ', treeconflict='A'),
+ 'D/D1/delta' : Item(status=' ', treeconflict='A'),
+ 'DF/D1/beta' : Item(status=' ', treeconflict='U'),
+ })
+
+ expected_disk = svntest.wc.State('', {
+ 'F' : Item(),
+ 'D' : Item(),
+ 'DF' : Item(),
+ 'DD' : Item(),
+ 'DDF' : Item(),
+ 'DDD' : Item(),
+ })
+ # The files delta, epsilon, and zeta are incoming additions, but since
+ # they are all within locally deleted trees they should also be scheduled
+ # for deletion.
+ expected_status = deep_trees_status_local_tree_del.copy()
+ expected_status.add({
+ 'D/D1/delta' : Item(status='D '),
+ 'DD/D1/D2/epsilon' : Item(status='D '),
+ 'DDD/D1/D2/D3/zeta' : Item(status='D '),
+ })
+
+ # Update to the target rev.
+ expected_status.tweak(wc_rev=3)
+
+ expected_info = {
+ 'F/alpha' : {
+ 'Tree conflict' :
+ '^local file delete, incoming file edit upon update'
+ + ' Source left: .file.*/F/alpha@2'
+ + ' Source right: .file.*/F/alpha@3$',
+ },
+ 'DF/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir edit upon update'
+ + ' Source left: .dir.*/DF/D1@2'
+ + ' Source right: .dir.*/DF/D1@3$',
+ },
+ 'DDF/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir edit upon update'
+ + ' Source left: .dir.*/DDF/D1@2'
+ + ' Source right: .dir.*/DDF/D1@3$',
+ },
+ 'D/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir edit upon update'
+ + ' Source left: .dir.*/D/D1@2'
+ + ' Source right: .dir.*/D/D1@3$',
+ },
+ 'DD/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir edit upon update'
+ + ' Source left: .dir.*/DD/D1@2'
+ + ' Source right: .dir.*/DD/D1@3$',
+ },
+ 'DDD/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir edit upon update'
+ + ' Source left: .dir.*/DDD/D1@2'
+ + ' Source right: .dir.*/DDD/D1@3$',
+ },
+ }
+
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_update(sbox,
+ [ DeepTreesTestCase("local_tree_del_incoming_leaf_edit",
+ tree_del,
+ leaf_edit,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_info = expected_info) ] )
+
+
+def tree_conflicts_on_update_1_2(sbox):
+ "tree conflicts 1.2: tree del, leaf del on update"
+
+ # 1.2) local tree delete, incoming leaf delete
+
+ sbox.build()
+
+ expected_output = deep_trees_conflict_output.copy()
+ expected_output.add({
+ 'DDD/D1/D2' : Item(status=' ', treeconflict='U'),
+ 'DDD/D1/D2/D3' : Item(status=' ', treeconflict='D'),
+ 'DF/D1/beta' : Item(status=' ', treeconflict='D'),
+ 'DD/D1/D2' : Item(status=' ', treeconflict='D'),
+ 'DDF/D1/D2' : Item(status=' ', treeconflict='U'),
+ 'DDF/D1/D2/gamma' : Item(status=' ', treeconflict='D'),
+ })
+
+ expected_disk = svntest.wc.State('', {
+ 'F' : Item(),
+ 'D' : Item(),
+ 'DF' : Item(),
+ 'DD' : Item(),
+ 'DDF' : Item(),
+ 'DDD' : Item(),
+ })
+
+ expected_status = deep_trees_status_local_tree_del.copy()
+
+ # Expect the incoming leaf deletes to actually occur. Even though they
+ # are within (or in the case of F/alpha and D/D1 are the same as) the
+ # trees locally scheduled for deletion we must still delete them and
+ # update the scheduled for deletion items to the target rev. Otherwise
+ # once the conflicts are resolved we still have a mixed-rev WC we can't
+ # commit without updating...which, you guessed it, raises tree conflicts
+ # again, repeat ad infinitum - see issue #3334.
+ #
+ # Update to the target rev.
+ expected_status.tweak(wc_rev=3)
+ expected_status.tweak('F/alpha',
+ 'D/D1',
+ status='! ', wc_rev=None)
+ # Remove the incoming deletes from status and disk.
+ expected_status.remove('DD/D1/D2',
+ 'DDD/D1/D2/D3',
+ 'DDF/D1/D2/gamma',
+ 'DF/D1/beta')
+
+ expected_info = {
+ 'F/alpha' : {
+ 'Tree conflict' :
+ '^local file delete, incoming file delete or move upon update'
+ + ' Source left: .file.*/F/alpha@2'
+ + ' Source right: .none.*(/F/alpha@3)?$',
+ },
+ 'DF/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir edit upon update'
+ + ' Source left: .dir.*/DF/D1@2'
+ + ' Source right: .dir.*/DF/D1@3$',
+ },
+ 'DDF/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir edit upon update'
+ + ' Source left: .dir.*/DDF/D1@2'
+ + ' Source right: .dir.*/DDF/D1@3$',
+ },
+ 'D/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir delete or move upon update'
+ + ' Source left: .dir.*/D/D1@2'
+ + ' Source right: .none.*(/D/D1@3)?$',
+ },
+ 'DD/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir edit upon update'
+ + ' Source left: .dir.*/DD/D1@2'
+ + ' Source right: .dir.*/DD/D1@3$',
+ },
+ 'DDD/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir edit upon update'
+ + ' Source left: .dir.*/DDD/D1@2'
+ + ' Source right: .dir.*/DDD/D1@3$',
+ },
+ }
+
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_update(sbox,
+ [ DeepTreesTestCase("local_tree_del_incoming_leaf_del",
+ tree_del,
+ leaf_del,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_info = expected_info) ] )
+
+
+def tree_conflicts_on_update_2_1(sbox):
+ "tree conflicts 2.1: leaf edit, tree del on update"
+
+ # use case 2, as in notes/tree-conflicts/use-cases.txt
+ # 2.1) local leaf edit, incoming tree delete
+
+ expected_output = deep_trees_conflict_output
+
+ expected_disk = disk_after_leaf_edit
+
+ expected_status = deep_trees_status_local_leaf_edit.copy()
+ # Adjust the status of the roots of the six subtrees scheduled for deletion
+ # during the update. Since these are all tree conflicts, they will all be
+ # scheduled for addition as copies with history - see Issue #3334.
+ expected_status.tweak(
+ 'D/D1',
+ 'F/alpha',
+ 'DD/D1',
+ 'DF/D1',
+ 'DDD/D1',
+ 'DDF/D1',
+ status='A ', copied='+', wc_rev='-')
+ # See the status of all the paths *under* the above six subtrees. Only the
+ # roots of the added subtrees show as schedule 'A', these child paths show
+ # only that history is scheduled with the commit.
+ expected_status.tweak(
+ 'DD/D1/D2',
+ 'DDD/D1/D2',
+ 'DDD/D1/D2/D3',
+ 'DF/D1/beta',
+ 'DDF/D1/D2',
+ 'DDF/D1/D2/gamma',
+ copied='+', wc_rev='-')
+
+ expected_info = {
+ 'F/alpha' : {
+ 'Tree conflict' :
+ '^local file edit, incoming file delete or move upon update'
+ + ' Source left: .file.*/F/alpha@2'
+ + ' Source right: .none.*(/F/alpha@3)?$',
+ },
+ 'DF/D1' : {
+ 'Tree conflict' :
+ '^local dir edit, incoming dir delete or move upon update'
+ + ' Source left: .dir.*/DF/D1@2'
+ + ' Source right: .none.*(/DF/D1@3)?$',
+ },
+ 'DDF/D1' : {
+ 'Tree conflict' :
+ '^local dir edit, incoming dir delete or move upon update'
+ + ' Source left: .dir.*/DDF/D1@2'
+ + ' Source right: .none.*(/DDF/D1@3)?$',
+ },
+ 'D/D1' : {
+ 'Tree conflict' :
+ '^local dir edit, incoming dir delete or move upon update'
+ + ' Source left: .dir.*/D/D1@2'
+ + ' Source right: .none.*(/D/D1@3)?$',
+ },
+ 'DD/D1' : {
+ 'Tree conflict' :
+ '^local dir edit, incoming dir delete or move upon update'
+ + ' Source left: .dir.*/DD/D1@2'
+ + ' Source right: .none.*(/DD/D1@3)?$',
+ },
+ 'DDD/D1' : {
+ 'Tree conflict' :
+ '^local dir edit, incoming dir delete or move upon update'
+ + ' Source left: .dir.*/DDD/D1@2'
+ + ' Source right: .none.*(/DDD/D1@3)?$',
+ },
+ }
+
+ ### D/D1/delta is locally-added during leaf_edit. when tree_del executes,
+ ### it will delete D/D1, and the update reschedules local D/D1 for
+ ### local-copy from its original revision. however, right now, we cannot
+ ### denote that delta is a local-add rather than a child of that D/D1 copy.
+ ### thus, it appears in the status output as a (M)odified child.
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_update(sbox,
+ [ DeepTreesTestCase("local_leaf_edit_incoming_tree_del",
+ leaf_edit,
+ tree_del,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_info = expected_info) ] )
+
+
+
+def tree_conflicts_on_update_2_2(sbox):
+ "tree conflicts 2.2: leaf del, tree del on update"
+
+ # 2.2) local leaf delete, incoming tree delete
+
+ ### Current behaviour fails to show conflicts when deleting
+ ### a directory tree that has modifications. (Will be solved
+ ### when dirs_same_p() is implemented)
+ expected_output = deep_trees_conflict_output
+
+ expected_disk = svntest.wc.State('', {
+ 'DDF/D1/D2' : Item(),
+ 'F' : Item(),
+ 'D' : Item(),
+ 'DF/D1' : Item(),
+ 'DD/D1' : Item(),
+ 'DDD/D1/D2' : Item(),
+ })
+
+ expected_status = svntest.deeptrees.deep_trees_virginal_state.copy()
+ expected_status.add({'' : Item()})
+ expected_status.tweak(contents=None, status=' ', wc_rev=3)
+ # Tree conflicts.
+ expected_status.tweak(
+ 'D/D1',
+ 'F/alpha',
+ 'DD/D1',
+ 'DF/D1',
+ 'DDD/D1',
+ 'DDF/D1',
+ treeconflict='C', wc_rev=2)
+
+ # Expect the incoming tree deletes and the local leaf deletes to mean
+ # that all deleted paths are *really* gone, not simply scheduled for
+ # deletion.
+ expected_status.tweak('DD/D1', 'DF/D1', 'DDF/D1', 'DDD/D1',
+ status='A ', copied='+', treeconflict='C',
+ wc_rev='-')
+ expected_status.tweak('DDF/D1/D2', 'DDD/D1/D2',
+ copied='+', wc_rev='-')
+ expected_status.tweak('DD/D1/D2', 'DF/D1/beta', 'DDD/D1/D2/D3',
+ 'DDF/D1/D2/gamma',
+ status='D ', copied='+', wc_rev='-')
+ expected_status.tweak('F/alpha', 'D/D1',
+ status='! ', treeconflict='C', wc_rev=None)
+
+ expected_info = {
+ 'F/alpha' : {
+ 'Tree conflict' :
+ '^local file delete, incoming file delete or move upon update'
+ + ' Source left: .file.*/F/alpha@2'
+ + ' Source right: .none.*(/F/alpha@3)?$',
+ },
+ 'DF/D1' : {
+ 'Tree conflict' :
+ '^local dir edit, incoming dir delete or move upon update'
+ + ' Source left: .dir.*/DF/D1@2'
+ + ' Source right: .none.*(/DF/D1@3)?$',
+ },
+ 'DDF/D1' : {
+ 'Tree conflict' :
+ '^local dir edit, incoming dir delete or move upon update'
+ + ' Source left: .dir.*/DDF/D1@2'
+ + ' Source right: .none.*(/DDF/D1@3)?$',
+ },
+ 'D/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir delete or move upon update'
+ + ' Source left: .dir.*/D/D1@2'
+ + ' Source right: .none.*(/D/D1@3)?$',
+ },
+ 'DD/D1' : {
+ 'Tree conflict' :
+ '^local dir edit, incoming dir delete or move upon update'
+ + ' Source left: .dir.*/DD/D1@2'
+ + ' Source right: .none.*(/DD/D1@3)?$',
+ },
+ 'DDD/D1' : {
+ 'Tree conflict' :
+ '^local dir edit, incoming dir delete or move upon update'
+ + ' Source left: .dir.*/DDD/D1@2'
+ + ' Source right: .none.*(/DDD/D1@3)?$',
+ },
+ }
+
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_update(sbox,
+ [ DeepTreesTestCase("local_leaf_del_incoming_tree_del",
+ leaf_del,
+ tree_del,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_info = expected_info) ] )
+
+
+#----------------------------------------------------------------------
+# Test for issue #3329 'Update throws error when skipping some tree
+# conflicts'
+#
+# Marked as XFail until issue #3329 is resolved.
+@Issue(3329)
+def tree_conflicts_on_update_2_3(sbox):
+ "tree conflicts 2.3: skip on 2nd update"
+
+ # Test that existing tree conflicts are skipped
+
+ expected_output = deep_trees_conflict_output_skipped
+
+ expected_disk = disk_after_leaf_edit
+
+ expected_status = deep_trees_status_local_leaf_edit.copy()
+
+ # Adjust the status of the roots of the six subtrees scheduled for deletion
+ # during the update. Since these are all tree conflicts, they will all be
+ # scheduled for addition as copies with history - see Issue #3334.
+ expected_status.tweak(
+ 'D/D1',
+ 'F/alpha',
+ 'DD/D1',
+ 'DF/D1',
+ 'DDD/D1',
+ 'DDF/D1',
+ status='A ', copied='+', wc_rev='-')
+ # See the status of all the paths *under* the above six subtrees. Only the
+ # roots of the added subtrees show as schedule 'A', these child paths show
+ # only that history is scheduled with the commit.
+ expected_status.tweak(
+ 'DD/D1/D2',
+ 'DDD/D1/D2',
+ 'DDD/D1/D2/D3',
+ 'DF/D1/beta',
+ 'DDF/D1/D2',
+ 'DDF/D1/D2/gamma',
+ copied='+', wc_rev='-')
+
+ # Paths where output should be a single 'Skipped' message.
+ skip_paths = [
+ 'D/D1',
+ 'F/alpha',
+ 'DDD/D1',
+ 'DDD/D1/D2/D3',
+ ]
+
+ # This is where the test fails. Repeat updates on '', 'D', 'F', or
+ # 'DDD' report no skips.
+ chdir_skip_paths = [
+ ('D', 'D1'),
+ ('F', 'alpha'),
+ ('DDD', 'D1'),
+ ('', ['D/D1', 'F/alpha', 'DD/D1', 'DF/D1', 'DDD/D1', 'DDF/D1']),
+ ]
+ # Note: We don't step *into* a directory that's deleted in the repository.
+ # E.g. ('DDD/D1/D2', '') would correctly issue a "path does not
+ # exist" error, because at that point it can't know about the
+ # tree-conflict on DDD/D1. ('D/D1', '') likewise, as tree-conflict
+ # information is stored in the parent of a victim directory.
+
+ svntest.deeptrees.deep_trees_skipping_on_update(sbox,
+ DeepTreesTestCase("local_leaf_edit_incoming_tree_del_skipping",
+ leaf_edit,
+ tree_del,
+ expected_output,
+ expected_disk,
+ expected_status),
+ skip_paths,
+ chdir_skip_paths)
+
+
+def tree_conflicts_on_update_3(sbox):
+ "tree conflicts 3: tree del, tree del on update"
+
+ # use case 3, as in notes/tree-conflicts/use-cases.txt
+ # local tree delete, incoming tree delete
+
+ expected_output = deep_trees_conflict_output
+
+ expected_disk = svntest.wc.State('', {
+ 'F' : Item(),
+ 'D' : Item(),
+ 'DF' : Item(),
+ 'DD' : Item(),
+ 'DDF' : Item(),
+ 'DDD' : Item(),
+ })
+ expected_status = deep_trees_status_local_tree_del.copy()
+
+ # Expect the incoming tree deletes and the local tree deletes to mean
+ # that all deleted paths are *really* gone, not simply scheduled for
+ # deletion.
+ expected_status.tweak('F/alpha',
+ 'D/D1',
+ 'DD/D1',
+ 'DF/D1',
+ 'DDD/D1',
+ 'DDF/D1',
+ status='! ', wc_rev=None)
+ # Remove from expected status and disk everything below the deleted paths.
+ expected_status.remove('DD/D1/D2',
+ 'DF/D1/beta',
+ 'DDD/D1/D2',
+ 'DDD/D1/D2/D3',
+ 'DDF/D1/D2',
+ 'DDF/D1/D2/gamma',)
+
+ expected_info = {
+ 'F/alpha' : {
+ 'Tree conflict' :
+ '^local file delete, incoming file delete or move upon update'
+ + ' Source left: .file.*/F/alpha@2'
+ + ' Source right: .none.*(/F/alpha@3)?$',
+ },
+ 'DF/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir delete or move upon update'
+ + ' Source left: .dir.*/DF/D1@2'
+ + ' Source right: .none.*(/DF/D1@3)?$',
+ },
+ 'DDF/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir delete or move upon update'
+ + ' Source left: .dir.*/DDF/D1@2'
+ + ' Source right: .none.*(/DDF/D1@3)?$',
+ },
+ 'D/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir delete or move upon update'
+ + ' Source left: .dir.*/D/D1@2'
+ + ' Source right: .none.*(/D/D1@3)?$',
+ },
+ 'DD/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir delete or move upon update'
+ + ' Source left: .dir.*/DD/D1@2'
+ + ' Source right: .none.*(/DD/D1@3)?$',
+ },
+ 'DDD/D1' : {
+ 'Tree conflict' :
+ '^local dir delete, incoming dir delete or move upon update'
+ + ' Source left: .dir.*/DDD/D1@2'
+ + ' Source right: .none.*(/DDD/D1@3)?$',
+ },
+ }
+
+ svntest.deeptrees.deep_trees_run_tests_scheme_for_update(sbox,
+ [ DeepTreesTestCase("local_tree_del_incoming_tree_del",
+ tree_del,
+ tree_del,
+ expected_output,
+ expected_disk,
+ expected_status,
+ expected_info = expected_info) ] )
+
+# Issue #3334: a modify-on-deleted tree conflict should leave the node
+# updated to the target revision but still scheduled for deletion.
+def tree_conflict_uc1_update_deleted_tree(sbox):
+  "tree conflicts on update UC1, update deleted tree"
+  # The one-line string above is the function's docstring; presumably the
+  # svntest harness uses it as the displayed test title — confirm in svntest.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  from svntest.actions import run_and_verify_svn, run_and_verify_resolve
+  from svntest.actions import run_and_verify_update, run_and_verify_commit
+  from svntest.verify import AnyOutput
+
+  # NOTE(review): the triple-quoted string below is NOT a docstring (it does
+  # not open the function body); it is a bare expression statement with no
+  # runtime effect, kept purely as an extended in-source explanation.
+  """A directory tree 'D1' should end up exactly the same in these two
+  scenarios:
+
+  New scenario:
+  [[[
+  svn checkout -r1  # in which D1 has its original state
+  svn delete D1
+  svn update -r2  # update revs & bases to r2
+  svn resolve --accept=mine  # keep the local, deleted version
+  ]]]
+
+  Existing scenario:
+  [[[
+  svn checkout -r2  # in which D1 is already modified
+  svn delete D1
+  ]]]
+  """
+
+  A = sbox.ospath('A')
+
+  def modify_dir(dir):
+    """Make some set of local modifications to an existing tree:
+    A prop change, add a child, delete a child, change a child."""
+    run_and_verify_svn(AnyOutput, [], 'propset', 'p', 'v', dir)
+
+    path = os.path.join(dir, 'new_file')
+    svntest.main.file_write(path, "This is the file 'new_file'.\n")
+    svntest.actions.run_and_verify_svn(None, [], 'add', path)
+
+    path = os.path.join(dir, 'C', 'N')
+    os.mkdir(path)
+    path2 = os.path.join(dir, 'C', 'N', 'nu')
+    svntest.main.file_write(path2, "This is the file 'nu'.\n")
+    # NOTE(review): adds only 'C/N'; 'C/N/nu' is picked up because 'add' is
+    # recursive by default — TODO confirm that is the intent here.
+    svntest.actions.run_and_verify_svn(None, [], 'add', path)
+
+    path = os.path.join(dir, 'B', 'lambda')
+    svntest.actions.run_and_verify_svn(None, [], 'delete', path)
+
+    path = os.path.join(dir, 'B', 'E', 'alpha')
+    svntest.main.file_append(path, "An extra line.\n")
+
+  # Prep for both scenarios
+  modify_dir(A)
+  run_and_verify_svn(AnyOutput, [], 'ci', A, '-m', 'modify_dir')
+  run_and_verify_svn(AnyOutput, [], 'up', wc_dir)
+
+  # Existing scenario
+  wc2 = sbox.add_wc_path('wc2')
+  A2 = os.path.join(wc2, 'A')
+  svntest.actions.duplicate_dir(sbox.wc_dir, wc2)
+  run_and_verify_svn(AnyOutput, [], 'delete', A2)
+
+  # New scenario (starts at the revision before the committed mods)
+  run_and_verify_svn(AnyOutput, [], 'up', A, '-r1')
+  run_and_verify_svn(AnyOutput, [], 'delete', A)
+
+  # Output/disk/status are not checked for the update itself; only the
+  # post-resolve status matters below.
+  expected_output = None
+  expected_disk = None
+  expected_status = None
+
+  run_and_verify_update(A, expected_output, expected_disk, expected_status)
+  # Keep the local deletion ('working') despite the incoming modification.
+  run_and_verify_resolve([A], '--recursive', '--accept=working', A)
+
+  resolved_status = svntest.wc.State('', {
+    '' : Item(status=' ', wc_rev=2),
+    'A' : Item(status='D ', wc_rev=2),
+    'A/B' : Item(status='D ', wc_rev=2),
+    'A/B/E' : Item(status='D ', wc_rev=2),
+    'A/B/E/alpha' : Item(status='D ', wc_rev=2),
+    'A/B/E/beta' : Item(status='D ', wc_rev=2),
+    'A/B/F' : Item(status='D ', wc_rev=2),
+    'A/mu' : Item(status='D ', wc_rev=2),
+    'A/C' : Item(status='D ', wc_rev=2),
+    'A/C/N' : Item(status='D ', wc_rev=2),
+    'A/C/N/nu' : Item(status='D ', wc_rev=2),
+    'A/D' : Item(status='D ', wc_rev=2),
+    'A/D/gamma' : Item(status='D ', wc_rev=2),
+    'A/D/G' : Item(status='D ', wc_rev=2),
+    'A/D/G/pi' : Item(status='D ', wc_rev=2),
+    'A/D/G/rho' : Item(status='D ', wc_rev=2),
+    'A/D/G/tau' : Item(status='D ', wc_rev=2),
+    'A/D/H' : Item(status='D ', wc_rev=2),
+    'A/D/H/chi' : Item(status='D ', wc_rev=2),
+    'A/D/H/omega' : Item(status='D ', wc_rev=2),
+    'A/D/H/psi' : Item(status='D ', wc_rev=2),
+    'A/new_file' : Item(status='D ', wc_rev=2),
+    'iota' : Item(status=' ', wc_rev=2),
+  })
+
+  # The status of the new and old scenarios should be identical.
+  expected_status = resolved_status.copy()
+  expected_status.wc_dir = wc2
+
+  svntest.actions.run_and_verify_status(wc2, expected_status)
+
+  expected_status = resolved_status.copy()
+  expected_status.wc_dir = wc_dir
+
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+  # Just for kicks, try to commit.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A' : Item(verb='Deleting'),
+  })
+
+  expected_status = svntest.wc.State(wc_dir, {
+    '' : Item(status=' ', wc_rev=2),
+    'iota' : Item(status=' ', wc_rev=2),
+  })
+
+  run_and_verify_commit(wc_dir, expected_output, expected_status,
+                        [], wc_dir, '-m', 'commit resolved tree')
+
+
+# Issue #3334: a delete-onto-modified tree conflict should leave the node
+# scheduled for re-addition.
+@Issue(3334)
+def tree_conflict_uc2_schedule_re_add(sbox):
+  "tree conflicts on update UC2, schedule re-add"
+  sbox.build()
+  # The test chdirs into the WC; remember where to come back to.
+  saved_cwd = os.getcwd()
+  os.chdir(sbox.wc_dir)
+
+  from svntest.actions import run_and_verify_svn, run_and_verify_resolve
+  from svntest.actions import run_and_verify_update
+  from svntest.verify import AnyOutput
+
+  # NOTE(review): the triple-quoted string below is a bare expression, not a
+  # docstring; it documents the two scenarios being compared and has no
+  # runtime effect.
+  """A directory tree 'D1' should end up exactly the same in these two
+  scenarios:
+
+  New scenario:
+  [[[
+  svn checkout -r1  # in which D1 exists
+  modify_d1  # make local mods in D1
+  svn update -r2  # tries to delete D1
+  svn resolve --accept=mine  # keep the local, re-added version
+  ]]]
+
+  Existing scenario:
+  [[[
+  svn checkout -r2  # in which D1 does not exist
+  svn copy -r1 D1 .  # make a pristine copy of D1@1
+  modify_d1  # make local mods in D1
+  ]]]
+
+  where modify_d1 makes property changes to D1 itself and/or
+  adds/deletes/modifies any of D1's children.
+  """
+
+  dir = 'A'  # an existing tree in the WC and repos
+  dir_url = sbox.repo_url + '/' + dir
+
+  def modify_dir(dir):
+    """Make some set of local modifications to an existing tree:
+    A prop change, add a child, delete a child, change a child."""
+    run_and_verify_svn(AnyOutput, [],
+                       'propset', 'p', 'v', dir)
+    path = os.path.join(dir, 'new_file')
+    svntest.main.file_write(path, "This is the file 'new_file'.\n")
+    svntest.actions.run_and_verify_svn(None, [], 'add', path)
+
+    path = os.path.join(dir, 'B', 'lambda')
+    svntest.actions.run_and_verify_svn(None, [], 'delete', path)
+
+    path = os.path.join(dir, 'B', 'E', 'alpha')
+    svntest.main.file_append(path, "An extra line.\n")
+
+  # Prepare the repos so that a later 'update' has an incoming deletion:
+  # Delete the dir in the repos, making r2
+  run_and_verify_svn(AnyOutput, [],
+                     '-m', '', 'delete', dir_url)
+
+  # Existing scenario
+  os.chdir(saved_cwd)
+  wc2 = sbox.add_wc_path('wc2')
+  dir2 = os.path.join(wc2, dir)
+  svntest.actions.duplicate_dir(sbox.wc_dir, wc2)
+  run_and_verify_svn(AnyOutput, [], 'up', wc2)
+  # Resurrect a pristine copy of the dir as it was in r1, then modify it.
+  run_and_verify_svn(AnyOutput, [], 'copy', dir_url + '@1', dir2)
+  modify_dir(dir2)
+
+  # New scenario
+  # (The dir is already checked out.)
+  os.chdir(sbox.wc_dir)
+  modify_dir(dir)
+
+  expected_output = None
+  expected_disk = None
+  expected_status = None
+  run_and_verify_update('A', expected_output, expected_disk, expected_status)
+  # Keep the locally modified tree, scheduling it for re-add over the
+  # incoming deletion.
+  run_and_verify_resolve([dir], '--recursive', '--accept=working', dir)
+
+  os.chdir(saved_cwd)
+
+  def get_status(dir):
+    expected_status = svntest.wc.State(dir, {
+      '' : Item(status=' ', wc_rev='2'),
+      'A' : Item(status='A ', wc_rev='-', copied='+'),
+      'A/B' : Item(status=' ', wc_rev='-', copied='+'),
+      'A/B/lambda' : Item(status='D ', wc_rev='-', copied='+'),
+      'A/B/E' : Item(status=' ', wc_rev='-', copied='+'),
+      'A/B/E/alpha' : Item(status='M ', wc_rev='-', copied='+'),
+      'A/B/E/beta' : Item(status=' ', wc_rev='-', copied='+'),
+      'A/B/F' : Item(status=' ', wc_rev='-', copied='+'),
+      'A/mu' : Item(status=' ', wc_rev='-', copied='+'),
+      'A/C' : Item(status=' ', wc_rev='-', copied='+'),
+      'A/D' : Item(status=' ', wc_rev='-', copied='+'),
+      'A/D/gamma' : Item(status=' ', wc_rev='-', copied='+'),
+      'A/D/G' : Item(status=' ', wc_rev='-', copied='+'),
+      'A/D/G/pi' : Item(status=' ', wc_rev='-', copied='+'),
+      'A/D/G/rho' : Item(status=' ', wc_rev='-', copied='+'),
+      'A/D/G/tau' : Item(status=' ', wc_rev='-', copied='+'),
+      'A/D/H' : Item(status=' ', wc_rev='-', copied='+'),
+      'A/D/H/chi' : Item(status=' ', wc_rev='-', copied='+'),
+      'A/D/H/omega' : Item(status=' ', wc_rev='-', copied='+'),
+      'A/D/H/psi' : Item(status=' ', wc_rev='-', copied='+'),
+      'A/new_file' : Item(status='A ', wc_rev=0),
+      'iota' : Item(status=' ', wc_rev=2),
+    })
+    return expected_status
+
+  # The status of the new and old scenarios should be identical...
+  expected_status = get_status(wc2)
+  ### The following fails, as of Apr 6, 2010. The problem is that A/new_file
+  ### has been *added* within a copy, yet the wc_db datastore cannot
+  ### differentiate this from a copied-child. As a result, new_file is
+  ### reported as a (M)odified node, rather than (A)dded.
+  svntest.actions.run_and_verify_status(wc2, expected_status)
+
+  # ...except for the revision of the root of the WC and iota, because
+  # above 'A' was the target of the update, not the WC root.
+  expected_status = get_status(sbox.wc_dir)
+  expected_status.tweak('', 'iota', wc_rev=1)
+  svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status)
+
+  ### Do we need to do more to confirm we got what we want here?
+
+#----------------------------------------------------------------------
+def set_deep_depth_on_target_with_shallow_children(sbox):
+  "infinite --set-depth adds shallow children"
+
+  # Regardless of what depth the update target is at, if it has shallow
+  # subtrees and we update --set-depth infinity, these shallow subtrees
+  # should be populated.
+  #
+  # See http://svn.haxx.se/dev/archive-2009-04/0344.shtml.
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Some paths we'll care about
+  A_path = sbox.ospath('A')
+  B_path = sbox.ospath('A/B')
+  D_path = sbox.ospath('A/D')
+
+  # Trim the tree: Set A/B to depth empty and A/D to depth immediates.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/B/E' : Item(status='D '),
+    'A/B/lambda' : Item(status='D '),
+    'A/B/F' : Item(status='D '),
+    })
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.remove('A/B/F',
+                       'A/B/lambda',
+                       'A/B/E',
+                       'A/B/E/alpha',
+                       'A/B/E/beta')
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.remove('A/B/F',
+                         'A/B/lambda',
+                         'A/B/E',
+                         'A/B/E/alpha',
+                         'A/B/E/beta')
+
+  # Shrink A/B to depth 'empty'; all of its children disappear from the WC.
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        [], True,
+                                        '--set-depth', 'empty',
+                                        B_path)
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/D/G/pi' : Item(status='D '),
+    'A/D/G/rho' : Item(status='D '),
+    'A/D/G/tau' : Item(status='D '),
+    'A/D/H/chi' : Item(status='D '),
+    'A/D/H/omega' : Item(status='D '),
+    'A/D/H/psi' : Item(status='D '),
+    })
+
+  expected_status.remove('A/D/G/pi',
+                         'A/D/G/rho',
+                         'A/D/G/tau',
+                         'A/D/H/chi',
+                         'A/D/H/omega',
+                         'A/D/H/psi')
+
+  expected_disk.remove('A/D/G/pi',
+                       'A/D/G/rho',
+                       'A/D/G/tau',
+                       'A/D/H/chi',
+                       'A/D/H/omega',
+                       'A/D/H/psi')
+
+  # Shrink A/D to 'immediates': its direct children stay, grandchildren go.
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        [], True,
+                                        '--set-depth', 'immediates',
+                                        D_path)
+
+  # Now update A with --set-depth infinity. All the subtrees we
+  # removed above should come back.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/B/lambda' : Item(status='A '),
+    'A/B/F' : Item(status='A '),
+    'A/B/E' : Item(status='A '),
+    'A/B/E/alpha' : Item(status='A '),
+    'A/B/E/beta' : Item(status='A '),
+    'A/D/G/pi' : Item(status='A '),
+    'A/D/G/rho' : Item(status='A '),
+    'A/D/G/tau' : Item(status='A '),
+    'A/D/H/chi' : Item(status='A '),
+    'A/D/H/omega' : Item(status='A '),
+    'A/D/H/psi' : Item(status='A '),
+    })
+
+  expected_disk = svntest.main.greek_state.copy()
+
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        [], True,
+                                        '--set-depth', 'infinity',
+                                        A_path)
+
+#----------------------------------------------------------------------
+
+def update_wc_of_dir_to_rev_not_containing_this_dir(sbox):
+  "update wc of dir to rev not containing this dir"
+
+  sbox.build()
+
+  # Create working copy of 'A' directory
+  A_url = sbox.repo_url + "/A"
+  other_wc_dir = sbox.add_wc_path("other")
+  svntest.actions.run_and_verify_svn(None, [], "co", A_url, other_wc_dir)
+
+  # Delete 'A' directory from repository
+  svntest.actions.run_and_verify_svn(None, [], "rm", A_url, "-m", "")
+
+  # Try to update working copy of 'A' directory; HEAD no longer contains
+  # /A, so the update must fail with E160005 rather than wreck the WC.
+  svntest.actions.run_and_verify_svn(None,
+                                     "svn: E160005: Target path '/A' does not exist",
+                                     "up", other_wc_dir)
+
+#----------------------------------------------------------------------
+# Test for issue #3569 svn update --depth <DEPTH> allows making a working
+# copy incomplete.
+@Issue(3569)
+def update_empty_hides_entries(sbox):
+  "svn up --depth empty hides entries for next update"
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # Empty trees used for the r0 states below.
+  expected_disk_empty = []
+  expected_status_empty = []
+
+  expected_disk = svntest.main.greek_state.copy()
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+  # Update to revision 0 - Removes all files from WC
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        None,
+                                        expected_disk_empty,
+                                        expected_status_empty,
+                                        [], True,
+                                        '-r', '0',
+                                        wc_dir)
+
+  # Now update back to HEAD
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        None,
+                                        expected_disk,
+                                        expected_status,
+                                        [], True,
+                                        wc_dir)
+
+  # Update to revision 0 - Removes all files from WC
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        None,
+                                        expected_disk_empty,
+                                        expected_status_empty,
+                                        [], True,
+                                        '-r', '0',
+                                        wc_dir)
+
+  # Update the directory itself back to HEAD
+  # (--depth empty: only the root node is touched, so the WC stays empty)
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        None,
+                                        expected_disk_empty,
+                                        expected_status_empty,
+                                        [], True,
+                                        '--depth', 'empty',
+                                        wc_dir)
+
+  # Now update the rest back to head
+
+  # This operation is currently a NO-OP, because the WC-Crawler
+  # tells the repository that it contains a full tree of the HEAD
+  # revision.
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        None,
+                                        expected_disk,
+                                        expected_status,
+                                        check_props=True)
+
+#----------------------------------------------------------------------
+# Test for issue #3573 'local non-inheritable mergeinfo changes not
+# properly merged with updated mergeinfo'
+@SkipUnless(server_has_mergeinfo)
+def mergeinfo_updates_merge_with_local_mods(sbox):
+  "local mergeinfo changes are merged with updates"
+
+  # Copy A to A_COPY in r2, and make some changes to A_COPY in r3-r6.
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  expected_disk, expected_status = set_up_branch(sbox)
+
+  # Some paths we'll care about
+  A_path = sbox.ospath('A')
+  A_COPY_path = sbox.ospath('A_COPY')
+
+  # Merge -c3 from A to A_COPY at --depth empty, commit as r7.
+  ###
+  ### No, we are not checking the merge output for these simple
+  ### merges. This is already covered *TO DEATH* in merge_tests.py.
+  ###
+  svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'merge', '-c3', '--depth', 'empty',
+                                     sbox.repo_url + '/A', A_COPY_path)
+  svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+                                     'Merge r3 from A to A_COPY at depth empty',
+                                     wc_dir)
+  # Merge -c5 from A to A_COPY (at default --depth infinity), commit as r8.
+  svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'merge', '-c5',
+                                     sbox.repo_url + '/A', A_COPY_path)
+  svntest.actions.run_and_verify_svn(None, [], 'ci', '-m',
+                                     'Merge r5 from A to A_COPY', wc_dir)
+
+  # Update WC to r7, repeat merge of -c3 from A to A_COPY but this
+  # time do it at --depth infinity. Confirm that the mergeinfo
+  # on A_COPY is no longer inheritable.
+  svntest.actions.run_and_verify_svn(None, [], 'up', '-r7', wc_dir)
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'merge', '-c3', '--depth', 'infinity',
+                                     sbox.repo_url + '/A', A_COPY_path)
+  # '/A:3' (no trailing '*') means the r3 mergeinfo is now inheritable.
+  svntest.actions.run_and_verify_svn([A_COPY_path + " - /A:3\n"], [],
+                                     'pg', SVN_PROP_MERGEINFO, '-R',
+                                     A_COPY_path)
+
+  # Update the WC (to r8), the mergeinfo on A_COPY should now have both
+  # the local mod from the uncommitted merge (/A:3* --> /A:3) and the change
+  # brought down by the update (/A:3* --> /A:3*,5) leaving us with /A:3,5.
+  ### This was failing because of issue #3573. The local mergeinfo change
+  ### is reverted, leaving '/A:3*,5' on A_COPY.
+  svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir)
+  svntest.actions.run_and_verify_svn([A_COPY_path + " - /A:3,5\n"], [],
+                                     'pg', SVN_PROP_MERGEINFO, '-R',
+                                     A_COPY_path)
+
+#----------------------------------------------------------------------
+# A regression test for a 1.7-dev crash upon updating a WC to a different
+# revision when it contained an excluded dir.
+def update_with_excluded_subdir(sbox):
+  """update with an excluded subdir"""
+  sbox.build()
+
+  wc_dir = sbox.wc_dir
+
+  # NOTE(review): the os.path.join() wrapper here is redundant — a single
+  # argument is returned unchanged.
+  G = os.path.join(sbox.ospath('A/D/G'))
+
+  # Make the directory 'G' excluded.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/D/G' : Item(status='D '),
+  })
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.remove('A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau')
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.remove('A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau')
+  svntest.actions.run_and_verify_update(wc_dir, expected_output,
+                                        expected_disk, expected_status,
+                                        [], False,
+                                        '--set-depth=exclude', G)
+
+  # Commit a new revision so there is something to update to.
+  svntest.main.run_svn(None, 'mkdir', '-m', '', sbox.repo_url + '/New')
+
+  # Test updating the WC.  The excluded dir must stay excluded and the
+  # update must not crash (the 1.7-dev regression).
+  expected_output = svntest.wc.State(wc_dir, {
+    'New' : Item(status='A ') })
+  expected_disk.add({
+    'New' : Item() })
+  expected_status.add({
+    'New' : Item(status=' ') })
+  expected_status.tweak(wc_rev=2)
+  svntest.actions.run_and_verify_update(wc_dir, expected_output,
+                                        expected_disk, expected_status)
+
+#----------------------------------------------------------------------
+# Test for issue #3471 'svn up touches file w/ lock & svn:keywords property'
+@Issue(3471)
+def update_with_file_lock_and_keywords_property_set(sbox):
+  """update with file lock & keywords property set"""
+  sbox.build()
+
+  wc_dir = sbox.wc_dir
+
+  mu_path = sbox.ospath('A/mu')
+  # A keyword anchor plus svn:keywords and a lock are the preconditions
+  # under which issue #3471 re-translated (and re-timestamped) the file.
+  svntest.main.file_append(mu_path, '$Id$')
+  svntest.main.run_svn(None, 'ps', 'svn:keywords', 'Id', mu_path)
+  svntest.main.run_svn(None, 'lock', mu_path)
+  mu_ts_before_update = os.path.getmtime(mu_path)
+
+  # Make sure we are at a different timestamp to really notice a mtime change
+  time.sleep(1.1)
+
+  # Issue #3471 manifests itself here; The timestamp of 'mu' gets updated
+  # to the time of the last "svn up".
+  sbox.simple_update()
+  mu_ts_after_update = os.path.getmtime(mu_path)
+  if (mu_ts_before_update != mu_ts_after_update):
+    logger.warn("The timestamp of 'mu' before and after update does not match.")
+    raise svntest.Failure
+
+#----------------------------------------------------------------------
+# Updating a nonexistent or deleted path should be a successful no-op,
+# when there is no incoming change. In trunk@1035343, such an update
+# within a copied directory triggered an assertion failure.
+@Issue(3807)
+def update_nonexistent_child_of_copy(sbox):
+  """update a nonexistent child of a copied dir"""
+  sbox.build()
+  # Note: the test runs with CWD inside the WC and never chdirs back;
+  # presumably the harness resets CWD between tests — TODO confirm.
+  os.chdir(sbox.wc_dir)
+
+  svntest.main.run_svn(None, 'copy', 'A', 'A2')
+
+  # Try updating a nonexistent path in the copied dir.
+  expected_output = svntest.wc.State('A2', {
+    'nonexistent' : Item(verb='Skipped'),
+  })
+  svntest.actions.run_and_verify_update(os.path.join('A2', 'nonexistent'),
+                                        expected_output, None, None)
+
+  # Try updating a deleted path in the copied dir.
+  svntest.main.run_svn(None, 'delete', os.path.join('A2', 'mu'))
+
+  expected_output = svntest.wc.State('A2', {
+    'mu' : Item(verb='Skipped'),
+  })
+  svntest.actions.run_and_verify_update(os.path.join('A2', 'mu'),
+                                        expected_output, None, None)
+  # The update must not bring the deleted file back on disk.
+  if os.path.exists('A2/mu'):
+    raise svntest.Failure("A2/mu improperly revived")
+
+@Issue(3807)
+def revive_children_of_copy(sbox):
+  """undelete a child of a copied dir"""
+  sbox.build()
+  os.chdir(sbox.wc_dir)
+
+  # NOTE(review): single-argument os.path.join is a no-op wrapper; the
+  # strings are kept with '/' separators as written.
+  chi2_path = os.path.join('A2/D/H/chi')
+  psi2_path = os.path.join('A2/D/H/psi')
+
+  svntest.main.run_svn(None, 'copy', 'A', 'A2')
+  # Remove one child via svn and one directly on disk, then revert both.
+  svntest.main.run_svn(None, 'rm', chi2_path)
+  os.unlink(psi2_path)
+
+  svntest.main.run_svn(None, 'revert', chi2_path, psi2_path)
+  if not os.path.exists(chi2_path):
+    raise svntest.Failure('chi unexpectedly non-existent')
+  if not os.path.exists(psi2_path):
+    raise svntest.Failure('psi unexpectedly non-existent')
+
+@SkipUnless(svntest.main.is_os_windows)
+def skip_access_denied(sbox):
+  """access denied paths should be skipped"""
+
+  # We need something to lock the file. 'msvcrt' looks common on Windows
+  try:
+    import msvcrt
+  except ImportError:
+    raise svntest.Skip('python msvcrt library not available')
+
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  iota = sbox.ospath('iota')
+
+  svntest.main.file_write(iota, 'Q')
+  sbox.simple_commit()
+  sbox.simple_update() # Update to r2
+
+  # Open iota for writing to keep an handle open
+  f = open(iota, 'w')
+
+  # Write new text of exactly the same size to avoid the early out
+  # on a different size without properties.
+  f.write('R')
+  f.flush()
+
+  # And lock the first byte of the file, so svn's own write attempt
+  # during the downgrade to r1 gets an access-denied error.
+  msvcrt.locking(f.fileno(), 1, 1)
+
+  expected_output = svntest.wc.State(wc_dir, {
+    'iota' : Item(verb='Skipped'),
+  })
+
+  # Create expected status tree: iota isn't updated
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+  expected_status.tweak('iota', status='M ', wc_rev=2)
+
+  # And now check that update skips the path
+  # *and* status shows the path as modified.
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        None,
+                                        expected_status,
+                                        [], False,
+                                        wc_dir, '-r', '1')
+
+  # Closing the handle also releases the msvcrt byte-range lock.
+  f.close()
+
+def update_to_HEAD_plus_1(sbox):
+  "updating to HEAD+1 should fail"
+
+  # read_only: the repo stays at r1, so '-r 2' is one past HEAD.
+  sbox.build(read_only = True)
+  wc_dir = sbox.wc_dir
+
+  # Attempt the update, expecting an error. (Sometimes the error
+  # strings says "No such revision", sometimes "No such target
+  # revision".)
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        None, None, None,
+                                        ".*E160006.*No such.*revision.*",
+                                        False,
+                                        wc_dir, '-r', '2')
+
+  # Repeat against a WC whose root is a subdirectory of the repository.
+  other_wc = sbox.add_wc_path('other')
+  other_url = sbox.repo_url + '/A'
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'co', other_url, other_wc)
+  svntest.actions.run_and_verify_update(other_wc,
+                                        None, None, None,
+                                        ".*E160006.*No such.*revision.*",
+                                        False,
+                                        other_wc, '-r', '2')
+
+def update_moved_dir_leaf_del(sbox):
+  "update locally moved dir with leaf del"
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # r2: delete a leaf of E directly in the repository, then move E locally.
+  svntest.main.run_svn(False, 'rm', '-m', 'remove /A/B/E/alpha',
+                       sbox.repo_url + "/A/B/E/alpha")
+  sbox.simple_move("A/B/E", "A/B/E2")
+
+  # Produce a tree conflict by updating the working copy to the
+  # revision which removed A/B/E/alpha. The deletion collides with
+  # the local move of A/B/E to A/B/E2.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/B/E' : Item(status='  ', treeconflict='C'),
+    'A/B/E/alpha' : Item(status='  ', treeconflict='D'),
+  })
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.remove('A/B/E/alpha', 'A/B/E/beta', 'A/B/E')
+  expected_disk.add({
+    'A/B/E2' : Item(),
+    'A/B/E2/alpha' : Item(contents="This is the file 'alpha'.\n"),
+    'A/B/E2/beta' : Item(contents="This is the file 'beta'.\n"),
+  })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+  expected_status.add({
+    'A/B/E2' : Item(status='A ', copied='+', wc_rev='-',
+                    moved_from='A/B/E'),
+    'A/B/E2/beta' : Item(status=' ', copied='+', wc_rev='-'),
+    'A/B/E2/alpha' : Item(status=' ', copied='+', wc_rev='-'),
+  })
+  expected_status.remove('A/B/E/alpha')
+  expected_status.tweak('A/B/E', status='D ', treeconflict='C',
+                        moved_to='A/B/E2')
+  expected_status.tweak('A/B/E/beta', status='D ')
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        check_props=True)
+
+  # Now resolve the conflict, using --accept=mine-conflict applying
+  # the update to A/B/E2
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'resolve',
+                                     '--accept=mine-conflict',
+                                     sbox.ospath('A/B/E'))
+  expected_status.tweak('A/B/E', treeconflict=None)
+  # The incoming deletion of alpha is applied to the move destination.
+  expected_status.remove('A/B/E2/alpha')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+@Issue(3144,3630)
+# Like break_moved_dir_edited_leaf_del, but with --accept=mine-conflict
+def update_moved_dir_edited_leaf_del(sbox):
+  "update locally moved dir with edited leaf del"
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # r2: delete alpha in the repository; locally move E and edit the moved
+  # alpha so the later incoming delete hits a locally edited file.
+  svntest.main.run_svn(False, 'rm', '-m', 'remove /A/B/E/alpha',
+                       sbox.repo_url + "/A/B/E/alpha")
+  sbox.simple_move("A/B/E", "A/B/E2")
+  svntest.main.file_write(sbox.ospath('A/B/E2/alpha'),
+                          "This is a changed 'alpha'.\n")
+
+  # Produce a tree conflict by updating the working copy to the
+  # revision which removed A/B/E/alpha. The deletion collides with
+  # the local move of A/B/E to A/B/E2.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/B/E' : Item(status='  ', treeconflict='C'),
+    'A/B/E/alpha' : Item(status='  ', treeconflict='D'),
+  })
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.remove('A/B/E/alpha', 'A/B/E/beta', 'A/B/E')
+  expected_disk.add({
+    'A/B/E2' : Item(),
+    'A/B/E2/alpha' : Item(contents="This is a changed 'alpha'.\n"),
+    'A/B/E2/beta' : Item(contents="This is the file 'beta'.\n"),
+  })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+  expected_status.add({
+    'A/B/E2' : Item(status='A ', copied='+', wc_rev='-',
+                    moved_from='A/B/E'),
+    'A/B/E2/beta' : Item(status=' ', copied='+', wc_rev='-'),
+    'A/B/E2/alpha' : Item(status='M ', copied='+', wc_rev='-'),
+  })
+  expected_status.remove('A/B/E/alpha')
+  expected_status.tweak('A/B/E', status='D ', treeconflict='C',
+                        moved_to='A/B/E2')
+  expected_status.tweak('A/B/E/beta', status='D ')
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        check_props=True)
+
+  # Now resolve the conflict, using --accept=mine-conflict.
+  # This should apply the update to A/B/E2, and flag a tree
+  # conflict on A/B/E2/alpha (incoming delete vs. local edit)
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'resolve',
+                                     '--accept=mine-conflict',
+                                     sbox.ospath('A/B/E'))
+  expected_status.tweak('A/B/E', treeconflict=None)
+  expected_status.tweak('A/B/E2/alpha', status='A ', copied='+', wc_rev='-',
+                        entry_status=' ', treeconflict='C')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+def update_moved_dir_file_add(sbox):
+  "update locally moved dir with incoming file"
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  foo_path = "A/B/E/foo"
+  foo_content = "This is the file 'foo'.\n"
+
+  # r2: add a file inside E, then rewind the WC and move E locally so the
+  # later update's incoming add collides with the move.
+  svntest.main.file_write(sbox.ospath(foo_path), foo_content, 'wb')
+  sbox.simple_add(foo_path)
+  sbox.simple_commit()
+  # update to go back in time, before the last commit
+  svntest.main.run_svn(False, 'update', '-r', '1', wc_dir)
+  sbox.simple_move("A/B/E", "A/B/E2")
+
+  # Produce a tree conflict by updating the working copy to the
+  # revision which created A/B/E/foo. The addition collides with
+  # the local move of A/B/E to A/B/E2.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/B/E' : Item(status='  ', treeconflict='C'),
+    'A/B/E/foo' : Item(status='  ', treeconflict='A'),
+  })
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.remove('A/B/E/alpha', 'A/B/E/beta', 'A/B/E')
+  expected_disk.add({
+    'A/B/E2' : Item(),
+    'A/B/E2/alpha' : Item(contents="This is the file 'alpha'.\n"),
+    'A/B/E2/beta' : Item(contents="This is the file 'beta'.\n"),
+  })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+  expected_status.add({
+    'A/B/E/foo' : Item(status='D ', wc_rev='2'),
+    'A/B/E2' : Item(status='A ', copied='+', wc_rev='-',
+                    moved_from='A/B/E'),
+    'A/B/E2/beta' : Item(status=' ', copied='+', wc_rev='-'),
+    'A/B/E2/alpha' : Item(status=' ', copied='+', wc_rev='-'),
+  })
+  expected_status.tweak('A/B/E', status='D ', treeconflict='C',
+                        moved_to='A/B/E2')
+  expected_status.tweak('A/B/E/alpha', status='D ')
+  expected_status.tweak('A/B/E/beta', status='D ')
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        check_props=True)
+
+  # Now resolve the conflict, using --accept=mine-conflict.
+  # This should apply the update to A/B/E2, adding A/B/E2/foo.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'resolve',
+                                     '--accept=mine-conflict',
+                                     sbox.ospath('A/B/E'))
+  # the incoming file should auto-merge
+  expected_status.tweak('A/B/E', treeconflict=None)
+  expected_status.add({
+    'A/B/E2/foo' : Item(status=' ', copied='+', wc_rev='-'),
+  })
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+def update_moved_dir_dir_add(sbox):
+  "update locally moved dir with incoming dir"
+  sbox.build()
+  wc_dir = sbox.wc_dir
+  foo_path = "A/B/E/foo"
+  bar_path = "A/B/E/foo/bar"
+  bar_content = "This is the file 'bar'.\n"
+
+  # r2: add a subdirectory (with a file) inside E, rewind the WC, then
+  # move E locally so the incoming dir add collides with the move.
+  sbox.simple_mkdir(foo_path)
+  svntest.main.file_write(sbox.ospath(bar_path), bar_content, 'wb')
+  sbox.simple_add(bar_path)
+  sbox.simple_commit()
+  # update to go back in time, before the last commit
+  svntest.main.run_svn(False, 'update', '-r', '1', wc_dir)
+  sbox.simple_move("A/B/E", "A/B/E2")
+
+  # the incoming file should auto-merge
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/B/E' : Item(status='  ', treeconflict='C'),
+    'A/B/E/foo' : Item(status='  ', treeconflict='A'),
+    'A/B/E/foo/bar' : Item(status='  ', treeconflict='A'),
+  })
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.remove('A/B/E/alpha', 'A/B/E/beta', 'A/B/E')
+  expected_disk.add({
+    'A/B/E2' : Item(),
+    'A/B/E2/alpha' : Item(contents="This is the file 'alpha'.\n"),
+    'A/B/E2/beta' : Item(contents="This is the file 'beta'.\n"),
+  })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+  expected_status.tweak('A/B/E', 'A/B/E/alpha', 'A/B/E/beta', status='D ')
+  expected_status.tweak('A/B/E', treeconflict='C', moved_to='A/B/E2')
+  expected_status.add({
+    'A/B/E/foo' : Item(status='D ', wc_rev='2'),
+    'A/B/E/foo/bar' : Item(status='D ', wc_rev='2'),
+    'A/B/E2' : Item(status='A ', copied='+', wc_rev='-',
+                    moved_from='A/B/E'),
+    'A/B/E2/beta' : Item(status=' ', copied='+', wc_rev='-'),
+    'A/B/E2/alpha' : Item(status=' ', copied='+', wc_rev='-'),
+  })
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        check_props=True)
+  # Resolving recursively applies the incoming add to the move target.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'resolve',
+                                     '--recursive',
+                                     '--accept=mine-conflict', wc_dir)
+  expected_status.tweak(treeconflict=None)
+  expected_status.add({
+    'A/B/E2/foo' : Item(status=' ', copied='+', wc_rev='-'),
+    'A/B/E2/foo/bar' : Item(status=' ', copied='+', wc_rev='-'),
+  })
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+@Issue(4037)
+def update_moved_dir_file_move(sbox):
+  "update locally moved dir with incoming file move"
+  sbox.build()
+  wc_dir = sbox.wc_dir
+
+  # r2: move alpha out of E in the repository, rewind the WC, then move E
+  # locally so the incoming change collides with the local move.
+  sbox.simple_move("A/B/E/alpha", "A/B/F/alpha")
+  sbox.simple_commit()
+  # update to go back in time, before the previous commit
+  svntest.main.run_svn(False, 'update', '-r', '1', wc_dir)
+  sbox.simple_move("A/B/E", "A/B/E2")
+
+  # The incoming "move" creates a tree-conflict as an incoming change
+  # in a local move. We don't yet track moves on the server so we
+  # don't recognise the incoming change as a move.
+  expected_output = svntest.wc.State(wc_dir, {
+    'A/B/E' : Item(status='  ', treeconflict='C'),
+    'A/B/E/alpha' : Item(status='  ', treeconflict='D'),
+    'A/B/F/alpha' : Item(status='A '),
+  })
+  expected_disk = svntest.main.greek_state.copy()
+  expected_disk.remove('A/B/E/alpha', 'A/B/E/beta', 'A/B/E')
+  expected_disk.add({
+    'A/B/E2' : Item(),
+    'A/B/E2/alpha' : Item(contents="This is the file 'alpha'.\n"),
+    'A/B/E2/beta' : Item(contents="This is the file 'beta'.\n"),
+    'A/B/F/alpha' : Item(contents="This is the file 'alpha'.\n"),
+  })
+  expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+  expected_status.remove('A/B/E/alpha')
+  expected_status.tweak('A/B/E', status='D ', treeconflict='C',
+                        moved_to='A/B/E2')
+  expected_status.tweak('A/B/E/beta', status='D ')
+  expected_status.add({
+    'A/B/F/alpha' : Item(status=' ', wc_rev='2'),
+    'A/B/E2' : Item(status='A ', copied='+', wc_rev='-',
+                    moved_from='A/B/E'),
+    'A/B/E2/alpha' : Item(status=' ', copied='+', wc_rev='-'),
+    'A/B/E2/beta' : Item(status=' ', copied='+', wc_rev='-'),
+  })
+  svntest.actions.run_and_verify_update(wc_dir,
+                                        expected_output,
+                                        expected_disk,
+                                        expected_status,
+                                        check_props=True)
+
+  # The incoming change is a delete as we don't yet track server-side
+  # moves. Resolving the tree-conflict as "mine-conflict" applies the
+  # delete to the move destination.
+  svntest.actions.run_and_verify_svn(None, [],
+                                     'resolve',
+                                     '--accept=mine-conflict',
+                                     sbox.ospath('A/B/E'))
+
+  expected_status.tweak('A/B/E', treeconflict=None)
+  expected_status.remove('A/B/E2/alpha')
+  svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+@Issue(3144,3630)
+def update_move_text_mod(sbox):
+ "text mod to moved files"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ svntest.main.file_append(sbox.ospath('A/B/lambda'), "modified\n")
+ svntest.main.file_append(sbox.ospath('A/B/E/beta'), "modified\n")
+ sbox.simple_commit()
+ sbox.simple_update(revision=1)
+
+ sbox.simple_move("A/B/E", "A/E2")
+ sbox.simple_move("A/B/lambda", "A/lambda2")
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/E', 'A/B/E/alpha', 'A/B/E/beta', 'A/B/lambda',
+ status='D ')
+ expected_status.tweak('A/B/E', moved_to='A/E2')
+ expected_status.tweak('A/B/lambda', moved_to='A/lambda2')
+ expected_status.add({
+ 'A/E2' : Item(status='A ', copied='+', wc_rev='-',
+ moved_from='A/B/E'),
+ 'A/E2/alpha' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/E2/beta' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/lambda2' : Item(status='A ', copied='+', wc_rev='-',
+ moved_from='A/B/lambda'),
+ })
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/lambda' : Item(status=' ', treeconflict='C'),
+ 'A/B/E' : Item(status=' ', treeconflict='C'),
+ 'A/B/E/beta' : Item(status=' ', treeconflict='U'),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/E/alpha', 'A/B/E/beta', 'A/B/E', 'A/B/lambda')
+ expected_disk.add({
+ 'A/E2' : Item(),
+ 'A/E2/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'A/E2/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'A/lambda2' : Item(contents="This is the file 'lambda'.\n"),
+ })
+ expected_status.tweak(wc_rev=2)
+ expected_status.tweak('A/B/E', 'A/B/lambda', treeconflict='C')
+ expected_status.tweak('A/E2', 'A/E2/alpha', 'A/E2/beta', 'A/lambda2',
+ wc_rev='-')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ check_props=True)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'resolve',
+ '--recursive',
+ '--accept=mine-conflict',
+ wc_dir)
+
+ expected_status.tweak('A/B/E', 'A/B/lambda', treeconflict=None)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_disk.tweak('A/E2/beta',
+ contents="This is the file 'beta'.\nmodified\n"),
+ expected_disk.tweak('A/lambda2',
+ contents="This is the file 'lambda'.\nmodified\n"),
+ svntest.actions.verify_disk(wc_dir, expected_disk, check_props = True)
+
+
+@Issue(3144,3630)
+def update_nested_move_text_mod(sbox):
+ "text mod to moved file in moved dir"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ svntest.main.file_append(sbox.ospath('A/B/E/alpha'), "modified\n")
+ sbox.simple_commit()
+ sbox.simple_update(revision=1)
+
+ sbox.simple_move("A/B/E", "A/E2")
+ sbox.simple_move("A/E2/alpha", "A/alpha2")
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak('A/B/E', 'A/B/E/alpha', 'A/B/E/beta', status='D ')
+ expected_status.tweak('A/B/E', moved_to='A/E2')
+ expected_status.add({
+ 'A/E2' : Item(status='A ', copied='+', wc_rev='-',
+ moved_from='A/B/E'),
+ 'A/E2/alpha' : Item(status='D ', copied='+', wc_rev='-',
+ moved_to='A/alpha2'),
+ 'A/E2/beta' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/alpha2' : Item(status='A ', copied='+', wc_rev='-',
+ moved_from='A/E2/alpha'),
+ })
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(status=' ', treeconflict='C'),
+ 'A/B/E/alpha' : Item(status=' ', treeconflict='U'),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/E/alpha', 'A/B/E/beta', 'A/B/E')
+ expected_disk.add({
+ 'A/E2' : Item(),
+ 'A/E2/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'A/alpha2' : Item(contents="This is the file 'alpha'.\n"),
+ })
+ expected_status.tweak(wc_rev=2)
+ expected_status.tweak('A/B/E', treeconflict='C')
+ expected_status.tweak('A/E2', 'A/E2/alpha', 'A/E2/beta', 'A/alpha2',
+ wc_rev='-')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ check_props=True)
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'resolve',
+ '--recursive',
+ '--accept=mine-conflict',
+ wc_dir)
+
+ expected_status.tweak('A/B/E', treeconflict=None)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_disk.tweak('A/alpha2',
+ contents="This is the file 'alpha'.\nmodified\n"),
+ svntest.actions.verify_disk(wc_dir, expected_disk, check_props = True)
+
+def update_with_parents_and_exclude(sbox):
+ "bring a subtree in over an excluded path"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ # Now we are going to exclude A
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(status='D '),
+ })
+
+ expected_status = svntest.wc.State(wc_dir, {
+ '' : Item(status=' ', wc_rev='1'),
+ 'iota' : Item(status=' ', wc_rev='1'),
+ })
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ None,
+ expected_status,
+ [], False,
+ '--set-depth', 'exclude',
+ sbox.ospath('A'))
+
+ # Updating A/B with --parents must re-create the excluded parent A
+ # and bring back the A/B subtree underneath it.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(status='A '),
+ 'A/B' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ 'A/B/E' : Item(status='A '),
+ 'A/B/E/beta' : Item(status='A '),
+ 'A/B/E/alpha' : Item(status='A '),
+ 'A/B/lambda' : Item(status='A '),
+ })
+
+ expected_status = svntest.wc.State(wc_dir, {
+ '' : Item(status=' ', wc_rev='1'),
+ 'A' : Item(status=' ', wc_rev='1'),
+ 'A/B' : Item(status=' ', wc_rev='1'),
+ 'A/B/F' : Item(status=' ', wc_rev='1'),
+ 'A/B/E' : Item(status=' ', wc_rev='1'),
+ 'A/B/E/beta' : Item(status=' ', wc_rev='1'),
+ 'A/B/E/alpha' : Item(status=' ', wc_rev='1'),
+ 'A/B/lambda' : Item(status=' ', wc_rev='1'),
+ 'iota' : Item(status=' ', wc_rev='1'),
+ })
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ None,
+ expected_status,
+ [], False,
+ '--parents',
+ sbox.ospath('A/B'))
+
+@Issue(4288)
+def update_edit_delete_obstruction(sbox):
+ "obstructions shouldn't cause update failures"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # r2: delete A/B and iota, edit A/mu, add props on A/D and A/D/G
+ sbox.simple_rm('A/B','iota')
+ svntest.main.file_append(sbox.ospath('A/mu'), "File change")
+ sbox.simple_propset('key', 'value', 'A/D', 'A/D/G')
+ sbox.simple_commit()
+
+ # r3: replace iota with a dir, replace A/B with a file copy, delete chi
+ sbox.simple_mkdir('iota')
+ sbox.simple_copy('A/D/gamma', 'A/B')
+ sbox.simple_rm('A/D/H/chi')
+ sbox.simple_commit()
+
+ sbox.simple_update('', 1)
+
+ # Create obstructions: replace versioned dirs with files and
+ # versioned files with dirs on disk, without telling svn.
+ svntest.main.safe_rmtree(sbox.ospath('A/B'))
+ svntest.main.file_append(sbox.ospath('A/B'), "Obstruction")
+
+ svntest.main.safe_rmtree(sbox.ospath('A/D'))
+ svntest.main.file_append(sbox.ospath('A/D'), "Obstruction")
+
+ os.remove(sbox.ospath('iota'))
+ os.mkdir(sbox.ospath('iota'))
+
+ os.remove(sbox.ospath('A/mu'))
+ os.mkdir(sbox.ospath('A/mu'))
+
+ # '~' status: node kind on disk differs from the versioned kind.
+ expected_status = svntest.wc.State(wc_dir, {
+ '' : Item(status=' ', wc_rev='2'),
+ 'A' : Item(status=' ', wc_rev='2'),
+ 'A/mu' : Item(status='~ ', treeconflict='C', wc_rev='2'),
+ 'A/D' : Item(status='~ ', treeconflict='C', wc_rev='2'),
+ 'A/D/G' : Item(status='! ', wc_rev='2'),
+ 'A/D/G/pi' : Item(status='! ', wc_rev='2'),
+ 'A/D/G/tau' : Item(status='! ', wc_rev='2'),
+ 'A/D/G/rho' : Item(status='! ', wc_rev='2'),
+ 'A/D/H' : Item(status='! ', wc_rev='2'),
+ 'A/D/H/omega' : Item(status='! ', wc_rev='2'),
+ 'A/D/H/chi' : Item(status='! ', wc_rev='2'),
+ 'A/D/H/psi' : Item(status='! ', wc_rev='2'),
+ 'A/D/gamma' : Item(status='! ', wc_rev='2'),
+ 'A/C' : Item(status=' ', wc_rev='2'),
+ 'A/B' : Item(status='~ ', treeconflict='C', wc_rev='-',
+ entry_status='A ', entry_copied='+'),
+ 'A/B/F' : Item(status='! ', wc_rev='-', entry_copied='+'),
+ 'A/B/E' : Item(status='! ', wc_rev='-', entry_copied='+'),
+ 'A/B/E/beta' : Item(status='! ', wc_rev='-', entry_copied='+'),
+ 'A/B/E/alpha' : Item(status='! ', wc_rev='-', entry_copied='+'),
+ 'A/B/lambda' : Item(status='! ', wc_rev='-', entry_copied='+'),
+ 'iota' : Item(status='~ ', treeconflict='C', wc_rev='-',
+ entry_status='A ', entry_copied='+'),
+ })
+ expected_disk = svntest.wc.State('', {
+ 'A/D' : Item(contents="Obstruction", props={'key':'value'}),
+ 'A/C' : Item(),
+ 'A/B' : Item(contents="Obstruction"),
+ 'A/mu' : Item(),
+ 'iota' : Item(),
+ })
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'iota' : Item(status=' ', treeconflict='C'),
+ 'A/mu' : Item(status=' ', treeconflict='C'),
+ 'A/D' : Item(status=' ', treeconflict='C'),
+ 'A/D/G' : Item(status=' ', treeconflict='U'),
+ 'A/B' : Item(status=' ', treeconflict='C'),
+ })
+
+ # And now update to delete B and iota
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True,
+ '-r', '2', wc_dir)
+
+ # Cleanup obstructions
+ os.remove(sbox.ospath('A/B'))
+ os.remove(sbox.ospath('A/D'))
+ os.rmdir(sbox.ospath('iota'))
+ os.rmdir(sbox.ospath('A/mu'))
+
+ # Revert to remove working nodes and tree conflicts
+ svntest.actions.run_and_verify_svn(None, [],
+ 'revert', '-R',
+ sbox.ospath('A/B'),
+ sbox.ospath('A/mu'),
+ sbox.ospath('A/D'),
+ sbox.ospath('iota'))
+ sbox.simple_update('', 1)
+
+ # Now obstruct A (as parent of the changed node), and retry
+ svntest.main.safe_rmtree(sbox.ospath('A'))
+ svntest.main.file_append(sbox.ospath('A'), "Obstruction")
+
+ # And now update to delete B and iota
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(status=' ', treeconflict='C'),
+ 'A/mu' : Item(status=' ', treeconflict='U'),
+ 'A/D' : Item(status=' ', treeconflict='U'),
+ 'A/D/G' : Item(status=' ', treeconflict='U'),
+ 'A/D/H' : Item(status=' ', treeconflict='U'),
+ 'A/D/H/chi' : Item(status=' ', treeconflict='D'),
+ 'A/B' : Item(prev_status=' ', prev_treeconflict='D', # Replacement
+ status=' ', treeconflict='A'),
+ 'iota' : Item(status='A ', prev_status='D '), # Replacement
+ })
+
+ expected_disk = svntest.wc.State('', {
+ 'A' : Item(contents="Obstruction"),
+ 'iota' : Item(),
+ })
+
+ expected_status = svntest.wc.State(wc_dir, {
+ '' : Item(status=' ', wc_rev='3'),
+ 'A' : Item(status='~ ', treeconflict='C', wc_rev='3'),
+ 'A/mu' : Item(status='! ', wc_rev='3'),
+ 'A/D' : Item(status='! ', wc_rev='3'),
+ 'A/D/G' : Item(status='! ', wc_rev='3'),
+ 'A/D/G/rho' : Item(status='! ', wc_rev='3'),
+ 'A/D/G/pi' : Item(status='! ', wc_rev='3'),
+ 'A/D/G/tau' : Item(status='! ', wc_rev='3'),
+ 'A/D/gamma' : Item(status='! ', wc_rev='3'),
+ 'A/D/H' : Item(status='! ', wc_rev='3'),
+ 'A/D/H/psi' : Item(status='! ', wc_rev='3'),
+ 'A/D/H/omega' : Item(status='! ', wc_rev='3'),
+ 'A/C' : Item(status='! ', wc_rev='3'),
+ 'A/B' : Item(status='! ', wc_rev='3'),
+ 'iota' : Item(status=' ', wc_rev='3'),
+ })
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True,
+ '-r', '3', wc_dir)
+
+def update_deleted(sbox):
+ "update a deleted tree"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+ sbox.simple_rm('A')
+
+ # Nothing is expected to change: the whole anchor tree is scheduled
+ # for deletion.
+ expected_output = svntest.wc.State(wc_dir, {
+ })
+
+ expected_status = svntest.wc.State(wc_dir, {
+ })
+
+ # This runs an update anchored on A, which is deleted. The update editor
+ # shouldn't look at the ACTUAL/WORKING data in this case, but in 1.7 it did.
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ None,
+ None,
+ [], True,
+ sbox.ospath('A/B'))
+
+@Issue(3144,3630)
+# Like update_moved_dir_edited_leaf_del, but resolved with --accept=working
+# (the code below uses 'working'), which breaks the local move rather than
+# following it.
+def break_moved_dir_edited_leaf_del(sbox):
+ "break local move of dir with edited leaf del"
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # r2: delete A/B/E/alpha directly in the repository.
+ svntest.main.run_svn(False, 'rm', '-m', 'remove /A/B/E/alpha',
+ sbox.repo_url + "/A/B/E/alpha")
+ sbox.simple_move("A/B/E", "A/B/E2")
+ svntest.main.file_write(sbox.ospath('A/B/E2/alpha'),
+ "This is a changed 'alpha'.\n")
+
+ # Produce a tree conflict by updating the working copy to the
+ # revision which removed A/B/E/alpha. The deletion collides with
+ # the local move of A/B/E to A/B/E2.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(status=' ', treeconflict='C'),
+ 'A/B/E/alpha' : Item(status=' ', treeconflict='D'),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/E/alpha', 'A/B/E/beta', 'A/B/E')
+ expected_disk.add({
+ 'A/B/E2' : Item(),
+ 'A/B/E2/alpha' : Item(contents="This is a changed 'alpha'.\n"),
+ 'A/B/E2/beta' : Item(contents="This is the file 'beta'.\n"),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'A/B/E2' : Item(status='A ', copied='+', wc_rev='-',
+ moved_from='A/B/E'),
+ 'A/B/E2/beta' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B/E2/alpha' : Item(status='M ', copied='+', wc_rev='-'),
+ })
+ expected_status.remove('A/B/E/alpha')
+ expected_status.tweak('A/B/E', status='D ', treeconflict='C',
+ moved_to='A/B/E2')
+ expected_status.tweak('A/B/E/beta', status='D ')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ check_props=True)
+
+ # Now resolve the conflict, using --accept=working
+ # This should break the move of A/B/E to A/B/E2, leaving A/B/E2
+ # as a copy. The deletion of A/B/E is not reverted.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'resolve', '--recursive',
+ '--accept=working', wc_dir)
+ expected_status.tweak('A/B/E', treeconflict=None, moved_to=None)
+ expected_status.tweak('A/B/E2', moved_from=None)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+@Issue(3144,3630)
+def break_moved_replaced_dir(sbox):
+ "break local move of dir plus replace"
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # r2: delete A/B/E/alpha directly in the repository.
+ svntest.main.run_svn(False, 'rm', '-m', 'remove /A/B/E/alpha',
+ sbox.repo_url + "/A/B/E/alpha")
+ sbox.simple_move("A/B/E", "A/B/E2")
+ svntest.main.file_write(sbox.ospath('A/B/E2/alpha'),
+ "This is a changed 'alpha'.\n")
+
+ # Locally replace A/B/E with something else
+ sbox.simple_copy('A/D/H', 'A/B/E')
+
+ # Produce a tree conflict by updating the working copy to the
+ # revision which removed A/B/E/alpha. The deletion collides with
+ # the local move of A/B/E to A/B/E2.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(status=' ', treeconflict='C'),
+ 'A/B/E/alpha' : Item(status=' ', treeconflict='D'),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/E/alpha', 'A/B/E/beta')
+ expected_disk.add({
+ 'A/B/E/chi' : Item(contents="This is the file 'chi'.\n"),
+ 'A/B/E/psi' : Item(contents="This is the file 'psi'.\n"),
+ 'A/B/E/omega' : Item(contents="This is the file 'omega'.\n"),
+ 'A/B/E2' : Item(),
+ 'A/B/E2/alpha' : Item(contents="This is a changed 'alpha'.\n"),
+ 'A/B/E2/beta' : Item(contents="This is the file 'beta'.\n"),
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'A/B/E/chi' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B/E/psi' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B/E/omega' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B/E2' : Item(status='A ', copied='+', wc_rev='-',
+ moved_from='A/B/E'),
+ 'A/B/E2/beta' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B/E2/alpha' : Item(status='M ', copied='+', wc_rev='-'),
+ })
+ expected_status.remove('A/B/E/alpha')
+ # 'R ': A/B/E is replaced (moved away, then copied over from A/D/H).
+ expected_status.tweak('A/B/E', status='R ', copied='+', wc_rev='-',
+ treeconflict='C', moved_to='A/B/E2')
+ expected_status.tweak('A/B/E/beta', status='D ')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ check_props=True)
+
+ # Now resolve the conflict, using --accept=working
+ # This should break the move of A/B/E to A/B/E2, leaving A/B/E2
+ # as a copy. A/B/E is not reverted.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'resolve', '--recursive',
+ '--accept=working', wc_dir)
+ expected_status.tweak('A/B/E2', moved_from=None)
+ expected_status.tweak('A/B/E', treeconflict=None, moved_to=None)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+@Issue(4295)
+def update_removes_switched(sbox):
+ "update completely removes switched node"
+
+ sbox.build(create_wc = False)
+
+ wc_dir = sbox.wc_dir
+ repo_url = sbox.repo_url
+
+ # r2: branch A to AA; check out A and switch B to the branch copy.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'cp', repo_url + '/A',
+ repo_url + '/AA', '-m', 'Q')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'co', repo_url + '/A', sbox.wc_dir)
+ svntest.actions.run_and_verify_svn(None, [],
+ 'switch', repo_url + '/AA/B',
+ wc_dir + '/B')
+
+ # r3: remove the switch target in the repository.
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', repo_url + '/AA/B', '-m', 'Q')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'B' : Item(status='D '),
+ })
+ expected_status = svntest.wc.State(wc_dir, {
+ '' : Item(status=' ', wc_rev='3'),
+ 'D' : Item(status=' ', wc_rev='3'),
+ 'D/G' : Item(status=' ', wc_rev='3'),
+ 'D/G/rho' : Item(status=' ', wc_rev='3'),
+ 'D/G/pi' : Item(status=' ', wc_rev='3'),
+ 'D/G/tau' : Item(status=' ', wc_rev='3'),
+ 'D/H' : Item(status=' ', wc_rev='3'),
+ 'D/H/omega' : Item(status=' ', wc_rev='3'),
+ 'D/H/chi' : Item(status=' ', wc_rev='3'),
+ 'D/H/psi' : Item(status=' ', wc_rev='3'),
+ 'D/gamma' : Item(status=' ', wc_rev='3'),
+ 'C' : Item(status=' ', wc_rev='3'),
+ 'mu' : Item(status=' ', wc_rev='3'),
+ })
+
+ # Before r1435684 the inherited properties code would try to fetch
+ # inherited properties for ^/AA/B and fail.
+ #
+ # The inherited properties fetch code would then bail and forget to reset
+ # the ra-session URL back to its original value.
+ #
+ # After that the update code (which ignored the specific error code)
+ # then continued the update against /AA/B (url of missing switched path)
+ # instead of against A (the working copy url).
+
+ # This update removes 'A/B', since its in-repository location is removed.
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ None,
+ expected_status)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'B' : Item(status='A '),
+ 'B/lambda' : Item(status='A '),
+ 'B/E' : Item(status='A '),
+ 'B/E/alpha' : Item(status='A '),
+ 'B/E/beta' : Item(status='A '),
+ 'B/F' : Item(status='A '),
+ })
+ expected_status = svntest.wc.State(wc_dir, {
+ '' : Item(status=' ', wc_rev='3'),
+ 'D' : Item(status=' ', wc_rev='3'),
+ 'D/G' : Item(status=' ', wc_rev='3'),
+ 'D/G/rho' : Item(status=' ', wc_rev='3'),
+ 'D/G/pi' : Item(status=' ', wc_rev='3'),
+ 'D/G/tau' : Item(status=' ', wc_rev='3'),
+ 'D/H' : Item(status=' ', wc_rev='3'),
+ 'D/H/omega' : Item(status=' ', wc_rev='3'),
+ 'D/H/chi' : Item(status=' ', wc_rev='3'),
+ 'D/H/psi' : Item(status=' ', wc_rev='3'),
+ 'D/gamma' : Item(status=' ', wc_rev='3'),
+ 'B' : Item(status=' ', wc_rev='3'),
+ 'B/E' : Item(status=' ', wc_rev='3'),
+ 'B/E/alpha' : Item(status=' ', wc_rev='3'),
+ 'B/E/beta' : Item(status=' ', wc_rev='3'),
+ 'B/F' : Item(status=' ', wc_rev='3'),
+ 'B/lambda' : Item(status=' ', wc_rev='3'),
+ 'C' : Item(status=' ', wc_rev='3'),
+ 'mu' : Item(status=' ', wc_rev='3'),
+ })
+
+ # And this final update brings back the node, as it was before switching.
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ None,
+ expected_status)
+
+@Issue(3192)
+def incomplete_overcomplete(sbox):
+ "verify editor v1 incomplete behavior"
+
+ sbox.build()
+
+ wc_dir = sbox.wc_dir
+ repo_dir = sbox.repo_dir
+ repo_url = sbox.repo_url
+
+ # r2 - Make sure we have some dir properties in a clean wc
+ sbox.simple_rm('A', 'iota')
+ sbox.simple_propset('keep', 'keep-value', '')
+ sbox.simple_propset('del', 'del-value', '')
+ sbox.simple_commit()
+
+ # r3 - Perform some changes that will be undone later
+ sbox.simple_mkdir('ADDED-dir')
+ sbox.simple_add_text('The added file', 'added-file')
+ sbox.simple_propset('prop-added', 'value', '')
+ sbox.simple_commit('')
+ sbox.simple_update('')
+
+ r3_disk = svntest.wc.State('', {
+ 'added-file' : Item(contents="The added file"),
+ '.' : Item(props={'prop-added':'value', 'del':'del-value', 'keep':'keep-value'}),
+ 'ADDED-dir' : Item(),
+ })
+
+ r3_status = svntest.wc.State(wc_dir, {
+ '' : Item(status=' ', wc_rev='3'),
+ 'ADDED-dir' : Item(status=' ', wc_rev='3'),
+ 'added-file' : Item(status=' ', wc_rev='3'),
+ })
+
+ # Verify assumptions for later check
+ svntest.actions.run_and_verify_status(wc_dir, r3_status)
+ svntest.actions.verify_disk(wc_dir, r3_disk, check_props = True)
+
+
+ # r4 - And we undo r3
+ sbox.simple_rm('ADDED-dir', 'added-file')
+ sbox.simple_propdel('prop-added', '')
+ sbox.simple_commit('')
+
+ # r5 - Create some alternate changes
+ sbox.simple_mkdir('NOT-ADDED-dir')
+ sbox.simple_add_text('The not added file', 'not-added-file')
+ sbox.simple_propset('prop-not-added', 'value', '')
+ sbox.simple_commit('')
+
+ # Nothing to do to bring the wc to single revision
+ expected_output = svntest.wc.State(wc_dir, {
+ })
+
+ r5_disk = svntest.wc.State('', {
+ '' : Item(props={'prop-not-added':'value',
+ 'del':'del-value',
+ 'keep':'keep-value'}),
+ 'NOT-ADDED-dir' : Item(),
+ 'not-added-file' : Item(contents="The not added file"),
+ })
+
+ expected_status = svntest.wc.State(wc_dir, {
+ '' : Item(status=' ', wc_rev='5'),
+ 'NOT-ADDED-dir' : Item(status=' ', wc_rev='5'),
+ 'not-added-file' : Item(status=' ', wc_rev='5'),
+ })
+
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ r5_disk,
+ expected_status,
+ check_props=True)
+
+ # And now we mark the directory incomplete, as if the update had failed
+ # half-way through an update to r3
+ svntest.actions.set_incomplete(wc_dir, 3)
+
+ # Tweak status to verify us breaking the wc
+ expected_status.tweak('', status='! ', wc_rev=3)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # But the working copy is still 100% at r5
+ svntest.actions.verify_disk(wc_dir, r5_disk, check_props = True)
+
+ # And expect update to do the right thing even though r3 is already encoded
+ # in the parent. This includes fixing the list of children (reported to the
+ # server, which will report adds and deletes) and fixing the property list
+ # (received all; client should delete properties that shouldn't be here)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ '' : Item(status=' U'),
+ 'not-added-file' : Item(status='D '),
+ 'ADDED-dir' : Item(status='A '),
+ 'added-file' : Item(status='A '),
+ 'NOT-ADDED-dir' : Item(status='D '),
+ })
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ r3_disk,
+ r3_status,
+ [], True,
+ wc_dir, '-r', 3)
+
+@Issue(4300)
+def update_swapped_depth_dirs(sbox):
+ "text mod to file in swapped depth dir"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ sbox.build()
+ wc_dir = sbox.wc_dir
+ svntest.main.file_append(sbox.ospath('A/B/E/alpha'), "modified\n")
+ sbox.simple_commit()
+ sbox.simple_update(revision=1)
+
+ sbox.simple_move("A/B/E", "A/E")
+ sbox.simple_move("A/B", "A/E/B")
+ # This is almost certainly not the right status but it's what
+ # is currently being output so we're using it here so we
+ # can get to the deeper problem.
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.tweak("A/B", "A/B/lambda", "A/B/F", "A/B/E",
+ "A/B/E/alpha", "A/B/E/beta", status="D ")
+ expected_status.tweak("A/B", moved_to="A/E/B")
+ expected_status.add({
+ 'A/E' : Item(status='A ', copied='+', wc_rev='-',
+ moved_from='A/E/B/E'),
+ 'A/E/B' : Item(status='A ', copied='+', wc_rev='-',
+ moved_from='A/B'),
+ 'A/E/B/E' : Item(status='D ', copied='+', wc_rev='-',
+ moved_to='A/E'),
+ 'A/E/B/F' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/E/B/lambda' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/E/alpha' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/E/beta' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/E/B/E/alpha': Item(status='D ', copied='+', wc_rev='-'),
+ 'A/E/B/E/beta' : Item(status='D ', copied='+', wc_rev='-'),
+ })
+
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B' : Item(status=' ', treeconflict='C'),
+ 'A/B/E' : Item(status=' ', treeconflict='U'),
+ 'A/B/E/alpha' : Item(status=' ', treeconflict='U'),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B', 'A/B/lambda', 'A/B/F', 'A/B/E',
+ 'A/B/E/alpha', 'A/B/E/beta')
+ expected_disk.add({
+ 'A/E' : Item(),
+ 'A/E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'A/E/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'A/E/B' : Item(),
+ 'A/E/B/lambda' : Item(contents="This is the file 'lambda'.\n"),
+ 'A/E/B/F' : Item(),
+ })
+ expected_status.tweak(wc_rev=2)
+ expected_status.tweak('A/B', treeconflict='C')
+ expected_status.tweak('A/E', 'A/E/alpha', 'A/E/beta', 'A/E/B',
+ 'A/E/B/E', 'A/E/B/E/alpha', 'A/E/B/E/beta',
+ 'A/E/B/lambda', 'A/E/B/F', wc_rev='-')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ check_props=True)
+
+def move_update_props(sbox):
+ "move-update with property mods"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Commit some 'future' property changes
+ sbox.simple_propset('propertyA', 'value1',
+ 'A/B', 'A/B/E', 'A/B/E/alpha', 'A/B/E/beta')
+ sbox.simple_commit()
+ sbox.simple_propset('propertyB', 'value2',
+ 'A/B', 'A/B/E', 'A/B/E/alpha', 'A/B/E/beta')
+ sbox.simple_commit()
+ sbox.simple_update(revision=1)
+
+ # Make some local property changes
+ sbox.simple_propset('propertyB', 'value3',
+ 'A/B/E', 'A/B/E/beta')
+
+ sbox.simple_move("A/B", "A/B2")
+
+ # Update and expect a conflict
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B' : Item(status=' ', treeconflict='C'),
+ 'A/B/E' : Item(status=' ', treeconflict='U'),
+ 'A/B/E/alpha' : Item(status=' ', treeconflict='U'),
+ 'A/B/E/beta' : Item(status=' ', treeconflict='U'),
+ })
+ expected_disk = svntest.main.greek_state.copy()
+ expected_disk.remove('A/B/E/alpha', 'A/B/E/beta', 'A/B/E',
+ 'A/B/lambda', 'A/B/F', 'A/B')
+ expected_disk.add({
+ 'A/B2' : Item(),
+ 'A/B2/E' : Item(),
+ 'A/B2/E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'A/B2/E/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'A/B2/F' : Item(),
+ 'A/B2/lambda' : Item(contents="This is the file 'lambda'.\n"),
+ })
+ expected_disk.tweak('A/B2/E', 'A/B2/E/beta', props={'propertyB':'value3'})
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.tweak('A/B', status='D ', treeconflict='C', moved_to='A/B2')
+ expected_status.tweak('A/B/E', 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/B/F', 'A/B/lambda', status='D ')
+ expected_status.add({
+ 'A/B2' : Item(status='A ', copied='+', wc_rev='-',
+ moved_from='A/B'),
+ 'A/B2/E' : Item(status=' M', copied='+', wc_rev='-'),
+ 'A/B2/E/beta' : Item(status=' M', copied='+', wc_rev='-'),
+ 'A/B2/E/alpha' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B2/F' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B2/lambda' : Item(status=' ', copied='+', wc_rev='-'),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True,
+ '-r', '2', wc_dir)
+
+ # Resolve conflict moving changes to destination without conflict
+ svntest.actions.run_and_verify_svn(None, [],
+ 'resolve',
+ '--accept=mine-conflict',
+ sbox.ospath('A/B'))
+
+ expected_status.tweak('A/B', treeconflict=None)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ # The r2 propertyA changes must now be present on the move destination.
+ expected_disk.tweak('A/B2', 'A/B2/E/alpha', props={'propertyA' : 'value1'})
+ expected_disk.tweak('A/B2/E', 'A/B2/E/beta', props={'propertyA' : 'value1',
+ 'propertyB':'value3'})
+ svntest.actions.verify_disk(wc_dir, expected_disk, check_props = True)
+
+ # Further update and expect a conflict.
+ expected_status.tweak('A/B', status='D ', treeconflict='C', moved_to='A/B2')
+ expected_status.tweak(wc_rev=3)
+ expected_status.tweak( 'A/B2', 'A/B2/E', 'A/B2/E/beta', 'A/B2/E/alpha',
+ 'A/B2/F', 'A/B2/lambda', wc_rev='-')
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], True,
+ '-r', '3', wc_dir)
+
+ # Resolve conflict moving changes and raising property conflicts
+ # (incoming propertyB=value2 collides with local value3).
+ svntest.actions.run_and_verify_svn(None, [],
+ 'resolve',
+ '--accept=mine-conflict',
+ sbox.ospath('A/B'))
+
+ expected_status.tweak('A/B', treeconflict=None)
+ expected_status.tweak('A/B2/E', 'A/B2/E/beta', status=' C')
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+ expected_disk.tweak('A/B2', 'A/B2/E/alpha', props={'propertyA' : 'value1',
+ 'propertyB' : 'value2'})
+ expected_disk.tweak('A/B2/E', 'A/B2/E/beta', props={'propertyA' : 'value1',
+ 'propertyB' : 'value3'})
+ extra_files = ['dir_conflicts.prej', 'beta.prej']
+ svntest.actions.verify_disk(wc_dir, expected_disk, True,
+ extra_files=extra_files)
+
+@Issues(3288)
+@SkipUnless(svntest.main.is_os_windows)
+def windows_update_backslash(sbox):
+ "test filename with backslashes inside"
+
+ sbox.build()
+
+ wc_dir = sbox.wc_dir
+
+ mucc_url = sbox.repo_url
+
+ if mucc_url.startswith('http'):
+ # Apache Httpd doesn't allow creating paths with '\\' in them on Windows
+ # AH00026: found %2f (encoded '/') in URI (decoded='/svn-test-work/repositories/authz_tests-30/!svn/ver/2/A/completely\\unusable\\dir'), returning 404
+ #
+ # Let's use file:// to work around.
+ mucc_url = 'file:///' + os.path.abspath(sbox.repo_dir).replace('\\', '/')
+
+ # Create a directory whose name contains backslashes in the repository.
+ svntest.actions.run_and_verify_svnmucc(None, [],
+ '-U', mucc_url,
+ '-m', '',
+ 'mkdir', 'A/completely\\unusable\\dir')
+
+ # No error and a proper skip + recording in the working copy would also
+ # be a good result. This just verifies current behavior:
+ #
+ # - Error via file://, svn:// or http:// with SVNPathAuthz short_circuit
+ #
+ # - No error via http:// with SVNPathAuthz on
+ # (The reason is that Apache Httpd doesn't allow paths with '\\' in
+ # them on Windows, and a subrequest-based access check returns 404.
+ # This makes mod_dav_svn report the path as server excluded (aka
+ # absent), which doesn't produce output when updating.)
+ #
+ # Since https://issues.apache.org/jira/browse/SVN-3288 is about a crash,
+ # we're fine with either result -- that is, if `svn update' finished
+ # without an error, we expect specific stdout and proper wc state.
+ # If it failed, we expect to get the following error:
+ #
+ # svn: E155000: 'completely\unusable\dir' is not valid as filename
+ # in directory [...]
+ #
+ exit_code, output, errput = svntest.main.run_svn(1, 'up', wc_dir)
+ if exit_code == 0:
+ verify.verify_outputs("Unexpected output", output, errput, [
+ "Updating '%s':\n" % wc_dir,
+ "At revision 2.\n"
+ ], [])
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+ elif exit_code == 1:
+ verify.verify_outputs("Unexpected output", output, errput,
+ None, 'svn: E155000: .* is not valid.*')
+ else:
+ raise verify.SVNUnexpectedExitCode(exit_code)
+
+def update_moved_away(sbox):
+ "update subtree of moved away"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_add_text('new', 'new')
+ sbox.simple_commit()
+
+ sbox.simple_move('A', 'A_moved')
+
+ # Adding prev_status=' ' and prev_treeconflict='C' to A will make
+ # the test PASS but why are we getting two conflicts?
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(status=' ', treeconflict='C'),
+ })
+
+ expected_disk = None
+ expected_status = svntest.wc.State(wc_dir, {
+ '' : Item(status=' ', wc_rev='1'),
+ 'A' : Item(status='D ', wc_rev='1', moved_to='A_moved',
+ treeconflict='C'),
+ 'A/B' : Item(status='D ', wc_rev='1'),
+ 'A/B/E' : Item(status='D ', wc_rev='2'),
+ 'A/B/E/beta' : Item(status='D ', wc_rev='2'),
+ 'A/B/E/alpha' : Item(status='D ', wc_rev='2'),
+ 'A/B/F' : Item(status='D ', wc_rev='1'),
+ 'A/B/lambda' : Item(status='D ', wc_rev='1'),
+ 'A/D' : Item(status='D ', wc_rev='1'),
+ 'A/D/G' : Item(status='D ', wc_rev='1'),
+ 'A/D/G/pi' : Item(status='D ', wc_rev='1'),
+ 'A/D/G/tau' : Item(status='D ', wc_rev='1'),
+ 'A/D/G/rho' : Item(status='D ', wc_rev='1'),
+ 'A/D/H' : Item(status='D ', wc_rev='1'),
+ 'A/D/H/psi' : Item(status='D ', wc_rev='1'),
+ 'A/D/H/chi' : Item(status='D ', wc_rev='1'),
+ 'A/D/H/omega' : Item(status='D ', wc_rev='1'),
+ 'A/D/gamma' : Item(status='D ', wc_rev='1'),
+ 'A/C' : Item(status='D ', wc_rev='1'),
+ 'A/mu' : Item(status='D ', wc_rev='1'),
+ 'A_moved' : Item(status='A ', copied='+', wc_rev='-',
+ moved_from='A'),
+ 'A_moved/D' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A_moved/D/G' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A_moved/D/G/rho' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A_moved/D/G/tau' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A_moved/D/G/pi' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A_moved/D/H' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A_moved/D/H/omega' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A_moved/D/H/psi' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A_moved/D/H/chi' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A_moved/D/gamma' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A_moved/B' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A_moved/B/E' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A_moved/B/E/beta' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A_moved/B/E/alpha' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A_moved/B/lambda' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A_moved/B/F' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A_moved/mu' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A_moved/C' : Item(status=' ', copied='+', wc_rev='-'),
+ 'iota' : Item(status=' ', wc_rev='1'),
+ 'new' : Item(status=' ', wc_rev='2'),
+ })
+
+ # This update raises a tree-conflict on A. The conflict cannot be
+ # resolved to update the move destination because the move source is
+ # mixed rev.
+
+ # Note that this exact scenario doesn't apply to switch as we don't
+ # allow switches with as root a shadowed node. However it is
+ # possible to get essentially the problem with switch by invoking a
+  # depth immediates switch on the parent of the root of the move
+ # source. That switches the root of the move without switching the
+ # children.
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status,
+ [], False,
+ sbox.ospath('A/B/E'))
+
+@Issues(4323)
+def bump_below_tree_conflict(sbox):
+ "tree conflicts should be skipped during update"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'rm', sbox.repo_url + '/A/B',
+ '-m', '')
+
+ sbox.simple_add_text('Q', 'q')
+ sbox.simple_commit()
+ sbox.simple_add_text('R', 'r')
+ sbox.simple_commit()
+
+ sbox.simple_update(revision='1')
+
+ sbox.simple_rm('A')
+
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A' : Item(status=' ', treeconflict='C'), # The real TC
+ 'A/B' : Item(status=' ', treeconflict='D'), # Shadowed delete
+ })
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+
+ expected_status.tweak('A', status='D ', treeconflict='C', wc_rev='2')
+ expected_status.tweak('A/D', 'A/D/G', 'A/D/G/rho', 'A/D/G/tau', 'A/D/G/pi',
+ 'A/D/H', 'A/D/H/omega', 'A/D/H/chi', 'A/D/H/psi',
+ 'A/D/gamma', 'A/mu', 'A/C', status='D ')
+
+ expected_status.remove('A/B', 'A/B/lambda', 'A/B/E', 'A/B/E/alpha',
+ 'A/B/E/beta', 'A/B/F')
+
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ None,
+ expected_status,
+ [], False,
+ '-r', '2', wc_dir)
+
+ # A is tree conflicted, so an update of A/D should be a skip/no-op.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D' : Item(verb='Skipped'),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ None,
+ expected_status,
+ [], False,
+ sbox.ospath('A/D'))
+
+ # A is tree conflicted, so an update of A/D/G should be a skip/no-op.
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/D/G' : Item(verb='Skipped'),
+ })
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ None,
+ expected_status,
+ [], False,
+ sbox.ospath('A/D/G'))
+
+@Issues(4111)
+def update_child_below_add(sbox):
+ "update child below added tree"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_update('A/B', 0)
+ e_path = sbox.ospath('A/B/E')
+
+ # Update skips and errors on A/B/E because A/B has a not-present BASE node.
+ expected_output = ["Skipped '"+e_path+"'\n"]
+ expected_err = "svn: E155007: "
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+ expected_status.remove('A/B', 'A/B/E', 'A/B/E/alpha', 'A/B/E/beta',
+ 'A/B/F', 'A/B/lambda')
+ svntest.actions.run_and_verify_svn(expected_output,
+ expected_err,
+ 'update', e_path)
+ svntest.actions.run_and_verify_status(wc_dir, expected_status)
+
+
+ # Add working nodes over A/B
+ sbox.simple_mkdir('A/B')
+ sbox.simple_mkdir('A/B/E')
+ sbox.simple_add_text('the new alpha', 'A/B/E/alpha')
+
+ expected_status.add({
+ 'A/B' : Item(status='A ', wc_rev='-'),
+ 'A/B/E' : Item(status='A ', wc_rev='-'),
+ 'A/B/E/alpha' : Item(status='A ', wc_rev='-'),
+ })
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B/E' : Item(verb='Skipped'),
+ })
+ # Update should still skip A/B/E
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ None,
+ expected_status,
+ [], False,
+ sbox.ospath('A/B/E'))
+
+def update_conflict_details(sbox):
+ "update conflict details"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ sbox.simple_append('A/B/E/new', 'new\n')
+ sbox.simple_add('A/B/E/new')
+ sbox.simple_append('A/B/E/alpha', '\nextra\nlines\n')
+ sbox.simple_rm('A/B/E/beta', 'A/B/F')
+ sbox.simple_propset('key', 'VAL', 'A/B/E', 'A/B')
+ sbox.simple_mkdir('A/B/E/new-dir1')
+ sbox.simple_mkdir('A/B/E/new-dir2')
+ sbox.simple_mkdir('A/B/E/new-dir3')
+ sbox.simple_rm('A/B/lambda')
+ sbox.simple_mkdir('A/B/lambda')
+ sbox.simple_commit()
+
+ sbox.simple_update('', 1)
+
+ sbox.simple_propset('key', 'vAl', 'A/B')
+ sbox.simple_move('A/B/E/beta', 'beta')
+ sbox.simple_propset('a', 'b', 'A/B/F', 'A/B/lambda')
+ sbox.simple_append('A/B/E/alpha', 'other\nnew\nlines')
+ sbox.simple_mkdir('A/B/E/new')
+ sbox.simple_mkdir('A/B/E/new-dir1')
+ sbox.simple_append('A/B/E/new-dir2', 'something')
+ sbox.simple_append('A/B/E/new-dir3', 'something')
+ sbox.simple_add('A/B/E/new-dir3')
+
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
+ expected_status.add({
+ 'A/B/E/new' : Item(status='R ', treeconflict='C', wc_rev='2'),
+ 'A/B/E/new-dir2' : Item(status='D ', treeconflict='C', wc_rev='2'),
+ 'A/B/E/new-dir3' : Item(status='R ', treeconflict='C', wc_rev='2'),
+ 'A/B/E/new-dir1' : Item(status=' ', wc_rev='2'),
+ 'A/C' : Item(status=' ', wc_rev='2'),
+ 'iota' : Item(status=' ', wc_rev='2'),
+ 'beta' : Item(status='A ', copied='+', wc_rev='-')
+ })
+ expected_status.tweak('A/B', status=' C', wc_rev='2')
+ expected_status.tweak('A/B/E/alpha', status='C ', wc_rev='2')
+ expected_status.tweak('A/B/E/beta', status='! ', treeconflict='C', wc_rev=None)
+ expected_status.tweak('A/B/F', status='A ', copied='+', treeconflict='C', wc_rev='-')
+ expected_status.tweak('A/B/lambda', status='RM', copied='+', treeconflict='C', wc_rev='-')
+ expected_status.tweak('A/mu', status=' ', wc_rev='2')
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B' : Item(status=' C'),
+ 'A/B/E' : Item(status=' U'),
+ 'A/B/E/new' : Item(status=' ', treeconflict='C'),
+ 'A/B/E/beta' : Item(status=' ', treeconflict='C'),
+ 'A/B/E/alpha' : Item(status='C '),
+ 'A/B/E/new-dir2' : Item(status=' ', treeconflict='C'),
+ 'A/B/E/new-dir3' : Item(status=' ', treeconflict='C'),
+ 'A/B/E/new-dir1' : Item(status='E '),
+ 'A/B/F' : Item(status=' ', treeconflict='C'),
+ # ### 2 tree conflict reports; one for delete; one for add...
+ 'A/B/lambda' : Item(status=' ', treeconflict='A',
+ prev_status=' ', prev_treeconflict='C'),
+ })
+ svntest.actions.run_and_verify_update(wc_dir, expected_output,
+ None, expected_status,
+ [], False,
+ '--adds-as-modification', wc_dir)
+
+ # Update can't pass source as none at a specific URL@revision,
+ # because it doesn't know... the working copy could be mixed
+ # revision or may have excluded parts...
+ expected_info = [
+ {
+ "Path" : re.escape(sbox.ospath('A/B')),
+
+ "Conflicted Properties" : "key",
+ "Conflict Details": re.escape(
+ 'incoming dir edit upon update' +
+ ' Source left: (dir) ^/A/B@1' +
+ ' Source right: (dir) ^/A/B@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('A/B/E')),
+ },
+ {
+ "Path" : re.escape(sbox.ospath('A/B/E/alpha')),
+ "Conflict Previous Base File" : '.*alpha.*',
+ "Conflict Previous Working File" : '.*alpha.*',
+ "Conflict Current Base File": '.*alpha.*',
+ "Conflict Details": re.escape(
+ 'incoming file edit upon update' +
+ ' Source left: (file) ^/A/B/E/alpha@1' +
+ ' Source right: (file) ^/A/B/E/alpha@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('A/B/E/beta')),
+ "Tree conflict": re.escape(
+ 'local file moved away, incoming file delete or move upon update' +
+ ' Source left: (file) ^/A/B/E/beta@1' +
+ ' Source right: (none) ^/A/B/E/beta@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('A/B/E/new')),
+ "Tree conflict": re.escape(
+ 'local dir add, incoming file add upon update' +
+ ' Source left: (none)' +
+ ' Source right: (file) ^/A/B/E/new@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('A/B/E/new-dir1')),
+ # No tree conflict. Existing directory taken over
+ },
+ {
+ "Path" : re.escape(sbox.ospath('A/B/E/new-dir2')),
+ "Tree conflict": re.escape(
+ 'local file unversioned, incoming dir add upon update' +
+ ' Source left: (none)' +
+ ' Source right: (dir) ^/A/B/E/new-dir2@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('A/B/E/new-dir3')),
+ "Tree conflict": re.escape(
+ 'local file add, incoming dir add upon update' +
+ ' Source left: (none)' +
+ ' Source right: (dir) ^/A/B/E/new-dir3@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('A/B/F')),
+ "Tree conflict": re.escape(
+ 'local dir edit, incoming dir delete or move upon update' +
+ ' Source left: (dir) ^/A/B/F@1' +
+ ' Source right: (none) ^/A/B/F@2')
+ },
+ {
+ "Path" : re.escape(sbox.ospath('A/B/lambda')),
+ "Tree conflict": re.escape(
+ 'local file edit, incoming replace with dir upon update' +
+ ' Source left: (file) ^/A/B/lambda@1' +
+ ' Source right: (dir) ^/A/B/lambda@2')
+ },
+ ]
+
+ svntest.actions.run_and_verify_info(expected_info, sbox.ospath('A/B'),
+ '--depth', 'infinity')
+
+# Keywords should be updated in local file even if text change is shortcut
+# (due to the local change being the same as the incoming change, for example).
+@XFail()
+def update_keywords_on_shortcut(sbox):
+ "update_keywords_on_shortcut"
+
+ sbox.build()
+ wc_dir = sbox.wc_dir
+
+ # Start with a file with keywords expanded
+ mu_path = sbox.ospath('A/mu')
+ svntest.main.file_append(mu_path, '$LastChangedRevision$\n')
+ svntest.main.run_svn(None, 'ps', 'svn:keywords', 'LastChangedRevision', mu_path)
+ sbox.simple_commit('A/mu')
+
+ # Modify the text, and commit
+ svntest.main.file_append(mu_path, 'New line.\n')
+ sbox.simple_commit('A/mu')
+
+ # Update back to the previous revision
+ sbox.simple_update('A/mu', 2)
+
+ # Make the same change again locally
+ svntest.main.file_append(mu_path, 'New line.\n')
+
+ # Update, so that merging the text change is a short-cut merge
+ text_before_up = open(sbox.ospath('A/mu'), 'r').readlines()
+ sbox.simple_update('A/mu')
+ text_after_up = open(sbox.ospath('A/mu'), 'r').readlines()
+
+ # Check the keywords have been updated
+ if not any(['$LastChangedRevision: 2 $' in line
+ for line in text_before_up]):
+ raise svntest.Failure("keyword not as expected in test set-up phase")
+ if not any(['$LastChangedRevision: 3 $' in line
+ for line in text_after_up]):
+ raise svntest.Failure("update did not update the LastChangedRevision keyword")
+
+def update_add_conflicted_deep(sbox):
+ "deep add conflicted"
+
+ sbox.build()
+ repo_url = sbox.repo_url
+
+ svntest.actions.run_and_verify_svnmucc(
+ None, [], '-U', repo_url, '-m', '',
+ 'mkdir', 'A/z',
+ 'mkdir', 'A/z/z',
+ 'mkdir', 'A/z/z/z')
+
+ svntest.actions.run_and_verify_svnmucc(
+ None, [], '-U', repo_url, '-m', '',
+ 'rm', 'A/z',
+ 'mkdir', 'A/z',
+ 'mkdir', 'A/z/z',
+ 'mkdir', 'A/z/z/z')
+
+ sbox.simple_append('A/z', 'A/z')
+ sbox.simple_add('A/z')
+ sbox.simple_update('A', 2)
+ # This final update used to segfault using 1.9.0 and 1.9.1
+ sbox.simple_update('A/z/z', 3)
+
+def missing_tmp_update(sbox):
+ "missing tmp update caused segfault"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+ svntest.actions.run_and_verify_update(wc_dir, None, None, None, [], False,
+ wc_dir, '--set-depth', 'empty')
+
+ os.rmdir(sbox.ospath(svntest.main.get_admin_name() + '/tmp'))
+
+ svntest.actions.run_and_verify_svn(None, '.*Unable to create.*',
+ 'up', wc_dir, '--set-depth', 'infinity')
+
+ # This re-creates .svn/tmp as a side-effect.
+ svntest.actions.run_and_verify_svn(None, [], 'cleanup',
+ '--vacuum-pristines', wc_dir)
+
+ svntest.actions.run_and_verify_update(wc_dir, None, None, None, [], False,
+ wc_dir, '--set-depth', 'infinity')
+
+def update_delete_switched(sbox):
+ "update delete switched"
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ svntest.actions.run_and_verify_switch(wc_dir, sbox.ospath('A/B/E'),
+ sbox.repo_url + '/A/D/G',
+ None, None, None, [], False,
+ '--ignore-ancestry')
+
+ # Introduce some change somewhere...
+ sbox.simple_propset('A', 'A', 'A')
+
+ expected_status = svntest.wc.State(wc_dir, {
+ '' : Item(status=' ', wc_rev='1'),
+ 'A' : Item(status='A ', copied='+', treeconflict='C', wc_rev='-'),
+ 'A/B' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B/E' : Item(status='A ', copied='+', wc_rev='-'),
+ 'A/B/E/rho' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B/E/pi' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B/E/tau' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B/lambda' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B/F' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D/G' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D/G/pi' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D/G/tau' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D/G/rho' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D/gamma' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D/H' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D/H/omega' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D/H/psi' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/D/H/chi' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/mu' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/C' : Item(status=' ', copied='+', wc_rev='-'),
+ 'iota' : Item(status=' ', wc_rev='1'),
+ })
+ svntest.actions.run_and_verify_update(wc_dir, None, None, expected_status,
+ [], False, sbox.ospath('A'), '-r', 0)
+
+@XFail()
+def update_add_missing_local_add(sbox):
+ "update adds missing local addition"
+
+ sbox.build(read_only=True)
+
+ # Note that updating 'A' to r0 doesn't reproduce this issue...
+ sbox.simple_update('', revision='0')
+ sbox.simple_mkdir('A')
+ sbox.simple_add_text('mumumu', 'A/mu')
+ os.unlink(sbox.ospath('A/mu'))
+ os.rmdir(sbox.ospath('A'))
+
+ sbox.simple_update()
+
+#######################################################################
+# Run the tests
+
+
+# list all tests here, starting with None:
+test_list = [ None,
+ update_binary_file,
+ update_binary_file_2,
+ update_ignores_added,
+ update_to_rev_zero,
+ receive_overlapping_same_change,
+ update_to_resolve_text_conflicts,
+ update_delete_modified_files,
+ update_after_add_rm_deleted,
+ update_missing,
+ update_replace_dir,
+ update_single_file,
+ prop_update_on_scheduled_delete,
+ update_receive_illegal_name,
+ update_deleted_missing_dir,
+ another_hudson_problem,
+ update_deleted_targets,
+ new_dir_with_spaces,
+ non_recursive_update,
+ checkout_empty_dir,
+ update_to_deletion,
+ update_deletion_inside_out,
+ update_schedule_add_dir,
+ update_to_future_add,
+ obstructed_update_alters_wc_props,
+ update_xml_unsafe_dir,
+ conflict_markers_matching_eol,
+ update_eolstyle_handling,
+ update_copy_of_old_rev,
+ forced_update,
+ forced_update_failures,
+ update_wc_on_windows_drive,
+ update_wc_with_replaced_file,
+ update_with_obstructing_additions,
+ update_conflicted,
+ mergeinfo_update_elision,
+ update_copied_from_replaced_and_changed,
+ update_copied_and_deleted_prop,
+ update_accept_conflicts,
+ update_uuid_changed,
+ restarted_update_should_delete_dir_prop,
+ tree_conflicts_on_update_1_1,
+ tree_conflicts_on_update_1_2,
+ tree_conflicts_on_update_2_1,
+ tree_conflicts_on_update_2_2,
+ tree_conflicts_on_update_2_3,
+ tree_conflicts_on_update_3,
+ tree_conflict_uc1_update_deleted_tree,
+ tree_conflict_uc2_schedule_re_add,
+ set_deep_depth_on_target_with_shallow_children,
+ update_wc_of_dir_to_rev_not_containing_this_dir,
+ update_empty_hides_entries,
+ mergeinfo_updates_merge_with_local_mods,
+ update_with_excluded_subdir,
+ update_with_file_lock_and_keywords_property_set,
+ update_nonexistent_child_of_copy,
+ revive_children_of_copy,
+ skip_access_denied,
+ update_to_HEAD_plus_1,
+ update_moved_dir_leaf_del,
+ update_moved_dir_edited_leaf_del,
+ update_moved_dir_file_add,
+ update_moved_dir_dir_add,
+ update_moved_dir_file_move,
+ update_binary_file_3,
+ update_move_text_mod,
+ update_nested_move_text_mod,
+ update_with_parents_and_exclude,
+ update_edit_delete_obstruction,
+ update_deleted,
+ break_moved_dir_edited_leaf_del,
+ break_moved_replaced_dir,
+ update_removes_switched,
+ incomplete_overcomplete,
+ update_swapped_depth_dirs,
+ move_update_props,
+ windows_update_backslash,
+ update_moved_away,
+ bump_below_tree_conflict,
+ update_child_below_add,
+ update_conflict_details,
+ update_keywords_on_shortcut,
+ update_add_conflicted_deep,
+ missing_tmp_update,
+ update_delete_switched,
+ update_add_missing_local_add,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/cmdline/update_tests_data/checkout_broken_eol.dump b/subversion/tests/cmdline/update_tests_data/checkout_broken_eol.dump
new file mode 100644
index 0000000..48f339e
--- /dev/null
+++ b/subversion/tests/cmdline/update_tests_data/checkout_broken_eol.dump
@@ -0,0 +1,48 @@
+SVN-fs-dump-format-version: 2
+
+UUID: f8e39098-bf12-0410-85ab-a169258d30d2
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2006-05-01T20:06:54.550716Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 99
+Content-length: 99
+
+K 7
+svn:log
+V 3
+msg
+K 10
+svn:author
+V 2
+pl
+K 8
+svn:date
+V 27
+2006-05-01T20:08:34.088612Z
+PROPS-END
+
+Node-path: file
+Node-kind: file
+Node-action: add
+Prop-content-length: 40
+Text-content-length: 12
+Content-length: 52
+
+K 13
+svn:eol-style
+V 6
+native
+PROPS-END
+line
+line2
+
+
diff --git a/subversion/tests/cmdline/upgrade_tests.py b/subversion/tests/cmdline/upgrade_tests.py
new file mode 100755
index 0000000..43258f6
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests.py
@@ -0,0 +1,1559 @@
+#!/usr/bin/env python
+#
+# upgrade_tests.py: test the working copy upgrade process
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+#
+# These tests exercise the upgrade capabilities of 'svn upgrade' as it
+# moves working copies between wc-1 and wc-ng.
+#
+
+import os
+import re
+import shutil
+import sys
+import tarfile
+import tempfile
+import logging
+import stat
+
+logger = logging.getLogger()
+
+import svntest
+from svntest import wc
+
+Item = svntest.wc.StateItem
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+
+wc_is_too_old_regex = (".*is too old \(format \d+.*\).*")
+
+
+def get_current_format():
+ # Get current format from subversion/libsvn_wc/wc.h
+ format_file = open(os.path.join(os.path.dirname(__file__), "..", "..", "libsvn_wc", "wc.h")).read()
+ return int(re.search("\n#define SVN_WC__VERSION (\d+)\n", format_file).group(1))
+
+
+def replace_sbox_with_tarfile(sbox, tar_filename,
+ dir=None):
+ try:
+ svntest.main.safe_rmtree(sbox.wc_dir)
+ except OSError as e:
+ pass
+
+ if not dir:
+ dir = tar_filename.split('.')[0]
+
+ tarpath = os.path.join(os.path.dirname(sys.argv[0]), 'upgrade_tests_data',
+ tar_filename)
+ t = tarfile.open(tarpath, 'r:bz2')
+ extract_dir = tempfile.mkdtemp(dir=svntest.main.temp_dir)
+ for member in t.getmembers():
+ t.extract(member, extract_dir)
+
+ shutil.move(os.path.join(extract_dir, dir), sbox.wc_dir)
+
+def replace_sbox_repo_with_tarfile(sbox, tar_filename, dir=None):
+ try:
+ svntest.main.safe_rmtree(sbox.repo_dir)
+ except OSError as e:
+ pass
+
+ if not dir:
+ dir = tar_filename.split('.')[0]
+
+ tarpath = os.path.join(os.path.dirname(sys.argv[0]), 'upgrade_tests_data',
+ tar_filename)
+ t = tarfile.open(tarpath, 'r:bz2')
+ extract_dir = tempfile.mkdtemp(dir=svntest.main.temp_dir)
+ for member in t.getmembers():
+ t.extract(member, extract_dir)
+
+ shutil.move(os.path.join(extract_dir, dir), sbox.repo_dir)
+
+def check_format(sbox, expected_format):
+ dot_svn = svntest.main.get_admin_name()
+ for root, dirs, files in os.walk(sbox.wc_dir):
+ db = svntest.sqlite3.connect(os.path.join(root, dot_svn, 'wc.db'))
+ c = db.cursor()
+ c.execute('pragma user_version;')
+ found_format = c.fetchone()[0]
+ db.close()
+
+ if found_format != expected_format:
+ raise svntest.Failure("found format '%d'; expected '%d'; in wc '%s'" %
+ (found_format, expected_format, root))
+
+ dirs[:] = []
+
+ if dot_svn in dirs:
+ dirs.remove(dot_svn)
+
+def check_pristine(sbox, files):
+ for file in files:
+ file_path = sbox.ospath(file)
+ file_text = open(file_path, 'r').read()
+ file_pristine = open(svntest.wc.text_base_path(file_path), 'r').read()
+ if (file_text != file_pristine):
+ raise svntest.Failure("pristine mismatch for '%s'" % (file))
+
+def check_dav_cache(dir_path, wc_id, expected_dav_caches):
+ dot_svn = svntest.main.get_admin_name()
+ db = svntest.sqlite3.connect(os.path.join(dir_path, dot_svn, 'wc.db'))
+
+ c = db.cursor()
+
+ # Check if python's sqlite can read our db
+ c.execute('select sqlite_version()')
+ sqlite_ver = svntest.main.ensure_list(map(int, c.fetchone()[0].split('.')))
+
+ # SQLite versions have 3 or 4 number groups
+ major = sqlite_ver[0]
+ minor = sqlite_ver[1]
+ patch = sqlite_ver[2]
+
+ if major < 3 or (major == 3 and minor < 6) \
+ or (major == 3 and minor == 6 and patch < 18):
+ return # We need a newer SQLite
+
+ for local_relpath, expected_dav_cache in expected_dav_caches.items():
+ # NODES conversion is complete enough that we can use it if it exists
+ c.execute("""pragma table_info(nodes)""")
+ if c.fetchone():
+ c.execute('select dav_cache from nodes ' +
+ 'where wc_id=? and local_relpath=? and op_depth = 0',
+ (wc_id, local_relpath))
+ row = c.fetchone()
+ else:
+ c.execute('select dav_cache from base_node ' +
+ 'where wc_id=? and local_relpath=?',
+ (wc_id, local_relpath))
+ row = c.fetchone()
+ if row is None:
+ raise svntest.Failure("no dav cache for '%s'" % (local_relpath))
+ dav_cache = str(row[0])
+ if dav_cache != str(expected_dav_cache):
+ raise svntest.Failure(
+ "wrong dav cache for '%s'\n Found: '%s'\n Expected: '%s'" %
+ (local_relpath, dav_cache, expected_dav_cache))
+
+ db.close()
+
+# Very simple working-copy property diff handler for single-line textual
+# properties. Should probably be moved to svntest/actions.py after some major
+def simple_property_verify(dir_path, expected_props):
+
+ # Shows all items in dict1 that are not also in dict2
+ def diff_props(dict1, dict2, name, match):
+
+ equal = True;
+ for key in dict1:
+ node = dict1[key]
+ node2 = dict2.get(key, None)
+ if node2:
+ for prop in node:
+ v1 = node[prop]
+ v2 = node2.get(prop, None)
+
+ if not v2:
+ logger.warn('\'%s\' property on \'%s\' not found in %s',
+ prop, key, name)
+ equal = False
+ if match and v1 != v2:
+ logger.warn('Expected \'%s\' on \'%s\' to be \'%s\', but found \'%s\'',
+ prop, key, v1, v2)
+ equal = False
+ else:
+ logger.warn('\'%s\': %s not found in %s', key, dict1[key], name)
+ equal = False
+
+ return equal
+
+
+ exit_code, output, errput = svntest.main.run_svn(None, 'proplist', '-R',
+ '-v', dir_path)
+
+ actual_props = {}
+ target = None
+ name = None
+
+ for i in output:
+ if i.startswith('Properties on '):
+ target = i[15+len(dir_path)+1:-3].replace(os.path.sep, '/')
+ elif not i.startswith(' '):
+ name = i.strip()
+ else:
+ v = actual_props.get(target, {})
+ v[name] = i.strip()
+ actual_props[target] = v
+
+ v1 = diff_props(expected_props, actual_props, 'actual', True)
+ v2 = diff_props(actual_props, expected_props, 'expected', False)
+
+ if not v1 or not v2:
+ logger.warn('Actual properties: %s', actual_props)
+ raise svntest.Failure("Properties unequal")
+
+def simple_checksum_verify(expected_checksums):
+
+ for path, checksum in expected_checksums:
+ exit_code, output, errput = svntest.main.run_svn(None, 'info', path)
+ if exit_code:
+ raise svntest.Failure()
+ if checksum:
+ if not svntest.verify.RegexOutput('Checksum: ' + checksum,
+ match_all=False).matches(output):
+ raise svntest.Failure("did not get expected checksum " + checksum)
+ if not checksum:
+ if svntest.verify.RegexOutput('Checksum: ',
+ match_all=False).matches(output):
+ raise svntest.Failure("unexpected checksum")
+
+
+def run_and_verify_status_no_server(wc_dir, expected_status):
+ "same as svntest.actions.run_and_verify_status(), but without '-u'"
+
+ exit_code, output, errput = svntest.main.run_svn(None, 'st', '-q', '-v',
+ wc_dir)
+ actual = svntest.tree.build_tree_from_status(output)
+ try:
+ svntest.tree.compare_trees("status", actual, expected_status.old_tree())
+ except svntest.tree.SVNTreeError:
+ svntest.verify.display_trees(None, 'STATUS OUTPUT TREE',
+ expected_status.old_tree(), actual)
+ logger.warn("ACTUAL STATUS TREE:")
+ svntest.tree.dump_tree_script(actual, wc_dir + os.sep)
+ raise
+
+
+def basic_upgrade(sbox):
+ "basic upgrade behavior"
+
+ replace_sbox_with_tarfile(sbox, 'basic_upgrade.tar.bz2')
+
+ # Attempt to use the working copy, this should give an error
+ svntest.actions.run_and_verify_svn(None, wc_is_too_old_regex,
+ 'info', sbox.wc_dir)
+
+ # Upgrade on something anywhere within a versioned subdir gives a
+ # 'not a working copy root' error. Upgrade on something without any
+ # versioned parent gives a 'not a working copy' error.
+ # Both cases use the same error code.
+ not_wc = ".*(E155007|E155019).*%s'.*not a working copy.*"
+ os.mkdir(sbox.ospath('X'))
+ svntest.actions.run_and_verify_svn(None, not_wc % 'X',
+ 'upgrade', sbox.ospath('X'))
+
+ # Upgrade on a non-existent subdir within an old WC gives a
+ # 'not a working copy' error.
+ svntest.actions.run_and_verify_svn(None, not_wc % 'Y',
+ 'upgrade', sbox.ospath('Y'))
+ # Upgrade on a versioned file within an old WC gives a
+ # 'not a working copy' error.
+ svntest.actions.run_and_verify_svn(None, not_wc % 'mu',
+ 'upgrade', sbox.ospath('A/mu'))
+ # Upgrade on a versioned dir within an old WC gives a
+ # 'not a working copy' error.
+ svntest.actions.run_and_verify_svn(None, not_wc % 'A',
+ 'upgrade', sbox.ospath('A'))
+
+ # Now upgrade the working copy
+ svntest.actions.run_and_verify_svn(None, [],
+ 'upgrade', sbox.wc_dir)
+
+ # Actually check the format number of the upgraded working copy
+ check_format(sbox, get_current_format())
+
+ # Now check the contents of the working copy
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+ check_pristine(sbox, ['iota', 'A/mu'])
+
+def upgrade_with_externals(sbox):
+ "upgrade with externals"
+
+ # Create wc from tarfile, uses the same structure of the wc as the tests
+ # in externals_tests.py.
+ replace_sbox_with_tarfile(sbox, 'upgrade_with_externals.tar.bz2')
+
+ # Attempt to use the working copy, this should give an error
+ expected_stderr = wc_is_too_old_regex
+ svntest.actions.run_and_verify_svn(None, expected_stderr,
+ 'info', sbox.wc_dir)
+ # Now upgrade the working copy
+ svntest.actions.run_and_verify_svn(None, [],
+ 'upgrade', sbox.wc_dir)
+
+ # Actually check the format number of the upgraded working copy
+ check_format(sbox, get_current_format())
+ check_pristine(sbox, ['iota', 'A/mu',
+ 'A/D/x/lambda', 'A/D/x/E/alpha'])
+
+def upgrade_1_5_body(sbox, subcommand):
+ replace_sbox_with_tarfile(sbox, 'upgrade_1_5.tar.bz2')
+
+ # Attempt to use the working copy, this should give an error
+ expected_stderr = wc_is_too_old_regex
+ svntest.actions.run_and_verify_svn(None, expected_stderr,
+ subcommand, sbox.wc_dir)
+
+
+ # Now upgrade the working copy
+ svntest.actions.run_and_verify_svn(None, [],
+ 'upgrade', sbox.wc_dir)
+
+ # Check the format of the working copy
+ check_format(sbox, get_current_format())
+
+ # Now check the contents of the working copy
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+ check_pristine(sbox, ['iota', 'A/mu'])
+
+
+def upgrade_1_5(sbox):
+ "test upgrading from a 1.5-era working copy"
+ return upgrade_1_5_body(sbox, 'info')
+
+
+def update_1_5(sbox):
+ "test updating a 1.5-era working copy"
+
+ # The 'update' printed:
+ # Skipped 'svn-test-work\working_copies\upgrade_tests-3'
+ # Summary of conflicts:
+ # Skipped paths: 1
+ return upgrade_1_5_body(sbox, 'update')
+
+
+def logs_left_1_5(sbox):
+ "test upgrading from a 1.5-era wc with stale logs"
+
+ replace_sbox_with_tarfile(sbox, 'logs_left_1_5.tar.bz2')
+
+ # Try to upgrade, this should give an error
+ expected_stderr = (".*Cannot upgrade with existing logs; .*")
+ svntest.actions.run_and_verify_svn(None, expected_stderr,
+ 'upgrade', sbox.wc_dir)
+
+
+def upgrade_wcprops(sbox):
+ "test upgrading a working copy with wcprops"
+
+ replace_sbox_with_tarfile(sbox, 'upgrade_wcprops.tar.bz2')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'upgrade', sbox.wc_dir)
+
+ # Make sure that .svn/all-wcprops has disappeared
+ dot_svn = svntest.main.get_admin_name()
+ if os.path.exists(os.path.join(sbox.wc_dir, dot_svn, 'all-wcprops')):
+ raise svntest.Failure("all-wcprops file still exists")
+
+ # Just for kicks, let's see if the wcprops are what we'd expect them
+ # to be. (This could be smarter.)
+ expected_dav_caches = {
+ '' :
+ b'(svn:wc:ra_dav:version-url 41 /svn-test-work/local_tmp/repos/!svn/ver/1)',
+ 'iota' :
+ b'(svn:wc:ra_dav:version-url 46 /svn-test-work/local_tmp/repos/!svn/ver/1/iota)',
+ }
+ check_dav_cache(sbox.wc_dir, 1, expected_dav_caches)
+
+# Poor man's relocate to fix up a 1.0 (XML-style) working copy to refer to a
+# valid repository, so 'svn upgrade' can do its work on it
+def xml_entries_relocate(path, from_url, to_url):
+ adm_name = svntest.main.get_admin_name()
+ entries = os.path.join(path, adm_name, 'entries')
+ txt = open(entries).read().replace('url="' + from_url, 'url="' + to_url)
+ os.chmod(entries, svntest.main.S_ALL_RWX)
+ open(entries, 'w').write(txt)
+
+ for dirent in os.listdir(path):
+ item_path = os.path.join(path, dirent)
+
+ if dirent == svntest.main.get_admin_name():
+ continue
+
+ if os.path.isdir(os.path.join(item_path, adm_name)):
+ xml_entries_relocate(item_path, from_url, to_url)
+
+# Poor man's relocate to fix up a working copy to refer to a
+# valid repository, so 'svn upgrade' can do its work on it
+def simple_entries_replace(path, from_url, to_url):
+ adm_name = svntest.main.get_admin_name()
+ entries = os.path.join(path, adm_name, 'entries')
+ txt = open(entries).read().replace(from_url, to_url)
+ os.chmod(entries, svntest.main.S_ALL_RWX)
+ open(entries, 'wb').write(txt.encode())
+
+ for dirent in os.listdir(path):
+ item_path = os.path.join(path, dirent)
+
+ if dirent == svntest.main.get_admin_name():
+ continue
+
+ if os.path.isdir(os.path.join(item_path, adm_name)):
+ simple_entries_replace(item_path, from_url, to_url)
+
+
+def basic_upgrade_1_0(sbox):
+ "test upgrading a working copy created with 1.0.0"
+
+ sbox.build(create_wc = False)
+ replace_sbox_with_tarfile(sbox, 'upgrade_1_0.tar.bz2')
+
+ url = sbox.repo_url
+
+ # This is non-canonical by the rules of svn_uri_canonicalize, it gets
+ # written into the entries file and upgrade has to canonicalize.
+ non_canonical_url = url[:-1] + '%%%02x' % ord(url[-1])
+ xml_entries_relocate(sbox.wc_dir, 'file:///1.0.0/repos', non_canonical_url)
+
+ # Attempt to use the working copy, this should give an error
+ expected_stderr = wc_is_too_old_regex
+ svntest.actions.run_and_verify_svn(None, expected_stderr,
+ 'info', sbox.wc_dir)
+
+
+ # Now upgrade the working copy
+ svntest.actions.run_and_verify_svn(None, [],
+ 'upgrade', sbox.wc_dir)
+ # And the separate working copy below COPIED or check_format() fails
+ svntest.actions.run_and_verify_svn(None, [],
+ 'upgrade',
+ os.path.join(sbox.wc_dir, 'COPIED', 'G'))
+
+ # Actually check the format number of the upgraded working copy
+ check_format(sbox, get_current_format())
+
+ # Now check the contents of the working copy
+ # #### This working copy is not just a basic tree,
+ # fix with the right data once we get here
+ expected_status = svntest.wc.State(sbox.wc_dir,
+ {
+ '' : Item(status=' ', wc_rev=7),
+ 'B' : Item(status=' ', wc_rev='7'),
+ 'B/mu' : Item(status=' ', wc_rev='7'),
+ 'B/D' : Item(status=' ', wc_rev='7'),
+ 'B/D/H' : Item(status=' ', wc_rev='7'),
+ 'B/D/H/psi' : Item(status=' ', wc_rev='7'),
+ 'B/D/H/omega' : Item(status=' ', wc_rev='7'),
+ 'B/D/H/zeta' : Item(status='MM', wc_rev='7'),
+ 'B/D/H/chi' : Item(status=' ', wc_rev='7'),
+ 'B/D/gamma' : Item(status=' ', wc_rev='9'),
+ 'B/D/G' : Item(status=' ', wc_rev='7'),
+ 'B/D/G/tau' : Item(status=' ', wc_rev='7'),
+ 'B/D/G/rho' : Item(status=' ', wc_rev='7'),
+ 'B/D/G/pi' : Item(status=' ', wc_rev='7'),
+ 'B/B' : Item(status=' ', wc_rev='7'),
+ 'B/B/lambda' : Item(status=' ', wc_rev='7'),
+ 'MKDIR' : Item(status='A ', wc_rev='0'),
+ 'MKDIR/MKDIR' : Item(status='A ', wc_rev='0'),
+ 'A' : Item(status=' ', wc_rev='7'),
+ 'A/B' : Item(status=' ', wc_rev='7'),
+ 'A/B/lambda' : Item(status=' ', wc_rev='7'),
+ 'A/D' : Item(status=' ', wc_rev='7'),
+ 'A/D/G' : Item(status=' ', wc_rev='7'),
+ 'A/D/G/rho' : Item(status=' ', wc_rev='7'),
+ 'A/D/G/pi' : Item(status=' ', wc_rev='7'),
+ 'A/D/G/tau' : Item(status=' ', wc_rev='7'),
+ 'A/D/H' : Item(status=' ', wc_rev='7'),
+ 'A/D/H/psi' : Item(status=' ', wc_rev='7'),
+ 'A/D/H/omega' : Item(status=' ', wc_rev='7'),
+ 'A/D/H/zeta' : Item(status=' ', wc_rev='7'),
+ 'A/D/H/chi' : Item(status=' ', wc_rev='7'),
+ 'A/D/gamma' : Item(status=' ', wc_rev='7'),
+ 'A/mu' : Item(status=' ', wc_rev='7'),
+ 'iota' : Item(status=' ', wc_rev='7'),
+ 'COPIED' : Item(status=' ', wc_rev='10'),
+ 'DELETED' : Item(status='D ', wc_rev='10'),
+ })
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+
+ expected_infos = [ {
+ 'Node Kind': 'directory',
+ 'Schedule': 'normal',
+ 'Revision': '7',
+ 'Last Changed Author' : 'Bert',
+ 'Last Changed Rev' : '7'
+ } ]
+ svntest.actions.run_and_verify_info(expected_infos, sbox.wc_dir)
+
+ expected_infos = [ {
+ 'Node Kind': 'directory',
+ 'Schedule': 'delete',
+ 'Revision': '10',
+ 'Last Changed Author' : 'Bert',
+ 'Last Changed Rev' : '10'
+ } ]
+ svntest.actions.run_and_verify_info(expected_infos,
+ os.path.join(sbox.wc_dir, 'DELETED'))
+
+ check_pristine(sbox, ['iota', 'A/mu', 'A/D/H/zeta'])
+
+# Helper function for the x3 tests.
+def do_x3_upgrade(sbox, expected_error=[]):
+ # Attempt to use the working copy, this should give an error
+ expected_stderr = wc_is_too_old_regex
+ svntest.actions.run_and_verify_svn(None, expected_stderr,
+ 'info', sbox.wc_dir)
+
+
+ # Now upgrade the working copy
+ svntest.actions.run_and_verify_svn(None, expected_error,
+ 'upgrade', sbox.wc_dir)
+
+ if expected_error != []:
+ return
+
+ # Actually check the format number of the upgraded working copy
+ check_format(sbox, get_current_format())
+
+ # Now check the contents of the working copy
+ expected_status = svntest.wc.State(sbox.wc_dir,
+ {
+ '' : Item(status=' ', wc_rev='2'),
+ 'A' : Item(status=' ', wc_rev='2'),
+ 'A/D' : Item(status=' ', wc_rev='2'),
+ 'A/D/H' : Item(status=' ', wc_rev='2'),
+ 'A/D/H/omega' : Item(status=' ', wc_rev='2'),
+ 'A/D/H/psi' : Item(status='D ', wc_rev='2'),
+ 'A/D/H/new' : Item(status='A ', copied='+', wc_rev='-'),
+ 'A/D/H/chi' : Item(status='R ', copied='+', wc_rev='-'),
+ 'A/D/gamma' : Item(status='D ', wc_rev='2'),
+ 'A/D/G' : Item(status=' ', wc_rev='2'),
+ 'A/B_new' : Item(status='A ', copied='+', wc_rev='-'),
+ 'A/B_new/B' : Item(status='A ', copied='+', wc_rev='-'),
+ 'A/B_new/B/E' : Item(status=' M', copied='+', wc_rev='-'),
+ 'A/B_new/B/E/alpha' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B_new/B/E/beta' : Item(status='R ', copied='+', wc_rev='-'),
+ 'A/B_new/B/new' : Item(status='A ', copied='+', wc_rev='-'),
+ 'A/B_new/B/lambda' : Item(status='R ', copied='+', wc_rev='-'),
+ 'A/B_new/B/F' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B_new/E' : Item(status=' M', copied='+', wc_rev='-'),
+ 'A/B_new/E/alpha' : Item(status=' M', copied='+', wc_rev='-'),
+ 'A/B_new/E/beta' : Item(status='RM', copied='+', wc_rev='-'),
+ 'A/B_new/lambda' : Item(status='R ', copied='+', wc_rev='-'),
+ 'A/B_new/new' : Item(status='A ', copied='+', wc_rev='-'),
+ 'A/B_new/F' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B' : Item(status=' ', wc_rev='2'),
+ 'A/B/E' : Item(status=' ', wc_rev='2'),
+ 'A/B/E/beta' : Item(status='RM', copied='+', wc_rev='-'),
+ 'A/B/E/alpha' : Item(status=' M', wc_rev='2'),
+ 'A/B/F' : Item(status=' ', wc_rev='2'),
+ 'A/B/lambda' : Item(status='R ', copied='+', wc_rev='-'),
+ 'A/B/new' : Item(status='A ', copied='+', wc_rev='-'),
+ 'A/G_new' : Item(status='A ', copied='+', wc_rev='-'),
+ 'A/G_new/rho' : Item(status='R ', copied='+', wc_rev='-'),
+ 'iota' : Item(status=' ', wc_rev='2'),
+ 'A_new' : Item(status='A ', wc_rev='0'),
+ 'A_new/alpha' : Item(status='A ', copied='+', wc_rev='-'),
+ })
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+
+ simple_property_verify(sbox.wc_dir, {
+ 'A/B_new/E/beta' : {'x3' : '3x',
+ 'svn:eol-style': 'native'},
+ 'A/B/E/beta' : {'s' : 't',
+ 'svn:eol-style': 'native'},
+ 'A/B_new/B/E/alpha' : {'svn:eol-style': 'native'},
+ 'A/B/E/alpha' : {'q': 'r',
+ 'svn:eol-style': 'native'},
+ 'A_new/alpha' : {'svn:eol-style': 'native'},
+ 'A/B_new/B/new' : {'svn:eol-style': 'native'},
+ 'A/B_new/E/alpha' : {'svn:eol-style': 'native',
+ 'u': 'v'},
+ 'A/B_new/B/E' : {'q': 'r'},
+ 'A/B_new/lambda' : {'svn:eol-style': 'native'},
+ 'A/B_new/E' : {'x3': '3x'},
+ 'A/B_new/new' : {'svn:eol-style': 'native'},
+ 'A/B/lambda' : {'svn:eol-style': 'native'},
+ 'A/B_new/B/E/beta' : {'svn:eol-style': 'native'},
+ 'A/B_new/B/lambda' : {'svn:eol-style': 'native'},
+ 'A/B/new' : {'svn:eol-style': 'native'},
+ 'A/G_new/rho' : {'svn:eol-style': 'native'}
+ })
+
+ svntest.actions.run_and_verify_svn('Reverted.*', [],
+ 'revert', '-R', sbox.wc_dir)
+
+ expected_status = svntest.wc.State(sbox.wc_dir,
+ {
+ '' : Item(status=' ', wc_rev='2'),
+ 'A' : Item(status=' ', wc_rev='2'),
+ 'A/D' : Item(status=' ', wc_rev='2'),
+ 'A/D/H' : Item(status=' ', wc_rev='2'),
+ 'A/D/H/omega' : Item(status=' ', wc_rev='2'),
+ 'A/D/H/psi' : Item(status=' ', wc_rev='2'),
+ 'A/D/H/chi' : Item(status=' ', wc_rev='2'),
+ 'A/D/gamma' : Item(status=' ', wc_rev='2'),
+ 'A/D/G' : Item(status=' ', wc_rev='2'),
+ 'A/B' : Item(status=' ', wc_rev='2'),
+ 'A/B/F' : Item(status=' ', wc_rev='2'),
+ 'A/B/E' : Item(status=' ', wc_rev='2'),
+ 'A/B/E/beta' : Item(status=' ', wc_rev='2'),
+ 'A/B/E/alpha' : Item(status=' ', wc_rev='2'),
+ 'A/B/lambda' : Item(status=' ', wc_rev='2'),
+ 'iota' : Item(status=' ', wc_rev='2'),
+ })
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+
+ simple_property_verify(sbox.wc_dir, {
+ 'A/B/E/beta' : {'svn:eol-style': 'native'},
+# 'A/B/lambda' : {'svn:eol-style': 'native'},
+ 'A/B/E/alpha' : {'svn:eol-style': 'native'}
+ })
+
+@Issue(2530)
+def x3_1_4_0(sbox):
+ "3x same wc upgrade 1.4.0 test"
+
+ replace_sbox_with_tarfile(sbox, 'wc-3x-1.4.0.tar.bz2', dir='wc-1.4.0')
+
+ do_x3_upgrade(sbox, expected_error='.*E155016: The properties of.*are in an '
+ 'indeterminate state and cannot be upgraded. See issue #2530.')
+
+@Issue(3811)
+def x3_1_4_6(sbox):
+ "3x same wc upgrade 1.4.6 test"
+
+ replace_sbox_with_tarfile(sbox, 'wc-3x-1.4.6.tar.bz2', dir='wc-1.4.6')
+
+ do_x3_upgrade(sbox)
+
+@Issue(3811)
+def x3_1_6_12(sbox):
+ "3x same wc upgrade 1.6.12 test"
+
+ replace_sbox_with_tarfile(sbox, 'wc-3x-1.6.12.tar.bz2', dir='wc-1.6.12')
+
+ do_x3_upgrade(sbox)
+
+def missing_dirs(sbox):
+ "missing directories and obstructing files"
+
+ # tarball wc looks like:
+ # svn co URL wc
+ # svn cp wc/A/B wc/A/B_new
+ # rm -rf wc/A/B/E wc/A/D wc/A/B_new/E wc/A/B_new/F
+ # touch wc/A/D wc/A/B_new/F
+
+ replace_sbox_with_tarfile(sbox, 'missing-dirs.tar.bz2')
+ svntest.actions.run_and_verify_svn(None, [],
+ 'upgrade', sbox.wc_dir)
+ expected_status = svntest.wc.State(sbox.wc_dir,
+ {
+ '' : Item(status=' ', wc_rev='1'),
+ 'A' : Item(status=' ', wc_rev='1'),
+ 'A/mu' : Item(status=' ', wc_rev='1'),
+ 'A/C' : Item(status=' ', wc_rev='1'),
+ 'A/D' : Item(status='! ', wc_rev='1'),
+ 'A/B' : Item(status=' ', wc_rev='1'),
+ 'A/B/F' : Item(status=' ', wc_rev='1'),
+ 'A/B/E' : Item(status='! ', wc_rev='1'),
+ 'A/B/lambda' : Item(status=' ', wc_rev='1'),
+ 'iota' : Item(status=' ', wc_rev='1'),
+ 'A/B_new' : Item(status='A ', wc_rev='-', copied='+'),
+ 'A/B_new/E' : Item(status='! ', wc_rev='-'),
+ 'A/B_new/F' : Item(status='! ', wc_rev='-'),
+ 'A/B_new/lambda' : Item(status=' ', wc_rev='-', copied='+'),
+ })
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+
+def missing_dirs2(sbox):
+ "missing directories and obstructing dirs"
+
+ replace_sbox_with_tarfile(sbox, 'missing-dirs.tar.bz2')
+ os.remove(sbox.ospath('A/D'))
+ os.remove(sbox.ospath('A/B_new/F'))
+ os.mkdir(sbox.ospath('A/D'))
+ os.mkdir(sbox.ospath('A/B_new/F'))
+ svntest.actions.run_and_verify_svn(None, [],
+ 'upgrade', sbox.wc_dir)
+ expected_status = svntest.wc.State(sbox.wc_dir,
+ {
+ '' : Item(status=' ', wc_rev='1'),
+ 'A' : Item(status=' ', wc_rev='1'),
+ 'A/mu' : Item(status=' ', wc_rev='1'),
+ 'A/C' : Item(status=' ', wc_rev='1'),
+ 'A/D' : Item(status='! ', wc_rev='1'),
+ 'A/B' : Item(status=' ', wc_rev='1'),
+ 'A/B/F' : Item(status=' ', wc_rev='1'),
+ 'A/B/E' : Item(status='! ', wc_rev='1'),
+ 'A/B/lambda' : Item(status=' ', wc_rev='1'),
+ 'iota' : Item(status=' ', wc_rev='1'),
+ 'A/B_new' : Item(status='A ', wc_rev='-', copied='+'),
+ 'A/B_new/E' : Item(status='! ', wc_rev='-'),
+ 'A/B_new/F' : Item(status='! ', wc_rev='-'),
+ 'A/B_new/lambda' : Item(status=' ', wc_rev='-', copied='+'),
+ })
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+
+@Issue(3808)
+def delete_and_keep_local(sbox):
+ "check status delete and delete --keep-local"
+
+ replace_sbox_with_tarfile(sbox, 'wc-delete.tar.bz2')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'upgrade', sbox.wc_dir)
+
+ expected_status = svntest.wc.State(sbox.wc_dir,
+ {
+ '' : Item(status=' ', wc_rev='0'),
+ 'Normal' : Item(status=' ', wc_rev='1'),
+ 'Deleted-Keep-Local': Item(status='D ', wc_rev='1'),
+ 'Deleted' : Item(status='D ', wc_rev='1'),
+ })
+
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+
+ # Deleted-Keep-Local should still exist after the upgrade
+ if not os.path.exists(os.path.join(sbox.wc_dir, 'Deleted-Keep-Local')):
+ raise svntest.Failure('wc/Deleted-Keep-Local should exist')
+
+  # Deleted should be removed after the upgrade as it was
+  # scheduled for deletion and doesn't contain unversioned changes.
+ if os.path.exists(os.path.join(sbox.wc_dir, 'Deleted')):
+ raise svntest.Failure('wc/Deleted should not exist')
+
+
+def dirs_only_upgrade(sbox):
+ "upgrade a wc without files"
+
+ replace_sbox_with_tarfile(sbox, 'dirs-only.tar.bz2')
+
+ expected_output = ["Upgraded '%s'\n" % (sbox.ospath('').rstrip(os.path.sep)),
+ "Upgraded '%s'\n" % (sbox.ospath('A'))]
+
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'upgrade', sbox.wc_dir)
+
+ expected_status = svntest.wc.State(sbox.wc_dir, {
+ '' : Item(status=' ', wc_rev='1'),
+ 'A' : Item(status=' ', wc_rev='1'),
+ })
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+
+@Issue(3898)
+def delete_in_copy_upgrade(sbox):
+ "upgrade a delete within a copy"
+
+ wc_dir = sbox.wc_dir
+ replace_sbox_with_tarfile(sbox, 'delete-in-copy.tar.bz2')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'upgrade', sbox.wc_dir)
+
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ expected_status.add({
+ 'A/B-copied' : Item(status='A ', copied='+', wc_rev='-'),
+ 'A/B-copied/lambda' : Item(status=' ', copied='+', wc_rev='-'),
+ 'A/B-copied/E' : Item(status='D ', copied='+', wc_rev='-'),
+ 'A/B-copied/E/alpha' : Item(status='D ', copied='+', wc_rev='-'),
+ 'A/B-copied/E/beta' : Item(status='D ', copied='+', wc_rev='-'),
+ 'A/B-copied/F' : Item(status=' ', copied='+', wc_rev='-'),
+ })
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+
+ svntest.actions.run_and_verify_svn('Reverted.*', [], 'revert', '-R',
+ sbox.ospath('A/B-copied/E'))
+
+ expected_status.tweak('A/B-copied/E',
+ 'A/B-copied/E/alpha',
+ 'A/B-copied/E/beta',
+ status=' ')
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+
+ simple_checksum_verify([[sbox.ospath('A/B-copied/E/alpha'),
+ 'b347d1da69df9a6a70433ceeaa0d46c8483e8c03']])
+
+
+def replaced_files(sbox):
+ "upgrade with base and working replaced files"
+
+ wc_dir = sbox.wc_dir
+ replace_sbox_with_tarfile(sbox, 'replaced-files.tar.bz2')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'upgrade', sbox.wc_dir)
+
+ # A is a checked-out dir containing A/f and A/g, then
+ # svn cp wc/A wc/B
+ # svn rm wc/A/f wc/B/f
+ # svn cp wc/A/g wc/A/f # A/f replaced by copied A/g
+ # svn cp wc/A/g wc/B/f # B/f replaced by copied A/g (working-only)
+ # svn rm wc/A/g wc/B/g
+ # touch wc/A/g wc/B/g
+ # svn add wc/A/g wc/B/g # A/g replaced, B/g replaced (working-only)
+ # svn ps pX vX wc/A/g
+ # svn ps pY vY wc/B/g
+ expected_status = svntest.wc.State(sbox.wc_dir,
+ {
+ '' : Item(status=' ', wc_rev='5'),
+ 'A' : Item(status=' ', wc_rev='5'),
+ 'A/f' : Item(status='R ', wc_rev='-', copied='+'),
+ 'A/g' : Item(status='RM', wc_rev='5'),
+ 'B' : Item(status='A ', wc_rev='-', copied='+'),
+ 'B/f' : Item(status='R ', wc_rev='-', copied='+'),
+ 'B/g' : Item(status='RM', wc_rev='-'),
+ })
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+
+ simple_property_verify(sbox.wc_dir, {
+ 'A/f' : {'pAg' : 'vAg' },
+ 'A/g' : {'pX' : 'vX' },
+ 'B/f' : {'pAg' : 'vAg' },
+ 'B/g' : {'pY' : 'vY' },
+ })
+
+ simple_checksum_verify([
+ [sbox.ospath('A/f'), '395dfb603d8a4e0348d0b082803f2b7426c76eb9'],
+ [sbox.ospath('A/g'), None],
+ [sbox.ospath('B/f'), '395dfb603d8a4e0348d0b082803f2b7426c76eb9'],
+ [sbox.ospath('B/g'), None]])
+
+ svntest.actions.run_and_verify_svn('Reverted.*', [], 'revert',
+ sbox.ospath('A/f'), sbox.ospath('B/f'),
+ sbox.ospath('A/g'), sbox.ospath('B/g'))
+
+ simple_property_verify(sbox.wc_dir, {
+ 'A/f' : {'pAf' : 'vAf' },
+ 'A/g' : {'pAg' : 'vAg' },
+ 'B/f' : {'pAf' : 'vAf' },
+ 'B/g' : {'pAg' : 'vAg' },
+ })
+
+ simple_checksum_verify([
+ [sbox.ospath('A/f'), '958eb2d755df2d9e0de6f7b835aec16b64d83f6f'],
+ [sbox.ospath('A/g'), '395dfb603d8a4e0348d0b082803f2b7426c76eb9'],
+ [sbox.ospath('B/f'), '958eb2d755df2d9e0de6f7b835aec16b64d83f6f'],
+ [sbox.ospath('B/g'), '395dfb603d8a4e0348d0b082803f2b7426c76eb9']])
+
+def upgrade_with_scheduled_change(sbox):
+ "upgrade 1.6.x wc with a scheduled change"
+
+ replace_sbox_with_tarfile(sbox, 'upgrade_with_scheduled_change.tar.bz2')
+
+ svntest.actions.run_and_verify_svn(None, [],
+ 'upgrade', sbox.wc_dir)
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ expected_status.add({
+ 'A/scheduled_file_1' : Item(status='A ', wc_rev='-'),
+ })
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+
+@Issue(3777)
+def tree_replace1(sbox):
+ "upgrade 1.6 with tree replaced"
+
+ replace_sbox_with_tarfile(sbox, 'tree-replace1.tar.bz2')
+
+ svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir)
+
+ expected_status = svntest.wc.State(sbox.wc_dir,
+ {
+ '' : Item(status=' M', wc_rev=17),
+ 'B' : Item(status='R ', copied='+', wc_rev='-'),
+ 'B/f' : Item(status=' ', copied='+', wc_rev='-'),
+ 'B/g' : Item(status='D ', wc_rev=17),
+ 'B/h' : Item(status=' ', copied='+', wc_rev='-'),
+ 'B/C' : Item(status=' ', copied='+', wc_rev='-'),
+ 'B/C/f' : Item(status=' ', copied='+', wc_rev='-'),
+ 'B/D' : Item(status='D ', wc_rev=17),
+ 'B/D/f' : Item(status='D ', wc_rev=17),
+ 'B/E' : Item(status=' ', copied='+', wc_rev='-'),
+ 'B/E/f' : Item(status=' ', copied='+', wc_rev='-'),
+ })
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+
+@Issue(3777)
+def tree_replace2(sbox):
+ "upgrade 1.6 with tree replaced (2)"
+
+ replace_sbox_with_tarfile(sbox, 'tree-replace2.tar.bz2')
+
+ svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir)
+
+ expected_status = svntest.wc.State(sbox.wc_dir,
+ {
+ '' : Item(status=' M', wc_rev=12),
+ 'B' : Item(status='R ', copied='+', wc_rev='-'),
+ 'B/f' : Item(status='D ', wc_rev=12),
+ 'B/D' : Item(status='D ', wc_rev=12),
+ 'B/g' : Item(status=' ', copied='+', wc_rev='-'),
+ 'B/E' : Item(status=' ', copied='+', wc_rev='-'),
+ 'C' : Item(status='R ', copied='+', wc_rev='-'),
+ 'C/f' : Item(status=' ', copied='+', wc_rev='-'),
+ 'C/D' : Item(status=' ', copied='+', wc_rev='-'),
+ 'C/g' : Item(status='D ', wc_rev=12),
+ 'C/E' : Item(status='D ', wc_rev=12),
+ })
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+
+@Issue(3901)
+def depth_exclude(sbox):
+ "upgrade 1.6.x wc that has depth=exclude"
+
+ replace_sbox_with_tarfile(sbox, 'depth_exclude.tar.bz2')
+
+ svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir)
+
+ expected_status = svntest.wc.State(sbox.wc_dir,
+ {
+ '' : Item(status=' ', wc_rev='1'),
+ 'A' : Item(status=' ', wc_rev='1'),
+ 'X' : Item(status='A ', copied='+', wc_rev='-'),
+ })
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+
+@Issue(3901)
+def depth_exclude_2(sbox):
+ "1.6.x wc that has depth=exclude inside a delete"
+
+ replace_sbox_with_tarfile(sbox, 'depth_exclude_2.tar.bz2')
+
+ svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir)
+
+ expected_status = svntest.wc.State(sbox.wc_dir,
+ {
+ '' : Item(status=' ', wc_rev='1'),
+ 'A' : Item(status='D ', wc_rev='1'),
+ })
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+
+@Issue(3916)
+def add_add_del_del_tc(sbox):
+ "wc with add-add and del-del tree conflicts"
+
+ replace_sbox_with_tarfile(sbox, 'add_add_del_del_tc.tar.bz2')
+
+ svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir)
+
+ expected_status = svntest.wc.State(sbox.wc_dir,
+ {
+ '' : Item(status=' ', wc_rev='4'),
+ 'A' : Item(status=' ', wc_rev='4'),
+ 'A/B' : Item(status='A ', treeconflict='C', copied='+', wc_rev='-'),
+ 'X' : Item(status=' ', wc_rev='3'),
+ 'X/Y' : Item(status='! ', treeconflict='C')
+ })
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+
+@Issue(3916)
+def add_add_x2(sbox):
+ "wc with 2 tree conflicts in same entry"
+
+ replace_sbox_with_tarfile(sbox, 'add_add_x2.tar.bz2')
+
+ svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir)
+
+ expected_status = svntest.wc.State(sbox.wc_dir,
+ {
+ '' : Item(status=' ', wc_rev='3'),
+ 'A' : Item(status=' ', wc_rev='3'),
+ 'A/X' : Item(status='A ', treeconflict='C', copied='+', wc_rev='-'),
+ 'A/Y' : Item(status='A ', treeconflict='C', copied='+', wc_rev='-'),
+ })
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+
+@Issue(3940)
+def upgrade_with_missing_subdir(sbox):
+ "test upgrading a working copy with missing subdir"
+
+ sbox.build(create_wc = False)
+ replace_sbox_with_tarfile(sbox, 'basic_upgrade.tar.bz2')
+
+ simple_entries_replace(sbox.wc_dir,
+ 'file:///Users/Hyrum/dev/test/greek-1.6.repo',
+ sbox.repo_url)
+
+ svntest.main.run_svnadmin('setuuid', sbox.repo_dir,
+ 'cafefeed-babe-face-dead-beeff00dfade')
+
+ url = sbox.repo_url
+ wc_dir = sbox.wc_dir
+
+ # Attempt to use the working copy, this should give an error
+ expected_stderr = wc_is_too_old_regex
+ svntest.actions.run_and_verify_svn(None, expected_stderr,
+ 'info', sbox.wc_dir)
+
+ # Now remove a subdirectory
+ svntest.main.safe_rmtree(sbox.ospath('A/B'))
+
+ # Now upgrade the working copy and expect a missing subdir
+ expected_output = svntest.verify.UnorderedOutput([
+ "Upgraded '%s'\n" % sbox.wc_dir,
+ "Upgraded '%s'\n" % sbox.ospath('A'),
+ "Skipped '%s'\n" % sbox.ospath('A/B'),
+ "Upgraded '%s'\n" % sbox.ospath('A/C'),
+ "Upgraded '%s'\n" % sbox.ospath('A/D'),
+ "Upgraded '%s'\n" % sbox.ospath('A/D/G'),
+ "Upgraded '%s'\n" % sbox.ospath('A/D/H'),
+ ])
+ svntest.actions.run_and_verify_svn(expected_output, [],
+ 'upgrade', sbox.wc_dir)
+
+ # And now perform an update. (This used to fail with an assertion)
+ expected_output = svntest.wc.State(wc_dir, {
+ 'A/B' : Item(verb='Restored'),
+ 'A/B/E' : Item(status='A '),
+ 'A/B/E/alpha' : Item(status='A '),
+ 'A/B/E/beta' : Item(status='A '),
+ 'A/B/lambda' : Item(status='A '),
+ 'A/B/F' : Item(status='A '),
+ })
+
+ expected_disk = svntest.main.greek_state.copy()
+ expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
+
+ # Do the update and check the results in three ways.
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+@Issue(3994)
+def upgrade_locked(sbox):
+ "upgrade working copy with locked files"
+
+ replace_sbox_with_tarfile(sbox, 'upgrade_locked.tar.bz2')
+
+ svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir)
+
+ expected_status = svntest.wc.State(sbox.wc_dir,
+ {
+ '' : Item(status=' ', wc_rev=1),
+ 'A' : Item(status='D ', wc_rev=2),
+ 'A/third' : Item(status='D ', writelocked='K', wc_rev=2),
+ 'other' : Item(status='D ', writelocked='K', wc_rev=4),
+ 'iota' : Item(status=' ', writelocked='K', wc_rev=3),
+ })
+
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+
+@Issue(4015)
+def upgrade_file_externals(sbox):
+ "upgrade with file externals"
+
+ sbox.build()
+ replace_sbox_with_tarfile(sbox, 'upgrade_file_externals.tar.bz2')
+ svntest.main.run_svnadmin('setuuid', sbox.repo_dir,
+ '07146bbd-0b64-4aaf-ab70-cd76a0df2d41')
+
+ expected_output = svntest.verify.RegexOutput(b'r2 committed.*')
+ svntest.actions.run_and_verify_svnmucc(expected_output, [],
+ '-m', 'r2',
+ 'propset', 'svn:externals',
+ '^/A/B/E EX\n^/A/mu muX',
+ sbox.repo_url + '/A/B/F')
+
+ expected_output = svntest.verify.RegexOutput(b'r3 committed.*')
+ svntest.actions.run_and_verify_svnmucc(expected_output, [],
+ '-m', 'r3',
+ 'propset', 'svn:externals',
+ '^/A/B/F FX\n^/A/B/lambda lambdaX',
+ sbox.repo_url + '/A/C')
+
+ expected_output = svntest.verify.RegexOutput(b'r4 committed.*')
+ svntest.actions.run_and_verify_svnmucc(expected_output, [],
+ '-m', 'r4',
+ 'propset', 'pname1', 'pvalue1',
+ sbox.repo_url + '/A/mu',
+ 'propset', 'pname2', 'pvalue2',
+ sbox.repo_url + '/A/B/lambda',
+ 'propset', 'pname3', 'pvalue3',
+ sbox.repo_url + '/A/B/E/alpha')
+
+ svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'relocate',
+ 'file:///tmp/repo', sbox.repo_url,
+ sbox.wc_dir)
+
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ 'A/mu' : Item(status=' U'),
+ 'A/B/lambda' : Item(status=' U'),
+ 'A/B/E/alpha' : Item(status=' U'),
+ 'A/C/FX/EX/alpha' : Item(status=' U'),
+ 'A/C/FX/muX' : Item(status=' U'),
+ 'A/C/lambdaX' : Item(status=' U'),
+ 'A/B/F/EX/alpha' : Item(status=' U'),
+ 'A/B/F/muX' : Item(status=' U'),
+ })
+ svntest.actions.run_and_verify_update(sbox.wc_dir, expected_output,
+ None, None)
+
+ ### simple_property_verify only sees last line of multi-line
+ ### property values such as svn:externals
+ simple_property_verify(sbox.wc_dir, {
+ 'A/mu' : {'pname1' : 'pvalue1' },
+ 'A/B/lambda' : {'pname2' : 'pvalue2' },
+ 'A/B/E/alpha' : {'pname3' : 'pvalue3' },
+ 'A/B/F' : {'svn:externals' : '^/A/mu muX'},
+ 'A/C' : {'svn:externals' : '^/A/B/lambda lambdaX'},
+ 'A/B/F/muX' : {'pname1' : 'pvalue1' },
+ 'A/C/lambdaX' : {'pname2' : 'pvalue2' },
+ })
+
+ simple_property_verify(sbox.ospath('A/C/FX'), {
+ '' : {'svn:externals' : '^/A/mu muX'},
+ 'muX' : {'pname1' : 'pvalue1' },
+ })
+
+ simple_property_verify(sbox.ospath('A/C/FX/EX'), {
+ 'alpha' : {'pname3' : 'pvalue3' },
+ })
+
+@Issue(4035)
+def upgrade_missing_replaced(sbox):
+ "upgrade with missing replaced dir"
+
+ sbox.build(create_wc=False)
+ replace_sbox_with_tarfile(sbox, 'upgrade_missing_replaced.tar.bz2')
+
+ svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir)
+ svntest.main.run_svnadmin('setuuid', sbox.repo_dir,
+ 'd7130b12-92f6-45c9-9217-b9f0472c3fab')
+ svntest.actions.run_and_verify_svn(None, [], 'relocate',
+ 'file:///tmp/repo', sbox.repo_url,
+ sbox.wc_dir)
+
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ 'A/B/E' : Item(status=' ', treeconflict='C',
+ prev_verb='Restored'),
+ 'A/B/E/alpha' : Item(status=' ', treeconflict='A'),
+ 'A/B/E/beta' : Item(status=' ', treeconflict='A'),
+ })
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ expected_status.tweak('A/B/E', status='! ', treeconflict='C', wc_rev='-',
+ entry_status='R ', entry_rev='1')
+ expected_status.tweak('A/B/E/alpha', 'A/B/E/beta', status='D ')
+
+ # This upgrade installs an INCOMPLETE node in WORKING for E, which makes the
+ # database technically invalid... but we did that for 1.7 and nobody noticed.
+
+ # Pass the old status tree to avoid testing via entries-dump
+ # as fetching the entries crashes on the invalid db state.
+ svntest.actions.run_and_verify_update(sbox.wc_dir, expected_output,
+ None, expected_status)
+
+ svntest.actions.run_and_verify_svn('Reverted.*', [], 'revert', '-R',
+ sbox.wc_dir)
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+  # And verify that the state is now valid in both the entries and status world.
+ svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status)
+
+@Issue(4033)
+def upgrade_not_present_replaced(sbox):
+ "upgrade with not-present replaced nodes"
+
+ sbox.build(create_wc=False)
+ replace_sbox_with_tarfile(sbox, 'upgrade_not_present_replaced.tar.bz2')
+
+ svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir)
+ svntest.main.run_svnadmin('setuuid', sbox.repo_dir,
+ 'd7130b12-92f6-45c9-9217-b9f0472c3fab')
+ svntest.actions.run_and_verify_svn(None, [], 'relocate',
+ 'file:///tmp/repo', sbox.repo_url,
+ sbox.wc_dir)
+
+ expected_output = svntest.wc.State(sbox.wc_dir, {
+ 'A/B/E' : Item(status=' ', treeconflict='C'),
+ 'A/B/E/beta' : Item(status=' ', treeconflict='A'),
+ 'A/B/E/alpha' : Item(status=' ', treeconflict='A'),
+ 'A/B/lambda' : Item(status=' ', treeconflict='C'),
+ })
+ expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ expected_status.tweak('A/B/E', status='R ', treeconflict='C'),
+ expected_status.tweak('A/B/E/beta', status='D '),
+ expected_status.tweak('A/B/E/alpha', status='D '),
+ expected_status.tweak('A/B/lambda', status='R ', treeconflict='C'),
+
+ svntest.actions.run_and_verify_update(sbox.wc_dir, expected_output,
+ None, expected_status)
+
+@Issue(4307)
+def upgrade_from_1_7_conflict(sbox):
+ "upgrade from 1.7 WC with conflict (format 29)"
+
+ sbox.build(create_wc=False)
+ replace_sbox_with_tarfile(sbox, 'upgrade_from_1_7_wc.tar.bz2')
+
+ # The working copy contains a text conflict, and upgrading such
+ # a working copy used to cause a pointless 'upgrade required' error.
+ svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir)
+
+def do_iprops_upgrade(nonrootfile, rootfile, sbox):
+
+ wc_dir = sbox.wc_dir
+
+ replace_sbox_with_tarfile(sbox, nonrootfile)
+ svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'relocate',
+ 'file:///tmp/repo', sbox.repo_url, wc_dir)
+
+ expected_output = []
+ expected_disk = svntest.wc.State('', {
+ 'E' : Item(),
+ 'E/alpha' : Item(contents="This is the file 'alpha'.\n"),
+ 'E/beta' : Item(contents="This is the file 'beta'.\n"),
+ 'F' : Item(),
+ 'lambda' : Item(contents="This is the file 'lambda'.\n"),
+ })
+ expected_status = svntest.wc.State(sbox.wc_dir, {
+ '' : Item(),
+ 'E' : Item(switched='S'),
+ 'E/alpha' : Item(),
+ 'E/beta' : Item(),
+ 'F' : Item(),
+ 'lambda' : Item(),
+ })
+ expected_status.tweak(status=' ', wc_rev=2)
+
+ # No inherited props after upgrade until an update
+ expected_iprops = {}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ wc_dir, expected_iprops, expected_explicit_props)
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.ospath('E'), expected_iprops, expected_explicit_props)
+
+ # Update populates the inherited props
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+ expected_iprops = {sbox.repo_url : {'p' : 'v'},
+ sbox.repo_url + '/A' : {'pA' : 'vA'}}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ wc_dir, expected_iprops, expected_explicit_props)
+
+ expected_iprops = {sbox.repo_url : {'p' : 'v'},
+ sbox.repo_url + '/X' : {'pX' : 'vX'}}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.ospath('E'), expected_iprops, expected_explicit_props)
+
+ # Now try with a repository root working copy
+ replace_sbox_with_tarfile(sbox, rootfile)
+ svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir)
+ svntest.actions.run_and_verify_svn(None, [], 'relocate',
+ 'file:///tmp/repo', sbox.repo_url, wc_dir)
+
+ # Unswitched inherited props available after upgrade
+ expected_iprops = {wc_dir : {'p' : 'v'},
+ sbox.ospath('A') : {'pA' : 'vA'}}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.ospath('A/B'), expected_iprops, expected_explicit_props)
+
+ # Switched inherited props not populated until update after upgrade
+ expected_iprops = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.ospath('A/B/E'), expected_iprops, expected_explicit_props)
+
+ expected_disk = svntest.wc.State('', {
+ 'A' : Item(),
+ 'A/B' : Item(),
+ 'A/B/E' : Item(),
+ })
+ expected_status = svntest.wc.State(sbox.wc_dir, {
+ '' : Item(),
+ 'A' : Item(),
+ 'A/B' : Item(),
+ 'A/B/E' : Item(switched='S'),
+ })
+ expected_status.tweak(status=' ', wc_rev=2)
+ svntest.actions.run_and_verify_update(wc_dir,
+ expected_output,
+ expected_disk,
+ expected_status)
+
+ expected_iprops = {wc_dir : {'p' : 'v'},
+ sbox.ospath('A') : {'pA' : 'vA'}}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.ospath('A/B'), expected_iprops, expected_explicit_props)
+
+ expected_iprops = {sbox.repo_url : {'p' : 'v'},
+ sbox.repo_url + '/X' : {'pX' : 'vX'}}
+ expected_explicit_props = {}
+ svntest.actions.run_and_verify_inherited_prop_xml(
+ sbox.ospath('A/B/E'), expected_iprops, expected_explicit_props)
+
+def iprops_upgrade(sbox):
+ "inherited properties after upgrade from 1.7"
+
+ sbox.build()
+
+ sbox.simple_copy('A', 'X')
+ sbox.simple_propset('p', 'v', '')
+ sbox.simple_propset('pA', 'vA', 'A')
+ sbox.simple_propset('pX', 'vX', 'X')
+ sbox.simple_commit()
+ svntest.main.run_svnadmin('setuuid', sbox.repo_dir,
+ '8f4d0ebe-2ebf-4f62-ad11-804fd88c2382')
+
+ do_iprops_upgrade('iprops_upgrade_nonroot.tar.bz2',
+ 'iprops_upgrade_root.tar.bz2',
+ sbox)
+
+def iprops_upgrade1_6(sbox):
+ "inherited properties after upgrade from 1.6"
+
+ sbox.build()
+
+ sbox.simple_copy('A', 'X')
+ sbox.simple_propset('p', 'v', '')
+ sbox.simple_propset('pA', 'vA', 'A')
+ sbox.simple_propset('pX', 'vX', 'X')
+ sbox.simple_commit()
+ svntest.main.run_svnadmin('setuuid', sbox.repo_dir,
+ '8f4d0ebe-2ebf-4f62-ad11-804fd88c2382')
+
+ do_iprops_upgrade('iprops_upgrade_nonroot1_6.tar.bz2',
+ 'iprops_upgrade_root1_6.tar.bz2',
+ sbox)
+
+def changelist_upgrade_1_6(sbox):
+ "upgrade from 1.6 with changelist"
+
+ sbox.build(create_wc = False)
+ svntest.main.run_svnadmin('setuuid', sbox.repo_dir,
+ 'aa4c97bd-2e1a-4e55-a1e5-3db22cff2673')
+ replace_sbox_with_tarfile(sbox, 'changelist_upgrade_1_6.tar.bz2')
+ svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir)
+
+ exit_code, output, errput = svntest.main.run_svn(None, 'info', sbox.wc_dir,
+ '--depth', 'infinity',
+ '--changelist', 'foo')
+ paths = [x for x in output if x[:6] == 'Path: ']
+ expected_paths = ['Path: %s\n' % sbox.ospath('A/D/gamma')]
+ if paths != expected_paths:
+ raise svntest.Failure("changelist not matched")
+
+
+def upgrade_1_7_dir_external(sbox):
+ "upgrade from 1.7 with dir external"
+
+ sbox.build(create_wc = False)
+ replace_sbox_with_tarfile(sbox, 'upgrade_1_7_dir_external.tar.bz2')
+
+ # This fails for 'make check EXCLUSIVE_WC_LOCKS=1' giving an error:
+ # svn: warning: W200033: sqlite[S5]: database is locked
+ svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir)
+
+@SkipUnless(svntest.wc.python_sqlite_can_read_wc)
+def auto_analyze(sbox):
+ """automatic SQLite ANALYZE"""
+
+ sbox.build(create_wc = False)
+
+ replace_sbox_with_tarfile(sbox, 'wc-without-stat1.tar.bz2')
+ svntest.main.run_svnadmin('setuuid', sbox.repo_dir,
+ '52ec7e4b-e5f0-451d-829f-f05d5571b4ab')
+
+ # Don't use svn to do relocate as that will add the table.
+ svntest.wc.sqlite_exec(sbox.wc_dir,
+ "update repository "
+ "set root ='" + sbox.repo_url + "'")
+ val = svntest.wc.sqlite_stmt(sbox.wc_dir,
+ "select 1 from sqlite_master "
+ "where name = 'sqlite_stat1'")
+ if val != []:
+ raise svntest.Failure("initial state failed")
+
+ # Make working copy read-only (but not wc_dir itself as
+ # svntest.main.chmod_tree will not reset it.)
+ for path, subdirs, files in os.walk(sbox.wc_dir):
+ for d in subdirs:
+ os.chmod(os.path.join(path, d), svntest.main.S_ALL_RX)
+ for f in files:
+ os.chmod(os.path.join(path, f), svntest.main.S_ALL_READ)
+
+ state = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ svntest.actions.run_and_verify_status(sbox.wc_dir, state)
+
+ svntest.main.chmod_tree(sbox.wc_dir, svntest.main.S_ALL_RW,
+ stat.S_IWGRP | stat.S_IWOTH)
+
+ state = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ svntest.actions.run_and_verify_status(sbox.wc_dir, state)
+
+ val = svntest.wc.sqlite_stmt(sbox.wc_dir,
+ "select 1 from sqlite_master "
+ "where name = 'sqlite_stat1'")
+ if val != [(1,)]:
+ raise svntest.Failure("analyze failed")
+
+def upgrade_1_0_with_externals(sbox):
+ "test upgrading 1.0.0 working copy with externals"
+
+ sbox.build(create_wc = False)
+ replace_sbox_with_tarfile(sbox, 'upgrade_1_0_with_externals.tar.bz2')
+
+ url = sbox.repo_url
+
+ # This is non-canonical by the rules of svn_uri_canonicalize, it gets
+ # written into the entries file and upgrade has to canonicalize.
+ non_canonical_url = url[:-1] + '%%%02x' % ord(url[-1])
+ xml_entries_relocate(sbox.wc_dir, 'file:///1.0.0/repos', non_canonical_url)
+
+ externals_propval = 'exdir_G ' + sbox.repo_url + '/A/D/G' + '\n'
+ adm_name = svntest.main.get_admin_name()
+ dir_props_file = os.path.join(sbox.wc_dir, adm_name, 'dir-props')
+ svntest.main.file_write(dir_props_file,
+ ('K 13\n'
+ 'svn:externals\n'
+ 'V %d\n' % len(externals_propval))
+ + externals_propval + '\nEND\n', 'wb')
+
+ # Attempt to use the working copy, this should give an error
+ expected_stderr = wc_is_too_old_regex
+ svntest.actions.run_and_verify_svn(None, expected_stderr,
+ 'info', sbox.wc_dir)
+
+
+ # Now upgrade the working copy
+ svntest.actions.run_and_verify_svn(None, [],
+ 'upgrade', sbox.wc_dir)
+ # And the separate working copy below COPIED or check_format() fails
+ svntest.actions.run_and_verify_svn(None, [],
+ 'upgrade',
+ os.path.join(sbox.wc_dir, 'COPIED', 'G'))
+
+ # Actually check the format number of the upgraded working copy
+ check_format(sbox, get_current_format())
+
+ # Now check the contents of the working copy
+ # #### This working copy is not just a basic tree,
+ # fix with the right data once we get here
+ expected_status = svntest.wc.State(sbox.wc_dir,
+ {
+ '' : Item(status=' M', wc_rev=7),
+ 'B' : Item(status=' ', wc_rev='7'),
+ 'B/mu' : Item(status=' ', wc_rev='7'),
+ 'B/D' : Item(status=' ', wc_rev='7'),
+ 'B/D/H' : Item(status=' ', wc_rev='7'),
+ 'B/D/H/psi' : Item(status=' ', wc_rev='7'),
+ 'B/D/H/omega' : Item(status=' ', wc_rev='7'),
+ 'B/D/H/zeta' : Item(status='MM', wc_rev='7'),
+ 'B/D/H/chi' : Item(status=' ', wc_rev='7'),
+ 'B/D/gamma' : Item(status=' ', wc_rev='9'),
+ 'B/D/G' : Item(status=' ', wc_rev='7'),
+ 'B/D/G/tau' : Item(status=' ', wc_rev='7'),
+ 'B/D/G/rho' : Item(status=' ', wc_rev='7'),
+ 'B/D/G/pi' : Item(status=' ', wc_rev='7'),
+ 'B/B' : Item(status=' ', wc_rev='7'),
+ 'B/B/lambda' : Item(status=' ', wc_rev='7'),
+ 'MKDIR' : Item(status='A ', wc_rev='0'),
+ 'MKDIR/MKDIR' : Item(status='A ', wc_rev='0'),
+ 'A' : Item(status=' ', wc_rev='7'),
+ 'A/B' : Item(status=' ', wc_rev='7'),
+ 'A/B/lambda' : Item(status=' ', wc_rev='7'),
+ 'A/D' : Item(status=' ', wc_rev='7'),
+ 'A/D/G' : Item(status=' ', wc_rev='7'),
+ 'A/D/G/rho' : Item(status=' ', wc_rev='7'),
+ 'A/D/G/pi' : Item(status=' ', wc_rev='7'),
+ 'A/D/G/tau' : Item(status=' ', wc_rev='7'),
+ 'A/D/H' : Item(status=' ', wc_rev='7'),
+ 'A/D/H/psi' : Item(status=' ', wc_rev='7'),
+ 'A/D/H/omega' : Item(status=' ', wc_rev='7'),
+ 'A/D/H/zeta' : Item(status=' ', wc_rev='7'),
+ 'A/D/H/chi' : Item(status=' ', wc_rev='7'),
+ 'A/D/gamma' : Item(status=' ', wc_rev='7'),
+ 'A/mu' : Item(status=' ', wc_rev='7'),
+ 'iota' : Item(status=' ', wc_rev='7'),
+ 'COPIED' : Item(status=' ', wc_rev='10'),
+ 'DELETED' : Item(status='D ', wc_rev='10'),
+ 'exdir_G' : Item(status='X '),
+ })
+ run_and_verify_status_no_server(sbox.wc_dir, expected_status)
+
+########################################################################
+# Run the tests
+
+ # prop states
+ #
+ # .base simple checkout
+ # .base, .revert delete, copy-here
+ # .working add, propset
+ # .base, .working checkout, propset
+ # .base, .revert, .working delete, copy-here, propset
+ # .revert, .working delete, add, propset
+ # .revert delete, add
+ #
+ # 1.3.x (f4)
+ # 1.4.0 (f8, buggy)
+ # 1.4.6 (f8, fixed)
+
+# list all tests here, starting with None:
+test_list = [ None,
+ basic_upgrade,
+ upgrade_with_externals,
+ upgrade_1_5,
+ update_1_5,
+ logs_left_1_5,
+ upgrade_wcprops,
+ basic_upgrade_1_0,
+ # Upgrading from 1.4.0-1.4.5 with specific states fails
+ # See issue #2530
+ x3_1_4_0,
+ x3_1_4_6,
+ x3_1_6_12,
+ missing_dirs,
+ missing_dirs2,
+ delete_and_keep_local,
+ dirs_only_upgrade,
+ delete_in_copy_upgrade,
+ replaced_files,
+ upgrade_with_scheduled_change,
+ tree_replace1,
+ tree_replace2,
+ depth_exclude,
+ depth_exclude_2,
+ add_add_del_del_tc,
+ add_add_x2,
+ upgrade_with_missing_subdir,
+ upgrade_locked,
+ upgrade_file_externals,
+ upgrade_missing_replaced,
+ upgrade_not_present_replaced,
+ upgrade_from_1_7_conflict,
+ iprops_upgrade,
+ iprops_upgrade1_6,
+ changelist_upgrade_1_6,
+ upgrade_1_7_dir_external,
+ auto_analyze,
+ upgrade_1_0_with_externals,
+ ]
+
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
diff --git a/subversion/tests/cmdline/upgrade_tests_data/README b/subversion/tests/cmdline/upgrade_tests_data/README
new file mode 100644
index 0000000..60a0db4
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/README
@@ -0,0 +1,37 @@
+OVERVIEW
+
+This directory contains working copies (and other data) used to
+test Subversion's working copy upgrade process. The files are
+typically stored as a .tar.bz2 in order to hide their "funny"
+format data from Subversion's normal administrative processes.
+
+
+=============================================================================
+
+FORMAT_*
+
+The format_*.tar.bz2 data files each represent a checkout of
+^/trunk/notes/wc-ng at approximately revision 917840. These
+checkouts were performed using specific revisions of Subversion
+in order to produce a .svn administrative area with the required
+format.
+
+SVN_WC__VERSION Subversion revision used
+--------------- ------------------------
+12 (a) 877576
+12 (b) 879401
+13 879813
+14 880116
+15 886350
+16 917838
+
+There are two "format 12" working copies:
+ (a) the wcprop values are stored in .svn/all-wcprops
+ (b) the wcprop values are stored in BASE_NODE.dav_cache
+
+You'll note that ($revision + 1) is the revision where the
+format was bumped, or the wcprops were moved into the
+database. Therefore, these working copies represent the last
+revision of Subversion to use the specific format value.
+
+=============================================================================
diff --git a/subversion/tests/cmdline/upgrade_tests_data/add_add_del_del_tc.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/add_add_del_del_tc.tar.bz2
new file mode 100644
index 0000000..967baac
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/add_add_del_del_tc.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/add_add_x2.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/add_add_x2.tar.bz2
new file mode 100644
index 0000000..e8e7ac4
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/add_add_x2.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/basic_upgrade.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/basic_upgrade.tar.bz2
new file mode 100644
index 0000000..a71a474
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/basic_upgrade.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/changelist_upgrade_1_6.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/changelist_upgrade_1_6.tar.bz2
new file mode 100644
index 0000000..78d3a66
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/changelist_upgrade_1_6.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/delete-in-copy.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/delete-in-copy.tar.bz2
new file mode 100644
index 0000000..2dbf82b
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/delete-in-copy.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/depth_exclude.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/depth_exclude.tar.bz2
new file mode 100644
index 0000000..e11bd89
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/depth_exclude.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/depth_exclude_2.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/depth_exclude_2.tar.bz2
new file mode 100644
index 0000000..965c3a1
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/depth_exclude_2.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/dirs-only.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/dirs-only.tar.bz2
new file mode 100644
index 0000000..64db319
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/dirs-only.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/iprops_upgrade_nonroot.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/iprops_upgrade_nonroot.tar.bz2
new file mode 100644
index 0000000..2e47aaa
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/iprops_upgrade_nonroot.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/iprops_upgrade_nonroot1_6.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/iprops_upgrade_nonroot1_6.tar.bz2
new file mode 100644
index 0000000..820ca5c
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/iprops_upgrade_nonroot1_6.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/iprops_upgrade_root.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/iprops_upgrade_root.tar.bz2
new file mode 100644
index 0000000..11df646
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/iprops_upgrade_root.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/iprops_upgrade_root1_6.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/iprops_upgrade_root1_6.tar.bz2
new file mode 100644
index 0000000..fbb363a
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/iprops_upgrade_root1_6.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/logs_left_1_5.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/logs_left_1_5.tar.bz2
new file mode 100644
index 0000000..9035dcc
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/logs_left_1_5.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/missing-dirs.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/missing-dirs.tar.bz2
new file mode 100644
index 0000000..e758643
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/missing-dirs.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/replaced-files.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/replaced-files.tar.bz2
new file mode 100644
index 0000000..bbcb87a
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/replaced-files.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/tree-replace1.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/tree-replace1.tar.bz2
new file mode 100644
index 0000000..51024ce
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/tree-replace1.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/tree-replace2.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/tree-replace2.tar.bz2
new file mode 100644
index 0000000..82692a9
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/tree-replace2.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/upgrade_1_0.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/upgrade_1_0.tar.bz2
new file mode 100644
index 0000000..dcf46b8
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/upgrade_1_0.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/upgrade_1_0_with_externals.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/upgrade_1_0_with_externals.tar.bz2
new file mode 100644
index 0000000..c3d8da5
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/upgrade_1_0_with_externals.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/upgrade_1_5.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/upgrade_1_5.tar.bz2
new file mode 100644
index 0000000..61706ad
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/upgrade_1_5.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/upgrade_1_7_dir_external.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/upgrade_1_7_dir_external.tar.bz2
new file mode 100644
index 0000000..b9f3f9a
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/upgrade_1_7_dir_external.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/upgrade_absent.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/upgrade_absent.tar.bz2
new file mode 100644
index 0000000..e77896d
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/upgrade_absent.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/upgrade_absent_repos.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/upgrade_absent_repos.tar.bz2
new file mode 100644
index 0000000..16b79e3
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/upgrade_absent_repos.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/upgrade_file_externals.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/upgrade_file_externals.tar.bz2
new file mode 100644
index 0000000..66fd6cb
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/upgrade_file_externals.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/upgrade_from_1_7_wc.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/upgrade_from_1_7_wc.tar.bz2
new file mode 100644
index 0000000..b2d385b
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/upgrade_from_1_7_wc.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/upgrade_locked.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/upgrade_locked.tar.bz2
new file mode 100644
index 0000000..1f0783a
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/upgrade_locked.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/upgrade_missing_replaced.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/upgrade_missing_replaced.tar.bz2
new file mode 100644
index 0000000..a63e6b3
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/upgrade_missing_replaced.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/upgrade_not_present_replaced.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/upgrade_not_present_replaced.tar.bz2
new file mode 100644
index 0000000..bb14297
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/upgrade_not_present_replaced.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/upgrade_wcprops.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/upgrade_wcprops.tar.bz2
new file mode 100644
index 0000000..53eae69
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/upgrade_wcprops.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/upgrade_with_externals.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/upgrade_with_externals.tar.bz2
new file mode 100644
index 0000000..78c0192
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/upgrade_with_externals.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/upgrade_with_scheduled_change.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/upgrade_with_scheduled_change.tar.bz2
new file mode 100644
index 0000000..d978843
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/upgrade_with_scheduled_change.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/wc-3x-1.4.0.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/wc-3x-1.4.0.tar.bz2
new file mode 100644
index 0000000..ba360d7
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/wc-3x-1.4.0.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/wc-3x-1.4.6.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/wc-3x-1.4.6.tar.bz2
new file mode 100644
index 0000000..8782eff
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/wc-3x-1.4.6.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/wc-3x-1.6.12.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/wc-3x-1.6.12.tar.bz2
new file mode 100644
index 0000000..03ec7f5
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/wc-3x-1.6.12.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/wc-delete.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/wc-delete.tar.bz2
new file mode 100644
index 0000000..2ed2388
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/wc-delete.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/upgrade_tests_data/wc-without-stat1.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/wc-without-stat1.tar.bz2
new file mode 100644
index 0000000..56271cd
--- /dev/null
+++ b/subversion/tests/cmdline/upgrade_tests_data/wc-without-stat1.tar.bz2
Binary files differ
diff --git a/subversion/tests/cmdline/wc_tests.py b/subversion/tests/cmdline/wc_tests.py
new file mode 100755
index 0000000..490e981
--- /dev/null
+++ b/subversion/tests/cmdline/wc_tests.py
@@ -0,0 +1,394 @@
+#!/usr/bin/env python
+#
+# wc_tests.py: testing working-copy operations
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+from __future__ import with_statement
+import shutil, stat, re, os, logging
+
+logger = logging.getLogger()
+
+# Our testing module
+import svntest
+from svntest import wc
+
+# (abbreviation)
+Skip = svntest.testcase.Skip_deco
+SkipUnless = svntest.testcase.SkipUnless_deco
+XFail = svntest.testcase.XFail_deco
+Issues = svntest.testcase.Issues_deco
+Issue = svntest.testcase.Issue_deco
+Wimp = svntest.testcase.Wimp_deco
+Item = wc.StateItem
+UnorderedOutput = svntest.verify.UnorderedOutput
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+
+@XFail()
+@Issue(4193)
+@SkipUnless(svntest.main.is_posix_os)
+def status_through_unversioned_symlink(sbox):
+ """file status through unversioned symlink"""
+
+ sbox.build(read_only = True)
+ state = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ os.symlink('A', sbox.ospath('Z'))
+ svntest.actions.run_and_verify_status(sbox.ospath('Z/mu'), state)
+
+@XFail()
+@Issue(4193)
+@SkipUnless(svntest.main.is_posix_os)
+def status_through_versioned_symlink(sbox):
+ """file status through versioned symlink"""
+
+ sbox.build(read_only = True)
+ state = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ os.symlink('A', sbox.ospath('Z'))
+ sbox.simple_add('Z')
+ state.add({'Z': Item(status='A ')})
+ svntest.actions.run_and_verify_status(sbox.ospath('Z/mu'), state)
+
+@XFail()
+@Issue(4193)
+@SkipUnless(svntest.main.is_posix_os)
+def status_with_symlink_in_path(sbox):
+ """file status with not-parent symlink"""
+
+ sbox.build(read_only = True)
+ state = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
+ os.symlink('A', sbox.ospath('Z'))
+ svntest.actions.run_and_verify_status(sbox.ospath('Z/B/lambda'), state)
+
+@XFail()
+@Issue(4193)
+@SkipUnless(svntest.main.is_posix_os)
+def add_through_unversioned_symlink(sbox):
+ """add file through unversioned symlink"""
+
+ sbox.build(read_only = True)
+ os.symlink('A', sbox.ospath('Z'))
+ sbox.simple_append('A/kappa', 'xyz', True)
+ sbox.simple_add('Z/kappa')
+
+@XFail()
+@Issue(4193)
+@SkipUnless(svntest.main.is_posix_os)
+def add_through_versioned_symlink(sbox):
+ """add file through versioned symlink"""
+
+ sbox.build(read_only = True)
+ os.symlink('A', sbox.ospath('Z'))
+ sbox.simple_add('Z')
+ sbox.simple_append('A/kappa', 'xyz', True)
+ sbox.simple_add('Z/kappa')
+
+@XFail()
+@Issue(4193)
+@SkipUnless(svntest.main.is_posix_os)
+def add_with_symlink_in_path(sbox):
+ """add file with not-parent symlink"""
+
+ sbox.build(read_only = True)
+ os.symlink('A', sbox.ospath('Z'))
+ sbox.simple_append('A/B/kappa', 'xyz', True)
+ sbox.simple_add('Z/B/kappa')
+
+def is_posix_os_and_not_root():
+ if not svntest.main.is_posix_os():
+ return False
+ return os.getuid() != 0
+
+@Issue(4118)
+@SkipUnless(is_posix_os_and_not_root)
+def status_with_inaccessible_wc_db(sbox):
+ """inaccessible .svn/wc.db"""
+
+ sbox.build(read_only = True)
+ os.chmod(sbox.ospath(".svn/wc.db"), 0)
+ svntest.actions.run_and_verify_svn(
+ None,
+ r"[^ ]+ E155016: The working copy database at '.*' is corrupt",
+ "st", sbox.wc_dir)
+
+@Issue(4118)
+def status_with_corrupt_wc_db(sbox):
+ """corrupt .svn/wc.db"""
+
+ sbox.build(read_only = True)
+ with open(sbox.ospath(".svn/wc.db"), 'wb') as fd:
+ fd.write(b'\0' * 17)
+ svntest.actions.run_and_verify_svn(
+ None,
+ r"[^ ]+ E155016: The working copy database at '.*' is corrupt",
+ "st", sbox.wc_dir)
+
+@Issue(4118)
+def status_with_zero_length_wc_db(sbox):
+ """zero-length .svn/wc.db"""
+
+ sbox.build(read_only = True)
+ os.close(os.open(sbox.ospath(".svn/wc.db"), os.O_RDWR | os.O_TRUNC))
+ svntest.actions.run_and_verify_svn(
+ None,
+ r"[^ ]+ E200030:", # SVN_ERR_SQLITE_ERROR
+ "st", sbox.wc_dir)
+
+@Issue(4118)
+def status_without_wc_db(sbox):
+ """missing .svn/wc.db"""
+
+ sbox.build(read_only = True)
+ os.remove(sbox.ospath(".svn/wc.db"))
+ svntest.actions.run_and_verify_svn(
+ None,
+ r"[^ ]+ E155016: The working copy database at '.*' is missing",
+ "st", sbox.wc_dir)
+
+@Issue(4118)
+@Skip() # FIXME: Test fails in-tree because it finds the source WC root
+def status_without_wc_db_and_entries(sbox):
+ """missing .svn/wc.db and .svn/entries"""
+
+ sbox.build(read_only = True)
+ os.remove(sbox.ospath(".svn/wc.db"))
+ os.remove(sbox.ospath(".svn/entries"))
+ svntest.actions.run_and_verify_svn2(
+ None,
+ r"[^ ]+ warning: W155007: '.*' is not a working copy",
+ 0, "st", sbox.wc_dir)
+
+@Issue(4118)
+def status_with_missing_wc_db_and_maybe_valid_entries(sbox):
+ """missing .svn/wc.db, maybe valid .svn/entries"""
+
+ sbox.build(read_only = True)
+ with open(sbox.ospath(".svn/entries"), 'ab') as fd:
+ fd.write(b'something\n')
+ os.remove(sbox.ospath(".svn/wc.db"))
+ svntest.actions.run_and_verify_svn(
+ None,
+ r"[^ ]+ E155036:", # SVN_ERR_WC_UPGRADE_REQUIRED
+ "st", sbox.wc_dir)
+
+
+@Issue(4267)
+def cleanup_below_wc_root(sbox):
+ """cleanup from directory below WC root"""
+
+ sbox.build(read_only = True)
+ svntest.actions.lock_admin_dir(sbox.ospath(""), True)
+ svntest.actions.run_and_verify_svn(None, [],
+ "cleanup", sbox.ospath("A"))
+
+@SkipUnless(svntest.main.is_posix_os)
+@Issue(4383)
+def update_through_unversioned_symlink(sbox):
+ """update through unversioned symlink"""
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+ state = svntest.actions.get_virginal_state(wc_dir, 1)
+ symlink = sbox.get_tempname()
+ os.symlink(os.path.abspath(sbox.wc_dir), symlink)
+ expected_output = []
+ expected_disk = []
+ expected_status = []
+ # Subversion 1.8.0 crashes when updating a working copy through a symlink
+ svntest.actions.run_and_verify_update(wc_dir, expected_output,
+ expected_disk, expected_status,
+ [], True, symlink)
+
+@Issue(3549)
+def cleanup_unversioned_items(sbox):
+ """cleanup --remove-unversioned / --remove-ignored"""
+
+ sbox.build(read_only = True)
+ wc_dir = sbox.wc_dir
+
+ # create some unversioned items
+ os.mkdir(sbox.ospath('dir1'))
+ os.mkdir(sbox.ospath('dir2'))
+ contents = "This is an unversioned file\n."
+ svntest.main.file_write(sbox.ospath('dir1/dir1_child1'), contents)
+ svntest.main.file_write(sbox.ospath('dir2/dir2_child1'), contents)
+ os.mkdir(sbox.ospath('dir2/foo_child2'))
+ svntest.main.file_write(sbox.ospath('file_foo'), contents),
+ os.mkdir(sbox.ospath('dir_foo'))
+ svntest.main.file_write(sbox.ospath('dir_foo/foo_child1'), contents)
+ os.mkdir(sbox.ospath('dir_foo/foo_child2'))
+ # a file that matches a default ignore pattern
+ svntest.main.file_write(sbox.ospath('foo.o'), contents)
+
+ # ignore some of the unversioned items
+ sbox.simple_propset('svn:ignore', '*_foo', '.')
+
+ os.chdir(wc_dir)
+
+ expected_output = [
+ ' M .\n',
+ '? dir1\n',
+ '? dir2\n',
+ ]
+ svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output),
+ [], 'status')
+ expected_output += [
+ 'I dir_foo\n',
+ 'I file_foo\n',
+ 'I foo.o\n',
+ ]
+ svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output),
+ [], 'status', '--no-ignore')
+
+ expected_output = [
+ 'D dir1\n',
+ 'D dir2\n',
+ ]
+ svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output),
+ [], 'cleanup', '--remove-unversioned')
+ expected_output = [
+ ' M .\n',
+ 'I dir_foo\n',
+ 'I file_foo\n',
+ 'I foo.o\n',
+ ]
+ svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output),
+ [], 'status', '--no-ignore')
+
+ # remove ignored items, with an empty global-ignores list
+ expected_output = [
+ 'D dir_foo\n',
+ 'D file_foo\n',
+ ]
+ svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output),
+ [], 'cleanup', '--remove-ignored',
+ '--config-option',
+ 'config:miscellany:global-ignores=')
+
+ # the file matching global-ignores should still be present
+ expected_output = [
+ ' M .\n',
+ 'I foo.o\n',
+ ]
+ svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output),
+ [], 'status', '--no-ignore')
+
+ # un-ignore the file matching global ignores, making it unversioned,
+ # and remove it with --remove-unversioned
+ expected_output = [
+ 'D foo.o\n',
+ ]
+ svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output),
+ [], 'cleanup', '--remove-unversioned',
+ '--config-option',
+ 'config:miscellany:global-ignores=')
+ expected_output = [
+ ' M .\n',
+ ]
+ svntest.actions.run_and_verify_svn(expected_output,
+ [], 'status', '--no-ignore')
+
+def cleanup_unversioned_items_in_locked_wc(sbox):
+ """cleanup unversioned items in locked WC should fail"""
+
+ sbox.build(read_only = True)
+
+ contents = "This is an unversioned file\n."
+ svntest.main.file_write(sbox.ospath('unversioned_file'), contents)
+
+ svntest.actions.lock_admin_dir(sbox.ospath(""), True)
+ for option in ['--remove-unversioned', '--remove-ignored']:
+ svntest.actions.run_and_verify_svn(None,
+ "svn: E155004: Working copy locked;.*",
+ "cleanup", option,
+ sbox.ospath(""))
+
+def cleanup_dir_external(sbox):
+ """cleanup --include-externals"""
+
+ sbox.build(read_only = True)
+
+ # configure a directory external
+ sbox.simple_propset("svn:externals", "^/A A_ext", ".")
+ sbox.simple_update()
+
+ svntest.actions.lock_admin_dir(sbox.ospath("A_ext"), True)
+ svntest.actions.run_and_verify_svn(["Performing cleanup on external " +
+ "item at '%s'.\n" % sbox.ospath("A_ext")],
+ [], "cleanup", '--include-externals',
+ sbox.ospath(""))
+
+@Issue(4390)
+def checkout_within_locked_wc(sbox):
+ """checkout within a locked working copy"""
+
+ sbox.build(read_only = True)
+
+ # lock working copy and create outstanding work queue items
+ svntest.actions.lock_admin_dir(sbox.ospath(""), True, True)
+ expected_output = [
+ "A %s\n" % sbox.ospath("nested-wc/alpha"),
+ "A %s\n" % sbox.ospath("nested-wc/beta"),
+ "Checked out revision 1.\n"
+ ]
+ svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output),
+ [], "checkout", sbox.repo_url + '/A/B/E',
+ sbox.ospath("nested-wc"))
+
+
+########################################################################
+# Run the tests
+
+# list all tests here, starting with None:
+test_list = [ None,
+ status_through_unversioned_symlink,
+ status_through_versioned_symlink,
+ status_with_symlink_in_path,
+ add_through_unversioned_symlink,
+ add_through_versioned_symlink,
+ add_with_symlink_in_path,
+ status_with_inaccessible_wc_db,
+ status_with_corrupt_wc_db,
+ status_with_zero_length_wc_db,
+ status_without_wc_db,
+ status_without_wc_db_and_entries,
+ status_with_missing_wc_db_and_maybe_valid_entries,
+ cleanup_below_wc_root,
+ update_through_unversioned_symlink,
+ cleanup_unversioned_items,
+ cleanup_unversioned_items_in_locked_wc,
+ cleanup_dir_external,
+ checkout_within_locked_wc,
+ ]
+
+if __name__ == '__main__':
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/subversion/tests/diacritical.txt b/subversion/tests/diacritical.txt
new file mode 100644
index 0000000..8e6fb0e
--- /dev/null
+++ b/subversion/tests/diacritical.txt
@@ -0,0 +1,41 @@
+-*- coding: utf-8 -*-
+
+This is the source of the test data used by the normalized unicode
+string comparison tests.
+
+
+Whole word: Ṩůḇṽá¸È‘šḯá»á¹‹
+
+Individual letters:
+
+char name NFC UCS-4 NFC UTF-8 NFD UCS-4 NFD UTF-8
+
+Ṩ S with dot above and below \u1E68 \xe1\xb9\xa8 S\u0323\u0307 S\xcc\xa3\xcc\x87
+ů u with ring \u016F \xc5\xaf u\u030A u\xcc\x8a
+ḇ b with macron below \u1E07 \xe1\xb8\x87 b\u0331 b\xcc\xb1
+á¹½ v with tilde \u1E7D \xe1\xb9\xbd v\u0303 v\xcc\x83
+Ḡe with breve and cedilla \u1E1D \xe1\xb8\x9d e\u0327\u0306 e\xcc\xa7\xcc\x86
+È‘ r with double grave \u0211 \xc8\x91 r\u030F r\xcc\x8f
+Å¡ s with caron \u0161 \xc5\xa1 s\u030C s\xcc\x8c
+ḯ i with diaeresis and acute \u1E2F \xe1\xb8\xaf i\u0308\u0301 i\xcc\x88\xcc\x81
+á» o with grave and hook \u1EDD \xe1\xbb\x9d o\u031B\u0300 o\xcc\x9b\xcc\x80
+ṋ n with circumflex below \u1E4B \xe1\xb9\x8b n\u032D n\xcc\xad
+
+Combining diacriticals:
+
+char name UCS-4 UTF-8
+
+ ̇ dot \u0307 \xcc\x87
+ ̣ dot below \u0323 \xcc\xa3
+ ÌŠ ring \u030A \xcc\x8a
+ ̱ macron below \u0331 \xcc\xb1
+ ̃ tilde \u0303 \xcc\x83
+ ̆ breve \u0306 \xcc\x86
+ ̧ cedilla \u0327 \xcc\xa7
+ Ì double grave \u030F \xcc\x8f
+ ̌ caron \u030C \xcc\x8c
+ ̈ diaeresis \u0308 \xcc\x88
+ Ì acute \u0301 \xcc\x81
+ ̀ grave \u0300 \xcc\x80
+ Ì› horn \u031B \xcc\x9b
+ Ì­ circumflex below \u032D \xcc\xad
diff --git a/subversion/tests/greek-tree.txt b/subversion/tests/greek-tree.txt
new file mode 100644
index 0000000..a1c11e8
--- /dev/null
+++ b/subversion/tests/greek-tree.txt
@@ -0,0 +1,53 @@
+
+ A iota
+ _______________________//|\
+ / ______________/ | \_____________
+ mu / | \
+ / | \
+ B C D
+ _____/|\_____ _____________/|\
+ / | \ / | \
+ / | \ / / \___
+ lambda | F / / \
+ E gamma / \
+ / \ / |
+ / \ ________/ |
+ alpha beta / H
+ / _______/|\______
+ / / | \
+ G / | \
+ ________/|\_______ chi psi omega
+ / | \
+ / | \
+ / | \
+ pi rho tau
+
+
+The Greek Tree is the standard Subversion testing tree. Capital
+letters are directories and greek letter names are files. By
+convention, the initial content of each file is a single line, such as
+"This is the file 'iota'.", followed by a newline.
+
+<============================< Same as... >================================>
+
+ /
+ /iota
+ /A/
+ /A/mu
+ /A/B/
+ /A/B/lambda
+ /A/B/E/
+ /A/B/E/alpha
+ /A/B/E/beta
+ /A/B/F/
+ /A/C/
+ /A/D/
+ /A/D/gamma
+ /A/D/H/
+ /A/D/H/chi
+ /A/D/H/psi
+ /A/D/H/omega
+ /A/D/G/
+ /A/D/G/pi
+ /A/D/G/rho
+ /A/D/G/tau
diff --git a/subversion/tests/libsvn_client/client-test.c b/subversion/tests/libsvn_client/client-test.c
new file mode 100644
index 0000000..c4f3448
--- /dev/null
+++ b/subversion/tests/libsvn_client/client-test.c
@@ -0,0 +1,1442 @@
+/*
+ * Regression tests for logic in the libsvn_client library.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+
+
+#define SVN_DEPRECATED
+
+#include <limits.h>
+#include "svn_mergeinfo.h"
+#include "../../libsvn_client/mergeinfo.h"
+#include "../../libsvn_client/client.h"
+#include "svn_pools.h"
+#include "svn_client.h"
+#include "private/svn_client_mtcc.h"
+#include "svn_repos.h"
+#include "svn_subst.h"
+#include "private/svn_sorts_private.h"
+#include "private/svn_wc_private.h"
+#include "svn_props.h"
+#include "svn_hash.h"
+
+#include "../svn_test.h"
+#include "../svn_test_fs.h"
+
+
+/* Create a repository with a filesystem based on OPTS in a subdir NAME,
+ * commit the standard Greek tree as revision 1, and set *REPOS_URL to
+ * the URL we will use to access it.
+ *
+ * ### This always returns a file: URL. We should upgrade this to use the
+ * test suite's specified URL scheme instead. */
+static svn_error_t *
+create_greek_repos(const char **repos_url,
+ const char *name,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_repos_t *repos;
+ svn_revnum_t committed_rev;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+
+ /* Create a filesytem and repository. */
+ SVN_ERR(svn_test__create_repos(
+ &repos, svn_test_data_path(name, pool), opts, pool));
+
+ /* Prepare and commit a txn containing the Greek tree. */
+ SVN_ERR(svn_fs_begin_txn2(&txn, svn_repos_fs(repos), 0 /* rev */,
+ 0 /* flags */, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &committed_rev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(committed_rev));
+
+ SVN_ERR(svn_uri_get_file_url_from_dirent(
+ repos_url, svn_test_data_path(name, pool), pool));
+ return SVN_NO_ERROR;
+}
+
+
+typedef struct mergeinfo_catalog_item {
+ const char *path;
+ const char *unparsed_mergeinfo;
+ svn_boolean_t remains;
+} mergeinfo_catalog_item;
+
+#define MAX_ITEMS 10
+
+static mergeinfo_catalog_item elide_testcases[][MAX_ITEMS] = {
+ { {"/foo", "/bar: 1-4", TRUE},
+ {"/foo/beep/baz", "/bar/beep/baz: 1-4", FALSE},
+ { NULL }},
+ { {"/foo", "/bar: 1-4", TRUE},
+ {"/foo/beep/baz", "/blaa/beep/baz: 1-4", TRUE},
+ { NULL }},
+ { {"/", "/gah: 1-4", TRUE},
+ {"/foo/beep/baz", "/gah/foo/beep/baz: 1-4", FALSE},
+ { NULL }}
+};
+
+static svn_error_t *
+test_elide_mergeinfo_catalog(apr_pool_t *pool)
+{
+ int i;
+ apr_pool_t *iterpool;
+
+ iterpool = svn_pool_create(pool);
+
+ for (i = 0;
+ i < sizeof(elide_testcases) / sizeof(elide_testcases[0]);
+ i++)
+ {
+ svn_mergeinfo_catalog_t mergeinfo_catalog;
+ mergeinfo_catalog_item *item;
+
+ svn_pool_clear(iterpool);
+
+ mergeinfo_catalog = apr_hash_make(iterpool);
+ for (item = elide_testcases[i]; item->path; item++)
+ {
+ svn_mergeinfo_t mergeinfo;
+
+ SVN_ERR(svn_mergeinfo_parse(&mergeinfo, item->unparsed_mergeinfo,
+ iterpool));
+ apr_hash_set(mergeinfo_catalog, item->path, APR_HASH_KEY_STRING,
+ mergeinfo);
+ }
+
+ SVN_ERR(svn_client__elide_mergeinfo_catalog(mergeinfo_catalog,
+ iterpool));
+
+ for (item = elide_testcases[i]; item->path; item++)
+ {
+ apr_hash_t *mergeinfo = apr_hash_get(mergeinfo_catalog, item->path,
+ APR_HASH_KEY_STRING);
+ if (item->remains && !mergeinfo)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Elision for test case #%d incorrectly "
+ "elided '%s'", i, item->path);
+ if (!item->remains && mergeinfo)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Elision for test case #%d failed to "
+ "elide '%s'", i, item->path);
+ }
+ }
+
+ svn_pool_destroy(iterpool);
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_args_to_target_array(apr_pool_t *pool)
+{
+ apr_size_t i;
+ apr_pool_t *iterpool;
+ svn_client_ctx_t *ctx;
+ static struct {
+ const char *input;
+ const char *output; /* NULL means an error is expected. */
+ } const tests[] = {
+ { ".", "" },
+ { ".@BASE", "@BASE" },
+ { "foo///bar", "foo/bar" },
+ { "foo///bar@13", "foo/bar@13" },
+ { "foo///bar@HEAD", "foo/bar@HEAD" },
+ { "foo///bar@{1999-12-31}", "foo/bar@{1999-12-31}" },
+ { "http://a//b////", "http://a/b" },
+ { "http://a///b@27", "http://a/b@27" },
+ { "http://a/b//@COMMITTED", "http://a/b@COMMITTED" },
+ { "foo///bar@1:2", "foo/bar@1:2" },
+ { "foo///bar@baz", "foo/bar@baz" },
+ { "foo///bar@", "foo/bar@" },
+ { "foo///bar///@13", "foo/bar@13" },
+ { "foo///bar@@13", "foo/bar@@13" },
+ { "foo///@bar@HEAD", "foo/@bar@HEAD" },
+ { "foo@///bar", "foo@/bar" },
+ { "foo@HEAD///bar", "foo@HEAD/bar" },
+ };
+
+ SVN_ERR(svn_client_create_context(&ctx, pool));
+
+ iterpool = svn_pool_create(pool);
+
+ for (i = 0; i < sizeof(tests) / sizeof(tests[0]); i++)
+ {
+ const char *input = tests[i].input;
+ const char *expected_output = tests[i].output;
+ apr_array_header_t *targets;
+ apr_getopt_t *os;
+ const int argc = 2;
+ const char *argv[3] = { 0 };
+ apr_status_t apr_err;
+ svn_error_t *err;
+
+ argv[0] = "opt-test";
+ argv[1] = input;
+ argv[2] = NULL;
+
+ apr_err = apr_getopt_init(&os, iterpool, argc, argv);
+ if (apr_err)
+ return svn_error_wrap_apr(apr_err,
+ "Error initializing command line arguments");
+
+ err = svn_client_args_to_target_array2(&targets, os, NULL, ctx, FALSE,
+ iterpool);
+
+ if (expected_output)
+ {
+ const char *actual_output;
+
+ if (err)
+ return err;
+ if (argc - 1 != targets->nelts)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Passed %d target(s) to "
+ "svn_client_args_to_target_array() but "
+ "got %d back.",
+ argc - 1,
+ targets->nelts);
+
+ actual_output = APR_ARRAY_IDX(targets, 0, const char *);
+
+ if (! svn_path_is_canonical(actual_output, iterpool))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Input '%s' to "
+ "svn_client_args_to_target_array() should "
+ "have returned a canonical path but "
+ "'%s' is not.",
+ input,
+ actual_output);
+
+ if (strcmp(expected_output, actual_output) != 0)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Input '%s' to "
+ "svn_client_args_to_target_array() should "
+ "have returned '%s' but returned '%s'.",
+ input,
+ expected_output,
+ actual_output);
+ }
+ else
+ {
+ if (! err)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Unexpected success in passing '%s' "
+ "to svn_client_args_to_target_array().",
+ input);
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* A helper function for test_patch().
+ * It compares a patched or reject file against expected content using the
+ * specified EOL. It also deletes the file if the check was successful. */
+static svn_error_t *
+check_patch_result(const char *path, const char **expected_lines, const char *eol,
+ int num_expected_lines, apr_pool_t *pool)
+{
+ svn_stream_t *stream;
+ apr_pool_t *iterpool;
+ int i;
+
+ SVN_ERR(svn_stream_open_readonly(&stream, path, pool, pool));
+ i = 0;
+ iterpool = svn_pool_create(pool);
+ while (TRUE)
+ {
+ svn_boolean_t eof;
+ svn_stringbuf_t *line;
+
+ svn_pool_clear(iterpool);
+
+ SVN_ERR(svn_stream_readline(stream, &line, eol, &eof, pool));
+ if (i < num_expected_lines)
+ if (strcmp(expected_lines[i++], line->data) != 0)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "%s line %d didn't match the expected line "
+ "(strlen=%d vs strlen=%d)", path, i,
+ (int)strlen(expected_lines[i-1]),
+ (int)strlen(line->data));
+
+ if (eof)
+ break;
+ }
+ svn_pool_destroy(iterpool);
+
+ SVN_TEST_ASSERT(i == num_expected_lines);
+ SVN_ERR(svn_stream_close(stream));
+ SVN_ERR(svn_io_remove_file2(path, FALSE, pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* A baton for the patch collection function. */
+struct patch_collection_baton
+{
+ apr_hash_t *patched_tempfiles;
+ apr_hash_t *reject_tempfiles;
+ apr_pool_t *state_pool;
+};
+
+/* Collect all the patch information we're interested in. */
+static svn_error_t *
+patch_collection_func(void *baton,
+ svn_boolean_t *filtered,
+ const char *canon_path_from_patchfile,
+ const char *patch_abspath,
+ const char *reject_abspath,
+ apr_pool_t *scratch_pool)
+{
+ struct patch_collection_baton *pcb = baton;
+
+ if (patch_abspath)
+ apr_hash_set(pcb->patched_tempfiles,
+ apr_pstrdup(pcb->state_pool, canon_path_from_patchfile),
+ APR_HASH_KEY_STRING,
+ apr_pstrdup(pcb->state_pool, patch_abspath));
+
+ if (reject_abspath)
+ apr_hash_set(pcb->reject_tempfiles,
+ apr_pstrdup(pcb->state_pool, canon_path_from_patchfile),
+ APR_HASH_KEY_STRING,
+ apr_pstrdup(pcb->state_pool, reject_abspath));
+
+ if (filtered)
+ *filtered = FALSE;
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_patch(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ const char *repos_url;
+ const char *wc_path;
+ svn_opt_revision_t rev;
+ svn_opt_revision_t peg_rev;
+ svn_client_ctx_t *ctx;
+ apr_file_t *patch_file;
+ struct patch_collection_baton pcb;
+ const char *patch_file_path;
+ const char *patched_tempfile_path;
+ const char *reject_tempfile_path;
+ const char *key;
+ int i;
+#define NL APR_EOL_STR
+#define UNIDIFF_LINES 7
+ const char *unidiff_patch[UNIDIFF_LINES] = {
+ "Index: A/D/gamma" NL,
+ "===================================================================\n",
+ "--- A/D/gamma\t(revision 1)" NL,
+ "+++ A/D/gamma\t(working copy)" NL,
+ "@@ -1 +1 @@" NL,
+ "-This is really the file 'gamma'." NL,
+ "+It is really the file 'gamma'." NL
+ };
+#define EXPECTED_GAMMA_LINES 1
+ const char *expected_gamma[EXPECTED_GAMMA_LINES] = {
+ "This is the file 'gamma'."
+ };
+#define EXPECTED_GAMMA_REJECT_LINES 5
+ const char *expected_gamma_reject[EXPECTED_GAMMA_REJECT_LINES] = {
+ "--- A/D/gamma",
+ "+++ A/D/gamma",
+ "@@ -1,1 +1,1 @@",
+ "-This is really the file 'gamma'.",
+ "+It is really the file 'gamma'."
+ };
+
+ /* Create a filesytem and repository containing the Greek tree. */
+ SVN_ERR(create_greek_repos(&repos_url, "test-patch-repos", opts, pool));
+
+ /* Check out the HEAD revision */
+
+ /* Put wc inside an unversioned directory. Checking out a 1.7 wc
+ directly inside a 1.6 wc doesn't work reliably, an intervening
+ unversioned directory prevents the problems. */
+ wc_path = svn_test_data_path("test-patch", pool);
+ SVN_ERR(svn_io_make_dir_recursively(wc_path, pool));
+ svn_test_add_dir_cleanup(wc_path);
+
+ wc_path = svn_dirent_join(wc_path, "test-patch-wc", pool);
+ SVN_ERR(svn_io_remove_dir2(wc_path, TRUE, NULL, NULL, pool));
+ rev.kind = svn_opt_revision_head;
+ peg_rev.kind = svn_opt_revision_unspecified;
+ SVN_ERR(svn_client_create_context(&ctx, pool));
+ SVN_ERR(svn_client_checkout3(NULL, repos_url, wc_path,
+ &peg_rev, &rev, svn_depth_infinity,
+ TRUE, FALSE, ctx, pool));
+
+ /* Create the patch file. */
+ patch_file_path = svn_dirent_join_many(
+ pool, svn_test_data_path("test-patch", pool),
+ "test-patch.diff", SVN_VA_NULL);
+ SVN_ERR(svn_io_file_open(&patch_file, patch_file_path,
+ (APR_READ | APR_WRITE | APR_CREATE | APR_TRUNCATE),
+ APR_OS_DEFAULT, pool));
+ for (i = 0; i < UNIDIFF_LINES; i++)
+ {
+ apr_size_t len = strlen(unidiff_patch[i]);
+ SVN_ERR(svn_io_file_write(patch_file, unidiff_patch[i], &len, pool));
+ SVN_TEST_ASSERT(len == strlen(unidiff_patch[i]));
+ }
+ SVN_ERR(svn_io_file_flush(patch_file, pool));
+
+ /* Apply the patch. */
+ pcb.patched_tempfiles = apr_hash_make(pool);
+ pcb.reject_tempfiles = apr_hash_make(pool);
+ pcb.state_pool = pool;
+ SVN_ERR(svn_client_patch(patch_file_path, wc_path, FALSE, 0, FALSE,
+ FALSE, FALSE, patch_collection_func, &pcb,
+ ctx, pool));
+ SVN_ERR(svn_io_file_close(patch_file, pool));
+
+ SVN_TEST_ASSERT(apr_hash_count(pcb.patched_tempfiles) == 1);
+ key = "A/D/gamma";
+ patched_tempfile_path = apr_hash_get(pcb.patched_tempfiles, key,
+ APR_HASH_KEY_STRING);
+ SVN_ERR(check_patch_result(patched_tempfile_path, expected_gamma, "\n",
+ EXPECTED_GAMMA_LINES, pool));
+ SVN_TEST_ASSERT(apr_hash_count(pcb.reject_tempfiles) == 1);
+ key = "A/D/gamma";
+ reject_tempfile_path = apr_hash_get(pcb.reject_tempfiles, key,
+ APR_HASH_KEY_STRING);
+ SVN_ERR(check_patch_result(reject_tempfile_path, expected_gamma_reject,
+ APR_EOL_STR, EXPECTED_GAMMA_REJECT_LINES, pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_wc_add_scenarios(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ const char *repos_url;
+ const char *wc_path;
+ svn_revnum_t committed_rev;
+ svn_client_ctx_t *ctx;
+ svn_opt_revision_t rev, peg_rev;
+ const char *new_dir_path;
+ const char *ex_file_path;
+ const char *ex_dir_path;
+ const char *ex2_dir_path;
+
+ /* Create a filesytem and repository containing the Greek tree. */
+ SVN_ERR(create_greek_repos(&repos_url, "test-wc-add-repos", opts, pool));
+ committed_rev = 1;
+
+ wc_path = svn_test_data_path("test-wc-add", pool);
+
+ /* Remove old test data from the previous run */
+ SVN_ERR(svn_io_remove_dir2(wc_path, TRUE, NULL, NULL, pool));
+
+ SVN_ERR(svn_io_make_dir_recursively(wc_path, pool));
+ svn_test_add_dir_cleanup(wc_path);
+
+ rev.kind = svn_opt_revision_head;
+ peg_rev.kind = svn_opt_revision_unspecified;
+ SVN_ERR(svn_client_create_context(&ctx, pool));
+ /* Checkout greek tree as wc_path */
+ SVN_ERR(svn_client_checkout3(NULL, repos_url, wc_path, &peg_rev, &rev,
+ svn_depth_infinity, FALSE, FALSE, ctx, pool));
+
+ /* Now checkout again as wc_path/NEW */
+ new_dir_path = svn_dirent_join(wc_path, "NEW", pool);
+ SVN_ERR(svn_client_checkout3(NULL, repos_url, new_dir_path, &peg_rev, &rev,
+ svn_depth_infinity, FALSE, FALSE,
+ ctx, pool));
+
+ ex_dir_path = svn_dirent_join(wc_path, "NEW_add", pool);
+ ex2_dir_path = svn_dirent_join(wc_path, "NEW_add2", pool);
+ SVN_ERR(svn_io_dir_make(ex_dir_path, APR_OS_DEFAULT, pool));
+ SVN_ERR(svn_io_dir_make(ex2_dir_path, APR_OS_DEFAULT, pool));
+
+ SVN_ERR(svn_io_open_uniquely_named(NULL, &ex_file_path, wc_path, "new_file",
+ NULL, svn_io_file_del_none, pool, pool));
+
+ /* Now use an access baton to do some add operations like an old client
+ might do */
+ {
+ svn_wc_adm_access_t *adm_access, *adm2;
+ svn_boolean_t locked;
+
+ SVN_ERR(svn_wc_adm_open3(&adm_access, NULL, wc_path, TRUE, -1, NULL, NULL,
+ pool));
+
+ /* ### The above svn_wc_adm_open3 creates a new svn_wc__db_t
+ ### instance. The svn_wc_add3 below doesn't work while the
+ ### original svn_wc__db_t created by svn_client_create_context
+ ### remains open. Closing the wc-context gets around the
+ ### problem but is obviously a hack. */
+ SVN_ERR(svn_wc_context_destroy(ctx->wc_ctx));
+ SVN_ERR(svn_wc_context_create(&ctx->wc_ctx, NULL, pool, pool));
+
+ /* Fix up copy as add with history */
+ SVN_ERR(svn_wc_add3(new_dir_path, adm_access, svn_depth_infinity,
+ repos_url, committed_rev, NULL, NULL, NULL, NULL,
+ pool));
+
+ /* Verify if the paths are locked now */
+ SVN_ERR(svn_wc_locked(&locked, wc_path, pool));
+ SVN_TEST_ASSERT(locked && "wc_path locked");
+ SVN_ERR(svn_wc_locked(&locked, new_dir_path, pool));
+ SVN_TEST_ASSERT(locked && "new_path locked");
+
+ SVN_ERR(svn_wc_adm_retrieve(&adm2, adm_access, new_dir_path, pool));
+ SVN_TEST_ASSERT(adm2 != NULL && "available in set");
+
+ /* Add local (new) file */
+ SVN_ERR(svn_wc_add3(ex_file_path, adm_access, svn_depth_unknown, NULL,
+ SVN_INVALID_REVNUM, NULL, NULL, NULL, NULL, pool));
+
+ /* Add local (new) directory */
+ SVN_ERR(svn_wc_add3(ex_dir_path, adm_access, svn_depth_infinity, NULL,
+ SVN_INVALID_REVNUM, NULL, NULL, NULL, NULL, pool));
+
+ SVN_ERR(svn_wc_adm_retrieve(&adm2, adm_access, ex_dir_path, pool));
+ SVN_TEST_ASSERT(adm2 != NULL && "available in set");
+
+ /* Add empty directory with copy trail */
+ SVN_ERR(svn_wc_add3(ex2_dir_path, adm_access, svn_depth_infinity,
+ repos_url, committed_rev, NULL, NULL, NULL, NULL,
+ pool));
+
+ SVN_ERR(svn_wc_adm_retrieve(&adm2, adm_access, ex2_dir_path, pool));
+ SVN_TEST_ASSERT(adm2 != NULL && "available in set");
+
+ SVN_ERR(svn_wc_adm_close2(adm_access, pool));
+ }
+
+ /* Some simple status calls to verify that the paths are added */
+ {
+ svn_wc_status3_t *status;
+
+ SVN_ERR(svn_wc_status3(&status, ctx->wc_ctx, new_dir_path, pool, pool));
+
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_added
+ && status->copied
+ && !strcmp(status->repos_relpath, "NEW"));
+
+ SVN_ERR(svn_wc_status3(&status, ctx->wc_ctx, ex_file_path, pool, pool));
+
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_added
+ && !status->copied);
+
+ SVN_ERR(svn_wc_status3(&status, ctx->wc_ctx, ex_dir_path, pool, pool));
+
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_added
+ && !status->copied);
+
+ SVN_ERR(svn_wc_status3(&status, ctx->wc_ctx, ex2_dir_path, pool, pool));
+
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_added
+ && status->copied);
+ }
+
+ /* ### Add a commit? */
+
+ return SVN_NO_ERROR;
+}
+
+/* This is for issue #3234. */
+static svn_error_t *
+test_copy_crash(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ apr_array_header_t *sources;
+ svn_opt_revision_t rev;
+ svn_client_copy_source_t source;
+ svn_client_ctx_t *ctx;
+ const char *dest;
+ const char *repos_url;
+
+ /* Create a filesytem and repository containing the Greek tree. */
+ SVN_ERR(create_greek_repos(&repos_url, "test-copy-crash", opts, pool));
+
+ SVN_ERR(svn_client_create_context(&ctx, pool));
+
+ rev.kind = svn_opt_revision_head;
+ dest = svn_path_url_add_component2(repos_url, "A/E", pool);
+ source.path = svn_path_url_add_component2(repos_url, "A/B", pool);
+ source.revision = &rev;
+ source.peg_revision = &rev;
+ sources = apr_array_make(pool, 1, sizeof(svn_client_copy_source_t *));
+ APR_ARRAY_PUSH(sources, svn_client_copy_source_t *) = &source;
+
+ /* This shouldn't crash. */
+ SVN_ERR(svn_client_copy6(sources, dest, FALSE, TRUE, FALSE, NULL, NULL, NULL,
+ ctx, pool));
+
+ return SVN_NO_ERROR;
+}
+
+#ifdef TEST16K_ADD
+static svn_error_t *
+test_16k_add(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_opt_revision_t rev;
+ svn_client_ctx_t *ctx;
+ const char *repos_url;
+ const char *wc_path;
+ svn_opt_revision_t peg_rev;
+ apr_array_header_t *targets;
+ apr_pool_t *iterpool = svn_pool_create(pool);
+ int i;
+
+ /* Create a filesytem and repository containing the Greek tree. */
+ SVN_ERR(create_greek_repos(&repos_url, "test-16k-repos", opts, pool));
+
+ /* Check out the HEAD revision */
+
+ /* Put wc inside an unversioned directory. Checking out a 1.7 wc
+ directly inside a 1.6 wc doesn't work reliably, an intervening
+ unversioned directory prevents the problems. */
+ wc_path = svn_test_data_path("test-16k", pool);
+ SVN_ERR(svn_io_make_dir_recursively(wc_path, pool));
+ svn_test_add_dir_cleanup(wc_path);
+
+ wc_path = svn_dirent_join(wc_path, "trunk", pool);
+ SVN_ERR(svn_io_remove_dir2(wc_path, TRUE, NULL, NULL, pool));
+ rev.kind = svn_opt_revision_head;
+ peg_rev.kind = svn_opt_revision_unspecified;
+ SVN_ERR(svn_client_create_context(&ctx, pool));
+ SVN_ERR(svn_client_checkout3(NULL, repos_url, wc_path,
+ &peg_rev, &rev, svn_depth_infinity,
+ TRUE, FALSE, ctx, pool));
+
+ for (i = 0; i < 16384; i++)
+ {
+ const char *path;
+
+ svn_pool_clear(iterpool);
+
+ SVN_ERR(svn_io_open_unique_file3(NULL, &path, wc_path,
+ svn_io_file_del_none,
+ iterpool, iterpool));
+
+ SVN_ERR(svn_client_add5(path, svn_depth_unknown, FALSE, FALSE, FALSE,
+ FALSE, ctx, iterpool));
+ }
+
+ targets = apr_array_make(pool, 1, sizeof(const char *));
+ APR_ARRAY_PUSH(targets, const char *) = wc_path;
+ svn_pool_clear(iterpool);
+
+ SVN_ERR(svn_client_commit5(targets, svn_depth_infinity, FALSE, FALSE, TRUE,
+ NULL, NULL, NULL, NULL, ctx, iterpool));
+
+
+ return SVN_NO_ERROR;
+}
+#endif
+
+static svn_error_t *
+test_youngest_common_ancestor(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ const char *repos_url;
+ const char *repos_uuid = "fake-uuid"; /* the functions we call don't care */
+ svn_client_ctx_t *ctx;
+ svn_opt_revision_t head_rev = { svn_opt_revision_head, { 0 } };
+ svn_opt_revision_t zero_rev = { svn_opt_revision_number, { 0 } };
+ svn_client_copy_source_t source;
+ apr_array_header_t *sources;
+ const char *dest;
+ svn_client__pathrev_t *yc_ancestor;
+
+ /* Create a filesytem and repository containing the Greek tree. */
+ SVN_ERR(create_greek_repos(&repos_url, "test-youngest-common-ancestor", opts, pool));
+
+ SVN_ERR(svn_client_create_context(&ctx, pool));
+
+ /* Copy a file into dir 'A', keeping its own basename. */
+ sources = apr_array_make(pool, 1, sizeof(svn_client_copy_source_t *));
+ source.path = svn_path_url_add_component2(repos_url, "iota", pool);
+ source.peg_revision = &head_rev;
+ source.revision = &head_rev;
+ APR_ARRAY_PUSH(sources, svn_client_copy_source_t *) = &source;
+ dest = svn_path_url_add_component2(repos_url, "A", pool);
+ SVN_ERR(svn_client_copy6(sources, dest, TRUE /* copy_as_child */,
+ FALSE /* make_parents */,
+ FALSE /* ignore_externals */,
+ NULL, NULL, NULL, ctx, pool));
+
+ /* Test: YCA(iota@2, A/iota@2) is iota@1. */
+ SVN_ERR(svn_client__get_youngest_common_ancestor(
+ &yc_ancestor,
+ svn_client__pathrev_create_with_relpath(
+ repos_url, repos_uuid, 2, "iota", pool),
+ svn_client__pathrev_create_with_relpath(
+ repos_url, repos_uuid, 2, "A/iota", pool),
+ NULL, ctx, pool, pool));
+ SVN_TEST_STRING_ASSERT(svn_client__pathrev_relpath(yc_ancestor, pool),
+ "iota");
+ SVN_TEST_ASSERT(yc_ancestor->rev == 1);
+
+ /* Copy the root directory (at revision 0) into A as 'ROOT'. */
+ sources = apr_array_make(pool, 1, sizeof(svn_client_copy_source_t *));
+ source.path = repos_url;
+ source.peg_revision = &zero_rev;
+ source.revision = &zero_rev;
+ APR_ARRAY_PUSH(sources, svn_client_copy_source_t *) = &source;
+ dest = svn_path_url_add_component2(repos_url, "A/ROOT", pool);
+ SVN_ERR(svn_client_copy6(sources, dest, FALSE /* copy_as_child */,
+ FALSE /* make_parents */,
+ FALSE /* ignore_externals */,
+ NULL, NULL, NULL, ctx, pool));
+
+ /* Test: YCA(''@0, A/ROOT@3) is ''@0 (handled as a special case). */
+ SVN_ERR(svn_client__get_youngest_common_ancestor(
+ &yc_ancestor,
+ svn_client__pathrev_create_with_relpath(
+ repos_url, repos_uuid, 0, "", pool),
+ svn_client__pathrev_create_with_relpath(
+ repos_url, repos_uuid, 3, "A/ROOT", pool),
+ NULL, ctx, pool, pool));
+ SVN_TEST_STRING_ASSERT(svn_client__pathrev_relpath(yc_ancestor, pool), "");
+ SVN_TEST_ASSERT(yc_ancestor->rev == 0);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_foreign_repos_copy(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_opt_revision_t rev;
+ svn_opt_revision_t peg_rev;
+ const char *repos_url;
+ const char *repos2_url;
+ const char *wc_path;
+ svn_client_ctx_t *ctx;
+/* Create a filesytem and repository containing the Greek tree. */
+ SVN_ERR(create_greek_repos(&repos_url, "foreign-copy1", opts, pool));
+ SVN_ERR(create_greek_repos(&repos2_url, "foreign-copy2", opts, pool));
+
+ wc_path = svn_test_data_path("test-foreign-repos-copy", pool);
+
+ /* Remove old test data from the previous run */
+ SVN_ERR(svn_io_remove_dir2(wc_path, TRUE, NULL, NULL, pool));
+
+ SVN_ERR(svn_io_make_dir_recursively(wc_path, pool));
+ svn_test_add_dir_cleanup(wc_path);
+
+ wc_path = svn_dirent_join(wc_path, "foreign-wc", pool);
+
+ rev.kind = svn_opt_revision_head;
+ peg_rev.kind = svn_opt_revision_unspecified;
+ SVN_ERR(svn_client_create_context(&ctx, pool));
+ /* Checkout greek tree as wc_path */
+ SVN_ERR(svn_client_checkout3(NULL, repos_url, wc_path, &peg_rev, &rev,
+ svn_depth_infinity, FALSE, FALSE, ctx, pool));
+
+ SVN_ERR(svn_client__copy_foreign(svn_path_url_add_component2(repos2_url, "A",
+ pool),
+ svn_dirent_join(wc_path, "A-copied", pool),
+ &peg_rev, &rev, svn_depth_infinity, FALSE, FALSE,
+ ctx, pool));
+
+
+ SVN_ERR(svn_client__copy_foreign(svn_path_url_add_component2(repos2_url,
+ "iota",
+ pool),
+ svn_dirent_join(wc_path, "iota-copied", pool),
+ &peg_rev, &rev, svn_depth_infinity, FALSE, FALSE,
+ ctx, pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_suggest_mergesources(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ const char *repos_url;
+ svn_client_ctx_t *ctx;
+ svn_client__mtcc_t *mtcc;
+ apr_array_header_t *results;
+ svn_opt_revision_t peg_rev;
+ svn_opt_revision_t head_rev;
+ const char *wc_path;
+
+ peg_rev.kind = svn_opt_revision_unspecified;
+
+ /* Create a filesytem and repository containing the Greek tree. */
+ SVN_ERR(create_greek_repos(&repos_url, "mergesources", opts, pool));
+
+ SVN_ERR(svn_client_create_context(&ctx, pool));
+
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, -1, ctx, pool, pool));
+ SVN_ERR(svn_client__mtcc_add_copy("A", 1, "AA", mtcc, pool));
+ SVN_ERR(svn_client__mtcc_commit(NULL, NULL, NULL, mtcc, pool));
+
+ SVN_ERR(svn_client_suggest_merge_sources(
+ &results,
+ svn_path_url_add_component2(repos_url, "AA", pool),
+ &peg_rev, ctx, pool));
+ SVN_TEST_ASSERT(results != NULL);
+ SVN_TEST_ASSERT(results->nelts >= 1);
+ SVN_TEST_STRING_ASSERT(APR_ARRAY_IDX(results, 0, const char *),
+ svn_path_url_add_component2(repos_url, "A", pool));
+
+ /* And now test the same thing with a minimal working copy */
+ wc_path = svn_test_data_path("mergesources-wc", pool);
+ svn_test_add_dir_cleanup(wc_path);
+ SVN_ERR(svn_io_remove_dir2(wc_path, TRUE, NULL, NULL, pool));
+
+ head_rev.kind = svn_opt_revision_head;
+ SVN_ERR(svn_client_checkout3(NULL,
+ svn_path_url_add_component2(repos_url, "AA", pool),
+ wc_path,
+ &head_rev, &head_rev, svn_depth_empty,
+ FALSE, FALSE, ctx, pool));
+
+
+ SVN_ERR(svn_client_suggest_merge_sources(&results,
+ wc_path,
+ &peg_rev, ctx, pool));
+ SVN_TEST_ASSERT(results != NULL);
+ SVN_TEST_ASSERT(results->nelts >= 1);
+ SVN_TEST_STRING_ASSERT(APR_ARRAY_IDX(results, 0, const char *),
+ svn_path_url_add_component2(repos_url, "A", pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static char
+status_to_char(enum svn_wc_status_kind status)
+{
+
+ switch (status)
+ {
+ case svn_wc_status_none: return '.';
+ case svn_wc_status_unversioned: return '?';
+ case svn_wc_status_normal: return '-';
+ case svn_wc_status_added: return 'A';
+ case svn_wc_status_missing: return '!';
+ case svn_wc_status_incomplete: return ':';
+ case svn_wc_status_deleted: return 'D';
+ case svn_wc_status_replaced: return 'R';
+ case svn_wc_status_modified: return 'M';
+ case svn_wc_status_merged: return 'G';
+ case svn_wc_status_conflicted: return 'C';
+ case svn_wc_status_obstructed: return '~';
+ case svn_wc_status_ignored: return 'I';
+ case svn_wc_status_external: return 'X';
+ default: return '*';
+ }
+}
+
+static int
+compare_status_paths(const void *a, const void *b)
+{
+ const svn_client_status_t *const *const sta = a;
+ const svn_client_status_t *const *const stb = b;
+ return svn_path_compare_paths((*sta)->local_abspath, (*stb)->local_abspath);
+}
+
+static svn_error_t *
+remote_only_status_receiver(void *baton, const char *path,
+ const svn_client_status_t *status,
+ apr_pool_t *scratch_pool)
+{
+ apr_array_header_t *results = baton;
+ APR_ARRAY_PUSH(results, const svn_client_status_t *) =
+ svn_client_status_dup(status, results->pool);
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_remote_only_status(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ static const struct remote_only_status_result
+ {
+ const char *relpath;
+ svn_revnum_t revision;
+ enum svn_wc_status_kind node_status;
+ enum svn_wc_status_kind text_status;
+ enum svn_wc_status_kind prop_status;
+ svn_revnum_t ood_changed_rev;
+ enum svn_wc_status_kind repos_node_status;
+ enum svn_wc_status_kind repos_text_status;
+ enum svn_wc_status_kind repos_prop_status;
+ } expected[] = {
+ { ".",
+ +1, svn_wc_status_normal, svn_wc_status_normal, svn_wc_status_none,
+ +2, svn_wc_status_modified, svn_wc_status_modified, svn_wc_status_none },
+ { "B",
+ +1, svn_wc_status_normal, svn_wc_status_normal, svn_wc_status_none,
+ +2, svn_wc_status_none, svn_wc_status_none, svn_wc_status_none },
+ { "C",
+ +1, svn_wc_status_normal, svn_wc_status_normal, svn_wc_status_none,
+ +2, svn_wc_status_deleted, svn_wc_status_none, svn_wc_status_none },
+ { "D",
+ +1, svn_wc_status_normal, svn_wc_status_normal, svn_wc_status_none,
+ +2, svn_wc_status_none, svn_wc_status_none, svn_wc_status_none },
+ { "epsilon",
+ -1, svn_wc_status_none, svn_wc_status_none, svn_wc_status_none,
+ +2, svn_wc_status_added, svn_wc_status_modified, svn_wc_status_none },
+ { "mu",
+ +1, svn_wc_status_normal, svn_wc_status_normal, svn_wc_status_none,
+ +2, svn_wc_status_modified, svn_wc_status_normal, svn_wc_status_none },
+
+ { NULL }
+ };
+
+ const char *repos_url;
+ const char *wc_path;
+ const char *local_path;
+ apr_file_t *local_file;
+ svn_client_ctx_t *ctx;
+ svn_client__mtcc_t *mtcc;
+ svn_opt_revision_t rev;
+ svn_revnum_t result_rev;
+ svn_string_t *contents = svn_string_create("modified\n", pool);
+ svn_stream_t *contentstream = svn_stream_from_string(contents, pool);
+ const struct remote_only_status_result *ex;
+ svn_stream_mark_t *start;
+ apr_array_header_t *targets;
+ apr_array_header_t *results;
+ int i;
+
+ SVN_ERR(svn_stream_mark(contentstream, &start, pool));
+
+ /* Create a filesystem and repository containing the Greek tree. */
+ SVN_ERR(create_greek_repos(&repos_url, "test-remote-only-status", opts, pool));
+
+ SVN_ERR(svn_client_create_context(&ctx, pool));
+
+ /* Make some modifications in the repository, creating revision 2. */
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, -1, ctx, pool, pool));
+ SVN_ERR(svn_stream_seek(contentstream, start));
+ SVN_ERR(svn_client__mtcc_add_add_file("A/epsilon", contentstream, NULL,
+ mtcc, pool));
+ SVN_ERR(svn_stream_seek(contentstream, start));
+ SVN_ERR(svn_client__mtcc_add_update_file("A/mu",
+ contentstream, NULL, NULL, NULL,
+ mtcc, pool));
+ SVN_ERR(svn_stream_seek(contentstream, start));
+ SVN_ERR(svn_client__mtcc_add_add_file("A/D/epsilon", contentstream, NULL,
+ mtcc, pool));
+ SVN_ERR(svn_stream_seek(contentstream, start));
+ SVN_ERR(svn_client__mtcc_add_update_file("A/B/lambda",
+ contentstream, NULL, NULL, NULL,
+ mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_delete("A/C", mtcc, pool));
+ SVN_ERR(svn_client__mtcc_commit(NULL, NULL, NULL, mtcc, pool));
+
+ /* Check out a sparse root @r1 of the repository */
+ wc_path = svn_test_data_path("test-remote-only-status-wc", pool);
+ svn_test_add_dir_cleanup(wc_path);
+ SVN_ERR(svn_io_remove_dir2(wc_path, TRUE, NULL, NULL, pool));
+
+ rev.kind = svn_opt_revision_number;
+ rev.value.number = 1;
+ SVN_ERR(svn_client_checkout3(NULL,
+ apr_pstrcat(pool, repos_url, "/A", SVN_VA_NULL),
+ wc_path, &rev, &rev, svn_depth_immediates,
+ FALSE, FALSE, ctx, pool));
+
+ /* Add a local file; this is a double-check to make sure that
+ remote-only status ignores local changes. */
+ local_path = svn_dirent_join(wc_path, "zeta", pool);
+ SVN_ERR(svn_io_file_create_empty(local_path, pool));
+ SVN_ERR(svn_client_add5(local_path, svn_depth_unknown,
+ FALSE, FALSE, FALSE, FALSE,
+ ctx, pool));
+
+ /* Replace a local dir */
+ local_path = svn_dirent_join(wc_path, "B", pool);
+ targets = apr_array_make(pool, 1, sizeof(const char*));
+ APR_ARRAY_PUSH(targets, const char*) = local_path;
+ SVN_ERR(svn_client_delete4(targets, FALSE, FALSE, NULL, NULL, NULL,
+ ctx, pool));
+ SVN_ERR(svn_client_mkdir4(targets, FALSE, NULL, NULL, NULL,
+ ctx, pool));
+
+ /* Modify a local dir's props */
+ local_path = svn_dirent_join(wc_path, "D", pool);
+ targets = apr_array_make(pool, 1, sizeof(const char*));
+ APR_ARRAY_PUSH(targets, const char*) = local_path;
+ SVN_ERR(svn_client_propset_local("prop", contents, targets,
+ svn_depth_empty, FALSE, NULL,
+ ctx, pool));
+
+ /* Modify a local file's contents */
+ local_path = svn_dirent_join(wc_path, "mu", pool);
+ SVN_ERR(svn_io_file_open(&local_file, local_path,
+ APR_FOPEN_WRITE | APR_FOPEN_TRUNCATE,
+ 0, pool));
+ SVN_ERR(svn_io_file_write_full(local_file,
+ contents->data, contents->len,
+ NULL, pool));
+ SVN_ERR(svn_io_file_close(local_file, pool));
+
+ /* Run the remote-only status. */
+ results = apr_array_make(pool, 3, sizeof(const svn_client_status_t *));
+ rev.kind = svn_opt_revision_head;
+ SVN_ERR(svn_client_status6(
+ &result_rev, ctx, wc_path, &rev, svn_depth_unknown,
+ TRUE, TRUE, FALSE, FALSE, FALSE, FALSE, NULL,
+ remote_only_status_receiver, results, pool));
+
+ SVN_TEST_ASSERT(result_rev == 2);
+
+ /* Compare the number of results with the expected results */
+ for (i = 0, ex = expected; ex->relpath; ++ex, ++i)
+ ;
+ SVN_TEST_ASSERT(results->nelts == i);
+
+ if (opts->verbose)
+ svn_sort__array(results, compare_status_paths);
+
+ for (i = 0; i < results->nelts; ++i)
+ {
+ const svn_client_status_t *st =
+ APR_ARRAY_IDX(results, i, const svn_client_status_t *);
+
+ const char *relpath =
+ svn_dirent_skip_ancestor(wc_path, st->local_abspath);
+ if (!relpath)
+ relpath = st->local_abspath;
+ if (!*relpath)
+ relpath = ".";
+
+ for (ex = expected; ex->relpath; ++ex)
+ {
+ if (0 == strcmp(relpath, ex->relpath))
+ break;
+ }
+ SVN_TEST_ASSERT(ex->relpath != NULL);
+
+ if (opts->verbose)
+ printf("%c%c%c %2ld %c%c%c %2ld %s\n",
+ status_to_char(st->node_status),
+ status_to_char(st->text_status),
+ status_to_char(st->prop_status),
+ (long)st->revision,
+ status_to_char(st->repos_node_status),
+ status_to_char(st->repos_text_status),
+ status_to_char(st->repos_prop_status),
+ (long)st->ood_changed_rev,
+ relpath);
+
+ SVN_TEST_ASSERT(st->revision == ex->revision);
+ SVN_TEST_ASSERT(st->ood_changed_rev == ex->ood_changed_rev);
+ SVN_TEST_ASSERT(st->node_status == ex->node_status);
+ SVN_TEST_ASSERT(st->repos_node_status == ex->repos_node_status);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_copy_pin_externals(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_opt_revision_t rev;
+ svn_opt_revision_t peg_rev;
+ const char *repos_url;
+ const char *A_url;
+ const char *A_copy_url;
+ const char *wc_path;
+ svn_client_ctx_t *ctx;
+ const svn_string_t *propval;
+ apr_hash_t *externals_to_pin;
+ apr_array_header_t *external_items;
+ apr_array_header_t *copy_sources;
+ svn_wc_external_item2_t items[6];
+ svn_client_copy_source_t copy_source;
+ apr_hash_t *props;
+ apr_array_header_t *pinned_externals_descs;
+ apr_array_header_t *pinned_externals;
+ int i;
+ int num_tested_externals;
+ svn_stringbuf_t *externals_test_prop;
+ struct pin_externals_test_data {
+ const char *src_external_desc;
+ const char *expected_dst_external_desc;
+ } pin_externals_test_data[] = {
+ { "^/A/D/gamma B/gamma", "^/A/D/gamma@2 B/gamma" },
+ { "-r1 ^/A/D/G C/exdir_G", "-r1 ^/A/D/G C/exdir_G" },
+ { "^/A/D/H@1 C/exdir_H", "^/A/D/H@1 C/exdir_H" },
+ { "^/A/D/H C/exdir_H2", "^/A/D/H@2 C/exdir_H2" },
+ { "-r1 ^/A/B D/z/y/z/blah", "-r1 ^/A/B@2 D/z/y/z/blah" } ,
+ { "-r1 ^/A/D@2 exdir_D", "-r1 ^/A/D@2 exdir_D" },
+ /* Dated revisions should retain their date strings exactly. */
+ { "-r{1970-01-01T00:00} ^/A/C 70s", "-r{1970-01-01T00:00} ^/A/C@2 70s"},
+ { "-r{2004-02-23} ^/svn 1.0", "-r{2004-02-23} ^/svn 1.0"},
+ { NULL },
+ };
+
+ /* Create a filesystem and repository containing the Greek tree. */
+ SVN_ERR(create_greek_repos(&repos_url, "pin-externals", opts, pool));
+
+ wc_path = svn_test_data_path("pin-externals-working-copy", pool);
+
+ /* Remove old test data from the previous run */
+ SVN_ERR(svn_io_remove_dir2(wc_path, TRUE, NULL, NULL, pool));
+
+ SVN_ERR(svn_io_make_dir_recursively(wc_path, pool));
+ svn_test_add_dir_cleanup(wc_path);
+
+ rev.kind = svn_opt_revision_head;
+ peg_rev.kind = svn_opt_revision_unspecified;
+ SVN_ERR(svn_client_create_context(&ctx, pool));
+
+ /* Configure some externals on ^/A */
+ i = 0;
+ externals_test_prop = svn_stringbuf_create_empty(pool);
+ while (pin_externals_test_data[i].src_external_desc)
+ {
+ svn_stringbuf_appendcstr(externals_test_prop,
+ pin_externals_test_data[i].src_external_desc);
+ svn_stringbuf_appendbyte(externals_test_prop, '\n');
+ i++;
+ }
+ propval = svn_string_create_from_buf(externals_test_prop, pool);
+ A_url = apr_pstrcat(pool, repos_url, "/A", SVN_VA_NULL);
+ SVN_ERR(svn_client_propset_remote(SVN_PROP_EXTERNALS, propval,
+ A_url, TRUE, 1, NULL,
+ NULL, NULL, ctx, pool));
+
+ /* Set up parameters for pinning some externals. */
+ externals_to_pin = apr_hash_make(pool);
+
+ items[0].url = "^/A/D/gamma";
+ items[0].target_dir = "B/gamma";
+ items[1].url = "^/A/B";
+ items[1].target_dir = "D/z/y/z/blah";
+ items[2].url = "^/A/D/H";
+ items[2].target_dir = "C/exdir_H2";
+ items[3].url= "^/A/D";
+ items[3].target_dir= "exdir_D";
+ items[4].url = "^/A/C";
+ items[4].target_dir = "70s";
+ /* Also add an entry which doesn't match any actual definition. */
+ items[5].url = "^/this/does/not/exist";
+ items[5].target_dir = "in/test/data";
+
+ external_items = apr_array_make(pool, 2, sizeof(svn_wc_external_item2_t *));
+ for (i = 0; i < sizeof(items) / sizeof(items[0]); i++)
+ APR_ARRAY_PUSH(external_items, svn_wc_external_item2_t *) = &items[i];
+ svn_hash_sets(externals_to_pin, A_url, external_items);
+
+ /* Copy ^/A to ^/A_copy, pinning two non-pinned externals. */
+ copy_source.path = A_url;
+ copy_source.revision = &rev;
+ copy_source.peg_revision = &peg_rev;
+ copy_sources = apr_array_make(pool, 1, sizeof(svn_client_copy_source_t *));
+ APR_ARRAY_PUSH(copy_sources, svn_client_copy_source_t *) = &copy_source;
+ A_copy_url = apr_pstrcat(pool, repos_url, "/A_copy", SVN_VA_NULL);
+ SVN_ERR(svn_client_copy7(copy_sources, A_copy_url, FALSE, FALSE,
+ FALSE, FALSE, TRUE, externals_to_pin,
+ NULL, NULL, NULL, ctx, pool));
+
+ /* Verify that externals were pinned as expected. */
+ SVN_ERR(svn_client_propget5(&props, NULL, SVN_PROP_EXTERNALS,
+ A_copy_url, &peg_rev, &rev, NULL,
+ svn_depth_empty, NULL, ctx, pool, pool));
+ propval = svn_hash_gets(props, A_copy_url);
+ SVN_TEST_ASSERT(propval);
+
+ /* Test the unparsed representation of copied externals descriptions. */
+ pinned_externals_descs = svn_cstring_split(propval->data, "\n", FALSE, pool);
+ for (i = 0; i < pinned_externals_descs->nelts; i++)
+ {
+ const char *externals_desc;
+ const char *expected_desc;
+
+ externals_desc = APR_ARRAY_IDX(pinned_externals_descs, i, const char *);
+ expected_desc = pin_externals_test_data[i].expected_dst_external_desc;
+ SVN_TEST_STRING_ASSERT(externals_desc, expected_desc);
+ }
+ /* Ensure all test cases were tested. */
+ SVN_TEST_ASSERT(i == (sizeof(pin_externals_test_data) /
+ sizeof(pin_externals_test_data[0]) - 1));
+
+ SVN_ERR(svn_wc_parse_externals_description3(&pinned_externals, A_copy_url,
+ propval->data, TRUE, pool));
+
+ /* For completeness, test the parsed representation, too */
+ num_tested_externals = 0;
+ for (i = 0; i < pinned_externals->nelts; i++)
+ {
+ svn_wc_external_item2_t *item;
+
+ item = APR_ARRAY_IDX(pinned_externals, i, svn_wc_external_item2_t *);
+ if (strcmp(item->url, "^/A/D/gamma") == 0)
+ {
+ SVN_TEST_STRING_ASSERT(item->target_dir, "B/gamma");
+ /* Pinned to r2. */
+ SVN_TEST_ASSERT(item->revision.kind == svn_opt_revision_number);
+ SVN_TEST_ASSERT(item->revision.value.number == 2);
+ SVN_TEST_ASSERT(item->peg_revision.kind == svn_opt_revision_number);
+ SVN_TEST_ASSERT(item->peg_revision.value.number == 2);
+ num_tested_externals++;
+ }
+ else if (strcmp(item->url, "^/A/D/G") == 0)
+ {
+ SVN_TEST_STRING_ASSERT(item->target_dir, "C/exdir_G");
+ /* Not pinned. */
+ SVN_TEST_ASSERT(item->revision.kind == svn_opt_revision_number);
+ SVN_TEST_ASSERT(item->revision.value.number == 1);
+ SVN_TEST_ASSERT(item->peg_revision.kind == svn_opt_revision_head);
+ num_tested_externals++;
+ }
+ else if (strcmp(item->url, "^/A/D/H") == 0)
+ {
+ if (strcmp(item->target_dir, "C/exdir_H") == 0)
+ {
+ /* Was already pinned to r1. */
+ SVN_TEST_ASSERT(item->revision.kind == svn_opt_revision_number);
+ SVN_TEST_ASSERT(item->revision.value.number == 1);
+ SVN_TEST_ASSERT(item->peg_revision.kind ==
+ svn_opt_revision_number);
+ SVN_TEST_ASSERT(item->peg_revision.value.number == 1);
+ num_tested_externals++;
+ }
+ else if (strcmp(item->target_dir, "C/exdir_H2") == 0)
+ {
+ /* Pinned to r2. */
+ SVN_TEST_ASSERT(item->revision.kind == svn_opt_revision_number);
+ SVN_TEST_ASSERT(item->revision.value.number == 2);
+ SVN_TEST_ASSERT(item->peg_revision.kind ==
+ svn_opt_revision_number);
+ SVN_TEST_ASSERT(item->peg_revision.value.number == 2);
+ num_tested_externals++;
+ }
+ else
+ SVN_TEST_ASSERT(FALSE); /* unknown external */
+ }
+ else if (strcmp(item->url, "^/A/B") == 0)
+ {
+ SVN_TEST_STRING_ASSERT(item->target_dir, "D/z/y/z/blah");
+ /* Pinned to r2. */
+ SVN_TEST_ASSERT(item->revision.kind == svn_opt_revision_number);
+ SVN_TEST_ASSERT(item->revision.value.number == 1);
+ SVN_TEST_ASSERT(item->peg_revision.kind == svn_opt_revision_number);
+ SVN_TEST_ASSERT(item->peg_revision.value.number == 2);
+ num_tested_externals++;
+ }
+ else if (strcmp(item->url, "^/A/D") == 0)
+ {
+ SVN_TEST_STRING_ASSERT(item->target_dir, "exdir_D");
+ /* Pinned to r2. */
+ SVN_TEST_ASSERT(item->revision.kind == svn_opt_revision_number);
+ SVN_TEST_ASSERT(item->revision.value.number == 1);
+ SVN_TEST_ASSERT(item->peg_revision.kind == svn_opt_revision_number);
+ SVN_TEST_ASSERT(item->peg_revision.value.number == 2);
+ num_tested_externals++;
+ }
+ else if (strcmp(item->url, "^/A/C") == 0)
+ {
+ SVN_TEST_STRING_ASSERT(item->target_dir, "70s");
+ /* Pinned to r2. */
+ SVN_TEST_ASSERT(item->revision.kind == svn_opt_revision_date);
+ /* Don't bother testing the exact date value here. */
+ SVN_TEST_ASSERT(item->peg_revision.kind == svn_opt_revision_number);
+ SVN_TEST_ASSERT(item->peg_revision.value.number == 2);
+ num_tested_externals++;
+ }
+ else if (strcmp(item->url, "^/svn") == 0)
+ {
+ SVN_TEST_STRING_ASSERT(item->target_dir, "1.0");
+ /* Was not in externals_to_pin, and its operative revision was a date. */
+ SVN_TEST_ASSERT(item->revision.kind == svn_opt_revision_date);
+ /* Don't bother testing the exact date value here. */
+ SVN_TEST_ASSERT(item->peg_revision.kind == svn_opt_revision_head);
+ num_tested_externals++;
+ }
+ else
+ SVN_TEST_ASSERT(FALSE); /* unknown URL */
+ }
+
+ /* Ensure all test cases were tested. */
+ SVN_TEST_ASSERT(num_tested_externals == (sizeof(pin_externals_test_data) /
+ sizeof(pin_externals_test_data[0])
+ - 1));
+
+ return SVN_NO_ERROR;
+}
+
+/* issue #4560 */
+static svn_error_t *
+test_copy_pin_externals_select_subtree(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_opt_revision_t rev;
+ svn_opt_revision_t peg_rev;
+ const char *repos_url;
+ const char *A_copy_url;
+ const char *B_url;
+ const char *wc_path;
+ svn_client_ctx_t *ctx;
+ apr_hash_t *externals_to_pin;
+ apr_array_header_t *external_items;
+ apr_array_header_t *copy_sources;
+ svn_wc_external_item2_t item;
+ svn_client_copy_source_t copy_source;
+ apr_hash_t *props;
+ int i;
+ struct test_data {
+ const char *subtree_relpath;
+ const char *src_external_desc;
+ const char *expected_dst_external_desc;
+ } test_data[] = {
+ /* Note: these externals definitions contain extra whitespace on
+ purpose, to test that the pinning logic doesn't make
+ whitespace-only changes to values that aren't pinned. */
+
+ /* External on A/B will be pinned. */
+ { "B", "^/A/D/gamma gamma-ext", "^/A/D/gamma@3 gamma-ext" },
+
+ /* External on A/D won't be pinned. */
+ { "D", "^/A/B/F F-ext", "^/A/B/F F-ext" } ,
+
+ { NULL },
+ };
+
+ /* Create a filesystem and repository containing the Greek tree. */
+ SVN_ERR(create_greek_repos(&repos_url, "pin-externals-select-subtree",
+ opts, pool));
+
+ wc_path = svn_test_data_path("pin-externals-select-subtree-wc", pool);
+
+ /* Remove old test data from the previous run */
+ SVN_ERR(svn_io_remove_dir2(wc_path, TRUE, NULL, NULL, pool));
+
+ SVN_ERR(svn_io_make_dir_recursively(wc_path, pool));
+ svn_test_add_dir_cleanup(wc_path);
+
+ rev.kind = svn_opt_revision_head;
+ peg_rev.kind = svn_opt_revision_unspecified;
+ SVN_ERR(svn_client_create_context(&ctx, pool));
+
+ /* Configure externals. */
+ i = 0;
+ while (test_data[i].subtree_relpath)
+ {
+ const char *subtree_relpath;
+ const char *url;
+ const svn_string_t *propval;
+
+ subtree_relpath = test_data[i].subtree_relpath;
+ propval = svn_string_create(test_data[i].src_external_desc, pool);
+
+ url = apr_pstrcat(pool, repos_url, "/A/", subtree_relpath, SVN_VA_NULL);
+ SVN_ERR(svn_client_propset_remote(SVN_PROP_EXTERNALS, propval,
+ url, TRUE, 1, NULL,
+ NULL, NULL, ctx, pool));
+ i++;
+ }
+
+ /* Set up parameters for pinning externals on A/B. */
+ externals_to_pin = apr_hash_make(pool);
+
+ item.url = "^/A/D/gamma";
+ item.target_dir = "gamma-ext";
+
+ external_items = apr_array_make(pool, 2, sizeof(svn_wc_external_item2_t *));
+ APR_ARRAY_PUSH(external_items, svn_wc_external_item2_t *) = &item;
+ B_url = apr_pstrcat(pool, repos_url, "/A/B", SVN_VA_NULL);
+ svn_hash_sets(externals_to_pin, B_url, external_items);
+
+ /* Copy ^/A to ^/A_copy, pinning externals on ^/A/B. */
+ copy_source.path = apr_pstrcat(pool, repos_url, "/A", SVN_VA_NULL);
+ copy_source.revision = &rev;
+ copy_source.peg_revision = &peg_rev;
+ copy_sources = apr_array_make(pool, 1, sizeof(svn_client_copy_source_t *));
+ APR_ARRAY_PUSH(copy_sources, svn_client_copy_source_t *) = &copy_source;
+ A_copy_url = apr_pstrcat(pool, repos_url, "/A_copy", SVN_VA_NULL);
+ SVN_ERR(svn_client_copy7(copy_sources, A_copy_url, FALSE, FALSE,
+ FALSE, FALSE, TRUE, externals_to_pin,
+ NULL, NULL, NULL, ctx, pool));
+
+ /* Verify that externals were pinned as expected. */
+ i = 0;
+ while (test_data[i].subtree_relpath)
+ {
+ const char *subtree_relpath;
+ const char *url;
+ const svn_string_t *propval;
+ svn_stringbuf_t *externals_desc;
+ const char *expected_desc;
+
+ subtree_relpath = test_data[i].subtree_relpath;
+ url = apr_pstrcat(pool, A_copy_url, "/", subtree_relpath, SVN_VA_NULL);
+
+ SVN_ERR(svn_client_propget5(&props, NULL, SVN_PROP_EXTERNALS,
+ url, &peg_rev, &rev, NULL,
+ svn_depth_empty, NULL, ctx, pool, pool));
+ propval = svn_hash_gets(props, url);
+ SVN_TEST_ASSERT(propval);
+ externals_desc = svn_stringbuf_create(propval->data, pool);
+ svn_stringbuf_strip_whitespace(externals_desc);
+ expected_desc = test_data[i].expected_dst_external_desc;
+ SVN_TEST_STRING_ASSERT(externals_desc->data, expected_desc);
+
+ i++;
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* ========================================================================== */
+
+
+static int max_threads = 3;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_elide_mergeinfo_catalog,
+ "test svn_client__elide_mergeinfo_catalog"),
+ SVN_TEST_PASS2(test_args_to_target_array,
+ "test svn_client_args_to_target_array"),
+ SVN_TEST_OPTS_PASS(test_wc_add_scenarios, "test svn_wc_add3 scenarios"),
+ SVN_TEST_OPTS_PASS(test_foreign_repos_copy, "test foreign repository copy"),
+ SVN_TEST_OPTS_PASS(test_patch, "test svn_client_patch"),
+ SVN_TEST_OPTS_PASS(test_copy_crash, "test a crash in svn_client_copy5"),
+#ifdef TEST16K_ADD
+ SVN_TEST_OPTS_PASS(test_16k_add, "test adding 16k files"),
+#endif
+ SVN_TEST_OPTS_PASS(test_youngest_common_ancestor, "test youngest_common_ancestor"),
+ SVN_TEST_OPTS_PASS(test_suggest_mergesources,
+ "test svn_client_suggest_merge_sources"),
+ SVN_TEST_OPTS_PASS(test_remote_only_status,
+ "test svn_client_status6 with ignore_local_mods"),
+ SVN_TEST_OPTS_PASS(test_copy_pin_externals,
+ "test svn_client_copy7 with externals_to_pin"),
+ SVN_TEST_OPTS_PASS(test_copy_pin_externals_select_subtree,
+ "pin externals on selected subtrees only"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_client/conflicts-test.c b/subversion/tests/libsvn_client/conflicts-test.c
new file mode 100644
index 0000000..0bcb464
--- /dev/null
+++ b/subversion/tests/libsvn_client/conflicts-test.c
@@ -0,0 +1,5275 @@
+/*
+ * Regression tests for the conflict resolver in the libsvn_client library.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+
+
+#define SVN_DEPRECATED
+
+#include "svn_client.h"
+#include "svn_dirent_uri.h"
+#include "svn_hash.h"
+#include "svn_props.h"
+
+#include "../svn_test.h"
+#include "../svn_test_fs.h"
+#include "../libsvn_wc/utils.h"
+
+struct status_baton
+{
+ svn_client_status_t *status;
+ apr_pool_t *result_pool;
+};
+
+/* Implements svn_client_status_func_t */
+static svn_error_t *
+status_func(void *baton, const char *path,
+ const svn_client_status_t *status,
+ apr_pool_t *scratch_pool)
+{
+ struct status_baton *sb = baton;
+
+ sb->status = svn_client_status_dup(status, sb->result_pool);
+
+ return SVN_NO_ERROR;
+}
+
+struct info_baton
+{
+ svn_client_info2_t *info;
+ apr_pool_t *result_pool;
+};
+
+/* Implements svn_client_info_receiver2_t */
+static svn_error_t *
+info_func(void *baton, const char *abspath_or_url,
+ const svn_client_info2_t *info,
+ apr_pool_t *scratch_pool)
+{
+ struct info_baton *ib = baton;
+
+ ib->info = svn_client_info2_dup(info, ib->result_pool);
+
+ return SVN_NO_ERROR;
+}
+
+/* A helper function which checks offered conflict resolution options. */
+static svn_error_t *
+assert_conflict_options(const apr_array_header_t *actual,
+ const svn_client_conflict_option_id_t *expected,
+ apr_pool_t *pool)
+{
+ svn_stringbuf_t *actual_str = svn_stringbuf_create_empty(pool);
+ svn_stringbuf_t *expected_str = svn_stringbuf_create_empty(pool);
+ int i;
+
+ for (i = 0; i < actual->nelts; i++)
+ {
+ svn_client_conflict_option_t *opt;
+ svn_client_conflict_option_id_t id;
+
+ opt = APR_ARRAY_IDX(actual, i, svn_client_conflict_option_t *);
+
+ if (i > 0)
+ svn_stringbuf_appendcstr(actual_str, ", ");
+
+ id = svn_client_conflict_option_get_id(opt);
+ svn_stringbuf_appendcstr(actual_str, apr_itoa(pool, id));
+ }
+
+ for (i = 0; expected[i] >= 0; i++)
+ {
+ if (i > 0)
+ svn_stringbuf_appendcstr(expected_str, ", ");
+
+ svn_stringbuf_appendcstr(expected_str, apr_itoa(pool, expected[i]));
+ }
+
+ SVN_TEST_STRING_ASSERT(actual_str->data, expected_str->data);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+assert_tree_conflict_options(svn_client_conflict_t *conflict,
+ svn_client_ctx_t *ctx,
+ const svn_client_conflict_option_id_t *expected,
+ apr_pool_t *pool)
+{
+ apr_array_header_t *actual;
+
+ SVN_ERR(svn_client_conflict_tree_get_resolution_options(&actual, conflict,
+ ctx, pool, pool));
+ SVN_ERR(assert_conflict_options(actual, expected, pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+assert_prop_conflict_options(svn_client_conflict_t *conflict,
+ svn_client_ctx_t *ctx,
+ const svn_client_conflict_option_id_t *expected,
+ apr_pool_t *pool)
+{
+ apr_array_header_t *actual;
+
+ SVN_ERR(svn_client_conflict_prop_get_resolution_options(&actual, conflict,
+ ctx, pool, pool));
+ SVN_ERR(assert_conflict_options(actual, expected, pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+assert_text_conflict_options(svn_client_conflict_t *conflict,
+ svn_client_ctx_t *ctx,
+ const svn_client_conflict_option_id_t *expected,
+ apr_pool_t *pool)
+{
+ apr_array_header_t *actual;
+
+ SVN_ERR(svn_client_conflict_text_get_resolution_options(&actual, conflict,
+ ctx, pool, pool));
+ SVN_ERR(assert_conflict_options(actual, expected, pool));
+
+ return SVN_NO_ERROR;
+}
+
+/*
+ * The following tests verify resolution of "incoming file add vs.
+ * local file obstruction upon merge" tree conflicts.
+ */
+
+/* Some paths we'll care about. */
+static const char *trunk_path = "A";
+static const char *branch_path = "A_branch";
+static const char *branch2_path = "A_branch2";
+static const char *new_file_name = "newfile.txt";
+static const char *new_file_name_branch = "newfile-on-branch.txt";
+static const char *deleted_file_name = "mu";
+static const char *deleted_dir_name = "B";
+static const char *deleted_dir_child = "lambda";
+static const char *new_dir_name = "newdir";
+
+/* File property content. */
+static const char *propval_trunk = "This is a property on the trunk.";
+static const char *propval_branch = "This is a property on the branch.";
+static const char *propval_different = "This is a different property value.";
+
+/* File content. */
+static const char *modified_file_content =
+ "This is a modified file\n";
+static const char *modified_file_on_branch_content =
+ "This is a modified file on the branch\n";
+static const char *added_file_on_branch_content =
+ "This is a file added on the branch\n";
+static const char *modified_file_in_working_copy_content =
+ "This is a modified file in the working copy\n";
+
+/* A helper function which prepares a working copy for the tests below. */
+static svn_error_t *
+create_wc_with_file_add_vs_file_add_merge_conflict(svn_test__sandbox_t *b,
+ svn_boolean_t do_switch)
+{
+ static const char *new_file_path;
+ svn_client_ctx_t *ctx;
+ static const char *trunk_url;
+ svn_opt_revision_t opt_rev;
+ svn_client_status_t *status;
+ struct status_baton sb;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t tree_conflicted;
+
+ SVN_ERR(sbox_add_and_commit_greek_tree(b));
+
+ /* Create a branch of node "A". */
+ SVN_ERR(sbox_wc_copy(b, trunk_path, branch_path));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ /* Add new files on trunk and the branch which occupy the same path
+ * but have different content and properties. */
+ new_file_path = svn_relpath_join(trunk_path, new_file_name, b->pool);
+ SVN_ERR(sbox_file_write(b, new_file_path,
+ "This is a new file on the trunk\n"));
+ SVN_ERR(sbox_wc_add(b, new_file_path));
+ SVN_ERR(sbox_wc_propset(b, "prop", propval_trunk, new_file_path));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ new_file_path = svn_relpath_join(branch_path, new_file_name, b->pool);
+ SVN_ERR(sbox_file_write(b, new_file_path,
+ /* NB: Ensure that the file content's length
+ * differs between the two branches! Tests are
+ * run with sleep for timestamps disabled. */
+ "This is a new file on the branch\n"));
+ SVN_ERR(sbox_wc_add(b, new_file_path));
+ SVN_ERR(sbox_wc_propset(b, "prop", propval_branch, new_file_path));
+
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+
+ opt_rev.kind = svn_opt_revision_head;
+ opt_rev.value.number = SVN_INVALID_REVNUM;
+ trunk_url = apr_pstrcat(b->pool, b->repos_url, "/", trunk_path, SVN_VA_NULL);
+
+ if (do_switch)
+ {
+ svn_revnum_t result_rev;
+
+ /* This should raise an "incoming add vs local add" conflict. */
+ SVN_ERR(svn_client_switch3(&result_rev, sbox_wc_path(b, branch_path),
+ trunk_url, &opt_rev, &opt_rev,
+ svn_depth_infinity, TRUE, TRUE, FALSE, FALSE,
+ ctx, b->pool));
+
+ opt_rev.kind = svn_opt_revision_head;
+ }
+ else
+ {
+ SVN_ERR(sbox_wc_commit(b, ""));
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+
+ /* Run a merge from the trunk to the branch.
+ * This should raise an "incoming add vs local obstruction" conflict. */
+ SVN_ERR(svn_client_merge_peg5(trunk_url, NULL, &opt_rev,
+ sbox_wc_path(b, branch_path),
+ svn_depth_infinity,
+ FALSE, FALSE, FALSE, FALSE, FALSE, FALSE,
+ NULL, ctx, b->pool));
+
+ opt_rev.kind = svn_opt_revision_working;
+ }
+
+ /* Ensure that the file has the expected status. */
+ sb.result_pool = b->pool;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_file_path),
+ &opt_rev, svn_depth_unknown, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_file);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(status->conflicted);
+ if (do_switch)
+ {
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_replaced);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_modified);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_modified);
+ }
+ else
+ {
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_normal);
+ }
+ SVN_TEST_ASSERT(!status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+ SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_file_path),
+ ctx, b->pool, b->pool));
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_added_file_text_merge,
+ svn_client_conflict_option_incoming_added_file_replace_and_merge,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+ b->pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_added_file_text_merge,
+ svn_client_conflict_option_incoming_added_file_replace_and_merge,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+ b->pool));
+ }
+
+ /* Ensure that the expected tree conflict is present. */
+ SVN_ERR(svn_client_conflict_get_conflicted(NULL, NULL, &tree_conflicted,
+ conflict, b->pool, b->pool));
+ SVN_TEST_ASSERT(tree_conflicted);
+ if (do_switch)
+ SVN_TEST_ASSERT(svn_client_conflict_get_local_change(conflict) ==
+ svn_wc_conflict_reason_added);
+ else
+ SVN_TEST_ASSERT(svn_client_conflict_get_local_change(conflict) ==
+ svn_wc_conflict_reason_obstructed);
+ SVN_TEST_ASSERT(svn_client_conflict_get_incoming_change(conflict) ==
+ svn_wc_conflict_action_add);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_merge_incoming_added_file_text_merge(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_client_ctx_t *ctx;
+ svn_client_conflict_t *conflict;
+ const char *new_file_path;
+ svn_boolean_t text_conflicted;
+ apr_array_header_t *props_conflicted;
+ svn_boolean_t tree_conflicted;
+ struct status_baton sb;
+ struct svn_client_status_t *status;
+ svn_opt_revision_t opt_rev;
+ const svn_string_t *propval;
+
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+
+ SVN_ERR(svn_test__sandbox_create(b, "merge_incoming_added_file_text_merge",
+ opts, pool));
+
+ SVN_ERR(create_wc_with_file_add_vs_file_add_merge_conflict(b, FALSE));
+
+ /* Resolve the tree conflict. */
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+ new_file_path = svn_relpath_join(branch_path, new_file_name, b->pool);
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_file_path),
+ ctx, b->pool, b->pool));
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict,
+ svn_client_conflict_option_incoming_added_file_text_merge,
+ ctx, b->pool));
+
+ /* Ensure that the file has the expected status. */
+ opt_rev.kind = svn_opt_revision_working;
+ sb.result_pool = b->pool;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_file_path),
+ &opt_rev, svn_depth_unknown, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_file);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_conflicted);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_conflicted);
+ /* ### Shouldn't there be a property conflict? The trunk wins. */
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_modified);
+ SVN_TEST_ASSERT(!status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+ SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_file_path),
+ ctx, b->pool, b->pool));
+
+ /* We should have a text conflict instead of a tree conflict. */
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, b->pool, b->pool));
+ SVN_TEST_ASSERT(text_conflicted &&
+ props_conflicted->nelts == 0 &&
+ !tree_conflicted);
+
+ /* Verify the merged property value. */
+ SVN_ERR(svn_wc_prop_get2(&propval, ctx->wc_ctx,
+ sbox_wc_path(b, new_file_path),
+ "prop", b->pool, b->pool));
+ SVN_TEST_STRING_ASSERT(propval->data, propval_trunk);
+
+ return SVN_NO_ERROR;
+}
+
+/* Test the "incoming added file replace and merge" resolution option for
+ * an "incoming file add vs. local file add upon merge" tree conflict:
+ * the locally added file is replaced with a copy of the incoming file
+ * and the differing texts are merged, which leaves a text conflict. */
+static svn_error_t *
+test_merge_incoming_added_file_replace_and_merge(const svn_test_opts_t *opts,
+                                                 apr_pool_t *pool)
+{
+  svn_client_ctx_t *ctx;
+  svn_client_conflict_t *conflict;
+  const char *new_file_path;
+  svn_boolean_t text_conflicted;
+  apr_array_header_t *props_conflicted;
+  svn_boolean_t tree_conflicted;
+  struct status_baton sb;
+  struct svn_client_status_t *status;
+  svn_opt_revision_t opt_rev;
+  const svn_string_t *propval;
+
+  svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+
+  SVN_ERR(svn_test__sandbox_create(
+            b, "merge_incoming_added_file_replace_and_merge", opts, pool));
+
+  SVN_ERR(create_wc_with_file_add_vs_file_add_merge_conflict(b, FALSE));
+
+  /* Resolve the tree conflict. */
+  SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+  new_file_path = svn_relpath_join(branch_path, new_file_name, b->pool);
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_file_path),
+                                  ctx, b->pool, b->pool));
+  SVN_ERR(
+    svn_client_conflict_tree_resolve_by_id(
+      conflict,
+      svn_client_conflict_option_incoming_added_file_replace_and_merge,
+      ctx, b->pool));
+
+  /* Ensure that the file has the expected status. */
+  opt_rev.kind = svn_opt_revision_working;
+  sb.result_pool = b->pool;
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_file_path),
+                             &opt_rev, svn_depth_unknown, TRUE, TRUE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_file);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_conflicted);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_conflicted);
+  /* The replacement is a copy of the incoming (trunk) file, so its
+   * property carries the trunk's value with no local modification left
+   * behind; see the propval check at the end of this test. */
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_normal);
+  /* "Replace" resolution schedules the file as a copy. */
+  SVN_TEST_ASSERT(status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_file_path),
+                                  ctx, b->pool, b->pool));
+
+  /* We should have a text conflict instead of a tree conflict. */
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(text_conflicted &&
+                  props_conflicted->nelts == 0 &&
+                  !tree_conflicted);
+
+  /* Verify the merged property value. */
+  SVN_ERR(svn_wc_prop_get2(&propval, ctx->wc_ctx,
+                           sbox_wc_path(b, new_file_path),
+                           "prop", b->pool, b->pool));
+  SVN_TEST_STRING_ASSERT(propval->data, propval_trunk);
+
+  return SVN_NO_ERROR;
+}
+
+/*
+ * The following tests verify resolution of "incoming dir add vs.
+ * local dir obstruction upon merge" tree conflicts.
+ */
+
+/* A helper function which prepares a working copy for the tests below.
+ *
+ * Create a greek tree, branch node "A", and add a new directory
+ * containing a new file at the same relative path on both the trunk and
+ * the branch, with differing file content and property values.  A merge
+ * from the trunk to the branch then raises an "incoming dir add vs.
+ * local dir obstruction" tree conflict, whose presence and available
+ * resolution options this function verifies before returning.
+ *
+ * If FILE_CHANGE_ON_TRUNK is set, commit an additional text change to
+ * the new file on the trunk.  If WITH_MOVE is set, start the trunk
+ * directory's history outside the trunk and move it into the colliding
+ * location.  If FILE_CHANGE_ON_BRANCH is set, commit an additional text
+ * change to the new file on the branch. */
+static svn_error_t *
+create_wc_with_dir_add_vs_dir_add_merge_conflict(
+  svn_test__sandbox_t *b,
+  svn_boolean_t file_change_on_trunk,
+  svn_boolean_t with_move,
+  svn_boolean_t file_change_on_branch)
+{
+  /* NOTE(review): these locals were previously declared 'static' for no
+   * apparent reason; that retained pool-allocated pointers across test
+   * runs.  They are plain locals now. */
+  const char *new_dir_path;
+  const char *new_file_path;
+  svn_client_ctx_t *ctx;
+  const char *trunk_url;
+  svn_opt_revision_t opt_rev;
+  svn_client_status_t *status;
+  struct status_baton sb;
+  svn_client_conflict_t *conflict;
+  svn_boolean_t tree_conflicted;
+  const char *move_src_path;
+
+  SVN_ERR(sbox_add_and_commit_greek_tree(b));
+
+  /* Create a branch of node "A". */
+  SVN_ERR(sbox_wc_copy(b, trunk_path, branch_path));
+  SVN_ERR(sbox_wc_commit(b, ""));
+
+  /* Add new directories on trunk and the branch which occupy the same path
+   * but have different content and properties. */
+  if (with_move)
+    {
+      /* History starts at ^/newdir.orig, outside of ^/A (the "trunk").
+       * Then a move to ^/A/newdir causes a collision. */
+      move_src_path = apr_pstrcat(b->pool, new_dir_name, ".orig",
+                                  SVN_VA_NULL);
+      new_dir_path = move_src_path;
+    }
+  else
+    {
+      new_dir_path = svn_relpath_join(trunk_path, new_dir_name, b->pool);
+      move_src_path = NULL;
+    }
+
+  SVN_ERR(sbox_wc_mkdir(b, new_dir_path));
+  new_file_path = svn_relpath_join(new_dir_path, new_file_name, b->pool);
+  SVN_ERR(sbox_file_write(b, new_file_path,
+                          "This is a new file on the trunk\n"));
+  SVN_ERR(sbox_wc_add(b, new_file_path));
+  SVN_ERR(sbox_wc_propset(b, "prop", propval_trunk, new_file_path));
+  SVN_ERR(sbox_wc_commit(b, ""));
+  if (file_change_on_trunk)
+    {
+      /* (A space was missing between the two string fragments here,
+       * producing "...new fileon the trunk".  The content lengths of the
+       * trunk and branch versions still differ, as required below.) */
+      SVN_ERR(sbox_file_write(b, new_file_path,
+                              "This is a change to the new file "
+                              "on the trunk\n"));
+      SVN_ERR(sbox_wc_commit(b, ""));
+    }
+  if (with_move)
+    {
+      /* Now move the new directory to the colliding path. */
+      new_dir_path = svn_relpath_join(trunk_path, new_dir_name, b->pool);
+      SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+      /* This return value was previously discarded; wrap it in SVN_ERR
+       * like every other sandbox call so move failures abort the test. */
+      SVN_ERR(sbox_wc_move(b, move_src_path, new_dir_path));
+      SVN_ERR(sbox_wc_commit(b, ""));
+    }
+  /* Create the colliding directory and file on the branch. */
+  new_dir_path = svn_relpath_join(branch_path, new_dir_name, b->pool);
+  SVN_ERR(sbox_wc_mkdir(b, new_dir_path));
+  new_file_path = svn_relpath_join(branch_path,
+                                   svn_relpath_join(new_dir_name,
+                                                    new_file_name, b->pool),
+                                   b->pool);
+  SVN_ERR(sbox_file_write(b, new_file_path,
+                          /* NB: Ensure that the file content's length
+                           * differs between the two branches! Tests are
+                           * run with sleep for timestamps disabled. */
+                          "This is a new file on the branch\n"));
+  SVN_ERR(sbox_wc_add(b, new_file_path));
+  SVN_ERR(sbox_wc_propset(b, "prop", propval_branch, new_file_path));
+  SVN_ERR(sbox_wc_commit(b, ""));
+
+  if (file_change_on_branch)
+    {
+      SVN_ERR(sbox_file_write(b, new_file_path,
+                              "This is a change to the new file "
+                              "on the branch\n"));
+      SVN_ERR(sbox_wc_commit(b, ""));
+    }
+
+  /* Run a merge from the trunk to the branch. */
+  SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+
+  SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+  trunk_url = apr_pstrcat(b->pool, b->repos_url, "/", trunk_path,
+                          SVN_VA_NULL);
+
+  opt_rev.kind = svn_opt_revision_head;
+  opt_rev.value.number = SVN_INVALID_REVNUM;
+  /* This should raise an "incoming add vs local obstruction" tree conflict. */
+  SVN_ERR(svn_client_merge_peg5(trunk_url, NULL, &opt_rev,
+                                sbox_wc_path(b, branch_path),
+                                svn_depth_infinity,
+                                FALSE, FALSE, FALSE, FALSE, FALSE, FALSE,
+                                NULL, ctx, b->pool));
+
+  /* Ensure that the directory has the expected status. */
+  opt_rev.kind = svn_opt_revision_working;
+  sb.result_pool = b->pool;
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_dir_path),
+                             &opt_rev, svn_depth_unknown, TRUE, TRUE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_dir);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_normal);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+  SVN_TEST_ASSERT(!status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+
+  /* The same resolution options should be offered both before and after
+   * conflict details have been fetched. */
+  {
+    svn_client_conflict_option_id_t expected_opts[] = {
+      svn_client_conflict_option_postpone,
+      svn_client_conflict_option_accept_current_wc_state,
+      svn_client_conflict_option_incoming_add_ignore,
+      svn_client_conflict_option_incoming_added_dir_merge,
+      svn_client_conflict_option_incoming_added_dir_replace,
+      svn_client_conflict_option_incoming_added_dir_replace_and_merge,
+      -1 /* end of list */
+    };
+    SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+                                         b->pool));
+  }
+
+  SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+  {
+    svn_client_conflict_option_id_t expected_opts[] = {
+      svn_client_conflict_option_postpone,
+      svn_client_conflict_option_accept_current_wc_state,
+      svn_client_conflict_option_incoming_add_ignore,
+      svn_client_conflict_option_incoming_added_dir_merge,
+      svn_client_conflict_option_incoming_added_dir_replace,
+      svn_client_conflict_option_incoming_added_dir_replace_and_merge,
+      -1 /* end of list */
+    };
+    SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+                                         b->pool));
+  }
+
+  /* Ensure that the expected tree conflict is present. */
+  SVN_ERR(svn_client_conflict_get_conflicted(NULL, NULL, &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(tree_conflicted);
+  SVN_TEST_ASSERT(svn_client_conflict_get_local_change(conflict) ==
+                  svn_wc_conflict_reason_obstructed);
+  SVN_TEST_ASSERT(svn_client_conflict_get_incoming_change(conflict) ==
+                  svn_wc_conflict_action_add);
+
+  return SVN_NO_ERROR;
+}
+
+/* Test the "ignore incoming add" option for resolving an "incoming dir
+ * add vs. local dir obstruction upon merge" tree conflict: the incoming
+ * directory is discarded and the branch keeps its own version, leaving
+ * no conflict behind. */
+static svn_error_t *
+test_merge_incoming_added_dir_ignore(const svn_test_opts_t *opts,
+                                     apr_pool_t *pool)
+{
+  svn_client_ctx_t *ctx;
+  svn_client_conflict_t *conflict;
+  const char *new_dir_path;
+  svn_boolean_t text_conflicted;
+  apr_array_header_t *props_conflicted;
+  svn_boolean_t tree_conflicted;
+  struct status_baton sb;
+  struct svn_client_status_t *status;
+  svn_opt_revision_t opt_rev;
+  svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+
+  SVN_ERR(svn_test__sandbox_create(b, "merge_incoming_added_dir_ignore",
+                                   opts, pool));
+
+  SVN_ERR(create_wc_with_dir_add_vs_dir_add_merge_conflict(b, FALSE, FALSE,
+                                                           FALSE));
+
+  /* Resolve the tree conflict. */
+  SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+  new_dir_path = svn_relpath_join(branch_path, new_dir_name, b->pool);
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+  SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+            conflict, svn_client_conflict_option_incoming_add_ignore, ctx,
+            b->pool));
+
+  /* Ensure that the directory has the expected status. */
+  opt_rev.kind = svn_opt_revision_working;
+  sb.result_pool = b->pool;
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_dir_path),
+                             &opt_rev, svn_depth_unknown, TRUE, TRUE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_dir);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(!status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_normal);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+  SVN_TEST_ASSERT(!status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+
+  /* The directory should not be in conflict. */
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(!text_conflicted &&
+                  props_conflicted->nelts == 0 &&
+                  !tree_conflicted);
+
+  return SVN_NO_ERROR;
+}
+
+/* Test the "merge the directories" option for resolving an "incoming dir
+ * add vs. local dir obstruction upon merge" tree conflict: the incoming
+ * and local directories are merged.  The directory itself ends up clean;
+ * the file added on both sides gets a text conflict, with the trunk's
+ * property value winning the property merge. */
+static svn_error_t *
+test_merge_incoming_added_dir_merge(const svn_test_opts_t *opts,
+                                    apr_pool_t *pool)
+{
+  svn_client_ctx_t *ctx;
+  svn_client_conflict_t *conflict;
+  const char *new_dir_path;
+  const char *new_file_path;
+  svn_boolean_t text_conflicted;
+  apr_array_header_t *props_conflicted;
+  svn_boolean_t tree_conflicted;
+  struct status_baton sb;
+  struct svn_client_status_t *status;
+  svn_opt_revision_t opt_rev;
+  const svn_string_t *propval;
+  svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+
+  SVN_ERR(svn_test__sandbox_create(b, "merge_incoming_added_dir_merge",
+                                   opts, pool));
+
+  SVN_ERR(create_wc_with_dir_add_vs_dir_add_merge_conflict(b, FALSE, FALSE,
+                                                           FALSE));
+
+  /* Resolve the tree conflict.  Details are fetched before resolving. */
+  SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+  new_dir_path = svn_relpath_join(branch_path, new_dir_name, b->pool);
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+  SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+  SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+            conflict,
+            svn_client_conflict_option_incoming_added_dir_merge, ctx,
+            b->pool));
+
+  /* Ensure that the directory has the expected status. */
+  opt_rev.kind = svn_opt_revision_working;
+  sb.result_pool = b->pool;
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_dir_path),
+                             &opt_rev, svn_depth_unknown, TRUE, TRUE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_dir);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(!status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_normal);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+  SVN_TEST_ASSERT(!status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+
+  /* The directory should not be in conflict. */
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(!text_conflicted &&
+                  props_conflicted->nelts == 0 &&
+                  !tree_conflicted);
+
+  new_file_path = svn_relpath_join(branch_path,
+                                   svn_relpath_join(new_dir_name,
+                                                    new_file_name, b->pool),
+                                   b->pool);
+
+  /* Ensure that the file has the expected status. */
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_file_path),
+                             &opt_rev, svn_depth_unknown, TRUE, TRUE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_file);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_conflicted);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_conflicted);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_modified);
+  SVN_TEST_ASSERT(!status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  /* The file should now have a text conflict. */
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_file_path),
+                                  ctx, b->pool, b->pool));
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(text_conflicted &&
+                  props_conflicted->nelts == 0 &&
+                  !tree_conflicted);
+
+  /* Verify the file's merged property value. */
+  SVN_ERR(svn_wc_prop_get2(&propval, ctx->wc_ctx,
+                           sbox_wc_path(b, new_file_path),
+                           "prop", b->pool, b->pool));
+  SVN_TEST_STRING_ASSERT(propval->data, propval_trunk);
+
+  return SVN_NO_ERROR;
+}
+
+/* Same test as above, but with an additional file change on the trunk
+ * (create_wc_with_dir_add_vs_dir_add_merge_conflict is called with
+ * file_change_on_trunk=TRUE). */
+static svn_error_t *
+test_merge_incoming_added_dir_merge2(const svn_test_opts_t *opts,
+                                     apr_pool_t *pool)
+{
+  svn_client_ctx_t *ctx;
+  svn_client_conflict_t *conflict;
+  const char *new_dir_path;
+  const char *new_file_path;
+  svn_boolean_t text_conflicted;
+  apr_array_header_t *props_conflicted;
+  svn_boolean_t tree_conflicted;
+  struct status_baton sb;
+  struct svn_client_status_t *status;
+  svn_opt_revision_t opt_rev;
+  const svn_string_t *propval;
+  svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+
+  SVN_ERR(svn_test__sandbox_create(b, "merge_incoming_added_dir_merge2",
+                                   opts, pool));
+
+  SVN_ERR(create_wc_with_dir_add_vs_dir_add_merge_conflict(b, TRUE, FALSE,
+                                                           FALSE));
+
+  /* Resolve the tree conflict. */
+  SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+  new_dir_path = svn_relpath_join(branch_path, new_dir_name, b->pool);
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+  SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+  SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+            conflict,
+            svn_client_conflict_option_incoming_added_dir_merge,
+            ctx, b->pool));
+
+  /* Ensure that the directory has the expected status. */
+  opt_rev.kind = svn_opt_revision_working;
+  sb.result_pool = b->pool;
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_dir_path),
+                             &opt_rev, svn_depth_unknown, TRUE, TRUE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_dir);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(!status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_normal);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+  SVN_TEST_ASSERT(!status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+
+  /* The directory should not be in conflict. */
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(!text_conflicted &&
+                  props_conflicted->nelts == 0 &&
+                  !tree_conflicted);
+
+  new_file_path = svn_relpath_join(branch_path,
+                                   svn_relpath_join(new_dir_name,
+                                                    new_file_name, b->pool),
+                                   b->pool);
+
+  /* Ensure that the file has the expected status. */
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_file_path),
+                             &opt_rev, svn_depth_unknown, TRUE, TRUE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_file);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_conflicted);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_conflicted);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_modified);
+  SVN_TEST_ASSERT(!status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  /* The file should now have a text conflict.  (NEW_FILE_PATH was already
+   * computed above; the original code redundantly recomputed it here.) */
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_file_path),
+                                  ctx, b->pool, b->pool));
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(text_conflicted &&
+                  props_conflicted->nelts == 0 &&
+                  !tree_conflicted);
+
+  /* Verify the file's merged property value. */
+  /* ### Shouldn't there be a property conflict? The trunk wins. */
+  SVN_ERR(svn_wc_prop_get2(&propval, ctx->wc_ctx,
+                           sbox_wc_path(b, new_file_path),
+                           "prop", b->pool, b->pool));
+  SVN_TEST_STRING_ASSERT(propval->data, propval_trunk);
+
+  return SVN_NO_ERROR;
+}
+
+/* Same test as above, but with an additional move operation on the trunk
+ * (create_wc_with_dir_add_vs_dir_add_merge_conflict is called with
+ * with_move=TRUE, so the incoming directory has move history). */
+static svn_error_t *
+test_merge_incoming_added_dir_merge3(const svn_test_opts_t *opts,
+                                     apr_pool_t *pool)
+{
+  svn_client_ctx_t *ctx;
+  svn_client_conflict_t *conflict;
+  const char *new_dir_path;
+  const char *new_file_path;
+  svn_boolean_t text_conflicted;
+  apr_array_header_t *props_conflicted;
+  svn_boolean_t tree_conflicted;
+  struct status_baton sb;
+  struct svn_client_status_t *status;
+  svn_opt_revision_t opt_rev;
+  const svn_string_t *propval;
+  svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+
+  SVN_ERR(svn_test__sandbox_create(b, "merge_incoming_added_dir_merge3",
+                                   opts, pool));
+
+  SVN_ERR(create_wc_with_dir_add_vs_dir_add_merge_conflict(b, TRUE, TRUE,
+                                                           FALSE));
+
+  /* Resolve the tree conflict. */
+  SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+  new_dir_path = svn_relpath_join(branch_path, new_dir_name, b->pool);
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+  SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+  SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+            conflict,
+            svn_client_conflict_option_incoming_added_dir_merge,
+            ctx, b->pool));
+
+  /* Ensure that the directory has the expected status. */
+  opt_rev.kind = svn_opt_revision_working;
+  sb.result_pool = b->pool;
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_dir_path),
+                             &opt_rev, svn_depth_unknown, TRUE, TRUE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_dir);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(!status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_normal);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+  SVN_TEST_ASSERT(!status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+
+  /* The directory should not be in conflict. */
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(!text_conflicted &&
+                  props_conflicted->nelts == 0 &&
+                  !tree_conflicted);
+
+  /* There should now be an 'add vs add' conflict on the new file. */
+  new_file_path = svn_relpath_join(branch_path,
+                                   svn_relpath_join(new_dir_name,
+                                                    new_file_name, b->pool),
+                                   b->pool);
+
+  /* Ensure that the file has the expected status. */
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_file_path),
+                             &opt_rev, svn_depth_unknown, TRUE, TRUE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_file);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_conflicted);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_conflicted);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_modified);
+  SVN_TEST_ASSERT(!status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  /* We should now have a text conflict in the file.  (NEW_FILE_PATH was
+   * already computed above; the original code redundantly recomputed it
+   * here.) */
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_file_path),
+                                  ctx, b->pool, b->pool));
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(text_conflicted &&
+                  props_conflicted->nelts == 0 &&
+                  !tree_conflicted);
+
+  /* Verify the file's merged property value. */
+  /* ### Shouldn't there be a property conflict? The trunk wins. */
+  SVN_ERR(svn_wc_prop_get2(&propval, ctx->wc_ctx,
+                           sbox_wc_path(b, new_file_path),
+                           "prop", b->pool, b->pool));
+  SVN_TEST_STRING_ASSERT(propval->data, propval_trunk);
+
+  return SVN_NO_ERROR;
+}
+
+/* Test the "replace" option for resolving an "incoming dir add vs. local
+ * dir obstruction upon merge" tree conflict: the local directory is
+ * deleted and replaced with a copy of the incoming directory, leaving no
+ * conflict behind. */
+static svn_error_t *
+test_merge_incoming_added_dir_replace(const svn_test_opts_t *opts,
+                                      apr_pool_t *pool)
+{
+  svn_client_ctx_t *ctx;
+  svn_client_conflict_t *conflict;
+  const char *new_dir_path;
+  svn_boolean_t text_conflicted;
+  apr_array_header_t *props_conflicted;
+  svn_boolean_t tree_conflicted;
+  struct status_baton sb;
+  struct svn_client_status_t *status;
+  svn_opt_revision_t opt_rev;
+  svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+
+  SVN_ERR(svn_test__sandbox_create(b, "merge_incoming_added_dir_replace",
+                                   opts, pool));
+
+  SVN_ERR(create_wc_with_dir_add_vs_dir_add_merge_conflict(b, FALSE, FALSE,
+                                                           FALSE));
+
+  /* Resolve the tree conflict. */
+  SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+  new_dir_path = svn_relpath_join(branch_path, new_dir_name, b->pool);
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+  SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+  SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+            conflict,
+            svn_client_conflict_option_incoming_added_dir_replace,
+            ctx, b->pool));
+
+  /* Ensure that the directory has the expected status: it is now
+   * scheduled for replacement by a copy of the incoming directory. */
+  opt_rev.kind = svn_opt_revision_working;
+  sb.result_pool = b->pool;
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_dir_path),
+                             &opt_rev, svn_depth_unknown, TRUE, TRUE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_dir);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(!status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_replaced);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+  SVN_TEST_ASSERT(status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+
+  /* The directory should not be in conflict. */
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(!text_conflicted &&
+                  props_conflicted->nelts == 0 &&
+                  !tree_conflicted);
+
+  return SVN_NO_ERROR;
+}
+
+/* Test the "replace and merge" option for resolving an "incoming dir add
+ * vs. local dir obstruction upon merge" tree conflict: the local
+ * directory is replaced with a copy of the incoming one and the file
+ * contents are merged, expecting a text conflict on the file.
+ *
+ * This test currently fails to meet expectations. Our merge code doesn't
+ * support a merge of files which were added in the same revision as their
+ * parent directory and were not modified since. */
+static svn_error_t *
+test_merge_incoming_added_dir_replace_and_merge(const svn_test_opts_t *opts,
+                                                apr_pool_t *pool)
+{
+  svn_client_ctx_t *ctx;
+  svn_client_conflict_t *conflict;
+  const char *new_dir_path;
+  const char *new_file_path;
+  svn_boolean_t text_conflicted;
+  apr_array_header_t *props_conflicted;
+  svn_boolean_t tree_conflicted;
+  struct status_baton sb;
+  struct svn_client_status_t *status;
+  svn_opt_revision_t opt_rev;
+  svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+
+  SVN_ERR(svn_test__sandbox_create(b,
+                                   "merge_incoming_added_dir_replace_and_merge",
+                                   opts, pool));
+
+  SVN_ERR(create_wc_with_dir_add_vs_dir_add_merge_conflict(b, FALSE, FALSE,
+                                                           FALSE));
+
+  /* Resolve the tree conflict. */
+  SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+  new_dir_path = svn_relpath_join(branch_path, new_dir_name, b->pool);
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+  SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+  SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+            conflict,
+            svn_client_conflict_option_incoming_added_dir_replace_and_merge,
+            ctx, b->pool));
+
+  /* Ensure that the directory has the expected status: scheduled for
+   * replacement by a copy of the incoming directory. */
+  opt_rev.kind = svn_opt_revision_working;
+  sb.result_pool = b->pool;
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_dir_path),
+                             &opt_rev, svn_depth_unknown, TRUE, TRUE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_dir);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(!status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_replaced);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+  SVN_TEST_ASSERT(status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+
+  /* The directory should not be in conflict. */
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(!text_conflicted &&
+                  props_conflicted->nelts == 0 &&
+                  !tree_conflicted);
+
+  /* We should have a text conflict in the file. */
+  new_file_path = svn_relpath_join(branch_path,
+                                   svn_relpath_join(new_dir_name,
+                                                    new_file_name, b->pool),
+                                   b->pool);
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_file_path),
+                                  ctx, b->pool, b->pool));
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(text_conflicted &&
+                  props_conflicted->nelts == 0 &&
+                  !tree_conflicted);
+
+  return SVN_NO_ERROR;
+}
+
+/* Same test as above, but with an additional file change on the branch
+ * (file_change_on_branch=TRUE) which makes resolution work as expected:
+ * the file is no longer "added in the same revision as its parent and
+ * unmodified since", so the replace-and-merge resolver can merge it and
+ * produce the expected text conflict. */
+static svn_error_t *
+test_merge_incoming_added_dir_replace_and_merge2(const svn_test_opts_t *opts,
+                                                 apr_pool_t *pool)
+{
+  svn_client_ctx_t *ctx;
+  svn_client_conflict_t *conflict;
+  const char *new_dir_path;
+  const char *new_file_path;
+  svn_boolean_t text_conflicted;
+  apr_array_header_t *props_conflicted;
+  svn_boolean_t tree_conflicted;
+  struct status_baton sb;
+  struct svn_client_status_t *status;
+  svn_opt_revision_t opt_rev;
+  svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+
+  SVN_ERR(svn_test__sandbox_create(
+            b, "merge_incoming_added_dir_replace_and_merge2", opts, pool));
+
+  SVN_ERR(create_wc_with_dir_add_vs_dir_add_merge_conflict(b, FALSE, FALSE,
+                                                           TRUE));
+
+  /* Resolve the tree conflict. */
+  SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+  new_dir_path = svn_relpath_join(branch_path, new_dir_name, b->pool);
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+  SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+  SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+            conflict,
+            svn_client_conflict_option_incoming_added_dir_replace_and_merge,
+            ctx, b->pool));
+
+  /* Ensure that the directory has the expected status: scheduled for
+   * replacement by a copy of the incoming directory. */
+  opt_rev.kind = svn_opt_revision_working;
+  sb.result_pool = b->pool;
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_dir_path),
+                             &opt_rev, svn_depth_unknown, TRUE, TRUE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_dir);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(!status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_replaced);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+  SVN_TEST_ASSERT(status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+
+  /* The directory should not be in conflict. */
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(!text_conflicted &&
+                  props_conflicted->nelts == 0 &&
+                  !tree_conflicted);
+
+  /* We should have a text conflict in the file. */
+  new_file_path = svn_relpath_join(branch_path,
+                                   svn_relpath_join(new_dir_name,
+                                                    new_file_name, b->pool),
+                                   b->pool);
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_file_path),
+                                  ctx, b->pool, b->pool));
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(text_conflicted &&
+                  props_conflicted->nelts == 0 &&
+                  !tree_conflicted);
+
+  return SVN_NO_ERROR;
+}
+
+/* A helper function which prepares a working copy for the tests below.
+ * Branches the greek tree's "A" to a branch, then deletes (or, when MOVE
+ * is TRUE, moves) a file on the trunk while modifying that same file on
+ * the branch.  Finally raises an "incoming delete vs local edit" tree
+ * conflict, either via merge or -- when DO_SWITCH is TRUE -- by switching
+ * the branch working copy to the trunk. */
+static svn_error_t *
+create_wc_with_incoming_delete_file_merge_conflict(svn_test__sandbox_t *b,
+ svn_boolean_t move,
+ svn_boolean_t do_switch)
+{
+ svn_client_ctx_t *ctx;
+ /* NOTE(review): the 'static' qualifier looks unintentional -- the pointer
+ * is reassigned from B's pool on every call, so 'static' only keeps a
+ * stale pool-allocated pointer alive between calls; confirm upstream. */
+ static const char *trunk_url;
+ svn_opt_revision_t opt_rev;
+ const char *deleted_path;
+
+ SVN_ERR(sbox_add_and_commit_greek_tree(b));
+
+ /* Create a branch of node "A". */
+ SVN_ERR(sbox_wc_copy(b, trunk_path, branch_path));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ if (move)
+ {
+ const char *move_target_path;
+
+ /* Move a file on the trunk. */
+ deleted_path = svn_relpath_join(trunk_path, deleted_file_name, b->pool);
+ move_target_path = svn_relpath_join(trunk_path, new_file_name, b->pool);
+ SVN_ERR(sbox_wc_move(b, deleted_path, move_target_path));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ }
+ else
+ {
+ /* Delete a file on the trunk. */
+ deleted_path = svn_relpath_join(trunk_path, deleted_file_name, b->pool);
+ SVN_ERR(sbox_wc_delete(b, deleted_path));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ }
+
+ /* Modify a file on the branch. */
+ deleted_path = svn_relpath_join(branch_path, deleted_file_name, b->pool);
+ SVN_ERR(sbox_file_write(b, deleted_path, modified_file_on_branch_content));
+
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+ opt_rev.kind = svn_opt_revision_head;
+ opt_rev.value.number = SVN_INVALID_REVNUM;
+ trunk_url = apr_pstrcat(b->pool, b->repos_url, "/", trunk_path,
+ SVN_VA_NULL);
+ if (do_switch)
+ {
+ /* Switch the branch working copy to trunk. */
+ svn_revnum_t result_rev;
+
+ /* This should raise an "incoming delete vs local edit" tree conflict. */
+ SVN_ERR(svn_client_switch3(&result_rev, sbox_wc_path(b, branch_path),
+ trunk_url, &opt_rev, &opt_rev,
+ svn_depth_infinity,
+ TRUE, FALSE, FALSE, FALSE, ctx, b->pool));
+ }
+ else
+ {
+ /* Commit modification and run a merge from the trunk to the branch. */
+ SVN_ERR(sbox_wc_commit(b, ""));
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+ /* This should raise an "incoming delete vs local edit" tree conflict. */
+ SVN_ERR(svn_client_merge_peg5(trunk_url, NULL, &opt_rev,
+ sbox_wc_path(b, branch_path),
+ svn_depth_infinity,
+ FALSE, FALSE, FALSE, FALSE, FALSE, FALSE,
+ NULL, ctx, b->pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Test the 'incoming delete ignore' resolution option, applied to a tree
+ * conflict raised by merging an incoming file deletion onto a branch that
+ * carries local edits to the same file. */
+static svn_error_t *
+test_merge_incoming_delete_file_ignore(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ svn_client_ctx_t *ctx;
+ const char *deleted_path;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t tree_conflicted;
+ svn_boolean_t text_conflicted;
+ apr_array_header_t *props_conflicted;
+ struct status_baton sb;
+ struct svn_client_status_t *status;
+ svn_opt_revision_t opt_rev;
+
+ SVN_ERR(svn_test__sandbox_create(b, "merge_incoming_delete_file_ignore",
+ opts, pool));
+
+ SVN_ERR(create_wc_with_incoming_delete_file_merge_conflict(b, FALSE, FALSE));
+
+ /* Resolve the tree conflict. */
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+ deleted_path = svn_relpath_join(branch_path, deleted_file_name, b->pool);
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, deleted_path),
+ ctx, b->pool, b->pool));
+
+ /* These resolution options must be offered even before conflict details
+ * have been fetched. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_ignore,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+ b->pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+
+ /* Fetching details should not change the set of offered options here. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_ignore,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+ b->pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict, svn_client_conflict_option_incoming_delete_ignore,
+ ctx, b->pool));
+
+ /* Ensure that the deleted file has the expected status.  Ignoring the
+ * incoming deletion should leave the locally edited file in place with
+ * an unremarkable 'normal' status. */
+ opt_rev.kind = svn_opt_revision_working;
+ sb.result_pool = b->pool;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, deleted_path),
+ &opt_rev, svn_depth_unknown, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_file);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(!status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+ SVN_TEST_ASSERT(!status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+ SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, deleted_path),
+ ctx, b->pool, b->pool));
+
+ /* The file should not be in conflict. */
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, b->pool, b->pool));
+ SVN_TEST_ASSERT(!text_conflicted &&
+ props_conflicted->nelts == 0 &&
+ !tree_conflicted);
+
+ return SVN_NO_ERROR;
+}
+
+/* Test the 'incoming delete accept' resolution option, applied to a tree
+ * conflict raised by merging an incoming file deletion onto a branch that
+ * carries local edits to the same file. */
+static svn_error_t *
+test_merge_incoming_delete_file_accept(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ svn_client_ctx_t *ctx;
+ const char *deleted_path;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t tree_conflicted;
+ svn_boolean_t text_conflicted;
+ apr_array_header_t *props_conflicted;
+ struct status_baton sb;
+ struct svn_client_status_t *status;
+ svn_opt_revision_t opt_rev;
+
+ SVN_ERR(svn_test__sandbox_create(b, "merge_incoming_delete_file_accept",
+ opts, pool));
+
+ SVN_ERR(create_wc_with_incoming_delete_file_merge_conflict(b, FALSE, FALSE));
+
+ /* Resolve the tree conflict. */
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+ deleted_path = svn_relpath_join(branch_path, deleted_file_name, b->pool);
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, deleted_path),
+ ctx, b->pool, b->pool));
+
+ /* These resolution options must be offered even before conflict details
+ * have been fetched. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_ignore,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+ b->pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+
+ /* Fetching details should not change the set of offered options here. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_ignore,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+ b->pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict, svn_client_conflict_option_incoming_delete_accept,
+ ctx, b->pool));
+
+ /* Ensure that the deleted file has the expected status.  Accepting the
+ * incoming deletion schedules the local file for deletion. */
+ opt_rev.kind = svn_opt_revision_working;
+ sb.result_pool = b->pool;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, deleted_path),
+ &opt_rev, svn_depth_unknown, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_file);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(!status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_deleted);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+ SVN_TEST_ASSERT(!status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+ SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, deleted_path),
+ ctx, b->pool, b->pool));
+
+ /* The file should not be in conflict. */
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, b->pool, b->pool));
+ SVN_TEST_ASSERT(!text_conflicted &&
+ props_conflicted->nelts == 0 &&
+ !tree_conflicted);
+
+ return SVN_NO_ERROR;
+}
+
+/* Test the 'incoming move file text merge' resolution option, applied to a
+ * tree conflict raised by merging an incoming file move onto a branch that
+ * carries local edits to the moved file. */
+static svn_error_t *
+test_merge_incoming_move_file_text_merge(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ svn_client_ctx_t *ctx;
+ const char *deleted_path;
+ const char *new_file_path;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t tree_conflicted;
+ svn_boolean_t text_conflicted;
+ apr_array_header_t *props_conflicted;
+ struct status_baton sb;
+ struct svn_client_status_t *status;
+ svn_opt_revision_t opt_rev;
+ svn_stringbuf_t *buf;
+ svn_node_kind_t kind;
+
+ SVN_ERR(svn_test__sandbox_create(b, "merge_incoming_move_file_text_merge",
+ opts, pool));
+
+ SVN_ERR(create_wc_with_incoming_delete_file_merge_conflict(b, TRUE, FALSE));
+
+ /* Resolve the tree conflict. */
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+ deleted_path = svn_relpath_join(branch_path, deleted_file_name, b->pool);
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, deleted_path),
+ ctx, b->pool, b->pool));
+
+ /* Before details are fetched, only the generic delete options are
+ * offered -- the move has not been detected yet. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_ignore,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+ b->pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+
+ /* Once details have been fetched the resolver knows the file was moved,
+ * so a 'move file text merge' option replaces the plain delete options. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_move_file_text_merge,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+ b->pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict, svn_client_conflict_option_incoming_move_file_text_merge,
+ ctx, b->pool));
+
+ /* Ensure that the deleted file has the expected status: scheduled for
+ * deletion and recorded as moved to the file's new location. */
+ opt_rev.kind = svn_opt_revision_working;
+ sb.result_pool = b->pool;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, deleted_path),
+ &opt_rev, svn_depth_unknown, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_file);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(!status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_deleted);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+ SVN_TEST_ASSERT(!status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+ new_file_path = svn_relpath_join(branch_path, new_file_name, b->pool);
+ SVN_TEST_STRING_ASSERT(status->moved_to_abspath,
+ sbox_wc_path(b, new_file_path));
+
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, deleted_path),
+ ctx, b->pool, b->pool));
+
+ /* The file should not be in conflict. */
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, b->pool, b->pool));
+ SVN_TEST_ASSERT(!text_conflicted &&
+ props_conflicted->nelts == 0 &&
+ !tree_conflicted);
+
+ /* Ensure that the moved file has the expected status: a copy which is
+ * the move destination of the deleted path. */
+ opt_rev.kind = svn_opt_revision_working;
+ sb.result_pool = b->pool;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_file_path),
+ &opt_rev, svn_depth_unknown, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_file);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(!status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_added);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+ SVN_TEST_ASSERT(status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_STRING_ASSERT(status->moved_from_abspath,
+ sbox_wc_path(b, deleted_path));
+ SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+ /* Ensure that the original file was removed. */
+ SVN_ERR(svn_io_check_path(sbox_wc_path(b, deleted_path), &kind, b->pool));
+ SVN_TEST_ASSERT(kind == svn_node_none);
+
+ /* Ensure that the moved file has the expected content: the branch's
+ * local modification must survive the move. */
+ SVN_ERR(svn_stringbuf_from_file2(&buf, sbox_wc_path(b, new_file_path),
+ b->pool));
+ SVN_TEST_STRING_ASSERT(buf->data, modified_file_on_branch_content);
+
+ return SVN_NO_ERROR;
+}
+
+/* A helper function which prepares a working copy for the tests below.
+ * Deletes (or, when MOVE is TRUE, moves) a file on the trunk, updates the
+ * working copy back to r1, modifies that same file locally, and then
+ * updates to HEAD to raise an "incoming delete vs local edit" tree
+ * conflict. */
+static svn_error_t *
+create_wc_with_incoming_delete_file_update_conflict(svn_test__sandbox_t *b,
+ svn_boolean_t move)
+{
+ const char *deleted_path;
+
+ SVN_ERR(sbox_add_and_commit_greek_tree(b));
+
+ if (move)
+ {
+ const char *move_target_path;
+
+ /* Move a file on the trunk. */
+ deleted_path = svn_relpath_join(trunk_path, deleted_file_name, b->pool);
+ move_target_path = svn_relpath_join(trunk_path, new_file_name, b->pool);
+ SVN_ERR(sbox_wc_move(b, deleted_path, move_target_path));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ }
+ else
+ {
+ /* Delete a file on the trunk. */
+ deleted_path = svn_relpath_join(trunk_path, deleted_file_name, b->pool);
+ SVN_ERR(sbox_wc_delete(b, deleted_path));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ }
+
+ /* Update into the past, to a revision where the file still exists. */
+ SVN_ERR(sbox_wc_update(b, "", 1));
+
+ /* Modify a file in the working copy. */
+ deleted_path = svn_relpath_join(trunk_path, deleted_file_name, b->pool);
+ SVN_ERR(sbox_file_write(b, deleted_path, modified_file_on_branch_content));
+
+ /* Update to HEAD.
+ * This should raise an "incoming delete vs local edit" tree conflict. */
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+
+ return SVN_NO_ERROR;
+}
+
+/* Test the 'incoming delete ignore' resolution option, applied to a tree
+ * conflict raised by an update that deletes a locally edited file. */
+static svn_error_t *
+test_update_incoming_delete_file_ignore(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ svn_client_ctx_t *ctx;
+ const char *deleted_path;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t tree_conflicted;
+ svn_boolean_t text_conflicted;
+ apr_array_header_t *props_conflicted;
+ struct status_baton sb;
+ struct svn_client_status_t *status;
+ svn_opt_revision_t opt_rev;
+
+ SVN_ERR(svn_test__sandbox_create(b, "update_incoming_delete_file_ignore",
+ opts, pool));
+
+ SVN_ERR(create_wc_with_incoming_delete_file_update_conflict(b, FALSE));
+
+ /* Resolve the tree conflict. */
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+ deleted_path = svn_relpath_join(trunk_path, deleted_file_name, b->pool);
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, deleted_path),
+ ctx, b->pool, b->pool));
+
+ /* These resolution options must be offered even before conflict details
+ * have been fetched. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_ignore,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+ b->pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+
+ /* Fetching details should not change the set of offered options here. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_ignore,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+ b->pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict, svn_client_conflict_option_incoming_delete_ignore,
+ ctx, b->pool));
+
+ /* Ensure that the deleted file has the expected status.  Ignoring the
+ * incoming deletion during update re-adds the locally edited file as a
+ * modified copy. */
+ opt_rev.kind = svn_opt_revision_working;
+ sb.result_pool = b->pool;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, deleted_path),
+ &opt_rev, svn_depth_unknown, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_file);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(!status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_added);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_modified);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+ SVN_TEST_ASSERT(status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+ SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, deleted_path),
+ ctx, b->pool, b->pool));
+
+ /* The file should not be in conflict. */
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, b->pool, b->pool));
+ SVN_TEST_ASSERT(!text_conflicted &&
+ props_conflicted->nelts == 0 &&
+ !tree_conflicted);
+
+ return SVN_NO_ERROR;
+}
+
+/* Test the 'incoming delete accept' resolution option, applied to a tree
+ * conflict raised by an update that deletes a locally edited file. */
+static svn_error_t *
+test_update_incoming_delete_file_accept(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ svn_client_ctx_t *ctx;
+ const char *deleted_path;
+ svn_client_conflict_t *conflict;
+ svn_node_kind_t node_kind;
+
+ SVN_ERR(svn_test__sandbox_create(b, "update_incoming_delete_file_accept",
+ opts, pool));
+
+ SVN_ERR(create_wc_with_incoming_delete_file_update_conflict(b, FALSE));
+
+ /* Resolve the tree conflict. */
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+ deleted_path = svn_relpath_join(trunk_path, deleted_file_name, b->pool);
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, deleted_path),
+ ctx, b->pool, b->pool));
+
+ /* These resolution options must be offered even before conflict details
+ * have been fetched. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_ignore,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+ b->pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+
+ /* Fetching details should not change the set of offered options here. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_ignore,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+ b->pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict, svn_client_conflict_option_incoming_delete_accept,
+ ctx, b->pool));
+
+ /* Ensure that the deleted file is gone from disk: accepting the incoming
+ * deletion discards the local edit. */
+ SVN_ERR(svn_io_check_path(sbox_wc_path(b, deleted_path), &node_kind,
+ b->pool));
+ SVN_TEST_ASSERT(node_kind == svn_node_none);
+
+ return SVN_NO_ERROR;
+}
+
+/* Test the 'incoming move file text merge' resolution option, applied to a
+ * tree conflict raised by an update that moves a locally edited file. */
+static svn_error_t *
+test_update_incoming_move_file_text_merge(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ svn_client_ctx_t *ctx;
+ const char *deleted_path;
+ const char *new_file_path;
+ svn_client_conflict_t *conflict;
+ struct status_baton sb;
+ struct svn_client_status_t *status;
+ svn_opt_revision_t opt_rev;
+ svn_node_kind_t node_kind;
+ svn_stringbuf_t *buf;
+
+ SVN_ERR(svn_test__sandbox_create(b, "update_incoming_move_file_text_merge",
+ opts, pool));
+
+ SVN_ERR(create_wc_with_incoming_delete_file_update_conflict(b, TRUE));
+
+ /* Resolve the tree conflict. */
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+ deleted_path = svn_relpath_join(trunk_path, deleted_file_name, b->pool);
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, deleted_path),
+ ctx, b->pool, b->pool));
+
+ /* Before details are fetched, only the generic delete options are
+ * offered -- the move has not been detected yet. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_ignore,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+ b->pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+
+ /* Once details have been fetched the resolver knows the file was moved,
+ * so a 'move file text merge' option replaces the plain delete options. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_move_file_text_merge,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+ b->pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict, svn_client_conflict_option_incoming_move_file_text_merge,
+ ctx, b->pool));
+
+ /* Ensure that the deleted file is gone. */
+ SVN_ERR(svn_io_check_path(sbox_wc_path(b, deleted_path), &node_kind,
+ b->pool));
+ SVN_TEST_ASSERT(node_kind == svn_node_none);
+
+ /* Ensure that the moved file has the expected status: versioned at its
+ * new location with the local text modification carried over. */
+ opt_rev.kind = svn_opt_revision_working;
+ sb.result_pool = b->pool;
+ new_file_path = svn_relpath_join(trunk_path, new_file_name, b->pool);
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_file_path),
+ &opt_rev, svn_depth_unknown, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_file);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(!status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_modified);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_modified);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+ SVN_TEST_ASSERT(!status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+ SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+ /* Ensure that the moved file has the expected content: the local
+ * modification must survive the move. */
+ SVN_ERR(svn_stringbuf_from_file2(&buf, sbox_wc_path(b, new_file_path),
+ b->pool));
+ SVN_TEST_STRING_ASSERT(buf->data, modified_file_on_branch_content);
+
+ return SVN_NO_ERROR;
+}
+
+/* Test the 'incoming move file text merge' resolution option, applied to a
+ * tree conflict raised by switching a locally edited branch working copy
+ * to a trunk on which the edited file was moved. */
+static svn_error_t *
+test_switch_incoming_move_file_text_merge(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ svn_client_ctx_t *ctx;
+ const char *deleted_path;
+ const char *new_file_path;
+ svn_client_conflict_t *conflict;
+ struct status_baton sb;
+ struct svn_client_status_t *status;
+ svn_opt_revision_t opt_rev;
+ svn_node_kind_t node_kind;
+ svn_stringbuf_t *buf;
+
+ SVN_ERR(svn_test__sandbox_create(b, "switch_incoming_move_file_text_merge",
+ opts, pool));
+
+ SVN_ERR(create_wc_with_incoming_delete_file_merge_conflict(b, TRUE, TRUE));
+
+ /* Resolve the tree conflict. */
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+ deleted_path = svn_relpath_join(branch_path, deleted_file_name, b->pool);
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, deleted_path),
+ ctx, b->pool, b->pool));
+
+ /* Before details are fetched, only the generic delete options are
+ * offered -- the move has not been detected yet. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_ignore,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+ b->pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+
+ /* Once details have been fetched the resolver knows the file was moved,
+ * so a 'move file text merge' option replaces the plain delete options. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_move_file_text_merge,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+ b->pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict, svn_client_conflict_option_incoming_move_file_text_merge,
+ ctx, b->pool));
+
+ /* Ensure that the deleted file is gone. */
+ SVN_ERR(svn_io_check_path(sbox_wc_path(b, deleted_path), &node_kind,
+ b->pool));
+ SVN_TEST_ASSERT(node_kind == svn_node_none);
+
+ /* Ensure that the moved file has the expected status: versioned at its
+ * new location with the local text modification carried over. */
+ opt_rev.kind = svn_opt_revision_working;
+ sb.result_pool = b->pool;
+ new_file_path = svn_relpath_join(branch_path, new_file_name, b->pool);
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_file_path),
+ &opt_rev, svn_depth_unknown, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_file);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(!status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_modified);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_modified);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+ SVN_TEST_ASSERT(!status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+ SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+ /* Ensure that the moved file has the expected content: the local
+ * modification must survive the move. */
+ SVN_ERR(svn_stringbuf_from_file2(&buf, sbox_wc_path(b, new_file_path),
+ b->pool));
+ SVN_TEST_STRING_ASSERT(buf->data, modified_file_on_branch_content);
+
+ return SVN_NO_ERROR;
+}
+
+/* A helper function which prepares a working copy for the tests below.
+ * Branches the greek tree's "A", adds a file inside the directory that is
+ * about to be removed, then deletes (or, when MOVE is TRUE, moves) that
+ * directory on the trunk while the branch gains either a modified child
+ * (default), a newly added child (LOCAL_ADD), or an uncommitted local edit
+ * of the child (LOCAL_EDIT).  Finally raises an "incoming delete vs local
+ * edit" tree conflict via merge or, when DO_SWITCH is TRUE, via switch. */
+static svn_error_t *
+create_wc_with_incoming_delete_dir_conflict(svn_test__sandbox_t *b,
+ svn_boolean_t move,
+ svn_boolean_t do_switch,
+ svn_boolean_t local_edit,
+ svn_boolean_t local_add)
+{
+ svn_client_ctx_t *ctx;
+ /* NOTE(review): the 'static' qualifier looks unintentional -- the pointer
+ * is reassigned from B's pool on every call, so 'static' only keeps a
+ * stale pool-allocated pointer alive between calls; confirm upstream. */
+ static const char *trunk_url;
+ svn_opt_revision_t opt_rev;
+ const char *deleted_path;
+ /* NOTE(review): only assigned in the !LOCAL_ADD branch below; if a caller
+ * ever passes LOCAL_ADD together with LOCAL_EDIT this is read
+ * uninitialized in the local_edit block -- verify caller combinations. */
+ const char *deleted_child_path;
+ const char *new_file_path;
+
+ SVN_ERR(sbox_add_and_commit_greek_tree(b));
+
+ /* Create a branch of node "A". */
+ SVN_ERR(sbox_wc_copy(b, trunk_path, branch_path));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ /* On the trunk, add a file inside the dir about to be moved/deleted. */
+ new_file_path = svn_relpath_join(trunk_path,
+ svn_relpath_join(deleted_dir_name,
+ new_file_name, b->pool),
+ b->pool);
+ SVN_ERR(sbox_file_write(b, new_file_path,
+ "This is a new file on the trunk\n"));
+ SVN_ERR(sbox_wc_add(b, new_file_path));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+ if (move)
+ {
+ const char *move_target_path;
+
+ /* Move a directory on the trunk. */
+ deleted_path = svn_relpath_join(trunk_path, deleted_dir_name, b->pool);
+ move_target_path = svn_relpath_join(trunk_path, new_dir_name, b->pool);
+ SVN_ERR(sbox_wc_move(b, deleted_path, move_target_path));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ }
+ else
+ {
+ /* Delete a directory on the trunk. */
+ deleted_path = svn_relpath_join(trunk_path, deleted_dir_name, b->pool);
+ SVN_ERR(sbox_wc_delete(b, deleted_path));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ }
+
+ if (local_add)
+ {
+ const char *new_child_path;
+
+ new_child_path = svn_relpath_join(branch_path,
+ svn_relpath_join(deleted_dir_name,
+ new_file_name_branch,
+ b->pool),
+ b->pool);
+ /* Add new file on the branch. */
+ SVN_ERR(sbox_file_write(b, new_child_path, added_file_on_branch_content));
+ SVN_ERR(sbox_wc_add(b, new_child_path));
+ }
+ else
+ {
+ /* Modify a file on the branch. */
+ deleted_child_path = svn_relpath_join(branch_path,
+ svn_relpath_join(deleted_dir_name,
+ deleted_dir_child,
+ b->pool),
+ b->pool);
+ SVN_ERR(sbox_file_write(b, deleted_child_path,
+ modified_file_on_branch_content));
+ }
+
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+ opt_rev.kind = svn_opt_revision_head;
+ opt_rev.value.number = SVN_INVALID_REVNUM;
+ trunk_url = apr_pstrcat(b->pool, b->repos_url, "/", trunk_path,
+ SVN_VA_NULL);
+ if (do_switch)
+ {
+ /* Switch the branch working copy to trunk. */
+ svn_revnum_t result_rev;
+
+ /* This should raise an "incoming delete vs local edit" tree conflict. */
+ SVN_ERR(svn_client_switch3(&result_rev, sbox_wc_path(b, branch_path),
+ trunk_url, &opt_rev, &opt_rev,
+ svn_depth_infinity,
+ TRUE, FALSE, FALSE, FALSE, ctx, b->pool));
+ }
+ else
+ {
+ /* Commit modification and run a merge from the trunk to the branch. */
+ SVN_ERR(sbox_wc_commit(b, ""));
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+
+ if (local_edit)
+ {
+ /* Modify the file in the working copy. */
+ SVN_ERR(sbox_file_write(b, deleted_child_path,
+ modified_file_in_working_copy_content));
+ }
+
+ /* This should raise an "incoming delete vs local edit" tree conflict. */
+ SVN_ERR(svn_client_merge_peg5(trunk_url, NULL, &opt_rev,
+ sbox_wc_path(b, branch_path),
+ svn_depth_infinity,
+ FALSE, FALSE, FALSE, FALSE, FALSE, FALSE,
+ NULL, ctx, b->pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Test 'incoming move dir merge' resolution option. */
+static svn_error_t *
+test_merge_incoming_move_dir(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ svn_client_ctx_t *ctx;
+ const char *deleted_path;
+ const char *moved_to_path;
+ const char *child_path;
+ svn_client_conflict_t *conflict;
+ struct status_baton sb;
+ struct svn_client_status_t *status;
+ svn_stringbuf_t *buf;
+ svn_opt_revision_t opt_rev;
+ apr_array_header_t *options;
+ svn_client_conflict_option_t *option;
+ apr_array_header_t *possible_moved_to_abspaths;
+
+ SVN_ERR(svn_test__sandbox_create(b, "merge_incoming_move_dir", opts, pool));
+
+ /* Set up a merge from trunk to branch which raises an incoming-delete
+ * tree conflict on a directory. (The boolean flags select a scenario
+ * variant of the helper; their definitions are elsewhere in this file.) */
+ SVN_ERR(create_wc_with_incoming_delete_dir_conflict(b, TRUE, FALSE, FALSE,
+ FALSE));
+
+ deleted_path = svn_relpath_join(branch_path, deleted_dir_name, b->pool);
+ moved_to_path = svn_relpath_join(branch_path, new_dir_name, b->pool);
+
+ /* Fetch the tree conflict recorded on the deleted directory and have the
+ * resolver gather its details before querying resolution options. */
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, deleted_path),
+ ctx, b->pool, b->pool));
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+
+ /* Check possible move destinations for the directory. */
+ SVN_ERR(svn_client_conflict_tree_get_resolution_options(&options, conflict,
+ ctx, b->pool,
+ b->pool));
+ option = svn_client_conflict_option_find_by_id(
+ options, svn_client_conflict_option_incoming_move_dir_merge);
+ SVN_TEST_ASSERT(option != NULL);
+
+ SVN_ERR(svn_client_conflict_option_get_moved_to_abspath_candidates(
+ &possible_moved_to_abspaths, option, b->pool, b->pool));
+
+ /* The resolver finds two possible destinations for the moved folder:
+ *
+ * Possible working copy destinations for moved-away 'A_branch/B' are:
+ * (1): 'A_branch/newdir'
+ * (2): 'A/newdir'
+ * Only one destination can be a move; the others are copies.
+ */
+ SVN_TEST_INT_ASSERT(possible_moved_to_abspaths->nelts, 2);
+ SVN_TEST_STRING_ASSERT(
+ APR_ARRAY_IDX(possible_moved_to_abspaths, 0, const char *),
+ sbox_wc_path(b, moved_to_path));
+ SVN_TEST_STRING_ASSERT(
+ APR_ARRAY_IDX(possible_moved_to_abspaths, 1, const char *),
+ sbox_wc_path(b, svn_relpath_join(trunk_path, new_dir_name, b->pool)));
+
+ /* Resolve the tree conflict. */
+ /* Candidate index 0 is the branch-side destination asserted above. */
+ SVN_ERR(svn_client_conflict_option_set_moved_to_abspath(option, 0,
+ ctx, b->pool));
+ SVN_ERR(svn_client_conflict_tree_resolve(conflict, option, ctx, b->pool));
+
+ /* Ensure that the moved-away directory has the expected status. */
+ sb.result_pool = b->pool;
+ opt_rev.kind = svn_opt_revision_working;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, deleted_path),
+ &opt_rev, svn_depth_empty, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_dir);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(!status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_deleted);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+ SVN_TEST_ASSERT(!status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+ SVN_TEST_STRING_ASSERT(status->moved_to_abspath,
+ sbox_wc_path(b, moved_to_path));
+
+ /* Ensure that the moved-here directory has the expected status. */
+ sb.result_pool = b->pool;
+ opt_rev.kind = svn_opt_revision_working;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, moved_to_path),
+ &opt_rev, svn_depth_empty, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_dir);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(!status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_added);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+ SVN_TEST_ASSERT(status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_STRING_ASSERT(status->moved_from_abspath,
+ sbox_wc_path(b, deleted_path));
+ SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+ /* Ensure that the edited file has the expected content. */
+ /* The branch-side edit must have been merged into the move destination. */
+ child_path = svn_relpath_join(moved_to_path, deleted_dir_child,
+ b->pool);
+ SVN_ERR(svn_stringbuf_from_file2(&buf, sbox_wc_path(b, child_path),
+ b->pool));
+ SVN_TEST_STRING_ASSERT(buf->data, modified_file_on_branch_content);
+
+ return SVN_NO_ERROR;
+}
+
+/* Test 'incoming move dir merge' resolution option with local mods. */
+static svn_error_t *
+test_merge_incoming_move_dir2(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ svn_client_ctx_t *ctx;
+ const char *deleted_path;
+ const char *moved_to_path;
+ const char *child_path;
+ svn_client_conflict_t *conflict;
+ struct status_baton sb;
+ struct svn_client_status_t *status;
+ svn_stringbuf_t *buf;
+ svn_opt_revision_t opt_rev;
+
+ SVN_ERR(svn_test__sandbox_create(b, "merge_incoming_move_dir2", opts, pool));
+
+ /* Same scenario as test_merge_incoming_move_dir, but a different helper
+ * flag is set — presumably selecting the "local modifications" variant
+ * (see the final content assertion below). */
+ SVN_ERR(create_wc_with_incoming_delete_dir_conflict(b, TRUE, FALSE, TRUE,
+ FALSE));
+
+ deleted_path = svn_relpath_join(branch_path, deleted_dir_name, b->pool);
+ moved_to_path = svn_relpath_join(branch_path, new_dir_name, b->pool);
+
+ /* Resolve the tree conflict. */
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, deleted_path),
+ ctx, b->pool, b->pool));
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict, svn_client_conflict_option_incoming_move_dir_merge,
+ ctx, b->pool));
+
+ /* Ensure that the moved-away directory has the expected status. */
+ sb.result_pool = b->pool;
+ opt_rev.kind = svn_opt_revision_working;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, deleted_path),
+ &opt_rev, svn_depth_empty, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_dir);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(!status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_deleted);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+ SVN_TEST_ASSERT(!status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+ SVN_TEST_STRING_ASSERT(status->moved_to_abspath,
+ sbox_wc_path(b, moved_to_path));
+
+ /* Ensure that the moved-here directory has the expected status. */
+ sb.result_pool = b->pool;
+ opt_rev.kind = svn_opt_revision_working;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, moved_to_path),
+ &opt_rev, svn_depth_empty, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_dir);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(!status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_added);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+ SVN_TEST_ASSERT(status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_STRING_ASSERT(status->moved_from_abspath,
+ sbox_wc_path(b, deleted_path));
+ SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+ /* Ensure that the edited file has the expected content. */
+ /* Unlike test_merge_incoming_move_dir, the surviving content is the
+ * local working-copy modification, not the branch-side commit. */
+ child_path = svn_relpath_join(moved_to_path, deleted_dir_child,
+ b->pool);
+ SVN_ERR(svn_stringbuf_from_file2(&buf, sbox_wc_path(b, child_path),
+ b->pool));
+ SVN_TEST_STRING_ASSERT(buf->data, modified_file_in_working_copy_content);
+
+ return SVN_NO_ERROR;
+}
+
+/* Test 'incoming move dir merge' when files were added inside the moved
+ * directory on both the trunk and the branch. After resolution, both
+ * added files must be present in the move destination with the expected
+ * contents/status, and both must survive a commit. */
+static svn_error_t *
+test_merge_incoming_move_dir3(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ svn_client_ctx_t *ctx;
+ const char *deleted_path;
+ const char *moved_to_path;
+ const char *child_path;
+ const char *child_url;
+ svn_client_conflict_t *conflict;
+ struct status_baton sb;
+ struct info_baton ib;
+ struct svn_client_status_t *status;
+ svn_stringbuf_t *buf;
+ svn_opt_revision_t opt_rev;
+
+ SVN_ERR(svn_test__sandbox_create(b, "merge_incoming_move_dir3", opts, pool));
+
+ SVN_ERR(create_wc_with_incoming_delete_dir_conflict(b, TRUE, FALSE, FALSE,
+ TRUE));
+
+ deleted_path = svn_relpath_join(branch_path, deleted_dir_name, b->pool);
+ moved_to_path = svn_relpath_join(branch_path, new_dir_name, b->pool);
+
+ /* Resolve the tree conflict. */
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, deleted_path),
+ ctx, b->pool, b->pool));
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict, svn_client_conflict_option_incoming_move_dir_merge,
+ ctx, b->pool));
+
+ /* Ensure that the moved-away directory has the expected status. */
+ sb.result_pool = b->pool;
+ opt_rev.kind = svn_opt_revision_working;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, deleted_path),
+ &opt_rev, svn_depth_empty, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_dir);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(!status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_deleted);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+ SVN_TEST_ASSERT(!status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+ SVN_TEST_STRING_ASSERT(status->moved_to_abspath,
+ sbox_wc_path(b, moved_to_path));
+
+ /* Ensure that the moved-here directory has the expected status. */
+ sb.result_pool = b->pool;
+ opt_rev.kind = svn_opt_revision_working;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, moved_to_path),
+ &opt_rev, svn_depth_empty, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_dir);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(!status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_added);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+ SVN_TEST_ASSERT(status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_STRING_ASSERT(status->moved_from_abspath,
+ sbox_wc_path(b, deleted_path));
+ SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+ /* Ensure that the file added on the branch has the expected content. */
+ child_path = svn_relpath_join(branch_path,
+ svn_relpath_join(new_dir_name,
+ new_file_name_branch,
+ b->pool),
+ b->pool);
+ SVN_ERR(svn_stringbuf_from_file2(&buf, sbox_wc_path(b, child_path),
+ b->pool));
+ SVN_TEST_STRING_ASSERT(buf->data, added_file_on_branch_content);
+
+ /* Ensure that the file added on the branch has the expected status. */
+ sb.result_pool = b->pool;
+ opt_rev.kind = svn_opt_revision_working;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, child_path),
+ &opt_rev, svn_depth_empty, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_file);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(!status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+ SVN_TEST_ASSERT(status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+ SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+ /* Ensure that the file added on the trunk has the expected content. */
+ child_path = svn_relpath_join(trunk_path,
+ svn_relpath_join(new_dir_name,
+ new_file_name,
+ b->pool),
+ b->pool);
+ SVN_ERR(svn_stringbuf_from_file2(&buf, sbox_wc_path(b, child_path),
+ b->pool));
+ SVN_TEST_STRING_ASSERT(buf->data, "This is a new file on the trunk\n");
+
+ /* Ensure that the file added on the trunk has the expected status. */
+ sb.result_pool = b->pool;
+ opt_rev.kind = svn_opt_revision_working;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, child_path),
+ &opt_rev, svn_depth_empty, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_file);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(!status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+ SVN_TEST_ASSERT(!status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+ SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+ /* Commit and make sure both files are present in the resulting revision. */
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ ib.result_pool = b->pool;
+ opt_rev.kind = svn_opt_revision_head;
+
+ /* The file added on the branch should be present. */
+ /* svn_client_info4() errors out if the URL does not exist at HEAD, so
+ * SVN_ERR() is the actual assertion here. */
+ child_url = apr_pstrcat(b->pool, b->repos_url, "/", branch_path, "/",
+ new_dir_name, "/", new_file_name_branch, SVN_VA_NULL);
+ SVN_ERR(svn_client_info4(child_url, &opt_rev, &opt_rev, svn_depth_empty,
+ TRUE, TRUE, TRUE, NULL,
+ info_func, &ib, ctx, b->pool));
+
+ /* The file added on the trunk should be present. */
+ /* Note: checked at its merged location inside the branch. */
+ child_url = apr_pstrcat(b->pool, b->repos_url, "/", branch_path, "/",
+ new_dir_name, "/", new_file_name, SVN_VA_NULL);
+ SVN_ERR(svn_client_info4(child_url, &opt_rev, &opt_rev, svn_depth_empty,
+ TRUE, TRUE, TRUE, NULL,
+ info_func, &ib, ctx, b->pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* A helper function which prepares a working copy for the tests below.
+ *
+ * Scenario: a file is copied on the trunk, the copy is merged to a
+ * branch, and then the copy is deleted on the trunk and on the branch
+ * independently. A final merge from trunk to branch raises an
+ * "incoming delete vs local delete" tree conflict on the branch's copy
+ * ("<branch_path>/<deleted_file_name>-copied"), which is left recorded
+ * in B's working copy on return. */
+static svn_error_t *
+create_wc_with_incoming_delete_vs_local_delete(svn_test__sandbox_t *b)
+{
+ svn_client_ctx_t *ctx;
+ /* NOTE: this was previously declared 'static', which served no purpose
+ * (it is unconditionally assigned before use on every call) and kept a
+ * pointer into a per-test pool alive across invocations. */
+ const char *trunk_url;
+ svn_opt_revision_t opt_rev;
+ const char *copy_src_path;
+ const char *copy_dst_name;
+ const char *copy_dst_path;
+ const char *deleted_file_path;
+
+ SVN_ERR(sbox_add_and_commit_greek_tree(b));
+
+ /* Create a branch of node "A". */
+ SVN_ERR(sbox_wc_copy(b, trunk_path, branch_path));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ /* On the trunk, copy "mu" to "mu-copied". */
+ copy_src_path = svn_relpath_join(trunk_path, deleted_file_name, b->pool);
+ copy_dst_name = apr_pstrcat(b->pool, deleted_file_name, "-copied",
+ SVN_VA_NULL);
+ copy_dst_path = svn_relpath_join(trunk_path, copy_dst_name, b->pool);
+ SVN_ERR(sbox_wc_copy(b, copy_src_path, copy_dst_path));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ /* Merge the file copy to the branch. */
+ trunk_url = apr_pstrcat(b->pool, b->repos_url, "/", trunk_path, SVN_VA_NULL);
+ opt_rev.kind = svn_opt_revision_head;
+ opt_rev.value.number = SVN_INVALID_REVNUM;
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+ SVN_ERR(svn_client_merge_peg5(trunk_url, NULL, &opt_rev,
+ sbox_wc_path(b, branch_path),
+ svn_depth_infinity,
+ FALSE, FALSE, FALSE, FALSE, FALSE, FALSE,
+ NULL, ctx, b->pool));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ /* Now delete the copied file on the trunk. */
+ deleted_file_path = svn_relpath_join(trunk_path, copy_dst_name, b->pool);
+ SVN_ERR(sbox_wc_delete(b, deleted_file_path));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ /* Delete the corresponding file on the branch. */
+ deleted_file_path = svn_relpath_join(branch_path, copy_dst_name,
+ b->pool);
+ SVN_ERR(sbox_wc_delete(b, deleted_file_path));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ /* Run a merge from the trunk to the branch.
+ * This should raise an "incoming delete vs local delete" tree conflict. */
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+ SVN_ERR(svn_client_merge_peg5(trunk_url, NULL, &opt_rev,
+ sbox_wc_path(b, branch_path),
+ svn_depth_infinity,
+ FALSE, FALSE, FALSE, FALSE, FALSE, FALSE,
+ NULL, ctx, b->pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* Test for the 'incoming delete vs local delete' bug fixed by r1751893. */
+static svn_error_t *
+test_merge_incoming_delete_vs_local_delete(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ svn_client_ctx_t *ctx;
+ const char *copy_dst_name;
+ const char *copy_dst_path;
+ svn_client_conflict_t *conflict;
+ svn_node_kind_t node_kind;
+
+ SVN_ERR(svn_test__sandbox_create(b, "merge_incoming_delete_vs_local_delete",
+ opts, pool));
+
+ SVN_ERR(create_wc_with_incoming_delete_vs_local_delete(b));
+
+ /* Reconstruct the conflicted path created by the helper above. */
+ copy_dst_name = apr_pstrcat(b->pool, deleted_file_name, "-copied",
+ SVN_VA_NULL);
+ copy_dst_path = svn_relpath_join(branch_path, copy_dst_name, b->pool);
+
+ /* Resolve the tree conflict. Before r1751893 there was an unintended error.*/
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, copy_dst_path),
+ ctx, b->pool, b->pool));
+
+ /* The same set of resolution options must be offered both before and
+ * after conflict details are fetched (hence the deliberately repeated
+ * check below). */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+ b->pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+ b->pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict, svn_client_conflict_option_incoming_delete_accept,
+ ctx, b->pool));
+
+ /* The file should be gone. */
+ SVN_ERR(svn_io_check_path(sbox_wc_path(b, copy_dst_path), &node_kind,
+ b->pool));
+ SVN_TEST_ASSERT(node_kind == svn_node_none);
+
+ return SVN_NO_ERROR;
+}
+
+/* Test resolving a property conflict raised by a merge, using a custom
+ * merged property value supplied via the 'merged text' option. */
+static svn_error_t *
+test_merge_file_prop(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ svn_client_ctx_t *ctx;
+ svn_opt_revision_t opt_rev;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t text_conflicted;
+ apr_array_header_t *props_conflicted;
+ svn_boolean_t tree_conflicted;
+ apr_array_header_t *resolution_options;
+ svn_client_conflict_option_t *option;
+ const svn_string_t *propval;
+
+ SVN_ERR(svn_test__sandbox_create(b, "merge_file_prop", opts, pool));
+
+ SVN_ERR(sbox_add_and_commit_greek_tree(b));
+ /* Create a copy of node "A". */
+ SVN_ERR(sbox_wc_copy(b, "A", "A1"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* Commit conflicting file properties. */
+ SVN_ERR(sbox_wc_propset(b, "prop", "val1", "A/mu"));
+ SVN_ERR(sbox_wc_propset(b, "prop", "val2", "A1/mu"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+ opt_rev.kind = svn_opt_revision_head;
+ opt_rev.value.number = SVN_INVALID_REVNUM;
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, pool));
+
+ /* Merge "A" to "A1". */
+ SVN_ERR(svn_client_merge_peg5(svn_path_url_add_component2(b->repos_url, "A",
+ pool),
+ NULL, &opt_rev, sbox_wc_path(b, "A1"),
+ svn_depth_infinity,
+ FALSE, FALSE, FALSE, FALSE, FALSE, FALSE,
+ NULL, ctx, pool));
+
+ /* The file "mu" should have a property conflict. */
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A1/mu"), ctx,
+ pool, pool));
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, pool, pool));
+ SVN_TEST_ASSERT(!text_conflicted);
+ SVN_TEST_INT_ASSERT(props_conflicted->nelts, 1);
+ SVN_TEST_STRING_ASSERT(APR_ARRAY_IDX(props_conflicted, 0, const char *),
+ "prop");
+ SVN_TEST_ASSERT(!tree_conflicted);
+
+ /* Check the set of resolution options offered for the prop conflict. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_base_text,
+ svn_client_conflict_option_incoming_text,
+ svn_client_conflict_option_working_text,
+ svn_client_conflict_option_incoming_text_where_conflicted,
+ svn_client_conflict_option_working_text_where_conflicted,
+ svn_client_conflict_option_merged_text,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_prop_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ /* Attach a caller-supplied merged value to the 'merged text' option. */
+ SVN_ERR(svn_client_conflict_prop_get_resolution_options(&resolution_options,
+ conflict, ctx,
+ pool, pool));
+ option = svn_client_conflict_option_find_by_id(
+ resolution_options,
+ svn_client_conflict_option_merged_text);
+ svn_client_conflict_option_set_merged_propval(
+ option, svn_string_create("merged-val", pool));
+
+ /* Resolve the conflict with a merged property value. */
+ SVN_ERR(svn_client_conflict_prop_resolve(conflict, "prop", option,
+ ctx, pool));
+ /* The file should not be in conflict. */
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A1/mu"), ctx,
+ pool, pool));
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, pool, pool));
+ SVN_TEST_ASSERT(!text_conflicted);
+ SVN_TEST_INT_ASSERT(props_conflicted->nelts, 0);
+ SVN_TEST_ASSERT(!tree_conflicted);
+
+ /* And it should have the expected property value. */
+ SVN_ERR(svn_wc_prop_get2(&propval, ctx->wc_ctx, sbox_wc_path(b, "A1/mu"),
+ "prop", pool, pool));
+ SVN_TEST_STRING_ASSERT(propval->data, "merged-val");
+
+ return SVN_NO_ERROR;
+}
+
+/* Test that resolving an incoming-move tree conflict with the
+ * 'incoming move file text merge' option can itself raise a text
+ * conflict in the move destination, and verify the conflict artifacts
+ * (base/working/incoming versions and the conflict markers). */
+static svn_error_t *
+test_merge_incoming_move_file_text_merge_conflict(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ svn_client_ctx_t *ctx;
+ svn_opt_revision_t opt_rev;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t text_conflicted;
+ apr_array_header_t *props_conflicted;
+ svn_boolean_t tree_conflicted;
+ const char *base_abspath;
+ const char *working_abspath;
+ const char *incoming_old_abspath;
+ const char *incoming_new_abspath;
+ svn_stringbuf_t *buf;
+
+ SVN_ERR(svn_test__sandbox_create(
+ b, "merge_incoming_move_file_text_merge_conflict", opts, pool));
+
+ SVN_ERR(sbox_add_and_commit_greek_tree(b));
+ /* Write initial file content. */
+ SVN_ERR(sbox_file_write(b, "A/mu", "Initial content.\n"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* Create a copy of node "A". */
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+ SVN_ERR(sbox_wc_copy(b, "A", "A1"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* On "trunk", move the file and edit it. */
+ SVN_ERR(sbox_wc_move(b, "A/mu", "A/mu-moved"));
+ SVN_ERR(sbox_file_write(b, "A/mu-moved", "New trunk content.\n"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* On "branch", edit the file. */
+ SVN_ERR(sbox_file_write(b, "A1/mu", "New branch content.\n"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+ opt_rev.kind = svn_opt_revision_head;
+ opt_rev.value.number = SVN_INVALID_REVNUM;
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, pool));
+
+ /* Merge "A" to "A1". */
+ SVN_ERR(svn_client_merge_peg5(svn_path_url_add_component2(b->repos_url, "A",
+ pool),
+ NULL, &opt_rev, sbox_wc_path(b, "A1"),
+ svn_depth_infinity,
+ FALSE, FALSE, FALSE, FALSE, FALSE, FALSE,
+ NULL, ctx, pool));
+
+ /* We should have a tree conflict in the file "mu". */
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A1/mu"), ctx,
+ pool, pool));
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, pool, pool));
+ SVN_TEST_ASSERT(!text_conflicted);
+ SVN_TEST_INT_ASSERT(props_conflicted->nelts, 0);
+ SVN_TEST_ASSERT(tree_conflicted);
+
+ /* Check available tree conflict resolution options. */
+ /* Before details are fetched the move is not yet detected, so only the
+ * generic incoming-delete options are offered. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_ignore,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, pool));
+
+ /* After fetching details the move-aware option replaces them. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_move_file_text_merge,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ /* Resolve the tree conflict by moving "mu" to "mu-moved". */
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict, svn_client_conflict_option_incoming_move_file_text_merge,
+ ctx, pool));
+
+ /* We should now have a text conflict in the file "mu-moved". */
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A1/mu-moved"),
+ ctx, pool, pool));
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, pool, pool));
+ SVN_TEST_ASSERT(text_conflicted);
+ SVN_TEST_INT_ASSERT(props_conflicted->nelts, 0);
+ SVN_TEST_ASSERT(!tree_conflicted);
+
+ /* Check available text conflict resolution options. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_base_text,
+ svn_client_conflict_option_incoming_text,
+ svn_client_conflict_option_working_text,
+ svn_client_conflict_option_incoming_text_where_conflicted,
+ svn_client_conflict_option_working_text_where_conflicted,
+ svn_client_conflict_option_merged_text,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_text_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ /* Check versions of the text-conflicted file. */
+ SVN_ERR(svn_client_conflict_text_get_contents(&base_abspath,
+ &working_abspath,
+ &incoming_old_abspath,
+ &incoming_new_abspath,
+ conflict, pool, pool));
+
+ /* No base version is recorded for this conflict. */
+ SVN_TEST_ASSERT(base_abspath == NULL);
+
+ SVN_ERR(svn_stringbuf_from_file2(&buf, incoming_old_abspath, pool));
+ SVN_TEST_STRING_ASSERT(buf->data, "Initial content.\n");
+
+ SVN_ERR(svn_stringbuf_from_file2(&buf, working_abspath, pool));
+ SVN_TEST_STRING_ASSERT(buf->data, "New branch content.\n");
+
+ SVN_ERR(svn_stringbuf_from_file2(&buf, incoming_new_abspath, pool));
+ SVN_TEST_STRING_ASSERT(buf->data, "New trunk content.\n");
+
+ /* The move destination carries three-way conflict markers. */
+ SVN_ERR(svn_stringbuf_from_file2(&buf, sbox_wc_path(b, "A1/mu-moved"),
+ pool));
+ SVN_TEST_STRING_ASSERT(buf->data,
+ "<<<<<<< .working\n"
+ "New branch content.\n"
+ "||||||| .old\n"
+ "Initial content.\n"
+ "=======\n"
+ "New trunk content.\n"
+ ">>>>>>> .new\n");
+
+ return SVN_NO_ERROR;
+}
+
+/* Test resolving an incoming-edit vs. local-move tree conflict by
+ * applying the incoming edit to the local move destination via the
+ * 'local move file text merge' option. */
+static svn_error_t *
+test_merge_incoming_edit_file_moved_away(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ svn_client_ctx_t *ctx;
+ svn_opt_revision_t opt_rev;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t text_conflicted;
+ apr_array_header_t *props_conflicted;
+ svn_boolean_t tree_conflicted;
+ svn_stringbuf_t *buf;
+
+ SVN_ERR(svn_test__sandbox_create(
+ b, "merge_incoming_edit_file_moved_away", opts, pool));
+
+ SVN_ERR(sbox_add_and_commit_greek_tree(b));
+ /* Create a copy of node "A". */
+ SVN_ERR(sbox_wc_copy(b, "A", "A1"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* On "trunk", edit the file. */
+ SVN_ERR(sbox_file_write(b, "A/mu", "New trunk content.\n"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* On "branch", move the file. */
+ SVN_ERR(sbox_wc_move(b, "A1/mu", "A1/mu-moved"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, pool));
+
+ /* Merge "trunk" to "branch". */
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+ opt_rev.kind = svn_opt_revision_head;
+ opt_rev.value.number = SVN_INVALID_REVNUM;
+ SVN_ERR(svn_client_merge_peg5(svn_path_url_add_component2(b->repos_url, "A",
+ pool),
+ NULL, &opt_rev, sbox_wc_path(b, "A1"),
+ svn_depth_infinity,
+ FALSE, FALSE, FALSE, FALSE, FALSE, FALSE,
+ NULL, ctx, pool));
+
+ /* We should have a tree conflict in the file "mu". */
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A1/mu"), ctx,
+ pool, pool));
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, pool, pool));
+ SVN_TEST_ASSERT(!text_conflicted);
+ SVN_TEST_INT_ASSERT(props_conflicted->nelts, 0);
+ SVN_TEST_ASSERT(tree_conflicted);
+
+ /* Check available tree conflict resolution options. */
+ /* Before details are fetched only the generic options are offered. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, pool));
+
+ /* After fetching details the local move has been detected. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_local_move_file_text_merge,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ /* Resolve the tree conflict by applying the incoming edit to the local
+ * move destination "mu-moved". */
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict, svn_client_conflict_option_local_move_file_text_merge,
+ ctx, pool));
+
+ /* The file should not be in conflict. */
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A1/mu-moved"),
+ ctx, pool, pool));
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, pool, pool));
+ SVN_TEST_ASSERT(!text_conflicted);
+ SVN_TEST_INT_ASSERT(props_conflicted->nelts, 0);
+ SVN_TEST_ASSERT(!tree_conflicted);
+
+ /* And it should have the expected content. */
+ SVN_ERR(svn_stringbuf_from_file2(&buf, sbox_wc_path(b, "A1/mu-moved"),
+ pool));
+ SVN_TEST_STRING_ASSERT(buf->data, "New trunk content.\n");
+
+ return SVN_NO_ERROR;
+}
+
+/* Test that the resolver detects a chained incoming move
+ * (A/mu -> A/mu-moved -> A/mu-moved-again, committed in two revisions)
+ * and merges the local edit into the final move destination.
+ * Regression test: around r1764234 the move went undetected (see the
+ * comment further down). */
+static svn_error_t *
+test_merge_incoming_chained_move_local_edit(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ svn_client_ctx_t *ctx;
+ svn_opt_revision_t opt_rev;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t text_conflicted;
+ apr_array_header_t *props_conflicted;
+ svn_boolean_t tree_conflicted;
+ svn_stringbuf_t *buf;
+
+ SVN_ERR(svn_test__sandbox_create(
+ b, "merge_incoming_chained_move_local_edit", opts, pool));
+
+ SVN_ERR(sbox_add_and_commit_greek_tree(b));
+ /* Create a copy of node "A". */
+ SVN_ERR(sbox_wc_copy(b, "A", "A1"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* On "trunk", move the file. */
+ SVN_ERR(sbox_wc_move(b, "A/mu", "A/mu-moved"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* On "trunk", move the file again. */
+ SVN_ERR(sbox_wc_move(b, "A/mu-moved", "A/mu-moved-again"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* On "branch", edit the file. */
+ SVN_ERR(sbox_file_write(b, "A1/mu", "New branch content.\n"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, pool));
+
+ /* Merge "trunk" to "branch". */
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+ opt_rev.kind = svn_opt_revision_head;
+ opt_rev.value.number = SVN_INVALID_REVNUM;
+ SVN_ERR(svn_client_merge_peg5(svn_path_url_add_component2(b->repos_url, "A",
+ pool),
+ NULL, &opt_rev, sbox_wc_path(b, "A1"),
+ svn_depth_infinity,
+ FALSE, FALSE, FALSE, FALSE, FALSE, FALSE,
+ NULL, ctx, pool));
+
+ /* We should have a tree conflict in the file "mu". */
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A1/mu"), ctx,
+ pool, pool));
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, pool, pool));
+ SVN_TEST_ASSERT(!text_conflicted);
+ SVN_TEST_INT_ASSERT(props_conflicted->nelts, 0);
+ SVN_TEST_ASSERT(tree_conflicted);
+
+ /* Check available tree conflict resolution options. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_ignore,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, pool));
+
+ /* This used to fail around r1764234. The conflict resolver was
+ * unable to detect the move, and didn't offer the
+ * svn_client_conflict_option_incoming_move_file_text_merge option. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_move_file_text_merge,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ /* Resolve the tree conflict by moving "mu" to "mu-moved-again". */
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict, svn_client_conflict_option_incoming_move_file_text_merge,
+ ctx, pool));
+
+ /* The file should not be in conflict. */
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A1/mu"),
+ ctx, pool, pool));
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, pool, pool));
+ SVN_TEST_ASSERT(!text_conflicted);
+ SVN_TEST_INT_ASSERT(props_conflicted->nelts, 0);
+ SVN_TEST_ASSERT(!tree_conflicted);
+
+ /* The move destination should have the expected content. */
+ SVN_ERR(svn_stringbuf_from_file2(&buf, sbox_wc_path(b, "A1/mu-moved-again"),
+ pool));
+ SVN_TEST_STRING_ASSERT(buf->data, "New branch content.\n");
+
+ return SVN_NO_ERROR;
+}
+
+/* Regression test for the conflict resolver: merge an incoming move of a
+ * directory ("A/B" -> "A/B-moved") whose history also contains a file move
+ * inside it ("A/B/lambda" -> "A/B/lambda-moved") into a branch that edited
+ * the nested file.  Resolving the directory-level tree conflict with the
+ * dir-merge option is expected to leave a second tree conflict behind on
+ * the moved-away file inside the move destination. */
+static svn_error_t *
+test_merge_incoming_move_dir_with_moved_file(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ svn_client_ctx_t *ctx;
+ svn_opt_revision_t opt_rev;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t text_conflicted;
+ apr_array_header_t *props_conflicted;
+ svn_boolean_t tree_conflicted;
+ struct status_baton sb;
+ struct svn_client_status_t *status;
+
+ SVN_ERR(svn_test__sandbox_create(
+ b, "merge_incoming_move_dir_with_moved_file", opts, pool));
+
+ SVN_ERR(sbox_add_and_commit_greek_tree(b));
+ /* Create a copy of node "A". */
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+ SVN_ERR(sbox_wc_copy(b, "A", "A1"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* On "trunk", move a file and then move the dir containing the file. */
+ SVN_ERR(sbox_wc_move(b, "A/B/lambda", "A/B/lambda-moved"));
+ SVN_ERR(sbox_wc_move(b, "A/B", "A/B-moved"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* On "branch", edit the file. */
+ SVN_ERR(sbox_file_write(b, "A1/B/lambda", "New branch content.\n"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, pool));
+
+ /* Merge "trunk" to "branch". */
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+ opt_rev.kind = svn_opt_revision_head;
+ opt_rev.value.number = SVN_INVALID_REVNUM;
+ SVN_ERR(svn_client_merge_peg5(svn_path_url_add_component2(b->repos_url, "A",
+ pool),
+ NULL, &opt_rev, sbox_wc_path(b, "A1"),
+ svn_depth_infinity,
+ FALSE, FALSE, FALSE, FALSE, FALSE, FALSE,
+ NULL, ctx, pool));
+
+ /* We should have a tree conflict on the dir. */
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A1/B"), ctx,
+ pool, pool));
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, pool, pool));
+ SVN_TEST_ASSERT(!text_conflicted);
+ SVN_TEST_INT_ASSERT(props_conflicted->nelts, 0);
+ SVN_TEST_ASSERT(tree_conflicted);
+
+ /* Check available tree conflict resolution options. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_ignore,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, pool));
+
+ /* After scanning details the resolver should have detected the incoming
+ * dir move and offer the dir-merge resolution option instead of the
+ * plain delete options. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_move_dir_merge,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ /* Resolve the tree conflict by moving the local directory and merging. */
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict, svn_client_conflict_option_incoming_move_dir_merge,
+ ctx, pool));
+
+ /* The dir should not be in conflict. */
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A1/B"),
+ ctx, pool, pool));
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, pool, pool));
+ SVN_TEST_ASSERT(!text_conflicted);
+ SVN_TEST_INT_ASSERT(props_conflicted->nelts, 0);
+ SVN_TEST_ASSERT(!tree_conflicted);
+
+ /* Ensure that the move source dir has the expected status. */
+ opt_rev.kind = svn_opt_revision_working;
+ sb.result_pool = pool;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, "A1/B"),
+ &opt_rev, svn_depth_empty, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, pool));
+ status = sb.status;
+ SVN_TEST_INT_ASSERT(status->kind, svn_node_dir);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(!status->conflicted);
+ SVN_TEST_INT_ASSERT(status->node_status, svn_wc_status_deleted);
+ SVN_TEST_INT_ASSERT(status->text_status, svn_wc_status_normal);
+ SVN_TEST_INT_ASSERT(status->prop_status, svn_wc_status_none);
+ SVN_TEST_ASSERT(!status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_STRING_ASSERT(status->moved_from_abspath, NULL);
+ SVN_TEST_STRING_ASSERT(status->moved_to_abspath,
+ sbox_wc_path(b, "A1/B-moved"));
+
+ /* Ensure that the move destination dir has the expected status. */
+ opt_rev.kind = svn_opt_revision_working;
+ sb.result_pool = pool;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, "A1/B-moved"),
+ &opt_rev, svn_depth_empty, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, pool));
+ status = sb.status;
+ SVN_TEST_INT_ASSERT(status->kind, svn_node_dir);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(!status->conflicted);
+ SVN_TEST_INT_ASSERT(status->node_status, svn_wc_status_added);
+ SVN_TEST_INT_ASSERT(status->text_status, svn_wc_status_normal);
+ SVN_TEST_INT_ASSERT(status->prop_status, svn_wc_status_none);
+ SVN_TEST_ASSERT(status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_STRING_ASSERT(status->moved_from_abspath,
+ sbox_wc_path(b, "A1/B"));
+ SVN_TEST_STRING_ASSERT(status->moved_to_abspath, NULL);
+
+ /* We should have another tree conflict on the moved-away file. */
+ SVN_ERR(svn_client_conflict_get(&conflict,
+ sbox_wc_path(b, "A1/B-moved/lambda"),
+ ctx, pool, pool));
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, pool, pool));
+ SVN_TEST_ASSERT(!text_conflicted);
+ SVN_TEST_INT_ASSERT(props_conflicted->nelts, 0);
+ SVN_TEST_ASSERT(tree_conflicted);
+
+ /* Check available tree conflict resolution options. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_ignore,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, pool));
+
+ /* The nested file move should now be detected as well. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_move_file_text_merge,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ /* ### Need to test resolving the conflict on "A1/B-moved/lambda". */
+
+ return SVN_NO_ERROR;
+}
+
+/* Regression test: an incoming file move where the move target was later
+ * deleted and replaced by an unrelated file (a new line of history), then
+ * moved again.  Because the original "mu" ends in a deletion, the resolver
+ * must NOT offer svn_client_conflict_option_incoming_move_file_text_merge;
+ * only postpone and accept-current-wc-state apply after get_details(). */
+static svn_error_t *
+test_merge_incoming_file_move_new_line_of_history(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ svn_client_ctx_t *ctx;
+ svn_opt_revision_t opt_rev;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t text_conflicted;
+ apr_array_header_t *props_conflicted;
+ svn_boolean_t tree_conflicted;
+
+ SVN_ERR(svn_test__sandbox_create(
+ b, "merge_incoming_file_move_new_line_of_history", opts, pool));
+
+ SVN_ERR(sbox_add_and_commit_greek_tree(b));
+ /* Create a copy of node "A". */
+ SVN_ERR(sbox_wc_copy(b, "A", "A1"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* On "trunk", move the file. */
+ SVN_ERR(sbox_wc_move(b, "A/mu", "A/mu-moved"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* On "trunk", change the line of history of the moved file by
+ * replacing it. */
+ SVN_ERR(sbox_wc_delete(b, "A/mu-moved"));
+ SVN_ERR(sbox_file_write(b, "A/mu-moved", "x"));
+ SVN_ERR(sbox_wc_add(b, "A/mu-moved"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* On "trunk", move the replaced file. */
+ SVN_ERR(sbox_wc_move(b, "A/mu-moved", "A/mu-moved-again"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* On "branch", edit the file. */
+ SVN_ERR(sbox_file_write(b, "A1/mu", "New branch content.\n"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, pool));
+
+ /* Merge "trunk" to "branch". */
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+ opt_rev.kind = svn_opt_revision_head;
+ opt_rev.value.number = SVN_INVALID_REVNUM;
+ SVN_ERR(svn_client_merge_peg5(svn_path_url_add_component2(b->repos_url, "A",
+ pool),
+ NULL, &opt_rev, sbox_wc_path(b, "A1"),
+ svn_depth_infinity,
+ FALSE, FALSE, FALSE, FALSE, FALSE, FALSE,
+ NULL, ctx, pool));
+
+ /* We should have a tree conflict in the file "mu". */
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A1/mu"), ctx,
+ pool, pool));
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, pool, pool));
+ SVN_TEST_ASSERT(!text_conflicted);
+ SVN_TEST_INT_ASSERT(props_conflicted->nelts, 0);
+ SVN_TEST_ASSERT(tree_conflicted);
+
+ /* Check available tree conflict resolution options. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_ignore,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, pool));
+
+ /* The svn_client_conflict_option_incoming_move_file_text_merge option
+ * should not be available, as the "mu" file was actually deleted at
+ * some point (and the remaining move is a part of the new line of
+ * history). */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Shared implementation of the update_incoming_dir_move_with_* tests.
+ * Sets up an update that brings in a directory move containing a nested
+ * file move, raises an "incoming move vs local edit" tree conflict on the
+ * trunk's "B" dir, and resolves it in two steps: first the dir-merge
+ * option, then the file-text-merge option for the follow-up conflict on
+ * the nested file.
+ *
+ * MOVE_PARENT: additionally move the moved dir into another parent dir.
+ * MOVE_BACK: (only with MOVE_PARENT) move the dir back again afterwards.
+ * MOVE_PARENT_TWICE: (only with MOVE_PARENT, mutually exclusive with
+ * MOVE_BACK) move the new parent dir once more.
+ * SANDBOX_NAME names the test sandbox.
+ *
+ * NOTE(review): relies on file-scope fixtures trunk_path and
+ * modified_file_content which are defined elsewhere in this file. */
+static svn_error_t *
+run_test_update_incoming_dir_move_with_nested_file_move(
+ const svn_test_opts_t *opts,
+ svn_boolean_t move_parent,
+ svn_boolean_t move_back,
+ svn_boolean_t move_parent_twice,
+ const char *sandbox_name,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ const char *deleted_dir;
+ const char *moved_dir;
+ const char *deleted_file;
+ const char *moved_file;
+ svn_client_ctx_t *ctx;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t text_conflicted;
+ apr_array_header_t *props_conflicted;
+ svn_boolean_t tree_conflicted;
+ svn_stringbuf_t *buf;
+ svn_node_kind_t kind;
+ svn_opt_revision_t opt_rev;
+ svn_client_status_t *status;
+ struct status_baton sb;
+
+ SVN_ERR(svn_test__sandbox_create(b, sandbox_name, opts, pool));
+ SVN_ERR(sbox_add_and_commit_greek_tree(b));
+
+ /* Move a directory on the trunk into another directory. */
+ deleted_dir = svn_relpath_join(trunk_path, "B", b->pool);
+ moved_dir = svn_relpath_join(trunk_path, "C/B", b->pool);
+ SVN_ERR(sbox_wc_move(b, deleted_dir, moved_dir));
+
+ /* Rename a file inside the moved directory. */
+ deleted_file = svn_relpath_join(moved_dir, "lambda" , b->pool);
+ moved_file = svn_relpath_join(moved_dir, "lambda-moved", b->pool);
+ SVN_ERR(sbox_wc_move(b, deleted_file, moved_file));
+
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ if (move_parent)
+ {
+ /* Move the directory again. */
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+ deleted_dir = svn_relpath_join(trunk_path, "C/B", b->pool);
+ moved_dir = svn_relpath_join(trunk_path, "D/H/B", b->pool);
+ SVN_ERR(sbox_wc_move(b, deleted_dir, moved_dir));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ if (move_back)
+ {
+ /* And back again. */
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+ deleted_dir = svn_relpath_join(trunk_path, "D/H/B", b->pool);
+ moved_dir = svn_relpath_join(trunk_path, "C/B", b->pool);
+ SVN_ERR(sbox_wc_move(b, deleted_dir, moved_dir));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ }
+ else if (move_parent_twice)
+ {
+ /* Move the directory again. */
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+ deleted_dir = svn_relpath_join(trunk_path, "D/H", b->pool);
+ moved_dir = svn_relpath_join(trunk_path, "D/G/H", b->pool);
+ SVN_ERR(sbox_wc_move(b, deleted_dir, moved_dir));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ moved_dir = svn_relpath_join(trunk_path, "D/G/H/B", b->pool);
+ }
+
+ /* Recompute the nested file's final location under the dir's
+ * current destination. */
+ moved_file = svn_relpath_join(moved_dir, "lambda-moved", b->pool);
+ }
+
+ /* Update into the past. */
+ SVN_ERR(sbox_wc_update(b, "", 1));
+
+ /* Modify a file in the working copy. */
+ deleted_file = svn_relpath_join(trunk_path, "B/lambda", b->pool);
+ SVN_ERR(sbox_file_write(b, deleted_file, modified_file_content));
+
+ /* Update to HEAD.
+ * This should raise an "incoming move vs local edit" tree conflict. */
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, pool));
+
+ /* We should have a tree conflict in the directory "A/B". */
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A/B"), ctx,
+ pool, pool));
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, pool, pool));
+ SVN_TEST_ASSERT(!text_conflicted);
+ SVN_TEST_INT_ASSERT(props_conflicted->nelts, 0);
+ SVN_TEST_ASSERT(tree_conflicted);
+
+ /* Check available tree conflict resolution options. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_ignore,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, pool));
+
+ /* After details are fetched, the dir move should be detected. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_move_dir_merge,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict, svn_client_conflict_option_incoming_move_dir_merge,
+ ctx, pool));
+
+ /* There should now be a tree conflict inside the moved directory,
+ * signaling a missing file. */
+ deleted_file = svn_relpath_join(moved_dir, "lambda" , b->pool);
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, deleted_file),
+ ctx, pool, pool));
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, pool, pool));
+ SVN_TEST_ASSERT(!text_conflicted);
+ SVN_TEST_INT_ASSERT(props_conflicted->nelts, 0);
+ SVN_TEST_ASSERT(tree_conflicted);
+ SVN_TEST_ASSERT(svn_client_conflict_get_local_change(conflict) ==
+ svn_wc_conflict_reason_edited);
+ SVN_TEST_ASSERT(svn_client_conflict_get_incoming_change(conflict) ==
+ svn_wc_conflict_action_delete);
+
+ /* Make sure the file has the expected content. */
+ SVN_ERR(svn_stringbuf_from_file2(&buf, sbox_wc_path(b, deleted_file), pool));
+ SVN_TEST_STRING_ASSERT(buf->data, modified_file_content);
+
+ /* Check available tree conflict resolution options. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_ignore,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, pool));
+
+ /* The nested file move should be detected as well. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_move_file_text_merge,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict, svn_client_conflict_option_incoming_move_file_text_merge,
+ ctx, pool));
+
+ /* Ensure that the deleted file is gone. */
+ SVN_ERR(svn_io_check_path(sbox_wc_path(b, deleted_file), &kind, b->pool));
+ SVN_TEST_ASSERT(kind == svn_node_none);
+
+ /* Ensure that the moved-target file has the expected status. */
+ opt_rev.kind = svn_opt_revision_working;
+ sb.result_pool = b->pool;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, moved_file),
+ &opt_rev, svn_depth_unknown, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_file);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(!status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_modified);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_modified);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+ SVN_TEST_ASSERT(!status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+ SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+ /* The file should not be in conflict. */
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, moved_file),
+ ctx, b->pool, b->pool));
+
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, b->pool, b->pool));
+ SVN_TEST_ASSERT(!text_conflicted &&
+ props_conflicted->nelts == 0 &&
+ !tree_conflicted);
+
+ /* Make sure the file has the expected content. */
+ SVN_ERR(svn_stringbuf_from_file2(&buf, sbox_wc_path(b, moved_file), pool));
+ SVN_TEST_STRING_ASSERT(buf->data, modified_file_content);
+
+ return SVN_NO_ERROR;
+}
+
+/* Driver: plain incoming dir move with a nested file move
+ * (move_parent=FALSE, move_back=FALSE, move_parent_twice=FALSE). */
+static svn_error_t *
+test_update_incoming_dir_move_with_nested_file_move(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ return run_test_update_incoming_dir_move_with_nested_file_move(
+ opts, FALSE, FALSE, FALSE,
+ "update_incoming_dir_move_with_nested_file_move", pool);
+}
+
+/* Same test as above, but with a moved parent directory
+ * (move_parent=TRUE, move_back=FALSE, move_parent_twice=FALSE). */
+static svn_error_t *
+test_update_incoming_dir_move_with_parent_move(
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ return run_test_update_incoming_dir_move_with_nested_file_move(
+ opts, TRUE, FALSE, FALSE,
+ "update_incoming_dir_move_with_parent_move", pool);
+}
+
+/* Same test as above, but with the parent directory moved back
+ * (move_parent=TRUE, move_back=TRUE, move_parent_twice=FALSE). */
+static svn_error_t *
+test_update_incoming_dir_move_with_parent_moved_back(
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ return run_test_update_incoming_dir_move_with_nested_file_move(
+ opts, TRUE, TRUE, FALSE,
+ "update_incoming_dir_move_with_parent_moved_back", pool);
+}
+
+/* Same test as above, but with the parent directory moved twice
+ * (move_parent=TRUE, move_back=FALSE, move_parent_twice=TRUE). */
+static svn_error_t *
+test_update_incoming_dir_move_with_parent_moved_twice(
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ return run_test_update_incoming_dir_move_with_nested_file_move(
+ opts, TRUE, FALSE, TRUE,
+ "update_incoming_dir_move_with_parent_moved_twice", pool);
+}
+
+/* A helper function which prepares a working copy for the tests below:
+ * commits a new file with a property, backdates the working copy, adds a
+ * different file with a different property value at the same path, and
+ * updates to HEAD so that an "incoming add vs local add" tree conflict is
+ * raised.  Verifies the expected status and conflict before returning.
+ * NOTE(review): the 'static' storage class on new_file_path looks
+ * unintentional -- the variable is reassigned on every call, so a plain
+ * local would behave the same; confirm before changing upstream. */
+static svn_error_t *
+create_wc_with_file_add_vs_file_add_update_conflict(svn_test__sandbox_t *b)
+{
+ static const char *new_file_path;
+ svn_client_ctx_t *ctx;
+ svn_opt_revision_t opt_rev;
+ svn_client_status_t *status;
+ struct status_baton sb;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t tree_conflicted;
+
+ SVN_ERR(sbox_add_and_commit_greek_tree(b));
+
+ /* Add a new file and commit. */
+ new_file_path = svn_relpath_join(trunk_path, new_file_name, b->pool);
+ SVN_ERR(sbox_file_write(b, new_file_path,
+ "This is a new file on the trunk\n"));
+ SVN_ERR(sbox_wc_add(b, new_file_path));
+ SVN_ERR(sbox_wc_propset(b, "prop", "propval", new_file_path));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ /* Update into the past. */
+ SVN_ERR(sbox_wc_update(b, "", 1));
+
+ /* Add a different file scheduled for commit. */
+ new_file_path = svn_relpath_join(trunk_path, new_file_name, b->pool);
+ SVN_ERR(sbox_file_write(b, new_file_path,
+ "This is a different new file on the trunk\n"));
+ SVN_ERR(sbox_wc_add(b, new_file_path));
+ SVN_ERR(sbox_wc_propset(b, "prop", propval_different, new_file_path));
+
+ /* Update to HEAD.
+ * This should raise an "incoming add vs local add" tree conflict. */
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+
+ opt_rev.kind = svn_opt_revision_head;
+ opt_rev.value.number = SVN_INVALID_REVNUM;
+
+ /* Ensure that the file has the expected status. */
+ sb.result_pool = b->pool;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_file_path),
+ &opt_rev, svn_depth_unknown, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_file);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_replaced);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_modified);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_modified);
+ SVN_TEST_ASSERT(!status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+ SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_file_path),
+ ctx, b->pool, b->pool));
+
+ /* Ensure that the expected tree conflict is present. */
+ SVN_ERR(svn_client_conflict_get_conflicted(NULL, NULL, &tree_conflicted,
+ conflict, b->pool, b->pool));
+ SVN_TEST_ASSERT(tree_conflicted);
+ SVN_TEST_ASSERT(svn_client_conflict_get_local_change(conflict) ==
+ svn_wc_conflict_reason_added);
+ SVN_TEST_ASSERT(svn_client_conflict_get_incoming_change(conflict) ==
+ svn_wc_conflict_action_add);
+
+ return SVN_NO_ERROR;
+}
+
+/* Test resolving an "incoming add vs local add" update conflict with the
+ * incoming-added-file-text-merge option: the tree conflict should be
+ * converted into a text conflict, and the locally set property value is
+ * expected to survive the merge. */
+static svn_error_t *
+test_update_incoming_added_file_text_merge(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_client_ctx_t *ctx;
+ svn_client_conflict_t *conflict;
+ const char *new_file_path;
+ svn_boolean_t text_conflicted;
+ apr_array_header_t *props_conflicted;
+ svn_boolean_t tree_conflicted;
+ struct status_baton sb;
+ struct svn_client_status_t *status;
+ svn_opt_revision_t opt_rev;
+ const svn_string_t *propval;
+
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+
+ SVN_ERR(svn_test__sandbox_create(b, "update_incoming_added_file_text_merge",
+ opts, pool));
+
+ SVN_ERR(create_wc_with_file_add_vs_file_add_update_conflict(b));
+
+ /* Resolve the tree conflict. */
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+
+ new_file_path = svn_relpath_join(trunk_path, new_file_name, b->pool);
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_file_path),
+ ctx, b->pool, b->pool));
+
+ /* Check available tree conflict resolution options. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_added_file_text_merge,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, pool));
+
+ /* Check available tree conflict resolution options.
+ * The list of options remains unchanged after get_details(). */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_added_file_text_merge,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict,
+ svn_client_conflict_option_incoming_added_file_text_merge,
+ ctx, b->pool));
+
+ /* Ensure that the file has the expected status. */
+ opt_rev.kind = svn_opt_revision_working;
+ sb.result_pool = b->pool;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_file_path),
+ &opt_rev, svn_depth_unknown, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_file);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_conflicted);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_conflicted);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_modified);
+ SVN_TEST_ASSERT(!status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+ SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_file_path),
+ ctx, b->pool, b->pool));
+
+ /* We should have a text conflict instead of a tree conflict. */
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, b->pool, b->pool));
+ SVN_TEST_ASSERT(text_conflicted &&
+ props_conflicted->nelts == 0 &&
+ !tree_conflicted);
+
+ /* Verify the merged property value. ### Should we have a prop conflict? */
+ SVN_ERR(svn_wc_prop_get2(&propval, ctx->wc_ctx,
+ sbox_wc_path(b, new_file_path),
+ "prop", b->pool, b->pool));
+ SVN_TEST_STRING_ASSERT(propval->data, propval_different);
+
+ return SVN_NO_ERROR;
+}
+
+/* Regression test: merging an incoming file move where both sides edited
+ * the same property must, after resolving the tree conflict with the
+ * file-text-merge option, produce a property conflict (not a text
+ * conflict) on the move destination, with the expected four conflicting
+ * property values exposed.
+ * NOTE(review): several sbox_wc_propset() lines below end in a stray
+ * ';;' -- harmless (empty statement) but worth cleaning up upstream. */
+static svn_error_t *
+test_merge_incoming_move_file_prop_merge_conflict(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ svn_client_ctx_t *ctx;
+ svn_opt_revision_t opt_rev;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t text_conflicted;
+ apr_array_header_t *props_conflicted;
+ svn_boolean_t tree_conflicted;
+ const svn_string_t *base_propval;
+ const svn_string_t *working_propval;
+ const svn_string_t *incoming_old_propval;
+ const svn_string_t *incoming_new_propval;
+
+ SVN_ERR(svn_test__sandbox_create(
+ b, "merge_incoming_move_file_prop_merge_conflict", opts, pool));
+
+ SVN_ERR(sbox_add_and_commit_greek_tree(b));
+ /* Add a file property. */
+ SVN_ERR(sbox_wc_propset(b, "prop", "val-initial", "A/mu"));;
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* Create a copy of node "A". */
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+ SVN_ERR(sbox_wc_copy(b, "A", "A1"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* On "trunk", move the file and edit the property. */
+ SVN_ERR(sbox_wc_move(b, "A/mu", "A/mu-moved"));
+ SVN_ERR(sbox_wc_propset(b, "prop", "val-trunk", "A/mu-moved"));;
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* On "branch", edit the same property. */
+ SVN_ERR(sbox_wc_propset(b, "prop", "val-branch", "A1/mu"));;
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+ opt_rev.kind = svn_opt_revision_head;
+ opt_rev.value.number = SVN_INVALID_REVNUM;
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, pool));
+
+ /* Merge "trunk" to "branch". */
+ SVN_ERR(svn_client_merge_peg5(svn_path_url_add_component2(b->repos_url, "A",
+ pool),
+ NULL, &opt_rev, sbox_wc_path(b, "A1"),
+ svn_depth_infinity,
+ FALSE, FALSE, FALSE, FALSE, FALSE, FALSE,
+ NULL, ctx, pool));
+
+ /* We should have a tree conflict in the file "mu". */
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A1/mu"), ctx,
+ pool, pool));
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, pool, pool));
+ SVN_TEST_ASSERT(!text_conflicted);
+ SVN_TEST_INT_ASSERT(props_conflicted->nelts, 0);
+ SVN_TEST_ASSERT(tree_conflicted);
+
+ /* Check available tree conflict resolution options. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_ignore,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, pool));
+
+ /* After details are fetched, the file move should be detected. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_move_file_text_merge,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ /* Resolve the tree conflict by moving "mu" to "mu-moved". */
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict, svn_client_conflict_option_incoming_move_file_text_merge,
+ ctx, pool));
+
+ /* We should now have a property conflict in the file "mu-moved". */
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A1/mu-moved"),
+ ctx, pool, pool));
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, pool, pool));
+ SVN_TEST_ASSERT(!text_conflicted);
+ SVN_TEST_INT_ASSERT(props_conflicted->nelts, 1);
+ SVN_TEST_STRING_ASSERT(APR_ARRAY_IDX(props_conflicted, 0, const char *),
+ "prop");
+ SVN_TEST_ASSERT(!tree_conflicted);
+
+ /* Check available property conflict resolution options. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_base_text,
+ svn_client_conflict_option_incoming_text,
+ svn_client_conflict_option_working_text,
+ svn_client_conflict_option_incoming_text_where_conflicted,
+ svn_client_conflict_option_working_text_where_conflicted,
+ svn_client_conflict_option_merged_text,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_prop_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ /* Check conflicted property values. */
+ SVN_ERR(svn_client_conflict_prop_get_propvals(&base_propval,
+ &working_propval,
+ &incoming_old_propval,
+ &incoming_new_propval,
+ conflict, "prop", pool));
+ /* ### Is this the proper expectation for base_propval? */
+ SVN_TEST_STRING_ASSERT(base_propval->data, "val-branch");
+ SVN_TEST_STRING_ASSERT(working_propval->data, "val-branch");
+ SVN_TEST_STRING_ASSERT(incoming_old_propval->data, "val-initial");
+ SVN_TEST_STRING_ASSERT(incoming_new_propval->data, "val-trunk");
+
+ return SVN_NO_ERROR;
+}
+
+/* Test the "incoming move file text merge" tree conflict resolution
+ * option on a file which has svn:keywords set.  After resolution the
+ * moved file's keywords must be expanded in the working copy. */
+static svn_error_t *
+test_merge_incoming_move_file_text_merge_keywords(const svn_test_opts_t *opts,
+                                                  apr_pool_t *pool)
+{
+  svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+  svn_client_ctx_t *ctx;
+  svn_opt_revision_t opt_rev;
+  svn_client_conflict_t *conflict;
+  svn_boolean_t text_conflicted;
+  apr_array_header_t *props_conflicted;
+  svn_boolean_t tree_conflicted;
+  svn_stringbuf_t *buf;
+
+  SVN_ERR(svn_test__sandbox_create(
+            b, "merge_incoming_move_file_text_merge_keywords", opts, pool));
+
+  SVN_ERR(sbox_add_and_commit_greek_tree(b));
+  /* Set svn:keywords on a file.  (Fixed: a stray extra ';' followed this
+   * statement in the original.) */
+  SVN_ERR(sbox_wc_propset(b, SVN_PROP_KEYWORDS, "Revision", "A/mu"));
+  SVN_ERR(sbox_wc_commit(b, ""));
+  /* Create a copy of node "A". */
+  SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+  SVN_ERR(sbox_wc_copy(b, "A", "A1"));
+  SVN_ERR(sbox_wc_commit(b, ""));
+  /* On "trunk", begin using keywords in the file and move it. */
+  SVN_ERR(sbox_file_write(b, "A/mu", "$Revision$\n"));
+  SVN_ERR(sbox_wc_move(b, "A/mu", "A/mu-moved"));
+  SVN_ERR(sbox_wc_commit(b, ""));
+  /* On "branch", edit the file and make it equal to what's in trunk. */
+  SVN_ERR(sbox_file_write(b, "A1/mu", "$Revision$\n"));
+  SVN_ERR(sbox_wc_commit(b, ""));
+
+  SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+  opt_rev.kind = svn_opt_revision_head;
+  opt_rev.value.number = SVN_INVALID_REVNUM;
+  SVN_ERR(svn_test__create_client_ctx(&ctx, b, pool));
+
+  /* Merge "A" to "A1". */
+  SVN_ERR(svn_client_merge_peg5(svn_path_url_add_component2(b->repos_url, "A",
+                                                            pool),
+                                NULL, &opt_rev, sbox_wc_path(b, "A1"),
+                                svn_depth_infinity,
+                                FALSE, FALSE, FALSE, FALSE, FALSE, FALSE,
+                                NULL, ctx, pool));
+
+  /* We should have a tree conflict in the file "mu". */
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A1/mu"), ctx,
+                                  pool, pool));
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, pool, pool));
+  SVN_TEST_ASSERT(!text_conflicted);
+  SVN_TEST_INT_ASSERT(props_conflicted->nelts, 0);
+  SVN_TEST_ASSERT(tree_conflicted);
+
+  /* Check available tree conflict resolution options.  Before details are
+   * fetched, only the generic delete-oriented options are offered. */
+  {
+    svn_client_conflict_option_id_t expected_opts[] = {
+      svn_client_conflict_option_postpone,
+      svn_client_conflict_option_accept_current_wc_state,
+      svn_client_conflict_option_incoming_delete_ignore,
+      svn_client_conflict_option_incoming_delete_accept,
+      -1 /* end of list */
+    };
+    SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+  }
+
+  SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, pool));
+
+  /* After fetching details the move is detected and the move-aware
+   * resolution option becomes available. */
+  {
+    svn_client_conflict_option_id_t expected_opts[] = {
+      svn_client_conflict_option_postpone,
+      svn_client_conflict_option_accept_current_wc_state,
+      svn_client_conflict_option_incoming_move_file_text_merge,
+      -1 /* end of list */
+    };
+    SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+  }
+
+  /* Resolve the tree conflict by moving "mu" to "mu-moved". */
+  SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+            conflict, svn_client_conflict_option_incoming_move_file_text_merge,
+            ctx, pool));
+
+  /* The file should no longer be in conflict, and should not have a
+   * text conflict, because the contents are identical in "trunk" and
+   * in the "branch". */
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A1/mu-moved"),
+                                  ctx, pool, pool));
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, pool, pool));
+  SVN_TEST_ASSERT(!text_conflicted);
+  SVN_TEST_INT_ASSERT(props_conflicted->nelts, 0);
+  SVN_TEST_ASSERT(!tree_conflicted);
+
+  /* And it should have expected contents (with expanded keywords). */
+  SVN_ERR(svn_stringbuf_from_file2(&buf, sbox_wc_path(b, "A1/mu-moved"),
+                                   pool));
+  SVN_TEST_STRING_ASSERT(buf->data, "$Revision: 5 $\n");
+
+  return SVN_NO_ERROR;
+}
+
+/* A helper function which prepares a working copy for the tests below.
+ *
+ * Commits a new directory (with a file and two children) on trunk, then
+ * updates the working copy back into the past and creates a colliding
+ * local add of the same paths with different content and properties.
+ * The children are obstructed with nodes of the opposite kind (a local
+ * file where a directory is incoming, and vice versa); when
+ * UNVERSIONED_OBSTRUCTIONS is TRUE the obstructing nodes are left
+ * unversioned.  Finally updates to HEAD and asserts that the expected
+ * "incoming add vs local add" tree conflict was raised on the new
+ * directory. */
+static svn_error_t *
+create_wc_with_dir_add_vs_dir_add_update_conflict(
+  svn_test__sandbox_t *b,
+  svn_boolean_t unversioned_obstructions)
+{
+  /* Fixed: these locals were needlessly 'static' in the original; each
+   * is assigned before use on every call, so function scope suffices. */
+  const char *new_dir_path;
+  const char *new_dir_child_path;
+  const char *new_file_path;
+  const char *new_file_child_path;
+  svn_client_ctx_t *ctx;
+  svn_opt_revision_t opt_rev;
+  svn_client_status_t *status;
+  struct status_baton sb;
+  svn_client_conflict_t *conflict;
+  svn_boolean_t tree_conflicted;
+
+  SVN_ERR(sbox_add_and_commit_greek_tree(b));
+
+  /* Add new directories on trunk and in the working copy which occupy
+   * the same path but have different content and properties. */
+  new_dir_path = svn_relpath_join(trunk_path, new_dir_name, b->pool);
+  SVN_ERR(sbox_wc_mkdir(b, new_dir_path));
+  SVN_ERR(sbox_wc_propset(b, "prop", propval_trunk, new_dir_path));
+  new_file_path = svn_relpath_join(new_dir_path, new_file_name, b->pool);
+  SVN_ERR(sbox_file_write(b, new_file_path,
+                          "This is a new file on the trunk\n"));
+  SVN_ERR(sbox_wc_add(b, new_file_path));
+  SVN_ERR(sbox_wc_propset(b, "prop", propval_trunk, new_file_path));
+  /* Create a directory and a file which will be obstructed during update. */
+  new_dir_child_path = svn_relpath_join(new_dir_path, "dir_child", b->pool);
+  SVN_ERR(sbox_wc_mkdir(b, new_dir_child_path));
+  new_file_child_path = svn_relpath_join(new_dir_path, "file_child", b->pool);
+  SVN_ERR(sbox_file_write(b, new_file_child_path,
+                          "This is a child file on the trunk\n"));
+  SVN_ERR(sbox_wc_add(b, new_file_child_path));
+  SVN_ERR(sbox_wc_commit(b, ""));
+
+  /* Update back into the past. */
+  SVN_ERR(sbox_wc_update(b, "", 1));
+
+  new_dir_path = svn_relpath_join(trunk_path, new_dir_name, b->pool);
+  SVN_ERR(sbox_wc_mkdir(b, new_dir_path));
+  SVN_ERR(sbox_wc_propset(b, "prop", propval_different, new_dir_path));
+  new_file_path = svn_relpath_join(trunk_path,
+                                   svn_relpath_join(new_dir_name,
+                                                    new_file_name, b->pool),
+                                   b->pool);
+  SVN_ERR(sbox_file_write(b, new_file_path,
+                          /* NB: Ensure that the file content's length
+                           * differs! Tests are run with sleep for
+                           * timestamps disabled. */
+                          "This is a different new file\n"));
+  SVN_ERR(sbox_wc_add(b, new_file_path));
+  SVN_ERR(sbox_wc_propset(b, "prop", propval_different, new_file_path));
+
+  /* Add a file and a directory which obstruct incoming children.
+   * Note the deliberate kind swap: a file occupies the incoming
+   * directory's path and a directory occupies the incoming file's path. */
+  SVN_ERR(sbox_file_write(b, new_dir_child_path,
+                          "This is a new file on the trunk\n"));
+  if (!unversioned_obstructions)
+    {
+      SVN_ERR(sbox_wc_mkdir(b, new_file_child_path));
+      SVN_ERR(sbox_wc_add(b, new_dir_child_path));
+    }
+  else
+    SVN_ERR(svn_io_dir_make(sbox_wc_path(b, new_file_child_path),
+                            APR_OS_DEFAULT, b->pool));
+
+  /* Update to the HEAD revision.
+   * This should raise an "incoming add vs local add" tree conflict. */
+  SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+
+  SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+
+  /* Ensure that the directory has the expected status. */
+  opt_rev.kind = svn_opt_revision_working;
+  sb.result_pool = b->pool;
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_dir_path),
+                             &opt_rev, svn_depth_empty, TRUE, FALSE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_dir);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_replaced);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_modified);
+  SVN_TEST_ASSERT(!status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+
+  {
+    svn_client_conflict_option_id_t expected_opts[] = {
+      svn_client_conflict_option_postpone,
+      svn_client_conflict_option_accept_current_wc_state,
+      svn_client_conflict_option_incoming_add_ignore,
+      svn_client_conflict_option_incoming_added_dir_merge,
+      -1 /* end of list */
+    };
+    SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+                                         b->pool));
+  }
+
+  /* Fetching details does not change the set of options here. */
+  SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+  {
+    svn_client_conflict_option_id_t expected_opts[] = {
+      svn_client_conflict_option_postpone,
+      svn_client_conflict_option_accept_current_wc_state,
+      svn_client_conflict_option_incoming_add_ignore,
+      svn_client_conflict_option_incoming_added_dir_merge,
+      -1 /* end of list */
+    };
+    SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+                                         b->pool));
+  }
+
+  /* Ensure that the expected tree conflict is present. */
+  SVN_ERR(svn_client_conflict_get_conflicted(NULL, NULL, &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(tree_conflicted);
+  SVN_TEST_ASSERT(svn_client_conflict_get_local_change(conflict) ==
+                  svn_wc_conflict_reason_added);
+  SVN_TEST_ASSERT(svn_client_conflict_get_incoming_change(conflict) ==
+                  svn_wc_conflict_action_add);
+
+  return SVN_NO_ERROR;
+}
+
+/* Test the "incoming add ignore" resolution option for an
+ * "incoming add vs local add" update tree conflict: after resolution
+ * the locally added directory and file keep their local content and
+ * property values. */
+static svn_error_t *
+test_update_incoming_added_dir_ignore(const svn_test_opts_t *opts,
+                                      apr_pool_t *pool)
+{
+  svn_client_ctx_t *ctx;
+  svn_client_conflict_t *conflict;
+  const char *new_dir_path;
+  const char *new_file_path;
+  svn_boolean_t text_conflicted;
+  apr_array_header_t *props_conflicted;
+  svn_boolean_t tree_conflicted;
+  struct status_baton sb;
+  struct svn_client_status_t *status;
+  svn_opt_revision_t opt_rev;
+  const svn_string_t *propval;
+  svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+
+  SVN_ERR(svn_test__sandbox_create(b, "update_incoming_added_dir_ignore",
+                                   opts, pool));
+
+  /* FALSE: obstructing child nodes are versioned in this variant. */
+  SVN_ERR(create_wc_with_dir_add_vs_dir_add_update_conflict(b, FALSE));
+
+  /* Resolve the tree conflict. */
+  SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+  new_dir_path = svn_relpath_join(trunk_path, new_dir_name, b->pool);
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+  SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+            conflict, svn_client_conflict_option_incoming_add_ignore, ctx,
+            b->pool));
+
+  /* Ensure that the directory has the expected status. */
+  opt_rev.kind = svn_opt_revision_working;
+  sb.result_pool = b->pool;
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_dir_path),
+                             &opt_rev, svn_depth_empty, TRUE, FALSE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_dir);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(!status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_replaced);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_modified);
+  SVN_TEST_ASSERT(!status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+
+  /* Verify the added dir's property value. */
+  /* ### Shouldn't there be a property conflict? The local change wins. */
+  SVN_ERR(svn_wc_prop_get2(&propval, ctx->wc_ctx,
+                           sbox_wc_path(b, new_dir_path),
+                           "prop", b->pool, b->pool));
+  SVN_TEST_STRING_ASSERT(propval->data, propval_different);
+
+  /* The directory should not be in conflict. */
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(!text_conflicted &&
+                  props_conflicted->nelts == 0 &&
+                  !tree_conflicted);
+
+  /* Ensure that the newly added file has the expected status. */
+  opt_rev.kind = svn_opt_revision_working;
+  sb.result_pool = b->pool;
+  new_file_path = svn_relpath_join(trunk_path,
+                                   svn_relpath_join(new_dir_name,
+                                                    new_file_name, b->pool),
+                                   b->pool);
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_file_path),
+                             &opt_rev, svn_depth_empty, TRUE, FALSE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_file);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(!status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_added);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_modified);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_modified);
+  SVN_TEST_ASSERT(!status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  /* Verify the added file's property value. */
+  /* ### Shouldn't there be a property conflict? The local change wins. */
+  SVN_ERR(svn_wc_prop_get2(&propval, ctx->wc_ctx,
+                           sbox_wc_path(b, new_file_path),
+                           "prop", b->pool, b->pool));
+  SVN_TEST_STRING_ASSERT(propval->data, propval_different);
+
+  return SVN_NO_ERROR;
+}
+
+/* Test the "incoming added dir merge" resolution option for an
+ * "incoming add vs local add" update tree conflict with versioned
+ * obstructions: the directories are merged, the colliding file becomes
+ * text-conflicted, and the kind-mismatched children become tree
+ * conflict victims. */
+static svn_error_t *
+test_update_incoming_added_dir_merge(const svn_test_opts_t *opts,
+                                     apr_pool_t *pool)
+{
+  svn_client_ctx_t *ctx;
+  svn_client_conflict_t *conflict;
+  const char *new_dir_path;
+  const char *new_dir_child_path;
+  const char *new_file_path;
+  const char *new_file_child_path;
+  svn_boolean_t text_conflicted;
+  apr_array_header_t *props_conflicted;
+  svn_boolean_t tree_conflicted;
+  struct status_baton sb;
+  struct svn_client_status_t *status;
+  svn_opt_revision_t opt_rev;
+  const svn_string_t *propval;
+  svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+
+  SVN_ERR(svn_test__sandbox_create(b, "update_incoming_added_dir_merge",
+                                   opts, pool));
+
+  /* FALSE: obstructing child nodes are versioned in this variant. */
+  SVN_ERR(create_wc_with_dir_add_vs_dir_add_update_conflict(b, FALSE));
+
+  /* Resolve the tree conflict. */
+  SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+  new_dir_path = svn_relpath_join(trunk_path, new_dir_name, b->pool);
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+  SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+            conflict, svn_client_conflict_option_incoming_added_dir_merge, ctx,
+            b->pool));
+
+  /* Ensure that the directory has the expected status. */
+  opt_rev.kind = svn_opt_revision_working;
+  sb.result_pool = b->pool;
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_dir_path),
+                             &opt_rev, svn_depth_empty, TRUE, FALSE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_dir);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(!status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_modified);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_modified);
+  SVN_TEST_ASSERT(!status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  /* Verify the added dir's property value. */
+  /* ### Shouldn't there be a property conflict? The local change wins. */
+  SVN_ERR(svn_wc_prop_get2(&propval, ctx->wc_ctx,
+                           sbox_wc_path(b, new_dir_path),
+                           "prop", b->pool, b->pool));
+  SVN_TEST_STRING_ASSERT(propval->data, propval_different);
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+
+  /* The directory should not be in conflict. */
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(!text_conflicted &&
+                  props_conflicted->nelts == 0 &&
+                  !tree_conflicted);
+
+  /* Ensure that the newly added file has the expected status. */
+  opt_rev.kind = svn_opt_revision_working;
+  sb.result_pool = b->pool;
+  new_file_path = svn_relpath_join(trunk_path,
+                                   svn_relpath_join(new_dir_name,
+                                                    new_file_name, b->pool),
+                                   b->pool);
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_file_path),
+                             &opt_rev, svn_depth_empty, TRUE, FALSE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_file);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_conflicted);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_conflicted);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_modified);
+  SVN_TEST_ASSERT(!status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  /* Verify the added file's property value. */
+  /* ### Shouldn't there be a property conflict? The local change wins. */
+  SVN_ERR(svn_wc_prop_get2(&propval, ctx->wc_ctx,
+                           sbox_wc_path(b, new_file_path),
+                           "prop", b->pool, b->pool));
+  SVN_TEST_STRING_ASSERT(propval->data, propval_different);
+
+  /* Ensure that the obstructing added file child of newdir has the
+   * expected status.  (A local file obstructs the incoming directory,
+   * hence kind == svn_node_file here.) */
+  opt_rev.kind = svn_opt_revision_working;
+  sb.result_pool = b->pool;
+  new_dir_child_path = svn_relpath_join(new_dir_path, "dir_child", b->pool);
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_dir_child_path),
+                             &opt_rev, svn_depth_empty, TRUE, FALSE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_file);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_replaced);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_modified);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+  SVN_TEST_ASSERT(!status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  /* The file should be a tree conflict victim. */
+  SVN_ERR(svn_client_conflict_get(&conflict,
+                                  sbox_wc_path(b, new_dir_child_path),
+                                  ctx, b->pool, b->pool));
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(!text_conflicted &&
+                  props_conflicted->nelts == 0 &&
+                  tree_conflicted);
+
+  /* Ensure that the obstructing added dir child of newdir has the
+   * expected status.  (A local directory obstructs the incoming file,
+   * hence kind == svn_node_dir here.) */
+  opt_rev.kind = svn_opt_revision_working;
+  sb.result_pool = b->pool;
+  new_file_child_path = svn_relpath_join(new_dir_path, "file_child", b->pool);
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_file_child_path),
+                             &opt_rev, svn_depth_empty, TRUE, FALSE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_dir);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_replaced);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+  SVN_TEST_ASSERT(!status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  /* The directory should be a tree conflict victim. */
+  SVN_ERR(svn_client_conflict_get(&conflict,
+                                  sbox_wc_path(b, new_file_child_path),
+                                  ctx, b->pool, b->pool));
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(!text_conflicted &&
+                  props_conflicted->nelts == 0 &&
+                  tree_conflicted);
+
+  return SVN_NO_ERROR;
+}
+
+/* Like test_update_incoming_added_dir_merge, but with UNVERSIONED
+ * obstructions: the obstructed children end up with status
+ * svn_wc_status_obstructed and are not flagged as conflicted. */
+static svn_error_t *
+test_update_incoming_added_dir_merge2(const svn_test_opts_t *opts,
+                                      apr_pool_t *pool)
+{
+  svn_client_ctx_t *ctx;
+  svn_client_conflict_t *conflict;
+  const char *new_dir_path;
+  const char *new_dir_child_path;
+  const char *new_file_path;
+  const char *new_file_child_path;
+  svn_boolean_t text_conflicted;
+  apr_array_header_t *props_conflicted;
+  svn_boolean_t tree_conflicted;
+  struct status_baton sb;
+  struct svn_client_status_t *status;
+  svn_opt_revision_t opt_rev;
+  const svn_string_t *propval;
+  svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+
+  SVN_ERR(svn_test__sandbox_create(b, "update_incoming_added_dir_merge2",
+                                   opts, pool));
+
+  /* TRUE: obstructing child nodes are unversioned in this variant. */
+  SVN_ERR(create_wc_with_dir_add_vs_dir_add_update_conflict(b, TRUE));
+
+  /* Resolve the tree conflict. */
+  SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+  new_dir_path = svn_relpath_join(trunk_path, new_dir_name, b->pool);
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+  SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+            conflict, svn_client_conflict_option_incoming_added_dir_merge, ctx,
+            b->pool));
+
+  /* Ensure that the directory has the expected status. */
+  opt_rev.kind = svn_opt_revision_working;
+  sb.result_pool = b->pool;
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_dir_path),
+                             &opt_rev, svn_depth_empty, TRUE, FALSE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_dir);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(!status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_modified);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_modified);
+  SVN_TEST_ASSERT(!status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  /* Verify the added dir's property value. */
+  /* ### Shouldn't there be a property conflict? The local change wins. */
+  SVN_ERR(svn_wc_prop_get2(&propval, ctx->wc_ctx,
+                           sbox_wc_path(b, new_dir_path),
+                           "prop", b->pool, b->pool));
+  SVN_TEST_STRING_ASSERT(propval->data, propval_different);
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, new_dir_path),
+                                  ctx, b->pool, b->pool));
+
+  /* The directory should not be in conflict. */
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(!text_conflicted &&
+                  props_conflicted->nelts == 0 &&
+                  !tree_conflicted);
+
+  /* Ensure that the newly added file has the expected status. */
+  opt_rev.kind = svn_opt_revision_working;
+  sb.result_pool = b->pool;
+  new_file_path = svn_relpath_join(trunk_path,
+                                   svn_relpath_join(new_dir_name,
+                                                    new_file_name, b->pool),
+                                   b->pool);
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_file_path),
+                             &opt_rev, svn_depth_empty, TRUE, FALSE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_file);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_conflicted);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_conflicted);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_modified);
+  SVN_TEST_ASSERT(!status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  /* Verify the added file's property value. */
+  /* ### Shouldn't there be a property conflict? The local change wins. */
+  SVN_ERR(svn_wc_prop_get2(&propval, ctx->wc_ctx,
+                           sbox_wc_path(b, new_file_path),
+                           "prop", b->pool, b->pool));
+  SVN_TEST_STRING_ASSERT(propval->data, propval_different);
+
+  /* Ensure that the obstructing added file child of newdir has the
+   * expected status. */
+  opt_rev.kind = svn_opt_revision_working;
+  sb.result_pool = b->pool;
+  new_dir_child_path = svn_relpath_join(new_dir_path, "dir_child", b->pool);
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_dir_child_path),
+                             &opt_rev, svn_depth_empty, TRUE, FALSE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_dir);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(!status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_obstructed);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+  SVN_TEST_ASSERT(!status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  /* Ensure that the obstructing added dir child of newdir has the
+   * expected status. */
+  opt_rev.kind = svn_opt_revision_working;
+  sb.result_pool = b->pool;
+  new_file_child_path = svn_relpath_join(new_dir_path, "file_child", b->pool);
+  SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, new_file_child_path),
+                             &opt_rev, svn_depth_empty, TRUE, FALSE,
+                             TRUE, TRUE, FALSE, TRUE, NULL,
+                             status_func, &sb, b->pool));
+  status = sb.status;
+  SVN_TEST_ASSERT(status->kind == svn_node_file);
+  SVN_TEST_ASSERT(status->versioned);
+  SVN_TEST_ASSERT(!status->conflicted);
+  SVN_TEST_ASSERT(status->node_status == svn_wc_status_obstructed);
+  SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+  SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+  SVN_TEST_ASSERT(!status->copied);
+  SVN_TEST_ASSERT(!status->switched);
+  SVN_TEST_ASSERT(!status->file_external);
+  SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+  SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+  return SVN_NO_ERROR;
+}
+
+/* Regression test for crash fixed in r1780259: resolving an incoming
+ * move on a cherry-pick merge crashed when the moved file's property
+ * had been deleted (a non-existent ancestor property was not accounted
+ * for). */
+static svn_error_t *
+test_cherry_pick_moved_file_with_propdel(const svn_test_opts_t *opts,
+                                         apr_pool_t *pool)
+{
+  svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+  const char *vendor_url;
+  svn_opt_revision_t peg_rev;
+  apr_array_header_t *ranges_to_merge;
+  svn_opt_revision_range_t merge_range;
+  svn_client_ctx_t *ctx;
+  svn_client_conflict_t *conflict;
+  svn_boolean_t tree_conflicted;
+
+  SVN_ERR(svn_test__sandbox_create(b,
+                                   "test_cherry_pick_moved_file_with_propdel",
+                                   opts, pool));
+
+  SVN_ERR(sbox_wc_mkdir(b, "A"));
+  SVN_ERR(sbox_wc_mkdir(b, "A2"));
+  SVN_ERR(sbox_wc_commit(b, "")); /* r1 */
+  SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+
+  /* Let A/B/E act as a vendor branch of A2/E; A/B/E/lambda has a property. */
+  SVN_ERR(sbox_wc_mkdir(b, "A/B"));
+  SVN_ERR(sbox_wc_mkdir(b, "A/B/E"));
+  SVN_ERR(sbox_file_write(b, "A/B/E/lambda", "This is the file lambda.\n"));
+  SVN_ERR(sbox_wc_add(b, "A/B/E/lambda"));
+  SVN_ERR(sbox_wc_propset(b, "propname", "propval", "A/B/E/lambda"));
+  SVN_ERR(sbox_wc_commit(b, "")); /* r2 */
+  SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+  SVN_ERR(sbox_wc_copy(b, "A/B/E", "A2/E"));
+  SVN_ERR(sbox_wc_commit(b, "")); /* r3 */
+  SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+
+  /* Move vendor's E/lambda a level up and delete the property. */
+  SVN_ERR(sbox_wc_move(b, "A/B/E/lambda", "A/B/lambda"));
+  SVN_ERR(sbox_wc_propset(b, "propname", NULL /* propdel */, "A/B/lambda"));
+  SVN_ERR(sbox_wc_commit(b, "")); /* r4 */
+  SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+
+  /* Move vendor's lambda to a new subdirectory. */
+  SVN_ERR(sbox_wc_mkdir(b, "A/B/newdir"));
+  SVN_ERR(sbox_wc_move(b, "A/B/lambda", "A/B/newdir/lambda"));
+  SVN_ERR(sbox_wc_commit(b, "")); /* r5 */
+  SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+
+  /* Force a cherry-pick merge of A/B@5 to A2/E. */
+  SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+  vendor_url = apr_pstrcat(b->pool, b->repos_url, "/A/B", SVN_VA_NULL);
+  peg_rev.kind = svn_opt_revision_number;
+  peg_rev.value.number = 5;
+  merge_range.start.kind = svn_opt_revision_number;
+  merge_range.start.value.number = 4;
+  merge_range.end.kind = svn_opt_revision_number;
+  merge_range.end.value.number = 5;
+  ranges_to_merge = apr_array_make(b->pool, 1,
+                                   sizeof(svn_opt_revision_range_t *));
+  APR_ARRAY_PUSH(ranges_to_merge, svn_opt_revision_range_t *) = &merge_range;
+  /* This should raise a "local edit vs incoming delete or move" conflict. */
+  SVN_ERR(svn_client_merge_peg5(vendor_url, ranges_to_merge, &peg_rev,
+                                sbox_wc_path(b, "A2/E"), svn_depth_infinity,
+                                TRUE, TRUE, FALSE, FALSE, FALSE, FALSE,
+                                NULL, ctx, b->pool));
+
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A2/E/lambda"),
+                                  ctx, b->pool, b->pool));
+  SVN_ERR(svn_client_conflict_get_conflicted(NULL, NULL, &tree_conflicted,
+                                             conflict, b->pool, b->pool));
+  SVN_TEST_ASSERT(tree_conflicted);
+  /* Before details are fetched, only delete-oriented options are offered. */
+  {
+    svn_client_conflict_option_id_t expected_opts[] = {
+      svn_client_conflict_option_postpone,
+      svn_client_conflict_option_accept_current_wc_state,
+      svn_client_conflict_option_incoming_delete_ignore,
+      svn_client_conflict_option_incoming_delete_accept,
+      -1 /* end of list */
+    };
+    SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+                                         b->pool));
+  }
+
+  /* After details are fetched the move is detected. */
+  SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+  {
+    svn_client_conflict_option_id_t expected_opts[] = {
+      svn_client_conflict_option_postpone,
+      svn_client_conflict_option_accept_current_wc_state,
+      svn_client_conflict_option_incoming_move_file_text_merge,
+      -1 /* end of list */
+    };
+    SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+                                         b->pool));
+  }
+
+  /* Try to resolve the conflict. This crashed before r1780259 due to the
+   * fact that a non-existent ancestor property was not accounted for. */
+  SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+            conflict,
+            svn_client_conflict_option_incoming_move_file_text_merge,
+            ctx, b->pool));
+
+  return SVN_NO_ERROR;
+}
+
+/* Test the "incoming move file text merge" tree conflict resolution
+ * option on a file with CRLF line endings (no svn:eol-style set).
+ * The local edit must survive the merge with its CRLF endings intact. */
+static svn_error_t *
+test_merge_incoming_move_file_text_merge_crlf(const svn_test_opts_t *opts,
+                                              apr_pool_t *pool)
+{
+  svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+  svn_client_ctx_t *ctx;
+  svn_opt_revision_t opt_rev;
+  svn_client_conflict_t *conflict;
+  svn_boolean_t text_conflicted;
+  apr_array_header_t *props_conflicted;
+  svn_boolean_t tree_conflicted;
+  svn_stringbuf_t *buf;
+
+  SVN_ERR(svn_test__sandbox_create(
+            b, "merge_incoming_move_file_text_merge_crlf", opts, pool));
+
+  SVN_ERR(sbox_add_and_commit_greek_tree(b));
+  /* Edit the file to have CRLF line endings. */
+  SVN_ERR(sbox_file_write(b, "A/mu", "Original content.\r\n"));
+  SVN_ERR(sbox_wc_commit(b, ""));
+  /* Create a copy of node "A". */
+  SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+  SVN_ERR(sbox_wc_copy(b, "A", "A1"));
+  SVN_ERR(sbox_wc_commit(b, ""));
+  /* On "trunk", move the file. */
+  SVN_ERR(sbox_wc_move(b, "A/mu", "A/mu-moved"));
+  SVN_ERR(sbox_wc_commit(b, ""));
+  /* On "branch", edit the file. */
+  SVN_ERR(sbox_file_write(b, "A1/mu", "Modified content.\r\n"));
+  SVN_ERR(sbox_wc_commit(b, ""));
+
+  SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+  opt_rev.kind = svn_opt_revision_head;
+  opt_rev.value.number = SVN_INVALID_REVNUM;
+  SVN_ERR(svn_test__create_client_ctx(&ctx, b, pool));
+
+  /* Merge "A" to "A1". */
+  SVN_ERR(svn_client_merge_peg5(svn_path_url_add_component2(b->repos_url, "A",
+                                                            pool),
+                                NULL, &opt_rev, sbox_wc_path(b, "A1"),
+                                svn_depth_infinity,
+                                FALSE, FALSE, FALSE, FALSE, FALSE, FALSE,
+                                NULL, ctx, pool));
+
+  /* We should have a tree conflict in the file "mu". */
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A1/mu"), ctx,
+                                  pool, pool));
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, pool, pool));
+  SVN_TEST_ASSERT(!text_conflicted);
+  SVN_TEST_INT_ASSERT(props_conflicted->nelts, 0);
+  SVN_TEST_ASSERT(tree_conflicted);
+
+  /* Check available tree conflict resolution options. */
+  {
+    svn_client_conflict_option_id_t expected_opts[] = {
+      svn_client_conflict_option_postpone,
+      svn_client_conflict_option_accept_current_wc_state,
+      svn_client_conflict_option_incoming_delete_ignore,
+      svn_client_conflict_option_incoming_delete_accept,
+      -1 /* end of list */
+    };
+    SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+  }
+
+  SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, pool));
+
+  /* After fetching details the move is detected. */
+  {
+    svn_client_conflict_option_id_t expected_opts[] = {
+      svn_client_conflict_option_postpone,
+      svn_client_conflict_option_accept_current_wc_state,
+      svn_client_conflict_option_incoming_move_file_text_merge,
+      -1 /* end of list */
+    };
+    SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+  }
+
+  /* Resolve the tree conflict by moving "mu" to "mu-moved". */
+  SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+            conflict, svn_client_conflict_option_incoming_move_file_text_merge,
+            ctx, pool));
+
+  /* The file should no longer be in conflict, and should not have a
+   * text conflict: the incoming side only moved the file without
+   * changing its text, so the local modification merges cleanly. */
+  SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A1/mu-moved"),
+                                  ctx, pool, pool));
+  SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+                                             &props_conflicted,
+                                             &tree_conflicted,
+                                             conflict, pool, pool));
+  SVN_TEST_ASSERT(!text_conflicted);
+  SVN_TEST_INT_ASSERT(props_conflicted->nelts, 0);
+  SVN_TEST_ASSERT(!tree_conflicted);
+
+  /* And it should have expected contents. */
+  SVN_ERR(svn_stringbuf_from_file2(&buf, sbox_wc_path(b, "A1/mu-moved"),
+                                   pool));
+  SVN_TEST_STRING_ASSERT(buf->data, "Modified content.\r\n");
+
+  return SVN_NO_ERROR;
+}
+
+/* Test the "incoming move file text merge" tree conflict resolution
+ * option on a file which has svn:eol-style set to "native": "trunk"
+ * moves the file while the "branch" edits its content.  After
+ * resolution the moved file must carry the branch's edit with
+ * native (APR_EOL_STR) line endings. */
+static svn_error_t *
+test_merge_incoming_move_file_text_merge_native_eol(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ svn_client_ctx_t *ctx;
+ svn_opt_revision_t opt_rev;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t text_conflicted;
+ apr_array_header_t *props_conflicted;
+ svn_boolean_t tree_conflicted;
+ svn_stringbuf_t *buf;
+
+ SVN_ERR(svn_test__sandbox_create(
+ b, "merge_incoming_move_file_text_merge_native_eol", opts, pool));
+
+ SVN_ERR(sbox_add_and_commit_greek_tree(b));
+ /* Set svn:eol-style on a file and edit it. */
+ SVN_ERR(sbox_wc_propset(b, SVN_PROP_EOL_STYLE, "native", "A/mu"));
+ SVN_ERR(sbox_file_write(b, "A/mu", "Original content.\n"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* Create a copy of node "A". */
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+ SVN_ERR(sbox_wc_copy(b, "A", "A1"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* On "trunk", move the file. */
+ SVN_ERR(sbox_wc_move(b, "A/mu", "A/mu-moved"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+ /* On "branch", edit the file. */
+ SVN_ERR(sbox_file_write(b, "A1/mu", "Modified content.\n"));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+ opt_rev.kind = svn_opt_revision_head;
+ opt_rev.value.number = SVN_INVALID_REVNUM;
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, pool));
+
+ /* Merge "A" to "A1". */
+ SVN_ERR(svn_client_merge_peg5(svn_path_url_add_component2(b->repos_url, "A",
+ pool),
+ NULL, &opt_rev, sbox_wc_path(b, "A1"),
+ svn_depth_infinity,
+ FALSE, FALSE, FALSE, FALSE, FALSE, FALSE,
+ NULL, ctx, pool));
+
+ /* We should have a tree conflict in the file "mu". */
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A1/mu"), ctx,
+ pool, pool));
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, pool, pool));
+ SVN_TEST_ASSERT(!text_conflicted);
+ SVN_TEST_INT_ASSERT(props_conflicted->nelts, 0);
+ SVN_TEST_ASSERT(tree_conflicted);
+
+ /* Check available tree conflict resolution options.
+ * Before details are fetched, only the delete-based options show up. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_delete_ignore,
+ svn_client_conflict_option_incoming_delete_accept,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, pool));
+
+ /* After fetching details, the move is detected and the text-merge
+ * option replaces the delete-based options. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_incoming_move_file_text_merge,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts, pool));
+ }
+
+ /* Resolve the tree conflict by moving "mu" to "mu-moved". */
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict, svn_client_conflict_option_incoming_move_file_text_merge,
+ ctx, pool));
+
+ /* The file should no longer be in conflict, and should not have a
+ * text conflict, because only the branch modified the file's text
+ * (the trunk merely moved it). */
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A1/mu-moved"),
+ ctx, pool, pool));
+ SVN_ERR(svn_client_conflict_get_conflicted(&text_conflicted,
+ &props_conflicted,
+ &tree_conflicted,
+ conflict, pool, pool));
+ SVN_TEST_ASSERT(!text_conflicted);
+ SVN_TEST_INT_ASSERT(props_conflicted->nelts, 0);
+ SVN_TEST_ASSERT(!tree_conflicted);
+
+ /* And it should have expected contents, with native EOLs. */
+ SVN_ERR(svn_stringbuf_from_file2(&buf, sbox_wc_path(b, "A1/mu-moved"),
+ pool));
+ SVN_TEST_STRING_ASSERT(buf->data, "Modified content." APR_EOL_STR);
+
+ return SVN_NO_ERROR;
+}
+
+/* Test cherry-picking r5, an edit made to "A/mu-moved" after the move
+ * of "A/mu" in r3, into branch "A1" which was copied before the move
+ * and still has the file at its pre-move path.  The resulting tree
+ * conflict is resolved with the "local move file text merge" option,
+ * which should apply the edit to the branch's file "A1/mu".
+ * (Registered with SVN_TEST_OPTS_XFAIL in test_funcs below.) */
+static svn_error_t *
+test_cherry_pick_post_move_edit(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ const char *trunk_url;
+ svn_opt_revision_t peg_rev;
+ apr_array_header_t *ranges_to_merge;
+ svn_opt_revision_range_t merge_range;
+ svn_client_ctx_t *ctx;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t tree_conflicted;
+ svn_stringbuf_t *buf;
+
+ SVN_ERR(svn_test__sandbox_create(b,
+ "test_cherry_pick_post_move_edit",
+ opts, pool));
+
+ SVN_ERR(sbox_add_and_commit_greek_tree(b)); /* r1 */
+ /* Create a copy of node "A". */
+ SVN_ERR(sbox_wc_copy(b, "A", "A1"));
+ SVN_ERR(sbox_wc_commit(b, "")); /* r2 */
+ /* On "trunk", move the file mu. */
+ SVN_ERR(sbox_wc_move(b, "A/mu", "A/mu-moved"));
+ SVN_ERR(sbox_wc_commit(b, "")); /* r3 */
+ /* On "trunk", edit mu-moved. This will be r4. */
+ SVN_ERR(sbox_file_write(b, "A/mu-moved", "Modified content." APR_EOL_STR));
+ SVN_ERR(sbox_wc_commit(b, "")); /* r4 */
+ /* On "trunk", edit mu-moved. This will be r5, which we'll cherry-pick. */
+ SVN_ERR(sbox_file_write(b, "A/mu-moved",
+ "More modified content." APR_EOL_STR));
+ SVN_ERR(sbox_wc_commit(b, "")); /* r5 */
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+
+ /* Perform a cherry-pick merge of r5 from A to A1. */
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+ trunk_url = apr_pstrcat(b->pool, b->repos_url, "/A", SVN_VA_NULL);
+ peg_rev.kind = svn_opt_revision_number;
+ peg_rev.value.number = 5;
+ /* The range 4:5 selects only the change committed in r5. */
+ merge_range.start.kind = svn_opt_revision_number;
+ merge_range.start.value.number = 4;
+ merge_range.end.kind = svn_opt_revision_number;
+ merge_range.end.value.number = 5;
+ ranges_to_merge = apr_array_make(b->pool, 1,
+ sizeof(svn_opt_revision_range_t *));
+ APR_ARRAY_PUSH(ranges_to_merge, svn_opt_revision_range_t *) = &merge_range;
+ /* This should raise a "local delete or move vs incoming edit" conflict. */
+ SVN_ERR(svn_client_merge_peg5(trunk_url, ranges_to_merge, &peg_rev,
+ sbox_wc_path(b, "A1"), svn_depth_infinity,
+ FALSE, FALSE, FALSE, FALSE, FALSE, FALSE,
+ NULL, ctx, b->pool));
+
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, "A1/mu-moved"),
+ ctx, b->pool, b->pool));
+ SVN_ERR(svn_client_conflict_get_conflicted(NULL, NULL, &tree_conflicted,
+ conflict, b->pool, b->pool));
+ SVN_TEST_ASSERT(tree_conflicted);
+ /* Before details are fetched, only the generic options are offered. */
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+ b->pool));
+ }
+
+ /* Fetching details enables the move-aware resolution option. */
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+ {
+ svn_client_conflict_option_id_t expected_opts[] = {
+ svn_client_conflict_option_postpone,
+ svn_client_conflict_option_accept_current_wc_state,
+ svn_client_conflict_option_local_move_file_text_merge,
+ -1 /* end of list */
+ };
+ SVN_ERR(assert_tree_conflict_options(conflict, ctx, expected_opts,
+ b->pool));
+ }
+
+ /* Try to resolve the conflict. */
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(
+ conflict,
+ svn_client_conflict_option_local_move_file_text_merge,
+ ctx, b->pool));
+
+ /* The node "A1/mu-moved" should no longer exist. */
+ SVN_TEST_ASSERT_ERROR(svn_client_conflict_get(&conflict,
+ sbox_wc_path(b, "A1/mu-moved"),
+ ctx, pool, pool),
+ SVN_ERR_WC_PATH_NOT_FOUND);
+
+ /* And "A1/mu" should have expected contents. */
+ SVN_ERR(svn_stringbuf_from_file2(&buf, sbox_wc_path(b, "A1/mu"), pool));
+ SVN_TEST_STRING_ASSERT(buf->data, "More modified content." APR_EOL_STR);
+
+ return SVN_NO_ERROR;
+}
+
+/* A helper function which prepares a working copy for the tests below.
+ *
+ * Creates a "trunk", a "branch" of it, and a second branch ("branch2")
+ * of the first branch.  A directory is then moved on the trunk while a
+ * file inside that directory is modified on branch2.  The trunk change
+ * is merged to the first branch (no conflict expected), that result is
+ * committed, and finally branch is merged to branch2, raising an
+ * "incoming delete vs local edit" tree conflict there.
+ *
+ * NOTE(review): trunk_path, branch_path, branch2_path, deleted_dir_name,
+ * new_dir_name, deleted_dir_child and modified_file_on_branch_content
+ * are file-level constants defined earlier in this file (not visible in
+ * this hunk) -- confirm their values there. */
+static svn_error_t *
+create_wc_with_incoming_delete_dir_conflict_across_branches(
+ svn_test__sandbox_t *b)
+{
+ svn_client_ctx_t *ctx;
+ const char *trunk_url;
+ const char *branch_url;
+ svn_opt_revision_t opt_rev;
+ const char *deleted_path;
+ const char *deleted_child_path;
+ const char *move_target_path;
+
+ SVN_ERR(sbox_add_and_commit_greek_tree(b));
+
+ /* Create a branch of node "A". */
+ SVN_ERR(sbox_wc_copy(b, trunk_path, branch_path));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ /* Create a second branch ("branch2") of the first branch. */
+ SVN_ERR(sbox_wc_copy(b, branch_path, branch2_path));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ /* Move a directory on the trunk. */
+ deleted_path = svn_relpath_join(trunk_path, deleted_dir_name, b->pool);
+ move_target_path = svn_relpath_join(trunk_path, new_dir_name, b->pool);
+ SVN_ERR(sbox_wc_move(b, deleted_path, move_target_path));
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ /* Modify a file in that directory on branch2. */
+ deleted_child_path = svn_relpath_join(branch2_path,
+ svn_relpath_join(deleted_dir_name,
+ deleted_dir_child,
+ b->pool),
+ b->pool);
+ SVN_ERR(sbox_file_write(b, deleted_child_path,
+ modified_file_on_branch_content));
+
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+ opt_rev.kind = svn_opt_revision_head;
+ opt_rev.value.number = SVN_INVALID_REVNUM;
+ trunk_url = apr_pstrcat(b->pool, b->repos_url, "/", trunk_path,
+ SVN_VA_NULL);
+ branch_url = apr_pstrcat(b->pool, b->repos_url, "/", branch_path,
+ SVN_VA_NULL);
+
+ /* Commit modification and run a merge from the trunk to the branch.
+ * This merge should not raise a conflict. */
+ SVN_ERR(sbox_wc_commit(b, ""));
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+ SVN_ERR(svn_client_merge_peg5(trunk_url, NULL, &opt_rev,
+ sbox_wc_path(b, branch_path),
+ svn_depth_infinity,
+ FALSE, FALSE, FALSE, FALSE, FALSE, FALSE,
+ NULL, ctx, b->pool));
+
+ /* Commit merge result end run a merge from branch to branch2. */
+ SVN_ERR(sbox_wc_commit(b, ""));
+ SVN_ERR(sbox_wc_update(b, "", SVN_INVALID_REVNUM));
+
+ /* This should raise an "incoming delete vs local edit" tree conflict. */
+ SVN_ERR(svn_client_merge_peg5(branch_url, NULL, &opt_rev,
+ sbox_wc_path(b, branch2_path),
+ svn_depth_infinity,
+ FALSE, FALSE, FALSE, FALSE, FALSE, FALSE,
+ NULL, ctx, b->pool));
+ return SVN_NO_ERROR;
+}
+
+/* Test resolving an incoming dir move when the move crosses branches:
+ * the resolver must offer move destination candidates on both branches,
+ * and resolving with candidate 0 must turn the local directory into a
+ * move to that destination, carrying the local file edit along.
+ *
+ * Note: the sandbox name used to be "merge_incoming_move_dir accross
+ * branches", containing a typo and an embedded space; renamed to match
+ * the underscore convention used by every other sandbox in this file. */
+static svn_error_t *
+test_merge_incoming_move_dir_across_branches(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+ svn_client_ctx_t *ctx;
+ const char *deleted_path;
+ const char *moved_to_path;
+ const char *child_path;
+ svn_client_conflict_t *conflict;
+ struct status_baton sb;
+ struct svn_client_status_t *status;
+ svn_stringbuf_t *buf;
+ svn_opt_revision_t opt_rev;
+ apr_array_header_t *options;
+ svn_client_conflict_option_t *option;
+ apr_array_header_t *possible_moved_to_abspaths;
+
+ SVN_ERR(svn_test__sandbox_create(b,
+ "merge_incoming_move_dir_across_branches",
+ opts, pool));
+
+ SVN_ERR(create_wc_with_incoming_delete_dir_conflict_across_branches(b));
+
+ deleted_path = svn_relpath_join(branch2_path, deleted_dir_name, b->pool);
+ moved_to_path = svn_relpath_join(branch2_path, new_dir_name, b->pool);
+
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+ SVN_ERR(svn_client_conflict_get(&conflict, sbox_wc_path(b, deleted_path),
+ ctx, b->pool, b->pool));
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, b->pool));
+
+ SVN_ERR_ASSERT(svn_client_conflict_get_local_change(conflict) ==
+ svn_wc_conflict_reason_edited);
+
+ /* Check possible move destinations for the directory. */
+ SVN_ERR(svn_client_conflict_tree_get_resolution_options(&options, conflict,
+ ctx, b->pool,
+ b->pool));
+ option = svn_client_conflict_option_find_by_id(
+ options, svn_client_conflict_option_incoming_move_dir_merge);
+ SVN_TEST_ASSERT(option != NULL);
+
+ SVN_ERR(svn_client_conflict_option_get_moved_to_abspath_candidates(
+ &possible_moved_to_abspaths, option, b->pool, b->pool));
+
+ /* The resolver finds two possible destinations for the moved folder:
+ *
+ * Possible working copy destinations for moved-away 'A_branch/B' are:
+ * (1): 'A_branch2/newdir'
+ * (2): 'A_branch/newdir'
+ * Only one destination can be a move; the others are copies.
+ */
+ SVN_TEST_INT_ASSERT(possible_moved_to_abspaths->nelts, 2);
+ SVN_TEST_STRING_ASSERT(
+ APR_ARRAY_IDX(possible_moved_to_abspaths, 0, const char *),
+ sbox_wc_path(b, moved_to_path));
+ SVN_TEST_STRING_ASSERT(
+ APR_ARRAY_IDX(possible_moved_to_abspaths, 1, const char *),
+ sbox_wc_path(b, svn_relpath_join(branch_path, new_dir_name, b->pool)));
+
+ /* Resolve the tree conflict, selecting candidate 0 (on branch2). */
+ SVN_ERR(svn_client_conflict_option_set_moved_to_abspath(option, 0,
+ ctx, b->pool));
+ SVN_ERR(svn_client_conflict_tree_resolve(conflict, option, ctx, b->pool));
+
+ /* Ensure that the moved-away directory has the expected status. */
+ sb.result_pool = b->pool;
+ opt_rev.kind = svn_opt_revision_working;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, deleted_path),
+ &opt_rev, svn_depth_empty, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_dir);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(!status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_deleted);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+ SVN_TEST_ASSERT(!status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_ASSERT(status->moved_from_abspath == NULL);
+ SVN_TEST_STRING_ASSERT(status->moved_to_abspath,
+ sbox_wc_path(b, moved_to_path));
+
+ /* Ensure that the moved-here directory has the expected status. */
+ sb.result_pool = b->pool;
+ opt_rev.kind = svn_opt_revision_working;
+ SVN_ERR(svn_client_status6(NULL, ctx, sbox_wc_path(b, moved_to_path),
+ &opt_rev, svn_depth_empty, TRUE, TRUE,
+ TRUE, TRUE, FALSE, TRUE, NULL,
+ status_func, &sb, b->pool));
+ status = sb.status;
+ SVN_TEST_ASSERT(status->kind == svn_node_dir);
+ SVN_TEST_ASSERT(status->versioned);
+ SVN_TEST_ASSERT(!status->conflicted);
+ SVN_TEST_ASSERT(status->node_status == svn_wc_status_added);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->prop_status == svn_wc_status_none);
+ SVN_TEST_ASSERT(status->copied);
+ SVN_TEST_ASSERT(!status->switched);
+ SVN_TEST_ASSERT(!status->file_external);
+ SVN_TEST_STRING_ASSERT(status->moved_from_abspath,
+ sbox_wc_path(b, deleted_path));
+ SVN_TEST_ASSERT(status->moved_to_abspath == NULL);
+
+ /* Ensure that the edited file has the expected content. */
+ child_path = svn_relpath_join(moved_to_path, deleted_dir_child,
+ b->pool);
+ SVN_ERR(svn_stringbuf_from_file2(&buf, sbox_wc_path(b, child_path),
+ b->pool));
+ SVN_TEST_STRING_ASSERT(buf->data, modified_file_on_branch_content);
+
+ return SVN_NO_ERROR;
+}
+
+/* ========================================================================== */
+
+
+/* Run the tests in this file with at most one thread. */
+static int max_threads = 1;
+
+/* The table of tests in this file; each entry pairs a test function
+ * with its human-readable description.  The table begins and ends with
+ * SVN_TEST_NULL sentinels. */
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_OPTS_PASS(test_merge_incoming_added_file_text_merge,
+ "merge incoming add file text merge"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_added_file_replace_and_merge,
+ "merge incoming add file replace and merge"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_added_dir_ignore,
+ "merge incoming add dir ignore"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_added_dir_merge,
+ "merge incoming add dir merge"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_added_dir_merge2,
+ "merge incoming add dir merge with file change"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_added_dir_merge3,
+ "merge incoming add dir merge with move history"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_added_dir_replace,
+ "merge incoming add dir replace"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_added_dir_replace_and_merge,
+ "merge incoming add dir replace and merge"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_added_dir_replace_and_merge2,
+ "merge incoming add dir replace with file change"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_delete_file_ignore,
+ "merge incoming delete file ignore"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_delete_file_accept,
+ "merge incoming delete file accept"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_move_file_text_merge,
+ "merge incoming move file text merge"),
+ SVN_TEST_OPTS_PASS(test_update_incoming_delete_file_ignore,
+ "update incoming delete file ignore"),
+ SVN_TEST_OPTS_PASS(test_update_incoming_delete_file_accept,
+ "update incoming delete file accept"),
+ SVN_TEST_OPTS_PASS(test_update_incoming_move_file_text_merge,
+ "update incoming move file text merge"),
+ SVN_TEST_OPTS_PASS(test_switch_incoming_move_file_text_merge,
+ "switch incoming move file text merge"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_move_dir,
+ "merge incoming move dir"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_move_dir2,
+ "merge incoming move dir with local edit"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_move_dir3,
+ "merge incoming move dir with local add"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_delete_vs_local_delete,
+ "merge incoming delete vs local delete"),
+ SVN_TEST_OPTS_PASS(test_merge_file_prop,
+ "merge file property"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_move_file_text_merge_conflict,
+ "merge incoming move file merge with text conflict"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_edit_file_moved_away,
+ "merge incoming edit for a moved-away working file"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_chained_move_local_edit,
+ "merge incoming chained move vs local edit"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_move_dir_with_moved_file,
+ "merge incoming moved dir with moved file"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_file_move_new_line_of_history,
+ "merge incoming file move with new line of history"),
+ SVN_TEST_OPTS_PASS(test_update_incoming_dir_move_with_nested_file_move,
+ "update incoming dir move with nested file move"),
+ SVN_TEST_OPTS_PASS(test_update_incoming_dir_move_with_parent_move,
+ "update incoming dir move with parent move"),
+ SVN_TEST_OPTS_PASS(test_update_incoming_dir_move_with_parent_moved_back,
+ "update incoming dir move with parent moved back"),
+ SVN_TEST_OPTS_PASS(test_update_incoming_dir_move_with_parent_moved_twice,
+ "update incoming dir move with parent moved twice"),
+ SVN_TEST_OPTS_PASS(test_update_incoming_added_file_text_merge,
+ "update incoming add file text merge"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_move_file_prop_merge_conflict,
+ "merge incoming move file merge with prop conflict"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_move_file_text_merge_keywords,
+ "merge incoming move file merge with keywords"),
+ SVN_TEST_OPTS_PASS(test_update_incoming_added_dir_ignore,
+ "update incoming add dir ignore"),
+ SVN_TEST_OPTS_PASS(test_update_incoming_added_dir_merge,
+ "update incoming add dir merge"),
+ SVN_TEST_OPTS_PASS(test_update_incoming_added_dir_merge2,
+ "update incoming add dir merge with obstructions"),
+ SVN_TEST_OPTS_PASS(test_cherry_pick_moved_file_with_propdel,
+ "cherry-pick with moved file and propdel"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_move_file_text_merge_crlf,
+ "merge incoming move file merge with CRLF eols"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_move_file_text_merge_native_eol,
+ "merge incoming move file merge with native eols"),
+ SVN_TEST_OPTS_XFAIL(test_cherry_pick_post_move_edit,
+ "cherry-pick edit from moved file"),
+ SVN_TEST_OPTS_PASS(test_merge_incoming_move_dir_across_branches,
+ "merge incoming dir move across branches"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_client/mtcc-test.c b/subversion/tests/libsvn_client/mtcc-test.c
new file mode 100644
index 0000000..2776657
--- /dev/null
+++ b/subversion/tests/libsvn_client/mtcc-test.c
@@ -0,0 +1,848 @@
+/*
+ * Regression tests for mtcc code in the libsvn_client library.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "svn_pools.h"
+#include "svn_props.h"
+#include "svn_client.h"
+#include "private/svn_client_mtcc.h"
+
+#include "../svn_test.h"
+#include "../svn_test_fs.h"
+
+/* Baton for verify_commit_callback */
+struct verify_commit_baton
+{
+ /* Copy of the commit result, set by verify_commit_callback;
+ * NULL until a commit has completed. */
+ const svn_commit_info_t *commit_info;
+ /* Pool in which COMMIT_INFO is allocated. */
+ apr_pool_t *result_pool;
+};
+
+/* Commit callback used by verify_mtcc_commit: stashes a copy of
+ * COMMIT_INFO into the struct verify_commit_baton passed as BATON so
+ * the caller can inspect the result after the commit returns. */
+static svn_error_t *
+verify_commit_callback(const svn_commit_info_t *commit_info,
+ void *baton,
+ apr_pool_t *pool)
+{
+ struct verify_commit_baton *result = baton;
+
+ result->commit_info = svn_commit_info_dup(commit_info,
+ result->result_pool);
+
+ return SVN_NO_ERROR;
+}
+
+/* Return a readable stream, allocated in RESULT_POOL, which delivers
+ * the contents of the NUL-terminated C string DATA. */
+static svn_stream_t *
+cstr_stream(const char *data, apr_pool_t *result_pool)
+{
+ svn_string_t *str = svn_string_create(data, result_pool);
+
+ return svn_stream_from_string(str, result_pool);
+}
+
+/* Commit MTCC and verify that the commit succeeded and that it
+ * created revision EXPECTED_REV. */
+static svn_error_t *
+verify_mtcc_commit(svn_client__mtcc_t *mtcc,
+ svn_revnum_t expected_rev,
+ apr_pool_t *pool)
+{
+ struct verify_commit_baton cb;
+
+ cb.result_pool = pool;
+ cb.commit_info = NULL;
+
+ /* verify_commit_callback fills in CB.COMMIT_INFO on success. */
+ SVN_ERR(svn_client__mtcc_commit(NULL, verify_commit_callback, &cb,
+ mtcc, pool));
+
+ SVN_TEST_ASSERT(cb.commit_info != NULL);
+ SVN_TEST_ASSERT(cb.commit_info->revision == expected_rev);
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Constructs a greek tree as revision 1 in the repository at REPOS_URL,
+ * committing every node of svn_test__greek_tree_nodes in a single mtcc
+ * commit.  All temporary allocations are made in (and released with) a
+ * subpool of SCRATCH_POOL. */
+static svn_error_t *
+make_greek_tree(const char *repos_url,
+ apr_pool_t *scratch_pool)
+{
+ svn_client__mtcc_t *mtcc;
+ svn_client_ctx_t *ctx;
+ apr_pool_t *subpool;
+ int i;
+
+ subpool = svn_pool_create(scratch_pool);
+
+ SVN_ERR(svn_client_create_context2(&ctx, NULL, subpool));
+ SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, subpool));
+
+ /* Base the commit on revision 0, i.e. the empty repository root. */
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 0, ctx, subpool, subpool));
+
+ for (i = 0; svn_test__greek_tree_nodes[i].path; i++)
+ {
+ /* Nodes with contents are files; the others are directories. */
+ if (svn_test__greek_tree_nodes[i].contents)
+ {
+ SVN_ERR(svn_client__mtcc_add_add_file(
+ svn_test__greek_tree_nodes[i].path,
+ cstr_stream(
+ svn_test__greek_tree_nodes[i].contents,
+ subpool),
+ NULL /* src_checksum */,
+ mtcc, subpool));
+ }
+ else
+ {
+ SVN_ERR(svn_client__mtcc_add_mkdir(
+ svn_test__greek_tree_nodes[i].path,
+ mtcc, subpool));
+ }
+ }
+
+ SVN_ERR(verify_mtcc_commit(mtcc, 1, subpool));
+
+ /* Destroy (not merely clear) the subpool so its memory is returned
+ * to the allocator now instead of when SCRATCH_POOL dies. */
+ svn_pool_destroy(subpool);
+ return SVN_NO_ERROR;
+}
+
+/* Test driving several mkdir operations, including nested directories
+ * added in non-sequential order ("branches/1.x" after "trunk"), in a
+ * single mtcc commit creating r1. */
+static svn_error_t *
+test_mkdir(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_client__mtcc_t *mtcc;
+ svn_client_ctx_t *ctx;
+ const char *repos_url;
+
+ SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-mkdir",
+ opts, pool, pool));
+
+ SVN_ERR(svn_client_create_context2(&ctx, NULL, pool));
+ SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool));
+
+ /* Base the commit on revision 0 of the fresh repository. */
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 0, ctx, pool, pool));
+
+ SVN_ERR(svn_client__mtcc_add_mkdir("branches", mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_mkdir("trunk", mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_mkdir("branches/1.x", mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_mkdir("tags", mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_mkdir("tags/1.0", mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_mkdir("tags/1.1", mtcc, pool));
+
+ SVN_ERR(verify_mtcc_commit(mtcc, 1, pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* Test committing a copy of an existing directory ("A" of the greek
+ * tree at r1) to a new path ("greek_A") via mtcc, creating r2. */
+static svn_error_t *
+test_mkgreek(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_client__mtcc_t *mtcc;
+ svn_client_ctx_t *ctx;
+ const char *repos_url;
+
+ SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-mkgreek",
+ opts, pool, pool));
+
+ SVN_ERR(make_greek_tree(repos_url, pool));
+
+ SVN_ERR(svn_client_create_context2(&ctx, NULL, pool));
+ SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool));
+
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 1, ctx, pool, pool));
+
+ /* Copy "A" as it was in r1 to the new path "greek_A". */
+ SVN_ERR(svn_client__mtcc_add_copy("A", 1, "greek_A", mtcc, pool));
+
+ SVN_ERR(verify_mtcc_commit(mtcc, 2, pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* Test swapping the directories "A/B" and "A/D" within a single commit
+ * by chaining two moves and a copy; the order of the operations is
+ * significant ("A/B" must be moved out of the way first). */
+static svn_error_t *
+test_swap(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_client__mtcc_t *mtcc;
+ svn_client_ctx_t *ctx;
+ const char *repos_url;
+
+ SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-swap",
+ opts, pool, pool));
+
+ SVN_ERR(make_greek_tree(repos_url, pool));
+
+ SVN_ERR(svn_client_create_context2(&ctx, NULL, pool));
+ SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool));
+
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 1, ctx, pool, pool));
+
+ /* Park "A/B" at "B", move "A/D" into its place, then restore the
+ * original "A/B" (from its r1 state) at "A/D". */
+ SVN_ERR(svn_client__mtcc_add_move("A/B", "B", mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_move("A/D", "A/B", mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_copy("A/B", 1, "A/D", mtcc, pool));
+
+ SVN_ERR(verify_mtcc_commit(mtcc, 2, pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* Test setting and deleting properties via mtcc, and that setting
+ * svn:* properties which are invalid for the target node is rejected
+ * with SVN_ERR_ILLEGAL_TARGET. */
+static svn_error_t *
+test_propset(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_client__mtcc_t *mtcc;
+ svn_client_ctx_t *ctx;
+ const char *repos_url;
+
+ SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-propset",
+ opts, pool, pool));
+
+ SVN_ERR(make_greek_tree(repos_url, pool));
+
+ SVN_ERR(svn_client_create_context2(&ctx, NULL, pool));
+ SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool));
+
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 1, ctx, pool, pool));
+
+ SVN_ERR(svn_client__mtcc_add_propset("iota", "key",
+ svn_string_create("val", pool), FALSE,
+ mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_propset("A", "A-key",
+ svn_string_create("val-A", pool), FALSE,
+ mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_propset("A/B", "B-key",
+ svn_string_create("val-B", pool), FALSE,
+ mtcc, pool));
+
+ /* The repository ignores propdeletes of properties that aren't there,
+ so this just works */
+ SVN_ERR(svn_client__mtcc_add_propset("A/D", "D-key", NULL, FALSE,
+ mtcc, pool));
+
+ SVN_ERR(verify_mtcc_commit(mtcc, 2, pool));
+
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 2, ctx, pool, pool));
+ /* svn:mime-type may not be set on a directory ("A"). */
+ SVN_TEST_ASSERT_ERROR(
+ svn_client__mtcc_add_propset("A", SVN_PROP_MIME_TYPE,
+ svn_string_create("text/plain", pool),
+ FALSE, mtcc, pool),
+ SVN_ERR_ILLEGAL_TARGET);
+
+ /* svn:ignore may not be set on a file ("iota"). */
+ SVN_TEST_ASSERT_ERROR(
+ svn_client__mtcc_add_propset("iota", SVN_PROP_IGNORE,
+ svn_string_create("iota", pool),
+ FALSE, mtcc, pool),
+ SVN_ERR_ILLEGAL_TARGET);
+
+ SVN_ERR(svn_client__mtcc_add_propset("iota", SVN_PROP_EOL_STYLE,
+ svn_string_create("LF", pool),
+ FALSE, mtcc, pool));
+
+ /* "ok" has consistent LF EOLs; "bad" mixes LF and CRLF. */
+ SVN_ERR(svn_client__mtcc_add_add_file("ok", cstr_stream("line\nline\n", pool),
+ NULL, mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_add_file("bad", cstr_stream("line\nno\r\n", pool),
+ NULL, mtcc, pool));
+
+ SVN_ERR(svn_client__mtcc_add_propset("ok", SVN_PROP_EOL_STYLE,
+ svn_string_create("LF", pool),
+ FALSE, mtcc, pool));
+
+ /* Setting svn:eol-style on a file with inconsistent EOLs must fail. */
+ SVN_TEST_ASSERT_ERROR(
+ svn_client__mtcc_add_propset("bad", SVN_PROP_EOL_STYLE,
+ svn_string_create("LF", pool),
+ FALSE, mtcc, pool),
+ SVN_ERR_ILLEGAL_TARGET);
+
+ SVN_ERR(verify_mtcc_commit(mtcc, 3, pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* Test updating existing file contents via mtcc: once supplying the
+ * known base contents ("iota") and once without a base ("A/mu"),
+ * combined with propsets on the updated node and on another node. */
+static svn_error_t *
+test_update_files(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_client__mtcc_t *mtcc;
+ svn_client_ctx_t *ctx;
+ const char *repos_url;
+
+ SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-update-files",
+ opts, pool, pool));
+ SVN_ERR(make_greek_tree(repos_url, pool));
+
+ SVN_ERR(svn_client_create_context2(&ctx, NULL, pool));
+ SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool));
+
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 1, ctx, pool, pool));
+
+ /* Update iota with knowledge of the old data */
+ SVN_ERR(svn_client__mtcc_add_update_file(svn_test__greek_tree_nodes[0].path,
+ cstr_stream("new-iota", pool),
+ NULL,
+ cstr_stream(
+ svn_test__greek_tree_nodes[0]
+ .contents,
+ pool),
+ NULL,
+ mtcc, pool));
+
+ /* Update A/mu without supplying base contents or checksums. */
+ SVN_ERR(svn_client__mtcc_add_update_file("A/mu",
+ cstr_stream("new-MU", pool),
+ NULL,
+ NULL, NULL,
+ mtcc, pool));
+
+ /* Set a property on the same node */
+ SVN_ERR(svn_client__mtcc_add_propset("A/mu", "mu-key",
+ svn_string_create("mu-A", pool), FALSE,
+ mtcc, pool));
+ /* And some other node */
+ SVN_ERR(svn_client__mtcc_add_propset("A/B", "B-key",
+ svn_string_create("val-B", pool), FALSE,
+ mtcc, pool));
+
+ SVN_ERR(verify_mtcc_commit(mtcc, 2, pool));
+ return SVN_NO_ERROR;
+}
+
+/* Test that adding nodes at paths which already exist inside a copied
+ * directory fails with SVN_ERR_FS_ALREADY_EXISTS, while adding a node
+ * at a genuinely new path inside the copy succeeds. */
+static svn_error_t *
+test_overwrite(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_client__mtcc_t *mtcc;
+ svn_client_ctx_t *ctx;
+ const char *repos_url;
+
+ SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-overwrite",
+ opts, pool, pool));
+
+ SVN_ERR(make_greek_tree(repos_url, pool));
+
+ SVN_ERR(svn_client_create_context2(&ctx, NULL, pool));
+ SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool));
+
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 1, ctx, pool, pool));
+
+ /* "AA" is a copy of "A", so it already contains "B" and "D/H/chi". */
+ SVN_ERR(svn_client__mtcc_add_copy("A", 1, "AA", mtcc, pool));
+
+ SVN_TEST_ASSERT_ERROR(svn_client__mtcc_add_mkdir("AA/B", mtcc, pool),
+ SVN_ERR_FS_ALREADY_EXISTS);
+
+ SVN_TEST_ASSERT_ERROR(svn_client__mtcc_add_mkdir("AA/D/H/chi", mtcc, pool),
+ SVN_ERR_FS_ALREADY_EXISTS);
+
+ /* A new path inside the copy is fine. */
+ SVN_ERR(svn_client__mtcc_add_mkdir("AA/BB", mtcc, pool));
+
+ SVN_ERR(verify_mtcc_commit(mtcc, 2, pool));
+ return SVN_NO_ERROR;
+}
+
+/* Test mtcc operations whose root URL is not the repository root but
+ * the node being operated on itself: file update, mkdir, file add,
+ * delete and propsets are all performed on "" relative to the mtcc
+ * root URL. */
+static svn_error_t *
+test_anchoring(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_client__mtcc_t *mtcc;
+ svn_client_ctx_t *ctx;
+ const char *repos_url;
+
+ SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-anchoring",
+ opts, pool, pool));
+
+ SVN_ERR(make_greek_tree(repos_url, pool));
+
+ SVN_ERR(svn_client_create_context2(&ctx, NULL, pool));
+ SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool));
+
+ /* Update a file as root operation */
+ SVN_ERR(svn_client__mtcc_create(&mtcc,
+ svn_path_url_add_component2(repos_url, "iota",
+ pool),
+ 1, ctx, pool, pool));
+ SVN_ERR(svn_client__mtcc_add_update_file("",
+ cstr_stream("new-iota", pool),
+ NULL, NULL, NULL,
+ mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_propset("", "key",
+ svn_string_create("value", pool),
+ FALSE, mtcc, pool));
+
+ SVN_ERR(verify_mtcc_commit(mtcc, 2, pool));
+
+ /* Add a directory as root operation */
+ SVN_ERR(svn_client__mtcc_create(&mtcc,
+ svn_path_url_add_component2(repos_url, "BB",
+ pool),
+ 2, ctx, pool, pool));
+ SVN_ERR(svn_client__mtcc_add_mkdir("", mtcc, pool));
+ SVN_ERR(verify_mtcc_commit(mtcc, 3, pool));
+
+ /* Add a file as root operation */
+ SVN_ERR(svn_client__mtcc_create(&mtcc,
+ svn_path_url_add_component2(repos_url, "new",
+ pool),
+ 3, ctx, pool, pool));
+ SVN_ERR(svn_client__mtcc_add_add_file("", cstr_stream("new", pool), NULL,
+ mtcc, pool));
+ SVN_ERR(verify_mtcc_commit(mtcc, 4, pool));
+
+ /* Delete as root operation */
+ SVN_ERR(svn_client__mtcc_create(&mtcc,
+ svn_path_url_add_component2(repos_url, "new",
+ pool),
+ 4, ctx, pool, pool));
+ SVN_ERR(svn_client__mtcc_add_delete("", mtcc, pool));
+ SVN_ERR(verify_mtcc_commit(mtcc, 5, pool));
+
+ /* Propset file as root operation */
+ SVN_ERR(svn_client__mtcc_create(&mtcc,
+ svn_path_url_add_component2(repos_url, "A/mu",
+ pool),
+ 5, ctx, pool, pool));
+ SVN_ERR(svn_client__mtcc_add_propset("", "key",
+ svn_string_create("val", pool),
+ FALSE, mtcc, pool));
+ SVN_ERR(verify_mtcc_commit(mtcc, 6, pool));
+
+ /* Propset dir as root operation */
+ SVN_ERR(svn_client__mtcc_create(&mtcc,
+ svn_path_url_add_component2(repos_url, "A",
+ pool),
+ 6, ctx, pool, pool));
+ SVN_ERR(svn_client__mtcc_add_propset("", "key",
+ svn_string_create("val", pool),
+ FALSE, mtcc, pool));
+ SVN_ERR(verify_mtcc_commit(mtcc, 7, pool));
+
+ /* Propset reposroot as root operation */
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 7, ctx, pool, pool));
+ SVN_ERR(svn_client__mtcc_add_propset("", "key",
+ svn_string_create("val", pool),
+ FALSE, mtcc, pool));
+ SVN_ERR(verify_mtcc_commit(mtcc, 8, pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_replace_tree(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_client__mtcc_t *mtcc;
+ svn_client_ctx_t *ctx;
+ const char *repos_url;
+
+ SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-replace_tree",
+ opts, pool, pool));
+
+ SVN_ERR(make_greek_tree(repos_url, pool));
+
+ SVN_ERR(svn_client_create_context2(&ctx, NULL, pool));
+ SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool));
+
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 1, ctx, pool, pool));
+
+ SVN_ERR(svn_client__mtcc_add_delete("A", mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_delete("iota", mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_mkdir("A", mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_mkdir("A/B", mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_mkdir("A/B/C", mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_mkdir("M", mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_mkdir("M/N", mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_mkdir("M/N/O", mtcc, pool));
+
+ SVN_ERR(verify_mtcc_commit(mtcc, 2, pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* Baton for handle_rev */
+struct handle_rev_baton
+{
+ svn_revnum_t last;
+ svn_boolean_t up;
+ svn_boolean_t first;
+
+ /* Per revision handler */
+ svn_txdelta_window_handler_t inner_handler;
+ void *inner_baton;
+
+ /* Swapped between revisions to reconstruct data */
+ svn_stringbuf_t *cur;
+ svn_stringbuf_t *prev;
+
+ /* Pool for some test stuff */
+ apr_pool_t *pool;
+};
+
+/* Implement svn_txdelta_window_handler_t */
+static svn_error_t *
+handle_rev_delta(svn_txdelta_window_t *window,
+ void * baton)
+{
+ struct handle_rev_baton *hrb = baton;
+
+ SVN_ERR(hrb->inner_handler(window, hrb->inner_baton));
+
+ if (!window)
+ {
+ int expected_rev;
+ const char *expected;
+
+ /* Some revisions don't update the revision body */
+ switch (hrb->last)
+ {
+ case 5:
+ expected_rev = 4;
+ break;
+ case 7: /* Not reported */
+ case 8:
+ expected_rev = 6;
+ break;
+ default:
+ expected_rev = (int)hrb->last;
+ }
+
+ expected = apr_psprintf(hrb->pool, "revision-%d", expected_rev);
+
+ SVN_TEST_STRING_ASSERT(hrb->cur->data, expected);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Helper for test_file_revs_both_ways */
+static svn_error_t *
+handle_rev(void *baton,
+ const char *path,
+ svn_revnum_t rev,
+ apr_hash_t *rev_props,
+ svn_boolean_t result_of_merge,
+ svn_txdelta_window_handler_t *delta_handler,
+ void **delta_baton,
+ apr_array_header_t *prop_diffs,
+ apr_pool_t *pool)
+{
+ struct handle_rev_baton *hrb = baton;
+ svn_revnum_t expected_rev = hrb->up ? (hrb->last + 1) : (hrb->last - 1);
+
+ if (expected_rev == 7)
+ expected_rev = hrb->up ? 8 : 6;
+
+ SVN_TEST_ASSERT(rev == expected_rev);
+ SVN_TEST_ASSERT(apr_hash_count(rev_props) >= 3);
+ SVN_TEST_STRING_ASSERT(path, (rev < 5) ? "/iota" : "/mu");
+
+ if (!hrb->first
+ && (rev == (hrb->up ? 5 : 4) || rev == (hrb->up ? 8 : 6)))
+ SVN_TEST_ASSERT(delta_handler == NULL);
+ else
+ SVN_TEST_ASSERT(delta_handler != NULL);
+
+ if (delta_handler)
+ {
+ svn_stringbuf_t *tmp;
+
+ *delta_handler = handle_rev_delta;
+ *delta_baton = hrb;
+
+ /* Swap string buffers, to use previous as original */
+ tmp = hrb->prev;
+ hrb->prev = hrb->cur;
+ hrb->cur = tmp;
+
+ svn_stringbuf_setempty(hrb->cur);
+
+ svn_txdelta_apply(svn_stream_from_stringbuf(hrb->prev, pool),
+ svn_stream_from_stringbuf(hrb->cur, pool),
+ NULL, NULL, pool,
+ &hrb->inner_handler,
+ &hrb->inner_baton);
+ }
+
+ hrb->last = rev;
+ hrb->first = FALSE;
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_file_revs_both_ways(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_client__mtcc_t *mtcc;
+ svn_client_ctx_t *ctx;
+ apr_pool_t *subpool = svn_pool_create(pool);
+ const char *repos_url;
+ svn_ra_session_t *ra;
+ struct handle_rev_baton hrb;
+
+ SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-file-revs",
+ opts, pool, subpool));
+
+ SVN_ERR(svn_client_create_context2(&ctx, NULL, pool));
+ SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool));
+
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 0, ctx, subpool, subpool));
+ SVN_ERR(svn_client__mtcc_add_add_file("iota",
+ cstr_stream("revision-1", subpool),
+ NULL /* src_checksum */,
+ mtcc, subpool));
+ SVN_ERR(verify_mtcc_commit(mtcc, 1, subpool));
+ svn_pool_clear(subpool);
+
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 1, ctx, subpool, subpool));
+ SVN_ERR(svn_client__mtcc_add_update_file("iota",
+ cstr_stream("revision-2", subpool),
+ NULL /* src_checksum */, NULL, NULL,
+ mtcc, subpool));
+ SVN_ERR(verify_mtcc_commit(mtcc, 2, subpool));
+ svn_pool_clear(subpool);
+
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 2, ctx, subpool, subpool));
+ SVN_ERR(svn_client__mtcc_add_update_file("iota",
+ cstr_stream("revision-3", subpool),
+ NULL /* src_checksum */, NULL, NULL,
+ mtcc, subpool));
+ SVN_ERR(verify_mtcc_commit(mtcc, 3, subpool));
+ svn_pool_clear(subpool);
+
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 3, ctx, subpool, subpool));
+ SVN_ERR(svn_client__mtcc_add_update_file("iota",
+ cstr_stream("revision-4", subpool),
+ NULL /* src_checksum */, NULL, NULL,
+ mtcc, subpool));
+ SVN_ERR(verify_mtcc_commit(mtcc, 4, subpool));
+ svn_pool_clear(subpool);
+
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 4, ctx, subpool, subpool));
+ SVN_ERR(svn_client__mtcc_add_move("iota", "mu", mtcc, subpool));
+ SVN_ERR(verify_mtcc_commit(mtcc, 5, subpool));
+ svn_pool_clear(subpool);
+
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 5, ctx, subpool, subpool));
+ SVN_ERR(svn_client__mtcc_add_update_file("mu",
+ cstr_stream("revision-6", subpool),
+ NULL /* src_checksum */, NULL, NULL,
+ mtcc, subpool));
+ SVN_ERR(verify_mtcc_commit(mtcc, 6, subpool));
+ svn_pool_clear(subpool);
+
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 6, ctx, subpool, subpool));
+ SVN_ERR(svn_client__mtcc_add_delete("mu", mtcc, subpool));
+ SVN_ERR(verify_mtcc_commit(mtcc, 7, subpool));
+ svn_pool_clear(subpool);
+
+ SVN_ERR(svn_client_open_ra_session2(&ra, repos_url, NULL, ctx, pool, subpool));
+
+ hrb.prev = svn_stringbuf_create("", pool);
+ hrb.cur = svn_stringbuf_create("", pool);
+ hrb.pool = pool;
+
+ svn_pool_clear(subpool);
+ hrb.up = FALSE;
+ hrb.last = 5;
+ hrb.first = TRUE;
+ svn_stringbuf_setempty(hrb.prev);
+ svn_stringbuf_setempty(hrb.cur);
+ SVN_ERR(svn_ra_get_file_revs2(ra, "iota", 4, 1, FALSE,
+ handle_rev, &hrb,
+ subpool));
+ SVN_TEST_ASSERT(hrb.last == 1);
+
+ svn_pool_clear(subpool);
+ hrb.up = TRUE;
+ hrb.last = 0;
+ hrb.first = TRUE;
+ svn_stringbuf_setempty(hrb.prev);
+ svn_stringbuf_setempty(hrb.cur);
+ SVN_ERR(svn_ra_get_file_revs2(ra, "iota", 1, 4, FALSE,
+ handle_rev, &hrb,
+ subpool));
+ SVN_TEST_ASSERT(hrb.last == 4);
+
+ svn_pool_clear(subpool);
+ hrb.up = FALSE;
+ hrb.last = 7;
+ hrb.first = TRUE;
+ svn_stringbuf_setempty(hrb.prev);
+ svn_stringbuf_setempty(hrb.cur);
+ SVN_ERR(svn_ra_get_file_revs2(ra, "mu", 6, 1, FALSE,
+ handle_rev, &hrb,
+ subpool));
+ SVN_TEST_ASSERT(hrb.last == 1);
+
+ svn_pool_clear(subpool);
+ hrb.up = TRUE;
+ hrb.last = 0;
+ hrb.first = TRUE;
+ svn_stringbuf_setempty(hrb.prev);
+ svn_stringbuf_setempty(hrb.cur);
+ SVN_ERR(svn_ra_get_file_revs2(ra, "mu", 1, 6, FALSE,
+ handle_rev, &hrb,
+ subpool));
+ SVN_TEST_ASSERT(hrb.last == 6);
+
+ /* Resurrect mu */
+ svn_pool_clear(subpool);
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 7, ctx, subpool, subpool));
+ SVN_ERR(svn_client__mtcc_add_copy("mu", 6, "mu", mtcc, subpool));
+ SVN_ERR(verify_mtcc_commit(mtcc, 8, subpool));
+
+ svn_pool_clear(subpool);
+ hrb.up = TRUE;
+ hrb.last = 0;
+ hrb.first = TRUE;
+ svn_stringbuf_setempty(hrb.prev);
+ svn_stringbuf_setempty(hrb.cur);
+ SVN_ERR(svn_ra_get_file_revs2(ra, "mu", 1, SVN_INVALID_REVNUM, FALSE,
+ handle_rev, &hrb,
+ subpool));
+ SVN_TEST_ASSERT(hrb.last == 8);
+
+ svn_pool_clear(subpool);
+ hrb.up = FALSE;
+ hrb.last = 9;
+ hrb.first = TRUE;
+ svn_stringbuf_setempty(hrb.prev);
+ svn_stringbuf_setempty(hrb.cur);
+ SVN_ERR(svn_ra_get_file_revs2(ra, "mu", SVN_INVALID_REVNUM, 1, FALSE,
+ handle_rev, &hrb,
+ subpool));
+ SVN_TEST_ASSERT(hrb.last == 1);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_iprops_path_format(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_client__mtcc_t *mtcc;
+ svn_client_ctx_t *ctx;
+ apr_pool_t *subpool = svn_pool_create(pool);
+ const char *repos_url;
+ svn_ra_session_t *ra;
+
+ SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-iprops-paths",
+ opts, pool, subpool));
+
+ SVN_ERR(svn_client_create_context2(&ctx, NULL, pool));
+ SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool));
+
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 0, ctx, subpool, subpool));
+ SVN_ERR(svn_client__mtcc_add_mkdir("A", mtcc, subpool));
+ SVN_ERR(svn_client__mtcc_add_mkdir("A/B", mtcc, subpool));
+ SVN_ERR(svn_client__mtcc_add_mkdir("A/B/C", mtcc, subpool));
+ SVN_ERR(svn_client__mtcc_add_mkdir("A/B/C/D", mtcc, subpool));
+ SVN_ERR(svn_client__mtcc_add_propset("", "on-root",
+ svn_string_create("ROOT", subpool),
+ FALSE, mtcc, subpool));
+ SVN_ERR(svn_client__mtcc_add_propset("A/B", "on-B",
+ svn_string_create("BBBB", subpool),
+ FALSE, mtcc, subpool));
+ SVN_ERR(svn_client__mtcc_add_propset("A/B/C", "Z",
+ svn_string_create("Z", subpool),
+ FALSE, mtcc, subpool));
+ SVN_ERR(verify_mtcc_commit(mtcc, 1, subpool));
+ svn_pool_clear(subpool);
+
+ {
+ apr_array_header_t *iprops;
+ svn_prop_inherited_item_t *ip;
+
+ SVN_ERR(svn_client_open_ra_session2(&ra, repos_url, NULL, ctx,
+ pool, subpool));
+
+ SVN_ERR(svn_ra_get_inherited_props(ra, &iprops, "A/B/C/D", 1,
+ subpool, subpool));
+
+ SVN_TEST_ASSERT(iprops != NULL);
+ SVN_TEST_INT_ASSERT(iprops->nelts, 3);
+
+ ip = APR_ARRAY_IDX(iprops, 0, svn_prop_inherited_item_t *);
+ SVN_TEST_STRING_ASSERT(ip->path_or_url, "");
+
+ ip = APR_ARRAY_IDX(iprops, 1, svn_prop_inherited_item_t *);
+ SVN_TEST_STRING_ASSERT(ip->path_or_url, "A/B");
+
+ ip = APR_ARRAY_IDX(iprops, 2, svn_prop_inherited_item_t *);
+ SVN_TEST_STRING_ASSERT(ip->path_or_url, "A/B/C");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_move_and_delete_ancestor(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_client__mtcc_t *mtcc;
+ svn_client_ctx_t *ctx;
+ const char *repos_url;
+
+ SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-move-and-delete",
+ opts, pool, pool));
+
+ SVN_ERR(make_greek_tree(repos_url, pool));
+
+ SVN_ERR(svn_client_create_context2(&ctx, NULL, pool));
+ SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool));
+
+ SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 1, ctx, pool, pool));
+
+ SVN_ERR(svn_client__mtcc_add_move("A/B", "B", mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_move("A/mu", "mu", mtcc, pool));
+ SVN_ERR(svn_client__mtcc_add_delete("A", mtcc, pool));
+
+ SVN_ERR(verify_mtcc_commit(mtcc, 2, pool));
+
+ return SVN_NO_ERROR;
+
+}
+
+
+/* ========================================================================== */
+
+
+static int max_threads = 3;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_OPTS_PASS(test_mkdir,
+ "test mtcc mkdir"),
+ SVN_TEST_OPTS_PASS(test_mkgreek,
+ "test making greek tree"),
+ SVN_TEST_OPTS_PASS(test_swap,
+ "swapping some trees"),
+ SVN_TEST_OPTS_PASS(test_propset,
+ "test propset and propdel"),
+ SVN_TEST_OPTS_PASS(test_update_files,
+ "test update files"),
+ SVN_TEST_OPTS_PASS(test_overwrite,
+ "test overwrite"),
+ SVN_TEST_OPTS_PASS(test_anchoring,
+ "test mtcc anchoring for root operations"),
+ SVN_TEST_OPTS_PASS(test_replace_tree,
+ "test mtcc replace tree"),
+ SVN_TEST_OPTS_PASS(test_file_revs_both_ways,
+ "test ra_get_file_revs2 both ways"),
+ SVN_TEST_OPTS_PASS(test_iprops_path_format,
+ "test iprops url format"),
+ SVN_TEST_OPTS_PASS(test_move_and_delete_ancestor,
+ "test move and delete ancestor (issue 4666)"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_delta/delta-window-test.h b/subversion/tests/libsvn_delta/delta-window-test.h
new file mode 100644
index 0000000..e979b94
--- /dev/null
+++ b/subversion/tests/libsvn_delta/delta-window-test.h
@@ -0,0 +1,122 @@
+/* delta-window-test.h -- utilities for delta window output
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#ifndef SVN_DELTA_WINDOW_TEST_H
+#define SVN_DELTA_WINDOW_TEST_H
+
+#define APR_WANT_STDIO
+#define APR_WANT_STRFUNC
+#include <apr_want.h>
+
+#include "svn_delta.h"
+#include "svn_ctype.h"
+
+static apr_off_t
+delta_window_size_estimate(const svn_txdelta_window_t *window)
+{
+ apr_off_t len;
+ int i;
+
+ if (!window)
+ return 0;
+
+ /* Try to estimate the size of the delta. */
+ for (i = 0, len = 0; i < window->num_ops; ++i)
+ {
+ apr_size_t const offset = window->ops[i].offset;
+ apr_size_t const length = window->ops[i].length;
+ if (window->ops[i].action_code == svn_txdelta_new)
+ {
+ len += 1; /* opcode */
+ len += (length > 255 ? 2 : 1);
+ len += length;
+ }
+ else
+ {
+ len += 1; /* opcode */
+ len += (offset > 255 ? 2 : 1);
+ len += (length > 255 ? 2 : 1);
+ }
+ }
+
+ return len;
+}
+
+
+static apr_off_t
+delta_window_print(const svn_txdelta_window_t *window,
+ const char *tag, FILE *stream)
+{
+ const apr_off_t len = delta_window_size_estimate(window);
+ apr_off_t op_offset = 0;
+ int i;
+
+ if (!window)
+ return 0;
+
+ fprintf(stream, "%s: (WINDOW %" APR_OFF_T_FMT, tag, len);
+ fprintf(stream,
+ " (%" SVN_FILESIZE_T_FMT
+ " %" APR_SIZE_T_FMT " %" APR_SIZE_T_FMT ")",
+ window->sview_offset, window->sview_len, window->tview_len);
+ for (i = 0; i < window->num_ops; ++i)
+ {
+ apr_size_t const offset = window->ops[i].offset;
+ apr_size_t const length = window->ops[i].length;
+ apr_size_t tmp;
+ switch (window->ops[i].action_code)
+ {
+ case svn_txdelta_source:
+ fprintf(stream, "\n%s: (%" APR_OFF_T_FMT " SRC %" APR_SIZE_T_FMT
+ " %" APR_SIZE_T_FMT ")", tag, op_offset, offset, length);
+ break;
+ case svn_txdelta_target:
+ fprintf(stream, "\n%s: (%" APR_OFF_T_FMT " TGT %" APR_SIZE_T_FMT
+ " %" APR_SIZE_T_FMT ")", tag, op_offset, offset, length);
+ break;
+ case svn_txdelta_new:
+ fprintf(stream, "\n%s: (%" APR_OFF_T_FMT " NEW %"
+ APR_SIZE_T_FMT " \"", tag, op_offset, length);
+ for (tmp = offset; tmp < offset + length; ++tmp)
+ {
+ int const dat = window->new_data->data[tmp];
+ if (svn_ctype_iscntrl(dat) || !svn_ctype_isascii(dat))
+ fprintf(stream, "\\%3.3o", dat & 0xff);
+ else if (dat == '\\')
+ fputs("\\\\", stream);
+ else
+ putc(dat, stream);
+ }
+ fputs("\")", stream);
+ break;
+ default:
+ fprintf(stream, "\n%s: (BAD-OP)", tag);
+ }
+
+ op_offset += length;
+ }
+ fputs(")\n", stream);
+ return len;
+}
+
+
+#endif /* SVN_DELTA_WINDOW_TEST_H */
diff --git a/subversion/tests/libsvn_delta/random-test.c b/subversion/tests/libsvn_delta/random-test.c
new file mode 100644
index 0000000..a33a7bb
--- /dev/null
+++ b/subversion/tests/libsvn_delta/random-test.c
@@ -0,0 +1,635 @@
+/*
+ * random-test.c: Test delta generation and application using random data.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+
+#include <assert.h>
+
+#define APR_WANT_STDIO
+#define APR_WANT_STRFUNC
+#include <apr_want.h>
+#include <apr_general.h>
+#include <apr_getopt.h>
+#include <apr_file_io.h>
+
+#include "../svn_test.h"
+
+#include "svn_delta.h"
+#include "svn_pools.h"
+#include "svn_error.h"
+
+#include "../../libsvn_delta/delta.h"
+#include "delta-window-test.h"
+
+
+#define DEFAULT_ITERATIONS 60
+#define DEFAULT_MAXLEN (100 * 1024)
+#define DEFAULT_DUMP_FILES 0
+#define DEFAULT_PRINT_WINDOWS 0
+#define SEEDS 50
+#define MAXSEQ 100
+
+
+/* Initialize parameters for the random tests. */
+extern int test_argc;
+extern const char **test_argv;
+
+static void init_params(apr_uint32_t *seed,
+ apr_uint32_t *maxlen, int *iterations,
+ int *dump_files, int *print_windows,
+ const char **random_bytes,
+ apr_size_t *bytes_range,
+ apr_pool_t *pool)
+{
+ apr_getopt_t *opt;
+ char optch;
+ const char *opt_arg;
+ apr_status_t status;
+
+ *seed = (apr_uint32_t) apr_time_now();
+ *maxlen = DEFAULT_MAXLEN;
+ *iterations = DEFAULT_ITERATIONS;
+ *dump_files = DEFAULT_DUMP_FILES;
+ *print_windows = DEFAULT_PRINT_WINDOWS;
+ *random_bytes = NULL;
+ *bytes_range = 256;
+
+ apr_getopt_init(&opt, pool, test_argc, test_argv);
+ while (APR_SUCCESS
+ == (status = apr_getopt(opt, "s:l:n:r:FW", &optch, &opt_arg)))
+ {
+ switch (optch)
+ {
+ case 's':
+ *seed = (apr_uint32_t) atol(opt_arg);
+ break;
+ case 'l':
+ *maxlen = atoi(opt_arg);
+ break;
+ case 'n':
+ *iterations = atoi(opt_arg);
+ break;
+ case 'r':
+ *random_bytes = opt_arg + 1;
+ *bytes_range = strlen(*random_bytes);
+ break;
+ case 'F':
+ *dump_files = !*dump_files;
+ break;
+ case 'W':
+ *print_windows = !*print_windows;
+ break;
+ }
+ }
+}
+
+
+/* Open a temporary file. */
+static apr_file_t *
+open_tempfile(const char *name_template, apr_pool_t *pool)
+{
+ apr_status_t apr_err;
+ apr_file_t *fp = NULL;
+ char *templ = (char *)apr_pstrdup(
+ pool, svn_test_data_path(
+ name_template ? name_template : "tempfile_XXXXXX", pool));
+
+ apr_err = apr_file_mktemp(&fp, templ, 0, pool);
+ assert(apr_err == 0);
+ assert(fp != NULL);
+ return fp;
+}
+
+/* Rewind the file pointer */
+static void rewind_file(apr_file_t *fp)
+{
+ apr_off_t offset = 0;
+#ifndef NDEBUG
+ apr_status_t apr_err =
+#endif
+ apr_file_seek(fp, APR_SET, &offset);
+ assert(apr_err == 0);
+ assert(offset == 0);
+}
+
+
+static void
+dump_file_contents(apr_file_t *fp)
+{
+ static char file_buffer[10240];
+ apr_size_t length = sizeof file_buffer;
+ fputs("--------\n", stdout);
+ do
+ {
+ apr_file_read_full(fp, file_buffer, sizeof file_buffer, &length);
+ fwrite(file_buffer, 1, length, stdout);
+ }
+ while (length == sizeof file_buffer);
+ putc('\n', stdout);
+ rewind_file(fp);
+}
+
+/* Generate a temporary file containing sort-of random data. Diffs
+ between files of random data tend to be pretty boring, so we try to
+ make sure there are a bunch of common substrings between two runs
+ of this function with the same seedbase. */
+static apr_file_t *
+generate_random_file(apr_uint32_t maxlen,
+ apr_uint32_t subseed_base,
+ apr_uint32_t *seed,
+ const char *random_bytes,
+ apr_size_t bytes_range,
+ int dump_files,
+ apr_pool_t *pool)
+{
+ static char file_buffer[10240];
+ char *buf = file_buffer;
+ char *const end = buf + sizeof file_buffer;
+
+ apr_uint32_t len, seqlen;
+ apr_file_t *fp;
+ unsigned long r;
+
+ fp = open_tempfile("random_XXXXXX", pool);
+ len = svn_test_rand(seed) % maxlen; /* We might go over this by a bit. */
+ while (len > 0)
+ {
+ /* Generate a pseudo-random sequence of up to MAXSEQ bytes,
+ where the seed is in the range [seedbase..seedbase+MAXSEQ-1].
+ (Use our own pseudo-random number generator here to avoid
+ clobbering the seed of the libc random number generator.) */
+
+ seqlen = svn_test_rand(seed) % MAXSEQ;
+ if (seqlen > len) seqlen = len;
+ len -= seqlen;
+ r = subseed_base + svn_test_rand(seed) % SEEDS;
+ while (seqlen-- > 0)
+ {
+ const int ch = (random_bytes
+ ? (unsigned)random_bytes[r % bytes_range]
+ : (int)(r % bytes_range));
+ if (buf == end)
+ {
+ apr_size_t ignore_length;
+ apr_file_write_full(fp, file_buffer, sizeof file_buffer,
+ &ignore_length);
+ buf = file_buffer;
+ }
+
+ *buf++ = (char)ch;
+ r = r * 1103515245 + 12345;
+ }
+ }
+
+ if (buf > file_buffer)
+ {
+ apr_size_t ignore_length;
+ apr_file_write_full(fp, file_buffer, buf - file_buffer, &ignore_length);
+ }
+ rewind_file(fp);
+
+ if (dump_files)
+ dump_file_contents(fp);
+
+ return fp;
+}
+
+/* Compare two open files. The file positions may change. */
+static svn_error_t *
+compare_files(apr_file_t *f1, apr_file_t *f2, int dump_files)
+{
+ static char file_buffer_1[10240];
+ static char file_buffer_2[10240];
+
+ char *c1, *c2;
+ apr_off_t pos = 0;
+ apr_size_t len1, len2;
+
+ rewind_file(f1);
+ rewind_file(f2);
+
+ if (dump_files)
+ dump_file_contents(f2);
+
+ do
+ {
+ apr_file_read_full(f1, file_buffer_1, sizeof file_buffer_1, &len1);
+ apr_file_read_full(f2, file_buffer_2, sizeof file_buffer_2, &len2);
+
+ for (c1 = file_buffer_1, c2 = file_buffer_2;
+ c1 < file_buffer_1 + len1 && c2 < file_buffer_2 + len2;
+ ++c1, ++c2, ++pos)
+ {
+ if (*c1 != *c2)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "mismatch at position %"APR_OFF_T_FMT,
+ pos);
+ }
+
+ if (len1 != len2)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "unequal file sizes at position"
+ " %"APR_OFF_T_FMT, pos);
+ }
+ while (len1 == sizeof file_buffer_1);
+ return SVN_NO_ERROR;
+}
+
+
+static apr_file_t *
+copy_tempfile(apr_file_t *fp, apr_pool_t *pool)
+{
+ static char file_buffer[10240];
+ apr_file_t *newfp;
+ apr_size_t length1, length2;
+
+ newfp = open_tempfile("copy_XXXXXX", pool);
+
+ rewind_file(fp);
+ do
+ {
+ apr_file_read_full(fp, file_buffer, sizeof file_buffer, &length1);
+ apr_file_write_full(newfp, file_buffer, length1, &length2);
+ assert(length1 == length2);
+ }
+ while (length1 == sizeof file_buffer);
+
+ rewind_file(fp);
+ rewind_file(newfp);
+ return newfp;
+}
+
+
+
+/* (Note: *LAST_SEED is an output parameter.) */
+static svn_error_t *
+do_random_test(apr_pool_t *pool,
+ apr_uint32_t *last_seed)
+{
+ apr_uint32_t seed, maxlen;
+ apr_size_t bytes_range;
+ int i, iterations, dump_files, print_windows;
+ const char *random_bytes;
+
+ /* Initialize parameters and print out the seed in case we dump core
+ or something. */
+ init_params(&seed, &maxlen, &iterations, &dump_files, &print_windows,
+ &random_bytes, &bytes_range, pool);
+
+ for (i = 0; i < iterations; i++)
+ {
+ /* Generate source and target for the delta and its application. */
+ apr_uint32_t subseed_base = svn_test_rand((*last_seed = seed, &seed));
+ apr_file_t *source = generate_random_file(maxlen, subseed_base, &seed,
+ random_bytes, bytes_range,
+ dump_files, pool);
+ apr_file_t *target = generate_random_file(maxlen, subseed_base, &seed,
+ random_bytes, bytes_range,
+ dump_files, pool);
+ apr_file_t *source_copy = copy_tempfile(source, pool);
+ apr_file_t *target_regen = open_tempfile(NULL, pool);
+
+ svn_txdelta_stream_t *txdelta_stream;
+ svn_txdelta_window_handler_t handler;
+ svn_stream_t *stream;
+ void *handler_baton;
+
+ /* Set up a four-stage pipeline: create a delta, convert it to
+ svndiff format, parse it back into delta format, and apply it
+ to a copy of the source file to see if we get the same target
+ back. */
+ apr_pool_t *delta_pool = svn_pool_create(pool);
+
+ /* Make stage 4: apply the text delta. */
+ svn_txdelta_apply(svn_stream_from_aprfile(source_copy, delta_pool),
+ svn_stream_from_aprfile(target_regen, delta_pool),
+ NULL, NULL, delta_pool, &handler, &handler_baton);
+
+ /* Make stage 3: reparse the text delta. */
+ stream = svn_txdelta_parse_svndiff(handler, handler_baton, TRUE,
+ delta_pool);
+
+ /* Make stage 2: encode the text delta in svndiff format using
+ varying svndiff versions and compression levels. */
+ svn_txdelta_to_svndiff3(&handler, &handler_baton, stream, i % 3,
+ i % 10, delta_pool);
+
+ /* Make stage 1: create the text delta. */
+ svn_txdelta2(&txdelta_stream,
+ svn_stream_from_aprfile(source, delta_pool),
+ svn_stream_from_aprfile(target, delta_pool),
+ FALSE,
+ delta_pool);
+
+ SVN_ERR(svn_txdelta_send_txstream(txdelta_stream,
+ handler,
+ handler_baton,
+ delta_pool));
+
+ svn_pool_destroy(delta_pool);
+
+ SVN_ERR(compare_files(target, target_regen, dump_files));
+
+ apr_file_close(source);
+ apr_file_close(target);
+ apr_file_close(source_copy);
+ apr_file_close(target_regen);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Implements svn_test_driver_t. */
+static svn_error_t *
+random_test(apr_pool_t *pool)
+{
+ apr_uint32_t seed;
+ svn_error_t *err = do_random_test(pool, &seed);
+ if (err)
+ fprintf(stderr, "SEED: %lu\n", (unsigned long)seed);
+ return err;
+}
+
+
+
+/* (Note: *LAST_SEED is an output parameter.) */
+static svn_error_t *
+do_random_combine_test(apr_pool_t *pool,
+ apr_uint32_t *last_seed)
+{
+ apr_uint32_t seed, maxlen;
+ apr_size_t bytes_range;
+ int i, iterations, dump_files, print_windows;
+ const char *random_bytes;
+
+ /* Initialize parameters and print out the seed in case we dump core
+ or something. */
+ init_params(&seed, &maxlen, &iterations, &dump_files, &print_windows,
+ &random_bytes, &bytes_range, pool);
+
+ for (i = 0; i < iterations; i++)
+ {
+ /* Generate source and target for the delta and its application. */
+ apr_uint32_t subseed_base = svn_test_rand((*last_seed = seed, &seed));
+ apr_file_t *source = generate_random_file(maxlen, subseed_base, &seed,
+ random_bytes, bytes_range,
+ dump_files, pool);
+ apr_file_t *middle = generate_random_file(maxlen, subseed_base, &seed,
+ random_bytes, bytes_range,
+ dump_files, pool);
+ apr_file_t *target = generate_random_file(maxlen, subseed_base, &seed,
+ random_bytes, bytes_range,
+ dump_files, pool);
+ apr_file_t *source_copy = copy_tempfile(source, pool);
+ apr_file_t *middle_copy = copy_tempfile(middle, pool);
+ apr_file_t *target_regen = open_tempfile(NULL, pool);
+
+ svn_txdelta_stream_t *txdelta_stream_A;
+ svn_txdelta_stream_t *txdelta_stream_B;
+ svn_txdelta_window_handler_t handler;
+ svn_stream_t *stream;
+ void *handler_baton;
+
+ /* Set up a four-stage pipeline: create two deltas, combine them
+ and convert the result to svndiff format, parse that back
+ into delta format, and apply it to a copy of the source file
+ to see if we get the same target back. */
+ apr_pool_t *delta_pool = svn_pool_create(pool);
+
+ /* Make stage 4: apply the text delta. */
+ svn_txdelta_apply(svn_stream_from_aprfile(source_copy, delta_pool),
+ svn_stream_from_aprfile(target_regen, delta_pool),
+ NULL, NULL, delta_pool, &handler, &handler_baton);
+
+ /* Make stage 3: reparse the text delta. */
+ stream = svn_txdelta_parse_svndiff(handler, handler_baton, TRUE,
+ delta_pool);
+
+ /* Make stage 2: encode the text delta in svndiff format using
+ varying svndiff versions and compression levels. */
+ svn_txdelta_to_svndiff3(&handler, &handler_baton, stream, i % 3,
+ i % 10, delta_pool);
+
+ /* Make stage 1: create the text deltas. */
+
+ svn_txdelta2(&txdelta_stream_A,
+ svn_stream_from_aprfile(source, delta_pool),
+ svn_stream_from_aprfile(middle, delta_pool),
+ FALSE,
+ delta_pool);
+
+ svn_txdelta2(&txdelta_stream_B,
+ svn_stream_from_aprfile(middle_copy, delta_pool),
+ svn_stream_from_aprfile(target, delta_pool),
+ FALSE,
+ delta_pool);
+
+ {
+ svn_txdelta_window_t *window_A;
+ svn_txdelta_window_t *window_B;
+ svn_txdelta_window_t *composite;
+ apr_pool_t *wpool = svn_pool_create(delta_pool);
+
+ do
+ {
+ SVN_ERR(svn_txdelta_next_window(&window_A, txdelta_stream_A,
+ wpool));
+ if (print_windows)
+ delta_window_print(window_A, "A ", stdout);
+ SVN_ERR(svn_txdelta_next_window(&window_B, txdelta_stream_B,
+ wpool));
+ if (print_windows)
+ delta_window_print(window_B, "B ", stdout);
+ if (!window_B)
+ break;
+ assert(window_A != NULL || window_B->src_ops == 0);
+ if (window_B->src_ops == 0)
+ {
+ composite = window_B;
+ composite->sview_len = 0;
+ }
+ else
+ composite = svn_txdelta_compose_windows(window_A, window_B,
+ wpool);
+ if (print_windows)
+ delta_window_print(composite, "AB", stdout);
+
+ /* The source view length should not be 0 if there are
+ source copy ops in the window. */
+ if (composite
+ && composite->sview_len == 0 && composite->src_ops > 0)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "combined delta window is inconsistent");
+
+ SVN_ERR(handler(composite, handler_baton));
+ svn_pool_clear(wpool);
+ }
+ while (composite != NULL);
+ svn_pool_destroy(wpool);
+ }
+
+ svn_pool_destroy(delta_pool);
+
+ SVN_ERR(compare_files(target, target_regen, dump_files));
+
+ apr_file_close(source);
+ apr_file_close(middle);
+ apr_file_close(target);
+ apr_file_close(source_copy);
+ apr_file_close(middle_copy);
+ apr_file_close(target_regen);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Implements svn_test_driver_t. */
+static svn_error_t *
+random_combine_test(apr_pool_t *pool)
+{
+ apr_uint32_t seed;
+ svn_error_t *err = do_random_combine_test(pool, &seed);
+ if (err)
+ fprintf(stderr, "SEED: %lu\n", (unsigned long)seed);
+ return err;
+}
+
+
+/* (Note: *LAST_SEED is an output parameter.) */
+static svn_error_t *
+do_random_txdelta_to_svndiff_stream_test(apr_pool_t *pool,
+ apr_uint32_t *last_seed)
+{
+ apr_uint32_t seed;
+ apr_uint32_t maxlen;
+ apr_size_t bytes_range;
+ int i;
+ int iterations;
+ int dump_files;
+ int print_windows;
+ const char *random_bytes;
+ apr_pool_t *iterpool;
+
+ /* Initialize parameters and print out the seed in case we dump core
+ or something. */
+ init_params(&seed, &maxlen, &iterations, &dump_files, &print_windows,
+ &random_bytes, &bytes_range, pool);
+
+ iterpool = svn_pool_create(pool);
+ for (i = 0; i < iterations; i++)
+ {
+ apr_uint32_t subseed_base;
+ apr_file_t *source;
+ apr_file_t *target;
+ apr_file_t *source_copy;
+ apr_file_t *new_target;
+ svn_txdelta_stream_t *txstream;
+ svn_stream_t *delta_stream;
+ svn_txdelta_window_handler_t handler;
+ void *handler_baton;
+ svn_stream_t *push_stream;
+
+ svn_pool_clear(iterpool);
+
+ /* Generate source and target for the delta and its application. */
+ *last_seed = seed;
+ subseed_base = svn_test_rand(&seed);
+ source = generate_random_file(maxlen, subseed_base, &seed,
+ random_bytes, bytes_range,
+ dump_files, iterpool);
+ target = generate_random_file(maxlen, subseed_base, &seed,
+ random_bytes, bytes_range,
+ dump_files, iterpool);
+ source_copy = copy_tempfile(source, iterpool);
+ new_target = open_tempfile(NULL, iterpool);
+
+ /* Create a txdelta stream that turns the source into target;
+ turn it into a generic readable svn_stream_t. */
+ svn_txdelta2(&txstream,
+ svn_stream_from_aprfile2(source, TRUE, iterpool),
+ svn_stream_from_aprfile2(target, TRUE, iterpool),
+ FALSE, iterpool);
+ delta_stream = svn_txdelta_to_svndiff_stream(txstream, i % 3, i % 10,
+ iterpool);
+
+ /* Apply it to a copy of the source file to see if we get the
+ same target back. */
+ svn_txdelta_apply(svn_stream_from_aprfile2(source_copy, TRUE, iterpool),
+ svn_stream_from_aprfile2(new_target, TRUE, iterpool),
+ NULL, NULL, iterpool, &handler, &handler_baton);
+ push_stream = svn_txdelta_parse_svndiff(handler, handler_baton, TRUE,
+ iterpool);
+ SVN_ERR(svn_stream_copy3(delta_stream, push_stream, NULL, NULL,
+ iterpool));
+
+ SVN_ERR(compare_files(target, new_target, dump_files));
+
+ apr_file_close(source);
+ apr_file_close(target);
+ apr_file_close(source_copy);
+ apr_file_close(new_target);
+ }
+ svn_pool_destroy(iterpool);
+
+ return SVN_NO_ERROR;
+}
+
+/* Implements svn_test_driver_t. */
+static svn_error_t *
+random_txdelta_to_svndiff_stream_test(apr_pool_t *pool)
+{
+ apr_uint32_t seed;
+ svn_error_t *err = do_random_txdelta_to_svndiff_stream_test(pool, &seed);
+ if (err)
+ fprintf(stderr, "SEED: %lu\n", (unsigned long)seed);
+ return err;
+}
+
+/* Change to 1 to enable the unit test for the delta combiner's range index: */
+#if 0
+#include "range-index-test.h"
+#endif
+
+
+
+/* The test table. */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(random_test,
+ "random delta test"),
+ SVN_TEST_PASS2(random_combine_test,
+ "random combine delta test"),
+ SVN_TEST_PASS2(random_txdelta_to_svndiff_stream_test,
+ "random txdelta to svndiff stream test"),
+#ifdef SVN_RANGE_INDEX_TEST_H
+ SVN_TEST_PASS2(random_range_index_test,
+ "random range index test"),
+#endif
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_delta/range-index-test.h b/subversion/tests/libsvn_delta/range-index-test.h
new file mode 100644
index 0000000..f330a6a
--- /dev/null
+++ b/subversion/tests/libsvn_delta/range-index-test.h
@@ -0,0 +1,195 @@
+/*
+ * range-index-test.h: An extension for random-test.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#ifndef SVN_RANGE_INDEX_TEST_H
+#define SVN_RANGE_INDEX_TEST_H
+
+#include "../../libsvn_delta/compose_delta.c"
+
+/* The two most recently visited nodes of the in-order walk below. */
+static range_index_node_t *prev_node, *prev_prev_node;
+/* Walk the range-index tree rooted at NODE in order and verify that each
+   node's [offset,limit) range is consistent with its predecessors.  On the
+   first violation, negate the offending node's target_offset (which marks
+   it for print_node_data), set *MSG to a description of the problem, and
+   return the original (positive) target_offset.  Return 0 when the whole
+   subtree is consistent.
+   NOTE(review): RET is declared apr_off_t (signed) but the function
+   returns apr_size_t (unsigned) -- confirm this mismatch is intended. */
+static apr_size_t
+walk_range_index(range_index_node_t *node, const char **msg)
+{
+  apr_off_t ret;
+
+  if (node == NULL)
+    return 0;
+
+  /* Check the left subtree first; propagate any failure immediately. */
+  ret = walk_range_index(node->left, msg);
+  if (ret > 0)
+    return ret;
+
+  /* The in-order predecessor must lie strictly before this node. */
+  if (prev_node != NULL
+      && node->target_offset > 0
+      && (prev_node->offset >= node->offset
+          || (prev_node->limit >= node->limit)))
+    {
+      ret = node->target_offset;
+      node->target_offset = -node->target_offset;
+      *msg = "Oops, the previous node ate me.";
+      return ret;
+    }
+  /* The node before that must not overlap this node either. */
+  if (prev_prev_node != NULL
+      && prev_node->target_offset > 0
+      && prev_prev_node->limit > node->offset)
+    {
+      ret = prev_node->target_offset;
+      prev_node->target_offset = -prev_node->target_offset;
+      *msg = "Arrgh, my neighbours are conspiring against me.";
+      return ret;
+    }
+  prev_prev_node = prev_node;
+  prev_node = node;
+
+  return walk_range_index(node->right, msg);
+}
+
+
+/* Print one line describing NODE to stdout.  The node whose
+   target_offset was negated by walk_range_index() (i.e. the one with
+   -target_offset == NDX) is highlighted with a '*' and annotated with
+   MSG; all other nodes are printed plainly. */
+static void
+print_node_data(range_index_node_t *node, const char *msg, apr_off_t ndx)
+{
+  if (-node->target_offset == ndx)
+    {
+      /* This is the offending node found by the consistency walk. */
+      printf(" * Node: [%3"APR_SIZE_T_FMT
+             ",%3"APR_SIZE_T_FMT
+             ") = %-5"APR_SIZE_T_FMT"%s\n",
+             node->offset, node->limit, -node->target_offset, msg);
+    }
+  else
+    {
+      printf(" Node: [%3"APR_SIZE_T_FMT
+             ",%3"APR_SIZE_T_FMT
+             ") = %"APR_SIZE_T_FMT"\n",
+             node->offset, node->limit,
+             node->target_offset);
+    }
+}
+
+/* Dump the subtree under NODE in symmetric (in-order) fashion,
+   delegating the per-node output to print_node_data(). */
+static void
+print_range_index_r(range_index_node_t *node, const char *msg, apr_off_t ndx)
+{
+  if (node != NULL)
+    {
+      print_range_index_r(node->left, msg, ndx);
+      print_node_data(node, msg, ndx);
+      print_range_index_r(node->right, msg, ndx);
+    }
+}
+
+/* Dump the whole index by following the doubly-linked node list:
+   rewind to its head, then print every node in list order. */
+static void
+print_range_index_i(range_index_node_t *node, const char *msg, apr_off_t ndx)
+{
+  range_index_node_t *cur;
+
+  if (node == NULL)
+    return;
+
+  /* Rewind to the head of the list. */
+  for (cur = node; cur->prev; cur = cur->prev)
+    ;
+
+  for (; cur != NULL; cur = cur->next)
+    print_node_data(cur, msg, ndx);
+}
+
+/* Dump the index twice -- once via tree recursion and once via the
+   linked list -- so that discrepancies between the two structures are
+   visible in the output. */
+static void
+print_range_index(range_index_node_t *node, const char *msg, apr_off_t ndx)
+{
+  puts(" (recursive)");
+  print_range_index_r(node, msg, ndx);
+  puts(" (iterative)");
+  print_range_index_i(node, msg, ndx);
+}
+
+
+/* Report the number of copy-from-source and copy-from-target ops seen,
+   warning when source copies dominate -- a hint that the delta combiner
+   is being inefficient. */
+static void
+check_copy_count(int src_cp, int tgt_cp)
+{
+  printf("Source copies: %d Target copies: %d\n", src_cp, tgt_cp);
+  if (src_cp <= tgt_cp)
+    return;
+
+  printf("WARN: More source than target copies; inefficient combiner?\n");
+}
+
+
+/* Repeatedly insert random [offset,limit) ranges into a range index and
+   verify (via walk_range_index) that the tree stays consistent after
+   every insertion.  On failure, dump the tree and return
+   SVN_ERR_TEST_FAILED. */
+static svn_error_t *
+random_range_index_test(apr_pool_t *pool)
+{
+  apr_uint32_t seed, maxlen;
+  apr_size_t bytes_range;
+  int i, iterations, dump_files, print_windows;
+  const char *random_bytes;
+  range_index_t *ndx;
+  int tgt_cp = 0, src_cp = 0;
+
+  /* Initialize parameters and print out the seed in case we dump core
+     or something. */
+  init_params(&seed, &maxlen, &iterations, &dump_files, &print_windows,
+              &random_bytes, &bytes_range, pool);
+
+  /* ### This test is expected to fail randomly at the moment, so don't
+     enable it by default. --xbc */
+
+  ndx = create_range_index(pool);
+  for (i = 1; i <= iterations; ++i)
+    {
+      /* Pick a small random range; the insertion index doubles as a
+         unique per-iteration target offset. */
+      apr_size_t offset = svn_test_rand(&seed) % 47;
+      apr_size_t limit = offset + svn_test_rand(&seed) % 16 + 1;
+      range_list_node_t *list, *r;
+      apr_size_t ret;
+      const char *msg2;
+
+      printf("%3d: Inserting [%3"APR_SIZE_T_FMT",%3"APR_SIZE_T_FMT") ...",
+             i, offset, limit);
+      splay_range_index(offset, ndx);
+      list = build_range_list(offset, limit, ndx);
+      insert_range(offset, limit, i, ndx);
+      /* Reset the walker's state, then check the whole tree. */
+      prev_prev_node = prev_node = NULL;
+      ret = walk_range_index(ndx->tree, &msg2);
+      if (ret == 0)
+        {
+          /* Consistent: report where each piece of the range came from
+             (S = source, T = target), counting as we go. */
+          for (r = list; r; r = r->next)
+            printf(" %s[%3"APR_SIZE_T_FMT",%3"APR_SIZE_T_FMT")",
+                   (r->kind == range_from_source ?
+                    (++src_cp, "S") : (++tgt_cp, "T")),
+                   r->offset, r->limit);
+          free_range_list(list, ndx);
+          printf(" OK\n");
+        }
+      else
+        {
+          printf(" Ooops!\n");
+          print_range_index(ndx->tree, msg2, ret);
+          check_copy_count(src_cp, tgt_cp);
+          return svn_error_create(SVN_ERR_TEST_FAILED, NULL, "insert_range");
+        }
+    }
+
+  printf("Final tree state:\n");
+  print_range_index(ndx->tree, "", iterations + 1);
+  check_copy_count(src_cp, tgt_cp);
+  return SVN_NO_ERROR;
+}
+
+
+#endif /* SVN_RANGE_INDEX_TEST_H */
diff --git a/subversion/tests/libsvn_delta/svndiff-stream-test.c b/subversion/tests/libsvn_delta/svndiff-stream-test.c
new file mode 100644
index 0000000..402df51
--- /dev/null
+++ b/subversion/tests/libsvn_delta/svndiff-stream-test.c
@@ -0,0 +1,79 @@
+/*
+ * svndiff-stream-test.c: test svndiff streams
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "svn_delta.h"
+#include "../svn_test.h"
+
+/* Window producer that immediately signals end-of-stream by yielding a
+   NULL window (i.e. an empty delta).  BATON and POOL are unused. */
+static svn_error_t *
+null_window(svn_txdelta_window_t **window, void *baton, apr_pool_t *pool)
+{
+  *window = NULL;
+  return SVN_NO_ERROR;
+}
+
+/* Implements svn_test_driver_t.
+   Encode an empty delta (a txdelta stream producing no windows) as
+   svndiff version 0, read it back in very small chunks, and verify the
+   header bytes, the EOF, and that reading past EOF stays at EOF. */
+static svn_error_t *
+test_txdelta_to_svndiff_stream_small_reads(apr_pool_t *pool)
+{
+  svn_txdelta_stream_t *txstream;
+  svn_stream_t *svndiff_stream;
+  char buf[64];
+  apr_size_t len;
+
+  /* A delta stream with no windows at all. */
+  txstream = svn_txdelta_stream_create(NULL, null_window, NULL, pool);
+  /* Encode it as svndiff format version 0, no compression. */
+  svndiff_stream = svn_txdelta_to_svndiff_stream(txstream, 0, 0, pool);
+
+  /* The stream must start with the three magic bytes "SVN"... */
+  len = 3;
+  SVN_ERR(svn_stream_read_full(svndiff_stream, buf, &len));
+  SVN_TEST_INT_ASSERT((int) len, 3);
+  SVN_TEST_ASSERT(memcmp(buf, "SVN", len) == 0);
+
+  /* ...followed by the one-byte format version (0 here). */
+  len = 1;
+  SVN_ERR(svn_stream_read_full(svndiff_stream, buf, &len));
+  SVN_TEST_INT_ASSERT((int) len, 1);
+  SVN_TEST_ASSERT(memcmp(buf, "\x00", len) == 0);
+
+  /* Test receiving the EOF. */
+  len = sizeof(buf);
+  SVN_ERR(svn_stream_read_full(svndiff_stream, buf, &len));
+  SVN_TEST_INT_ASSERT((int) len, 0);
+
+  /* Test reading after the EOF. */
+  len = sizeof(buf);
+  SVN_ERR(svn_stream_read_full(svndiff_stream, buf, &len));
+  SVN_TEST_INT_ASSERT((int) len, 0);
+
+  return SVN_NO_ERROR;
+}
+
+static int max_threads = -1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+{
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_txdelta_to_svndiff_stream_small_reads,
+ "test svn_txdelta_to_svndiff_stream() small reads"),
+ SVN_TEST_NULL
+};
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_delta/svndiff-test.c b/subversion/tests/libsvn_delta/svndiff-test.c
new file mode 100644
index 0000000..161c36a
--- /dev/null
+++ b/subversion/tests/libsvn_delta/svndiff-test.c
@@ -0,0 +1,108 @@
+/* svndiff-test.c -- test driver for text deltas
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <ctype.h>
+#include <stdio.h>
+#include <string.h>
+#include <apr_general.h>
+
+#include "../svn_test.h"
+
+#include "svn_base64.h"
+#include "svn_quoprint.h"
+#include "svn_pools.h"
+#include "svn_delta.h"
+#include "svn_error.h"
+
+
+/* Command-line driver: compute the delta between a source and a target
+   file and write its encoded svndiff representation to stdout
+   (base64-encoded, or quoted-printable when built with
+   QUOPRINT_SVNDIFFS).  Usage: svndiff-test source target [version]. */
+int
+main(int argc, char **argv)
+{
+  svn_error_t *err;
+  apr_status_t apr_err;
+  apr_file_t *source_file;
+  apr_file_t *target_file;
+  svn_stream_t *stdout_stream;
+  svn_txdelta_stream_t *txdelta_stream;
+  svn_txdelta_window_handler_t svndiff_handler;
+  svn_stream_t *encoder;
+  void *svndiff_baton;
+  apr_pool_t *pool;
+  int version = 0;  /* svndiff format version to emit; 0 unless given */
+
+  if (argc < 3)
+    {
+      printf("usage: %s source target [version]\n", argv[0]);
+      exit(0);
+    }
+
+  apr_initialize();
+  pool = svn_pool_create(NULL);
+  apr_err = apr_file_open(&source_file, argv[1], (APR_READ | APR_BINARY),
+                          APR_OS_DEFAULT, pool);
+  if (apr_err)
+    {
+      fprintf(stderr, "unable to open \"%s\" for reading\n", argv[1]);
+      exit(1);
+    }
+
+  apr_err = apr_file_open(&target_file, argv[2], (APR_READ | APR_BINARY),
+                          APR_OS_DEFAULT, pool);
+  if (apr_err)
+    {
+      fprintf(stderr, "unable to open \"%s\" for reading\n", argv[2]);
+      exit(1);
+    }
+  /* Optional third argument selects the svndiff format version. */
+  if (argc == 4)
+    version = atoi(argv[3]);
+
+  svn_txdelta2(&txdelta_stream,
+               svn_stream_from_aprfile(source_file, pool),
+               svn_stream_from_aprfile(target_file, pool),
+               FALSE,
+               pool);
+
+  err = svn_stream_for_stdout(&stdout_stream, pool);
+  if (err)
+    svn_handle_error2(err, stdout, TRUE, "svndiff-test: ");
+
+#ifdef QUOPRINT_SVNDIFFS
+  encoder = svn_quoprint_encode(stdout_stream, pool);
+#else
+  encoder = svn_base64_encode2(stdout_stream, TRUE, pool);
+#endif
+  /* use maximum compression level */
+  svn_txdelta_to_svndiff3(&svndiff_handler, &svndiff_baton,
+                          encoder, version, 9, pool);
+  err = svn_txdelta_send_txstream(txdelta_stream,
+                                  svndiff_handler,
+                                  svndiff_baton,
+                                  pool);
+  if (err)
+    svn_handle_error2(err, stdout, TRUE, "svndiff-test: ");
+
+  apr_file_close(source_file);
+  apr_file_close(target_file);
+  svn_pool_destroy(pool);
+  apr_terminate();
+  exit(0);
+}
diff --git a/subversion/tests/libsvn_delta/vdelta-test.c b/subversion/tests/libsvn_delta/vdelta-test.c
new file mode 100644
index 0000000..f214a0e
--- /dev/null
+++ b/subversion/tests/libsvn_delta/vdelta-test.c
@@ -0,0 +1,269 @@
+/* vdelta-test.c -- test driver for text deltas
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#define APR_WANT_STDIO
+#include <apr_want.h>
+
+#include <apr_general.h>
+#include <assert.h>
+
+#include "../svn_test.h"
+
+#include "svn_ctype.h"
+#include "svn_delta.h"
+#include "svn_error.h"
+#include "svn_pools.h"
+
+#include "../../libsvn_delta/delta.h"
+#include "delta-window-test.h"
+
+/* Print WINDOW (tagged TAG) to STREAM and return the number of bytes of
+   output produced; in QUIET mode skip the printing and return only the
+   window's size estimate. */
+static apr_off_t
+print_delta_window(const svn_txdelta_window_t *window,
+                   const char *tag, int quiet, FILE *stream)
+{
+  return quiet
+    ? delta_window_size_estimate(window)
+    : delta_window_print(window, tag, stream);
+}
+
+
+/* Compute the delta turning SOURCE_FILE into TARGET_FILE, print every
+   window (tagged TAG, or just size-estimate it when QUIET) to STREAM,
+   and return the window count in *COUNT and the accumulated output
+   length in *LEN.  Ends with a one-line "TAG: (LENGTH ... +N)" summary.
+   Fatal errors are reported via svn_handle_error2 and abort the
+   process. */
+static void
+do_one_diff(apr_file_t *source_file, apr_file_t *target_file,
+            int *count, apr_off_t *len,
+            int quiet, apr_pool_t *pool,
+            const char *tag, FILE* stream)
+{
+  svn_txdelta_stream_t *delta_stream = NULL;
+  svn_txdelta_window_t *delta_window = NULL;
+  apr_pool_t *fpool = svn_pool_create(pool);  /* file streams */
+  apr_pool_t *wpool = svn_pool_create(pool);  /* per-window scratch */
+
+  *count = 0;
+  *len = 0;
+  svn_txdelta2(&delta_stream,
+               svn_stream_from_aprfile(source_file, fpool),
+               svn_stream_from_aprfile(target_file, fpool),
+               FALSE,
+               fpool);
+  do {
+    svn_error_t *err;
+    err = svn_txdelta_next_window(&delta_window, delta_stream, wpool);
+    if (err)
+      svn_handle_error2(err, stderr, TRUE, "vdelta-test: ");
+    if (delta_window != NULL)
+      {
+        *len += print_delta_window(delta_window, tag, quiet, stream);
+        svn_pool_clear(wpool);
+        ++*count;
+      }
+  } while (delta_window != NULL);
+  fprintf(stream, "%s: (LENGTH %" APR_OFF_T_FMT " +%d)\n", tag, *len, *count);
+
+  svn_pool_destroy(fpool);
+  svn_pool_destroy(wpool);
+}
+
+
+/* Run one test pass.  Always produce and measure delta A
+   (SOURCE_FILE_A -> TARGET_FILE_A).  If SOURCE_FILE_B is non-NULL, also
+   produce delta B (SOURCE_FILE_B -> TARGET_FILE_B), then compose the A
+   and B window streams pairwise into an "AB" delta and report its size
+   so the combiner's efficiency can be compared against the individual
+   deltas. */
+static void
+do_one_test_cycle(apr_file_t *source_file_A, apr_file_t *target_file_A,
+                  apr_file_t *source_file_B, apr_file_t *target_file_B,
+                  int quiet, apr_pool_t *pool)
+{
+  int count_A = 0;
+  apr_off_t len_A = 0;
+
+  int count_B = 0;
+  apr_off_t len_B = 0;
+
+  do_one_diff(source_file_A, target_file_A,
+              &count_A, &len_A, quiet, pool, "A ", stdout);
+
+  if (source_file_B)
+    {
+      /* fpool and wpool are subpools of POOL; the caller's pool cleanup
+         releases them. */
+      apr_pool_t *fpool = svn_pool_create(pool);
+      apr_pool_t *wpool = svn_pool_create(pool);
+      svn_txdelta_stream_t *stream_A = NULL;
+      svn_txdelta_stream_t *stream_B = NULL;
+      svn_txdelta_window_t *window_A = NULL;
+      svn_txdelta_window_t *window_B = NULL;
+      svn_txdelta_window_t *window_AB = NULL;
+      int count_AB = 0;
+      apr_off_t len_AB = 0;
+
+      putc('\n', stdout);
+      do_one_diff(source_file_B, target_file_B,
+                  &count_B, &len_B, quiet, pool, "B ", stdout);
+
+      putc('\n', stdout);
+
+      /* do_one_diff() consumed the files; rewind them before rereading. */
+      {
+        apr_off_t offset = 0;
+
+        apr_file_seek(source_file_A, APR_SET, &offset);
+        apr_file_seek(target_file_A, APR_SET, &offset);
+        apr_file_seek(source_file_B, APR_SET, &offset);
+        apr_file_seek(target_file_B, APR_SET, &offset);
+      }
+
+      svn_txdelta2(&stream_A,
+                   svn_stream_from_aprfile(source_file_A, fpool),
+                   svn_stream_from_aprfile(target_file_A, fpool),
+                   FALSE,
+                   fpool);
+      svn_txdelta2(&stream_B,
+                   svn_stream_from_aprfile(source_file_B, fpool),
+                   svn_stream_from_aprfile(target_file_B, fpool),
+                   FALSE,
+                   fpool);
+
+      for (count_AB = 0; count_AB < count_B; ++count_AB)
+        {
+          svn_error_t *err;
+
+          err = svn_txdelta_next_window(&window_A, stream_A, wpool);
+          if (err)
+            svn_handle_error2(err, stderr, TRUE, "vdelta-test: ");
+          err = svn_txdelta_next_window(&window_B, stream_B, wpool);
+          if (err)
+            svn_handle_error2(err, stderr, TRUE, "vdelta-test: ");
+
+          /* Note: It's not possible that window_B is null, we already
+             counted the number of windows in the second delta. */
+          assert(window_A != NULL || window_B->src_ops == 0);
+          if (window_B->src_ops == 0)
+            {
+              /* B copies nothing from its source, so it already is the
+                 composed window; just drop its source view. */
+              window_AB = window_B;
+              window_AB->sview_len = 0;
+            }
+          else
+            window_AB = svn_txdelta_compose_windows(window_A, window_B,
+                                                    wpool);
+          len_AB += print_delta_window(window_AB, "AB", quiet, stdout);
+          svn_pool_clear(wpool);
+        }
+
+      fprintf(stdout, "AB: (LENGTH %" APR_OFF_T_FMT " +%d)\n",
+              len_AB, count_AB);
+    }
+}
+
+
+/* Open PATH for binary reading, allocating the handle in POOL.  On
+   failure, print a diagnostic and terminate the process. */
+static apr_file_t *
+open_binary_read(const char *path, apr_pool_t *pool)
+{
+  apr_file_t *fp;
+  const apr_status_t status = apr_file_open(&fp, path,
+                                            (APR_READ | APR_BINARY),
+                                            APR_OS_DEFAULT, pool);
+
+  if (status)
+    {
+      fprintf(stderr, "unable to open \"%s\" for reading\n", path);
+      exit(1);
+    }
+
+  return fp;
+}
+
+
+/* Command-line driver.  Diff one, two, or three files (the three-file
+   form exercises window composition: argv[2] is both A's target and
+   B's source), repeating the whole cycle -<repeat> times.
+   Usage: vdelta-test [-q] [-<repeat>] <target>
+      or: vdelta-test [-q] [-<repeat>] <source> <target>
+      or: vdelta-test [-q] [-<repeat>] <source> <intermediate> <target> */
+int
+main(int argc, char **argv)
+{
+  apr_file_t *source_file_A = NULL;
+  apr_file_t *target_file_A = NULL;
+
+  apr_file_t *source_file_B = NULL;
+  apr_file_t *target_file_B = NULL;
+
+  apr_pool_t *pool;
+  int quiet = 0;
+  int repeat = 1;
+
+  /* Parse leading options: -q (quiet) and -<number> (repeat count). */
+  while (argc > 1)
+    {
+      const char *const arg = argv[1];
+      if (arg[0] != '-')
+        break;
+
+      if (arg[1] == 'q')
+        quiet = 1;
+      else if (svn_ctype_isdigit(arg[1]))
+        repeat = atoi(arg + 1);
+      else
+        break;
+      --argc; ++argv;
+    }
+
+  apr_initialize();
+  pool = svn_pool_create(NULL);
+
+  if (argc == 2)
+    {
+      /* Delta from an empty source to a single target. */
+      target_file_A = open_binary_read(argv[1], pool);
+    }
+  else if (argc == 3)
+    {
+      source_file_A = open_binary_read(argv[1], pool);
+      target_file_A = open_binary_read(argv[2], pool);
+    }
+  else if (argc == 4)
+    {
+      /* The intermediate file (argv[2]) is A's target and B's source. */
+      source_file_A = open_binary_read(argv[1], pool);
+      target_file_A = open_binary_read(argv[2], pool);
+      source_file_B = open_binary_read(argv[2], pool);
+      target_file_B = open_binary_read(argv[3], pool);
+    }
+  else
+    {
+      fprintf(stderr,
+              "Usage: vdelta-test [-q] [-<repeat>] <target>\n"
+              "   or: vdelta-test [-q] [-<repeat>] <source> <target>\n"
+              "   or: vdelta-test [-q] [-<repeat>] "
+              "<source> <intermediate> <target>\n");
+      exit(1);
+    }
+
+  while (0 < repeat--)
+    {
+      apr_off_t offset = 0;
+
+      do_one_test_cycle(source_file_A, target_file_A,
+                        source_file_B, target_file_B,
+                        quiet, pool);
+
+      /* Rewind everything so the next iteration sees identical input. */
+      if (source_file_A) apr_file_seek(source_file_A, APR_SET, &offset);
+      if (target_file_A) apr_file_seek(target_file_A, APR_SET, &offset);
+      if (source_file_B) apr_file_seek(source_file_B, APR_SET, &offset);
+      if (target_file_B) apr_file_seek(target_file_B, APR_SET, &offset);
+    }
+
+  /* Close every file we opened.  (Fixed: the last line used to close
+     source_file_B a second time and leak target_file_B.) */
+  if (source_file_A) apr_file_close(source_file_A);
+  if (target_file_A) apr_file_close(target_file_A);
+  if (source_file_B) apr_file_close(source_file_B);
+  if (target_file_B) apr_file_close(target_file_B);
+
+  svn_pool_destroy(pool);
+  apr_terminate();
+  exit(0);
+}
diff --git a/subversion/tests/libsvn_delta/vdelta_1.txt b/subversion/tests/libsvn_delta/vdelta_1.txt
new file mode 100644
index 0000000..40a0e16
--- /dev/null
+++ b/subversion/tests/libsvn_delta/vdelta_1.txt
@@ -0,0 +1 @@
+abcdxabcdabcdyabcdabcdabcdabcdabcdabcdz \ No newline at end of file
diff --git a/subversion/tests/libsvn_delta/window-test.c b/subversion/tests/libsvn_delta/window-test.c
new file mode 100644
index 0000000..28f4609
--- /dev/null
+++ b/subversion/tests/libsvn_delta/window-test.c
@@ -0,0 +1,113 @@
+/*
+ * window-test.c: Test delta window generation
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <apr_pools.h>
+
+#include "../svn_test.h"
+
+#include "svn_types.h"
+#include "svn_error.h"
+#include "svn_delta.h"
+
+#include "private/svn_subr_private.h"
+
+/* Implements svn_test_driver_t.
+   Build a ~109000-byte source made of identical 109-byte blocks and a
+   target that changes one byte in every block, drive a txdelta stream
+   over them, and verify that the MD5 digest the stream accumulated for
+   the target matches an independently computed checksum. */
+static svn_error_t *
+stream_window_test(apr_pool_t *pool)
+{
+  /* Note: put these in data segment, not the stack */
+  static char source[109001];
+  static char target[109001];
+  int i;
+  char *p = &source[9];
+  svn_checksum_t *expected;
+  svn_checksum_t *actual;
+  svn_string_t source_str;
+  svn_string_t target_str;
+  svn_stream_t *source_stream;
+  svn_stream_t *target_stream;
+  svn_txdelta_stream_t *txstream;
+
+  /* Build one 109-byte block ("a\nb\nc\nd\ne" plus 100 newlines), then
+     replicate it 999 times to fill the source buffer. */
+  memcpy(source, "a\nb\nc\nd\ne", 9);
+  for (i = 100; i--; )
+    *p++ = '\n';
+  for (i = 999; i--; p += 109)
+    memcpy(p, source, 109);
+  source[109000] = '\0';
+
+  /* The target differs from the source by one byte in every block. */
+  memcpy(target, source, 109001);
+  for (i = 1000; i--; )
+    target[i*109 + 4] = 'X';
+
+  SVN_ERR(svn_checksum(&expected, svn_checksum_md5, target, 109000, pool));
+  /* f6fd44565e14c6e44b35292719deb77e */
+  printf("expected: %s\n", svn_checksum_to_cstring(expected, pool));
+
+  source_str.data = source;
+  source_str.len = 109000;
+  source_stream = svn_stream_from_string(&source_str, pool);
+
+  target_str.data = target;
+  target_str.len = 109000;
+  target_stream = svn_stream_from_string(&target_str, pool);
+
+  /* TRUE: ask the delta stream to compute the target's checksum. */
+  svn_txdelta2(&txstream, source_stream, target_stream, TRUE, pool);
+
+  /* Drain the stream; the digest is only complete after the last
+     window. */
+  while (1)
+    {
+      svn_txdelta_window_t *window;
+
+      SVN_ERR(svn_txdelta_next_window(&window, txstream, pool));
+      if (window == NULL)
+        break;
+
+      /* ### examine the window */
+    }
+
+  actual = svn_checksum__from_digest_md5(svn_txdelta_md5_digest(txstream),
+                                         pool);
+  printf(" actual: %s\n", svn_checksum_to_cstring(actual, pool));
+
+  if (!svn_checksum_match(expected, actual))
+    {
+      return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+                              "Checksums did not match.");
+    }
+
+  return SVN_NO_ERROR;
+}
+
+
+
+/* The test table. */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(stream_window_test,
+ "txdelta stream and windows test"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_diff/diff-diff3-test.c b/subversion/tests/libsvn_diff/diff-diff3-test.c
new file mode 100644
index 0000000..27e4a33
--- /dev/null
+++ b/subversion/tests/libsvn_diff/diff-diff3-test.c
@@ -0,0 +1,3115 @@
+/*
+ * Incomplete regression tests for the diff/diff3 library.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+
+#include "../svn_test.h"
+
+#include "svn_diff.h"
+#include "svn_pools.h"
+#include "svn_utf.h"
+
+/* Used to terminate lines in large multi-line string literals. */
+#define NL APR_EOL_STR
+
+/* Random number seed. Yes, it's global, just pretend you can't see it. */
+static apr_uint32_t diff_diff3_seed;
+
+/* Return the value of the global random seed, initializing it from the
+   current time on the first call. */
+static apr_uint32_t
+seed_val(void)
+{
+  static svn_boolean_t initialized = FALSE;
+
+  if (!initialized)
+    {
+      diff_diff3_seed = (apr_uint32_t) apr_time_now();
+      initialized = TRUE;
+    }
+
+  return diff_diff3_seed;
+}
+
+/* Return a random number N such that MIN_VAL <= N <= MAX_VAL, scaling
+   the 32-bit random value onto the requested span in 64-bit
+   arithmetic. */
+static apr_uint32_t
+range_rand(apr_uint32_t min_val,
+           apr_uint32_t max_val)
+{
+  const apr_uint64_t span = max_val - min_val;
+  const apr_uint64_t scaled =
+    span * svn_test_rand(&diff_diff3_seed) / 0xffffffff;
+
+  return min_val + (apr_uint32_t) scaled;
+}
+
+/* Make a file that is between MIN_LINES and MAX_LINES lines long, with at
+   most VAR_LINES distinct lines. If BLOCK_LINES is non-zero then every
+   other block of BLOCK_LINES lines will be identical, if BLOCK_LINES is
+   zero all lines will have contents chosen at random. If TRAILING_NEWLINE
+   is TRUE then the file will have a trailing newline, if not then it
+   won't. */
+static svn_error_t *
+make_random_file(const char *filename,
+                 int min_lines,
+                 int max_lines,
+                 int var_lines,
+                 int block_lines,
+                 svn_boolean_t trailing_newline,
+                 apr_pool_t *pool)
+{
+  apr_file_t *file;
+  int num_lines;
+
+  num_lines = range_rand(min_lines, max_lines);
+
+  SVN_ERR(svn_io_file_open(&file, filename,
+                           APR_WRITE | APR_CREATE | APR_TRUNCATE,
+                           APR_OS_DEFAULT,
+                           pool));
+
+  while (num_lines--)
+    {
+      int x;
+      /* In every other BLOCK_LINES-sized block use the fixed line
+         "line 0 ...", otherwise pick a random variant. */
+      if (! (block_lines && (num_lines / block_lines % 2)))
+        x = range_rand(1, var_lines);
+      else
+        x = 0;
+      /* Omit the newline only on the very last line when requested. */
+      if (num_lines || trailing_newline)
+        apr_file_printf(file, "line %d line %d line %d\n", x, x, x);
+      else
+        apr_file_printf(file, "line %d line %d line %d", x, x, x);
+    }
+
+  SVN_ERR(svn_io_file_close(file, pool));
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Create (or truncate) a file called FILENAME so that it contains
+   exactly CONTENTS. */
+static svn_error_t *
+make_file(const char *filename,
+          const char *contents,
+          apr_pool_t *pool)
+{
+  apr_file_t *fh;
+  apr_status_t write_status;
+
+  SVN_ERR(svn_io_file_open(&fh, filename,
+                           APR_WRITE | APR_CREATE | APR_TRUNCATE,
+                           APR_OS_DEFAULT, pool));
+
+  write_status = apr_file_write_full(fh, contents, strlen(contents), NULL);
+  if (write_status)
+    return svn_error_createf(write_status, NULL,
+                             "failed to write '%s'", filename);
+
+  SVN_ERR(svn_io_file_close(fh, pool));
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Create three files called FILENAME1, FILENAME2 and FILENAME3
+   containing CONTENTS1, CONTENTS2 and CONTENTS3 respectively. Run a
+   three way merge to merge the difference between CONTENTS1 and
+   CONTENTS2 into CONTENTS3, using OPTIONS, and verify that it results
+   in EXPECTED. The files FILENAME1, FILENAME2 and FILENAME3 will be
+   deleted if the merge is successful, and preserved otherwise. If
+   the merge fails the merge output will be in a file called
+   "merge-FILENAME1-FILENAME2-FILENAME3". The conflict style STYLE is
+   used. */
+static svn_error_t *
+three_way_merge(const char *base_filename1,
+                const char *base_filename2,
+                const char *base_filename3,
+                const char *contents1,
+                const char *contents2,
+                const char *contents3,
+                const char *expected,
+                const svn_diff_file_options_t *options,
+                svn_diff_conflict_display_style_t style,
+                apr_pool_t *pool)
+{
+  svn_diff_t *diff;
+  apr_file_t *output;
+  svn_stream_t *ostream;
+  svn_stringbuf_t *actual;
+  char *merge_name = apr_psprintf(
+    pool, "merge-%s-%s-%s", base_filename1, base_filename2, base_filename3);
+
+  const char *filename1 = svn_test_data_path(base_filename1, pool);
+  const char *filename2 = svn_test_data_path(base_filename2, pool);
+  const char *filename3 = svn_test_data_path(base_filename3, pool);
+
+  /* We have an EXPECTED string we can match, because we don't support
+     any other combinations (yet) than the ones above. */
+  svn_string_t *original = svn_string_create(contents1, pool);
+  svn_string_t *modified = svn_string_create(contents2, pool);
+  svn_string_t *latest = svn_string_create(contents3, pool);
+
+  options = options ? options : svn_diff_file_options_create(pool);
+
+  /* First pass: run the diff3 and merge entirely in memory. */
+  SVN_ERR(svn_diff_mem_string_diff3(&diff,
+                                    original, modified, latest, options,
+                                    pool));
+
+  actual = svn_stringbuf_create_empty(pool);
+  ostream = svn_stream_from_stringbuf(actual, pool);
+
+  SVN_ERR(svn_diff_mem_string_output_merge3
+          (ostream, diff, original, modified, latest,
+           apr_psprintf(pool, "||||||| %s", base_filename1),
+           apr_psprintf(pool, "<<<<<<< %s", base_filename2),
+           apr_psprintf(pool, ">>>>>>> %s", base_filename3),
+           NULL, /* separator */
+           style,
+           NULL, NULL, /* cancel */
+           pool));
+
+  SVN_ERR(svn_stream_close(ostream));
+  if (strcmp(actual->data, expected) != 0)
+    return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                             "Failed mem-diff, expected and actual "
+                             "outputs differ.\nEXPECTED:\n%s\n"
+                             "ACTUAL:\n%s\n", expected, actual->data);
+
+  /* Second pass: repeat the merge through the file-based code path. */
+  SVN_ERR(make_file(filename1, contents1, pool));
+  SVN_ERR(make_file(filename2, contents2, pool));
+  SVN_ERR(make_file(filename3, contents3, pool));
+
+  SVN_ERR(svn_diff_file_diff3_2(&diff, filename1, filename2, filename3,
+                                options, pool));
+  SVN_ERR(svn_io_file_open(&output, merge_name,
+                           APR_WRITE | APR_CREATE | APR_TRUNCATE,
+                           APR_OS_DEFAULT, pool));
+
+  ostream = svn_stream_from_aprfile2(output, FALSE, pool);
+  SVN_ERR(svn_diff_file_output_merge3(
+            ostream, diff,
+            filename1, filename2, filename3,
+            apr_psprintf(pool, "||||||| %s", base_filename1),
+            apr_psprintf(pool, "<<<<<<< %s", base_filename2),
+            apr_psprintf(pool, ">>>>>>> %s", base_filename3),
+            NULL, /* separator */
+            style,
+            NULL, NULL, /* cancel */
+            pool));
+  SVN_ERR(svn_stream_close(ostream));
+  SVN_ERR(svn_stringbuf_from_file2(&actual, merge_name, pool));
+  if (strcmp(actual->data, expected))
+    return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                             "failed merging diff '%s' to '%s' into '%s'",
+                             base_filename1, base_filename2, base_filename3);
+
+  /* Success: remove the temporary inputs (some may share a name) and
+     the merge output. */
+  SVN_ERR(svn_io_remove_file2(filename1, TRUE, pool));
+  if (strcmp(filename1, filename2))
+    SVN_ERR(svn_io_remove_file2(filename2, TRUE, pool));
+  if (strcmp(filename1, filename3) && strcmp(filename2, filename3))
+    SVN_ERR(svn_io_remove_file2(filename3, TRUE, pool));
+  SVN_ERR(svn_io_remove_file2(merge_name, TRUE, pool));
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Create two files called FILENAME1 and FILENAME2 containing
+ CONTENTS1 and CONTENTS2 respectively. Run a two way diff between
+ CONTENTS1 and CONTENTS2, using OPTIONS, and verify that it results
+ in EXPECTED. Then run the trivial merges to update CONTENTS1 to
+ CONTENTS2 and CONTENTS2 to CONTENTS1. The files FILENAME1,
+ FILENAME2 and be deleted if the diff and merges are successful, and
+ preserved otherwise. If the diff fails the diff output will be in
+ a file called "diff-FILENAME1-FILENAME2". */
+static svn_error_t *
+two_way_diff(const char *base_filename1,
+ const char *base_filename2,
+ const char *contents1,
+ const char *contents2,
+ const char *expected,
+ const svn_diff_file_options_t *options,
+ apr_pool_t *pool)
+{
+ svn_diff_t *diff;
+ apr_file_t *output;
+ svn_stream_t *ostream;
+ svn_stringbuf_t *actual;
+ char *diff_name = (char *)apr_pstrdup(
+ pool, svn_test_data_path(
+ apr_psprintf(pool, "diff-%s-%s", base_filename1, base_filename2),
+ pool));
+
+ const char *filename1 = svn_test_data_path(base_filename1, pool);
+ const char *filename2 = svn_test_data_path(base_filename2, pool);
+
+ /* Some of the tests have lots of lines, although not much data as
+ the lines are short, and the in-memory diffs allocate a lot of
+ memory. Since we are doing multiple diff in a single test we use
+ a subpool to reuse that memory. */
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ /* We have an EXPECTED string we can match, because we don't support
+ any other combinations (yet) than the ones above. */
+ svn_string_t *original = svn_string_create(contents1, pool);
+ svn_string_t *modified = svn_string_create(contents2, pool);
+
+ options = options ? options : svn_diff_file_options_create(pool);
+
+ SVN_ERR(svn_diff_mem_string_diff(&diff, original, modified, options,
+ subpool));
+
+ actual = svn_stringbuf_create_empty(pool);
+ ostream = svn_stream_from_stringbuf(actual, pool);
+
+ SVN_ERR(svn_diff_mem_string_output_unified(ostream, diff,
+ base_filename1, base_filename2,
+ SVN_APR_LOCALE_CHARSET,
+ original, modified, subpool));
+ svn_pool_clear(subpool);
+ SVN_ERR(svn_stream_close(ostream));
+ if (strcmp(actual->data, expected) != 0)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Failed mem-diff, expected and actual "
+ "outputs differ.\nEXPECTED:\n%s\n"
+ "ACTUAL:\n%s\n", expected, actual->data);
+
+ SVN_ERR(make_file(filename1, contents1, pool));
+ SVN_ERR(make_file(filename2, contents2, pool));
+
+ /* Check that two-way diff between contents1 and contents2 produces
+ expected output. */
+ SVN_ERR(svn_diff_file_diff_2(&diff, filename1, filename2, options, pool));
+
+ SVN_ERR(svn_io_file_open(&output, diff_name,
+ APR_WRITE | APR_CREATE | APR_TRUNCATE,
+ APR_OS_DEFAULT, pool));
+
+ ostream = svn_stream_from_aprfile2(output, FALSE, pool);
+ SVN_ERR(svn_diff_file_output_unified2(ostream, diff,
+ filename1, filename2,
+ base_filename1, base_filename2,
+ SVN_APR_LOCALE_CHARSET, pool));
+ SVN_ERR(svn_stream_close(ostream));
+
+ SVN_ERR(svn_stringbuf_from_file2(&actual, diff_name, pool));
+ if (strcmp(actual->data, expected))
+ {
+ /*svn_stringbuf_t *dump_actual;
+ svn_stream_t *dump_ostream;
+ dump_actual = svn_stringbuf_create_empty(pool);
+ dump_ostream = svn_stream_from_stringbuf(dump_actual, pool);
+
+ SVN_ERR(svn_diff_mem_string_output_unified(dump_ostream, diff,
+ "expected", "actual",
+ SVN_APR_LOCALE_CHARSET,
+ svn_string_create(expected, pool),
+ svn_string_create(actual->data, pool),
+ pool));
+ SVN_ERR(svn_stream_close(dump_ostream));
+
+ SVN_DBG(("%s\n", dump_actual->data));
+
+ SVN_ERR(make_file("memory", expected, pool));*/
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "failed comparing '%s' and '%s'"
+ " (memory and file results are different)",
+ base_filename1, base_filename2);
+ }
+
+ /* May as well do the trivial merges while we are here */
+ SVN_ERR(three_way_merge(base_filename1, base_filename2, base_filename1,
+ contents1, contents2, contents1, contents2, NULL,
+ svn_diff_conflict_display_modified_latest,
+ subpool));
+ svn_pool_clear(subpool);
+ SVN_ERR(three_way_merge(base_filename2, base_filename1, base_filename2,
+ contents2, contents1, contents2, contents1, NULL,
+ svn_diff_conflict_display_modified_latest,
+ subpool));
+ svn_pool_destroy(subpool);
+
+ SVN_ERR(svn_io_remove_file2(diff_name, TRUE, pool));
+
+ return SVN_NO_ERROR;
+}
+
+struct random_mod
+{
+ int index; /* Zero based line number */
+ int mod; /* Type of mod: 0, 1, 2 (can be interpreted as you like just
+ do it consistently) */
+};
+
+/* Fill the SELECTED array of length NUM_TO_SELECT with randomly chosen
+ values, ensuring that none of SELECTED.INDEX are duplicates and that all
+ the SELECTED.INDEX values are less than NUM_LINES. Also ensure that for
+ each SELECTED.INDEX the three elements of LINES from SELECTED.INDEX-1 to
+ SELECTED.INDEX+1 are unset. Set all LINES[SELECTED.INDEX]. */
+static void
+select_lines(struct random_mod *selected,
+ int num_to_select,
+ svn_boolean_t *lines,
+ int num_lines)
+{
+ int i;
+ for (i = 0; i < num_to_select; ++i)
+ {
+ int j;
+ for (;;)
+ {
+ j= range_rand(0, num_lines - 1);
+ if (lines[j] /* already selected */
+ ||
+ (j > 0 && lines[j - 1]) /* previous selected */
+ ||
+ (j < num_lines - 1 && lines[j + 1])) /* next selected */
+ continue; /* try again */
+ break; /* got one */
+ }
+ selected[i].index = j;
+ selected[i].mod = range_rand(0, 2);
+ lines[j] = TRUE;
+ }
+}
+
+
+/* Create a file called FILENAME where the contents are obtained by
+ applying the modifications in MOD_LINES, of which there are NUM_MODS, to
+ a theoretical pristine file of length NUM_LINES lines. */
+static svn_error_t *
+make_random_merge_file(const char *filename,
+ int num_lines,
+ struct random_mod *mod_lines,
+ int num_mods,
+ apr_pool_t *pool)
+{
+ apr_file_t *file;
+ int i;
+
+ SVN_ERR(svn_io_file_open(&file, filename,
+ APR_WRITE | APR_CREATE | APR_TRUNCATE,
+ APR_OS_DEFAULT, pool));
+
+ for (i = 0; i < num_lines; ++i)
+ {
+ int j;
+ for (j = 0; j < num_mods; ++j)
+ if (mod_lines[j].index == i)
+ break;
+
+ if (j < num_mods)
+ {
+ switch (mod_lines[j].mod)
+ {
+ case 0:
+ apr_file_printf(file, "replace line %d\n", i);
+ break;
+ case 1:
+ apr_file_printf(file,
+ "added line %d\n"
+ "unmodified line %d\n"
+ "added line %d\n",
+ i, i, i);
+ break;
+ default:
+ ; /* Delete the line */
+ }
+ }
+ else
+ {
+ apr_file_printf(file, "unmodified line %d\n", i);
+ }
+ }
+
+ SVN_ERR(svn_io_file_close(file, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+/* ========================================================================== */
+
+static svn_error_t *
+dump_core(apr_pool_t *pool)
+{
+ SVN_ERR(two_way_diff("foo1", "bar1",
+ "",
+ "",
+ "",
+ NULL, pool));
+
+ SVN_ERR(two_way_diff("foo2", "bar2",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "",
+
+ "--- foo2" NL
+ "+++ bar2" NL
+ "@@ -1,3 +0,0 @@" NL
+ "-Aa\n"
+ "-Bb\n"
+ "-Cc\n",
+ NULL, pool));
+
+ SVN_ERR(two_way_diff("foo3", "bar3",
+ "",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "--- foo3" NL
+ "+++ bar3" NL
+ "@@ -0,0 +1,3 @@" NL
+ "+Aa\n"
+ "+Bb\n"
+ "+Cc\n",
+ NULL, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_two_way_unified(apr_pool_t *pool)
+{
+ svn_diff_file_options_t *diff_opts = svn_diff_file_options_create(pool);
+
+ SVN_ERR(two_way_diff("foo4", "bar4",
+ "Aa\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "--- foo4" NL
+ "+++ bar4" NL
+ "@@ -1 +1,3 @@" NL
+ " Aa\n"
+ "+Bb\n"
+ "+Cc\n",
+ NULL, pool));
+
+ SVN_ERR(two_way_diff("foo4b", "bar4b",
+ "Cc\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "--- foo4b" NL
+ "+++ bar4b" NL
+ "@@ -1 +1,3 @@" NL
+ "+Aa\n"
+ "+Bb\n"
+ " Cc\n",
+ NULL, pool));
+
+ diff_opts->ignore_eol_style = TRUE;
+ SVN_ERR(two_way_diff("foo4c", "bar4c",
+ "Cc\n",
+
+ "Aa\r"
+ "Bb\r"
+ "Cc\r",
+
+ "--- foo4c" NL
+ "+++ bar4c" NL
+ "@@ -1 +1,3 @@" NL
+ "+Aa\r"
+ "+Bb\r"
+ " Cc\n",
+ diff_opts, pool));
+ diff_opts->ignore_eol_style = FALSE;
+
+ SVN_ERR(two_way_diff("foo5", "bar5",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Aa\n",
+
+ "--- foo5" NL
+ "+++ bar5" NL
+ "@@ -1,3 +1 @@" NL
+ " Aa\n"
+ "-Bb\n"
+ "-Cc\n",
+ NULL, pool));
+
+ SVN_ERR(two_way_diff("foo5b", "bar5b",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Cc\n",
+
+ "--- foo5b" NL
+ "+++ bar5b" NL
+ "@@ -1,3 +1 @@" NL
+ "-Aa\n"
+ "-Bb\n"
+ " Cc\n",
+ NULL, pool));
+
+ diff_opts->ignore_eol_style = TRUE;
+ SVN_ERR(two_way_diff("foo5c", "bar5c",
+ "Aa\r\n"
+ "Bb\r\n"
+ "Cc\r\n",
+
+ "Cc\n",
+
+ "--- foo5c" NL
+ "+++ bar5c" NL
+ "@@ -1,3 +1 @@" NL
+ "-Aa\r\n"
+ "-Bb\r\n"
+ " Cc\r\n",
+ diff_opts, pool));
+
+
+ SVN_ERR(two_way_diff("foo5d", "bar5d",
+ "Aa\r\n"
+ "\r\n"
+ "Bb\r\n"
+ "\r\n"
+ "Cc\r\n"
+ "\r\n",
+
+ "Aa\n"
+ "\n"
+ "Bb\n"
+ "\n"
+ "Cc\n"
+ "\n",
+
+ "",
+ diff_opts, pool));
+ diff_opts->ignore_eol_style = FALSE;
+
+ SVN_ERR(two_way_diff("foo6", "bar6",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "",
+ NULL, pool));
+
+ SVN_ERR(two_way_diff("foo6b", "bar6b",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Aa\n"
+ "Xx\n"
+ "Cc\n",
+
+ "--- foo6b" NL
+ "+++ bar6b" NL
+ "@@ -1,3 +1,3 @@" NL
+ " Aa\n"
+ "-Bb\n"
+ "+Xx\n"
+ " Cc\n",
+ NULL, pool));
+
+ SVN_ERR(two_way_diff("foo6c", "bar6c",
+ "Aa\r\n"
+ "Bb\r\n"
+ "Cc\r\n",
+
+ "Aa\r\n"
+ "Xx\r\n"
+ "Cc\r\n",
+
+ "--- foo6c" NL
+ "+++ bar6c" NL
+ "@@ -1,3 +1,3 @@" NL
+ " Aa\r\n"
+ "-Bb\r\n"
+ "+Xx\r\n"
+ " Cc\r\n",
+ NULL, pool));
+
+ SVN_ERR(two_way_diff("foo6d", "bar6d",
+ "Aa\r"
+ "Bb\r"
+ "Cc\r",
+
+ "Aa\r"
+ "Xx\r"
+ "Cc\r",
+
+ "--- foo6d" NL
+ "+++ bar6d" NL
+ "@@ -1,3 +1,3 @@" NL
+ " Aa\r"
+ "-Bb\r"
+ "+Xx\r"
+ " Cc\r",
+ NULL, pool));
+
+ diff_opts->ignore_space = svn_diff_file_ignore_space_change;
+ SVN_ERR(two_way_diff("foo6e", "bar6e",
+ " A a \n"
+ " B b \r"
+ " C c \r\n",
+
+ " A a \n"
+ " B b \r"
+ " C c \r\n",
+
+ "",
+ diff_opts, pool));
+ diff_opts->ignore_space = svn_diff_file_ignore_space_none;
+
+ diff_opts->ignore_space = svn_diff_file_ignore_space_all;
+ SVN_ERR(two_way_diff("foo6f", "bar6f",
+ "Aa\n"
+ "Bb\r"
+ "Cc\r\n",
+
+ " A a \n"
+ " B b \r"
+ " C c \r\n",
+
+ "",
+ diff_opts, pool));
+ diff_opts->ignore_space = svn_diff_file_ignore_space_none;
+
+ diff_opts->ignore_space = svn_diff_file_ignore_space_all;
+ diff_opts->ignore_eol_style = TRUE;
+ SVN_ERR(two_way_diff("foo6f", "bar6f",
+ "Aa\n"
+ "Bb\r"
+ "Cc\r\n",
+
+ " A a \r"
+ " B b \r\n"
+ " C c \n",
+
+ "",
+ diff_opts, pool));
+ diff_opts->ignore_space = svn_diff_file_ignore_space_none;
+ diff_opts->ignore_eol_style = FALSE;
+
+ SVN_ERR(two_way_diff("foo7", "bar7",
+ "Aa\n",
+
+ "Bb\n",
+
+ "--- foo7" NL
+ "+++ bar7" NL
+ "@@ -1 +1 @@" NL
+ "-Aa\n"
+ "+Bb\n",
+ NULL, pool));
+
+ SVN_ERR(two_way_diff("foo7a", "bar7a",
+ "Aa\n"
+ "Cc\n",
+
+ "Bb\n"
+ "Cc\n",
+
+ "--- foo7a" NL
+ "+++ bar7a" NL
+ "@@ -1,2 +1,2 @@" NL
+ "-Aa\n"
+ "+Bb\n"
+ " Cc\n",
+ NULL, pool));
+
+ SVN_ERR(two_way_diff("foo7b", "bar7b",
+ "Aa\r"
+ "Cc\n",
+
+ "Bb\n"
+ "Cc\n",
+
+ "--- foo7b" NL
+ "+++ bar7b" NL
+ "@@ -1,2 +1,2 @@" NL
+ "-Aa\r"
+ "+Bb\n"
+ " Cc\n",
+ NULL, pool));
+
+ SVN_ERR(two_way_diff("foo8", "bar8",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Xx\n"
+ "Yy\n",
+
+ "--- foo8" NL
+ "+++ bar8" NL
+ "@@ -1,3 +1,2 @@" NL
+ "-Aa\n"
+ "-Bb\n"
+ "-Cc\n"
+ "+Xx\n"
+ "+Yy\n",
+ NULL, pool));
+
+ SVN_ERR(two_way_diff("foo9", "bar9",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Bb\n",
+
+ "--- foo9" NL
+ "+++ bar9" NL
+ "@@ -1,3 +1 @@" NL
+ "-Aa\n"
+ " Bb\n"
+ "-Cc\n",
+ NULL, pool));
+
+ SVN_ERR(two_way_diff("foo10", "bar10",
+ "Aa\n"
+ "Bb\n"
+ "Cc",
+
+ "Aa\n"
+ "Xx\n"
+ "Yy\n",
+
+ "--- foo10" NL
+ "+++ bar10" NL
+ "@@ -1,3 +1,3 @@" NL
+ " Aa\n"
+ "-Bb\n"
+ "-Cc" NL
+ "\\ No newline at end of file" NL
+ "+Xx\n"
+ "+Yy\n",
+ NULL, pool));
+
+ SVN_ERR(two_way_diff("foo11", "bar11",
+ "Aa\n"
+ "Xx\n"
+ "Yy\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc",
+
+ "--- foo11" NL
+ "+++ bar11" NL
+ "@@ -1,3 +1,3 @@" NL
+ " Aa\n"
+ "-Xx\n"
+ "-Yy\n"
+ "+Bb\n"
+ "+Cc" NL
+ "\\ No newline at end of file" NL,
+ NULL, pool));
+
+ SVN_ERR(two_way_diff("foo12", "bar12",
+ "Aa\n"
+ "Xx\n"
+ "Yy",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc",
+
+ "--- foo12" NL
+ "+++ bar12" NL
+ "@@ -1,3 +1,3 @@" NL
+ " Aa\n"
+ "-Xx\n"
+ "-Yy" NL
+ "\\ No newline at end of file" NL
+ "+Bb\n"
+ "+Cc" NL
+ "\\ No newline at end of file" NL,
+ NULL, pool));
+
+ SVN_ERR(two_way_diff("foo13", "bar13",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd\n"
+ "Ee\n"
+ "Ff\n"
+ "Gg\n",
+
+ "Xx\n"
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd\n"
+ "Ee\n"
+ "Ff\n"
+ "Gg\n"
+ "Yy\n",
+
+ "--- foo13" NL
+ "+++ bar13" NL
+ "@@ -1,3 +1,4 @@" NL
+ "+Xx\n"
+ " Aa\n"
+ " Bb\n"
+ " Cc\n"
+ "@@ -5,3 +6,4 @@" NL
+ " Ee\n"
+ " Ff\n"
+ " Gg\n"
+ "+Yy\n",
+ NULL, pool));
+
+ SVN_ERR(two_way_diff("foo14", "bar14",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd\n"
+ "Ee\n"
+ "Ff\n"
+ "Gg\n",
+
+ "Bb\n"
+ "Aa\n"
+ "Cc\n"
+ "Dd\n"
+ "Ee\n"
+ "Gg\n"
+ "Ff\n",
+
+ "--- foo14" NL
+ "+++ bar14" NL
+ "@@ -1,7 +1,7 @@" NL
+ "+Bb\n"
+ " Aa\n"
+ "-Bb\n"
+ " Cc\n"
+ " Dd\n"
+ " Ee\n"
+ "+Gg\n"
+ " Ff\n"
+ "-Gg\n",
+ NULL, pool));
+
+ SVN_ERR(two_way_diff("foo16", "bar16",
+ "Aa\n"
+ "\n"
+ "Cc\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "--- foo16" NL
+ "+++ bar16" NL
+ "@@ -1,3 +1,3 @@" NL
+ " Aa\n"
+ "-\n"
+ "+Bb\n"
+ " Cc\n",
+ NULL, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_two_way_unified_suspect(apr_pool_t *pool)
+{
+ SVN_ERR(two_way_diff("foo15a", "bar15a",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd\n"
+ "Ee\n"
+ "Ff\n"
+ "Gg\n"
+ "Hh\n"
+ "Ii\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd\n"
+ "Ff\n"
+ "Gg\n"
+ "Hh\n"
+ "Ii\n",
+
+ "--- foo15a" NL
+ "+++ bar15a" NL
+ "@@ -2,7 +2,6 @@" NL
+ " Bb\n"
+ " Cc\n"
+ " Dd\n"
+ "-Ee\n"
+ " Ff\n"
+ " Gg\n"
+ " Hh\n",
+ NULL, pool));
+
+ SVN_ERR(two_way_diff("foo15b", "bar15b",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd\n"
+ "Ee\n"
+ "Ff\n"
+ "Gg\n"
+ "Hh\n"
+ "Ii\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd\n"
+ "Ee\n"
+ "Xx\n"
+ "Yy\n"
+ "Ff\n"
+ "Gg\n"
+ "Hh\n"
+ "Ii\n",
+
+ "--- foo15b" NL
+ "+++ bar15b" NL
+ "@@ -3,6 +3,8 @@" NL
+ " Cc\n"
+ " Dd\n"
+ " Ee\n"
+ "+Xx\n"
+ "+Yy\n"
+ " Ff\n"
+ " Gg\n"
+ " Hh\n",
+ NULL, pool));
+
+ SVN_ERR(two_way_diff("foo15c", "bar15c",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd\n"
+ "Ee\n"
+ "Ff\n"
+ "Gg\n"
+ "Hh\n"
+ "Ii\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd\n"
+ "Xx\n"
+ "Yy\n"
+ "Ff\n"
+ "Gg\n"
+ "Hh\n"
+ "Ii\n",
+
+ "--- foo15c" NL
+ "+++ bar15c" NL
+ "@@ -2,7 +2,8 @@" NL
+ " Bb\n"
+ " Cc\n"
+ " Dd\n"
+ "-Ee\n"
+ "+Xx\n"
+ "+Yy\n"
+ " Ff\n"
+ " Gg\n"
+ " Hh\n",
+ NULL, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_three_way_merge_no_overlap(apr_pool_t *pool)
+{
+ svn_diff_file_options_t *diff_opts = svn_diff_file_options_create(pool);
+
+ SVN_ERR(three_way_merge("zig1", "zag1", "zog1",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Xx\n"
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Yy\n",
+
+ "Xx\n"
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Yy\n",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ SVN_ERR(three_way_merge("zig1a", "zag1a", "zog1a",
+ "Aa\r\n"
+ "Bb\r\n"
+ "Cc\r\n",
+
+ "Xx\r\n"
+ "Aa\r\n"
+ "Bb\r\n"
+ "Cc\r\n",
+
+ "Aa\r\n"
+ "Bb\r\n"
+ "Cc\r\n"
+ "Yy\r\n",
+
+ "Xx\r\n"
+ "Aa\r\n"
+ "Bb\r\n"
+ "Cc\r\n"
+ "Yy\r\n",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ SVN_ERR(three_way_merge("zig1b", "zag1b", "zog1b",
+ "Aa\r"
+ "Bb\r"
+ "Cc\r",
+
+ "Xx\r"
+ "Aa\r"
+ "Bb\r"
+ "Cc\r",
+
+ "Aa\r"
+ "Bb\r"
+ "Cc\r"
+ "Yy\r",
+
+ "Xx\r"
+ "Aa\r"
+ "Bb\r"
+ "Cc\r"
+ "Yy\r",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ diff_opts->ignore_space = svn_diff_file_ignore_space_all;
+ SVN_ERR(three_way_merge("zig1c", "zag1c", "zog1c",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "X x\n"
+ "A a\n"
+ "B b\n"
+ "C c\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Yy\n",
+
+ "X x\n"
+ "A a\n"
+ "B b\n"
+ "C c\n"
+ "Yy\n",
+ diff_opts,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+ diff_opts->ignore_space = svn_diff_file_ignore_space_none;
+
+ SVN_ERR(three_way_merge("zig2", "zag2", "zog2",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Xx\n"
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Yy\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Zz\n"
+ "Cc\n",
+
+ "Xx\n"
+ "Aa\n"
+ "Bb\n"
+ "Zz\n"
+ "Cc\n"
+ "Yy\n",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ SVN_ERR(three_way_merge("zig3a", "zag3a", "zog3a",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc",
+
+ "Xx\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Xx\n"
+ "Bb\n"
+ "Cc",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ SVN_ERR(three_way_merge("zig3b", "zag3b", "zog3b",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Xx\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc",
+
+ "Xx\n"
+ "Bb\n"
+ "Cc",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ diff_opts->ignore_space = svn_diff_file_ignore_space_all;
+ diff_opts->ignore_eol_style = TRUE;
+ SVN_ERR(three_way_merge("zig2c", "zag2c", "zog2c",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ " Xx\r\n"
+ " Aa\r\n"
+ " Bb\r\n"
+ " Cc\r\n"
+ " Yy\r\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Zz\n"
+ "Cc\n",
+
+ " Xx\r\n"
+ " Aa\r\n"
+ " Bb\r\n"
+ "Zz\n"
+ " Cc\r\n"
+ " Yy\r\n",
+ diff_opts,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+ diff_opts->ignore_space = svn_diff_file_ignore_space_none;
+ diff_opts->ignore_eol_style = FALSE;
+
+ SVN_ERR(three_way_merge("zig4", "zag4", "zog4",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd\n"
+ "Ee\n"
+ "Ff\n"
+ "Gg\n"
+ "Hh\n"
+ "Ii\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd\n"
+ "Ee\n"
+ "Ff\n"
+ "Yy\n"
+ "Zz\n"
+ "Hh\n"
+ "Ii\n",
+
+ "Bb\n"
+ "Cc\n"
+ "Dd\n"
+ "Ee\n"
+ "Ff\n"
+ "Gg\n"
+ "Hh\n"
+ "Ii\n",
+
+ "Bb\n"
+ "Cc\n"
+ "Dd\n"
+ "Ee\n"
+ "Ff\n"
+ "Yy\n"
+ "Zz\n"
+ "Hh\n"
+ "Ii\n",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ SVN_ERR(three_way_merge("zig5", "zag5", "zog5",
+ "Aa\r\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Xx\r\n"
+ "Aa\r\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Aa\r\n"
+ "Bb\n"
+ "Cc\n"
+ "Yy\r\n",
+
+ "Xx\r\n"
+ "Aa\r\n"
+ "Bb\n"
+ "Cc\n"
+ "Yy\r\n",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ SVN_ERR(three_way_merge("zig6", "zag6", "zog6",
+ "AaAaAaAaAaAa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Xx\n"
+ "Bb\n"
+ "Cc\n",
+
+ "AaAaAaAaAaAa\n"
+ "Bb\n"
+ "CcCcCcCcCcCc\n"
+ "Yy\n",
+
+ "Xx\n"
+ "Bb\n"
+ "CcCcCcCcCcCc\n"
+ "Yy\n",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ SVN_ERR(three_way_merge("zig7", "zag7", "zog7",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ diff_opts->ignore_space = svn_diff_file_ignore_space_all;
+ diff_opts->ignore_eol_style = FALSE;
+ SVN_ERR(three_way_merge("zig8", "zag8", "zog8",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ " Aa\n"
+ "B b\n"
+ "C c\n",
+
+ "A a\n"
+ "Bb \n"
+ " Cc\n"
+ "New line in zog8\n",
+
+ " Aa\n"
+ "B b\n"
+ "C c\n"
+ "New line in zog8\n",
+ diff_opts,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_three_way_merge_with_overlap(apr_pool_t *pool)
+{
+ SVN_ERR(three_way_merge("splish1", "splash1", "splosh1",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd\n"
+ "Ee\n",
+
+ "Aa\n"
+ "Xx\n"
+ "Bb\n"
+ "Cc\n"
+ "Yy\n"
+ "Ee\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Yy\n"
+ "Ee\n"
+ "Zz\n",
+
+ "Aa\n"
+ "Xx\n"
+ "Bb\n"
+ "Cc\n"
+ "Yy\n"
+ "Ee\n"
+ "Zz\n",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ SVN_ERR(three_way_merge("splish2", "splash2", "splosh2",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd\n"
+ "Ee\n"
+ "Ff\n",
+
+ "Aa\n"
+ "Yy\n"
+ "Zz\n"
+ "Dd\n"
+ "Pp\n"
+ "Qq\n"
+ "Ff\n",
+
+ "Pp\n"
+ "Qq\n"
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd\n"
+ "Pp\n"
+ "Qq\n"
+ "Ff\n"
+ "Pp\n"
+ "Qq\n",
+
+ "Pp\n"
+ "Qq\n"
+ "Aa\n"
+ "Yy\n"
+ "Zz\n"
+ "Dd\n"
+ "Pp\n"
+ "Qq\n"
+ "Ff\n"
+ "Pp\n"
+ "Qq\n",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ SVN_ERR(three_way_merge("splish3", "splash3", "splosh3",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Xx\n"
+ "Aa\n"
+ "Bb\n"
+ "Cc",
+
+ "Aa\n"
+ "Xx\n"
+ "Bb\n"
+ "Cc",
+
+ "Xx\n"
+ "Aa\n"
+ "Xx\n"
+ "Bb\n"
+ "Cc",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ SVN_ERR(three_way_merge("splish4", "splash4", "splosh4",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd\n"
+ "Ee\n"
+ "Ff\n"
+ "Gg\n"
+ "Hh\n",
+
+ "Aa\n"
+ "Ff\n"
+ "Gg\n"
+ "Hh\n"
+ "Bb\n"
+ "Cc\n"
+ "Xx\n"
+ "Dd\n"
+ "Ee\n"
+ "Yy\n"
+ "Ff\n"
+ "Gg\n"
+ "Hh\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Xx\n"
+ "Dd\n"
+ "Ee\n"
+ "Ff\n"
+ "Gg\n"
+ "Zz\n"
+ "Hh\n",
+
+ "Aa\n"
+ "Ff\n"
+ "Gg\n"
+ "Hh\n"
+ "Bb\n"
+ "Cc\n"
+ "Xx\n"
+ "Dd\n"
+ "Ee\n"
+ "Yy\n"
+ "Ff\n"
+ "Gg\n"
+ "Zz\n"
+ "Hh\n",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_three_way_merge_with_conflict(apr_pool_t *pool)
+{
+ SVN_ERR(three_way_merge("dig1", "dug1", "dag1",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "",
+
+ "",
+
+ "",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ SVN_ERR(three_way_merge("dig2", "dug2", "dag2",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd\n"
+ "Ee\n"
+ "Ff\n",
+
+ "",
+
+ "<<<<<<< dug2\n"
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd\n"
+ "Ee\n"
+ "Ff\n"
+ "=======\n"
+ ">>>>>>> dag2\n",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ SVN_ERR(three_way_merge("dig2a", "dug2a", "dag2a",
+ "Aa\r\n"
+ "Bb\r\n"
+ "Cc\r\n",
+
+ "Aa\r\n"
+ "Bb\r\n"
+ "Cc\r\n"
+ "Dd\r\n"
+ "Ee\r\n"
+ "Ff\r\n",
+
+ "",
+
+ "<<<<<<< dug2a\r\n"
+ "Aa\r\n"
+ "Bb\r\n"
+ "Cc\r\n"
+ "Dd\r\n"
+ "Ee\r\n"
+ "Ff\r\n"
+ "=======\r\n"
+ ">>>>>>> dag2a\r\n",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ SVN_ERR(three_way_merge("dig2b", "dug2b", "dag2b",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Aa\r"
+ "Bb\r"
+ "Cc\r"
+ "Dd\r"
+ "Ee\r"
+ "Ff\r",
+
+ "",
+
+ "<<<<<<< dug2b\r"
+ "Aa\r"
+ "Bb\r"
+ "Cc\r"
+ "Dd\r"
+ "Ee\r"
+ "Ff\r"
+ "=======\r"
+ ">>>>>>> dag2b\r",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ SVN_ERR(three_way_merge("dig3", "dug3", "dag3",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd\n"
+ "Ee\n"
+ "Ff\n",
+
+ "Aa\n"
+ "Bb\n",
+
+ "Aa\n"
+ "Bb\n"
+ "<<<<<<< dug3\n"
+ "Cc\n"
+ "Dd\n"
+ "Ee\n"
+ "Ff\n"
+ "=======\n"
+ ">>>>>>> dag3\n",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ SVN_ERR(three_way_merge("dig4", "dug4", "dag4",
+ "Aa\n"
+ "Bb\n"
+ "Cc\n",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Dd",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "Ee",
+
+ "Aa\n"
+ "Bb\n"
+ "Cc\n"
+ "<<<<<<< dug4\n"
+ "Dd=======\n"
+ "Ee>>>>>>> dag4\n",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_three_way_merge_conflict_styles(apr_pool_t *pool)
+{
+ static const char *original =
+ "a\n"
+ "b\n"
+ "c\n"
+ "d\n"
+ "e\n"
+ "f\n"
+ "g\n"
+ "h\n"
+ "i\n"
+ "j\n"
+ "k\n"
+ "l\n"
+ "m\n"
+ "n\n"
+ "o\n"
+ "p\n"
+ "q\n"
+ "r\n"
+ "s\n"
+ "t\n"
+ "u\n"
+ "v\n"
+ "w\n"
+ "x\n"
+ "y\n"
+ "z\n"
+ ;
+ static const char *modified =
+ "A\n"
+ "b\n"
+ "c\n"
+ "d\n"
+ "e\n"
+ "f\n"
+ "g\n"
+ "h\n"
+ "iMOD\n"
+ "j\n"
+ "k\n"
+ "l\n"
+ "m\n"
+ "N\n"
+ "O\n"
+ "hello\n"
+ "world\n"
+ "yay\n"
+ "P\n"
+ "Q\n"
+ "r\n"
+ "s\n"
+ "t\n"
+ "u\n"
+ "v\n"
+ "w\n"
+ "x\n"
+ "y\n"
+ "z\n"
+ ;
+ static const char *latest =
+ "a\n"
+ "b\n"
+ "c\n"
+ "d\n"
+ "e\n"
+ "f\n"
+ "g\n"
+ "h\n"
+ "i\n"
+ "j\n"
+ "k1\n"
+ "l2\n"
+ "m3\n"
+ "n4\n"
+ "o5\n"
+ "hello\n"
+ "world\n"
+ "yay\n"
+ "p\n"
+ "q\n"
+ "r\n"
+ "sLAT\n"
+ "t\n"
+ "u\n"
+ "v\n"
+ "w\n"
+ "x\n"
+ "y\n"
+ "Z\n"
+ ;
+ /* So, 'modified' capitalized N through Q; 'latest' added numbers to
+ 'k' through 'o'; and they both inserted "hello world yay" in the
+ middle. Also, there are non-conflicting changes to the first and
+ last lines. */
+
+ SVN_ERR(three_way_merge("style-normal1", "style-normal2", "style-normal3",
+ original, modified, latest,
+ "A\n"
+ "b\n"
+ "c\n"
+ "d\n"
+ "e\n"
+ "f\n"
+ "g\n"
+ "h\n"
+ "iMOD\n"
+ "j\n"
+ "<<<<<<< style-normal2\n"
+ "k\n"
+ "l\n"
+ "m\n"
+ "N\n"
+ "O\n"
+ "hello\n"
+ "world\n"
+ "yay\n"
+ "P\n"
+ "Q\n"
+ "=======\n"
+ "k1\n"
+ "l2\n"
+ "m3\n"
+ "n4\n"
+ "o5\n"
+ "hello\n"
+ "world\n"
+ "yay\n"
+ "p\n"
+ "q\n"
+ ">>>>>>> style-normal3\n"
+ "r\n"
+ "sLAT\n"
+ "t\n"
+ "u\n"
+ "v\n"
+ "w\n"
+ "x\n"
+ "y\n"
+ "Z\n",
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ SVN_ERR(three_way_merge("style-resolved1", "style-resolved2",
+ "style-resolved3",
+ original, modified, latest,
+ "A\n"
+ "b\n"
+ "c\n"
+ "d\n"
+ "e\n"
+ "f\n"
+ "g\n"
+ "h\n"
+ "iMOD\n"
+ "j\n"
+ "<<<<<<< style-resolved2\n"
+ "k\n"
+ "l\n"
+ "m\n"
+ "N\n"
+ "O\n"
+ "=======\n"
+ "k1\n"
+ "l2\n"
+ "m3\n"
+ "n4\n"
+ "o5\n"
+ ">>>>>>> style-resolved3\n"
+ "hello\n"
+ "world\n"
+ "yay\n"
+ "<<<<<<< style-resolved2\n"
+ "P\n"
+ "Q\n"
+ "=======\n"
+ "p\n"
+ "q\n"
+ ">>>>>>> style-resolved3\n"
+ "r\n"
+ "sLAT\n"
+ "t\n"
+ "u\n"
+ "v\n"
+ "w\n"
+ "x\n"
+ "y\n"
+ "Z\n",
+ NULL,
+ svn_diff_conflict_display_resolved_modified_latest,
+ pool));
+
+ SVN_ERR(three_way_merge("style-three1", "style-three2", "style-three3",
+ original, modified, latest,
+ "A\n"
+ "b\n"
+ "c\n"
+ "d\n"
+ "e\n"
+ "f\n"
+ "g\n"
+ "h\n"
+ "iMOD\n"
+ "j\n"
+ "<<<<<<< style-three2\n"
+ "k\n"
+ "l\n"
+ "m\n"
+ "N\n"
+ "O\n"
+ "hello\n"
+ "world\n"
+ "yay\n"
+ "P\n"
+ "Q\n"
+ "||||||| style-three1\n"
+ "k\n"
+ "l\n"
+ "m\n"
+ "n\n"
+ "o\n"
+ "p\n"
+ "q\n"
+ "=======\n"
+ "k1\n"
+ "l2\n"
+ "m3\n"
+ "n4\n"
+ "o5\n"
+ "hello\n"
+ "world\n"
+ "yay\n"
+ "p\n"
+ "q\n"
+ ">>>>>>> style-three3\n"
+ "r\n"
+ "sLAT\n"
+ "t\n"
+ "u\n"
+ "v\n"
+ "w\n"
+ "x\n"
+ "y\n"
+ "Z\n",
+ NULL,
+ svn_diff_conflict_display_modified_original_latest,
+ pool));
+
+ SVN_ERR(three_way_merge("style-only1", "style-only2", "style-only3",
+ original, modified, latest,
+ "@@\n"
+ "h\n"
+ "iMOD\n"
+ "j\n"
+ "<<<<<<< style-only2 (11,10)\n"
+ "k\n"
+ "l\n"
+ "m\n"
+ "N\n"
+ "O\n"
+ "hello\n"
+ "world\n"
+ "yay\n"
+ "P\n"
+ "Q\n"
+ "||||||| style-only1 (11,7)\n"
+ "k\n"
+ "l\n"
+ "m\n"
+ "n\n"
+ "o\n"
+ "p\n"
+ "q\n"
+ "=======\n"
+ "k1\n"
+ "l2\n"
+ "m3\n"
+ "n4\n"
+ "o5\n"
+ "hello\n"
+ "world\n"
+ "yay\n"
+ "p\n"
+ "q\n"
+ ">>>>>>> style-only3 (11,10)\n"
+ "r\n"
+ "sLAT\n"
+ "t\n",
+ NULL,
+ svn_diff_conflict_display_only_conflicts,
+ pool));
+
+ SVN_ERR(three_way_merge("style-mod1", "style-mod2", "style-mod3",
+ original, modified, latest,
+ "A\n"
+ "b\n"
+ "c\n"
+ "d\n"
+ "e\n"
+ "f\n"
+ "g\n"
+ "h\n"
+ "iMOD\n"
+ "j\n"
+ "k\n"
+ "l\n"
+ "m\n"
+ "N\n"
+ "O\n"
+ "hello\n"
+ "world\n"
+ "yay\n"
+ "P\n"
+ "Q\n"
+ "r\n"
+ "sLAT\n"
+ "t\n"
+ "u\n"
+ "v\n"
+ "w\n"
+ "x\n"
+ "y\n"
+ "Z\n",
+ NULL,
+ svn_diff_conflict_display_modified,
+ pool));
+
+ SVN_ERR(three_way_merge("style-latest1", "style-latest2", "style-latest3",
+ original, modified, latest,
+ "A\n"
+ "b\n"
+ "c\n"
+ "d\n"
+ "e\n"
+ "f\n"
+ "g\n"
+ "h\n"
+ "iMOD\n"
+ "j\n"
+ "k1\n"
+ "l2\n"
+ "m3\n"
+ "n4\n"
+ "o5\n"
+ "hello\n"
+ "world\n"
+ "yay\n"
+ "p\n"
+ "q\n"
+ "r\n"
+ "sLAT\n"
+ "t\n"
+ "u\n"
+ "v\n"
+ "w\n"
+ "x\n"
+ "y\n"
+ "Z\n",
+ NULL,
+ svn_diff_conflict_display_latest,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+#define MAKE_STRING(cstr) { (cstr), sizeof((cstr))-1 }
+
+static svn_error_t *
+test_diff4(apr_pool_t *pool)
+{
+ svn_diff_t *diff;
+ svn_stream_t *actual, *expected;
+ svn_boolean_t same;
+ static svn_string_t B2 = MAKE_STRING(
+ "int main (int argc, char **argv)\n"
+ "{\n"
+ " /* line minus-five of context */\n"
+ " /* line minus-four of context */\n"
+ " /* line minus-three of context */\n"
+ " /* line -1 of context */\n"
+ " printf (\"Hello, world!\\n\");\n"
+ " /* newly inserted line of context */\n"
+ " /* line plus-one of context */\n"
+ " /* line plus-two of context */\n"
+ " /* line plus-three of context */\n"
+ " /* line plus-four of context */\n"
+ " /* line plus-five of context */\n"
+ "}\n");
+ static svn_string_t B2new = MAKE_STRING(
+ "int main (int argc, char **argv)\n"
+ "{\n"
+ " /* line minus-five of context */\n"
+ " /* line minus-four of context */\n"
+ " /* line minus-three of context */\n"
+ " /* line -1 of context */\n"
+ " printf (\"Good-bye, cruel world!\\n\");\n"
+ " /* newly inserted line of context */\n"
+ " /* line plus-one of context */\n"
+ " /* line plus-two of context */\n"
+ " /* line plus-three of context */\n"
+ " /* line plus-four of context */\n"
+ " /* line plus-five of context */\n"
+ "}\n");
+ static svn_string_t T1 = MAKE_STRING(
+ "int main (int argc, char **argv)\n"
+ "{\n"
+ " /* line minus-five of context */\n"
+ " /* line minus-four of context */\n"
+ " /* line minus-three of context */\n"
+ " /* line minus-two of context */\n"
+ " /* line minus-one of context */\n"
+ " printf (\"Hello, world!\\n\");\n"
+ " /* line plus-one of context */\n"
+ " /* line plus-two of context */\n"
+ " /* line plus-three of context */\n"
+ " /* line plus-four of context */\n"
+ " /* line plus-five of context */\n"
+ "}\n");
+ static svn_string_t T2 = MAKE_STRING(
+ "#include <stdio.h>\n"
+ "\n"
+ "int main (int argc, char **argv)\n"
+ "{\n"
+ " /* line minus-five of context */\n"
+ " /* line minus-four of context */\n"
+ " /* line minus-three of context */\n"
+ " /* line minus-two of context */\n"
+ " /* line minus-one of context */\n"
+ " printf (\"Hello, world!\\n\");\n"
+ " /* line plus-one of context */\n"
+ " /* line plus-two of context */\n"
+ " /* line plus-three of context */\n"
+ " /* line plus-four of context */\n"
+ " /* line plus-five of context */\n"
+ "}\n");
+ static svn_string_t T3 = MAKE_STRING(
+ "#include <stdio.h>\n"
+ "\n"
+ "int main (int argc, char **argv)\n"
+ "{\n"
+ " /* line minus-five of context */\n"
+ " /* line minus-four of context */\n"
+ " /* line minus-three of context */\n"
+ " /* line minus-two of context */\n"
+ " /* line minus-one of context */\n"
+ " printf (\"Good-bye, cruel world!\\n\");\n"
+ " /* line plus-one of context */\n"
+ " /* line plus-two of context */\n"
+ " /* line plus-three of context */\n"
+ " /* line plus-four of context */\n"
+ " /* line plus-five of context */\n"
+ "}\n");
+
+ const char *B2_path = svn_test_data_path("B2", pool);
+ const char *T1_path = svn_test_data_path("T1", pool);
+ const char *T2_path = svn_test_data_path("T2", pool);
+ const char *T3_path = svn_test_data_path("T3", pool);
+
+ SVN_ERR(make_file(B2_path, B2.data, pool));
+ SVN_ERR(make_file(T1_path, T1.data, pool));
+ SVN_ERR(make_file(T2_path, T2.data, pool));
+ SVN_ERR(make_file(T3_path, T3.data, pool));
+
+ /* Usage: tools/diff/diff4 <mine> <older> <yours> <ancestor> */
+ /* tools/diff/diff4 B2 T2 T3 T1 > B2new */
+ SVN_ERR(svn_diff_file_diff4(&diff, T2_path, B2_path, T3_path, T1_path, pool));
+
+ /* Sanity. */
+ SVN_TEST_ASSERT(! svn_diff_contains_conflicts(diff));
+ SVN_TEST_ASSERT(svn_diff_contains_diffs(diff));
+
+ /* Comparison. */
+ expected = svn_stream_from_string(&B2new, pool);
+
+ actual = svn_stream_from_stringbuf(
+ svn_stringbuf_create_ensure(417, pool), /* 417 == wc -c < B2new */
+ pool);
+ SVN_ERR(svn_diff_file_output_merge(actual, diff,
+ T2_path, B2_path, T3_path,
+ NULL, NULL, NULL, NULL,
+ FALSE,
+ FALSE,
+ pool));
+ SVN_ERR(svn_stream_contents_same2(&same, actual, expected, pool));
+ SVN_TEST_ASSERT(same);
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+random_trivial_merge(apr_pool_t *pool)
+{
+ int i;
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ const char *base_filename1 = "trivial1";
+ const char *base_filename2 = "trivial2";
+
+ const char *filename1 = svn_test_data_path(base_filename1, pool);
+ const char *filename2 = svn_test_data_path(base_filename2, pool);
+
+ seed_val();
+
+ for (i = 0; i < 5; ++i)
+ {
+ int min_lines = 1000;
+ int max_lines = 1100;
+ int var_lines = 50;
+ int block_lines = 10;
+ svn_stringbuf_t *contents1, *contents2;
+
+ SVN_ERR(make_random_file(filename1,
+ min_lines, max_lines, var_lines, block_lines,
+ i % 3, subpool));
+ SVN_ERR(make_random_file(filename2,
+ min_lines, max_lines, var_lines, block_lines,
+ i % 2, subpool));
+
+ SVN_ERR(svn_stringbuf_from_file2(&contents1, filename1, subpool));
+ SVN_ERR(svn_stringbuf_from_file2(&contents2, filename2, subpool));
+
+ SVN_ERR(three_way_merge(base_filename1, base_filename2, base_filename1,
+ contents1->data, contents2->data,
+ contents1->data, contents2->data, NULL,
+ svn_diff_conflict_display_modified_latest,
+ subpool));
+ SVN_ERR(three_way_merge(base_filename2, base_filename1, base_filename2,
+ contents2->data, contents1->data,
+ contents2->data, contents1->data, NULL,
+ svn_diff_conflict_display_modified_latest,
+ subpool));
+ svn_pool_clear(subpool);
+ }
+ svn_pool_destroy(subpool);
+
+ return SVN_NO_ERROR;
+}
+
+
+/* The "original" file has a number of distinct lines. We generate two
+ random modifications by selecting two subsets of the original lines and
+ for each selected line either adding an additional line, replacing the
+ line, or deleting the line. The two subsets are chosen so that each
+ selected line is distinct and no two selected lines are adjacent. This
+ means the two sets of changes should merge without conflict. */
+static svn_error_t *
+random_three_way_merge(apr_pool_t *pool)
+{
+ int i;
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ const char *base_filename1 = "original";
+ const char *base_filename2 = "modified1";
+ const char *base_filename3 = "modified2";
+ const char *base_filename4 = "combined";
+
+ const char *filename1 = svn_test_data_path(base_filename1, pool);
+ const char *filename2 = svn_test_data_path(base_filename2, pool);
+ const char *filename3 = svn_test_data_path(base_filename3, pool);
+ const char *filename4 = svn_test_data_path(base_filename4, pool);
+
+ seed_val();
+
+ for (i = 0; i < 20; ++i)
+ {
+ svn_stringbuf_t *original, *modified1, *modified2, *combined;
+ /* Pick NUM_LINES large enough so that the 'strip identical suffix' code
+ gets triggered with reasonable probability. (Currently it ignores
+ 50 lines or more, and empirically N=4000 suffices to trigger that
+ behaviour most of the time.) */
+ int num_lines = 4000, num_src = 10, num_dst = 10;
+ svn_boolean_t *lines = apr_pcalloc(subpool, sizeof(*lines) * num_lines);
+ struct random_mod *src_lines = apr_palloc(subpool,
+ sizeof(*src_lines) * num_src);
+ struct random_mod *dst_lines = apr_palloc(subpool,
+ sizeof(*dst_lines) * num_dst);
+ struct random_mod *mrg_lines = apr_palloc(subpool,
+ (sizeof(*mrg_lines)
+ * (num_src + num_dst)));
+
+ select_lines(src_lines, num_src, lines, num_lines);
+ select_lines(dst_lines, num_dst, lines, num_lines);
+ memcpy(mrg_lines, src_lines, sizeof(*mrg_lines) * num_src);
+ memcpy(mrg_lines + num_src, dst_lines, sizeof(*mrg_lines) * num_dst);
+
+ SVN_ERR(make_random_merge_file(filename1, num_lines, NULL, 0, pool));
+ SVN_ERR(make_random_merge_file(filename2, num_lines, src_lines, num_src,
+ pool));
+ SVN_ERR(make_random_merge_file(filename3, num_lines, dst_lines, num_dst,
+ pool));
+ SVN_ERR(make_random_merge_file(filename4, num_lines, mrg_lines,
+ num_src + num_dst, pool));
+
+ SVN_ERR(svn_stringbuf_from_file2(&original, filename1, pool));
+ SVN_ERR(svn_stringbuf_from_file2(&modified1, filename2, pool));
+ SVN_ERR(svn_stringbuf_from_file2(&modified2, filename3, pool));
+ SVN_ERR(svn_stringbuf_from_file2(&combined, filename4, pool));
+
+ SVN_ERR(three_way_merge(base_filename1, base_filename2, base_filename3,
+ original->data, modified1->data,
+ modified2->data, combined->data, NULL,
+ svn_diff_conflict_display_modified_latest,
+ subpool));
+ SVN_ERR(three_way_merge(base_filename1, base_filename3, base_filename2,
+ original->data, modified2->data,
+ modified1->data, combined->data, NULL,
+ svn_diff_conflict_display_modified_latest,
+ subpool));
+
+ SVN_ERR(svn_io_remove_file2(filename4, TRUE, pool));
+
+ svn_pool_clear(subpool);
+ }
+ svn_pool_destroy(subpool);
+
+ return SVN_NO_ERROR;
+}
+
+/* This is similar to random_three_way_merge above, except this time half
+ of the original-to-modified1 changes are already present in modified2
+ (or, equivalently, half the original-to-modified2 changes are already
+ present in modified1). Since the overlapping changes match exactly the
+ merge should work without a conflict. */
+static svn_error_t *
+merge_with_part_already_present(apr_pool_t *pool)
+{
+ int i;
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ const char *base_filename1 = "pap-original";
+ const char *base_filename2 = "pap-modified1";
+ const char *base_filename3 = "pap-modified2";
+ const char *base_filename4 = "pap-combined";
+
+ const char *filename1 = svn_test_data_path(base_filename1, pool);
+ const char *filename2 = svn_test_data_path(base_filename2, pool);
+ const char *filename3 = svn_test_data_path(base_filename3, pool);
+ const char *filename4 = svn_test_data_path(base_filename4, pool);
+
+ seed_val();
+
+ for (i = 0; i < 20; ++i)
+ {
+ svn_stringbuf_t *original, *modified1, *modified2, *combined;
+ int num_lines = 200, num_src = 20, num_dst = 20;
+ svn_boolean_t *lines = apr_pcalloc(subpool, sizeof(*lines) * num_lines);
+ struct random_mod *src_lines = apr_palloc(subpool,
+ sizeof(*src_lines) * num_src);
+ struct random_mod *dst_lines = apr_palloc(subpool,
+ sizeof(*dst_lines) * num_dst);
+ struct random_mod *mrg_lines = apr_palloc(subpool,
+ (sizeof(*mrg_lines)
+ * (num_src + num_dst / 2)));
+
+ select_lines(src_lines, num_src, lines, num_lines);
+ /* Select half the destination changes at random */
+ select_lines(dst_lines, num_dst / 2, lines, num_lines);
+ /* Copy the other half from the source changes */
+ memcpy(dst_lines + num_dst / 2, src_lines,
+ sizeof(*dst_lines) * (num_dst - num_dst / 2));
+ memcpy(mrg_lines, src_lines, sizeof(*mrg_lines) * num_src);
+ memcpy(mrg_lines + num_src, dst_lines,
+ sizeof(*mrg_lines) * num_dst / 2);
+
+ SVN_ERR(make_random_merge_file(filename1, num_lines, NULL, 0, pool));
+ SVN_ERR(make_random_merge_file(filename2, num_lines, src_lines, num_src,
+ pool));
+ SVN_ERR(make_random_merge_file(filename3, num_lines, dst_lines, num_dst,
+ pool));
+ SVN_ERR(make_random_merge_file(filename4, num_lines, mrg_lines,
+ num_src + num_dst / 2, pool));
+
+ SVN_ERR(svn_stringbuf_from_file2(&original, filename1, pool));
+ SVN_ERR(svn_stringbuf_from_file2(&modified1, filename2, pool));
+ SVN_ERR(svn_stringbuf_from_file2(&modified2, filename3, pool));
+ SVN_ERR(svn_stringbuf_from_file2(&combined, filename4, pool));
+
+ SVN_ERR(three_way_merge(base_filename1, base_filename2, base_filename3,
+ original->data, modified1->data,
+ modified2->data, combined->data, NULL,
+ svn_diff_conflict_display_modified_latest,
+ subpool));
+ SVN_ERR(three_way_merge(base_filename1, base_filename3, base_filename2,
+ original->data, modified2->data,
+ modified1->data, combined->data, NULL,
+ svn_diff_conflict_display_modified_latest,
+ subpool));
+
+ SVN_ERR(svn_io_remove_file2(filename4, TRUE, pool));
+
+ svn_pool_clear(subpool);
+ }
+ svn_pool_destroy(subpool);
+
+ return SVN_NO_ERROR;
+}
+
+/* Merge is more "aggressive" about resolving conflicts than traditional
+ * patch or diff3. Some people consider this behaviour to be a bug, see
+ * http://subversion.tigris.org/servlets/ReadMsg?list=dev&msgNo=35014
+ */
+static svn_error_t *
+merge_adjacent_changes(apr_pool_t *pool)
+{
+ SVN_ERR(three_way_merge("adj1", "adj2", "adj3",
+
+ "foo\n"
+ "bar\n"
+ "baz\n",
+
+ "foo\n"
+ "new_bar\n"
+ "baz\n",
+
+ "zig\n"
+ "foo\n"
+ "bar\n"
+ "new_baz\n",
+
+ "zig\n"
+ "foo\n"
+ "new_bar\n"
+ "new_baz\n",
+
+ NULL,
+ svn_diff_conflict_display_modified_latest,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* Issue #4133, 'When sequences of whitespace characters at head of line
+ strides chunk boundary, "diff -x -w" showing wrong change'.
+ The magic number used in this test, 1<<17, is
+ CHUNK_SIZE from ../../libsvn_diff/diff_file.c
+ */
+static svn_error_t *
+test_norm_offset(apr_pool_t *pool)
+{
+ apr_size_t chunk_size = 1 << 17;
+ const char *pattern1 = " \n";
+ const char *pattern2 = "\n\n\n\n\n\n\n\n";
+ const char *pattern3 = " @@@@@@@\n";
+ const char *pattern4 = " \n";
+ svn_stringbuf_t *original, *modified;
+ svn_diff_file_options_t *diff_opts = svn_diff_file_options_create(pool);
+
+ /* The original contents become like this
+
+ $ hexdump -C norm-offset-original
+ 00000000 20 20 20 20 20 20 20 0a 0a 0a 0a 0a 0a 0a 0a 0a | .........|
+ 00000010 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a |................|
+ *
+ 0001fff0 0a 0a 0a 0a 0a 0a 0a 0a 20 20 20 20 20 20 20 20 |........ |
+ 00020000 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | |
+ 00020010 40 40 40 40 40 40 40 0a 0a 0a 0a 0a 0a 0a 0a 0a |@@@@@@@.........|
+ 00020020 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a |................|
+ *
+ 000203f0 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 0a | .|
+ 00020400
+ */
+ original = svn_stringbuf_create_ensure(chunk_size + 1024, pool);
+ svn_stringbuf_appendcstr(original, pattern1);
+ while (original->len < chunk_size - 8)
+ {
+ svn_stringbuf_appendcstr(original, pattern2);
+ }
+ svn_stringbuf_appendcstr(original, pattern3);
+ while (original->len < chunk_size +1024 - 16)
+ {
+ svn_stringbuf_appendcstr(original, pattern2);
+ }
+ svn_stringbuf_appendcstr(original, pattern4);
+
+ /* The modified contents become like this.
+
+ $ hexdump -C norm-offset-modified
+ 00000000 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 0a | .|
+ 00000010 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a |................|
+ *
+ 00020000 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 | |
+ 00020010 20 20 20 20 20 20 20 20 40 40 40 40 40 40 40 0a | @@@@@@@.|
+ 00020020 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a 0a |................|
+ *
+ 000203f0 0a 0a 0a 0a 0a 0a 0a 0a 20 20 20 20 20 20 20 0a |........ .|
+ 00020400
+ */
+ modified = svn_stringbuf_create_ensure(chunk_size + 1024, pool);
+ svn_stringbuf_appendcstr(modified, pattern4);
+ while (modified->len < chunk_size)
+ {
+ svn_stringbuf_appendcstr(modified, pattern2);
+ }
+ svn_stringbuf_appendcstr(modified, pattern3);
+ while (modified->len < chunk_size +1024 - 8)
+ {
+ svn_stringbuf_appendcstr(modified, pattern2);
+ }
+ svn_stringbuf_appendcstr(modified, pattern1);
+
+ /* Diff them. Modulo whitespace, they are identical. */
+ diff_opts->ignore_space = svn_diff_file_ignore_space_all;
+ SVN_ERR(two_way_diff("norm-offset-original", "norm-offset-modified",
+ original->data, modified->data, "",
+ diff_opts, pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* Issue #4283, 'When identical suffix started at a chunk boundary,
+ incorrect diff was generated'.
+ The magic number used in this test, (1<<17) and 50 are CHUNK_SIZE
+ and SUFFIX_LINES_TO_KEEP from ../../libsvn_diff/diff_file.c, respectively.
+ */
+#define ORIGINAL_CONTENTS_PATTERN "0123456789abcde\n"
+#define INSERTED_LINE "0123456789ABCDE\n"
+static svn_error_t *
+test_identical_suffix(apr_pool_t *pool)
+{
+ apr_size_t lines_in_chunk = (1 << 17)
+ / (sizeof(ORIGINAL_CONTENTS_PATTERN) - 1);
+ /* To let identical suffix start at a chunk boundary,
+ insert a line at before (SUFFIX_LINES_TO_KEEP + 1) lines
+ from tail of the previous chunk. */
+ apr_size_t insert_pos = lines_in_chunk
+#ifdef SUFFIX_LINES_TO_KEEP
+ - SUFFIX_LINES_TO_KEEP
+#else
+ - 50
+#endif
+ - 1;
+ apr_size_t i;
+ svn_stringbuf_t *original, *modified;
+
+ /* The original contents become like this.
+
+ $ hexdump -C identical-suffix-original
+ 00000000 30 31 32 33 34 35 36 37 38 39 61 62 63 64 65 0a |0123456789abcde.|
+ *
+ 00020400
+ */
+ original = svn_stringbuf_create_ensure((1 << 17) + 1024, pool);
+ for (i = 0; i < lines_in_chunk + 64; i++)
+ {
+ svn_stringbuf_appendbytes(original, ORIGINAL_CONTENTS_PATTERN,
+ sizeof(ORIGINAL_CONTENTS_PATTERN) - 1);
+ }
+
+ /* The modified contents become like this.
+
+ $ hexdump -C identical-suffix-modified
+ 00000000 30 31 32 33 34 35 36 37 38 39 61 62 63 64 65 0a |0123456789abcde.|
+ *
+ 00000400 30 31 32 33 34 35 36 37 38 39 41 42 43 44 45 0a |0123456789ABCDE.|
+ 00000410 30 31 32 33 34 35 36 37 38 39 61 62 63 64 65 0a |0123456789abcde.|
+ *
+ 0001fcd0 30 31 32 33 34 35 36 37 38 39 41 42 43 44 45 0a |0123456789ABCDE.|
+ 0001fce0 30 31 32 33 34 35 36 37 38 39 61 62 63 64 65 0a |0123456789abcde.|
+ *
+ 00020420
+ */
+ modified = svn_stringbuf_dup(original, pool);
+ svn_stringbuf_insert(modified,
+ 64 * (sizeof(ORIGINAL_CONTENTS_PATTERN) - 1),
+ INSERTED_LINE, sizeof(INSERTED_LINE) - 1);
+ svn_stringbuf_insert(modified,
+ insert_pos * (sizeof(ORIGINAL_CONTENTS_PATTERN) - 1),
+ INSERTED_LINE, sizeof(INSERTED_LINE) - 1);
+
+ SVN_ERR(two_way_diff("identical-suffix-original",
+ "identical-suffix-modified",
+ original->data, modified->data,
+ apr_psprintf(pool,
+ "--- identical-suffix-original" NL
+ "+++ identical-suffix-modified" NL
+ "@@ -62,6 +62,7 @@" NL
+ " " ORIGINAL_CONTENTS_PATTERN
+ " " ORIGINAL_CONTENTS_PATTERN
+ " " ORIGINAL_CONTENTS_PATTERN
+ "+" INSERTED_LINE
+ " " ORIGINAL_CONTENTS_PATTERN
+ " " ORIGINAL_CONTENTS_PATTERN
+ " " ORIGINAL_CONTENTS_PATTERN
+ "@@ -%u,6 +%u,7 @@" NL
+ " " ORIGINAL_CONTENTS_PATTERN
+ " " ORIGINAL_CONTENTS_PATTERN
+ " " ORIGINAL_CONTENTS_PATTERN
+ "+" INSERTED_LINE
+ " " ORIGINAL_CONTENTS_PATTERN
+ " " ORIGINAL_CONTENTS_PATTERN
+ " " ORIGINAL_CONTENTS_PATTERN,
+ 1 + (unsigned int)insert_pos - 3 - 1,
+ 1 + (unsigned int)insert_pos - 3),
+ NULL, pool));
+
+ return SVN_NO_ERROR;
+}
+#undef ORIGINAL_CONTENTS_PATTERN
+#undef INSERTED_LINE
+
+/* The magic number used in this test, 1<<17, is
+ CHUNK_SIZE from ../../libsvn_diff/diff_file.c
+ */
+static svn_error_t *
+test_token_compare(apr_pool_t *pool)
+{
+ apr_size_t chunk_size = 1 << 17;
+ const char *pattern = "ABCDEFG\n";
+ svn_stringbuf_t *original, *modified;
+ svn_diff_file_options_t *diff_opts = svn_diff_file_options_create(pool);
+
+ diff_opts->ignore_space = svn_diff_file_ignore_space_all;
+
+ original = svn_stringbuf_create_ensure(chunk_size * 2 + 8, pool);
+ /* CHUNK_SIZE bytes */
+ while (original->len < chunk_size - 8)
+ {
+ svn_stringbuf_appendcstr(original, pattern);
+ }
+ svn_stringbuf_appendcstr(original, " @@@\n");
+
+ modified = svn_stringbuf_create_ensure(chunk_size * 2 + 9, pool);
+ /* CHUNK_SIZE+1 bytes, one ' ' more than original */
+ while (modified->len < chunk_size - 8)
+ {
+ svn_stringbuf_appendcstr(modified, pattern);
+ }
+ svn_stringbuf_appendcstr(modified, " @@@\n");
+
+ /* regression test for reading exceeding the file size */
+ SVN_ERR(two_way_diff("token-compare-original1", "token-compare-modified1",
+ original->data, modified->data, "",
+ diff_opts, pool));
+
+ svn_stringbuf_appendcstr(original, "aaaaaaa\n");
+ svn_stringbuf_appendcstr(modified, "bbbbbbb\n");
+
+ /* regression test for comparison beyond the end-of-line */
+ SVN_ERR(two_way_diff("token-compare-original2", "token-compare-modified2",
+ original->data, modified->data,
+ apr_psprintf(pool,
+ "--- token-compare-original2" NL
+ "+++ token-compare-modified2" NL
+ "@@ -%u,4 +%u,4 @@" NL
+ " ABCDEFG\n"
+ " ABCDEFG\n"
+ " @@@\n"
+ "-aaaaaaa\n"
+ "+bbbbbbb\n",
+ (unsigned int)chunk_size/8 - 2,
+ (unsigned int)chunk_size/8 - 2),
+ diff_opts, pool));
+
+ /* CHUNK_SIZE*2 bytes */
+ while (original->len <= chunk_size * 2 - 8)
+ {
+ svn_stringbuf_appendcstr(original, pattern);
+ }
+
+ /* CHUNK_SIZE*2+1 bytes, one ' ' more than original */
+ while (modified->len <= chunk_size * 2 - 7)
+ {
+ svn_stringbuf_appendcstr(modified, pattern);
+ }
+
+ SVN_ERR(two_way_diff("token-compare-original2", "token-compare-modified2",
+ original->data, modified->data,
+ apr_psprintf(pool,
+ "--- token-compare-original2" NL
+ "+++ token-compare-modified2" NL
+ "@@ -%u,7 +%u,7 @@" NL
+ " ABCDEFG\n"
+ " ABCDEFG\n"
+ " @@@\n"
+ "-aaaaaaa\n"
+ "+bbbbbbb\n"
+ " ABCDEFG\n"
+ " ABCDEFG\n"
+ " ABCDEFG\n",
+ (unsigned int)chunk_size/8 - 2,
+ (unsigned int)chunk_size/8 - 2),
+ diff_opts, pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+two_way_issue_3362_v1(apr_pool_t *pool)
+{
+ svn_diff_file_options_t *diff_opts = svn_diff_file_options_create(pool);
+
+ SVN_ERR(two_way_diff("issue-3362-1-v1",
+ "issue-3362-2-v1",
+ /* File 1 */
+ "line_1\n"
+ "line_2\n"
+ "line_3\n"
+ "line_4\n"
+ "line_5\n"
+ "line_6\n"
+ "line_7\n"
+ "line_8\n"
+ "line_9\n"
+ "line_10\n"
+ "line_11\n"
+ "line_12\n"
+ "line_13\n"
+ "line_14\n"
+ "line_15\n"
+ "line_16\n"
+ "line_17\n"
+ "line_18\n"
+ "line_19\n"
+ "line_20\n"
+ "line_21\n"
+ "line_22\n"
+ "line_23\n"
+ "line_24\n"
+ "line_25\n"
+ "line_26\n"
+ "line_27\n"
+ "line_28\n"
+ "line_29\n"
+ "line_30\n",
+ /* File 2 */
+ "line_1a\n"
+ "line_2a\n"
+ "line_3a\n"
+ "line_1\n"
+ "line_2\n"
+ "line_3\n"
+ "line_4\n"
+ "line_5a\n"
+ "line_6b\n"
+ "line_7c\n"
+ "line_8\n"
+ "line_9\n"
+ "line_10\n"
+ "line_11a\n"
+ "line_11b\n"
+ "line_11c\n"
+ "line_12\n"
+ "line_13\n"
+ "line_14\n"
+ "line_15\n"
+ "line_16\n"
+ "line_17\n"
+ "line_18\n"
+ "line_19a\n"
+ "line_19b\n"
+ "line_19c\n"
+ "line_20\n"
+ "line_21\n"
+ "line_22\n"
+ "line_23\n"
+ "line_24\n"
+ "line_25\n"
+ "line_26\n"
+ "line_27\n"
+ "line_27a\n",
+ /* Expected */
+ "--- issue-3362-1-v1" APR_EOL_STR
+ "+++ issue-3362-2-v1" APR_EOL_STR
+ "@@ -1,14 +1,19 @@" APR_EOL_STR
+ "+line_1a\n"
+ "+line_2a\n"
+ "+line_3a\n"
+ " line_1\n" /* 1.7 mem diff: line missing */
+ " line_2\n"
+ " line_3\n"
+ " line_4\n"
+ "-line_5\n"
+ "-line_6\n"
+ "-line_7\n"
+ "+line_5a\n"
+ "+line_6b\n"
+ "+line_7c\n"
+ " line_8\n"
+ " line_9\n"
+ " line_10\n"
+ "-line_11\n"
+ "+line_11a\n"
+ "+line_11b\n"
+ "+line_11c\n"
+ " line_12\n"
+ " line_13\n"
+ " line_14\n" /* 1.7 mem diff: line missing */
+ "@@ -16,7 +21,9 @@" APR_EOL_STR
+ " line_16\n"
+ " line_17\n"
+ " line_18\n"
+ "-line_19\n"
+ "+line_19a\n"
+ "+line_19b\n"
+ "+line_19c\n"
+ " line_20\n"
+ " line_21\n"
+ " line_22\n"
+ "@@ -25,6 +32,4 @@" APR_EOL_STR
+ " line_25\n"
+ " line_26\n"
+ " line_27\n"
+ "-line_28\n"
+ "-line_29\n"
+ "-line_30\n"
+ "+line_27a\n",
+ diff_opts, pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+two_way_issue_3362_v2(apr_pool_t *pool)
+{
+ svn_diff_file_options_t *diff_opts = svn_diff_file_options_create(pool);
+
+ SVN_ERR(two_way_diff("issue-3362-1-v2",
+ "issue-3362-2-v2",
+ /* File 1 */
+ "line_1\n"
+ "line_2\n"
+ "line_3\n"
+ "line_4\n"
+ "line_5\n"
+ "line_6\n"
+ "line_7\n"
+ "line_8\n"
+ "line_9\n"
+ "line_10\n"
+ "line_11\n"
+ "line_12\n"
+ "line_13\n"
+ "line_14\n"
+ "line_15\n"
+ "line_16\n"
+ "line_17\n"
+ "line_18\n"
+ "line_19\n"
+ "line_20\n"
+ "line_21\n"
+ "line_22\n"
+ "line_23\n"
+ "line_24\n"
+ "line_25\n"
+ "line_26\n"
+ "line_27\n"
+ "line_28\n"
+ "line_29\n"
+ "line_30\n",
+ /* File 2 */
+ "line_1a\n"
+ "line_1b\n"
+ "line_1c\n"
+ "line_1\n"
+ "line_2\n"
+ "line_3\n"
+ "line_4\n"
+ "line_5a\n"
+ "line_5b\n"
+ "line_5c\n"
+ "line_6\n"
+ "line_7\n"
+ "line_8\n"
+ "line_9\n"
+ "line_10\n"
+ "line_11a\n"
+ "line_11b\n"
+ "line_11c\n"
+ "line_12\n"
+ "line_13\n"
+ "line_14\n"
+ "line_15\n"
+ "line_16\n"
+ "line_17\n"
+ "line_18\n"
+ "line_19a\n"
+ "line_19b\n"
+ "line_19c\n"
+ "line_20\n"
+ "line_21\n"
+ "line_22\n"
+ "line_23\n"
+ "line_24\n"
+ "line_25\n"
+ "line_26\n"
+ "line_27a\n"
+ "line_27b\n"
+ "line_27c\n"
+ "line_28\n"
+ "line_29\n"
+ "line_30\n",
+ /* Expected */
+ "--- issue-3362-1-v2" APR_EOL_STR
+ "+++ issue-3362-2-v2" APR_EOL_STR
+ "@@ -1,14 +1,21 @@" APR_EOL_STR
+ "+line_1a\n"
+ "+line_1b\n"
+ "+line_1c\n"
+ " line_1\n" /* 1.7 mem diff: line missing */
+ " line_2\n"
+ " line_3\n"
+ " line_4\n"
+ "-line_5\n"
+ "+line_5a\n"
+ "+line_5b\n"
+ "+line_5c\n"
+ " line_6\n"
+ " line_7\n"
+ " line_8\n"
+ " line_9\n"
+ " line_10\n"
+ "-line_11\n"
+ "+line_11a\n"
+ "+line_11b\n"
+ "+line_11c\n"
+ " line_12\n"
+ " line_13\n"
+ " line_14\n" /* 1.7 mem diff: line missing */
+ "@@ -16,7 +23,9 @@" APR_EOL_STR
+ " line_16\n"
+ " line_17\n"
+ " line_18\n"
+ "-line_19\n"
+ "+line_19a\n"
+ "+line_19b\n"
+ "+line_19c\n"
+ " line_20\n"
+ " line_21\n"
+ " line_22\n"
+ "@@ -24,7 +33,9 @@" APR_EOL_STR
+ " line_24\n"
+ " line_25\n"
+ " line_26\n"
+ "-line_27\n"
+ "+line_27a\n"
+ "+line_27b\n"
+ "+line_27c\n"
+ " line_28\n"
+ " line_29\n"
+ " line_30\n",
+ diff_opts, pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+three_way_double_add(apr_pool_t *pool)
+{
+ SVN_ERR(three_way_merge("doubleadd1", "doubleadd2", "doubleadd3",
+ "A\n"
+ "B\n"
+ "C\n"
+ "J\n"
+ "K\n"
+ "L",
+
+ "A\n"
+ "B\n"
+ "C\n"
+ "D\n" /* New line 1a */
+ "E\n" /* New line 2a */
+ "F\n" /* New line 3a*/
+ "J\n"
+ "K\n"
+ "L",
+
+ "A\n"
+ "B\n"
+ "O\n" /* Change C to O */
+ "P\n" /* New line 1b */
+ "Q\n" /* New line 2b */
+ "R\n" /* New line 3b */
+ "J\n"
+ "K\n"
+ "L",
+
+ /* With s/C/O/ we expect something like this,
+ but the current (1.9/trunk) result is a
+ succeeded merge to a combined result.
+
+ ### I'm guessing this result needs tweaks before it
+ will be a PASS. */
+ "A\n"
+ "B\n"
+ "<<<<<<< doubleadd2\n"
+ "C\n"
+ "D\n" /* New line 1a */
+ "E\n" /* New line 2a */
+ "F\n" /* New line 3a*/
+ "=======\n"
+ "O\n"
+ "P\n" /* New line 1b */
+ "Q\n" /* New line 2b */
+ "R\n" /* New line 3b */
+ ">>>>>>> doubleadd3\n"
+ "J\n"
+ "K\n"
+ "L",
+ NULL,
+ svn_diff_conflict_display_modified_original_latest,
+ pool));
+
+ SVN_ERR(three_way_merge("doubleadd1", "doubleadd2", "doubleadd3",
+ "A\n"
+ "B\n"
+ "C\n"
+ "J\n"
+ "K\n"
+ "L",
+
+ "A\n"
+ "B\n"
+ "C\n"
+ "D\n" /* New line 1a */
+ "E\n" /* New line 2a */
+ "F\n" /* New line 3a*/
+ "K\n"
+ "L",
+
+ "A\n"
+ "B\n"
+ "O\n" /* Change C to O */
+ "P\n" /* New line 1b */
+ "Q\n" /* New line 2b */
+ "R\n" /* New line 3b */
+ "J\n"
+ "K\n"
+ "L",
+
+ /* With s/C/O/ we expect something like this,
+ but the current (1.9/trunk) result is a
+ succeeded merge to a combined result.
+
+ ### I'm guessing this result needs tweaks before it
+ will be a PASS. */
+ "A\n"
+ "B\n"
+ "<<<<<<< doubleadd2\n"
+ "C\n"
+ "D\n" /* New line 1a */
+ "E\n" /* New line 2a */
+ "F\n" /* New line 3a*/
+ "=======\n"
+ "O\n"
+ "P\n" /* New line 1b */
+ "Q\n" /* New line 2b */
+ "R\n" /* New line 3b */
+ "J\n"
+ ">>>>>>> doubleadd3\n"
+ "K\n"
+ "L",
+ NULL,
+ svn_diff_conflict_display_modified_original_latest,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* ========================================================================== */
+
+
+static int max_threads = 4;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(dump_core,
+ "these dump core"),
+ SVN_TEST_PASS2(test_two_way_unified,
+ "2-way unified diff and trivial merge"),
+ SVN_TEST_PASS2(test_two_way_unified_suspect,
+ "2-way unified diff where output is suspect"),
+ SVN_TEST_PASS2(test_three_way_merge_no_overlap,
+ "3-way merge, non-overlapping changes"),
+ SVN_TEST_PASS2(test_three_way_merge_with_overlap,
+ "3-way merge, non-conflicting overlapping changes"),
+ SVN_TEST_PASS2(test_three_way_merge_with_conflict,
+ "3-way merge, conflicting overlapping changes"),
+ SVN_TEST_PASS2(random_trivial_merge,
+ "random trivial merge"),
+ SVN_TEST_PASS2(random_three_way_merge,
+ "random 3-way merge"),
+ SVN_TEST_PASS2(merge_with_part_already_present,
+ "merge with part already present"),
+ SVN_TEST_PASS2(merge_adjacent_changes,
+ "3-way merge, adjacent changes"),
+ SVN_TEST_PASS2(test_three_way_merge_conflict_styles,
+ "3-way merge with conflict styles"),
+ SVN_TEST_PASS2(test_diff4,
+ "4-way merge; see variance-adjusted-patching.html"),
+ SVN_TEST_PASS2(test_norm_offset,
+ "offset of the normalized token"),
+ SVN_TEST_PASS2(test_identical_suffix,
+ "identical suffix starts at the boundary of a chunk"),
+ SVN_TEST_PASS2(test_token_compare,
+ "compare tokens at the chunk boundary"),
+ SVN_TEST_PASS2(two_way_issue_3362_v1,
+ "2-way issue #3362 test v1"),
+ SVN_TEST_PASS2(two_way_issue_3362_v2,
+ "2-way issue #3362 test v2"),
+ SVN_TEST_XFAIL2(three_way_double_add,
+ "3-way merge, double add"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_diff/parse-diff-test.c b/subversion/tests/libsvn_diff/parse-diff-test.c
new file mode 100644
index 0000000..d41cfa7
--- /dev/null
+++ b/subversion/tests/libsvn_diff/parse-diff-test.c
@@ -0,0 +1,1138 @@
+/*
+ * Regression tests for the diff/diff3 library -- parsing unidiffs
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+
+#include "../svn_test.h"
+
+#include "svn_diff.h"
+#include "svn_hash.h"
+#include "svn_mergeinfo.h"
+#include "svn_pools.h"
+#include "svn_utf.h"
+
+/* Used to terminate lines in large multi-line string literals. */
+#define NL APR_EOL_STR
+
+static const char *unidiff =
+ "Index: A/mu (deleted)" NL
+ "===================================================================" NL
+ "Index: A/C/gamma" NL
+ "===================================================================" NL
+ "--- A/C/gamma\t(revision 2)" NL
+ "+++ A/C/gamma\t(working copy)" NL
+ "@@ -1 +1,2 @@" NL
+ " This is the file 'gamma'." NL
+ "+some more bytes to 'gamma'" NL
+ "Index: A/D/gamma" NL
+ "===================================================================" NL
+ "--- A/D/gamma.orig" NL
+ "+++ A/D/gamma" NL
+ "@@ -1,2 +1 @@" NL
+ " This is the file 'gamma'." NL
+ "-some less bytes to 'gamma'" NL
+ "" NL
+ "Property changes on: mu-ng" NL
+ "___________________________________________________________________" NL
+ "Name: newprop" NL
+ " + newpropval" NL
+ "Name: svn:mergeinfo" NL
+ "" NL;
+
+static const char *git_unidiff =
+ "Index: A/mu (deleted)" NL
+ "===================================================================" NL
+ "diff --git a/A/mu b/A/mu" NL
+ "deleted file mode 100644" NL
+ "Index: A/C/gamma" NL
+ "===================================================================" NL
+ "diff --git a/A/C/gamma b/A/C/gamma" NL
+ "old mode 100644" NL
+ "new mode 100755" NL
+ "--- a/A/C/gamma\t(revision 2)" NL
+ "+++ b/A/C/gamma\t(working copy)" NL
+ "@@ -1 +1,2 @@" NL
+ " This is the file 'gamma'." NL
+ "+some more bytes to 'gamma'" NL
+ "Index: iota" NL
+ "===================================================================" NL
+ "diff --git a/iota b/iota.copied" NL
+ "copy from iota" NL
+ "copy to iota.copied" NL
+ "Index: new" NL
+ "===================================================================" NL
+ "diff --git a/new b/new" NL
+ "new file mode 100644" NL
+ "" NL;
+
+static const char *git_tree_and_text_unidiff =
+ "Index: iota.copied" NL
+ "===================================================================" NL
+ "diff --git a/iota b/iota.copied" NL
+ "old mode 100644" NL
+ "new mode 100755" NL
+ "copy from iota" NL
+ "copy to iota.copied" NL
+ "--- a/iota\t(revision 2)" NL
+ "+++ b/iota.copied\t(working copy)" NL
+ "@@ -1 +1,2 @@" NL
+ " This is the file 'iota'." NL
+ "+some more bytes to 'iota'" NL
+ "Index: A/mu.moved" NL
+ "===================================================================" NL
+ "diff --git a/A/mu b/A/mu.moved" NL
+ "old mode 100644" NL
+ "new mode 100755" NL
+ "rename from A/mu" NL
+ "rename to A/mu.moved" NL
+ "--- a/A/mu\t(revision 2)" NL
+ "+++ b/A/mu.moved\t(working copy)" NL
+ "@@ -1 +1,2 @@" NL
+ " This is the file 'mu'." NL
+ "+some more bytes to 'mu'" NL
+ "Index: new" NL
+ "===================================================================" NL
+ "diff --git a/new b/new" NL
+ "new file mode 100644" NL
+ "--- /dev/null\t(revision 0)" NL
+ "+++ b/new\t(working copy)" NL
+ "@@ -0,0 +1 @@" NL
+ "+This is the file 'new'." NL
+ "Index: A/B/lambda" NL
+ "===================================================================" NL
+ "diff --git a/A/B/lambda b/A/B/lambda" NL
+ "deleted file mode 100755" NL
+ "--- a/A/B/lambda\t(revision 2)" NL
+ "+++ /dev/null\t(working copy)" NL
+ "@@ -1 +0,0 @@" NL
+ "-This is the file 'lambda'." NL
+ "" NL;
+
+ /* Only the last git diff header is valid. The other ones either misses a
+ * path element or has noise between lines that must be continous. See
+ * issue #3809. */
+static const char *bad_git_diff_header =
+ "Index: iota.copied" NL
+ "===================================================================" NL
+ "diff --git a/foo1 b/" NL
+ "diff --git a/foo2 b" NL
+ "diff --git a/foo3 " NL
+ "diff --git a/foo3 " NL
+ "diff --git foo4 b/foo4" NL
+ "diff --git a/foo5 b/foo5" NL
+ "random noise" NL
+ "diff --git a/foo6 b/foo6" NL
+ "copy from foo6" NL
+ "random noise" NL
+ "copy to foo6" NL
+ "diff --git a/foo6 b/foo6" NL
+ "copy from foo6" NL
+ "diff --git a/iota b/iota.copied" NL
+ "copy from iota" NL
+ "copy to iota.copied" NL
+ "@@ -1 +1,2 @@" NL
+ " This is the file 'iota'." NL
+ "+some more bytes to 'iota'" NL
+ "" NL;
+
+ static const char *property_unidiff =
+ "Index: iota" NL
+ "===================================================================" NL
+ "--- iota" NL
+ "+++ iota" NL
+ "" NL
+ "Property changes on: iota" NL
+ "___________________________________________________________________" NL
+ "Deleted: prop_del" NL
+ "## -1 +0,0 ##" NL
+ "-value" NL
+ "" NL
+ "Property changes on: iota" NL
+ "___________________________________________________________________" NL
+ "Added: prop_add" NL
+ "## -0,0 +1 ##" NL
+ "+value" NL
+ "" NL
+ "Property changes on: iota" NL
+ "___________________________________________________________________" NL
+ "Modified: prop_mod" NL
+ "## -1,4 +1,4 ##" NL
+ "-value" NL
+ "+new value" NL
+ " context" NL
+ " context" NL
+ " context" NL
+ "## -10,4 +10,4 ##" NL
+ " context" NL
+ " context" NL
+ " context" NL
+ "-value" NL
+ "+new value" NL
+ "" NL;
+
+ /* ### Add edge cases like context lines stripped from leading whitespaces
+ * ### that starts with 'Added: ', 'Deleted: ' or 'Modified: '. */
+ static const char *property_and_text_unidiff =
+ "Index: iota" NL
+ "===================================================================" NL
+ "--- iota" NL
+ "+++ iota" NL
+ "@@ -1 +1,2 @@" NL
+ " This is the file 'iota'." NL
+ "+some more bytes to 'iota'" NL
+ "" NL
+ "Property changes on: iota" NL
+ "___________________________________________________________________" NL
+ "Added: prop_add" NL
+ "## -0,0 +1 ##" NL
+ "+value" NL;
+
+ /* A unidiff containing diff symbols in the body of the hunks. */
+ static const char *diff_symbols_in_prop_unidiff =
+ "Index: iota" NL
+ "===================================================================" NL
+ "--- iota" NL
+ "+++ iota" NL
+ "" NL
+ "Property changes on: iota" NL
+ "___________________________________________________________________" NL
+ "Added: prop_add" NL
+ "## -0,0 +1,3 ##" NL
+ "+Added: bogus_prop" NL
+ "+## -0,0 +20 ##" NL
+ "+@@ -1,2 +0,0 @@" NL
+ "Deleted: prop_del" NL
+ "## -1,2 +0,0 ##" NL
+ "---- iota" NL
+ "-+++ iota" NL
+ "Modified: non-existent" NL
+ "blah, just noise - no valid hunk header" NL
+ "Modified: prop_mod" NL
+ "## -1,4 +1,4 ##" NL
+ "-## -1,2 +1,2 ##" NL
+ "+## -1,3 +1,3 ##" NL
+ " ## -1,5 -0,0 ##" NL
+ " @@ -1,5 -0,0 @@" NL
+ " Modified: prop_mod" NL
+ "## -10,4 +10,4 ##" NL
+ " context" NL
+ " context" NL
+ " context" NL
+ "-## -0,0 +1 ##" NL
+ "+## -1,2 +1,4 ##" NL
+ "" NL;
+
+ /* A unidiff containing paths with spaces. */
+ static const char *path_with_spaces_unidiff =
+ "diff --git a/path 1 b/path 1" NL
+ "new file mode 100644" NL
+ "diff --git a/path one 1 b/path one 1" NL
+ "new file mode 100644" NL
+ "diff --git a/dir/ b/path b/dir/ b/path" NL
+ "new file mode 100644" NL
+ "diff --git a/ b/path 1 b/ b/path 1" NL
+ "new file mode 100644" NL;
+
+static const char *unidiff_lacking_trailing_eol =
+ "Index: A/C/gamma" NL
+ "===================================================================" NL
+ "--- A/C/gamma\t(revision 2)" NL
+ "+++ A/C/gamma\t(working copy)" NL
+ "@@ -1 +1,2 @@" NL
+ " This is the file 'gamma'." NL
+ "+some more bytes to 'gamma'"; /* Don't add NL after this line */
+
+static const char *unidiff_with_mergeinfo =
+ "Index: A/C" NL
+ "===================================================================" NL
+ "--- A/C\t(revision 2)" NL
+ "+++ A/C\t(working copy)" NL
+ "Modified: svn:ignore" NL
+ "## -7,6 +7,7 ##" NL
+ " configure" NL
+ " libtool" NL
+ " .gdb_history" NL
+ "+.swig_checked" NL
+ " *.orig" NL
+ " *.rej" NL
+ " TAGS" NL
+ "Modified: svn:mergeinfo" NL
+ "## -0,1 +0,3 ##" NL
+ " Reverse-merged /subversion/branches/1.6.x-r935631:r952683-955333" NL
+ " /subversion/branches/nfc-nfd-aware-client:r870276,870376 をマージã—ã¾ã—ãŸ"NL
+ " Fusionné /subversion/branches/1.7.x-r1507044:r1507300-1511568" NL
+ " Merged /subversion/branches/1.8.x-openssl-dirs:r1535139" NL;
+/* The above diff intentionally contains i18n versions of some lines. */
+
+/* Create a PATCH_FILE containing the contents of DIFF. */
+static svn_error_t *
+create_patch_file(svn_patch_file_t **patch_file,
+ const char *diff, apr_pool_t *pool)
+{
+ apr_size_t bytes;
+ apr_size_t len;
+ const char *path;
+ apr_file_t *apr_file;
+
+ /* Create a patch file. */
+ SVN_ERR(svn_io_open_unique_file3(&apr_file, &path, NULL,
+ svn_io_file_del_on_pool_cleanup,
+ pool, pool));
+
+ bytes = strlen(diff);
+ SVN_ERR(svn_io_file_write_full(apr_file, diff, bytes, &len, pool));
+ if (len != bytes)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Cannot write to '%s'", path);
+ SVN_ERR(svn_io_file_close(apr_file, pool));
+ SVN_ERR(svn_diff_open_patch_file(patch_file, path, pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* svn_stream_readline() with hunk reader semantics */
+static svn_error_t *
+stream_readline_diff(svn_stream_t *stream,
+ svn_stringbuf_t **buf,
+ const char *eol,
+ svn_boolean_t *eof,
+ apr_pool_t *result_pool)
+{
+ SVN_ERR(svn_stream_readline(stream, buf, eol, eof, result_pool));
+
+ /* Hunks are only at EOF after they are completely read, even if
+ they don't have a final EOL in the text */
+ if (*eof && (*buf)->len)
+ *eof = FALSE;
+
+ return SVN_NO_ERROR;
+}
+
+/* Check that reading a line from HUNK equals what's inside EXPECTED.
+ * If ORIGINAL is TRUE, read the original hunk text; else, read the
+ * modified hunk text.  EXPECTED is consumed through
+ * stream_readline_diff() so both sides share the same EOF semantics. */
+static svn_error_t *
+check_content(svn_diff_hunk_t *hunk, svn_boolean_t original,
+              const char *expected, apr_pool_t *pool)
+{
+  svn_stream_t *exp;
+  svn_stringbuf_t *exp_buf;
+  svn_stringbuf_t *hunk_buf;
+  svn_boolean_t exp_eof;
+  svn_boolean_t hunk_eof;
+
+  exp = svn_stream_from_string(svn_string_create(expected, pool),
+                               pool);
+
+  /* Compare line by line; both streams must agree on every line and
+     reach EOF at the same point. */
+  while (TRUE)
+    {
+      SVN_ERR(stream_readline_diff(exp, &exp_buf, NL, &exp_eof, pool));
+      if (original)
+        SVN_ERR(svn_diff_hunk_readline_original_text(hunk, &hunk_buf, NULL,
+                                                     &hunk_eof, pool, pool));
+      else
+        SVN_ERR(svn_diff_hunk_readline_modified_text(hunk, &hunk_buf, NULL,
+                                                     &hunk_eof, pool, pool));
+
+      SVN_TEST_ASSERT(exp_eof == hunk_eof);
+      if (exp_eof)
+        break;
+      SVN_TEST_STRING_ASSERT(exp_buf->data, hunk_buf->data);
+    }
+
+  /* If the hunk has not hit EOF, its buffer must be drained. */
+  if (!hunk_eof)
+    SVN_TEST_ASSERT(hunk_buf->len == 0);
+
+  return SVN_NO_ERROR;
+}
+
+/* Parse the plain "unidiff" sample: two patches of one hunk each.
+ * The loop runs twice, the second time with REVERSE set, verifying
+ * that old/new filenames and original/modified hunk texts swap roles
+ * when a diff is parsed in reverse. */
+static svn_error_t *
+test_parse_unidiff(apr_pool_t *pool)
+{
+  svn_patch_file_t *patch_file;
+  svn_boolean_t reverse;
+  svn_boolean_t ignore_whitespace;
+  int i;
+  apr_pool_t *iterpool;
+
+  reverse = FALSE;
+  ignore_whitespace = FALSE;
+  iterpool = svn_pool_create(pool);
+  for (i = 0; i < 2; i++)
+    {
+      svn_patch_t *patch;
+      svn_diff_hunk_t *hunk;
+
+      svn_pool_clear(iterpool);
+
+      SVN_ERR(create_patch_file(&patch_file, unidiff, pool));
+
+      /* We have two patches with one hunk each.
+       * Parse the first patch. */
+      SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file, reverse,
+                                        ignore_whitespace, iterpool,
+                                        iterpool));
+      SVN_TEST_ASSERT(patch);
+      SVN_TEST_STRING_ASSERT(patch->old_filename, "A/C/gamma");
+      SVN_TEST_STRING_ASSERT(patch->new_filename, "A/C/gamma");
+      SVN_TEST_ASSERT(patch->hunks->nelts == 1);
+
+      /* In reverse mode the "original" text is the modified side, hence
+         the ! reverse / reverse flags passed to check_content(). */
+      hunk = APR_ARRAY_IDX(patch->hunks, 0, svn_diff_hunk_t *);
+      SVN_ERR(check_content(hunk, ! reverse,
+                            "This is the file 'gamma'." NL,
+                            pool));
+
+      SVN_ERR(check_content(hunk, reverse,
+                            "This is the file 'gamma'." NL
+                            "some more bytes to 'gamma'" NL,
+                            pool));
+
+      /* Parse the second patch. */
+      SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file, reverse,
+                                        ignore_whitespace, pool, pool));
+      SVN_TEST_ASSERT(patch);
+      if (reverse)
+        {
+          SVN_TEST_STRING_ASSERT(patch->new_filename, "A/D/gamma.orig");
+          SVN_TEST_STRING_ASSERT(patch->old_filename, "A/D/gamma");
+        }
+      else
+        {
+          SVN_TEST_STRING_ASSERT(patch->old_filename, "A/D/gamma.orig");
+          SVN_TEST_STRING_ASSERT(patch->new_filename, "A/D/gamma");
+        }
+      SVN_TEST_ASSERT(patch->hunks->nelts == 1);
+
+      hunk = APR_ARRAY_IDX(patch->hunks, 0, svn_diff_hunk_t *);
+      SVN_ERR(check_content(hunk, ! reverse,
+                            "This is the file 'gamma'." NL
+                            "some less bytes to 'gamma'" NL,
+                            pool));
+
+      SVN_ERR(check_content(hunk, reverse,
+                            "This is the file 'gamma'." NL,
+                            pool));
+
+      /* Second iteration re-parses the same diff in reverse. */
+      reverse = !reverse;
+      SVN_ERR(svn_diff_close_patch_file(patch_file, pool));
+    }
+  svn_pool_destroy(iterpool);
+  return SVN_NO_ERROR;
+}
+
+/* Parse the git-style sample diff: a deleted empty file, a modified
+ * file (with executable bit change), a copied empty file and an added
+ * empty file.  Verifies filenames, operations, hunk counts and the
+ * executable/symlink tristate bits. */
+static svn_error_t *
+test_parse_git_diff(apr_pool_t *pool)
+{
+  /* ### Should we check for reversed diffs? */
+
+  svn_patch_file_t *patch_file;
+  svn_patch_t *patch;
+  svn_diff_hunk_t *hunk;
+
+  SVN_ERR(create_patch_file(&patch_file, git_unidiff, pool));
+
+  /* Parse a deleted empty file */
+  SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+                                    FALSE, /* reverse */
+                                    FALSE, /* ignore_whitespace */
+                                    pool, pool));
+  SVN_TEST_ASSERT(patch);
+  SVN_TEST_STRING_ASSERT(patch->old_filename, "A/mu");
+  SVN_TEST_STRING_ASSERT(patch->new_filename, "A/mu");
+  SVN_TEST_ASSERT(patch->operation == svn_diff_op_deleted);
+  SVN_TEST_ASSERT(patch->hunks->nelts == 0);
+
+  /* Parse a modified file. */
+  SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+                                    FALSE, /* reverse */
+                                    FALSE, /* ignore_whitespace */
+                                    pool, pool));
+  SVN_TEST_ASSERT(patch);
+  SVN_TEST_STRING_ASSERT(patch->old_filename, "A/C/gamma");
+  SVN_TEST_STRING_ASSERT(patch->new_filename, "A/C/gamma");
+  SVN_TEST_ASSERT(patch->operation == svn_diff_op_modified);
+  SVN_TEST_ASSERT(patch->hunks->nelts == 1);
+  /* The sample flips the executable bit from off to on. */
+  SVN_TEST_ASSERT(patch->old_executable_bit == svn_tristate_false);
+  SVN_TEST_ASSERT(patch->new_executable_bit == svn_tristate_true);
+  SVN_TEST_ASSERT(patch->old_symlink_bit == svn_tristate_false);
+  SVN_TEST_ASSERT(patch->new_symlink_bit == svn_tristate_false);
+
+  hunk = APR_ARRAY_IDX(patch->hunks, 0, svn_diff_hunk_t *);
+
+  SVN_ERR(check_content(hunk, TRUE,
+                        "This is the file 'gamma'." NL,
+                        pool));
+
+  SVN_ERR(check_content(hunk, FALSE,
+                        "This is the file 'gamma'." NL
+                        "some more bytes to 'gamma'" NL,
+                        pool));
+
+  /* Parse a copied empty file */
+  SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+                                    FALSE, /* reverse */
+                                    FALSE, /* ignore_whitespace */
+                                    pool, pool));
+
+  SVN_TEST_ASSERT(patch);
+  SVN_TEST_STRING_ASSERT(patch->old_filename, "iota");
+  SVN_TEST_STRING_ASSERT(patch->new_filename, "iota.copied");
+  SVN_TEST_ASSERT(patch->operation == svn_diff_op_copied);
+  SVN_TEST_ASSERT(patch->hunks->nelts == 0);
+
+  /* Parse an added empty file */
+  SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+                                    FALSE, /* reverse */
+                                    FALSE, /* ignore_whitespace */
+                                    pool, pool));
+
+  SVN_TEST_ASSERT(patch);
+  SVN_TEST_STRING_ASSERT(patch->old_filename, "new");
+  SVN_TEST_STRING_ASSERT(patch->new_filename, "new");
+  SVN_TEST_ASSERT(patch->operation == svn_diff_op_added);
+  SVN_TEST_ASSERT(patch->hunks->nelts == 0);
+  /* For an added file there is no old side, so the old bits are unknown. */
+  SVN_TEST_ASSERT(patch->old_executable_bit == svn_tristate_unknown);
+  SVN_TEST_ASSERT(patch->new_executable_bit == svn_tristate_false);
+  SVN_TEST_ASSERT(patch->old_symlink_bit == svn_tristate_unknown);
+  SVN_TEST_ASSERT(patch->new_symlink_bit == svn_tristate_false);
+
+  SVN_ERR(svn_diff_close_patch_file(patch_file, pool));
+
+  return SVN_NO_ERROR;
+}
+
+/* Parse the git sample that combines tree changes with text changes:
+ * a copied file with modifications, a moved file with modifications,
+ * an added file with content, and a deleted file with content. */
+static svn_error_t *
+test_parse_git_tree_and_text_diff(apr_pool_t *pool)
+{
+  /* ### Should we check for reversed diffs? */
+
+  svn_patch_file_t *patch_file;
+  svn_patch_t *patch;
+  svn_diff_hunk_t *hunk;
+
+  SVN_ERR(create_patch_file(&patch_file, git_tree_and_text_unidiff, pool));
+
+  /* Parse a copied file with text modifications. */
+  SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+                                    FALSE, /* reverse */
+                                    FALSE, /* ignore_whitespace */
+                                    pool, pool));
+  SVN_TEST_ASSERT(patch);
+  SVN_TEST_STRING_ASSERT(patch->old_filename, "iota");
+  SVN_TEST_STRING_ASSERT(patch->new_filename, "iota.copied");
+  SVN_TEST_ASSERT(patch->old_executable_bit == svn_tristate_false);
+  SVN_TEST_ASSERT(patch->new_executable_bit == svn_tristate_true);
+  SVN_TEST_ASSERT(patch->old_symlink_bit == svn_tristate_false);
+  SVN_TEST_ASSERT(patch->new_symlink_bit == svn_tristate_false);
+  SVN_TEST_ASSERT(patch->operation == svn_diff_op_copied);
+  SVN_TEST_ASSERT(patch->hunks->nelts == 1);
+
+  hunk = APR_ARRAY_IDX(patch->hunks, 0, svn_diff_hunk_t *);
+
+  SVN_ERR(check_content(hunk, TRUE,
+                        "This is the file 'iota'." NL,
+                        pool));
+
+  SVN_ERR(check_content(hunk, FALSE,
+                        "This is the file 'iota'." NL
+                        "some more bytes to 'iota'" NL,
+                        pool));
+
+  /* Parse a moved file with text modifications. */
+  SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+                                    FALSE, /* reverse */
+                                    FALSE, /* ignore_whitespace */
+                                    pool, pool));
+  SVN_TEST_ASSERT(patch);
+  SVN_TEST_STRING_ASSERT(patch->old_filename, "A/mu");
+  SVN_TEST_STRING_ASSERT(patch->new_filename, "A/mu.moved");
+  SVN_TEST_ASSERT(patch->old_executable_bit == svn_tristate_false);
+  SVN_TEST_ASSERT(patch->new_executable_bit == svn_tristate_true);
+  SVN_TEST_ASSERT(patch->old_symlink_bit == svn_tristate_false);
+  SVN_TEST_ASSERT(patch->new_symlink_bit == svn_tristate_false);
+  SVN_TEST_ASSERT(patch->operation == svn_diff_op_moved);
+  SVN_TEST_ASSERT(patch->hunks->nelts == 1);
+
+  hunk = APR_ARRAY_IDX(patch->hunks, 0, svn_diff_hunk_t *);
+
+  SVN_ERR(check_content(hunk, TRUE,
+                        "This is the file 'mu'." NL,
+                        pool));
+
+  SVN_ERR(check_content(hunk, FALSE,
+                        "This is the file 'mu'." NL
+                        "some more bytes to 'mu'" NL,
+                        pool));
+
+  /* Parse an added file with content; its old side is /dev/null and
+     the original hunk text is empty. */
+  SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+                                    FALSE, /* reverse */
+                                    FALSE, /* ignore_whitespace */
+                                    pool, pool));
+  SVN_TEST_ASSERT(patch);
+  SVN_TEST_STRING_ASSERT(patch->old_filename, "/dev/null");
+  SVN_TEST_STRING_ASSERT(patch->new_filename, "new");
+  SVN_TEST_ASSERT(patch->operation == svn_diff_op_added);
+  SVN_TEST_ASSERT(patch->hunks->nelts == 1);
+
+  hunk = APR_ARRAY_IDX(patch->hunks, 0, svn_diff_hunk_t *);
+
+  SVN_ERR(check_content(hunk, TRUE,
+                        "",
+                        pool));
+
+  SVN_ERR(check_content(hunk, FALSE,
+                        "This is the file 'new'." NL,
+                        pool));
+
+  /* Parse a deleted file with content; its new side is /dev/null and
+     the modified hunk text is empty. */
+  SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+                                    FALSE, /* reverse */
+                                    FALSE, /* ignore_whitespace */
+                                    pool, pool));
+  SVN_TEST_ASSERT(patch);
+  SVN_TEST_STRING_ASSERT(patch->old_filename, "A/B/lambda");
+  SVN_TEST_STRING_ASSERT(patch->new_filename, "/dev/null");
+  SVN_TEST_ASSERT(patch->operation == svn_diff_op_deleted);
+  SVN_TEST_ASSERT(patch->hunks->nelts == 1);
+  SVN_TEST_ASSERT(patch->old_executable_bit == svn_tristate_true);
+  SVN_TEST_ASSERT(patch->new_executable_bit == svn_tristate_unknown);
+  SVN_TEST_ASSERT(patch->old_symlink_bit == svn_tristate_false);
+  SVN_TEST_ASSERT(patch->new_symlink_bit == svn_tristate_unknown);
+
+  hunk = APR_ARRAY_IDX(patch->hunks, 0, svn_diff_hunk_t *);
+
+  SVN_ERR(check_content(hunk, TRUE,
+                        "This is the file 'lambda'." NL,
+                        pool));
+
+  SVN_ERR(check_content(hunk, FALSE,
+                        "",
+                        pool));
+
+  SVN_ERR(svn_diff_close_patch_file(patch_file, pool));
+  return SVN_NO_ERROR;
+}
+
+/* Tests to parse non-valid git diffs.  The parser should still find the
+ * one well-formed copied-file patch in the sample and extract its hunk. */
+static svn_error_t *
+test_bad_git_diff_headers(apr_pool_t *pool)
+{
+  svn_patch_file_t *patch_file;
+  svn_patch_t *patch;
+  svn_diff_hunk_t *hunk;
+
+  SVN_ERR(create_patch_file(&patch_file, bad_git_diff_header, pool));
+
+  SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+                                    FALSE, /* reverse */
+                                    FALSE, /* ignore_whitespace */
+                                    pool, pool));
+  SVN_TEST_ASSERT(patch);
+  SVN_TEST_STRING_ASSERT(patch->old_filename, "iota");
+  SVN_TEST_STRING_ASSERT(patch->new_filename, "iota.copied");
+  SVN_TEST_ASSERT(patch->operation == svn_diff_op_copied);
+  SVN_TEST_ASSERT(patch->hunks->nelts == 1);
+
+  hunk = APR_ARRAY_IDX(patch->hunks, 0, svn_diff_hunk_t *);
+
+  SVN_ERR(check_content(hunk, TRUE,
+                        "This is the file 'iota'." NL,
+                        pool));
+
+  SVN_ERR(check_content(hunk, FALSE,
+                        "This is the file 'iota'." NL
+                        "some more bytes to 'iota'" NL,
+                        pool));
+
+  SVN_ERR(svn_diff_close_patch_file(patch_file, pool));
+  return SVN_NO_ERROR;
+}
+
+/* Tests to parse a diff with three property changes, one is added, one is
+ * modified and one is deleted.  Property hunks land in
+ * patch->prop_patches, keyed by property name. */
+static svn_error_t *
+test_parse_property_diff(apr_pool_t *pool)
+{
+  svn_patch_file_t *patch_file;
+  svn_patch_t *patch;
+  svn_prop_patch_t *prop_patch;
+  svn_diff_hunk_t *hunk;
+  apr_array_header_t *hunks;
+
+  SVN_ERR(create_patch_file(&patch_file, property_unidiff, pool));
+
+  SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+                                    FALSE, /* reverse */
+                                    FALSE, /* ignore_whitespace */
+                                    pool, pool));
+  SVN_TEST_ASSERT(patch);
+  SVN_TEST_STRING_ASSERT(patch->old_filename, "iota");
+  SVN_TEST_STRING_ASSERT(patch->new_filename, "iota");
+  /* A pure property diff yields no text hunks. */
+  SVN_TEST_ASSERT(patch->hunks->nelts == 0);
+  SVN_TEST_ASSERT(apr_hash_count(patch->prop_patches) == 3);
+
+  /* Check the deleted property */
+  prop_patch = apr_hash_get(patch->prop_patches, "prop_del",
+                            APR_HASH_KEY_STRING);
+
+  SVN_TEST_ASSERT(prop_patch->operation == svn_diff_op_deleted);
+  hunks = prop_patch->hunks;
+
+  SVN_TEST_ASSERT(hunks->nelts == 1);
+  hunk = APR_ARRAY_IDX(hunks, 0 , svn_diff_hunk_t *);
+
+  /* Deleted property: value on the original side, nothing on the
+     modified side. */
+  SVN_ERR(check_content(hunk, TRUE,
+                        "value" NL,
+                        pool));
+
+  SVN_ERR(check_content(hunk, FALSE,
+                        "",
+                        pool));
+
+  /* Check the added property */
+  prop_patch = apr_hash_get(patch->prop_patches, "prop_add",
+                            APR_HASH_KEY_STRING);
+
+  SVN_TEST_STRING_ASSERT(prop_patch->name, "prop_add");
+  SVN_TEST_ASSERT(prop_patch->operation == svn_diff_op_added);
+  hunks = prop_patch->hunks;
+
+  SVN_TEST_ASSERT(hunks->nelts == 1);
+  hunk = APR_ARRAY_IDX(hunks, 0 , svn_diff_hunk_t *);
+
+  SVN_ERR(check_content(hunk, TRUE,
+                        "",
+                        pool));
+
+  SVN_ERR(check_content(hunk, FALSE,
+                        "value" NL,
+                        pool));
+
+  /* Check the modified property */
+  prop_patch = apr_hash_get(patch->prop_patches, "prop_mod",
+                            APR_HASH_KEY_STRING);
+
+  SVN_TEST_ASSERT(prop_patch->operation == svn_diff_op_modified);
+  hunks = prop_patch->hunks;
+
+  /* The modified property carries two hunks. */
+  SVN_TEST_ASSERT(hunks->nelts == 2);
+  hunk = APR_ARRAY_IDX(hunks, 0 , svn_diff_hunk_t *);
+
+  SVN_ERR(check_content(hunk, TRUE,
+                        "value" NL
+                        "context" NL
+                        "context" NL
+                        "context" NL,
+                        pool));
+
+  SVN_ERR(check_content(hunk, FALSE,
+                        "new value" NL
+                        "context" NL
+                        "context" NL
+                        "context" NL,
+                        pool));
+
+  hunk = APR_ARRAY_IDX(hunks, 1 , svn_diff_hunk_t *);
+
+  SVN_ERR(check_content(hunk, TRUE,
+                        "context" NL
+                        "context" NL
+                        "context" NL
+                        "value" NL,
+                        pool));
+
+  SVN_ERR(check_content(hunk, FALSE,
+                        "context" NL
+                        "context" NL
+                        "context" NL
+                        "new value" NL,
+                        pool));
+
+  SVN_ERR(svn_diff_close_patch_file(patch_file, pool));
+  return SVN_NO_ERROR;
+}
+
+/* Parse a diff that modifies both the text and a property of the same
+ * file, and verify that the text hunk and the property hunk are both
+ * recorded on the single resulting patch. */
+static svn_error_t *
+test_parse_property_and_text_diff(apr_pool_t *pool)
+{
+  svn_patch_file_t *patch_file;
+  svn_patch_t *patch;
+  svn_prop_patch_t *prop_patch;
+  svn_diff_hunk_t *hunk;
+  apr_array_header_t *hunks;
+
+  SVN_ERR(create_patch_file(&patch_file, property_and_text_unidiff, pool));
+
+  SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+                                    FALSE, /* reverse */
+                                    FALSE, /* ignore_whitespace */
+                                    pool, pool));
+  SVN_TEST_ASSERT(patch);
+  SVN_TEST_STRING_ASSERT(patch->old_filename, "iota");
+  SVN_TEST_STRING_ASSERT(patch->new_filename, "iota");
+  SVN_TEST_ASSERT(patch->hunks->nelts == 1);
+  SVN_TEST_ASSERT(apr_hash_count(patch->prop_patches) == 1);
+
+  /* Check contents of text hunk */
+  hunk = APR_ARRAY_IDX(patch->hunks, 0, svn_diff_hunk_t *);
+
+  SVN_ERR(check_content(hunk, TRUE,
+                        "This is the file 'iota'." NL,
+                        pool));
+
+  SVN_ERR(check_content(hunk, FALSE,
+                        "This is the file 'iota'." NL
+                        "some more bytes to 'iota'" NL,
+                        pool));
+
+  /* Check the added property */
+  prop_patch = apr_hash_get(patch->prop_patches, "prop_add",
+                            APR_HASH_KEY_STRING);
+  SVN_TEST_ASSERT(prop_patch->operation == svn_diff_op_added);
+
+  hunks = prop_patch->hunks;
+  SVN_TEST_ASSERT(hunks->nelts == 1);
+  hunk = APR_ARRAY_IDX(hunks, 0 , svn_diff_hunk_t *);
+
+  SVN_ERR(check_content(hunk, TRUE,
+                        "",
+                        pool));
+
+  SVN_ERR(check_content(hunk, FALSE,
+                        "value" NL,
+                        pool));
+
+  SVN_ERR(svn_diff_close_patch_file(patch_file, pool));
+  return SVN_NO_ERROR;
+}
+
+/* Parse a property diff whose values themselves contain diff syntax
+ * ("## ... ##", "@@ ... @@", "---", "+++", "Added:"/"Modified:" lines).
+ * The parser must treat those lines as property content, not as new
+ * hunk or patch headers. */
+static svn_error_t *
+test_parse_diff_symbols_in_prop_unidiff(apr_pool_t *pool)
+{
+  svn_patch_t *patch;
+  svn_patch_file_t *patch_file;
+  svn_prop_patch_t *prop_patch;
+  svn_diff_hunk_t *hunk;
+  apr_array_header_t *hunks;
+
+  SVN_ERR(create_patch_file(&patch_file, diff_symbols_in_prop_unidiff, pool));
+
+  SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+                                    FALSE, /* reverse */
+                                    FALSE, /* ignore_whitespace */
+                                    pool, pool));
+  SVN_TEST_ASSERT(patch);
+  SVN_TEST_STRING_ASSERT(patch->old_filename, "iota");
+  SVN_TEST_STRING_ASSERT(patch->new_filename, "iota");
+  SVN_TEST_ASSERT(patch->hunks->nelts == 0);
+  SVN_TEST_ASSERT(apr_hash_count(patch->prop_patches) == 3);
+
+  /* Check the added property */
+  prop_patch = apr_hash_get(patch->prop_patches, "prop_add",
+                            APR_HASH_KEY_STRING);
+  SVN_TEST_ASSERT(prop_patch->operation == svn_diff_op_added);
+
+  hunks = prop_patch->hunks;
+  SVN_TEST_ASSERT(hunks->nelts == 1);
+  hunk = APR_ARRAY_IDX(hunks, 0 , svn_diff_hunk_t *);
+
+  SVN_ERR(check_content(hunk, TRUE,
+                        "",
+                        pool));
+
+  /* The value deliberately looks like diff headers. */
+  SVN_ERR(check_content(hunk, FALSE,
+                        "Added: bogus_prop" NL
+                        "## -0,0 +20 ##" NL
+                        "@@ -1,2 +0,0 @@" NL,
+                        pool));
+
+  /* Check the deleted property */
+  prop_patch = apr_hash_get(patch->prop_patches, "prop_del",
+                            APR_HASH_KEY_STRING);
+  SVN_TEST_ASSERT(prop_patch->operation == svn_diff_op_deleted);
+
+  hunks = prop_patch->hunks;
+  SVN_TEST_ASSERT(hunks->nelts == 1);
+  hunk = APR_ARRAY_IDX(hunks, 0 , svn_diff_hunk_t *);
+
+  SVN_ERR(check_content(hunk, TRUE,
+                        "--- iota" NL
+                        "+++ iota" NL,
+                        pool));
+
+  SVN_ERR(check_content(hunk, FALSE,
+                        "",
+                        pool));
+
+  /* Check the modified property */
+  prop_patch = apr_hash_get(patch->prop_patches, "prop_mod",
+                            APR_HASH_KEY_STRING);
+  SVN_TEST_ASSERT(prop_patch->operation == svn_diff_op_modified);
+  hunks = prop_patch->hunks;
+  SVN_TEST_ASSERT(hunks->nelts == 2);
+  hunk = APR_ARRAY_IDX(hunks, 0 , svn_diff_hunk_t *);
+
+  SVN_ERR(check_content(hunk, TRUE,
+                        "## -1,2 +1,2 ##" NL
+                        "## -1,5 -0,0 ##" NL
+                        "@@ -1,5 -0,0 @@" NL
+                        "Modified: prop_mod" NL,
+                        pool));
+
+  SVN_ERR(check_content(hunk, FALSE,
+                        "## -1,3 +1,3 ##" NL
+                        "## -1,5 -0,0 ##" NL
+                        "@@ -1,5 -0,0 @@" NL
+                        "Modified: prop_mod" NL,
+                        pool));
+
+  hunk = APR_ARRAY_IDX(hunks, 1 , svn_diff_hunk_t *);
+
+  SVN_ERR(check_content(hunk, TRUE,
+                        "context" NL
+                        "context" NL
+                        "context" NL
+                        "## -0,0 +1 ##" NL,
+                        pool));
+
+  SVN_ERR(check_content(hunk, FALSE,
+                        "context" NL
+                        "context" NL
+                        "context" NL
+                        "## -1,2 +1,4 ##" NL,
+                        pool));
+
+  SVN_ERR(svn_diff_close_patch_file(patch_file, pool));
+  return SVN_NO_ERROR;
+}
+
+/* Parse git diffs whose paths contain spaces (including names that
+ * embed the literal " b/" separator) and verify the parser splits the
+ * "a/... b/..." header at the right place each time. */
+static svn_error_t *
+test_git_diffs_with_spaces_diff(apr_pool_t *pool)
+{
+  svn_patch_file_t *patch_file;
+  svn_patch_t *patch;
+
+  SVN_ERR(create_patch_file(&patch_file, path_with_spaces_unidiff, pool));
+
+  SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+                                    FALSE, /* reverse */
+                                    FALSE, /* ignore_whitespace */
+                                    pool, pool));
+  SVN_TEST_ASSERT(patch);
+  SVN_TEST_STRING_ASSERT(patch->old_filename, "path 1");
+  SVN_TEST_STRING_ASSERT(patch->new_filename, "path 1");
+  SVN_TEST_ASSERT(patch->operation == svn_diff_op_added);
+  SVN_TEST_ASSERT(patch->hunks->nelts == 0);
+
+  SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+                                    FALSE, /* reverse */
+                                    FALSE, /* ignore_whitespace */
+                                    pool, pool));
+  SVN_TEST_ASSERT(patch);
+  SVN_TEST_STRING_ASSERT(patch->old_filename, "path one 1");
+  SVN_TEST_STRING_ASSERT(patch->new_filename, "path one 1");
+  SVN_TEST_ASSERT(patch->operation == svn_diff_op_added);
+  SVN_TEST_ASSERT(patch->hunks->nelts == 0);
+
+  /* Paths containing " b/" as a substring must not confuse the split. */
+  SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+                                    FALSE, /* reverse */
+                                    FALSE, /* ignore_whitespace */
+                                    pool, pool));
+  SVN_TEST_ASSERT(patch);
+  SVN_TEST_STRING_ASSERT(patch->old_filename, "dir/ b/path");
+  SVN_TEST_STRING_ASSERT(patch->new_filename, "dir/ b/path");
+  SVN_TEST_ASSERT(patch->operation == svn_diff_op_added);
+  SVN_TEST_ASSERT(patch->hunks->nelts == 0);
+
+  SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file,
+                                    FALSE, /* reverse */
+                                    FALSE, /* ignore_whitespace */
+                                    pool, pool));
+  SVN_TEST_ASSERT(patch);
+  SVN_TEST_STRING_ASSERT(patch->old_filename, " b/path 1");
+  SVN_TEST_STRING_ASSERT(patch->new_filename, " b/path 1");
+  SVN_TEST_ASSERT(patch->operation == svn_diff_op_added);
+  SVN_TEST_ASSERT(patch->hunks->nelts == 0);
+
+  SVN_ERR(svn_diff_close_patch_file(patch_file, pool));
+  return SVN_NO_ERROR;
+}
+
+/* Parse a unidiff whose last hunk line has no trailing newline, both
+ * forwards and in reverse, and verify the final line is preserved
+ * without an EOL being invented. */
+static svn_error_t *
+test_parse_unidiff_lacking_trailing_eol(apr_pool_t *pool)
+{
+  svn_patch_file_t *patch_file;
+  svn_boolean_t reverse;
+  svn_boolean_t ignore_whitespace;
+  int i;
+  apr_pool_t *iterpool;
+
+  reverse = FALSE;
+  ignore_whitespace = FALSE;
+  iterpool = svn_pool_create(pool);
+  for (i = 0; i < 2; i++)
+    {
+      svn_patch_t *patch;
+      svn_diff_hunk_t *hunk;
+
+      svn_pool_clear(iterpool);
+
+      SVN_ERR(create_patch_file(&patch_file, unidiff_lacking_trailing_eol,
+                                pool));
+
+      /* We have one patch with one hunk. Parse it. */
+      SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file, reverse,
+                                        ignore_whitespace, iterpool,
+                                        iterpool));
+      SVN_TEST_ASSERT(patch);
+      SVN_TEST_STRING_ASSERT(patch->old_filename, "A/C/gamma");
+      SVN_TEST_STRING_ASSERT(patch->new_filename, "A/C/gamma");
+      SVN_TEST_ASSERT(patch->hunks->nelts == 1);
+
+      hunk = APR_ARRAY_IDX(patch->hunks, 0, svn_diff_hunk_t *);
+      SVN_ERR(check_content(hunk, ! reverse,
+                            "This is the file 'gamma'." NL,
+                            pool));
+
+      /* Note the missing NL after the last expected line. */
+      SVN_ERR(check_content(hunk, reverse,
+                            "This is the file 'gamma'." NL
+                            "some more bytes to 'gamma'",
+                            pool));
+
+      /* Second iteration re-parses the same diff in reverse. */
+      reverse = !reverse;
+      SVN_ERR(svn_diff_close_patch_file(patch_file, pool));
+    }
+  svn_pool_destroy(iterpool);
+  return SVN_NO_ERROR;
+}
+
+/* Parse a unidiff that modifies svn:mergeinfo (including localized
+ * "Merged"/"Reverse-merged" lines) and verify that the parsed forward
+ * and reverse mergeinfo hashes contain the expected paths and revision
+ * ranges, with the two hashes swapping roles when parsing in reverse. */
+static svn_error_t *
+test_parse_unidiff_with_mergeinfo(apr_pool_t *pool)
+{
+  svn_patch_file_t *patch_file;
+  svn_boolean_t reverse;
+  svn_boolean_t ignore_whitespace;
+  int i;
+  apr_pool_t *iterpool;
+
+  reverse = FALSE;
+  ignore_whitespace = FALSE;
+  iterpool = svn_pool_create(pool);
+  for (i = 0; i < 2; i++)
+    {
+      svn_patch_t *patch;
+      svn_mergeinfo_t mergeinfo;
+      svn_mergeinfo_t reverse_mergeinfo;
+      svn_rangelist_t *rangelist;
+      svn_merge_range_t *range;
+
+      svn_pool_clear(iterpool);
+
+      SVN_ERR(create_patch_file(&patch_file, unidiff_with_mergeinfo,
+                                pool));
+
+      SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file, reverse,
+                                        ignore_whitespace, iterpool,
+                                        iterpool));
+      SVN_TEST_ASSERT(patch);
+      SVN_TEST_STRING_ASSERT(patch->old_filename, "A/C");
+      SVN_TEST_STRING_ASSERT(patch->new_filename, "A/C");
+
+      /* svn:ignore */
+      SVN_TEST_ASSERT(apr_hash_count(patch->prop_patches) == 1);
+
+      SVN_TEST_ASSERT(patch->mergeinfo);
+      SVN_TEST_ASSERT(patch->reverse_mergeinfo);
+
+      /* When parsing in reverse, the forward and reverse mergeinfo
+         change places. */
+      if (reverse)
+        {
+          mergeinfo = patch->reverse_mergeinfo;
+          reverse_mergeinfo = patch->mergeinfo;
+        }
+      else
+        {
+          mergeinfo = patch->mergeinfo;
+          reverse_mergeinfo = patch->reverse_mergeinfo;
+        }
+
+      rangelist = svn_hash_gets(reverse_mergeinfo,
+                                "/subversion/branches/1.6.x-r935631");
+      SVN_TEST_ASSERT(rangelist);
+      SVN_TEST_ASSERT(rangelist->nelts == 1);
+      range = APR_ARRAY_IDX(rangelist, 0, svn_merge_range_t *);
+      /* The diff text reads "r952683-955333"; the parsed range start is
+         one less than the first revision (ranges appear to be stored
+         half-open here). */
+      SVN_TEST_ASSERT(range->start == 952682);
+      SVN_TEST_ASSERT(range->end == 955333);
+
+      rangelist = svn_hash_gets(mergeinfo,
+                                "/subversion/branches/nfc-nfd-aware-client");
+      SVN_TEST_ASSERT(rangelist);
+      SVN_TEST_ASSERT(rangelist->nelts == 2);
+      range = APR_ARRAY_IDX(rangelist, 0, svn_merge_range_t *);
+      SVN_TEST_ASSERT(range->end == 870276);
+      range = APR_ARRAY_IDX(rangelist, 1, svn_merge_range_t *);
+      SVN_TEST_ASSERT(range->end == 870376);
+
+      rangelist = svn_hash_gets(mergeinfo,
+                                "/subversion/branches/1.8.x-openssl-dirs");
+      SVN_TEST_ASSERT(rangelist);
+      SVN_TEST_ASSERT(rangelist->nelts == 1);
+      range = APR_ARRAY_IDX(rangelist, 0, svn_merge_range_t *);
+      SVN_TEST_ASSERT(range->end == 1535139);
+
+      /* Second iteration re-parses the same diff in reverse. */
+      reverse = !reverse;
+      SVN_ERR(svn_diff_close_patch_file(patch_file, pool));
+    }
+  svn_pool_destroy(iterpool);
+  return SVN_NO_ERROR;
+}
+
+/* ========================================================================== */
+
+
+/* These are single-threaded parser tests; no parallelism needed. */
+static int max_threads = 1;
+
+/* The test table consumed by SVN_TEST_MAIN. */
+static struct svn_test_descriptor_t test_funcs[] =
+  {
+    SVN_TEST_NULL,
+    SVN_TEST_PASS2(test_parse_unidiff,
+                   "test unidiff parsing"),
+    SVN_TEST_PASS2(test_parse_git_diff,
+                   "test git unidiff parsing"),
+    SVN_TEST_PASS2(test_parse_git_tree_and_text_diff,
+                   "test git unidiff parsing of tree and text changes"),
+    SVN_TEST_PASS2(test_bad_git_diff_headers,
+                   "test badly formatted git diff headers"),
+    SVN_TEST_PASS2(test_parse_property_diff,
+                   "test property unidiff parsing"),
+    SVN_TEST_PASS2(test_parse_property_and_text_diff,
+                   "test property and text unidiff parsing"),
+    SVN_TEST_PASS2(test_parse_diff_symbols_in_prop_unidiff,
+                   "test property diffs with odd symbols"),
+    SVN_TEST_PASS2(test_git_diffs_with_spaces_diff,
+                   "test git diffs with spaces in paths"),
+    SVN_TEST_PASS2(test_parse_unidiff_lacking_trailing_eol,
+                   "test parsing unidiffs lacking trailing eol"),
+    SVN_TEST_PASS2(test_parse_unidiff_with_mergeinfo,
+                   "test parsing unidiffs with mergeinfo"),
+    SVN_TEST_NULL
+  };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_fs/fs-sequential-test.c b/subversion/tests/libsvn_fs/fs-sequential-test.c
new file mode 100644
index 0000000..daf5b0c
--- /dev/null
+++ b/subversion/tests/libsvn_fs/fs-sequential-test.c
@@ -0,0 +1,415 @@
+/* fs-sequential-test.c --- tests for the filesystem
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdlib.h>
+#include <string.h>
+#include <apr_pools.h>
+#include <apr_thread_proc.h>
+#include <assert.h>
+
+#include "../svn_test.h"
+
+#include "svn_hash.h"
+#include "svn_pools.h"
+#include "svn_time.h"
+#include "svn_string.h"
+#include "svn_fs.h"
+#include "svn_checksum.h"
+#include "svn_mergeinfo.h"
+#include "svn_props.h"
+#include "svn_version.h"
+
+#include "svn_private_config.h"
+#include "private/svn_fs_util.h"
+#include "private/svn_fs_private.h"
+#include "private/svn_fspath.h"
+
+#include "../svn_test_fs.h"
+
+#include "../../libsvn_delta/delta.h"
+#include "../../libsvn_fs/fs-loader.h"
+
+#define SET_STR(ps, s) ((ps)->data = (s), (ps)->len = strlen(s))
+
+
+/*-----------------------------------------------------------------*/
+
+/** The actual fs-sequential-tests called by `make check` **/
+
+/* Helper: commit TXN, expecting either success or failure:
+ *
+ * If EXPECTED_CONFLICT is null, then the commit is expected to
+ * succeed.  If it does succeed, set *NEW_REV to the new revision;
+ * else return error.
+ *
+ * If EXPECTED_CONFLICT is non-null, it is either the empty string or
+ * the expected path of the conflict.  If it is the empty string, any
+ * conflict is acceptable.  If it is a non-empty string, the commit
+ * must fail due to conflict, and the conflict path must match
+ * EXPECTED_CONFLICT.  If they don't match, return error.
+ *
+ * If a conflict is expected but the commit succeeds anyway, return
+ * error.  If the commit fails but does not provide an error, return
+ * error.
+ */
+static svn_error_t *
+test_commit_txn(svn_revnum_t *new_rev,
+                svn_fs_txn_t *txn,
+                const char *expected_conflict,
+                apr_pool_t *pool)
+{
+  const char *conflict;
+  svn_error_t *err;
+
+  err = svn_fs_commit_txn(&conflict, new_rev, txn, pool);
+
+  if (err && (err->apr_err == SVN_ERR_FS_CONFLICT))
+    {
+      /* A conflict error itself may be expected; discard it and judge
+         by EXPECTED_CONFLICT below. */
+      svn_error_clear(err);
+      if (! expected_conflict)
+        {
+          return svn_error_createf
+            (SVN_ERR_FS_CONFLICT, NULL,
+             "commit conflicted at '%s', but no conflict expected",
+             conflict ? conflict : "(missing conflict info!)");
+        }
+      else if (conflict == NULL)
+        {
+          return svn_error_createf
+            (SVN_ERR_FS_CONFLICT, NULL,
+             "commit conflicted as expected, "
+             "but no conflict path was returned ('%s' expected)",
+             expected_conflict);
+        }
+      else if ((strcmp(expected_conflict, "") != 0)
+               && (strcmp(conflict, expected_conflict) != 0))
+        {
+          return svn_error_createf
+            (SVN_ERR_FS_CONFLICT, NULL,
+             "commit conflicted at '%s', but expected conflict at '%s')",
+             conflict, expected_conflict);
+        }
+
+      /* The svn_fs_commit_txn() API promises to set *NEW_REV to an
+         invalid revision number in the case of a conflict.  */
+      if (SVN_IS_VALID_REVNUM(*new_rev))
+        {
+          return svn_error_createf
+            (SVN_ERR_FS_GENERAL, NULL,
+             "conflicting commit returned valid new revision");
+        }
+    }
+  else if (err)   /* commit may have succeeded, but always report an error */
+    {
+      /* A valid *NEW_REV with an error means the commit itself went
+         through but a post-commit step failed. */
+      if (SVN_IS_VALID_REVNUM(*new_rev))
+        return svn_error_quick_wrap
+          (err, "commit succeeded but something else failed");
+      else
+        return svn_error_quick_wrap
+          (err, "commit failed due to something other than a conflict");
+    }
+  else            /* err == NULL, commit should have succeeded */
+    {
+      if (! SVN_IS_VALID_REVNUM(*new_rev))
+        {
+          return svn_error_create
+            (SVN_ERR_FS_GENERAL, NULL,
+             "commit failed but no error was returned");
+        }
+
+      if (expected_conflict)
+        {
+          return svn_error_createf
+            (SVN_ERR_FS_GENERAL, NULL,
+             "commit succeeded that was expected to fail at '%s'",
+             expected_conflict);
+        }
+    }
+
+  return SVN_NO_ERROR;
+}
+
+#if APR_HAS_THREADS
+/* Baton passed to reopen_modify_child(). */
+struct reopen_modify_baton_t {
+  const char *fs_path;   /* path of the filesystem to reopen */
+  const char *txn_name;  /* name of the already-open transaction */
+  apr_pool_t *pool;      /* thread-private pool, destroyed on exit */
+  svn_error_t *err;      /* result reported back to the spawning thread */
+};
+
+/* Thread entry point: independently reopen the filesystem and the
+ * transaction named in DATA (a struct reopen_modify_baton_t *) and set
+ * the "name" property on node "A".  Any error is stored in the baton;
+ * the thread never raises directly. */
+static void * APR_THREAD_FUNC
+reopen_modify_child(apr_thread_t *tid, void *data)
+{
+  struct reopen_modify_baton_t *baton = data;
+  svn_fs_t *fs;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *root;
+
+  /* Chain the steps manually, stopping at the first failure, since
+     SVN_ERR cannot be used in a function returning void *. */
+  baton->err = svn_fs_open(&fs, baton->fs_path, NULL, baton->pool);
+  if (!baton->err)
+    baton->err = svn_fs_open_txn(&txn, fs, baton->txn_name, baton->pool);
+  if (!baton->err)
+    baton->err = svn_fs_txn_root(&root, txn, baton->pool);
+  if (!baton->err)
+    baton->err = svn_fs_change_node_prop(root, "A", "name",
+                                         svn_string_create("value",
+                                                           baton->pool),
+                                         baton->pool);
+  svn_pool_destroy(baton->pool);
+  apr_thread_exit(tid, 0);
+  return NULL;
+}
+#endif
+
+/* Test that a transaction created in this thread can be reopened and
+ * modified by another thread, and that the other thread's change
+ * survives the commit made from this thread.  Skipped when APR has no
+ * thread support. */
+static svn_error_t *
+reopen_modify(const svn_test_opts_t *opts,
+              apr_pool_t *pool)
+{
+#if APR_HAS_THREADS
+  svn_fs_t *fs;
+  svn_revnum_t head_rev = 0;
+  svn_fs_root_t *root;
+  svn_fs_txn_t *txn;
+  const char *fs_path, *txn_name;
+  svn_string_t *value;
+  struct reopen_modify_baton_t baton;
+  apr_status_t status, child_status;
+  apr_threadattr_t *tattr;
+  apr_thread_t *tid;
+
+  /* Create test repository with greek tree. */
+  fs_path = "test-reopen-modify";
+  SVN_ERR(svn_test__create_fs(&fs, fs_path, opts, pool));
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, head_rev, pool));
+  SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+  SVN_ERR(svn_test__create_greek_tree(root, pool));
+  SVN_ERR(test_commit_txn(&head_rev, txn, NULL, pool));
+
+  /* Create txn with changes. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, head_rev, pool));
+  SVN_ERR(svn_fs_txn_name(&txn_name, txn, pool));
+  SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+  SVN_ERR(svn_fs_make_dir(root, "X", pool));
+
+  /* In another thread: reopen fs and txn, and add more changes.  This
+     works in BDB and FSX but in FSFS the txn_dir_cache becomes
+     out-of-date and the thread's changes don't reach the revision. */
+  baton.fs_path = fs_path;
+  baton.txn_name = txn_name;
+  /* The child owns and destroys this subpool. */
+  baton.pool = svn_pool_create(pool);
+  status = apr_threadattr_create(&tattr, pool);
+  if (status)
+    return svn_error_wrap_apr(status, _("Can't create threadattr"));
+  status = apr_thread_create(&tid, tattr, reopen_modify_child, &baton, pool);
+  if (status)
+    return svn_error_wrap_apr(status, _("Can't create thread"));
+  /* Wait for the child before committing, so its change is in the txn. */
+  status = apr_thread_join(&child_status, tid);
+  if (status)
+    return svn_error_wrap_apr(status, _("Can't join thread"));
+  if (baton.err)
+    return svn_error_trace(baton.err);
+
+  /* Commit */
+  SVN_ERR(test_commit_txn(&head_rev, txn, NULL, pool));
+
+  /* Check for change made by thread. */
+  SVN_ERR(svn_fs_revision_root(&root, fs, head_rev, pool));
+  SVN_ERR(svn_fs_node_prop(&value, root, "A", "name", pool));
+  SVN_TEST_ASSERT(value && !strcmp(value->data, "value"));
+
+  return SVN_NO_ERROR;
+#else
+  return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, "no thread support");
+#endif
+}
+
+/* Convenience wrapper around svn_fs_change_rev_prop2: set the revision
+ * property "prop" of REVISION in FS to VALUE, with no old-value check
+ * (the NULL argument). */
+static svn_error_t *
+set_revprop(svn_fs_t *fs,
+            svn_revnum_t revision,
+            const char *value,
+            apr_pool_t *scratch_pool)
+{
+  svn_string_t *content = svn_string_create(value, scratch_pool);
+  SVN_ERR(svn_fs_change_rev_prop2(fs, revision, "prop", NULL, content,
+                                  scratch_pool));
+
+  return SVN_NO_ERROR;
+}
+
+/* Call svn_fs_revision_prop2 and verify that the property value matches
+ * EXPECTED.  REFRESH is forwarded to svn_fs_revision_prop2 and controls
+ * whether cached revprop data is refreshed before reading. */
+static svn_error_t *
+check_revprop(svn_fs_t *fs,
+              svn_revnum_t revision,
+              svn_boolean_t refresh,
+              const char *expected,
+              apr_pool_t *scratch_pool)
+{
+  svn_string_t *actual;
+  SVN_ERR(svn_fs_revision_prop2(&actual, fs, revision, "prop", refresh,
+                                scratch_pool, scratch_pool));
+  SVN_TEST_STRING_ASSERT(actual->data, expected);
+
+  return SVN_NO_ERROR;
+}
+
+/* Exercise the REFRESH flag of svn_fs_revision_prop2 using two independent
+ * svn_fs_t objects (FS and FS2) opened onto the same on-disk repository. */
+static svn_error_t *
+revprop_refresh(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs, *fs2;
+ int i;
+ apr_pool_t *iterpool = svn_pool_create(pool);
+ svn_string_t *old_value, *new_value;
+ apr_hash_t *config;
+
+ if (!strcmp(opts->fs_type, "bdb"))
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "the BDB backend ignores the refresh option");
+
+ /* Revprop caching must be enabled; it is required to make this work
+ with FSX. */
+ config = apr_hash_make(pool);
+ svn_hash_sets(config, SVN_FS_CONFIG_FSFS_CACHE_REVPROPS, "1");
+
+ /* Build a repository with a few revisions in it. */
+ SVN_ERR(svn_test__create_fs2(&fs, "test-repo-revprop-refresh", opts,
+ config, pool));
+ SVN_ERR(svn_fs_open2(&fs2, "test-repo-revprop-refresh", config, pool,
+ pool));
+
+ for (i = 1; i < 5; ++i)
+ {
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ svn_revnum_t new_rev = 0;
+
+ svn_pool_clear(iterpool);
+
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, new_rev, iterpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool));
+ SVN_ERR(svn_fs_make_dir(txn_root, apr_itoa(pool, i), iterpool));
+ SVN_ERR(test_commit_txn(&new_rev, txn, NULL, iterpool));
+ }
+
+ /* The initial access sees the latest revprops - even without refresh. */
+ SVN_ERR(set_revprop(fs, 0, "x0", pool));
+ SVN_ERR(set_revprop(fs, 1, "x1", pool));
+ SVN_ERR(set_revprop(fs, 2, "x2", pool));
+ SVN_ERR(check_revprop(fs2, 0, FALSE, "x0", pool));
+ SVN_ERR(check_revprop(fs2, 1, FALSE, "x1", pool));
+ SVN_ERR(check_revprop(fs2, 2, FALSE, "x2", pool));
+
+ /* With the REFRESH option set, revprop changes are immediately visible. */
+ SVN_ERR(set_revprop(fs, 0, "y0", pool));
+ SVN_ERR(set_revprop(fs, 1, "y1", pool));
+ SVN_ERR(set_revprop(fs, 2, "y2", pool));
+ SVN_ERR(check_revprop(fs2, 0, TRUE, "y0", pool));
+ SVN_ERR(check_revprop(fs2, 1, TRUE, "y1", pool));
+ SVN_ERR(check_revprop(fs2, 2, TRUE, "y2", pool));
+
+ /* Without the REFRESH option set, revprop changes not always visible.
+ * Our cache is large enough that we won't see any change.
+ * But first we have to heat up our cache. */
+ SVN_ERR(check_revprop(fs2, 0, FALSE, "y0", pool));
+ SVN_ERR(check_revprop(fs2, 1, FALSE, "y1", pool));
+ SVN_ERR(check_revprop(fs2, 2, FALSE, "y2", pool));
+ SVN_ERR(set_revprop(fs, 0, "z0", pool));
+ SVN_ERR(set_revprop(fs, 1, "z1", pool));
+ SVN_ERR(set_revprop(fs, 2, "z2", pool));
+ SVN_ERR(check_revprop(fs2, 0, FALSE, "y0", pool));
+ SVN_ERR(check_revprop(fs2, 1, FALSE, "y1", pool));
+ SVN_ERR(check_revprop(fs2, 2, FALSE, "y2", pool));
+
+ /* An explicit refresh helps. */
+ SVN_ERR(svn_fs_refresh_revision_props(fs2, pool));
+ SVN_ERR(check_revprop(fs2, 0, FALSE, "z0", pool));
+ SVN_ERR(check_revprop(fs2, 1, FALSE, "z1", pool));
+ SVN_ERR(check_revprop(fs2, 2, FALSE, "z2", pool));
+
+ /* A single REFRESH is enough to make *all* recent changes visible. */
+ SVN_ERR(set_revprop(fs, 0, "t0", pool));
+ SVN_ERR(set_revprop(fs, 1, "t1", pool));
+ SVN_ERR(set_revprop(fs, 2, "t2", pool));
+ SVN_ERR(check_revprop(fs2, 0, FALSE, "z0", pool));
+ SVN_ERR(check_revprop(fs2, 1, TRUE, "t1", pool));
+ SVN_ERR(check_revprop(fs2, 2, FALSE, "t2", pool));
+ SVN_ERR(check_revprop(fs2, 0, FALSE, "t0", pool));
+
+ /* A single revprop write is enough to make *all* recent changes visible. */
+ SVN_ERR(set_revprop(fs, 0, "u0", pool));
+ SVN_ERR(set_revprop(fs, 1, "u1", pool));
+ SVN_ERR(set_revprop(fs, 2, "u2", pool));
+ SVN_ERR(check_revprop(fs2, 0, FALSE, "t0", pool));
+ SVN_ERR(set_revprop(fs2, 3, "a3", pool));
+ SVN_ERR(check_revprop(fs2, 1, FALSE, "u1", pool));
+ SVN_ERR(check_revprop(fs2, 2, FALSE, "u2", pool));
+ SVN_ERR(check_revprop(fs2, 0, FALSE, "u0", pool));
+
+ /* A revprop write is always visible to the writer. */
+ SVN_ERR(check_revprop(fs, 0, FALSE, "u0", pool));
+ SVN_ERR(check_revprop(fs, 1, FALSE, "u1", pool));
+ SVN_ERR(check_revprop(fs, 2, FALSE, "u2", pool));
+ SVN_ERR(check_revprop(fs2, 3, FALSE, "a3", pool));
+
+ /* An atomic revprop write will always verify against the on-disk data. */
+ SVN_ERR(set_revprop(fs, 0, "v0", pool));
+
+ SVN_ERR(check_revprop(fs, 0, FALSE, "v0", pool));
+ SVN_ERR(check_revprop(fs2, 0, FALSE, "u0", pool));
+
+ old_value = svn_string_create("v0", pool);
+ new_value = svn_string_create("b0", pool);
+ SVN_ERR(svn_fs_change_rev_prop2(fs2, 0, "prop",
+ (const svn_string_t * const *)&old_value,
+ new_value, pool));
+
+ SVN_ERR(check_revprop(fs, 0, FALSE, "v0", pool));
+ SVN_ERR(check_revprop(fs2, 0, FALSE, "b0", pool));
+
+ old_value = svn_string_create("v0", pool);
+ new_value = svn_string_create("w0", pool);
+ SVN_TEST_ASSERT_ERROR(svn_fs_change_rev_prop2(fs, 0, "prop",
+ (const svn_string_t * const *)&old_value,
+ new_value, pool),
+ SVN_ERR_FS_PROP_BASEVALUE_MISMATCH);
+
+ return SVN_NO_ERROR;
+}
+
+/* ------------------------------------------------------------------------ */
+
+/* The test table: NULL-terminated list of tests executed by SVN_TEST_MAIN. */
+
+static int max_threads = 1; /* Run tests sequentially. */
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_OPTS_PASS(reopen_modify,
+ "test reopen and modify txn"),
+ SVN_TEST_OPTS_PASS(revprop_refresh,
+ "refresh option in FS revprop API"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_fs/fs-test.c b/subversion/tests/libsvn_fs/fs-test.c
new file mode 100644
index 0000000..3e47a85
--- /dev/null
+++ b/subversion/tests/libsvn_fs/fs-test.c
@@ -0,0 +1,7519 @@
+/* fs-test.c --- tests for the filesystem
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdlib.h>
+#include <string.h>
+#include <apr_pools.h>
+#include <apr_thread_proc.h>
+#include <apr_poll.h>
+#include <assert.h>
+
+#include "../svn_test.h"
+
+#include "svn_hash.h"
+#include "svn_pools.h"
+#include "svn_time.h"
+#include "svn_string.h"
+#include "svn_fs.h"
+#include "svn_checksum.h"
+#include "svn_mergeinfo.h"
+#include "svn_props.h"
+#include "svn_version.h"
+
+#include "svn_private_config.h"
+#include "private/svn_cache.h"
+#include "private/svn_fs_util.h"
+#include "private/svn_fs_private.h"
+#include "private/svn_fspath.h"
+#include "private/svn_sqlite.h"
+
+#include "../svn_test_fs.h"
+
+#include "../../libsvn_delta/delta.h"
+#include "../../libsvn_fs/fs-loader.h"
+
+/* Point the svn_string_t *PS at the NUL-terminated C string S (no copy). */
+#define SET_STR(ps, s) ((ps)->data = (s), (ps)->len = strlen(s))
+
+
+/*-----------------------------------------------------------------*/
+
+/** The actual fs-tests called by `make check` **/
+
+/* Helper: commit TXN, expecting either success or failure:
+ *
+ * If EXPECTED_CONFLICT is null, then the commit is expected to
+ * succeed. If it does succeed, set *NEW_REV to the new revision;
+ * else return error.
+ *
+ * If EXPECTED_CONFLICT is non-null, it is either the empty string or
+ * the expected path of the conflict. If it is the empty string, any
+ * conflict is acceptable. If it is a non-empty string, the commit
+ * must fail due to conflict, and the conflict path must match
+ * EXPECTED_CONFLICT. If they don't match, return error.
+ *
+ * If a conflict is expected but the commit succeeds anyway, return
+ * error. If the commit fails but does not provide an error, return
+ * error.
+ */
+static svn_error_t *
+test_commit_txn(svn_revnum_t *new_rev,
+ svn_fs_txn_t *txn,
+ const char *expected_conflict,
+ apr_pool_t *pool)
+{
+ const char *conflict;
+ svn_error_t *err;
+
+ err = svn_fs_commit_txn(&conflict, new_rev, txn, pool);
+
+ if (err && (err->apr_err == SVN_ERR_FS_CONFLICT))
+ {
+ svn_error_clear(err);
+ if (! expected_conflict)
+ {
+ return svn_error_createf
+ (SVN_ERR_FS_CONFLICT, NULL,
+ "commit conflicted at '%s', but no conflict expected",
+ conflict ? conflict : "(missing conflict info!)");
+ }
+ else if (conflict == NULL)
+ {
+ return svn_error_createf
+ (SVN_ERR_FS_CONFLICT, NULL,
+ "commit conflicted as expected, "
+ "but no conflict path was returned ('%s' expected)",
+ expected_conflict);
+ }
+ else if ((strcmp(expected_conflict, "") != 0)
+ && (strcmp(conflict, expected_conflict) != 0))
+ {
+ /* Fixed: the message previously ended with a stray unbalanced
+ close-paren after the second '%s'. */
+ return svn_error_createf
+ (SVN_ERR_FS_CONFLICT, NULL,
+ "commit conflicted at '%s', but expected conflict at '%s'",
+ conflict, expected_conflict);
+ }
+
+ /* The svn_fs_commit_txn() API promises to set *NEW_REV to an
+ invalid revision number in the case of a conflict. */
+ if (SVN_IS_VALID_REVNUM(*new_rev))
+ {
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "conflicting commit returned valid new revision");
+ }
+ }
+ else if (err) /* commit may have succeeded, but always report an error */
+ {
+ if (SVN_IS_VALID_REVNUM(*new_rev))
+ return svn_error_quick_wrap
+ (err, "commit succeeded but something else failed");
+ else
+ return svn_error_quick_wrap
+ (err, "commit failed due to something other than a conflict");
+ }
+ else /* err == NULL, commit should have succeeded */
+ {
+ if (! SVN_IS_VALID_REVNUM(*new_rev))
+ {
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "commit failed but no error was returned");
+ }
+
+ if (expected_conflict)
+ {
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "commit succeeded that was expected to fail at '%s'",
+ expected_conflict);
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* Begin a txn, verify its name is non-null and uses only legal characters,
+ then close it */
+static svn_error_t *
+trivial_transaction(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ const char *txn_name;
+ int is_invalid_char[256];
+ int i;
+ const char *p;
+
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-trivial-txn",
+ opts, pool));
+
+ /* Begin a new transaction that is based on revision 0. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+
+ /* Test that the txn name is non-null. */
+ SVN_ERR(svn_fs_txn_name(&txn_name, txn, pool));
+
+ if (! txn_name)
+ return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+ "Got a NULL txn name.");
+
+ /* Test that the txn name contains only valid characters. See
+ svn_fs.h for the list of valid characters. */
+ for (i = 0; i < sizeof(is_invalid_char)/sizeof(*is_invalid_char); ++i)
+ is_invalid_char[i] = 1;
+ for (i = '0'; i <= '9'; ++i)
+ is_invalid_char[i] = 0;
+ for (i = 'a'; i <= 'z'; ++i)
+ is_invalid_char[i] = 0;
+ for (i = 'A'; i <= 'Z'; ++i)
+ is_invalid_char[i] = 0;
+ for (p = "-."; *p; ++p)
+ is_invalid_char[(unsigned char) *p] = 0;
+
+ for (p = txn_name; *p; ++p)
+ {
+ if (is_invalid_char[(unsigned char) *p])
+ return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+ "The txn name '%s' contains an illegal '%c' "
+ "character", txn_name, *p);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* Open existing transactions by name, both before and after a commit. */
+static svn_error_t *
+reopen_trivial_transaction(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *root;
+ const char *txn_name;
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-reopen-trivial-txn",
+ opts, pool));
+
+ /* Create a first transaction - we don't want that one to reopen. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
+
+ /* Begin a second transaction that is based on revision 0. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
+
+ /* Don't use the subpool, txn_name must persist beyond the current txn */
+ SVN_ERR(svn_fs_txn_name(&txn_name, txn, pool));
+
+ SVN_TEST_ASSERT(svn_fs_txn_base_revision(txn) == 0);
+
+ /* Create a third transaction - we don't want that one to reopen. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
+
+ /* Close the transaction. */
+ svn_pool_clear(subpool);
+
+ /* Reopen the transaction by name */
+ SVN_ERR(svn_fs_open_txn(&txn, fs, txn_name, subpool));
+
+ /* Does it have the same name? */
+ SVN_ERR(svn_fs_txn_root(&root, txn, subpool));
+ SVN_TEST_STRING_ASSERT(svn_fs_txn_root_name(root, subpool), txn_name);
+
+ SVN_TEST_ASSERT(svn_fs_txn_base_revision(txn) == 0);
+
+ {
+ const char *conflict;
+ svn_revnum_t new_rev;
+ SVN_ERR(svn_fs_commit_txn(&conflict, &new_rev, txn, subpool));
+ SVN_TEST_STRING_ASSERT(conflict, NULL);
+ SVN_TEST_ASSERT(new_rev == 1);
+ }
+
+ /* Close the transaction ... again. */
+ svn_pool_clear(subpool);
+
+ /* Begin another transaction that is based on revision 1. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 1, subpool));
+
+ /* Don't use the subpool, txn_name must persist beyond the current txn */
+ SVN_ERR(svn_fs_txn_name(&txn_name, txn, pool));
+
+ SVN_TEST_ASSERT(svn_fs_txn_base_revision(txn) == 1);
+
+ /* Redundant refetch of the txn name (already fetched into POOL above). */
+ SVN_ERR(svn_fs_txn_name(&txn_name, txn, pool));
+
+ /* Close the transaction ... again. */
+ svn_pool_clear(subpool);
+
+ /* Reopen the transaction by name ... again */
+ SVN_ERR(svn_fs_open_txn(&txn, fs, txn_name, subpool));
+
+ /* Does it have the same name? ... */
+ SVN_ERR(svn_fs_txn_root(&root, txn, subpool));
+ SVN_TEST_STRING_ASSERT(svn_fs_txn_root_name(root, subpool), txn_name);
+
+ /* And the same base revision? */
+ SVN_TEST_ASSERT(svn_fs_txn_base_revision(txn) == 1);
+
+ svn_pool_destroy(subpool);
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* Create a single file inside a fresh txn (no commit). */
+static svn_error_t *
+create_file_transaction(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-create-file-txn",
+ opts, pool));
+
+ /* Begin a new transaction that is based on revision 0. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+
+ /* Get the txn root */
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Create a new file in the root directory. */
+ SVN_ERR(svn_fs_make_file(txn_root, "beer.txt", pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Make sure svn_fs_list_transactions() reports exactly the open txns. */
+static svn_error_t *
+verify_txn_list(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ apr_pool_t *subpool;
+ svn_fs_txn_t *txn1, *txn2;
+ const char *name1, *name2;
+ apr_array_header_t *txn_list;
+
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-verify-txn-list",
+ opts, pool));
+
+ /* Begin a new transaction, get its name (in the top pool), close it. */
+ subpool = svn_pool_create(pool);
+ SVN_ERR(svn_fs_begin_txn(&txn1, fs, 0, subpool));
+ SVN_ERR(svn_fs_txn_name(&name1, txn1, pool));
+ svn_pool_destroy(subpool);
+
+ /* Begin *another* transaction, get its name (in the top pool), close it. */
+ subpool = svn_pool_create(pool);
+ SVN_ERR(svn_fs_begin_txn(&txn2, fs, 0, subpool));
+ SVN_ERR(svn_fs_txn_name(&name2, txn2, pool));
+ svn_pool_destroy(subpool);
+
+ /* Get the list of active transactions from the fs. */
+ SVN_ERR(svn_fs_list_transactions(&txn_list, fs, pool));
+
+ /* Check the list. It should have *exactly* two entries. */
+ if (txn_list->nelts != 2)
+ goto all_bad;
+
+ /* We should be able to find our 2 txn names in the list, in some
+ order. */
+ if ((! strcmp(name1, APR_ARRAY_IDX(txn_list, 0, const char *)))
+ && (! strcmp(name2, APR_ARRAY_IDX(txn_list, 1, const char *))))
+ goto all_good;
+
+ else if ((! strcmp(name2, APR_ARRAY_IDX(txn_list, 0, const char *)))
+ && (! strcmp(name1, APR_ARRAY_IDX(txn_list, 1, const char *))))
+ goto all_good;
+
+ all_bad:
+
+ return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+ "Got a bogus txn list.");
+ all_good:
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Generate N consecutive transactions, then abort them all. Return
+ the generated transaction names as the keys of the hash *TXN_NAMES. */
+static svn_error_t *
+txn_names_are_not_reused_helper1(apr_hash_t **txn_names,
+ svn_fs_t *fs,
+ apr_pool_t *pool)
+{
+ apr_hash_index_t *hi;
+ const int N = 10;
+ int i;
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ *txn_names = apr_hash_make(pool);
+
+ /* Create the transactions and store in a hash table the transaction
+ name as the key and the svn_fs_txn_t * as the value. */
+ for (i = 0; i < N; ++i)
+ {
+ svn_fs_txn_t *txn;
+ const char *name;
+
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
+ SVN_ERR(svn_fs_txn_name(&name, txn, pool));
+ if (apr_hash_get(*txn_names, name, APR_HASH_KEY_STRING) != NULL)
+ return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+ "beginning a new transaction used an "
+ "existing transaction name '%s'",
+ name);
+ apr_hash_set(*txn_names, name, APR_HASH_KEY_STRING, txn);
+ }
+
+ i = 0;
+ for (hi = apr_hash_first(pool, *txn_names); hi; hi = apr_hash_next(hi))
+ {
+ void *val;
+ apr_hash_this(hi, NULL, NULL, &val);
+ SVN_ERR(svn_fs_abort_txn((svn_fs_txn_t *)val, pool));
+ ++i;
+ }
+
+ if (i != N)
+ return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+ "created %d transactions, but only aborted %d",
+ N, i);
+
+ svn_pool_destroy(subpool);
+ return SVN_NO_ERROR;
+}
+
+/* Compare two hash tables of txn names and ensure that no keys in the
+ first hash table appear in the second hash table (error if any do). */
+static svn_error_t *
+txn_names_are_not_reused_helper2(apr_hash_t *ht1,
+ apr_hash_t *ht2,
+ apr_pool_t *pool)
+{
+ apr_hash_index_t *hi;
+
+ for (hi = apr_hash_first(pool, ht1); hi; hi = apr_hash_next(hi))
+ {
+ const void *key;
+ const char *key_string;
+ apr_hash_this(hi, &key, NULL, NULL);
+ key_string = key;
+ if (apr_hash_get(ht2, key, APR_HASH_KEY_STRING) != NULL)
+ return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+ "the transaction name '%s' was reused",
+ key_string);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Make sure that transaction names are not reused across abort cycles. */
+static svn_error_t *
+txn_names_are_not_reused(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ apr_pool_t *subpool;
+ apr_hash_t *txn_names1, *txn_names2;
+
+ /* Bail (with success) on known-untestable scenarios */
+ if ((strcmp(opts->fs_type, "fsfs") == 0)
+ && (opts->server_minor_version && (opts->server_minor_version < 5)))
+ return SVN_NO_ERROR;
+
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-txn-names-are-not-reused",
+ opts, pool));
+
+ subpool = svn_pool_create(pool);
+
+ /* Create N transactions, abort them all, and collect the generated
+ transaction names. Do this twice. */
+ SVN_ERR(txn_names_are_not_reused_helper1(&txn_names1, fs, subpool));
+ SVN_ERR(txn_names_are_not_reused_helper1(&txn_names2, fs, subpool));
+
+ /* Check that no transaction names appear in both hash tables. */
+ SVN_ERR(txn_names_are_not_reused_helper2(txn_names1, txn_names2, subpool));
+ SVN_ERR(txn_names_are_not_reused_helper2(txn_names2, txn_names1, subpool));
+
+ svn_pool_destroy(subpool);
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* Test writing & reading back a file's contents within a txn. */
+static svn_error_t *
+write_and_read_file(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ svn_stream_t *rstream;
+ svn_stringbuf_t *rstring;
+ svn_stringbuf_t *wstring;
+
+ wstring = svn_stringbuf_create("Wicki wild, wicki wicki wild.", pool);
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-read-and-write-file",
+ opts, pool));
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Add an empty file. */
+ SVN_ERR(svn_fs_make_file(txn_root, "beer.txt", pool));
+
+ /* And write some data into this file. */
+ SVN_ERR(svn_test__set_file_contents(txn_root, "beer.txt",
+ wstring->data, pool));
+
+ /* Now let's read the data back from the file. */
+ SVN_ERR(svn_fs_file_contents(&rstream, txn_root, "beer.txt", pool));
+ SVN_ERR(svn_test__stream_to_string(&rstring, rstream, pool));
+
+ /* Compare what was read to what was written. */
+ if (! svn_stringbuf_compare(rstring, wstring))
+ return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+ "data read != data written.");
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* Create a file, a directory, and a file in that directory! (No commit.) */
+static svn_error_t *
+create_mini_tree_transaction(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-create-mini-tree-txn",
+ opts, pool));
+
+ /* Begin a new transaction that is based on revision 0. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+
+ /* Get the txn root */
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Create a new file in the root directory. */
+ SVN_ERR(svn_fs_make_file(txn_root, "wine.txt", pool));
+
+ /* Create a new directory in the root directory. */
+ SVN_ERR(svn_fs_make_dir(txn_root, "keg", pool));
+
+ /* Now, create a file in our new directory. */
+ SVN_ERR(svn_fs_make_file(txn_root, "keg/beer.txt", pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Create and verify the standard greek tree inside a transaction. */
+static svn_error_t *
+create_greek_tree_transaction(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+
+ /* Prepare a txn to receive the greek tree. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-create-greek-tree-txn",
+ opts, pool));
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Create and verify the greek tree. */
+ SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Verify that entry KEY is present in ENTRIES, and that its value is
+ an svn_fs_dirent_t whose name matches KEY and whose id is not null. */
+static svn_error_t *
+verify_entry(apr_hash_t *entries, const char *key)
+{
+ svn_fs_dirent_t *ent = apr_hash_get(entries, key,
+ APR_HASH_KEY_STRING);
+
+ if (ent == NULL)
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "didn't find dir entry for \"%s\"", key);
+
+ if ((ent->name == NULL) && (ent->id == NULL))
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "dir entry for \"%s\" has null name and null id", key);
+
+ if (ent->name == NULL)
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "dir entry for \"%s\" has null name", key);
+
+ if (ent->id == NULL)
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "dir entry for \"%s\" has null id", key);
+
+ if (strcmp(ent->name, key) != 0)
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "dir entry for \"%s\" contains wrong name (\"%s\")", key, ent->name);
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Check svn_fs_dir_entries() on a small hand-built tree. */
+static svn_error_t *
+list_directory(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ apr_hash_t *entries;
+
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-list-dir",
+ opts, pool));
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* We create this tree
+ *
+ * /q
+ * /A/x
+ * /A/y
+ * /A/z
+ * /B/m
+ * /B/n
+ * /B/o
+ *
+ * then list dir A. It should have 3 files: "x", "y", and "z", no
+ * more, no less.
+ */
+
+ /* Create the tree. */
+ SVN_ERR(svn_fs_make_file(txn_root, "q", pool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "A", pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/x", pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/y", pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/z", pool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "B", pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "B/m", pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "B/n", pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "B/o", pool));
+
+ /* Get A's entries. */
+ SVN_ERR(svn_fs_dir_entries(&entries, txn_root, "A", pool));
+
+ /* Make sure exactly the right set of entries is present. */
+ if (apr_hash_count(entries) != 3)
+ {
+ return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+ "unexpected number of entries in dir");
+ }
+ else
+ {
+ SVN_ERR(verify_entry(entries, "x"));
+ SVN_ERR(verify_entry(entries, "y"));
+ SVN_ERR(verify_entry(entries, "z"));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* If EXPR raises SVN_ERR_FS_PROP_BASEVALUE_MISMATCH, clear the error and
+ * continue; else, fail the test. */
+#define FAILS_WITH_BOV(expr) \
+ do { \
+ svn_error_t *__err = (expr); \
+ if (!__err || __err->apr_err != SVN_ERR_FS_PROP_BASEVALUE_MISMATCH) \
+ return svn_error_create(SVN_ERR_TEST_FAILED, __err, \
+ "svn_fs_change_rev_prop2() failed to " \
+ "detect unexpected old value"); \
+ else \
+ svn_error_clear(__err); \
+ } while (0)
+
+/* Test setting, changing, deleting and atomically swapping revprops on r0. */
+static svn_error_t *
+revision_props(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ apr_hash_t *proplist;
+ svn_string_t *value;
+ int i;
+ svn_string_t s1;
+
+ const char *initial_props[4][2] = {
+ { "color", "red" },
+ { "size", "XXL" },
+ { "favorite saturday morning cartoon", "looney tunes" },
+ { "auto", "Green 1997 Saturn SL1" }
+ };
+
+ const char *final_props[4][2] = {
+ { "color", "violet" },
+ { "flower", "violet" },
+ { "favorite saturday morning cartoon", "looney tunes" },
+ { "auto", "Red 2000 Chevrolet Blazer" }
+ };
+
+ /* Open the fs */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-rev-props",
+ opts, pool));
+
+ /* Set some properties on the revision. */
+ for (i = 0; i < 4; i++)
+ {
+ SET_STR(&s1, initial_props[i][1]);
+ SVN_ERR(svn_fs_change_rev_prop(fs, 0, initial_props[i][0], &s1, pool));
+ }
+
+ /* Change some of the above properties. */
+ SET_STR(&s1, "violet");
+ SVN_ERR(svn_fs_change_rev_prop(fs, 0, "color", &s1, pool));
+
+ SET_STR(&s1, "Red 2000 Chevrolet Blazer");
+ SVN_ERR(svn_fs_change_rev_prop(fs, 0, "auto", &s1, pool));
+
+ /* Remove a property altogether */
+ SVN_ERR(svn_fs_change_rev_prop(fs, 0, "size", NULL, pool));
+
+ /* Copy a property's value into a new property. */
+ SVN_ERR(svn_fs_revision_prop(&value, fs, 0, "color", pool));
+ SVN_TEST_ASSERT(value);
+
+ s1.data = value->data;
+ s1.len = value->len;
+ SVN_ERR(svn_fs_change_rev_prop(fs, 0, "flower", &s1, pool));
+
+ /* Test svn_fs_change_rev_prop2(). If the whole block goes through, then
+ * it is a no-op (it undoes all changes it makes). */
+ {
+ const svn_string_t s2 = { "wrong value", 11 };
+ const svn_string_t *s2_p = &s2;
+ const svn_string_t *s1_p = &s1;
+ const svn_string_t *unset = NULL;
+ const svn_string_t *s1_dup;
+
+ /* Value of "flower" is 's1'. */
+
+ FAILS_WITH_BOV(svn_fs_change_rev_prop2(fs, 0, "flower", &s2_p, s1_p, pool));
+ s1_dup = svn_string_dup(&s1, pool);
+ SVN_ERR(svn_fs_change_rev_prop2(fs, 0, "flower", &s1_dup, s2_p, pool));
+
+ /* Value of "flower" is 's2'. */
+
+ FAILS_WITH_BOV(svn_fs_change_rev_prop2(fs, 0, "flower", &s1_p, NULL, pool));
+ SVN_ERR(svn_fs_change_rev_prop2(fs, 0, "flower", &s2_p, NULL, pool));
+
+ /* Value of "flower" is <not set>. */
+
+ FAILS_WITH_BOV(svn_fs_change_rev_prop2(fs, 0, "flower", &s2_p, s1_p, pool));
+ SVN_ERR(svn_fs_change_rev_prop2(fs, 0, "flower", &unset, s1_p, pool));
+
+ /* Value of "flower" is 's1'. */
+ }
+
+ /* Obtain a list of all current properties, and make sure it matches
+ the expected values. */
+ SVN_ERR(svn_fs_revision_proplist(&proplist, fs, 0, pool));
+ SVN_TEST_ASSERT(proplist);
+ {
+ svn_string_t *prop_value;
+
+ if (apr_hash_count(proplist) < 4 )
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "too few revision properties found");
+
+ /* Loop through our list of expected revision property name/value
+ pairs. */
+ for (i = 0; i < 4; i++)
+ {
+ /* For each expected property: */
+
+ /* Step 1. Find it by name in the hash of all rev. props
+ returned to us by svn_fs_revision_proplist. If it can't be
+ found, return an error. */
+ prop_value = apr_hash_get(proplist,
+ final_props[i][0],
+ APR_HASH_KEY_STRING);
+ if (! prop_value)
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "unable to find expected revision property");
+
+ /* Step 2. Make sure the value associated with it is the same
+ as what was expected, else return an error. */
+ if (strcmp(prop_value->data, final_props[i][1]))
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "revision property had an unexpected value");
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Test txn proplists and that the committed revision's revprops match the
+ txn's props (plus the automatic svn:date). */
+static svn_error_t *
+transaction_props(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ apr_hash_t *proplist;
+ svn_string_t *value;
+ svn_revnum_t after_rev;
+ int i;
+ svn_string_t s1;
+
+ const char *initial_props[4][2] = {
+ { "color", "red" },
+ { "size", "XXL" },
+ { "favorite saturday morning cartoon", "looney tunes" },
+ { "auto", "Green 1997 Saturn SL1" }
+ };
+
+ const char *final_props[5][2] = {
+ { "color", "violet" },
+ { "flower", "violet" },
+ { "favorite saturday morning cartoon", "looney tunes" },
+ { "auto", "Red 2000 Chevrolet Blazer" },
+ { SVN_PROP_REVISION_DATE, "<some datestamp value>" }
+ };
+
+ /* Open the fs */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-txn-props",
+ opts, pool));
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+
+ /* Set some properties on the transaction. */
+ for (i = 0; i < 4; i++)
+ {
+ SET_STR(&s1, initial_props[i][1]);
+ SVN_ERR(svn_fs_change_txn_prop(txn, initial_props[i][0], &s1, pool));
+ }
+
+ /* Change some of the above properties. */
+ SET_STR(&s1, "violet");
+ SVN_ERR(svn_fs_change_txn_prop(txn, "color", &s1, pool));
+
+ SET_STR(&s1, "Red 2000 Chevrolet Blazer");
+ SVN_ERR(svn_fs_change_txn_prop(txn, "auto", &s1, pool));
+
+ /* Remove a property altogether */
+ SVN_ERR(svn_fs_change_txn_prop(txn, "size", NULL, pool));
+
+ /* Copy a property's value into a new property. */
+ SVN_ERR(svn_fs_txn_prop(&value, txn, "color", pool));
+
+ s1.data = value->data;
+ s1.len = value->len;
+ SVN_ERR(svn_fs_change_txn_prop(txn, "flower", &s1, pool));
+
+ /* Obtain a list of all current properties, and make sure it matches
+ the expected values. */
+ SVN_ERR(svn_fs_txn_proplist(&proplist, txn, pool));
+ {
+ svn_string_t *prop_value;
+
+ /* All transactions get a datestamp property at their inception,
+ so we expect *5*, not 4 properties. */
+ if (apr_hash_count(proplist) != 5 )
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "unexpected number of transaction properties were found");
+
+ /* Loop through our list of expected transaction property name/value
+ pairs. */
+ for (i = 0; i < 5; i++)
+ {
+ /* For each expected property: */
+
+ /* Step 1. Find it by name in the hash of all txn props
+ returned to us by svn_fs_txn_proplist. If it can't be
+ found, return an error. */
+ prop_value = apr_hash_get(proplist,
+ final_props[i][0],
+ APR_HASH_KEY_STRING);
+ if (! prop_value)
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "unable to find expected transaction property");
+
+ /* Step 2. Make sure the value associated with it is the same
+ as what was expected, else return an error. */
+ if (strcmp(final_props[i][0], SVN_PROP_REVISION_DATE))
+ if (strcmp(prop_value->data, final_props[i][1]))
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "transaction property had an unexpected value");
+ }
+ }
+
+ /* Commit the transaction. */
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+ if (after_rev != 1)
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "committed transaction got wrong revision number");
+
+ /* Obtain a list of all properties on the new revision, and make
+ sure it matches the expected values. If you're wondering, the
+ expected values should be the exact same set of properties that
+ existed on the transaction just prior to its being committed. */
+ SVN_ERR(svn_fs_revision_proplist(&proplist, fs, after_rev, pool));
+ {
+ svn_string_t *prop_value;
+
+ if (apr_hash_count(proplist) < 5 )
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "unexpected number of revision properties were found");
+
+ /* Loop through our list of expected revision property name/value
+ pairs. */
+ for (i = 0; i < 5; i++)
+ {
+ /* For each expected property: */
+
+ /* Step 1. Find it by name in the hash of all rev. props
+ returned to us by svn_fs_revision_proplist. If it can't be
+ found, return an error. */
+ prop_value = apr_hash_get(proplist,
+ final_props[i][0],
+ APR_HASH_KEY_STRING);
+ if (! prop_value)
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "unable to find expected revision property");
+
+ /* Step 2. Make sure the value associated with it is the same
+ as what was expected, else return an error. */
+ if (strcmp(final_props[i][0], SVN_PROP_REVISION_DATE))
+ if (strcmp(prop_value->data, final_props[i][1]))
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "revision property had an unexpected value");
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Exercise node-property manipulation inside a transaction: set four
+   properties on a new file, overwrite two of them, delete one, copy one
+   value into a brand-new property, then verify that the resulting
+   proplist matches the expected name/value pairs exactly. */
+static svn_error_t *
+node_props(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ apr_hash_t *proplist;
+ svn_string_t *value;
+ int i;
+ svn_string_t s1;
+
+ /* The four properties initially set on the file. */
+ const char *initial_props[4][2] = {
+ { "Best Rock Artist", "Creed" },
+ { "Best Rap Artist", "Eminem" },
+ { "Best Country Artist", "(null)" },
+ { "Best Sound Designer", "Pluessman" }
+ };
+
+ /* The property set expected after all the modifications below. */
+ const char *final_props[4][2] = {
+ { "Best Rock Artist", "P.O.D." },
+ { "Best Rap Artist", "Busta Rhymes" },
+ { "Best Sound Designer", "Pluessman" },
+ { "Biggest Cakewalk Fanatic", "Pluessman" }
+ };
+
+ /* Open the fs and transaction */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-node-props",
+ opts, pool));
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Make a node to put some properties into */
+ SVN_ERR(svn_fs_make_file(txn_root, "music.txt", pool));
+
+ /* Set some properties on the nodes. */
+ for (i = 0; i < 4; i++)
+ {
+ /* SET_STR presumably points s1 at the literal without copying --
+    confirm against the macro definition earlier in this file. */
+ SET_STR(&s1, initial_props[i][1]);
+ SVN_ERR(svn_fs_change_node_prop
+ (txn_root, "music.txt", initial_props[i][0], &s1, pool));
+ }
+
+ /* Change some of the above properties. */
+ SET_STR(&s1, "P.O.D.");
+ SVN_ERR(svn_fs_change_node_prop(txn_root, "music.txt", "Best Rock Artist",
+ &s1, pool));
+
+ SET_STR(&s1, "Busta Rhymes");
+ SVN_ERR(svn_fs_change_node_prop(txn_root, "music.txt", "Best Rap Artist",
+ &s1, pool));
+
+ /* Remove a property altogether */
+ /* (passing a NULL value deletes the property). */
+ SVN_ERR(svn_fs_change_node_prop(txn_root, "music.txt",
+ "Best Country Artist", NULL, pool));
+
+ /* Copy a property's value into a new property. */
+ SVN_ERR(svn_fs_node_prop(&value, txn_root, "music.txt",
+ "Best Sound Designer", pool));
+
+ s1.data = value->data;
+ s1.len = value->len;
+ SVN_ERR(svn_fs_change_node_prop(txn_root, "music.txt",
+ "Biggest Cakewalk Fanatic", &s1, pool));
+
+ /* Obtain a list of all current properties, and make sure it matches
+ the expected values. */
+ SVN_ERR(svn_fs_node_proplist(&proplist, txn_root, "music.txt", pool));
+ {
+ svn_string_t *prop_value;
+
+ /* Exactly the four entries of final_props[] must remain. */
+ if (apr_hash_count(proplist) != 4 )
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "unexpected number of node properties were found");
+
+ /* Loop through our list of expected node property name/value
+ pairs. */
+ for (i = 0; i < 4; i++)
+ {
+ /* For each expected property: */
+
+ /* Step 1. Find it by name in the hash of all node props
+ returned to us by svn_fs_node_proplist. If it can't be
+ found, return an error. */
+ prop_value = apr_hash_get(proplist,
+ final_props[i][0],
+ APR_HASH_KEY_STRING);
+ if (! prop_value)
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "unable to find expected node property");
+
+ /* Step 2. Make sure the value associated with it is the same
+ as what was expected, else return an error. */
+ if (strcmp(prop_value->data, final_props[i][1]))
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "node property had an unexpected value");
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* Probe directory PATH under ROOT for an entry named NAME and record
+   the outcome: set *PRESENT to TRUE when the entry exists, FALSE
+   otherwise. */
+static svn_error_t *
+check_entry(svn_fs_root_t *root,
+ const char *path,
+ const char *name,
+ svn_boolean_t *present,
+ apr_pool_t *pool)
+{
+ apr_hash_t *entries;
+ apr_pool_t *scratch_pool = svn_pool_create(pool);
+
+ /* Fetch the directory listing into a scratch pool so the temporary
+    hash does not outlive this check. */
+ SVN_ERR(svn_fs_dir_entries(&entries, root, path, scratch_pool));
+ *present = (apr_hash_get(entries, name, APR_HASH_KEY_STRING) != NULL);
+
+ svn_pool_destroy(scratch_pool);
+ return SVN_NO_ERROR;
+}
+
+
+/* Return an error if entry NAME is absent in directory PATH under ROOT. */
+static svn_error_t *
+check_entry_present(svn_fs_root_t *root, const char *path,
+ const char *name, apr_pool_t *pool)
+{
+ svn_boolean_t found = FALSE;
+
+ SVN_ERR(check_entry(root, path, name, &found, pool));
+ if (found)
+ return SVN_NO_ERROR;
+
+ /* The entry was expected but is missing. */
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "entry \"%s\" absent when it should be present", name);
+}
+
+
+/* Return an error if entry NAME is present in directory PATH under ROOT. */
+static svn_error_t *
+check_entry_absent(svn_fs_root_t *root, const char *path,
+ const char *name, apr_pool_t *pool)
+{
+ svn_boolean_t found = TRUE;
+
+ SVN_ERR(check_entry(root, path, name, &found, pool));
+ if (! found)
+ return SVN_NO_ERROR;
+
+ /* The entry should have been gone, yet it still exists. */
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "entry \"%s\" present when it should be absent", name);
+}
+
+
+/* Fetch the youngest revision from a repos. */
+static svn_error_t *
+fetch_youngest_rev(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ svn_revnum_t committed_rev;
+ svn_revnum_t initial_youngest, youngest_after_commit;
+
+ /* Create a fresh filesystem and note its youngest revision. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-youngest-rev",
+ opts, pool));
+ SVN_ERR(svn_fs_youngest_rev(&initial_youngest, fs, pool));
+
+ /* Build the greek tree in a transaction and commit it. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+ SVN_ERR(test_commit_txn(&committed_rev, txn, NULL, pool));
+
+ /* Re-query the youngest revision and sanity-check both readings:
+    the commit must have advanced it, and the new reading must equal
+    the revision the commit reported. */
+ SVN_ERR(svn_fs_youngest_rev(&youngest_after_commit, fs, pool));
+
+ if (initial_youngest == committed_rev)
+ return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+ "commit didn't bump up revision number");
+
+ if (youngest_after_commit != committed_rev)
+ return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+ "couldn't fetch youngest revision");
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Test committing against an empty repository.
+ todo: also test committing against youngest? */
+static svn_error_t *
+basic_commit(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *revision_root;
+ svn_revnum_t before_rev, after_rev;
+ /* Set by svn_fs_commit_txn on conflict; unused on success here. */
+ const char *conflict;
+
+ /* Prepare a filesystem. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-basic-commit",
+ opts, pool));
+
+ /* Save the current youngest revision. */
+ SVN_ERR(svn_fs_youngest_rev(&before_rev, fs, pool));
+
+ /* Prepare a txn to receive the greek tree. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Paranoidly check that the current youngest rev is unchanged. */
+ SVN_ERR(svn_fs_youngest_rev(&after_rev, fs, pool));
+ if (after_rev != before_rev)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "youngest revision changed unexpectedly");
+
+ /* Create the greek tree. */
+ SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+ /* A txn root must identify as a txn root and not a revision root. */
+ SVN_TEST_ASSERT(svn_fs_is_txn_root(txn_root));
+ SVN_TEST_ASSERT(!svn_fs_is_revision_root(txn_root));
+
+ /* Commit it. */
+ SVN_ERR(svn_fs_commit_txn(&conflict, &after_rev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(after_rev));
+
+ /* Make sure it's a different revision than before. */
+ if (after_rev == before_rev)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "youngest revision failed to change");
+
+ /* Get root of the revision */
+ /* (and check the root-type predicates give the mirror-image answers). */
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs, after_rev, pool));
+ SVN_TEST_ASSERT(!svn_fs_is_txn_root(revision_root));
+ SVN_TEST_ASSERT(svn_fs_is_revision_root(revision_root));
+
+ /* Check the tree. */
+ SVN_ERR(svn_test__check_greek_tree(revision_root, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* Validate tree contents both in a transaction and in the revision it
+   becomes: first commit the pristine greek tree, then a second txn that
+   edits, deletes and adds nodes -- each time checking the full entry
+   list (paths and file contents) before and after the commit. */
+static svn_error_t *
+test_tree_node_validation(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *revision_root;
+ svn_revnum_t after_rev;
+ const char *conflict;
+ apr_pool_t *subpool;
+
+ /* Prepare a filesystem. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-validate-tree-entries",
+ opts, pool));
+
+ /* In a txn, create the greek tree. */
+ subpool = svn_pool_create(pool);
+ {
+ /* All 20 nodes of the pristine greek tree; the count passed to
+    svn_test__validate_tree below must match this array's length. */
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "iota", "This is the file 'iota'.\n" },
+ { "A", 0 },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/H", 0 },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" },
+ { "A/D/H/omega", "This is the file 'omega'.\n" }
+ };
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, subpool));
+
+ /* Carefully validate that tree in the transaction. */
+ SVN_ERR(svn_test__validate_tree(txn_root, expected_entries, 20,
+ subpool));
+
+ /* Go ahead and commit the tree, and destroy the txn object. */
+ SVN_ERR(svn_fs_commit_txn(&conflict, &after_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(after_rev));
+
+ /* Carefully validate that tree in the new revision, now. */
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs, after_rev, subpool));
+ SVN_ERR(svn_test__validate_tree(revision_root, expected_entries, 20,
+ subpool));
+ }
+ svn_pool_destroy(subpool);
+
+ /* In a new txn, modify the greek tree. */
+ subpool = svn_pool_create(pool);
+ {
+ /* The 19 nodes expected after the edits below: iota rewritten,
+    A/mu and A/D/G deleted, A/D/I (with delta, epsilon) and
+    A/C/kappa added. */
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "iota", "This is a new version of 'iota'.\n" },
+ { "A", 0 },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/C/kappa", "This is the file 'kappa'.\n" },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/H", 0 },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" },
+ { "A/D/H/omega", "This is the file 'omega'.\n" },
+ { "A/D/I", 0 },
+ { "A/D/I/delta", "This is the file 'delta'.\n" },
+ { "A/D/I/epsilon", "This is the file 'epsilon'.\n" }
+ };
+
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, after_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "iota", "This is a new version of 'iota'.\n",
+ subpool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/mu", subpool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/D/G", subpool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "A/D/I", subpool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/D/I/delta", subpool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "A/D/I/delta", "This is the file 'delta'.\n",
+ subpool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/D/I/epsilon", subpool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "A/D/I/epsilon", "This is the file 'epsilon'.\n",
+ subpool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/C/kappa", subpool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "A/C/kappa", "This is the file 'kappa'.\n",
+ subpool));
+
+ /* Carefully validate that tree in the transaction. */
+ SVN_ERR(svn_test__validate_tree(txn_root, expected_entries, 19,
+ subpool));
+
+ /* Go ahead and commit the tree, and destroy the txn object. */
+ SVN_ERR(svn_fs_commit_txn(&conflict, &after_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(after_rev));
+
+ /* Carefully validate that tree in the new revision, now. */
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs, after_rev, subpool));
+ SVN_ERR(svn_test__validate_tree(revision_root, expected_entries,
+ 19, subpool));
+ }
+ svn_pool_destroy(subpool);
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Commit with merging (committing against non-youngest). */
+static svn_error_t *
+merging_commit(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *revision_root;
+ svn_revnum_t after_rev;
+ svn_revnum_t revisions[24];
+ apr_size_t i;
+ svn_revnum_t revision_count;
+
+ /* Prepare a filesystem. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-merging-commit",
+ opts, pool));
+
+ /* Initialize our revision number stuffs. */
+ for (i = 0;
+ i < ((sizeof(revisions)) / (sizeof(svn_revnum_t)));
+ i++)
+ revisions[i] = SVN_INVALID_REVNUM;
+ revision_count = 0;
+ revisions[revision_count++] = 0; /* the brand spankin' new revision */
+
+ /***********************************************************************/
+ /* REVISION 0 */
+ /***********************************************************************/
+
+ /* In one txn, create and commit the greek tree. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+
+ /***********************************************************************/
+ /* REVISION 1 */
+ /***********************************************************************/
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "iota", "This is the file 'iota'.\n" },
+ { "A", 0 },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/H", 0 },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" },
+ { "A/D/H/omega", "This is the file 'omega'.\n" }
+ };
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs, after_rev, pool));
+ SVN_ERR(svn_test__validate_tree(revision_root, expected_entries,
+ 20, pool));
+ }
+ revisions[revision_count++] = after_rev;
+
+ /* Let's add a directory and some files to the tree, and delete
+ 'iota' */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, revisions[revision_count-1], pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "A/D/I", pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/D/I/delta", pool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "A/D/I/delta", "This is the file 'delta'.\n", pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/D/I/epsilon", pool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "A/D/I/epsilon", "This is the file 'epsilon'.\n", pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/C/kappa", pool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "A/C/kappa", "This is the file 'kappa'.\n", pool));
+ SVN_ERR(svn_fs_delete(txn_root, "iota", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+
+ /***********************************************************************/
+ /* REVISION 2 */
+ /***********************************************************************/
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "A", 0 },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/C/kappa", "This is the file 'kappa'.\n" },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/H", 0 },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" },
+ { "A/D/H/omega", "This is the file 'omega'.\n" },
+ { "A/D/I", 0 },
+ { "A/D/I/delta", "This is the file 'delta'.\n" },
+ { "A/D/I/epsilon", "This is the file 'epsilon'.\n" }
+ };
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs, after_rev, pool));
+ SVN_ERR(svn_test__validate_tree(revision_root, expected_entries,
+ 23, pool));
+ }
+ revisions[revision_count++] = after_rev;
+
+ /* We don't think the A/D/H directory is pulling its weight...let's
+ knock it off. Oh, and let's re-add iota, too. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, revisions[revision_count-1], pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/D/H", pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "iota", pool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "iota", "This is the new file 'iota'.\n", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+
+ /***********************************************************************/
+ /* REVISION 3 */
+ /***********************************************************************/
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "iota", "This is the new file 'iota'.\n" },
+ { "A", 0 },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/C/kappa", "This is the file 'kappa'.\n" },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/I", 0 },
+ { "A/D/I/delta", "This is the file 'delta'.\n" },
+ { "A/D/I/epsilon", "This is the file 'epsilon'.\n" }
+ };
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs, after_rev, pool));
+ SVN_ERR(svn_test__validate_tree(revision_root, expected_entries,
+ 20, pool));
+ }
+ revisions[revision_count++] = after_rev;
+
+ /* Delete iota (yet again). */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, revisions[revision_count-1], pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_delete(txn_root, "iota", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+
+ /***********************************************************************/
+ /* REVISION 4 */
+ /***********************************************************************/
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "A", 0 },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/C/kappa", "This is the file 'kappa'.\n" },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/I", 0 },
+ { "A/D/I/delta", "This is the file 'delta'.\n" },
+ { "A/D/I/epsilon", "This is the file 'epsilon'.\n" }
+ };
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs, after_rev, pool));
+ SVN_ERR(svn_test__validate_tree(revision_root, expected_entries,
+ 19, pool));
+ }
+ revisions[revision_count++] = after_rev;
+
+ /***********************************************************************/
+ /* GIVEN: A and B, with common ancestor ANCESTOR, where A and B
+ directories, and E, an entry in either A, B, or ANCESTOR.
+
+ For every E, the following cases exist:
+ - E exists in neither ANCESTOR nor A.
+ - E doesn't exist in ANCESTOR, and has been added to A.
+ - E exists in ANCESTOR, but has been deleted from A.
+ - E exists in both ANCESTOR and A ...
+ - but refers to different node revisions.
+ - and refers to the same node revision.
+
+ The same set of possible relationships with ANCESTOR holds for B,
+ so there are thirty-six combinations. The matrix is symmetrical
+ with A and B reversed, so we only have to describe one triangular
+ half, including the diagonal --- 21 combinations.
+
+ Our goal here is to test all the possible scenarios that can
+ occur given the above boolean logic table, and to make sure that
+ the results we get are as expected.
+
+ The test cases below have the following features:
+
+ - They run straight through the scenarios as described in the
+ `structure' document at this time.
+
+ - In each case, a txn is begun based on some revision (ANCESTOR),
+ is modified into a new tree (B), and then is attempted to be
+ committed (which happens against the head of the tree, A).
+
+ - If the commit is successful (and is *expected* to be such),
+ that new revision (which exists now as a result of the
+ successful commit) is thoroughly tested for accuracy of tree
+ entries, and in the case of files, for their contents. It is
+ important to realize that these successful commits are
+ advancing the head of the tree, and each one effective becomes
+ the new `A' described in further test cases.
+ */
+ /***********************************************************************/
+
+ /* (6) E exists in neither ANCESTOR nor A. */
+ {
+ /* (1) E exists in neither ANCESTOR nor B. Can't occur, by
+ assumption that E exists in either A, B, or ancestor. */
+
+ /* (1) E has been added to B. Add E in the merged result. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, revisions[0], pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "theta", pool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "theta", "This is the file 'theta'.\n", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+
+ /*********************************************************************/
+ /* REVISION 5 */
+ /*********************************************************************/
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "theta", "This is the file 'theta'.\n" },
+ { "A", 0 },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/C/kappa", "This is the file 'kappa'.\n" },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/I", 0 },
+ { "A/D/I/delta", "This is the file 'delta'.\n" },
+ { "A/D/I/epsilon", "This is the file 'epsilon'.\n" }
+ };
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs, after_rev, pool));
+ SVN_ERR(svn_test__validate_tree(revision_root,
+ expected_entries,
+ 20, pool));
+ }
+ revisions[revision_count++] = after_rev;
+
+ /* (1) E has been deleted from B. Can't occur, by assumption that
+ E doesn't exist in ANCESTOR. */
+
+ /* (3) E exists in both ANCESTOR and B. Can't occur, by
+ assumption that E doesn't exist in ancestor. */
+ }
+
+ /* (5) E doesn't exist in ANCESTOR, and has been added to A. */
+ {
+ svn_revnum_t failed_rev;
+ /* (1) E doesn't exist in ANCESTOR, and has been added to B.
+ Conflict. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, revisions[4], pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "theta", pool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "theta", "This is another file 'theta'.\n", pool));
+
+ /* TXN must actually be based upon revisions[4] (instead of HEAD). */
+ SVN_TEST_ASSERT(svn_fs_txn_base_revision(txn) == revisions[4]);
+
+ SVN_ERR(test_commit_txn(&failed_rev, txn, "/theta", pool));
+ SVN_ERR(svn_fs_abort_txn(txn, pool));
+
+ /* (1) E exists in ANCESTOR, but has been deleted from B. Can't
+ occur, by assumption that E doesn't exist in ANCESTOR. */
+
+ /* (3) E exists in both ANCESTOR and B. Can't occur, by assumption
+ that E doesn't exist in ANCESTOR. */
+
+ SVN_TEST_ASSERT(failed_rev == SVN_INVALID_REVNUM);
+ }
+
+ /* (4) E exists in ANCESTOR, but has been deleted from A */
+ {
+ /* (1) E exists in ANCESTOR, but has been deleted from B. If
+ neither delete was a result of a rename, then omit E from the
+ merged tree. Otherwise, conflict. */
+ /* ### cmpilato todo: the rename case isn't actually handled by
+ merge yet, so we know we won't get a conflict here. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, revisions[1], pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/D/H", pool));
+
+ /* TXN must actually be based upon revisions[1] (instead of HEAD). */
+ SVN_TEST_ASSERT(svn_fs_txn_base_revision(txn) == revisions[1]);
+
+ /* We used to create the revision like this before fixing issue
+ #2751 -- Directory prop mods reverted in overlapping commits scenario.
+
+ But we now expect that to fail as out of date */
+ {
+ svn_revnum_t failed_rev;
+ SVN_ERR(test_commit_txn(&failed_rev, txn, "/A/D/H", pool));
+
+ SVN_TEST_ASSERT(failed_rev == SVN_INVALID_REVNUM);
+ }
+ /*********************************************************************/
+ /* REVISION 6 */
+ /*********************************************************************/
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "theta", "This is the file 'theta'.\n" },
+ { "A", 0 },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/C/kappa", "This is the file 'kappa'.\n" },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/I", 0 },
+ { "A/D/I/delta", "This is the file 'delta'.\n" },
+ { "A/D/I/epsilon", "This is the file 'epsilon'.\n" }
+ };
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs, after_rev, pool));
+ SVN_ERR(svn_test__validate_tree(revision_root,
+ expected_entries,
+ 20, pool));
+ }
+ revisions[revision_count++] = after_rev;
+
+ /* Try deleting a file F inside a subtree S where S does not exist
+ in the most recent revision, but does exist in the ancestor
+ tree. This should conflict. */
+ {
+ svn_revnum_t failed_rev;
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, revisions[1], pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/D/H/omega", pool));
+ SVN_ERR(test_commit_txn(&failed_rev, txn, "/A/D/H", pool));
+ SVN_ERR(svn_fs_abort_txn(txn, pool));
+
+ SVN_TEST_ASSERT(failed_rev == SVN_INVALID_REVNUM);
+ }
+
+ /* E exists in both ANCESTOR and B ... */
+ {
+ /* (1) but refers to different nodes. Conflict. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, after_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "A/D/H", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+ revisions[revision_count++] = after_rev;
+
+ /*********************************************************************/
+ /* REVISION 7 */
+ /*********************************************************************/
+
+ /* Re-remove A/D/H because future tests expect it to be absent. */
+ {
+ SVN_ERR(svn_fs_begin_txn
+ (&txn, fs, revisions[revision_count - 1], pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/D/H", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+ revisions[revision_count++] = after_rev;
+ }
+
+ /*********************************************************************/
+ /* REVISION 8 (looks exactly like revision 6, we hope) */
+ /*********************************************************************/
+
+ /* (1) but refers to different revisions of the same node.
+ Conflict. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, revisions[1], pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/D/H/zeta", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, "/A/D/H", pool));
+ SVN_ERR(svn_fs_abort_txn(txn, pool));
+
+ /* (1) and refers to the same node revision. Omit E from the
+ merged tree. This is already tested in Merge-Test 3
+ (A/D/H/chi, A/D/H/psi, e.g.), but we'll test it here again
+ anyway. A little paranoia never hurt anyone. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, revisions[1], pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/mu", pool)); /* unrelated change */
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+
+ /*********************************************************************/
+ /* REVISION 9 */
+ /*********************************************************************/
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "theta", "This is the file 'theta'.\n" },
+ { "A", 0 },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/C/kappa", "This is the file 'kappa'.\n" },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/I", 0 },
+ { "A/D/I/delta", "This is the file 'delta'.\n" },
+ { "A/D/I/epsilon", "This is the file 'epsilon'.\n" }
+ };
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs, after_rev, pool));
+ SVN_ERR(svn_test__validate_tree(revision_root,
+ expected_entries,
+ 19, pool));
+ }
+ revisions[revision_count++] = after_rev;
+ }
+ }
+
+ /* Preparation for upcoming tests.
+ We make a new head revision, with A/mu restored, but containing
+ slightly different contents than its first incarnation. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, revisions[revision_count-1], pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/mu", pool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "A/mu", "A new file 'mu'.\n", pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/D/G/xi", pool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "A/D/G/xi", "This is the file 'xi'.\n", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+ /*********************************************************************/
+ /* REVISION 10 */
+ /*********************************************************************/
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "theta", "This is the file 'theta'.\n" },
+ { "A", 0 },
+ { "A/mu", "A new file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/C/kappa", "This is the file 'kappa'.\n" },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/G/xi", "This is the file 'xi'.\n" },
+ { "A/D/I", 0 },
+ { "A/D/I/delta", "This is the file 'delta'.\n" },
+ { "A/D/I/epsilon", "This is the file 'epsilon'.\n" }
+ };
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs, after_rev, pool));
+ SVN_ERR(svn_test__validate_tree(revision_root, expected_entries,
+ 21, pool));
+ }
+ revisions[revision_count++] = after_rev;
+
+ /* (3) E exists in both ANCESTOR and A, but refers to different
+ nodes. */
+ {
+ /* (1) E exists in both ANCESTOR and B, but refers to different
+ nodes, and not all nodes are directories. Conflict. */
+
+ /* ### kff todo: A/mu's contents will be exactly the same.
+ If the fs ever starts optimizing this case, these tests may
+ start to fail. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, revisions[1], pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/mu", pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/mu", pool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "A/mu", "This is the file 'mu'.\n", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, "/A/mu", pool));
+ SVN_ERR(svn_fs_abort_txn(txn, pool));
+
+ /* (1) E exists in both ANCESTOR and B, but refers to different
+ revisions of the same node. Conflict. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, revisions[1], pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "A/mu", "A change to file 'mu'.\n", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, "/A/mu", pool));
+ SVN_ERR(svn_fs_abort_txn(txn, pool));
+
+ /* (1) E exists in both ANCESTOR and B, and refers to the same
+ node revision. Replace E with A's node revision. */
+ {
+ svn_stringbuf_t *old_mu_contents;
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, revisions[1], pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__get_file_contents
+ (txn_root, "A/mu", &old_mu_contents, pool));
+ if ((! old_mu_contents) || (strcmp(old_mu_contents->data,
+ "This is the file 'mu'.\n") != 0))
+ {
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "got wrong contents from an old revision tree");
+ }
+ SVN_ERR(svn_fs_make_file(txn_root, "A/sigma", pool));
+ SVN_ERR(svn_test__set_file_contents /* unrelated change */
+ (txn_root, "A/sigma", "This is the file 'sigma'.\n", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+ /*********************************************************************/
+ /* REVISION 11 */
+ /*********************************************************************/
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "theta", "This is the file 'theta'.\n" },
+ { "A", 0 },
+ { "A/mu", "A new file 'mu'.\n" },
+ { "A/sigma", "This is the file 'sigma'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/C/kappa", "This is the file 'kappa'.\n" },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/G/xi", "This is the file 'xi'.\n" },
+ { "A/D/I", 0 },
+ { "A/D/I/delta", "This is the file 'delta'.\n" },
+ { "A/D/I/epsilon", "This is the file 'epsilon'.\n" }
+ };
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs, after_rev, pool));
+ SVN_ERR(svn_test__validate_tree(revision_root,
+ expected_entries,
+ 22, pool));
+ }
+ revisions[revision_count++] = after_rev;
+ }
+ }
+
+ /* Preparation for upcoming tests.
+ We make a new head revision. There are two changes in the new
+ revision: A/B/lambda has been modified. We will also use the
+ recent addition of A/D/G/xi, treated as a modification to
+ A/D/G. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, revisions[revision_count-1], pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "A/B/lambda", "Change to file 'lambda'.\n", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+ /*********************************************************************/
+ /* REVISION 12 */
+ /*********************************************************************/
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "theta", "This is the file 'theta'.\n" },
+ { "A", 0 },
+ { "A/mu", "A new file 'mu'.\n" },
+ { "A/sigma", "This is the file 'sigma'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "Change to file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/C/kappa", "This is the file 'kappa'.\n" },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/G/xi", "This is the file 'xi'.\n" },
+ { "A/D/I", 0 },
+ { "A/D/I/delta", "This is the file 'delta'.\n" },
+ { "A/D/I/epsilon", "This is the file 'epsilon'.\n" }
+ };
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs, after_rev, pool));
+ SVN_ERR(svn_test__validate_tree(revision_root, expected_entries,
+ 22, pool));
+ }
+ revisions[revision_count++] = after_rev;
+
+ /* (2) E exists in both ANCESTOR and A, but refers to different
+ revisions of the same node. */
+ {
+ /* (1a) E exists in both ANCESTOR and B, but refers to different
+ revisions of the same file node. Conflict. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, revisions[1], pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "A/B/lambda", "A different change to 'lambda'.\n",
+ pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, "/A/B/lambda", pool));
+ SVN_ERR(svn_fs_abort_txn(txn, pool));
+
+ /* (1b) E exists in both ANCESTOR and B, but refers to different
+ revisions of the same directory node. Merge A/E and B/E,
+ recursively. Succeed, because no conflict beneath E. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, revisions[1], pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/D/G/nu", pool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "A/D/G/nu", "This is the file 'nu'.\n", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+ /*********************************************************************/
+ /* REVISION 13 */
+ /*********************************************************************/
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "theta", "This is the file 'theta'.\n" },
+ { "A", 0 },
+ { "A/mu", "A new file 'mu'.\n" },
+ { "A/sigma", "This is the file 'sigma'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "Change to file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/C/kappa", "This is the file 'kappa'.\n" },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/G/xi", "This is the file 'xi'.\n" },
+ { "A/D/G/nu", "This is the file 'nu'.\n" },
+ { "A/D/I", 0 },
+ { "A/D/I/delta", "This is the file 'delta'.\n" },
+ { "A/D/I/epsilon", "This is the file 'epsilon'.\n" }
+ };
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs, after_rev, pool));
+ SVN_ERR(svn_test__validate_tree(revision_root,
+ expected_entries,
+ 23, pool));
+ }
+ revisions[revision_count++] = after_rev;
+
+ /* (1c) E exists in both ANCESTOR and B, but refers to different
+ revisions of the same directory node. Merge A/E and B/E,
+ recursively. Fail, because conflict beneath E. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, revisions[1], pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/D/G/xi", pool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "A/D/G/xi", "This is a different file 'xi'.\n", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, "/A/D/G/xi", pool));
+ SVN_ERR(svn_fs_abort_txn(txn, pool));
+
+ /* (1) E exists in both ANCESTOR and B, and refers to the same node
+ revision. Replace E with A's node revision. */
+ {
+ svn_stringbuf_t *old_lambda_ctnts;
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, revisions[1], pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__get_file_contents
+ (txn_root, "A/B/lambda", &old_lambda_ctnts, pool));
+ if ((! old_lambda_ctnts)
+ || (strcmp(old_lambda_ctnts->data,
+ "This is the file 'lambda'.\n") != 0))
+ {
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "got wrong contents from an old revision tree");
+ }
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "A/D/G/rho",
+ "This is an irrelevant change to 'rho'.\n", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+ /*********************************************************************/
+ /* REVISION 14 */
+ /*********************************************************************/
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "theta", "This is the file 'theta'.\n" },
+ { "A", 0 },
+ { "A/mu", "A new file 'mu'.\n" },
+ { "A/sigma", "This is the file 'sigma'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "Change to file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/C/kappa", "This is the file 'kappa'.\n" },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is an irrelevant change to 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/G/xi", "This is the file 'xi'.\n" },
+ { "A/D/G/nu", "This is the file 'nu'.\n"},
+ { "A/D/I", 0 },
+ { "A/D/I/delta", "This is the file 'delta'.\n" },
+ { "A/D/I/epsilon", "This is the file 'epsilon'.\n" }
+ };
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs, after_rev, pool));
+ SVN_ERR(svn_test__validate_tree(revision_root,
+ expected_entries,
+ 23, pool));
+ }
+ revisions[revision_count++] = after_rev;
+ }
+ }
+
+ /* (1) E exists in both ANCESTOR and A, and refers to the same node
+ revision. */
+ {
+ /* (1) E exists in both ANCESTOR and B, and refers to the same
+ node revision. Nothing has happened to ANCESTOR/E, so no
+ change is necessary. */
+
+ /* This has now been tested about fifty-four trillion times. We
+ don't need to test it again here. */
+ }
+
+ /* E exists in ANCESTOR, but has been deleted from A. E exists in
+ both ANCESTOR and B but refers to different revisions of the same
+ node. Conflict. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, revisions[1], pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "iota", "New contents for 'iota'.\n", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, "/iota", pool));
+ SVN_ERR(svn_fs_abort_txn(txn, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Test svn_fs_copy() and svn_fs_copied_from().  Grounded in the calls
+ * below, this verifies that copy history is:
+ *   - visible inside the uncommitted txn and after commit,
+ *   - chained correctly when a copy is itself copied,
+ *   - NOT inherited by a later plain edit of the copy destination,
+ *   - reported at the top of a copied subtree but not for files
+ *     beneath it, and
+ *   - sane when a directory (A/B) is copied into one of its own
+ *     children (A/B/E/B).
+ * OPTS selects the filesystem backend under test; POOL supplies all
+ * allocations.  Any failed expectation is returned as an
+ * SVN_ERR_FS_GENERAL error. */
+static svn_error_t *
+copy_test(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *rev_root;
+ svn_revnum_t after_rev;
+
+ /* Prepare a filesystem. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-copy",
+ opts, pool));
+
+ /* In first txn, create and commit the greek tree. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+
+ /* In second txn, copy the file A/D/G/pi into the subtree A/D/H as
+ pi2. Change that file's contents to state its new name. Along
+ the way, test that the copy history was preserved both during the
+ transaction and after the commit. */
+
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, after_rev, pool));
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, after_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_copy(rev_root, "A/D/G/pi",
+ txn_root, "A/D/H/pi2",
+ pool));
+ { /* Check that copy history was preserved. */
+ svn_revnum_t rev;
+ const char *path;
+
+ SVN_ERR(svn_fs_copied_from(&rev, &path, txn_root,
+ "A/D/H/pi2", pool));
+
+ /* The txn has not been committed yet, so the copy source is still
+ the head revision AFTER_REV that REV_ROOT was opened on. */
+ if (rev != after_rev)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "pre-commit copy history not preserved (rev lost) for A/D/H/pi2");
+
+ if (strcmp(path, "/A/D/G/pi") != 0)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "pre-commit copy history not preserved (path lost) for A/D/H/pi2");
+ }
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "A/D/H/pi2", "This is the file 'pi2'.\n", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+
+ { /* Check that copy history is still preserved _after_ the commit. */
+ svn_fs_root_t *root;
+ svn_revnum_t rev;
+ const char *path;
+
+ SVN_ERR(svn_fs_revision_root(&root, fs, after_rev, pool));
+ SVN_ERR(svn_fs_copied_from(&rev, &path, root, "A/D/H/pi2", pool));
+
+ /* test_commit_txn() advanced AFTER_REV, so the revision the copy
+ was made from is now AFTER_REV - 1. */
+ if (rev != (after_rev - 1))
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "post-commit copy history wrong (rev) for A/D/H/pi2");
+
+ if (strcmp(path, "/A/D/G/pi") != 0)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "post-commit copy history wrong (path) for A/D/H/pi2");
+ }
+
+ /* Let's copy the copy we just made, to make sure copy history gets
+ chained correctly. */
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, after_rev, pool));
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, after_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_copy(rev_root, "A/D/H/pi2", txn_root, "A/D/H/pi3", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+ { /* Check the copy history. */
+ svn_fs_root_t *root;
+ svn_revnum_t rev;
+ const char *path;
+
+ /* Check that the original copy still has its old history.
+ (Two commits have happened since it was made, hence
+ AFTER_REV - 2.) */
+ SVN_ERR(svn_fs_revision_root(&root, fs, (after_rev - 1), pool));
+ SVN_ERR(svn_fs_copied_from(&rev, &path, root, "A/D/H/pi2", pool));
+
+ if (rev != (after_rev - 2))
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "first copy history wrong (rev) for A/D/H/pi2");
+
+ if (strcmp(path, "/A/D/G/pi") != 0)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "first copy history wrong (path) for A/D/H/pi2");
+
+ /* Check that the copy of the copy has the right history. */
+ SVN_ERR(svn_fs_revision_root(&root, fs, after_rev, pool));
+ SVN_ERR(svn_fs_copied_from(&rev, &path, root, "A/D/H/pi3", pool));
+
+ if (rev != (after_rev - 1))
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "second copy history wrong (rev) for A/D/H/pi3");
+
+ if (strcmp(path, "/A/D/H/pi2") != 0)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "second copy history wrong (path) for A/D/H/pi3");
+ }
+
+ /* Commit a regular change to a copy, make sure the copy history
+ isn't inherited. */
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, after_rev, pool));
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, after_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "A/D/H/pi3", "This is the file 'pi3'.\n", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+ { /* Check the copy history. */
+ svn_fs_root_t *root;
+ svn_revnum_t rev;
+ const char *path;
+
+ /* Check that the copy still has its history. */
+ SVN_ERR(svn_fs_revision_root(&root, fs, (after_rev - 1), pool));
+ SVN_ERR(svn_fs_copied_from(&rev, &path, root, "A/D/H/pi3", pool));
+
+ if (rev != (after_rev - 2))
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "copy history wrong (rev) for A/D/H/pi3");
+
+ if (strcmp(path, "/A/D/H/pi2") != 0)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "copy history wrong (path) for A/D/H/pi3");
+
+ /* Check that the next revision after the copy has no copy history:
+ a plain content edit must report SVN_INVALID_REVNUM / NULL. */
+ SVN_ERR(svn_fs_revision_root(&root, fs, after_rev, pool));
+ SVN_ERR(svn_fs_copied_from(&rev, &path, root, "A/D/H/pi3", pool));
+
+ if (rev != SVN_INVALID_REVNUM)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "copy history wrong (rev) for A/D/H/pi3");
+
+ if (path != NULL)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "copy history wrong (path) for A/D/H/pi3");
+ }
+
+ /* Then, as if that wasn't fun enough, copy the whole subtree A/D/H
+ into the root directory as H2! */
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, after_rev, pool));
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, after_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_copy(rev_root, "A/D/H", txn_root, "H2", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+ { /* Check the copy history. */
+ svn_fs_root_t *root;
+ svn_revnum_t rev;
+ const char *path;
+
+ /* Check that the top of the copy has history. */
+ SVN_ERR(svn_fs_revision_root(&root, fs, after_rev, pool));
+ SVN_ERR(svn_fs_copied_from(&rev, &path, root, "H2", pool));
+
+ if (rev != (after_rev - 1))
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "copy history wrong (rev) for H2");
+
+ if (strcmp(path, "/A/D/H") != 0)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "copy history wrong (path) for H2");
+
+ /* Check that a random file under H2 reports no copy history. */
+ SVN_ERR(svn_fs_copied_from(&rev, &path, root, "H2/omega", pool));
+
+ if (rev != SVN_INVALID_REVNUM)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "copy history wrong (rev) for H2/omega");
+
+ if (path != NULL)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "copy history wrong (path) for H2/omega");
+
+ /* Note that H2/pi2 still has copy history, though. See the doc
+ string for svn_fs_copied_from() for more on this. */
+ }
+
+ /* Let's live dangerously. What happens if we copy a path into one
+ of its own children. Looping filesystem? Cyclic ancestry?
+ Another West Virginia family tree with no branches? We certainly
+ hope that's not the case. */
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, after_rev, pool));
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, after_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_copy(rev_root, "A/B", txn_root, "A/B/E/B", pool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+ { /* Check the copy history. */
+ svn_fs_root_t *root;
+ svn_revnum_t rev;
+ const char *path;
+
+ /* Check that the copy has history. */
+ SVN_ERR(svn_fs_revision_root(&root, fs, after_rev, pool));
+ SVN_ERR(svn_fs_copied_from(&rev, &path, root, "A/B/E/B", pool));
+
+ if (rev != (after_rev - 1))
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "copy history wrong (rev) for A/B/E/B");
+
+ if (strcmp(path, "/A/B") != 0)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "copy history wrong (path) for A/B/E/B");
+
+ /* Check that the original does not have copy history. */
+ SVN_ERR(svn_fs_revision_root(&root, fs, after_rev, pool));
+ SVN_ERR(svn_fs_copied_from(&rev, &path, root, "A/B", pool));
+
+ if (rev != SVN_INVALID_REVNUM)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "copy history wrong (rev) for A/B");
+
+ if (path != NULL)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "copy history wrong (path) for A/B");
+ }
+
+ /* After all these changes, let's see if the filesystem looks as we
+ would expect it to. */
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "iota", "This is the file 'iota'.\n" },
+ { "H2", 0 },
+ { "H2/chi", "This is the file 'chi'.\n" },
+ { "H2/pi2", "This is the file 'pi2'.\n" },
+ { "H2/pi3", "This is the file 'pi3'.\n" },
+ { "H2/psi", "This is the file 'psi'.\n" },
+ { "H2/omega", "This is the file 'omega'.\n" },
+ { "A", 0 },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/E/B", 0 },
+ { "A/B/E/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E/B/E", 0 },
+ { "A/B/E/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/E/B/F", 0 },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/H", 0 },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/pi2", "This is the file 'pi2'.\n" },
+ { "A/D/H/pi3", "This is the file 'pi3'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" },
+ { "A/D/H/omega", "This is the file 'omega'.\n" }
+ };
+ /* 34 == number of entries in expected_entries[] above. */
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, after_rev, pool));
+ SVN_ERR(svn_test__validate_tree(rev_root, expected_entries,
+ 34, pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* This tests deleting of mutable nodes. We build a tree in a
+ * transaction, then try to delete various items in the tree. We
+ * never commit the tree, so every entry being deleted points to a
+ * mutable node.
+ *
+ * OPTS selects the filesystem backend under test; POOL supplies all
+ * allocations. Returns SVN_NO_ERROR on success; an
+ * SVN_ERR_FS_GENERAL error if deleting the root behaves unexpectedly
+ * (step 6); any other failure is propagated via SVN_ERR.
+ *
+ * ### todo: this test was written before commits worked. It might
+ * now be worthwhile to combine it with delete().
+ */
+static svn_error_t *
+delete_mutables(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ svn_error_t *err;
+
+ /* Prepare a txn to receive the greek tree. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-del-from-dir",
+ opts, pool));
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Create the greek tree. */
+ SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+
+ /* Baby, it's time to test like you've never tested before. We do
+ * the following, in this order:
+ *
+ * 1. Delete a single file somewhere, succeed.
+ * 2. Delete two files of three, then make sure the third remains.
+ * 3. Delete the third and last file.
+ * 4. Try again to delete the dir, succeed.
+ * 5. Delete one of the natively empty dirs, succeed.
+ * 6. Try to delete root, fail.
+ * 7. Try to delete a top-level file, succeed.
+ *
+ * Specifically, that's:
+ *
+ * 1. Delete A/D/gamma.
+ * 2. Delete A/D/G/pi, A/D/G/rho.
+ * 3. Delete A/D/G/tau.
+ * 4. Try again to delete A/D/G, succeed.
+ * 5. Delete A/C.
+ * 6. Try to delete /, fail.
+ * 7. Try to delete iota, succeed.
+ *
+ * Before and after each deletion or attempted deletion, we probe
+ * the affected directory, to make sure everything is as it should
+ * be.
+ */
+
+ /* 1 */
+ {
+ const svn_fs_id_t *gamma_id;
+ /* NOTE(review): the fetched node id is never used afterwards; the
+ svn_fs_node_id() call appears to serve only as an "exists and
+ resolves" probe before the delete -- confirm. Same pattern in
+ steps 2-7 below. */
+ SVN_ERR(svn_fs_node_id(&gamma_id, txn_root, "A/D/gamma", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "gamma", pool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/D/gamma", pool));
+ SVN_ERR(check_entry_absent(txn_root, "A/D", "gamma", pool));
+ }
+
+ /* 2 */
+ {
+ const svn_fs_id_t *pi_id, *rho_id, *tau_id;
+ SVN_ERR(svn_fs_node_id(&pi_id, txn_root, "A/D/G/pi", pool));
+ SVN_ERR(svn_fs_node_id(&rho_id, txn_root, "A/D/G/rho", pool));
+ SVN_ERR(svn_fs_node_id(&tau_id, txn_root, "A/D/G/tau", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "pi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "rho", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "tau", pool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/D/G/pi", pool));
+ SVN_ERR(check_entry_absent(txn_root, "A/D/G", "pi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "rho", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "tau", pool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/D/G/rho", pool));
+ SVN_ERR(check_entry_absent(txn_root, "A/D/G", "pi", pool));
+ SVN_ERR(check_entry_absent(txn_root, "A/D/G", "rho", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "tau", pool));
+ }
+
+ /* 3 */
+ {
+ const svn_fs_id_t *tau_id;
+ SVN_ERR(svn_fs_node_id(&tau_id, txn_root, "A/D/G/tau", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "tau", pool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/D/G/tau", pool));
+ SVN_ERR(check_entry_absent(txn_root, "A/D/G", "tau", pool));
+ }
+
+ /* 4 */
+ {
+ const svn_fs_id_t *G_id;
+ SVN_ERR(svn_fs_node_id(&G_id, txn_root, "A/D/G", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "G", pool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/D/G", pool)); /* succeed */
+ SVN_ERR(check_entry_absent(txn_root, "A/D", "G", pool));
+ }
+
+ /* 5 */
+ {
+ const svn_fs_id_t *C_id;
+ SVN_ERR(svn_fs_node_id(&C_id, txn_root, "A/C", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "C", pool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/C", pool));
+ SVN_ERR(check_entry_absent(txn_root, "A", "C", pool));
+ }
+
+ /* 6 */
+ {
+ const svn_fs_id_t *root_id;
+ SVN_ERR(svn_fs_node_id(&root_id, txn_root, "", pool));
+
+ err = svn_fs_delete(txn_root, "", pool);
+
+ /* Deleting the root must fail with exactly SVN_ERR_FS_ROOT_DIR.
+ A different error, or no error at all, is a test failure. */
+ if (err && (err->apr_err != SVN_ERR_FS_ROOT_DIR))
+ {
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "deleting root directory got wrong error");
+ }
+ else if (! err)
+ {
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "deleting root directory failed to get error");
+ }
+ svn_error_clear(err);
+
+ }
+
+ /* 7 */
+ {
+ const svn_fs_id_t *iota_id;
+ SVN_ERR(svn_fs_node_id(&iota_id, txn_root, "iota", pool));
+ SVN_ERR(check_entry_present(txn_root, "", "iota", pool));
+ SVN_ERR(svn_fs_delete(txn_root, "iota", pool));
+ SVN_ERR(check_entry_absent(txn_root, "", "iota", pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* This tests deleting in general.
+ *
+ * ### todo: this test was written after (and independently of)
+ * delete_mutables(). It might be worthwhile to combine them.
+ */
+static svn_error_t *
+delete(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ svn_revnum_t new_rev;
+
+ /* This function tests 5 cases:
+ *
+ * 1. Delete mutable file.
+ * 2. Delete mutable directory.
+ * 3. Delete mutable directory with immutable nodes.
+ * 4. Delete immutable file.
+ * 5. Delete immutable directory.
+ */
+
+ /* Prepare a txn to receive the greek tree. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-del-tree",
+ opts, pool));
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Create the greek tree. */
+ SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+
+ /* 1. Delete mutable file. */
+ {
+ const svn_fs_id_t *iota_id, *gamma_id;
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "A", 0 },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/C", 0 },
+ { "A/B/F", 0 },
+ { "A/D", 0 },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/H", 0 },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" },
+ { "A/D/H/omega", "This is the file 'omega'.\n" }
+ };
+
+ /* Check nodes revision ID is gone. */
+ SVN_ERR(svn_fs_node_id(&iota_id, txn_root, "iota", pool));
+ SVN_ERR(svn_fs_node_id(&gamma_id, txn_root, "A/D/gamma", pool));
+
+ SVN_ERR(check_entry_present(txn_root, "", "iota", pool));
+
+ /* Try deleting mutable files. */
+ SVN_ERR(svn_fs_delete(txn_root, "iota", pool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/D/gamma", pool));
+ SVN_ERR(check_entry_absent(txn_root, "", "iota", pool));
+ SVN_ERR(check_entry_absent(txn_root, "A/D", "gamma", pool));
+
+ /* Validate the tree. */
+ SVN_ERR(svn_test__validate_tree(txn_root, expected_entries, 18, pool));
+ }
+ /* Abort transaction. */
+ SVN_ERR(svn_fs_abort_txn(txn, pool));
+
+ /* 2. Delete mutable directory. */
+
+ /* Prepare a txn to receive the greek tree. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Create the greek tree. */
+ SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+
+ {
+ const svn_fs_id_t *A_id, *mu_id, *B_id, *lambda_id, *E_id, *alpha_id,
+ *beta_id, *F_id, *C_id, *D_id, *gamma_id, *H_id, *chi_id,
+ *psi_id, *omega_id, *G_id, *pi_id, *rho_id, *tau_id;
+
+ /* Check nodes revision ID is gone. */
+ SVN_ERR(svn_fs_node_id(&A_id, txn_root, "/A", pool));
+ SVN_ERR(check_entry_present(txn_root, "", "A", pool));
+ SVN_ERR(svn_fs_node_id(&mu_id, txn_root, "/A/mu", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "mu", pool));
+ SVN_ERR(svn_fs_node_id(&B_id, txn_root, "/A/B", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "B", pool));
+ SVN_ERR(svn_fs_node_id(&lambda_id, txn_root, "/A/B/lambda", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B", "lambda", pool));
+ SVN_ERR(svn_fs_node_id(&E_id, txn_root, "/A/B/E", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B", "E", pool));
+ SVN_ERR(svn_fs_node_id(&alpha_id, txn_root, "/A/B/E/alpha", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B/E", "alpha", pool));
+ SVN_ERR(svn_fs_node_id(&beta_id, txn_root, "/A/B/E/beta", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B/E", "beta", pool));
+ SVN_ERR(svn_fs_node_id(&F_id, txn_root, "/A/B/F", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B", "F", pool));
+ SVN_ERR(svn_fs_node_id(&C_id, txn_root, "/A/C", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "C", pool));
+ SVN_ERR(svn_fs_node_id(&D_id, txn_root, "/A/D", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "D", pool));
+ SVN_ERR(svn_fs_node_id(&gamma_id, txn_root, "/A/D/gamma", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "gamma", pool));
+ SVN_ERR(svn_fs_node_id(&H_id, txn_root, "/A/D/H", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "H", pool));
+ SVN_ERR(svn_fs_node_id(&chi_id, txn_root, "/A/D/H/chi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/H", "chi", pool));
+ SVN_ERR(svn_fs_node_id(&psi_id, txn_root, "/A/D/H/psi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/H", "psi", pool));
+ SVN_ERR(svn_fs_node_id(&omega_id, txn_root, "/A/D/H/omega", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/H", "omega", pool));
+ SVN_ERR(svn_fs_node_id(&G_id, txn_root, "/A/D/G", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "G", pool));
+ SVN_ERR(svn_fs_node_id(&pi_id, txn_root, "/A/D/G/pi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "pi", pool));
+ SVN_ERR(svn_fs_node_id(&rho_id, txn_root, "/A/D/G/rho", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "rho", pool));
+ SVN_ERR(svn_fs_node_id(&tau_id, txn_root, "/A/D/G/tau", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "tau", pool));
+
+ /* Try deleting a mutable empty dir. */
+ SVN_ERR(svn_fs_delete(txn_root, "A/C", pool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/B/F", pool));
+ SVN_ERR(check_entry_absent(txn_root, "A", "C", pool));
+ SVN_ERR(check_entry_absent(txn_root, "A/B", "F", pool));
+
+ /* Now delete a mutable non-empty dir. */
+ SVN_ERR(svn_fs_delete(txn_root, "A", pool));
+ SVN_ERR(check_entry_absent(txn_root, "", "A", pool));
+
+ /* Validate the tree. */
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "iota", "This is the file 'iota'.\n" } };
+ SVN_ERR(svn_test__validate_tree(txn_root, expected_entries, 1, pool));
+ }
+ }
+
+ /* Abort transaction. */
+ SVN_ERR(svn_fs_abort_txn(txn, pool));
+
+ /* 3. Delete mutable directory with immutable nodes. */
+
+ /* Prepare a txn to receive the greek tree. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Create the greek tree. */
+ SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+
+ /* Commit the greek tree. */
+ SVN_ERR(svn_fs_commit_txn(NULL, &new_rev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(new_rev));
+
+ /* Create new transaction. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, new_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ {
+ const svn_fs_id_t *A_id, *mu_id, *B_id, *lambda_id, *E_id, *alpha_id,
+ *beta_id, *F_id, *C_id, *D_id, *gamma_id, *H_id, *chi_id,
+ *psi_id, *omega_id, *G_id, *pi_id, *rho_id, *tau_id, *sigma_id;
+
+ /* Create A/D/G/sigma. This makes all components of A/D/G
+ mutable. */
+ SVN_ERR(svn_fs_make_file(txn_root, "A/D/G/sigma", pool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/G/sigma",
+ "This is another file 'sigma'.\n", pool));
+
+ /* Check that mutable node-revision-IDs are removed and immutable
+ ones still exist. */
+ SVN_ERR(svn_fs_node_id(&A_id, txn_root, "/A", pool));
+ SVN_ERR(check_entry_present(txn_root, "", "A", pool));
+ SVN_ERR(svn_fs_node_id(&mu_id, txn_root, "/A/mu", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "mu", pool));
+ SVN_ERR(svn_fs_node_id(&B_id, txn_root, "/A/B", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "B", pool));
+ SVN_ERR(svn_fs_node_id(&lambda_id, txn_root, "/A/B/lambda", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B", "lambda", pool));
+ SVN_ERR(svn_fs_node_id(&E_id, txn_root, "/A/B/E", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B", "E", pool));
+ SVN_ERR(svn_fs_node_id(&alpha_id, txn_root, "/A/B/E/alpha", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B/E", "alpha", pool));
+ SVN_ERR(svn_fs_node_id(&beta_id, txn_root, "/A/B/E/beta", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B/E", "beta", pool));
+ SVN_ERR(svn_fs_node_id(&F_id, txn_root, "/A/B/F", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B", "F", pool));
+ SVN_ERR(svn_fs_node_id(&C_id, txn_root, "/A/C", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "C", pool));
+ SVN_ERR(svn_fs_node_id(&D_id, txn_root, "/A/D", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "D", pool));
+ SVN_ERR(svn_fs_node_id(&gamma_id, txn_root, "/A/D/gamma", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "gamma", pool));
+ SVN_ERR(svn_fs_node_id(&H_id, txn_root, "/A/D/H", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "H", pool));
+ SVN_ERR(svn_fs_node_id(&chi_id, txn_root, "/A/D/H/chi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/H", "chi", pool));
+ SVN_ERR(svn_fs_node_id(&psi_id, txn_root, "/A/D/H/psi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/H", "psi", pool));
+ SVN_ERR(svn_fs_node_id(&omega_id, txn_root, "/A/D/H/omega", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/H", "omega", pool));
+ SVN_ERR(svn_fs_node_id(&G_id, txn_root, "/A/D/G", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "G", pool));
+ SVN_ERR(svn_fs_node_id(&pi_id, txn_root, "/A/D/G/pi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "pi", pool));
+ SVN_ERR(svn_fs_node_id(&rho_id, txn_root, "/A/D/G/rho", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "rho", pool));
+ SVN_ERR(svn_fs_node_id(&tau_id, txn_root, "/A/D/G/tau", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "tau", pool));
+ SVN_ERR(svn_fs_node_id(&sigma_id, txn_root, "/A/D/G/sigma", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "sigma", pool));
+
+ /* Delete "A" */
+ SVN_ERR(svn_fs_delete(txn_root, "A", pool));
+ SVN_ERR(check_entry_absent(txn_root, "", "A", pool));
+
+ /* Validate the tree. */
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "iota", "This is the file 'iota'.\n" }
+ };
+
+ SVN_ERR(svn_test__validate_tree(txn_root, expected_entries, 1, pool));
+ }
+ }
+
+ /* Abort transaction. */
+ SVN_ERR(svn_fs_abort_txn(txn, pool));
+
+ /* 4. Delete immutable file. */
+
+ /* Create new transaction. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, new_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ {
+ const svn_fs_id_t *iota_id, *gamma_id;
+
+ /* Check nodes revision ID is present. */
+ SVN_ERR(svn_fs_node_id(&iota_id, txn_root, "iota", pool));
+ SVN_ERR(svn_fs_node_id(&gamma_id, txn_root, "A/D/gamma", pool));
+ SVN_ERR(check_entry_present(txn_root, "", "iota", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "gamma", pool));
+
+ /* Delete some files. */
+ SVN_ERR(svn_fs_delete(txn_root, "iota", pool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/D/gamma", pool));
+ SVN_ERR(check_entry_absent(txn_root, "", "iota", pool));
+ SVN_ERR(check_entry_absent(txn_root, "A/D", "iota", pool));
+
+ /* Validate the tree. */
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "A", 0 },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/D", 0 },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/H", 0 },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" },
+ { "A/D/H/omega", "This is the file 'omega'.\n" }
+ };
+ SVN_ERR(svn_test__validate_tree(txn_root, expected_entries, 18, pool));
+ }
+ }
+
+ /* Abort transaction. */
+ SVN_ERR(svn_fs_abort_txn(txn, pool));
+
+ /* 5. Delete immutable directory. */
+
+ /* Create new transaction. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, new_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ {
+ const svn_fs_id_t *A_id, *mu_id, *B_id, *lambda_id, *E_id, *alpha_id,
+ *beta_id, *F_id, *C_id, *D_id, *gamma_id, *H_id, *chi_id,
+ *psi_id, *omega_id, *G_id, *pi_id, *rho_id, *tau_id;
+
+ /* Check nodes revision ID is present. */
+ SVN_ERR(svn_fs_node_id(&A_id, txn_root, "/A", pool));
+ SVN_ERR(check_entry_present(txn_root, "", "A", pool));
+ SVN_ERR(svn_fs_node_id(&mu_id, txn_root, "/A/mu", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "mu", pool));
+ SVN_ERR(svn_fs_node_id(&B_id, txn_root, "/A/B", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "B", pool));
+ SVN_ERR(svn_fs_node_id(&lambda_id, txn_root, "/A/B/lambda", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B", "lambda", pool));
+ SVN_ERR(svn_fs_node_id(&E_id, txn_root, "/A/B/E", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B", "E", pool));
+ SVN_ERR(svn_fs_node_id(&alpha_id, txn_root, "/A/B/E/alpha", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B/E", "alpha", pool));
+ SVN_ERR(svn_fs_node_id(&beta_id, txn_root, "/A/B/E/beta", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B/E", "beta", pool));
+ SVN_ERR(svn_fs_node_id(&F_id, txn_root, "/A/B/F", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B", "F", pool));
+ SVN_ERR(svn_fs_node_id(&C_id, txn_root, "/A/C", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "C", pool));
+ SVN_ERR(svn_fs_node_id(&D_id, txn_root, "/A/D", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "D", pool));
+ SVN_ERR(svn_fs_node_id(&gamma_id, txn_root, "/A/D/gamma", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "gamma", pool));
+ SVN_ERR(svn_fs_node_id(&H_id, txn_root, "/A/D/H", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "H", pool));
+ SVN_ERR(svn_fs_node_id(&chi_id, txn_root, "/A/D/H/chi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/H", "chi", pool));
+ SVN_ERR(svn_fs_node_id(&psi_id, txn_root, "/A/D/H/psi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/H", "psi", pool));
+ SVN_ERR(svn_fs_node_id(&omega_id, txn_root, "/A/D/H/omega", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/H", "omega", pool));
+ SVN_ERR(svn_fs_node_id(&G_id, txn_root, "/A/D/G", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "G", pool));
+ SVN_ERR(svn_fs_node_id(&pi_id, txn_root, "/A/D/G/pi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "pi", pool));
+ SVN_ERR(svn_fs_node_id(&rho_id, txn_root, "/A/D/G/rho", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "rho", pool));
+ SVN_ERR(svn_fs_node_id(&tau_id, txn_root, "/A/D/G/tau", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "tau", pool));
+
+ /* Delete "A" */
+ SVN_ERR(svn_fs_delete(txn_root, "A", pool));
+ SVN_ERR(check_entry_absent(txn_root, "", "A", pool));
+
+ /* Validate the tree. */
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "iota", "This is the file 'iota'.\n" }
+ };
+ SVN_ERR(svn_test__validate_tree(txn_root, expected_entries, 1, pool));
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* Test the datestamps on commits. */
+static svn_error_t *
+commit_date(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ svn_revnum_t rev;
+ svn_string_t *datestamp;
+ apr_time_t before_commit, at_commit, after_commit;
+
+ /* Prepare a filesystem. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-commit-date",
+ opts, pool));
+
+ before_commit = apr_time_now();
+
+ /* Commit a greek tree. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(rev));
+
+ after_commit = apr_time_now();
+
+ /* Get the datestamp of the commit. */
+ SVN_ERR(svn_fs_revision_prop(&datestamp, fs, rev, SVN_PROP_REVISION_DATE,
+ pool));
+
+ if (datestamp == NULL)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "failed to get datestamp of committed revision");
+
+ SVN_ERR(svn_time_from_cstring(&at_commit, datestamp->data, pool));
+
+ if (at_commit < before_commit)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "datestamp too early");
+
+ if (at_commit > after_commit)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "datestamp too late");
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+check_old_revisions(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ svn_revnum_t rev;
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ /* Prepare a filesystem. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-check-old-revisions",
+ opts, pool));
+
+ /* Commit a greek tree. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(rev));
+ svn_pool_clear(subpool);
+
+ /* Modify and commit iota a few times, then test to see if we can
+ retrieve all the committed revisions. */
+ {
+ /* right-side numbers match revision numbers */
+#define iota_contents_1 "This is the file 'iota'.\n"
+
+ /* Add a char to the front. */
+#define iota_contents_2 "XThis is the file 'iota'.\n"
+
+ /* Add a char to the end. */
+#define iota_contents_3 "XThis is the file 'iota'.\nX"
+
+ /* Add a couple of chars in the middle. */
+#define iota_contents_4 "XThis is the X file 'iota'.\nX"
+
+ /* Randomly add and delete chars all over. */
+#define iota_contents_5 \
+ "XTYhQis is ACK, PHHHT! no longer 'ioZZZZZta'.blarf\nbye"
+
+ /* Reassure iota that it will live for quite some time. */
+#define iota_contents_6 "Matthew 5:18 (Revised Standard Version) --\n\
+For truly, I say to you, till heaven and earth pass away, not an iota,\n\
+not a dot, will pass from the law until all is accomplished."
+
+ /* Revert to the original contents. */
+#define iota_contents_7 "This is the file 'iota'.\n"
+
+ /* Revision 2. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "iota", iota_contents_2, subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(rev));
+ svn_pool_clear(subpool);
+
+ /* Revision 3. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "iota", iota_contents_3, subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(rev));
+ svn_pool_clear(subpool);
+
+ /* Revision 4. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "iota", iota_contents_4, subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(rev));
+ svn_pool_clear(subpool);
+
+ /* Revision 5. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "iota", iota_contents_5, subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(rev));
+ svn_pool_clear(subpool);
+
+ /* Revision 6. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "iota", iota_contents_6, subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(rev));
+ svn_pool_clear(subpool);
+
+ /* Revision 7. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "iota", iota_contents_7, subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(rev));
+ svn_pool_clear(subpool);
+
+ /** Now check the full Greek Tree in all of those revisions,
+ adjusting `iota' for each one. ***/
+
+ /* Validate revision 1. */
+ {
+ svn_fs_root_t *root;
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "iota", iota_contents_1 },
+ { "A", 0 },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/H", 0 },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" },
+ { "A/D/H/omega", "This is the file 'omega'.\n" }
+ };
+
+ SVN_ERR(svn_fs_revision_root(&root, fs, 1, pool));
+ SVN_ERR(svn_test__validate_tree(root, expected_entries, 20, pool));
+ }
+
+ /* Validate revision 2. */
+ {
+ svn_fs_root_t *root;
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "iota", iota_contents_2 },
+ { "A", 0 },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/H", 0 },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" },
+ { "A/D/H/omega", "This is the file 'omega'.\n" }
+ };
+
+ SVN_ERR(svn_fs_revision_root(&root, fs, 2, pool));
+ SVN_ERR(svn_test__validate_tree(root, expected_entries, 20, pool));
+ }
+
+ /* Validate revision 3. */
+ {
+ svn_fs_root_t *root;
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "iota", iota_contents_3 },
+ { "A", 0 },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/H", 0 },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" },
+ { "A/D/H/omega", "This is the file 'omega'.\n" }
+ };
+
+ SVN_ERR(svn_fs_revision_root(&root, fs, 3, pool));
+ SVN_ERR(svn_test__validate_tree(root, expected_entries, 20, pool));
+ }
+
+ /* Validate revision 4. */
+ {
+ svn_fs_root_t *root;
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "iota", iota_contents_4 },
+ { "A", 0 },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/H", 0 },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" },
+ { "A/D/H/omega", "This is the file 'omega'.\n" }
+ };
+
+ SVN_ERR(svn_fs_revision_root(&root, fs, 4, pool));
+ SVN_ERR(svn_test__validate_tree(root, expected_entries, 20, pool));
+ }
+
+ /* Validate revision 5. */
+ {
+ svn_fs_root_t *root;
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "iota", iota_contents_5 },
+ { "A", 0 },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/D", 0 },
+ { "A/D/G", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/H", 0 },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" },
+ { "A/D/H/omega", "This is the file 'omega'.\n" }
+ };
+
+ SVN_ERR(svn_fs_revision_root(&root, fs, 5, pool));
+ SVN_ERR(svn_test__validate_tree(root, expected_entries, 20, pool));
+ }
+
+ /* Validate revision 6. */
+ {
+ svn_fs_root_t *root;
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "iota", iota_contents_6 },
+ { "A", 0 },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/H", 0 },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" },
+ { "A/D/H/omega", "This is the file 'omega'.\n" }
+ };
+
+ SVN_ERR(svn_fs_revision_root(&root, fs, 6, pool));
+ SVN_ERR(svn_test__validate_tree(root, expected_entries, 20, pool));
+ }
+
+ /* Validate revision 7. */
+ {
+ svn_fs_root_t *root;
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "iota", iota_contents_7 },
+ { "A", 0 },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/H", 0 },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" },
+ { "A/D/H/omega", "This is the file 'omega'.\n" }
+ };
+
+ SVN_ERR(svn_fs_revision_root(&root, fs, 7, pool));
+ SVN_ERR(svn_test__validate_tree(root, expected_entries, 20, pool));
+ }
+ }
+
+ svn_pool_destroy(subpool);
+ return SVN_NO_ERROR;
+}
+
+
+/* For each revision R in FS, from 0 to MAX_REV, check that it
+ matches the tree in EXPECTED_TREES[R]. Use POOL for any
+ allocations. This is a helper function for check_all_revisions. */
+static svn_error_t *
+validate_revisions(svn_fs_t *fs,
+ svn_test__tree_t *expected_trees,
+ svn_revnum_t max_rev,
+ apr_pool_t *pool)
+{
+ svn_fs_root_t *revision_root;
+ svn_revnum_t i;
+ svn_error_t *err;
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ /* Validate all revisions up to the current one. */
+ for (i = 0; i <= max_rev; i++)
+ {
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs,
+ (svn_revnum_t)i, subpool));
+ err = svn_test__validate_tree(revision_root,
+ expected_trees[i].entries,
+ expected_trees[i].num_entries,
+ subpool);
+ if (err)
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, err,
+ "Error validating revision %ld (youngest is %ld)", i, max_rev);
+ svn_pool_clear(subpool);
+ }
+
+ svn_pool_destroy(subpool);
+ return SVN_NO_ERROR;
+}
+
+
/* Build a small history (adds, deletes, edits, and copies across four
   commits) and, after every commit, re-validate the complete tree of
   EVERY revision made so far via validate_revisions().  Script commands:
   'a' = add, 'd' = delete, 'e' = edit, 'c' = copy (see
   svn_test__txn_script_exec). */
static svn_error_t *
check_all_revisions(const svn_test_opts_t *opts,
                    apr_pool_t *pool)
{
  svn_fs_t *fs;
  svn_fs_txn_t *txn;
  svn_fs_root_t *txn_root;
  svn_revnum_t youngest_rev;
  svn_test__tree_t expected_trees[5]; /* one tree per commit, please */
  svn_revnum_t revision_count = 0;
  apr_pool_t *subpool = svn_pool_create(pool);

  /* Create a filesystem and repository. */
  SVN_ERR(svn_test__create_fs(&fs, "test-repo-check-all-revisions",
                              opts, pool));

  /***********************************************************************/
  /* REVISION 0 */
  /***********************************************************************/
  {
    /* Revision 0 is the empty tree: no entries at all. */
    expected_trees[revision_count].num_entries = 0;
    expected_trees[revision_count].entries = 0;
    SVN_ERR(validate_revisions(fs, expected_trees, revision_count, subpool));
    revision_count++;
  }
  svn_pool_clear(subpool);

  /* Create and commit the greek tree. */
  SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
  SVN_ERR(svn_test__create_greek_tree(txn_root, subpool));
  SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));

  /***********************************************************************/
  /* REVISION 1 */
  /***********************************************************************/
  {
    /* The standard Greek tree, unmodified. */
    static svn_test__tree_entry_t expected_entries[] = {
      /* path, contents (0 = dir) */
      { "iota", "This is the file 'iota'.\n" },
      { "A", 0 },
      { "A/mu", "This is the file 'mu'.\n" },
      { "A/B", 0 },
      { "A/B/lambda", "This is the file 'lambda'.\n" },
      { "A/B/E", 0 },
      { "A/B/E/alpha", "This is the file 'alpha'.\n" },
      { "A/B/E/beta", "This is the file 'beta'.\n" },
      { "A/B/F", 0 },
      { "A/C", 0 },
      { "A/D", 0 },
      { "A/D/gamma", "This is the file 'gamma'.\n" },
      { "A/D/G", 0 },
      { "A/D/G/pi", "This is the file 'pi'.\n" },
      { "A/D/G/rho", "This is the file 'rho'.\n" },
      { "A/D/G/tau", "This is the file 'tau'.\n" },
      { "A/D/H", 0 },
      { "A/D/H/chi", "This is the file 'chi'.\n" },
      { "A/D/H/psi", "This is the file 'psi'.\n" },
      { "A/D/H/omega", "This is the file 'omega'.\n" }
    };
    expected_trees[revision_count].entries = expected_entries;
    expected_trees[revision_count].num_entries = 20;
    /* Re-validates revisions 0..1. */
    SVN_ERR(validate_revisions(fs, expected_trees, revision_count, subpool));
    revision_count++;
  }
  svn_pool_clear(subpool);

  /* Make a new txn based on the youngest revision, make some changes,
     and commit those changes (which makes a new youngest
     revision). */
  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
  {
    /* Mix of adds (files and a directory), deletes, and edits. */
    static svn_test__txn_script_command_t script_entries[] = {
      { 'a', "A/delta", "This is the file 'delta'.\n" },
      { 'a', "A/epsilon", "This is the file 'epsilon'.\n" },
      { 'a', "A/B/Z", 0 },
      { 'a', "A/B/Z/zeta", "This is the file 'zeta'.\n" },
      { 'd', "A/C", 0 },
      { 'd', "A/mu", "" },
      { 'd', "A/D/G/tau", "" },
      { 'd', "A/D/H/omega", "" },
      { 'e', "iota", "Changed file 'iota'.\n" },
      { 'e', "A/D/G/rho", "Changed file 'rho'.\n" }
    };
    SVN_ERR(svn_test__txn_script_exec(txn_root, script_entries, 10,
                                      subpool));
  }
  SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));

  /***********************************************************************/
  /* REVISION 2 */
  /***********************************************************************/
  {
    static svn_test__tree_entry_t expected_entries[] = {
      /* path, contents (0 = dir) */
      { "iota", "Changed file 'iota'.\n" },
      { "A", 0 },
      { "A/delta", "This is the file 'delta'.\n" },
      { "A/epsilon", "This is the file 'epsilon'.\n" },
      { "A/B", 0 },
      { "A/B/lambda", "This is the file 'lambda'.\n" },
      { "A/B/E", 0 },
      { "A/B/E/alpha", "This is the file 'alpha'.\n" },
      { "A/B/E/beta", "This is the file 'beta'.\n" },
      { "A/B/F", 0 },
      { "A/B/Z", 0 },
      { "A/B/Z/zeta", "This is the file 'zeta'.\n" },
      { "A/D", 0 },
      { "A/D/gamma", "This is the file 'gamma'.\n" },
      { "A/D/G", 0 },
      { "A/D/G/pi", "This is the file 'pi'.\n" },
      { "A/D/G/rho", "Changed file 'rho'.\n" },
      { "A/D/H", 0 },
      { "A/D/H/chi", "This is the file 'chi'.\n" },
      { "A/D/H/psi", "This is the file 'psi'.\n" }
    };
    expected_trees[revision_count].entries = expected_entries;
    expected_trees[revision_count].num_entries = 20;
    /* Re-validates revisions 0..2. */
    SVN_ERR(validate_revisions(fs, expected_trees, revision_count, subpool));
    revision_count++;
  }
  svn_pool_clear(subpool);

  /* Make a new txn based on the youngest revision, make some changes,
     and commit those changes (which makes a new youngest
     revision). */
  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
  {
    /* Re-add previously deleted paths, with A/D/H/omega changing kind. */
    static svn_test__txn_script_command_t script_entries[] = {
      { 'a', "A/mu", "Re-added file 'mu'.\n" },
      { 'a', "A/D/H/omega", 0 }, /* re-add omega as directory! */
      { 'd', "iota", "" },
      { 'e', "A/delta", "This is the file 'delta'.\nLine 2.\n" }
    };
    SVN_ERR(svn_test__txn_script_exec(txn_root, script_entries, 4,
                                      subpool));
  }
  SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));

  /***********************************************************************/
  /* REVISION 3 */
  /***********************************************************************/
  {
    static svn_test__tree_entry_t expected_entries[] = {
      /* path, contents (0 = dir) */
      { "A", 0 },
      { "A/delta", "This is the file 'delta'.\nLine 2.\n" },
      { "A/epsilon", "This is the file 'epsilon'.\n" },
      { "A/mu", "Re-added file 'mu'.\n" },
      { "A/B", 0 },
      { "A/B/lambda", "This is the file 'lambda'.\n" },
      { "A/B/E", 0 },
      { "A/B/E/alpha", "This is the file 'alpha'.\n" },
      { "A/B/E/beta", "This is the file 'beta'.\n" },
      { "A/B/F", 0 },
      { "A/B/Z", 0 },
      { "A/B/Z/zeta", "This is the file 'zeta'.\n" },
      { "A/D", 0 },
      { "A/D/gamma", "This is the file 'gamma'.\n" },
      { "A/D/G", 0 },
      { "A/D/G/pi", "This is the file 'pi'.\n" },
      { "A/D/G/rho", "Changed file 'rho'.\n" },
      { "A/D/H", 0 },
      { "A/D/H/chi", "This is the file 'chi'.\n" },
      { "A/D/H/psi", "This is the file 'psi'.\n" },
      { "A/D/H/omega", 0 }
    };
    expected_trees[revision_count].entries = expected_entries;
    expected_trees[revision_count].num_entries = 21;
    /* Re-validates revisions 0..3. */
    SVN_ERR(validate_revisions(fs, expected_trees, revision_count, subpool));
    revision_count++;
  }
  svn_pool_clear(subpool);

  /* Make a new txn based on the youngest revision, make some changes,
     and commit those changes (which makes a new youngest
     revision). */
  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
  {
    /* Copies only: a directory copy and a file copy. */
    static svn_test__txn_script_command_t script_entries[] = {
      { 'c', "A/D/G", "A/D/G2" },
      { 'c', "A/epsilon", "A/B/epsilon" },
    };
    SVN_ERR(svn_test__txn_script_exec(txn_root, script_entries, 2, subpool));
  }
  SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));

  /***********************************************************************/
  /* REVISION 4 */
  /***********************************************************************/
  {
    static svn_test__tree_entry_t expected_entries[] = {
      /* path, contents (0 = dir) */
      { "A", 0 },
      { "A/delta", "This is the file 'delta'.\nLine 2.\n" },
      { "A/epsilon", "This is the file 'epsilon'.\n" },
      { "A/mu", "Re-added file 'mu'.\n" },
      { "A/B", 0 },
      { "A/B/epsilon", "This is the file 'epsilon'.\n" },
      { "A/B/lambda", "This is the file 'lambda'.\n" },
      { "A/B/E", 0 },
      { "A/B/E/alpha", "This is the file 'alpha'.\n" },
      { "A/B/E/beta", "This is the file 'beta'.\n" },
      { "A/B/F", 0 },
      { "A/B/Z", 0 },
      { "A/B/Z/zeta", "This is the file 'zeta'.\n" },
      { "A/D", 0 },
      { "A/D/gamma", "This is the file 'gamma'.\n" },
      { "A/D/G", 0 },
      { "A/D/G/pi", "This is the file 'pi'.\n" },
      { "A/D/G/rho", "Changed file 'rho'.\n" },
      { "A/D/G2", 0 },
      { "A/D/G2/pi", "This is the file 'pi'.\n" },
      { "A/D/G2/rho", "Changed file 'rho'.\n" },
      { "A/D/H", 0 },
      { "A/D/H/chi", "This is the file 'chi'.\n" },
      { "A/D/H/psi", "This is the file 'psi'.\n" },
      { "A/D/H/omega", 0 }
    };
    expected_trees[revision_count].entries = expected_entries;
    expected_trees[revision_count].num_entries = 25;
    /* Re-validates revisions 0..4. */
    SVN_ERR(validate_revisions(fs, expected_trees, revision_count, subpool));
    revision_count++;
  }
  svn_pool_destroy(subpool);

  return SVN_NO_ERROR;
}
+
+
+/* Helper function for large_file_integrity(). Given a ROOT and PATH
+ to a file, set *CHECKSUM to the checksum of kind CHECKSUM_KIND for the
+ contents of the file. */
+static svn_error_t *
+get_file_checksum(svn_checksum_t **checksum,
+ svn_checksum_kind_t checksum_kind,
+ svn_fs_root_t *root,
+ const char *path,
+ apr_pool_t *pool)
+{
+ svn_stream_t *stream;
+
+ /* Get a stream for the file contents. */
+ SVN_ERR(svn_fs_file_contents(&stream, root, path, pool));
+ SVN_ERR(svn_stream_contents_checksum(checksum, stream, checksum_kind,
+ pool, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Return a pseudo-random number in the range [0,SCALAR) i.e. return
+ a number N such that 0 <= N < SCALAR */
+static int my_rand(apr_uint64_t scalar, apr_uint32_t *seed)
+{
+ static const apr_uint32_t TEST_RAND_MAX = 0xffffffffUL;
+ /* Assumes TEST_RAND_MAX+1 can be exactly represented in a double */
+ apr_uint32_t r = svn_test_rand(seed);
+ return (int)(((double)r
+ / ((double)TEST_RAND_MAX+1.0))
+ * (double)scalar);
+}
+
+
+/* Put pseudo-random bytes in buffer BUF (which is LEN bytes long).
+ If FULL is TRUE, simply replace every byte in BUF with a
+ pseudo-random byte, else, replace a pseudo-random collection of
+ bytes with pseudo-random data. */
+static void
+random_data_to_buffer(char *buf,
+ apr_size_t buf_len,
+ svn_boolean_t full,
+ apr_uint32_t *seed)
+{
+ apr_size_t i;
+ apr_size_t num_bytes;
+ apr_size_t offset;
+
+ int ds_off = 0;
+ const char *dataset = "0123456789";
+ apr_size_t dataset_size = strlen(dataset);
+
+ if (full)
+ {
+ for (i = 0; i < buf_len; i++)
+ {
+ ds_off = my_rand(dataset_size, seed);
+ buf[i] = dataset[ds_off];
+ }
+
+ return;
+ }
+
+ num_bytes = my_rand(buf_len / 100, seed) + 1;
+ for (i = 0; i < num_bytes; i++)
+ {
+ offset = my_rand(buf_len - 1, seed);
+ ds_off = my_rand(dataset_size, seed);
+ buf[offset] = dataset[ds_off];
+ }
+
+ return;
+}
+
+
+/* Core of the *_file_integrity tests. In a fresh repository named
+ FS_NAME (created per OPTS), commit a FILESIZE-byte file filled with
+ pseudo-random data (driven by SEED), then commit a series of
+ pseudo-random edits to it, recording an MD5 checksum of the intended
+ contents before each commit. Finally re-read the file as of every
+ revision and verify the stored contents against the recorded
+ checksums. */
+static svn_error_t *
+file_integrity_helper(apr_size_t filesize, apr_uint32_t *seed,
+ const svn_test_opts_t *opts, const char *fs_name,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *rev_root;
+ svn_revnum_t youngest_rev = 0;
+ apr_pool_t *subpool = svn_pool_create(pool);
+ svn_string_t contents;
+ char *content_buffer;
+ svn_checksum_t *checksum;
+ svn_checksum_kind_t checksum_kind = svn_checksum_md5;
+ /* Indexed directly by revision number; the edit loop below stops
+ after revision 30, well within this capacity. */
+ svn_checksum_t *checksum_list[100];
+ svn_txdelta_window_handler_t wh_func;
+ void *wh_baton;
+ svn_revnum_t j;
+
+ /* Create a filesystem and repository. */
+ SVN_ERR(svn_test__create_fs(&fs, fs_name, opts, pool));
+
+ /* Set up our file contents string buffer. */
+ content_buffer = apr_palloc(pool, filesize);
+
+ contents.data = content_buffer;
+ contents.len = filesize;
+
+ /* THE PLAN:
+
+ The plan here is simple. We have a very large file (FILESIZE
+ bytes) that we initialize with pseudo-random data and commit.
+ Then we make pseudo-random modifications to that file's contents,
+ committing after each mod. Prior to each commit, we generate an
+ MD5 checksum for the contents of the file, storing each of those
+ checksums in an array. After we've made a whole bunch of edits
+ and commits, we'll re-check that file's contents as of each
+ revision in the repository, recalculate a checksum for those
+ contents, and make sure the "before" and "after" checksums
+ match. */
+
+ /* Create a big, ugly, pseudo-random-filled file and commit it. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_fs_make_file(txn_root, "bigfile", subpool));
+ random_data_to_buffer(content_buffer, filesize, TRUE, seed);
+ SVN_ERR(svn_checksum(&checksum, checksum_kind, contents.data, contents.len,
+ pool));
+ SVN_ERR(svn_fs_apply_textdelta
+ (&wh_func, &wh_baton, txn_root, "bigfile", NULL, NULL, subpool));
+ SVN_ERR(svn_txdelta_send_string(&contents, wh_func, wh_baton, subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ SVN_ERR(svn_fs_deltify_revision(fs, youngest_rev, subpool));
+ checksum_list[youngest_rev] = checksum;
+ svn_pool_clear(subpool);
+
+ /* Now, let's make some edits to the beginning of our file, and
+ commit those. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ random_data_to_buffer(content_buffer, 20, TRUE, seed);
+ SVN_ERR(svn_checksum(&checksum, checksum_kind, contents.data, contents.len,
+ pool));
+ SVN_ERR(svn_fs_apply_textdelta
+ (&wh_func, &wh_baton, txn_root, "bigfile", NULL, NULL, subpool));
+ SVN_ERR(svn_txdelta_send_string(&contents, wh_func, wh_baton, subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ SVN_ERR(svn_fs_deltify_revision(fs, youngest_rev, subpool));
+ checksum_list[youngest_rev] = checksum;
+ svn_pool_clear(subpool);
+
+ /* Now, let's make some edits to the end of our file. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ random_data_to_buffer(content_buffer + (filesize - 20), 20, TRUE, seed);
+ SVN_ERR(svn_checksum(&checksum, checksum_kind, contents.data, contents.len,
+ pool));
+ SVN_ERR(svn_fs_apply_textdelta
+ (&wh_func, &wh_baton, txn_root, "bigfile", NULL, NULL, subpool));
+ SVN_ERR(svn_txdelta_send_string(&contents, wh_func, wh_baton, subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ SVN_ERR(svn_fs_deltify_revision(fs, youngest_rev, subpool));
+ checksum_list[youngest_rev] = checksum;
+ svn_pool_clear(subpool);
+
+ /* How about some edits to both the beginning and the end of the
+ file? */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ random_data_to_buffer(content_buffer, 20, TRUE, seed);
+ random_data_to_buffer(content_buffer + (filesize - 20), 20, TRUE, seed);
+ SVN_ERR(svn_checksum(&checksum, checksum_kind, contents.data, contents.len,
+ pool));
+ SVN_ERR(svn_fs_apply_textdelta
+ (&wh_func, &wh_baton, txn_root, "bigfile", NULL, NULL, subpool));
+ SVN_ERR(svn_txdelta_send_string(&contents, wh_func, wh_baton, subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ SVN_ERR(svn_fs_deltify_revision(fs, youngest_rev, subpool));
+ checksum_list[youngest_rev] = checksum;
+ svn_pool_clear(subpool);
+
+ /* Alright, now we're just going to go crazy. Let's make many more
+ edits -- pseudo-random numbers and offsets of bytes changed to
+ more pseudo-random values -- until we reach revision 30. */
+ for (j = youngest_rev; j < 30; j = youngest_rev)
+ {
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ random_data_to_buffer(content_buffer, filesize, FALSE, seed);
+ SVN_ERR(svn_checksum(&checksum, checksum_kind, contents.data,
+ contents.len, pool));
+ SVN_ERR(svn_fs_apply_textdelta(&wh_func, &wh_baton, txn_root,
+ "bigfile", NULL, NULL, subpool));
+ SVN_ERR(svn_txdelta_send_string
+ (&contents, wh_func, wh_baton, subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ SVN_ERR(svn_fs_deltify_revision(fs, youngest_rev, subpool));
+ checksum_list[youngest_rev] = checksum;
+ svn_pool_clear(subpool);
+ }
+
+ /* Now, calculate an MD5 digest for the contents of our big ugly
+ file in each revision currently in existence, and make sure
+ the checksum matches the checksum of the data prior to its
+ commit. */
+ for (j = youngest_rev; j > 0; j--)
+ {
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, j, subpool));
+ SVN_ERR(get_file_checksum(&checksum, checksum_kind, rev_root, "bigfile",
+ subpool));
+ if (!svn_checksum_match(checksum, checksum_list[j]))
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "verify-checksum: checksum mismatch, revision %ld:\n"
+ " expected: %s\n"
+ " actual: %s\n", j,
+ svn_checksum_to_cstring(checksum_list[j], pool),
+ svn_checksum_to_cstring(checksum, pool));
+
+ svn_pool_clear(subpool);
+ }
+
+ svn_pool_destroy(subpool);
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+small_file_integrity(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ /* Time-derived seed: every run exercises a different pseudo-random
+ edit sequence. */
+ apr_uint32_t seed = (apr_uint32_t) apr_time_now();
+
+ /* Just use a really small file size... */
+ return file_integrity_helper(20, &seed, opts,
+ "test-repo-small-file-integrity", pool);
+}
+
+
+static svn_error_t *
+almostmedium_file_integrity(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ apr_uint32_t seed = (apr_uint32_t) apr_time_now();
+
+ /* Being one byte below the standard delta window size affects
+ deltification internally, so test that boundary. */
+ return file_integrity_helper(SVN_DELTA_WINDOW_SIZE - 1, &seed, opts,
+ "test-repo-almostmedium-file-integrity", pool);
+}
+
+
+static svn_error_t *
+medium_file_integrity(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ apr_uint32_t seed = (apr_uint32_t) apr_time_now();
+
+ /* Being exactly the standard delta window size affects
+ deltification internally, so test that boundary. */
+ return file_integrity_helper(SVN_DELTA_WINDOW_SIZE, &seed, opts,
+ "test-repo-medium-file-integrity", pool);
+}
+
+
+static svn_error_t *
+large_file_integrity(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ apr_uint32_t seed = (apr_uint32_t) apr_time_now();
+
+ /* Being larger than the standard delta window size affects
+ deltification internally, so test that. */
+ return file_integrity_helper(SVN_DELTA_WINDOW_SIZE + 1, &seed, opts,
+ "test-repo-large-file-integrity", pool);
+}
+
+
+/* Verify that the root node's "created rev" always equals the youngest
+ revision: once after the initial greek-tree commit and then after each
+ of ten follow-up commits that modify "iota". */
+static svn_error_t *
+check_root_revision(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *rev_root;
+ svn_revnum_t youngest_rev, test_rev;
+ apr_pool_t *subpool = svn_pool_create(pool);
+ int i;
+
+ /* Create a filesystem and repository. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-check-root-revision",
+ opts, pool));
+
+ /* Create and commit the greek tree. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+ /* Root node's revision should be the same as YOUNGEST_REV. */
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_node_created_rev(&test_rev, rev_root, "", subpool));
+ if (test_rev != youngest_rev)
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "Root node in revision %ld has unexpected stored revision %ld",
+ youngest_rev, test_rev);
+ svn_pool_clear(subpool);
+
+ for (i = 0; i < 10; i++)
+ {
+ /* Modify "iota" and commit; each commit bubbles up to the root,
+ so the root's created rev must advance every time. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "iota",
+ apr_psprintf(subpool, "iota version %d", i + 2), subpool));
+
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+ /* Root node's revision should be the same as YOUNGEST_REV. */
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_node_created_rev(&test_rev, rev_root, "", subpool));
+ if (test_rev != youngest_rev)
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "Root node in revision %ld has unexpected stored revision %ld",
+ youngest_rev, test_rev);
+ svn_pool_clear(subpool);
+ }
+
+ svn_pool_destroy(subpool);
+ return SVN_NO_ERROR;
+}
+
+
+/* A path paired with its expected "created rev"; used by
+ verify_path_revs() below. */
+struct node_created_rev_args {
+ const char *path; /* path of the node to check */
+ svn_revnum_t rev; /* expected created revision for PATH */
+};
+
+
+/* For each of the first NUM_PATH_REVS entries of ARGS, verify that the
+ node at ARGS[i].path under ROOT has created-rev ARGS[i].rev. Return
+ an SVN_ERR_FS_GENERAL error describing the first mismatch found. */
+static svn_error_t *
+verify_path_revs(svn_fs_root_t *root,
+ struct node_created_rev_args *args,
+ int num_path_revs,
+ apr_pool_t *pool)
+{
+ apr_pool_t *subpool = svn_pool_create(pool);
+ int i;
+ svn_revnum_t rev;
+
+ for (i = 0; i < num_path_revs; i++)
+ {
+ svn_pool_clear(subpool);
+ SVN_ERR(svn_fs_node_created_rev(&rev, root, args[i].path, subpool));
+ if (rev != args[i].rev)
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "verify_path_revs: '%s' has created rev '%ld' "
+ "(expected '%ld')",
+ args[i].path, rev, args[i].rev);
+ }
+
+ svn_pool_destroy(subpool);
+ return SVN_NO_ERROR;
+}
+
+
+/* Exercise svn_fs_node_created_rev() over the greek tree: before a
+ commit, mutable nodes report an invalid created rev; after the
+ commit, they report the new youngest revision, while untouched nodes
+ keep their original created rev.
+
+ Fix: the path table has 21 entries (index 20 is A/D/H/omega), but the
+ initialization loops and verify_path_revs() calls previously used 20,
+ so path_revs[20].path was never set and omega -- whose .rev the test
+ carefully updates below -- was never actually verified. Use 21
+ throughout. */
+static svn_error_t *
+test_node_created_rev(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ apr_pool_t *subpool = svn_pool_create(pool);
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *rev_root;
+ svn_revnum_t youngest_rev = 0;
+ int i;
+ struct node_created_rev_args path_revs[21];
+ const char *greek_paths[21] = {
+ /* 0 */ "",
+ /* 1 */ "iota",
+ /* 2 */ "A",
+ /* 3 */ "A/mu",
+ /* 4 */ "A/B",
+ /* 5 */ "A/B/lambda",
+ /* 6 */ "A/B/E",
+ /* 7 */ "A/B/E/alpha",
+ /* 8 */ "A/B/E/beta",
+ /* 9 */ "A/B/F",
+ /* 10 */ "A/C",
+ /* 11 */ "A/D",
+ /* 12 */ "A/D/gamma",
+ /* 13 */ "A/D/G",
+ /* 14 */ "A/D/G/pi",
+ /* 15 */ "A/D/G/rho",
+ /* 16 */ "A/D/G/tau",
+ /* 17 */ "A/D/H",
+ /* 18 */ "A/D/H/chi",
+ /* 19 */ "A/D/H/psi",
+ /* 20 */ "A/D/H/omega",
+ };
+
+ /* Initialize the paths in our args list (all 21 of them). */
+ for (i = 0; i < 21; i++)
+ path_revs[i].path = greek_paths[i];
+
+ /* Create a filesystem and repository. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-node-created-rev",
+ opts, pool));
+
+ /* Created the greek tree in revision 1. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, subpool));
+
+ /* Now, prior to committing, all these nodes should have an invalid
+ created rev. After all, the rev hasn't been created yet. Verify
+ this. */
+ for (i = 0; i < 21; i++)
+ path_revs[i].rev = SVN_INVALID_REVNUM;
+ SVN_ERR(verify_path_revs(txn_root, path_revs, 21, subpool));
+
+ /* Now commit the transaction. */
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+ /* Now, we have a new revision, and all paths in it should have a
+ created rev of 1. Verify this. */
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, subpool));
+ for (i = 0; i < 21; i++)
+ path_revs[i].rev = 1;
+ SVN_ERR(verify_path_revs(rev_root, path_revs, 21, subpool));
+
+ /*** Let's make some changes/commits here and there, and make sure
+ we can keep this whole created rev thing in good standing. The
+ general rule here is that prior to commit, mutable things have
+ an invalid created rev, immutable things have their original
+ created rev. After the commit, those things which had invalid
+ created revs in the transaction now have the youngest revision
+ as their created rev.
+
+ ### NOTE: Bubble-up currently affect the created revisions for
+ directory nodes. I'm not sure if this is the behavior we've
+ settled on as desired.
+ */
+
+ /*** clear the per-commit pool */
+ svn_pool_clear(subpool);
+ /* begin a new transaction */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ /* The created revs on a txn root should be the same as on the rev
+ root it came from, if we haven't made changes yet. (See issue
+ #2608.) */
+ SVN_ERR(verify_path_revs(txn_root, path_revs, 21, subpool));
+ /* make mods */
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "iota", "pointless mod here", subpool));
+ /* verify created revs: the modified file and (via bubble-up) the
+ root are now mutable, hence invalid */
+ path_revs[0].rev = SVN_INVALID_REVNUM; /* (root) */
+ path_revs[1].rev = SVN_INVALID_REVNUM; /* iota */
+ SVN_ERR(verify_path_revs(txn_root, path_revs, 21, subpool));
+ /* commit transaction */
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ /* get a revision root for the new revision */
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, subpool));
+ /* verify created revs */
+ path_revs[0].rev = 2; /* (root) */
+ path_revs[1].rev = 2; /* iota */
+ SVN_ERR(verify_path_revs(rev_root, path_revs, 21, subpool));
+
+ /*** clear the per-commit pool */
+ svn_pool_clear(subpool);
+ /* begin a new transaction */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ /* make mods */
+ SVN_ERR(svn_test__set_file_contents
+ (txn_root, "A/D/H/omega", "pointless mod here", subpool));
+ /* verify created revs: bubble-up makes every directory on the path
+ to omega mutable */
+ path_revs[0].rev = SVN_INVALID_REVNUM; /* (root) */
+ path_revs[2].rev = SVN_INVALID_REVNUM; /* A */
+ path_revs[11].rev = SVN_INVALID_REVNUM; /* D */
+ path_revs[17].rev = SVN_INVALID_REVNUM; /* H */
+ path_revs[20].rev = SVN_INVALID_REVNUM; /* omega */
+ SVN_ERR(verify_path_revs(txn_root, path_revs, 21, subpool));
+ /* commit transaction */
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ /* get a revision root for the new revision */
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, subpool));
+ /* verify created revs */
+ path_revs[0].rev = 3; /* (root) */
+ path_revs[2].rev = 3; /* A */
+ path_revs[11].rev = 3; /* D */
+ path_revs[17].rev = 3; /* H */
+ path_revs[20].rev = 3; /* omega */
+ SVN_ERR(verify_path_revs(rev_root, path_revs, 21, subpool));
+
+ /* Destroy the per-commit subpool. */
+ svn_pool_destroy(subpool);
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Build a history of copies and modifications across files A-F, then
+ exhaustively verify svn_fs_check_related() and svn_fs_node_relation()
+ for every pair of (path, revision) node instances against a
+ hand-computed relatedness matrix. Also check that unchanged nodes
+ keep the same noderev in later revisions. */
+static svn_error_t *
+check_related(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ apr_pool_t *subpool = svn_pool_create(pool);
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *rev_root;
+ svn_revnum_t youngest_rev = 0;
+
+ /* Create a filesystem and repository. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-check-related",
+ opts, pool));
+
+ /*** Step I: Build up some state in our repository through a series
+ of commits */
+
+ /* Using files because bubble-up complicates the testing. However,
+ the algorithm itself is ambivalent about what type of node is
+ being examined.
+
+ - New files show up in this order (through time): A,B,C,D,E,F
+ - Number following filename is the revision.
+ - Vertical motion shows revision history
+ - Horizontal motion show copy history.
+
+ A1---------C4 E7
+ | | |
+ A2 C5 E8---F9
+ | | |
+ A3---B4 C6 F10
+ | |
+ A4 B5----------D6
+ | |
+ B6 D7
+ */
+ /* Revision 1 */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A", subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A", "1", subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+ /* Revision 2 */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A", "2", subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+ /* Revision 3 */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A", "3", subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+ /* Revision 4: modify A; copy A@3 to B, A@1 to C */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A", "4", subpool));
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, 3, subpool));
+ SVN_ERR(svn_fs_copy(rev_root, "A", txn_root, "B", subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "B", "4", subpool));
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, 1, subpool));
+ SVN_ERR(svn_fs_copy(rev_root, "A", txn_root, "C", subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "C", "4", subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+ /* Revision 5 */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "B", "5", subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "C", "5", subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+ /* Revision 6: modify B and C; copy B@5 to D */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "B", "6", subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "C", "6", subpool));
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, 5, subpool));
+ SVN_ERR(svn_fs_copy(rev_root, "B", txn_root, "D", subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "D", "5", subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+ /* Revision 7: modify D; create E (unrelated to the A family) */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "D", "7", subpool));
+ SVN_ERR(svn_fs_make_file(txn_root, "E", subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "E", "7", subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+ /* Revision 8 */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "E", "8", subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+ /* Revision 9: copy E@8 to F */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, 8, subpool));
+ SVN_ERR(svn_fs_copy(rev_root, "E", txn_root, "F", subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "F", "9", subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+ /* Revision 10 */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "F", "10", subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+
+ /*** Step II: Exhaustively verify relationship between all nodes in
+ existence. */
+ {
+ int i, j;
+
+ struct path_rev_t
+ {
+ const char *path;
+ svn_revnum_t rev;
+ };
+
+ /* Our 16 existing files/revisions. */
+ struct path_rev_t path_revs[16] = {
+ { "A", 1 }, { "A", 2 }, { "A", 3 }, { "A", 4 },
+ { "B", 4 }, { "B", 5 }, { "B", 6 }, { "C", 4 },
+ { "C", 5 }, { "C", 6 }, { "D", 6 }, { "D", 7 },
+ { "E", 7 }, { "E", 8 }, { "F", 9 }, { "F", 10 }
+ };
+
+ /* Latest revision that touched the respective path. */
+ struct path_rev_t latest_changes[6] = {
+ { "A", 4 }, { "B", 6 }, { "C", 6 },
+ { "D", 7 }, { "E", 8 }, { "F", 10 }
+ };
+
+ /* Expected relatedness: the A/B/C/D family is all related, as is
+ the E/F family; the two families are unrelated to each other. */
+ int related_matrix[16][16] = {
+ /* A1 ... F10 across the top here*/
+ { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0 }, /* A1 */
+ { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0 }, /* A2 */
+ { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0 }, /* A3 */
+ { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0 }, /* A4 */
+ { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0 }, /* B4 */
+ { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0 }, /* B5 */
+ { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0 }, /* B6 */
+ { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0 }, /* C4 */
+ { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0 }, /* C5 */
+ { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0 }, /* C6 */
+ { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0 }, /* D6 */
+ { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0 }, /* D7 */
+ { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1 }, /* E7 */
+ { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1 }, /* E8 */
+ { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1 }, /* F9 */
+ { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1 } /* F10 */
+ };
+
+ /* Here's the fun part. Running the tests. */
+ for (i = 0; i < 16; i++)
+ {
+ for (j = 0; j < 16; j++)
+ {
+ struct path_rev_t pr1 = path_revs[i];
+ struct path_rev_t pr2 = path_revs[j];
+ const svn_fs_id_t *id1, *id2;
+ int related = 0;
+ svn_fs_node_relation_t relation;
+ svn_fs_root_t *rev_root1, *rev_root2;
+
+ /* Get the ID for the first path/revision combination. */
+ SVN_ERR(svn_fs_revision_root(&rev_root1, fs, pr1.rev, subpool));
+ SVN_ERR(svn_fs_node_id(&id1, rev_root1, pr1.path, subpool));
+
+ /* Get the ID for the second path/revision combination. */
+ SVN_ERR(svn_fs_revision_root(&rev_root2, fs, pr2.rev, subpool));
+ SVN_ERR(svn_fs_node_id(&id2, rev_root2, pr2.path, subpool));
+
+ /* <exciting> Now, run the relationship check! </exciting> */
+ related = svn_fs_check_related(id1, id2) ? 1 : 0;
+ if (related == related_matrix[i][j])
+ {
+ /* xlnt! */
+ }
+ else if (related && (! related_matrix[i][j]))
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "expected '%s:%d' to be related to '%s:%d'; it was not",
+ pr1.path, (int)pr1.rev, pr2.path, (int)pr2.rev);
+ }
+ else if ((! related) && related_matrix[i][j])
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "expected '%s:%d' to not be related to '%s:%d'; it was",
+ pr1.path, (int)pr1.rev, pr2.path, (int)pr2.rev);
+ }
+
+ /* Asking directly, i.e. without involving the noderev IDs as
+ * an intermediate, should yield the same results. */
+ SVN_ERR(svn_fs_node_relation(&relation, rev_root1, pr1.path,
+ rev_root2, pr2.path, subpool));
+ if (i == j)
+ {
+ /* Identical noderev. */
+ if (!related || relation != svn_fs_node_unchanged)
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "expected '%s:%d' to be the same as '%s:%d';"
+ " it was not",
+ pr1.path, (int)pr1.rev, pr2.path, (int)pr2.rev);
+ }
+ }
+ else if (related && relation != svn_fs_node_common_ancestor)
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "expected '%s:%d' to have a common ancestor with '%s:%d';"
+ " it had not",
+ pr1.path, (int)pr1.rev, pr2.path, (int)pr2.rev);
+ }
+ else if (!related && relation != svn_fs_node_unrelated)
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "expected '%s:%d' to not be related to '%s:%d'; it was",
+ pr1.path, (int)pr1.rev, pr2.path, (int)pr2.rev);
+ }
+
+ svn_pool_clear(subpool);
+ } /* for ... */
+ } /* for ... */
+
+ /* Verify that the noderevs stay the same after their last change. */
+ for (i = 0; i < 6; ++i)
+ {
+ const char *path = latest_changes[i].path;
+ svn_revnum_t latest = latest_changes[i].rev;
+ svn_fs_root_t *latest_root;
+ svn_revnum_t rev;
+ svn_fs_node_relation_t relation;
+
+ /* FS root of the latest change. */
+ svn_pool_clear(subpool);
+ SVN_ERR(svn_fs_revision_root(&latest_root, fs, latest, subpool));
+
+ /* All future revisions. */
+ for (rev = latest + 1; rev <= 10; ++rev)
+ {
+ /* Query their noderev relationship to the latest change. */
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, rev, subpool));
+ SVN_ERR(svn_fs_node_relation(&relation, latest_root, path,
+ rev_root, path, subpool));
+
+ /* They shall use the same noderevs */
+ if (relation != svn_fs_node_unchanged)
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "expected '%s:%d' to be the same as '%s:%d';"
+ " it was not",
+ path, (int)latest, path, (int)rev);
+ }
+ } /* for ... */
+ } /* for ... */
+ }
+
+ /* Destroy the subpool. */
+ svn_pool_destroy(subpool);
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Like check_related(), but verify svn_fs_check_related() and
+ svn_fs_node_relation() across nodes that live in one committed
+ revision plus two still-uncommitted transactions. */
+static svn_error_t *
+check_txn_related(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ apr_pool_t *subpool = svn_pool_create(pool);
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn[3];
+ svn_fs_root_t *root[3];
+ svn_revnum_t youngest_rev = 0;
+
+ /* Create a filesystem and repository. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-check-txn-related",
+ opts, pool));
+
+ /*** Step I: Build up some state in our repository through a series
+ of commits */
+
+ /* This is the node graph we are testing. It contains one revision (r1)
+ and two transactions, T1 and T2 - yet uncommitted.
+
+ A is a file that exists in r1 (A-0) and gets modified in both txns.
+ C is a copy of A-0 made in both txns.
+ B is a new node created in both txns
+ D is a file that exists in r1 (D-0) and never gets modified.
+ / is the root folder, touched in r0, r1 and both txns (root-0)
+ R is a copy of the root-0 made in both txns.
+
+ The edges in the graph connect related noderevs:
+
+ +--A-0--+ D-0 +-root-0-+
+ | | | |
+ +-----+ +-----+ +------+ +------+
+ | | | | | | | |
+ B-1 C-1 A-1 A-2 C-2 B-2 R-1 root-1 root-2 R-2
+ */
+ /* Revision 1 */
+ SVN_ERR(svn_fs_begin_txn(&txn[0], fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&root[0], txn[0], subpool));
+ SVN_ERR(svn_fs_make_file(root[0], "A", subpool));
+ SVN_ERR(svn_test__set_file_contents(root[0], "A", "1", subpool));
+ SVN_ERR(svn_fs_make_file(root[0], "D", subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn[0], subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+ /* root[0] becomes the committed r1 root; allocated in POOL because
+ it must outlive the subpool clears below. */
+ SVN_ERR(svn_fs_revision_root(&root[0], fs, youngest_rev, pool));
+
+ /* Transaction 1 */
+ SVN_ERR(svn_fs_begin_txn(&txn[1], fs, youngest_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&root[1], txn[1], pool));
+ SVN_ERR(svn_test__set_file_contents(root[1], "A", "2", pool));
+ SVN_ERR(svn_fs_copy(root[0], "A", root[1], "C", pool));
+ SVN_ERR(svn_fs_copy(root[0], "", root[1], "R", pool));
+ SVN_ERR(svn_fs_make_file(root[1], "B", pool));
+
+ /* Transaction 2 */
+ SVN_ERR(svn_fs_begin_txn(&txn[2], fs, youngest_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&root[2], txn[2], pool));
+ SVN_ERR(svn_test__set_file_contents(root[2], "A", "2", pool));
+ SVN_ERR(svn_fs_copy(root[0], "A", root[2], "C", pool));
+ SVN_ERR(svn_fs_copy(root[0], "", root[2], "R", pool));
+ SVN_ERR(svn_fs_make_file(root[2], "B", pool));
+
+ /*** Step II: Exhaustively verify relationship between all nodes in
+ existence. */
+ {
+ enum { NODE_COUNT = 13 };
+ int i, j;
+
+ struct path_rev_t
+ {
+ const char *path;
+ int root;
+ };
+
+ /* Our NODE_COUNT (13) existing node instances; .root indexes the
+ ROOT array above (0 = r1, 1 = T1, 2 = T2). */
+ struct path_rev_t path_revs[NODE_COUNT] = {
+ { "A", 0 }, { "A", 1 }, { "A", 2 },
+ { "B", 1 }, { "B", 2 },
+ { "C", 1 }, { "C", 2 },
+ { "D", 0 },
+ { "/", 0 }, { "/", 1 }, { "/", 2 },
+ { "R", 1 }, { "R", 2 }
+ };
+
+ int related_matrix[NODE_COUNT][NODE_COUNT] = {
+ /* A-0 ... R-2 across the top here*/
+ { 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0 }, /* A-0 */
+ { 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0 }, /* A-1 */
+ { 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0 }, /* A-2 */
+ { 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, /* B-1 */
+ { 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 }, /* B-2 */
+ { 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0 }, /* C-1 */
+ { 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0 }, /* C-2 */
+ { 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0 }, /* D-0 */
+ { 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1 }, /* root-0 */
+ { 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1 }, /* root-1 */
+ { 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1 }, /* root-2 */
+ { 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1 }, /* R-1 */
+ { 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1 }, /* R-2 */
+ };
+
+ /* Here's the fun part. Running the tests. */
+ for (i = 0; i < NODE_COUNT; i++)
+ {
+ for (j = 0; j < NODE_COUNT; j++)
+ {
+ struct path_rev_t pr1 = path_revs[i];
+ struct path_rev_t pr2 = path_revs[j];
+ const svn_fs_id_t *id1, *id2;
+ int related = 0;
+ svn_fs_node_relation_t relation;
+
+ svn_pool_clear(subpool);
+
+ /* Get the ID for the first path/revision combination. */
+ SVN_ERR(svn_fs_node_id(&id1, root[pr1.root], pr1.path, subpool));
+
+ /* Get the ID for the second path/revision combination. */
+ SVN_ERR(svn_fs_node_id(&id2, root[pr2.root], pr2.path, subpool));
+
+ /* <exciting> Now, run the relationship check! </exciting> */
+ related = svn_fs_check_related(id1, id2) ? 1 : 0;
+ if (related == related_matrix[i][j])
+ {
+ /* xlnt! */
+ }
+ else if ((! related) && related_matrix[i][j])
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "expected '%s-%d' to be related to '%s-%d'; it was not",
+ pr1.path, pr1.root, pr2.path, pr2.root);
+ }
+ else if (related && (! related_matrix[i][j]))
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "expected '%s-%d' to not be related to '%s-%d'; it was",
+ pr1.path, pr1.root, pr2.path, pr2.root);
+ }
+
+ /* Asking directly, i.e. without involving the noderev IDs as
+ * an intermediate, should yield the same results. */
+ SVN_ERR(svn_fs_node_relation(&relation, root[pr1.root], pr1.path,
+ root[pr2.root], pr2.path, subpool));
+ if (i == j)
+ {
+ /* Identical noderev. */
+ if (!related || relation != svn_fs_node_unchanged)
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "expected '%s-%d' to be the same as '%s-%d';"
+ " it was not",
+ pr1.path, pr1.root, pr2.path, pr2.root);
+ }
+ }
+ else if (related && relation != svn_fs_node_common_ancestor)
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "expected '%s-%d' to have a common ancestor with '%s-%d';"
+ " it had not",
+ pr1.path, pr1.root, pr2.path, pr2.root);
+ }
+ else if (!related && relation != svn_fs_node_unrelated)
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "expected '%s-%d' to not be related to '%s-%d'; it was",
+ pr1.path, pr1.root, pr2.path, pr2.root);
+ }
+ } /* for ... */
+ } /* for ... */
+
+ /* Verify that the noderevs stay the same after their last change.
+ There is only D that is not changed. */
+ for (i = 1; i <= 2; ++i)
+ {
+ svn_fs_node_relation_t relation;
+ svn_pool_clear(subpool);
+
+ /* Query their noderev relationship to the latest change. */
+ SVN_ERR(svn_fs_node_relation(&relation, root[i], "D",
+ root[0], "D", subpool));
+
+ /* They shall use the same noderevs */
+ if (relation != svn_fs_node_unchanged)
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "expected 'D-%d' to be the same as 'D-0'; it was not", i);
+ }
+ } /* for ... */
+ }
+
+ /* Destroy the subpool. */
+ svn_pool_destroy(subpool);
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Test FS branching via svn_fs_copy(): build the greek tree, then copy
+ * A/D/G/rho -> A/D/G/rho2, A/D/G -> A/D/G2, and A/D -> A/D2 in three
+ * separate revisions, and finally edit every resulting copy of 'rho'
+ * (eight paths) in a single commit.  Each commit is asserted to have
+ * produced a valid new revision number. */
+static svn_error_t *
+branch_test(const svn_test_opts_t *opts,
+            apr_pool_t *pool)
+{
+  apr_pool_t *spool = svn_pool_create(pool);
+  svn_fs_t *fs;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *txn_root, *rev_root;
+  svn_revnum_t youngest_rev = 0;
+
+  /* Create a filesystem and repository. */
+  SVN_ERR(svn_test__create_fs(&fs, "test-repo-branch",
+                              opts, pool));
+
+  /*** Revision 1: Create the greek tree in revision.  ***/
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, spool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+  SVN_ERR(svn_test__create_greek_tree(txn_root, spool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, spool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(spool);
+
+  /*** Revision 2: Copy A/D/G/rho to A/D/G/rho2.  ***/
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, spool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+  SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, spool));
+  SVN_ERR(svn_fs_copy(rev_root, "A/D/G/rho", txn_root, "A/D/G/rho2", spool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, spool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(spool);
+
+  /*** Revision 3: Copy A/D/G to A/D/G2.  ***/
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, spool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+  SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, spool));
+  SVN_ERR(svn_fs_copy(rev_root, "A/D/G", txn_root, "A/D/G2", spool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, spool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(spool);
+
+  /*** Revision 4: Copy A/D to A/D2.  ***/
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, spool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+  SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, spool));
+  SVN_ERR(svn_fs_copy(rev_root, "A/D", txn_root, "A/D2", spool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, spool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(spool);
+
+  /*** Revision 5: Edit all the rho's!  ***/
+  /* After the three copies there are eight reachable 'rho' paths;
+     touch every one of them in the same txn. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, spool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+  SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, spool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/G/rho",
+                                      "Edited text.", spool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/G/rho2",
+                                      "Edited text.", spool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/G2/rho",
+                                      "Edited text.", spool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/G2/rho2",
+                                      "Edited text.", spool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/D2/G/rho",
+                                      "Edited text.", spool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/D2/G/rho2",
+                                      "Edited text.", spool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/D2/G2/rho",
+                                      "Edited text.", spool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/D2/G2/rho2",
+                                      "Edited text.", spool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, spool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+  svn_pool_destroy(spool);
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Verify that file FILENAME under ROOT has the same contents checksum
+ * as CONTENTS when comparing the checksums of the given TYPE.
+ * Return an SVN_ERR_FS_GENERAL error on mismatch.
+ * Use POOL for temporary allocations. */
+static svn_error_t *
+verify_file_checksum(svn_stringbuf_t *contents,
+                     svn_fs_root_t *root,
+                     const char *filename,
+                     svn_checksum_kind_t type,
+                     apr_pool_t *pool)
+{
+  svn_checksum_t *expected_checksum, *actual_checksum;
+
+  /* Write a file, compare the repository's idea of its checksum
+     against our idea of its checksum.  They should be the same. */
+  SVN_ERR(svn_checksum(&expected_checksum, type, contents->data,
+                       contents->len, pool));
+  /* NOTE(review): the final TRUE presumably asks the FS to compute the
+     checksum when one is not already stored -- confirm against the
+     svn_fs_file_checksum docs in svn_fs.h. */
+  SVN_ERR(svn_fs_file_checksum(&actual_checksum, type, root, filename, TRUE,
+                               pool));
+  if (!svn_checksum_match(expected_checksum, actual_checksum))
+    return svn_error_createf
+      (SVN_ERR_FS_GENERAL, NULL,
+       "verify-checksum: checksum mismatch:\n"
+       "   expected:  %s\n"
+       "     actual:  %s\n",
+       svn_checksum_to_cstring(expected_checksum, pool),
+       svn_checksum_to_cstring(actual_checksum, pool));
+
+  return SVN_NO_ERROR;
+}
+
+/* Test that the checksums the FS reports for a file's contents (both
+ * MD5 and SHA1) match locally computed ones -- first for the file
+ * inside an uncommitted txn, then again after the txn is committed
+ * and read back through a revision root. */
+static svn_error_t *
+verify_checksum(const svn_test_opts_t *opts,
+                apr_pool_t *pool)
+{
+  svn_fs_t *fs;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *txn_root, *rev_root;
+  svn_stringbuf_t *str;
+  svn_revnum_t rev;
+
+  /* Write a file, compare the repository's idea of its checksum
+     against our idea of its checksum.  They should be the same. */
+  str = svn_stringbuf_create("My text editor charges me rent.", pool);
+
+  SVN_ERR(svn_test__create_fs(&fs, "test-repo-verify-checksum",
+                              opts, pool));
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+  SVN_ERR(svn_fs_make_file(txn_root, "fact", pool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "fact", str->data, pool));
+
+  /* Do it for the txn. */
+  SVN_ERR(verify_file_checksum(str, txn_root, "fact", svn_checksum_md5,
+                               pool));
+  SVN_ERR(verify_file_checksum(str, txn_root, "fact", svn_checksum_sha1,
+                               pool));
+
+  /* Do it again - this time for the revision. */
+  SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+  SVN_ERR(svn_fs_revision_root(&rev_root, fs, rev, pool));
+  SVN_ERR(verify_file_checksum(str, rev_root, "fact", svn_checksum_md5,
+                               pool));
+  SVN_ERR(verify_file_checksum(str, rev_root, "fact", svn_checksum_sha1,
+                               pool));
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Helper for closest_copy_test().  Verify that CLOSEST_PATH and the
+   revision associated with CLOSEST_ROOT match the EXPECTED_PATH and
+   EXPECTED_REVISION, respectively.  Pass EXPECTED_PATH == NULL and
+   EXPECTED_REVISION == SVN_INVALID_REVNUM to assert that no closest
+   copy was found at all.  Return SVN_ERR_FS_GENERAL on any mismatch
+   or on a half-populated (path without root, or vice versa) result. */
+static svn_error_t *
+test_closest_copy_pair(svn_fs_root_t *closest_root,
+                       const char *closest_path,
+                       svn_revnum_t expected_revision,
+                       const char *expected_path)
+{
+  svn_revnum_t closest_rev = SVN_INVALID_REVNUM;
+
+  /* Callers must pass valid -- EXPECTED_PATH and EXPECTED_REVISION
+     come as a both-or-nothing pair. */
+  assert(((! expected_path) && (! SVN_IS_VALID_REVNUM(expected_revision)))
+         || (expected_path && SVN_IS_VALID_REVNUM(expected_revision)));
+
+  /* CLOSEST_PATH and CLOSEST_ROOT come as a both-or-nothing pair, too. */
+  if (closest_path && (! closest_root))
+    return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+                            "got closest path but no closest root");
+  if ((! closest_path) && closest_root)
+    return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+                            "got closest root but no closest path");
+
+  /* Now that our pairs are known sane, we can compare them. */
+  if (closest_path && (! expected_path))
+    return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+                             "got closest path ('%s') when none expected",
+                             closest_path);
+  if ((! closest_path) && expected_path)
+    return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+                             "got no closest path; expected '%s'",
+                             expected_path);
+  if (closest_path && (strcmp(closest_path, expected_path) != 0))
+    return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+                             "got a different closest path than expected:\n"
+                             "   expected:  %s\n"
+                             "     actual:  %s",
+                             expected_path, closest_path);
+  if (closest_root)
+    closest_rev = svn_fs_revision_root_revision(closest_root);
+  if (closest_rev != expected_revision)
+    return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+                             "got a different closest rev than expected:\n"
+                             "   expected:  %ld\n"
+                             "     actual:  %ld",
+                             expected_revision, closest_rev);
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Test svn_fs_closest_copy(): build the greek tree, make copies at
+ * several depths (A -> Z, A -> Z2, A/D/H -> Z2/D/H2), then verify for
+ * assorted paths that the reported closest-copy pair is the deepest
+ * enclosing copy -- or (NULL, SVN_INVALID_REVNUM) for paths with no
+ * relevant copy (never-copied originals and files created after the
+ * copy). */
+static svn_error_t *
+closest_copy_test(const svn_test_opts_t *opts,
+                  apr_pool_t *pool)
+{
+  svn_fs_t *fs;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *txn_root, *rev_root, *croot;
+  svn_revnum_t after_rev;
+  apr_pool_t *spool = svn_pool_create(pool);
+  const char *cpath;
+
+  /* Prepare a filesystem. */
+  SVN_ERR(svn_test__create_fs(&fs, "test-repo-closest-copy",
+                              opts, pool));
+
+  /* In first txn, create and commit the greek tree. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, spool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+  SVN_ERR(svn_test__create_greek_tree(txn_root, spool));
+  SVN_ERR(test_commit_txn(&after_rev, txn, NULL, spool));
+  svn_pool_clear(spool);
+  SVN_ERR(svn_fs_revision_root(&rev_root, fs, after_rev, spool));
+
+  /* Copy A to Z, and commit. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, after_rev, spool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+  SVN_ERR(svn_fs_copy(rev_root, "A", txn_root, "Z", spool));
+  SVN_ERR(test_commit_txn(&after_rev, txn, NULL, spool));
+  svn_pool_clear(spool);
+  SVN_ERR(svn_fs_revision_root(&rev_root, fs, after_rev, spool));
+
+  /* Anything under Z should have a closest copy pair of ("/Z", 2), so
+     we'll pick some spots to test.  Stuff under A should have no
+     relevant closest copy. */
+  SVN_ERR(svn_fs_closest_copy(&croot, &cpath, rev_root, "Z", spool));
+  SVN_ERR(test_closest_copy_pair(croot, cpath, 2, "/Z"));
+  SVN_ERR(svn_fs_closest_copy(&croot, &cpath, rev_root, "Z/D/G", spool));
+  SVN_ERR(test_closest_copy_pair(croot, cpath, 2, "/Z"));
+  SVN_ERR(svn_fs_closest_copy(&croot, &cpath, rev_root, "Z/mu", spool));
+  SVN_ERR(test_closest_copy_pair(croot, cpath, 2, "/Z"));
+  SVN_ERR(svn_fs_closest_copy(&croot, &cpath, rev_root, "Z/B/E/beta", spool));
+  SVN_ERR(test_closest_copy_pair(croot, cpath, 2, "/Z"));
+  SVN_ERR(svn_fs_closest_copy(&croot, &cpath, rev_root, "A", spool));
+  SVN_ERR(test_closest_copy_pair(croot, cpath, SVN_INVALID_REVNUM, NULL));
+  SVN_ERR(svn_fs_closest_copy(&croot, &cpath, rev_root, "A/D/G", spool));
+  SVN_ERR(test_closest_copy_pair(croot, cpath, SVN_INVALID_REVNUM, NULL));
+  SVN_ERR(svn_fs_closest_copy(&croot, &cpath, rev_root, "A/mu", spool));
+  SVN_ERR(test_closest_copy_pair(croot, cpath, SVN_INVALID_REVNUM, NULL));
+  SVN_ERR(svn_fs_closest_copy(&croot, &cpath, rev_root, "A/B/E/beta", spool));
+  SVN_ERR(test_closest_copy_pair(croot, cpath, SVN_INVALID_REVNUM, NULL));
+
+  /* Okay, so let's do some more stuff.  We'll edit Z/mu, copy A to
+     Z2, copy A/D/H to Z2/D/H2, and edit Z2/D/H2/chi.  We'll also make
+     new Z/t and Z2/D/H2/t files. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, after_rev, spool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "Z/mu",
+                                      "Edited text.", spool));
+  SVN_ERR(svn_fs_copy(rev_root, "A", txn_root, "Z2", spool));
+  SVN_ERR(svn_fs_copy(rev_root, "A/D/H", txn_root, "Z2/D/H2", spool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "Z2/D/H2/chi",
+                                      "Edited text.", spool));
+  /* Allocate these in SPOOL like every other call in this txn (the
+     original inconsistently used POOL here, needlessly extending the
+     allocations' lifetime). */
+  SVN_ERR(svn_fs_make_file(txn_root, "Z/t", spool));
+  SVN_ERR(svn_fs_make_file(txn_root, "Z2/D/H2/t", spool));
+  SVN_ERR(test_commit_txn(&after_rev, txn, NULL, spool));
+  svn_pool_clear(spool);
+  SVN_ERR(svn_fs_revision_root(&rev_root, fs, after_rev, spool));
+
+  /* Okay, just for kicks, let's modify Z2/D/H2/t.  Shouldn't affect
+     its closest-copy-ness, right? */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, after_rev, spool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "Z2/D/H2/t",
+                                      "Edited text.", spool));
+  SVN_ERR(test_commit_txn(&after_rev, txn, NULL, spool));
+  svn_pool_clear(spool);
+  SVN_ERR(svn_fs_revision_root(&rev_root, fs, after_rev, spool));
+
+  /* Now, we expect Z2/D/H2 to have a closest copy of ("/Z2/D/H2", 3)
+     because of the deepest path rule.  We expected Z2/D to have a
+     closest copy of ("/Z2", 3).  Z/mu should still have a closest
+     copy of ("/Z", 2).  As for the two new files (Z/t and Z2/D/H2/t),
+     neither should have a closest copy. */
+  SVN_ERR(svn_fs_closest_copy(&croot, &cpath, rev_root, "A/mu", spool));
+  SVN_ERR(test_closest_copy_pair(croot, cpath, SVN_INVALID_REVNUM, NULL));
+  SVN_ERR(svn_fs_closest_copy(&croot, &cpath, rev_root, "Z/mu", spool));
+  SVN_ERR(test_closest_copy_pair(croot, cpath, 2, "/Z"));
+  SVN_ERR(svn_fs_closest_copy(&croot, &cpath, rev_root, "Z2/D/H2", spool));
+  SVN_ERR(test_closest_copy_pair(croot, cpath, 3, "/Z2/D/H2"));
+  SVN_ERR(svn_fs_closest_copy(&croot, &cpath, rev_root, "Z2/D", spool));
+  SVN_ERR(test_closest_copy_pair(croot, cpath, 3, "/Z2"));
+  SVN_ERR(svn_fs_closest_copy(&croot, &cpath, rev_root, "Z/t", spool));
+  SVN_ERR(test_closest_copy_pair(croot, cpath, SVN_INVALID_REVNUM, NULL));
+  SVN_ERR(svn_fs_closest_copy(&croot, &cpath, rev_root, "Z2/D/H2/t", spool));
+  SVN_ERR(test_closest_copy_pair(croot, cpath, SVN_INVALID_REVNUM, NULL));
+
+  /* Release the subpool before returning, like the sibling tests do. */
+  svn_pool_destroy(spool);
+
+  return SVN_NO_ERROR;
+}
+
+/* Test the root/revision accessors: svn_fs_revision_root_revision()
+ * must return the revision a revision root was opened at (and
+ * SVN_INVALID_REVNUM for a txn root), while
+ * svn_fs_txn_root_base_revision() must return the base revision of a
+ * txn root (and SVN_INVALID_REVNUM for a revision root). */
+static svn_error_t *
+root_revisions(const svn_test_opts_t *opts,
+               apr_pool_t *pool)
+{
+  svn_fs_t *fs;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *txn_root, *rev_root;
+  svn_revnum_t after_rev, fetched_rev;
+  apr_pool_t *spool = svn_pool_create(pool);
+
+  /* Prepare a filesystem. */
+  SVN_ERR(svn_test__create_fs(&fs, "test-repo-root-revisions",
+                              opts, pool));
+
+  /* In first txn, create and commit the greek tree. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, spool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+  SVN_ERR(svn_test__create_greek_tree(txn_root, spool));
+  SVN_ERR(test_commit_txn(&after_rev, txn, NULL, spool));
+
+  /* First, verify that a revision root based on our new revision
+     reports the correct associated revision.
+     (svn_revnum_t is a long, so print it with %ld as the rest of
+     this file does, instead of truncating through an int cast.) */
+  SVN_ERR(svn_fs_revision_root(&rev_root, fs, after_rev, spool));
+  fetched_rev = svn_fs_revision_root_revision(rev_root);
+  if (after_rev != fetched_rev)
+    return svn_error_createf
+      (SVN_ERR_TEST_FAILED, NULL,
+       "expected revision '%ld'; "
+       "got '%ld' from svn_fs_revision_root_revision(rev_root)",
+       after_rev, fetched_rev);
+
+  /* Then verify that we can't ask about the txn-base-rev from a
+     revision root. */
+  fetched_rev = svn_fs_txn_root_base_revision(rev_root);
+  if (fetched_rev != SVN_INVALID_REVNUM)
+    return svn_error_createf
+      (SVN_ERR_TEST_FAILED, NULL,
+       "expected SVN_INVALID_REVNUM; "
+       "got '%ld' from svn_fs_txn_root_base_revision(rev_root)",
+       fetched_rev);
+
+  /* Now, create a second txn based on AFTER_REV. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, after_rev, spool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+
+  /* Verify that it reports the right base revision. */
+  fetched_rev = svn_fs_txn_root_base_revision(txn_root);
+  if (after_rev != fetched_rev)
+    return svn_error_createf
+      (SVN_ERR_TEST_FAILED, NULL,
+       "expected '%ld'; "
+       "got '%ld' from svn_fs_txn_root_base_revision(txn_root)",
+       after_rev, fetched_rev);
+
+  /* Then verify that we can't ask about the rev-root-rev from a
+     txn root. */
+  fetched_rev = svn_fs_revision_root_revision(txn_root);
+  if (fetched_rev != SVN_INVALID_REVNUM)
+    return svn_error_createf
+      (SVN_ERR_TEST_FAILED, NULL,
+       "expected SVN_INVALID_REVNUM; "
+       "got '%ld' from svn_fs_revision_root_revision(txn_root)",
+       fetched_rev);
+
+  /* Release the subpool before returning, like the sibling tests do. */
+  svn_pool_destroy(spool);
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Regression test for issue #2751: two concurrent txns, one changing a
+ * file under a directory and the other changing that directory's
+ * properties, committed in both orders.  A propchange txn committing
+ * against an out-of-date directory must conflict; the reverse order's
+ * outcome depends on the backend (BDB conflicts, newer backends do
+ * not). */
+static svn_error_t *
+unordered_txn_dirprops(const svn_test_opts_t *opts,
+                       apr_pool_t *pool)
+{
+  svn_fs_t *fs;
+  svn_fs_txn_t *txn, *txn2;
+  svn_fs_root_t *txn_root, *txn_root2;
+  svn_string_t pval;
+  svn_revnum_t new_rev, not_rev;
+  svn_boolean_t is_bdb = strcmp(opts->fs_type, SVN_FS_TYPE_BDB) == 0;
+
+  /* This is a regression test for issue #2751. */
+
+  /* Prepare a filesystem. */
+  SVN_ERR(svn_test__create_fs(&fs, "test-repo-unordered-txn-dirprops",
+                              opts, pool));
+
+  /* Create and commit the greek tree. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+  SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+  SVN_ERR(test_commit_txn(&new_rev, txn, NULL, pool));
+
+  /* Open two transactions */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, new_rev, pool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+  SVN_ERR(svn_fs_begin_txn(&txn2, fs, new_rev, pool));
+  SVN_ERR(svn_fs_txn_root(&txn_root2, txn2, pool));
+
+  /* Change a child file in one. */
+  SVN_ERR(svn_test__set_file_contents(txn_root, "/A/B/E/alpha",
+                                      "New contents", pool));
+
+  /* Change dir props in the other.  (We're using svn:mergeinfo
+     property just to make sure special handling logic for that
+     property doesn't croak.) */
+  SET_STR(&pval, "/A/C:1");
+  SVN_ERR(svn_fs_change_node_prop(txn_root2, "/A/B", "svn:mergeinfo",
+                                  &pval, pool));
+
+  /* Commit the second one first. */
+  SVN_ERR(test_commit_txn(&new_rev, txn2, NULL, pool));
+
+  /* Then commit the first -- but expect a conflict due to the
+     propchanges made by the other txn. */
+  SVN_ERR(test_commit_txn(&not_rev, txn, "/A/B", pool));
+  SVN_ERR(svn_fs_abort_txn(txn, pool));
+
+  /* Now, let's try those in reverse.  Open two transactions */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, new_rev, pool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+  SVN_ERR(svn_fs_begin_txn(&txn2, fs, new_rev, pool));
+  SVN_ERR(svn_fs_txn_root(&txn_root2, txn2, pool));
+
+  /* Change a child file in one. */
+  SVN_ERR(svn_test__set_file_contents(txn_root, "/A/B/E/alpha",
+                                      "New contents", pool));
+
+  /* Change dir props in the other. */
+  SET_STR(&pval, "/A/C:1");
+  SVN_ERR(svn_fs_change_node_prop(txn_root2, "/A/B", "svn:mergeinfo",
+                                  &pval, pool));
+
+  /* Commit the first one first. */
+  SVN_ERR(test_commit_txn(&new_rev, txn, NULL, pool));
+
+  /* Some backends are cleverer than others. */
+  if (is_bdb)
+    {
+      /* Then commit the second -- but expect a conflict because the
+         directory wasn't up-to-date, which is required for propchanges. */
+      SVN_ERR(test_commit_txn(&not_rev, txn2, "/A/B", pool));
+      SVN_ERR(svn_fs_abort_txn(txn2, pool));
+    }
+  else
+    {
+      /* Then commit the second -- there will be no conflict despite the
+         directory being out-of-date because the properties as well as the
+         directory structure (list of nodes) were up-to-date. */
+      SVN_ERR(test_commit_txn(&not_rev, txn2, NULL, pool));
+    }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test svn_fs_set_uuid(): setting an explicit UUID must be readable
+ * back verbatim via svn_fs_get_uuid(), and setting a NULL UUID must
+ * generate a fresh one that differs from the previous value. */
+static svn_error_t *
+set_uuid(const svn_test_opts_t *opts,
+         apr_pool_t *pool)
+{
+  svn_fs_t *fs;
+  const char *fixed_uuid = svn_uuid_generate(pool);
+  const char *fetched_uuid;
+
+  /* Prepare a filesystem. */
+  SVN_ERR(svn_test__create_fs(&fs, "test-repo-set-uuid",
+                              opts, pool));
+
+  /* Set the repository UUID to something fixed. */
+  SVN_ERR(svn_fs_set_uuid(fs, fixed_uuid, pool));
+
+  /* Make sure we get back what we set. */
+  SVN_ERR(svn_fs_get_uuid(fs, &fetched_uuid, pool));
+  if (strcmp(fixed_uuid, fetched_uuid) != 0)
+    return svn_error_createf
+      (SVN_ERR_TEST_FAILED, NULL, "expected UUID '%s'; got '%s'",
+       fixed_uuid, fetched_uuid);
+
+  /* Set the repository UUID to something new (and unknown). */
+  SVN_ERR(svn_fs_set_uuid(fs, NULL, pool));
+
+  /* Make sure we *don't* get back what we previously set (after all,
+     this stuff is supposed to be universally unique!). */
+  SVN_ERR(svn_fs_get_uuid(fs, &fetched_uuid, pool));
+  if (strcmp(fixed_uuid, fetched_uuid) == 0)
+    return svn_error_createf
+      (SVN_ERR_TEST_FAILED, NULL,
+       "expected something other than UUID '%s', but got that one",
+       fixed_uuid);
+
+  return SVN_NO_ERROR;
+}
+
+/* Test svn_fs_node_origin_rev(): build seven revisions of copies,
+ * deletes, restores and replacements around A/D and A/D2, then check
+ * that each surviving path reports the revision its node line was
+ * first created in.  Also checks queries against a txn root, where
+ * nodes created inside the txn must report an invalid revision. */
+static svn_error_t *
+node_origin_rev(const svn_test_opts_t *opts,
+                apr_pool_t *pool)
+{
+  apr_pool_t *subpool = svn_pool_create(pool);
+  svn_fs_t *fs;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *txn_root, *root;
+  svn_revnum_t youngest_rev = 0;
+  int i;
+
+  /* (path, expected origin revision) pair used by the check loops. */
+  struct path_rev_t {
+    const char *path;
+    svn_revnum_t rev;
+  };
+
+  /* Create the repository. */
+  SVN_ERR(svn_test__create_fs(&fs, "test-repo-node-origin-rev",
+                              opts, pool));
+
+  /* Revision 1: Create the Greek tree.  */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_test__create_greek_tree(txn_root, subpool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(subpool);
+
+  /* Revision 2: Modify A/D/H/chi and A/B/E/alpha.  */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/H/chi", "2", subpool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/B/E/alpha", "2", subpool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(subpool);
+
+  /* Revision 3: Copy A/D to A/D2, and create A/D2/floop new.  */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_fs_revision_root(&root, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_copy(root, "A/D", txn_root, "A/D2", subpool));
+  SVN_ERR(svn_fs_make_file(txn_root, "A/D2/floop", subpool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(subpool);
+
+  /* Revision 4: Modify A/D/H/chi and A/D2/H/chi.  */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/H/chi", "4", subpool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/D2/H/chi", "4", subpool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(subpool);
+
+  /* Revision 5: Delete A/D2/G, add A/B/E/alfalfa.  */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_fs_delete(txn_root, "A/D2/G", subpool));
+  SVN_ERR(svn_fs_make_file(txn_root, "A/B/E/alfalfa", subpool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(subpool);
+
+  /* Revision 6: Restore A/D2/G (from version 4).  */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_fs_revision_root(&root, fs, 4, subpool));
+  SVN_ERR(svn_fs_copy(root, "A/D2/G", txn_root, "A/D2/G", subpool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(subpool);
+
+  /* Revision 7: Move A/D2 to A/D (replacing it), Add a new file A/D2,
+     and tweak A/D/floop.  */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_fs_revision_root(&root, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_delete(txn_root, "A/D", subpool));
+  SVN_ERR(svn_fs_copy(root, "A/D2", txn_root, "A/D", subpool));
+  SVN_ERR(svn_fs_delete(txn_root, "A/D2", subpool));
+  SVN_ERR(svn_fs_make_file(txn_root, "A/D2", subpool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/floop", "7", subpool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(subpool);
+
+  /* Now test some origin revisions. */
+  {
+    struct path_rev_t pathrevs[5] = { { "A/D",           1 },
+                                      { "A/D/floop",     3 },
+                                      { "A/D2",          7 },
+                                      { "iota",          1 },
+                                      { "A/B/E/alfalfa", 5 } };
+
+    SVN_ERR(svn_fs_revision_root(&root, fs, youngest_rev, pool));
+    for (i = 0; i < (sizeof(pathrevs) / sizeof(struct path_rev_t)); i++)
+      {
+        struct path_rev_t path_rev = pathrevs[i];
+        svn_revnum_t revision;
+        SVN_ERR(svn_fs_node_origin_rev(&revision, root, path_rev.path, pool));
+        if (path_rev.rev != revision)
+          return svn_error_createf
+            (SVN_ERR_TEST_FAILED, NULL,
+             "expected origin revision of '%ld' for '%s'; got '%ld'",
+             path_rev.rev, path_rev.path, revision);
+      }
+  }
+
+  /* Also, we'll check a couple of queries into a transaction root. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_fs_make_file(txn_root, "bloop", subpool));
+  SVN_ERR(svn_fs_make_dir(txn_root, "A/D/blarp", subpool));
+
+  {
+    /* -1 marks "expect no valid origin revision" for the txn-created
+       nodes; the loop maps SVN_INVALID_REVNUM onto -1 before comparing. */
+    struct path_rev_t pathrevs[6] = { { "A/D",            1 },
+                                      { "A/D/floop",      3 },
+                                      { "bloop",         -1 },
+                                      { "A/D/blarp",     -1 },
+                                      { "iota",           1 },
+                                      { "A/B/E/alfalfa",  5 } };
+
+    root = txn_root;
+    for (i = 0; i < (sizeof(pathrevs) / sizeof(struct path_rev_t)); i++)
+      {
+        struct path_rev_t path_rev = pathrevs[i];
+        svn_revnum_t revision;
+        SVN_ERR(svn_fs_node_origin_rev(&revision, root, path_rev.path, pool));
+        if (! SVN_IS_VALID_REVNUM(revision))
+          revision = -1;
+        if (path_rev.rev != revision)
+          return svn_error_createf
+            (SVN_ERR_TEST_FAILED, NULL,
+             "expected origin revision of '%ld' for '%s'; got '%ld'",
+             path_rev.rev, path_rev.path, revision);
+      }
+  }
+
+  /* NOTE(review): SUBPOOL is never destroyed here (and the final txn is
+     left open); the harness's cleanup of POOL covers it, but a
+     svn_pool_destroy(subpool) would match the sibling tests. */
+  return SVN_NO_ERROR;
+}
+
+
+/* Helper: call svn_fs_history_location() on HISTORY and verify that it
+   reports EXPECTED_PATH@EXPECTED_REVISION.  Either or both of the
+   expected/actual paths may be NULL; a NULL/non-NULL disagreement is a
+   failure.  Return SVN_ERR_TEST_FAILED on mismatch; use POOL for
+   temporary allocations. */
+static svn_error_t *
+check_history_location(const char *expected_path,
+                       svn_revnum_t expected_revision,
+                       svn_fs_history_t *history,
+                       apr_pool_t *pool)
+{
+  const char *actual_path;
+  svn_revnum_t actual_revision;
+
+  SVN_ERR(svn_fs_history_location(&actual_path, &actual_revision,
+                                  history, pool));
+
+  /* Validate the location against our expectations. */
+  if (actual_revision != expected_revision
+      || (actual_path && expected_path && strcmp(actual_path, expected_path))
+      || (actual_path != NULL) != (expected_path != NULL))
+    {
+      /* Fixed: the "found" format string was missing its closing
+         single quote after %ld. */
+      return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                               "svn_fs_history_location() failed:\n"
+                               "  expected '%s@%ld'\n"
+                               "     found '%s@%ld'",
+                               expected_path ? expected_path : "(null)",
+                               expected_revision,
+                               actual_path ? actual_path : "(null)",
+                               actual_revision);
+    }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test svn_fs_history_*(): commit the greek tree, copy A/D/G/pi to
+ * A/D/H/pi2 with a text modification, then walk pi2's history
+ * backwards with svn_fs_history_prev() and verify each reported
+ * (path, revision) location until the history is exhausted (NULL). */
+static svn_error_t *
+node_history(const svn_test_opts_t *opts,
+             apr_pool_t *pool)
+{
+  svn_fs_t *fs;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *txn_root;
+  svn_revnum_t after_rev;
+
+  /* Prepare a txn to receive the greek tree. */
+  SVN_ERR(svn_test__create_fs(&fs, "test-repo-node-history",
+                              opts, pool));
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+  /* Create and verify the greek tree. */
+  SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+  SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+
+  /* Make some changes, following copy_test() above. */
+
+  /* r2: copy pi to pi2, with textmods. */
+  {
+    svn_fs_root_t *rev_root;
+
+    SVN_ERR(svn_fs_revision_root(&rev_root, fs, after_rev, pool));
+    SVN_ERR(svn_fs_begin_txn(&txn, fs, after_rev, pool));
+    SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+    SVN_ERR(svn_fs_copy(rev_root, "A/D/G/pi",
+                        txn_root, "A/D/H/pi2",
+                        pool));
+    SVN_ERR(svn_test__set_file_contents
+            (txn_root, "A/D/H/pi2", "This is the file 'pi2'.\n", pool));
+    SVN_ERR(test_commit_txn(&after_rev, txn, NULL, pool));
+  }
+
+  /* Go back in history: pi2@r2 -> pi@r1 */
+  {
+    svn_fs_history_t *history;
+    svn_fs_root_t *rev_root;
+
+    SVN_ERR(svn_fs_revision_root(&rev_root, fs, after_rev, pool));
+
+    /* Fetch a history object, and walk it until its start. */
+
+    SVN_ERR(svn_fs_node_history(&history, rev_root, "A/D/H/pi2", pool));
+    SVN_ERR(check_history_location("/A/D/H/pi2", 2, history, pool));
+
+    /* First prev step stays on the copy destination itself... */
+    SVN_ERR(svn_fs_history_prev(&history, history, TRUE, pool));
+    SVN_ERR(check_history_location("/A/D/H/pi2", 2, history, pool));
+
+    /* ...then crosses the copy back to the source... */
+    SVN_ERR(svn_fs_history_prev(&history, history, TRUE, pool));
+    SVN_ERR(check_history_location("/A/D/G/pi", 1, history, pool));
+
+    /* ...and a NULL history marks the beginning of time. */
+    SVN_ERR(svn_fs_history_prev(&history, history, TRUE, pool));
+    SVN_TEST_ASSERT(history == NULL);
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test svn_fs_delete_fs(): create a filesystem, close it (by
+ * destroying its subpool -- required before deletion, see issue 4264),
+ * delete it, and verify the on-disk path is gone.  The directory is
+ * recreated afterwards so the harness's cleanup doesn't fail. */
+static svn_error_t *
+delete_fs(const svn_test_opts_t *opts,
+          apr_pool_t *pool)
+{
+  const char *path;
+  svn_node_kind_t kind;
+
+  /* We have to use a subpool to close the svn_fs_t before calling
+     svn_fs_delete_fs.  See issue 4264. */
+  {
+    svn_fs_t *fs;
+    apr_pool_t *subpool = svn_pool_create(pool);
+    SVN_ERR(svn_test__create_fs(&fs, "test-repo-delete-fs", opts, subpool));
+    /* Keep PATH alive in POOL; FS itself dies with SUBPOOL below. */
+    path = svn_fs_path(fs, pool);
+    svn_pool_destroy(subpool);
+  }
+
+  SVN_ERR(svn_io_check_path(path, &kind, pool));
+  SVN_TEST_ASSERT(kind != svn_node_none);
+  SVN_ERR(svn_fs_delete_fs(path, pool));
+  SVN_ERR(svn_io_check_path(path, &kind, pool));
+  SVN_TEST_ASSERT(kind == svn_node_none);
+
+  /* Recreate dir so that test cleanup doesn't fail. */
+  SVN_ERR(svn_io_dir_make(path, APR_OS_DEFAULT, pool));
+
+  return SVN_NO_ERROR;
+}
+
+/* Issue 4340, "filenames containing \n corrupt FSFS repositories":
+ * verify that copy, make_file and make_dir all reject a target path
+ * containing a newline with SVN_ERR_FS_PATH_SYNTAX. */
+static svn_error_t *
+filename_trailing_newline(const svn_test_opts_t *opts,
+                          apr_pool_t *pool)
+{
+  apr_pool_t *subpool = svn_pool_create(pool);
+  svn_fs_t *fs;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *txn_root, *root;
+  svn_revnum_t youngest_rev = 0;
+  svn_error_t *err;
+
+  /* The FS API wants \n to be permitted, but FSFS never implemented that.
+   * Moreover, formats like svn:mergeinfo and svn:externals don't support
+   * it either.  So, we can't have newlines in file names in any FS.
+   */
+  SVN_ERR(svn_test__create_fs(&fs, "test-repo-filename-trailing-newline",
+                              opts, pool));
+
+  /* Revision 1:  Add a directory /foo  */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_fs_make_dir(txn_root, "/foo", subpool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(subpool);
+
+  /* Attempt to copy /foo to "/bar\n". This should fail. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_fs_revision_root(&root, fs, youngest_rev, subpool));
+  err = svn_fs_copy(root, "/foo", txn_root, "/bar\n", subpool);
+  SVN_TEST_ASSERT_ERROR(err, SVN_ERR_FS_PATH_SYNTAX);
+
+  /* Attempt to create a file /foo/baz\n. This should fail. */
+  err = svn_fs_make_file(txn_root, "/foo/baz\n", subpool);
+  SVN_TEST_ASSERT_ERROR(err, SVN_ERR_FS_PATH_SYNTAX);
+
+  /* Attempt to create a directory /foo/bang\n. This should fail. */
+  err = svn_fs_make_dir(txn_root, "/foo/bang\n", subpool);
+  SVN_TEST_ASSERT_ERROR(err, SVN_ERR_FS_PATH_SYNTAX);
+
+  /* Release the subpool (and with it the still-open txn) before
+     returning, like the sibling tests do; the original leaked SUBPOOL
+     until the harness destroyed POOL. */
+  svn_pool_destroy(subpool);
+
+  return SVN_NO_ERROR;
+}
+
+/* Test svn_fs_info_format(): create a filesystem pinned to an older
+ * server compatibility version (1.10 for FSX, 1.5 otherwise) and
+ * verify the reported on-disk format number and the oldest tool
+ * version that can read it. */
+static svn_error_t *
+test_fs_info_format(const svn_test_opts_t *opts,
+                    apr_pool_t *pool)
+{
+  svn_fs_t *fs;
+  int fs_format;
+  svn_version_t *supports_version;
+  svn_version_t v1_5_0 = {1, 5, 0, ""};
+  svn_version_t v1_10_0 = {1, 10, 0, ""};
+  svn_test_opts_t opts2;
+  svn_boolean_t is_fsx = strcmp(opts->fs_type, "fsx") == 0;
+
+  /* FSX has no 1.5-era format, so pin it to 1.10 instead. */
+  opts2 = *opts;
+  opts2.server_minor_version = is_fsx ? 10 : 5;
+
+  SVN_ERR(svn_test__create_fs(&fs, "test-repo-fs-format-info", &opts2, pool));
+  SVN_ERR(svn_fs_info_format(&fs_format, &supports_version, fs, pool, pool));
+
+  if (is_fsx)
+    {
+      SVN_TEST_ASSERT(fs_format == 2);
+      SVN_TEST_ASSERT(svn_ver_equal(supports_version, &v1_10_0));
+    }
+  else
+    {
+      /* happens to be the same for FSFS and BDB */
+      SVN_TEST_ASSERT(fs_format == 3);
+      SVN_TEST_ASSERT(svn_ver_equal(supports_version, &v1_5_0));
+    }
+
+  return SVN_NO_ERROR;
+}
+
+/* Sleeps until the apr_time_now() value changes, so that a subsequent
+ * timestamp is guaranteed to differ from one taken before the call.
+ * Polls in 1-millisecond naps (APR_USEC_PER_SEC / 1000). */
+static void sleep_for_timestamps(void)
+{
+  apr_time_t start = apr_time_now();
+
+  while (start == apr_time_now())
+    {
+      apr_sleep(APR_USEC_PER_SEC / 1000);
+    }
+}
+
+static svn_error_t *
+commit_timestamp(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ svn_string_t *date = svn_string_create("Yesterday", pool);
+ svn_revnum_t rev = 0;
+ apr_hash_t *proplist;
+ svn_string_t *svn_date;
+ svn_string_t *txn_svn_date;
+
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-fs-commit-timestamp",
+ opts, pool));
+
+ /* Commit with a specified svn:date. */
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, rev, SVN_FS_TXN_CLIENT_DATE, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "/foo", pool));
+ SVN_ERR(svn_fs_change_txn_prop(txn, SVN_PROP_REVISION_DATE, date, pool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+
+ SVN_ERR(svn_fs_revision_proplist(&proplist, fs, rev, pool));
+ svn_date = apr_hash_get(proplist, SVN_PROP_REVISION_DATE,
+ APR_HASH_KEY_STRING);
+ SVN_TEST_ASSERT(svn_date && !strcmp(svn_date->data, date->data));
+
+ /* Commit that overwrites the specified svn:date. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "/bar", pool));
+ SVN_ERR(svn_fs_change_txn_prop(txn, SVN_PROP_REVISION_DATE, date, pool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+
+ SVN_ERR(svn_fs_revision_proplist(&proplist, fs, rev, pool));
+ svn_date = apr_hash_get(proplist, SVN_PROP_REVISION_DATE,
+ APR_HASH_KEY_STRING);
+ SVN_TEST_ASSERT(svn_date && strcmp(svn_date->data, date->data));
+
+ /* Commit with a missing svn:date. */
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, rev, SVN_FS_TXN_CLIENT_DATE, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "/zag", pool));
+ SVN_ERR(svn_fs_change_txn_prop(txn, SVN_PROP_REVISION_DATE, NULL, pool));
+ SVN_ERR(svn_fs_txn_prop(&svn_date, txn, SVN_PROP_REVISION_DATE, pool));
+ SVN_TEST_ASSERT(!svn_date);
+ SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+
+ SVN_ERR(svn_fs_revision_proplist(&proplist, fs, rev, pool));
+ svn_date = apr_hash_get(proplist, SVN_PROP_REVISION_DATE,
+ APR_HASH_KEY_STRING);
+ SVN_TEST_ASSERT(!svn_date);
+
+ /* Commit that overwrites a missing svn:date. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "/zig", pool));
+ SVN_ERR(svn_fs_change_txn_prop(txn, SVN_PROP_REVISION_DATE, NULL, pool));
+ SVN_ERR(svn_fs_txn_prop(&svn_date, txn, SVN_PROP_REVISION_DATE, pool));
+ SVN_TEST_ASSERT(!svn_date);
+ SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+
+ SVN_ERR(svn_fs_revision_proplist(&proplist, fs, rev, pool));
+ svn_date = apr_hash_get(proplist, SVN_PROP_REVISION_DATE,
+ APR_HASH_KEY_STRING);
+ SVN_TEST_ASSERT(svn_date);
+
+ /* Commit that doesn't do anything special about svn:date. */
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, rev, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "/zig/foo", pool));
+ SVN_ERR(svn_fs_txn_prop(&txn_svn_date, txn, SVN_PROP_REVISION_DATE, pool));
+ SVN_TEST_ASSERT(txn_svn_date);
+ sleep_for_timestamps();
+ SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+
+ SVN_ERR(svn_fs_revision_proplist(&proplist, fs, rev, pool));
+ svn_date = apr_hash_get(proplist, SVN_PROP_REVISION_DATE,
+ APR_HASH_KEY_STRING);
+ SVN_TEST_ASSERT(svn_date);
+ SVN_TEST_ASSERT(!svn_string_compare(svn_date, txn_svn_date));
+
+ /* Commit that instructs the backend to use a specific svn:date, but
+ * doesn't provide one. This used to fail with BDB prior to r1663697. */
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, rev, SVN_FS_TXN_CLIENT_DATE, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "/zig/bar", pool));
+ SVN_ERR(svn_fs_txn_prop(&txn_svn_date, txn, SVN_PROP_REVISION_DATE, pool));
+ SVN_TEST_ASSERT(txn_svn_date);
+ sleep_for_timestamps();
+ SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+
+ SVN_ERR(svn_fs_revision_proplist(&proplist, fs, rev, pool));
+ svn_date = apr_hash_get(proplist, SVN_PROP_REVISION_DATE,
+ APR_HASH_KEY_STRING);
+ SVN_TEST_ASSERT(svn_date);
+ SVN_TEST_ASSERT(!svn_string_compare(svn_date, txn_svn_date));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_compat_version(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_version_t *compatible_version;
+ apr_hash_t *config = apr_hash_make(pool);
+
+ svn_version_t vcurrent = {SVN_VER_MAJOR, SVN_VER_MINOR, 0, ""};
+ svn_version_t v1_2_0 = {1, 2, 0, ""};
+ svn_version_t v1_3_0 = {1, 3, 0, ""};
+ svn_version_t v1_5_0 = {1, 5, 0, ""};
+
+ /* no version specified -> default to the current one */
+ SVN_ERR(svn_fs__compatible_version(&compatible_version, config, pool));
+ SVN_TEST_ASSERT(svn_ver_equal(compatible_version, &vcurrent));
+
+ /* test specific compat option */
+ svn_hash_sets(config, SVN_FS_CONFIG_PRE_1_6_COMPATIBLE, "1");
+ SVN_ERR(svn_fs__compatible_version(&compatible_version, config, pool));
+ SVN_TEST_ASSERT(svn_ver_equal(compatible_version, &v1_5_0));
+
+ /* test precedence amongst compat options */
+ svn_hash_sets(config, SVN_FS_CONFIG_PRE_1_8_COMPATIBLE, "1");
+ SVN_ERR(svn_fs__compatible_version(&compatible_version, config, pool));
+ SVN_TEST_ASSERT(svn_ver_equal(compatible_version, &v1_5_0));
+
+ svn_hash_sets(config, SVN_FS_CONFIG_PRE_1_4_COMPATIBLE, "1");
+ SVN_ERR(svn_fs__compatible_version(&compatible_version, config, pool));
+ SVN_TEST_ASSERT(svn_ver_equal(compatible_version, &v1_3_0));
+
+ /* precedence should work with the generic option as well */
+ svn_hash_sets(config, SVN_FS_CONFIG_COMPATIBLE_VERSION, "1.4.17-??");
+ SVN_ERR(svn_fs__compatible_version(&compatible_version, config, pool));
+ SVN_TEST_ASSERT(svn_ver_equal(compatible_version, &v1_3_0));
+
+ svn_hash_sets(config, SVN_FS_CONFIG_COMPATIBLE_VERSION, "1.2.3-no!");
+ SVN_ERR(svn_fs__compatible_version(&compatible_version, config, pool));
+ SVN_TEST_ASSERT(svn_ver_equal(compatible_version, &v1_2_0));
+
+ /* test generic option alone */
+ config = apr_hash_make(pool);
+ svn_hash_sets(config, SVN_FS_CONFIG_COMPATIBLE_VERSION, "1.2.3-no!");
+ SVN_ERR(svn_fs__compatible_version(&compatible_version, config, pool));
+ SVN_TEST_ASSERT(svn_ver_equal(compatible_version, &v1_2_0));
+
+ /* out of range values should be capped at the current tool version */
+ svn_hash_sets(config, SVN_FS_CONFIG_COMPATIBLE_VERSION, "2.3.4-x");
+ SVN_ERR(svn_fs__compatible_version(&compatible_version, config, pool));
+ SVN_TEST_ASSERT(svn_ver_equal(compatible_version, &vcurrent));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+dir_prop_merge(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_revnum_t head_rev;
+ svn_fs_root_t *root;
+ svn_fs_txn_t *txn, *mid_txn, *top_txn, *sub_txn, *c_txn;
+ svn_boolean_t is_bdb = strcmp(opts->fs_type, SVN_FS_TYPE_BDB) == 0;
+
+ /* Create test repository. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-fs-dir_prop-merge", opts,
+ pool));
+
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+
+ /* Create and verify the greek tree. */
+ SVN_ERR(svn_test__create_greek_tree(root, pool));
+ SVN_ERR(test_commit_txn(&head_rev, txn, NULL, pool));
+
+ /* Start concurrent transactions */
+
+ /* 1st: modify a mid-level directory */
+ SVN_ERR(svn_fs_begin_txn2(&mid_txn, fs, head_rev, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&root, mid_txn, pool));
+ SVN_ERR(svn_fs_change_node_prop(root, "A/D", "test-prop",
+ svn_string_create("val1", pool), pool));
+ svn_fs_close_root(root);
+
+ /* 2nd: modify a top-level directory */
+ SVN_ERR(svn_fs_begin_txn2(&top_txn, fs, head_rev, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&root, top_txn, pool));
+ SVN_ERR(svn_fs_change_node_prop(root, "A", "test-prop",
+ svn_string_create("val2", pool), pool));
+ svn_fs_close_root(root);
+
+ SVN_ERR(svn_fs_begin_txn2(&sub_txn, fs, head_rev, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&root, sub_txn, pool));
+ SVN_ERR(svn_fs_change_node_prop(root, "A/D/G", "test-prop",
+ svn_string_create("val3", pool), pool));
+ svn_fs_close_root(root);
+
+ /* 3rd: modify a conflicting change to the mid-level directory */
+ SVN_ERR(svn_fs_begin_txn2(&c_txn, fs, head_rev, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&root, c_txn, pool));
+ SVN_ERR(svn_fs_change_node_prop(root, "A/D", "test-prop",
+ svn_string_create("valX", pool), pool));
+ svn_fs_close_root(root);
+
+ /* Prop changes to the same node should conflict */
+ SVN_ERR(test_commit_txn(&head_rev, mid_txn, NULL, pool));
+ SVN_ERR(test_commit_txn(&head_rev, c_txn, "/A/D", pool));
+ SVN_ERR(svn_fs_abort_txn(c_txn, pool));
+
+ /* Changes in a sub-tree should not conflict with prop changes to some
+ parent directory but some backends are cleverer than others. */
+ if (is_bdb)
+ {
+ SVN_ERR(test_commit_txn(&head_rev, top_txn, "/A", pool));
+ SVN_ERR(svn_fs_abort_txn(top_txn, pool));
+ }
+ else
+ {
+ SVN_ERR(test_commit_txn(&head_rev, top_txn, NULL, pool));
+ }
+
+ /* The inverted case is not that trivial to handle. Hence, conflict.
+ Depending on the checking order, the reported conflict path differs. */
+ SVN_ERR(test_commit_txn(&head_rev, sub_txn, is_bdb ? "/A/D" : "/A", pool));
+ SVN_ERR(svn_fs_abort_txn(sub_txn, pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+upgrade_while_committing(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_revnum_t head_rev = 0;
+ svn_fs_root_t *root;
+ svn_fs_txn_t *txn1, *txn2;
+ const char *fs_path;
+ apr_hash_t *fs_config = apr_hash_make(pool);
+
+ /* Bail (with success) on known-untestable scenarios */
+ if (strcmp(opts->fs_type, "fsfs") != 0)
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "this will test FSFS repositories only");
+
+ if (opts->server_minor_version && (opts->server_minor_version < 6))
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "pre-1.6 SVN doesn't support FSFS packing");
+
+ /* Create test repository with greek tree. */
+ fs_path = "test-repo-upgrade-while-committing";
+
+ svn_hash_sets(fs_config, SVN_FS_CONFIG_COMPATIBLE_VERSION, "1.7");
+ svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_SHARD_SIZE, "2");
+ SVN_ERR(svn_test__create_fs2(&fs, fs_path, opts, fs_config, pool));
+
+ SVN_ERR(svn_fs_begin_txn(&txn1, fs, head_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&root, txn1, pool));
+ SVN_ERR(svn_test__create_greek_tree(root, pool));
+ SVN_ERR(test_commit_txn(&head_rev, txn1, NULL, pool));
+
+ /* Create txn with changes. */
+ SVN_ERR(svn_fs_begin_txn(&txn1, fs, head_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&root, txn1, pool));
+ SVN_ERR(svn_fs_make_dir(root, "/foo", pool));
+
+ /* Upgrade filesystem, but keep existing svn_fs_t object. */
+ SVN_ERR(svn_fs_upgrade(fs_path, pool));
+
+ /* Creating a new txn for the old svn_fs_t should not fail. */
+ SVN_ERR(svn_fs_begin_txn(&txn2, fs, head_rev, pool));
+
+ /* Committing the already existing txn should not fail. */
+ SVN_ERR(test_commit_txn(&head_rev, txn1, NULL, pool));
+
+ /* Verify filesystem content. */
+ SVN_ERR(svn_fs_verify(fs_path, NULL, 0, SVN_INVALID_REVNUM, NULL, NULL,
+ NULL, NULL, pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* Utility method for test_paths_changed. Verify that REV in FS changes
+ * exactly one path and that that change is a property change. Expect
+ * the MERGEINFO_MOD flag of the change to have the given value.
+ */
+static svn_error_t *
+verify_root_prop_change(svn_fs_t *fs,
+ svn_revnum_t rev,
+ svn_tristate_t mergeinfo_mod,
+ apr_pool_t *pool)
+{
+ svn_fs_path_change2_t *change;
+ svn_fs_root_t *root;
+ apr_hash_t *changes;
+
+ SVN_ERR(svn_fs_revision_root(&root, fs, rev, pool));
+ SVN_ERR(svn_fs_paths_changed2(&changes, root, pool));
+ SVN_TEST_ASSERT(apr_hash_count(changes) == 1);
+ change = svn_hash_gets(changes, "/");
+
+ SVN_TEST_ASSERT(change->node_rev_id);
+ SVN_TEST_ASSERT(change->change_kind == svn_fs_path_change_modify);
+ SVN_TEST_ASSERT( change->node_kind == svn_node_dir
+ || change->node_kind == svn_node_unknown);
+ SVN_TEST_ASSERT(change->text_mod == FALSE);
+ SVN_TEST_ASSERT(change->prop_mod == TRUE);
+
+ if (change->copyfrom_known)
+ {
+ SVN_TEST_ASSERT(change->copyfrom_rev == SVN_INVALID_REVNUM);
+ SVN_TEST_ASSERT(change->copyfrom_path == NULL);
+ }
+
+ SVN_TEST_ASSERT(change->mergeinfo_mod == mergeinfo_mod);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_paths_changed(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_revnum_t head_rev = 0;
+ svn_fs_root_t *root;
+ svn_fs_txn_t *txn;
+ const char *fs_path;
+ apr_hash_t *changes;
+ svn_boolean_t has_mergeinfo_mod = FALSE;
+ apr_hash_index_t *hi;
+ int i;
+
+ /* The "mergeinfo_mod" flag would say "unknown" until recently. */
+ if ( strcmp(opts->fs_type, SVN_FS_TYPE_BDB) != 0
+ && (!opts->server_minor_version || (opts->server_minor_version >= 9)))
+ has_mergeinfo_mod = TRUE;
+
+ /* Create test repository with greek tree. */
+ fs_path = "test-repo-paths-changed";
+
+ SVN_ERR(svn_test__create_fs2(&fs, fs_path, opts, NULL, pool));
+
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, head_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+ SVN_ERR(svn_test__create_greek_tree(root, pool));
+ SVN_ERR(test_commit_txn(&head_rev, txn, NULL, pool));
+
+ /* Create txns with various prop changes. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, head_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+ SVN_ERR(svn_fs_change_node_prop(root, "/", "propname",
+ svn_string_create("propval", pool), pool));
+ SVN_ERR(test_commit_txn(&head_rev, txn, NULL, pool));
+
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, head_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+ SVN_ERR(svn_fs_change_node_prop(root, "/", "svn:mergeinfo",
+ svn_string_create("/: 1\n", pool), pool));
+ SVN_ERR(test_commit_txn(&head_rev, txn, NULL, pool));
+
+ /* Verify changed path lists. */
+
+ /* Greek tree creation rev. */
+ SVN_ERR(svn_fs_revision_root(&root, fs, head_rev - 2, pool));
+ SVN_ERR(svn_fs_paths_changed2(&changes, root, pool));
+
+ /* Reports all paths? */
+ for (i = 0; svn_test__greek_tree_nodes[i].path; ++i)
+ {
+ const char *path
+ = svn_fspath__canonicalize(svn_test__greek_tree_nodes[i].path, pool);
+
+ SVN_TEST_ASSERT(svn_hash_gets(changes, path));
+ }
+
+ SVN_TEST_ASSERT(apr_hash_count(changes) == i);
+
+ /* Verify per-path info. */
+ for (hi = apr_hash_first(pool, changes); hi; hi = apr_hash_next(hi))
+ {
+ svn_fs_path_change2_t *change = apr_hash_this_val(hi);
+
+ SVN_TEST_ASSERT(change->node_rev_id);
+ SVN_TEST_ASSERT(change->change_kind == svn_fs_path_change_add);
+ SVN_TEST_ASSERT( change->node_kind == svn_node_file
+ || change->node_kind == svn_node_dir
+ || change->node_kind == svn_node_unknown);
+
+ if (change->node_kind != svn_node_unknown)
+ SVN_TEST_ASSERT(change->text_mod == ( change->node_kind
+ == svn_node_file));
+
+ SVN_TEST_ASSERT(change->prop_mod == FALSE);
+
+ if (change->copyfrom_known)
+ {
+ SVN_TEST_ASSERT(change->copyfrom_rev == SVN_INVALID_REVNUM);
+ SVN_TEST_ASSERT(change->copyfrom_path == NULL);
+ }
+
+ if (has_mergeinfo_mod)
+ SVN_TEST_ASSERT(change->mergeinfo_mod == svn_tristate_false);
+ else
+ SVN_TEST_ASSERT(change->mergeinfo_mod == svn_tristate_unknown);
+ }
+
+ /* Propset rev. */
+ SVN_ERR(verify_root_prop_change(fs, head_rev - 1,
+ has_mergeinfo_mod ? svn_tristate_false
+ : svn_tristate_unknown,
+ pool));
+
+ /* Mergeinfo set rev. */
+ SVN_ERR(verify_root_prop_change(fs, head_rev,
+ has_mergeinfo_mod ? svn_tristate_true
+ : svn_tristate_unknown,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_delete_replaced_paths_changed(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_revnum_t head_rev = 0;
+ svn_fs_root_t *root;
+ svn_fs_txn_t *txn;
+ const char *fs_path;
+ apr_hash_t *changes;
+ svn_fs_path_change2_t *change;
+ const svn_fs_id_t *file_id;
+
+ /* Create test repository with greek tree. */
+ fs_path = "test-repo-delete-replace-paths-changed";
+
+ SVN_ERR(svn_test__create_fs2(&fs, fs_path, opts, NULL, pool));
+
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, head_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+ SVN_ERR(svn_test__create_greek_tree(root, pool));
+ SVN_ERR(test_commit_txn(&head_rev, txn, NULL, pool));
+
+ /* Create a txn that replaces a file with a folder and then deletes that
+ * replacement. Start with the deletion. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, head_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+ SVN_ERR(svn_fs_delete(root, "/iota", pool));
+
+ /* The change list should now report a deleted file. */
+ SVN_ERR(svn_fs_paths_changed2(&changes, root, pool));
+ change = svn_hash_gets(changes, "/iota");
+ file_id = change->node_rev_id;
+ SVN_TEST_ASSERT( change->node_kind == svn_node_file
+ || change->node_kind == svn_node_unknown);
+ SVN_TEST_ASSERT(change->change_kind == svn_fs_path_change_delete);
+
+ /* Add a replacement. */
+ SVN_ERR(svn_fs_make_dir(root, "/iota", pool));
+
+ /* The change list now reports a replacement by a directory. */
+ SVN_ERR(svn_fs_paths_changed2(&changes, root, pool));
+ change = svn_hash_gets(changes, "/iota");
+ SVN_TEST_ASSERT( change->node_kind == svn_node_dir
+ || change->node_kind == svn_node_unknown);
+ SVN_TEST_ASSERT(change->change_kind == svn_fs_path_change_replace);
+ SVN_TEST_ASSERT(svn_fs_compare_ids(change->node_rev_id, file_id) != 0);
+
+ /* Delete the replacement again. */
+ SVN_ERR(svn_fs_delete(root, "/iota", pool));
+
+ /* The change list should now be reported as a deleted file again. */
+ SVN_ERR(svn_fs_paths_changed2(&changes, root, pool));
+ change = svn_hash_gets(changes, "/iota");
+ SVN_TEST_ASSERT( change->node_kind == svn_node_file
+ || change->node_kind == svn_node_unknown);
+ SVN_TEST_ASSERT(change->change_kind == svn_fs_path_change_delete);
+ SVN_TEST_ASSERT(svn_fs_compare_ids(change->node_rev_id, file_id) == 0);
+
+ /* Finally, commit the change. */
+ SVN_ERR(test_commit_txn(&head_rev, txn, NULL, pool));
+
+ /* The committed revision should still report the same change. */
+ SVN_ERR(svn_fs_revision_root(&root, fs, head_rev, pool));
+ SVN_ERR(svn_fs_paths_changed2(&changes, root, pool));
+ change = svn_hash_gets(changes, "/iota");
+ SVN_TEST_ASSERT( change->node_kind == svn_node_file
+ || change->node_kind == svn_node_unknown);
+ SVN_TEST_ASSERT(change->change_kind == svn_fs_path_change_delete);
+
+ return SVN_NO_ERROR;
+}
+
+/* Get rid of transaction NAME in FS. This function deals with backend-
+ * specific behavior as permitted by the API. */
+static svn_error_t *
+cleanup_txn(svn_fs_t *fs,
+ const char *name,
+ apr_pool_t *scratch_pool)
+{
+ /* Get rid of the txns one at a time. */
+ svn_error_t *err = svn_fs_purge_txn(fs, name, scratch_pool);
+
+ /* Some backends (BDB) don't support purging transactions that have never
+ * seen an abort or commit attempt. Simply abort those txns. */
+ if (err && err->apr_err == SVN_ERR_FS_TRANSACTION_NOT_DEAD)
+ {
+ svn_fs_txn_t *txn;
+ svn_error_clear(err);
+ err = SVN_NO_ERROR;
+
+ SVN_ERR(svn_fs_open_txn(&txn, fs, name, scratch_pool));
+ SVN_ERR(svn_fs_abort_txn(txn, scratch_pool));
+
+ /* Should be gone now ... */
+ SVN_TEST_ASSERT_ERROR(svn_fs_open_txn(&txn, fs, name, scratch_pool),
+ SVN_ERR_FS_NO_SUCH_TRANSACTION);
+ }
+
+ return svn_error_trace(err);
+}
+
+/* Make sure we get txn lists correctly. */
+static svn_error_t *
+purge_txn_test(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ const char *name1, *name2;
+ apr_array_header_t *txn_list;
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-purge-txn",
+ opts, pool));
+
+ /* Begin a new transaction, get its name (in the top pool), close it. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
+ SVN_ERR(svn_fs_txn_name(&name1, txn, pool));
+
+ /* Begin *another* transaction, get its name (in the top pool), close it. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
+ SVN_ERR(svn_fs_txn_name(&name2, txn, pool));
+ svn_pool_clear(subpool);
+
+ /* Get rid of the txns one at a time. */
+ SVN_ERR(cleanup_txn(fs, name1, pool));
+
+ /* There should be exactly one left. */
+ SVN_ERR(svn_fs_list_transactions(&txn_list, fs, pool));
+
+ /* Check the list. It should have *exactly* one entry. */
+ SVN_TEST_ASSERT( txn_list->nelts == 1
+ && !strcmp(name2, APR_ARRAY_IDX(txn_list, 0, const char *)));
+
+ /* Get rid of the other txn as well. */
+ SVN_ERR(cleanup_txn(fs, name2, pool));
+
+ /* Now there should be none left. */
+ SVN_ERR(svn_fs_list_transactions(&txn_list, fs, pool));
+
+ /* Check the list. It should have no entries. */
+ SVN_TEST_ASSERT(txn_list->nelts == 0);
+
+ return SVN_NO_ERROR;
+}
+
+/* Test svn_fs_{contents,props}_{different,changed}().
+ * ### This currently only tests them on revision roots, not on txn roots.
+ */
+static svn_error_t *
+compare_contents(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *root1, *root2;
+ const char *original = "original contents";
+ svn_revnum_t rev;
+ int i;
+ apr_pool_t *iterpool = svn_pool_create(pool);
+ svn_boolean_t changed;
+
+ /* Two similar but different texts that yield the same MD5 digest. */
+ const char *evil_text1
+ = "\xd1\x31\xdd\x02\xc5\xe6\xee\xc4\x69\x3d\x9a\x06\x98\xaf\xf9\x5c"
+ "\x2f\xca\xb5\x87\x12\x46\x7e\xab\x40\x04\x58\x3e\xb8\xfb\x7f\x89"
+ "\x55\xad\x34\x06\x09\xf4\xb3\x02\x83\xe4\x88\x83\x25\x71\x41\x5a"
+ "\x08\x51\x25\xe8\xf7\xcd\xc9\x9f\xd9\x1d\xbd\xf2\x80\x37\x3c\x5b"
+ "\xd8\x82\x3e\x31\x56\x34\x8f\x5b\xae\x6d\xac\xd4\x36\xc9\x19\xc6"
+ "\xdd\x53\xe2\xb4\x87\xda\x03\xfd\x02\x39\x63\x06\xd2\x48\xcd\xa0"
+ "\xe9\x9f\x33\x42\x0f\x57\x7e\xe8\xce\x54\xb6\x70\x80\xa8\x0d\x1e"
+ "\xc6\x98\x21\xbc\xb6\xa8\x83\x93\x96\xf9\x65\x2b\x6f\xf7\x2a\x70";
+ const char *evil_text2
+ = "\xd1\x31\xdd\x02\xc5\xe6\xee\xc4\x69\x3d\x9a\x06\x98\xaf\xf9\x5c"
+ "\x2f\xca\xb5\x07\x12\x46\x7e\xab\x40\x04\x58\x3e\xb8\xfb\x7f\x89"
+ "\x55\xad\x34\x06\x09\xf4\xb3\x02\x83\xe4\x88\x83\x25\xf1\x41\x5a"
+ "\x08\x51\x25\xe8\xf7\xcd\xc9\x9f\xd9\x1d\xbd\x72\x80\x37\x3c\x5b"
+ "\xd8\x82\x3e\x31\x56\x34\x8f\x5b\xae\x6d\xac\xd4\x36\xc9\x19\xc6"
+ "\xdd\x53\xe2\x34\x87\xda\x03\xfd\x02\x39\x63\x06\xd2\x48\xcd\xa0"
+ "\xe9\x9f\x33\x42\x0f\x57\x7e\xe8\xce\x54\xb6\x70\x80\x28\x0d\x1e"
+ "\xc6\x98\x21\xbc\xb6\xa8\x83\x93\x96\xf9\x65\xab\x6f\xf7\x2a\x70";
+ svn_checksum_t *checksum1, *checksum2;
+
+ /* (path, rev) pairs to compare plus the expected API return values */
+ struct
+ {
+ svn_revnum_t rev1;
+ const char *path1;
+ svn_revnum_t rev2;
+ const char *path2;
+
+ svn_boolean_t different; /* result of svn_fs_*_different */
+ svn_tristate_t changed; /* result of svn_fs_*_changed */
+ } to_compare[] =
+ {
+ /* same representation */
+ { 1, "foo", 2, "foo", FALSE, svn_tristate_false },
+ { 1, "foo", 2, "bar", FALSE, svn_tristate_false },
+ { 2, "foo", 2, "bar", FALSE, svn_tristate_false },
+
+ /* different content but MD5 check is not reliable */
+ { 3, "foo", 3, "bar", TRUE, svn_tristate_true },
+
+ /* different contents */
+ { 1, "foo", 3, "bar", TRUE, svn_tristate_true },
+ { 1, "foo", 3, "foo", TRUE, svn_tristate_true },
+ { 3, "foo", 4, "bar", TRUE, svn_tristate_true },
+ { 3, "foo", 4, "foo", TRUE, svn_tristate_true },
+ { 2, "bar", 3, "bar", TRUE, svn_tristate_true },
+ { 3, "bar", 4, "bar", TRUE, svn_tristate_true },
+
+ /* variations on the same theme: same content, possibly different rep */
+ { 4, "foo", 4, "bar", FALSE, svn_tristate_unknown },
+ { 1, "foo", 4, "bar", FALSE, svn_tristate_unknown },
+ { 2, "foo", 4, "bar", FALSE, svn_tristate_unknown },
+ { 1, "foo", 4, "foo", FALSE, svn_tristate_unknown },
+ { 2, "foo", 4, "foo", FALSE, svn_tristate_unknown },
+ { 2, "bar", 4, "bar", FALSE, svn_tristate_unknown },
+
+ /* EOL */
+ { 0 },
+ };
+
+ /* Same same, but different.
+ * Just checking that we actually have an MD5 collision. */
+ SVN_ERR(svn_checksum(&checksum1, svn_checksum_md5, evil_text1,
+ strlen(evil_text1), pool));
+ SVN_ERR(svn_checksum(&checksum2, svn_checksum_md5, evil_text2,
+ strlen(evil_text2), pool));
+ SVN_TEST_ASSERT(svn_checksum_match(checksum1, checksum2));
+ SVN_TEST_ASSERT(strcmp(evil_text1, evil_text2));
+
+ /* Now, build up our test repo. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-compare-contents",
+ opts, pool));
+
+ /* Rev 1: create a file. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, iterpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool));
+ SVN_ERR(svn_fs_make_file(txn_root, "foo", iterpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "foo", original, iterpool));
+ SVN_ERR(svn_fs_change_node_prop(txn_root, "foo", "prop",
+ svn_string_create(original, iterpool),
+ iterpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, iterpool));
+ SVN_TEST_ASSERT(rev == 1);
+ svn_pool_clear(iterpool);
+
+ /* Rev 2: copy that file. */
+ SVN_ERR(svn_fs_revision_root(&root1, fs, rev, iterpool));
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, iterpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool));
+ SVN_ERR(svn_fs_copy(root1, "foo", txn_root, "bar", iterpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, iterpool));
+ SVN_TEST_ASSERT(rev == 2);
+ svn_pool_clear(iterpool);
+
+ /* Rev 3: modify both files.
+ * The new contents differs for both files but has the same length and MD5.
+ */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, iterpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "foo", evil_text1, iterpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "bar", evil_text2, iterpool));
+ SVN_ERR(svn_fs_change_node_prop(txn_root, "foo", "prop",
+ svn_string_create(evil_text1, iterpool),
+ iterpool));
+ SVN_ERR(svn_fs_change_node_prop(txn_root, "bar", "prop",
+ svn_string_create(evil_text2, iterpool),
+ iterpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, iterpool));
+ SVN_TEST_ASSERT(rev == 3);
+ svn_pool_clear(iterpool);
+
+ /* Rev 4: revert both file contents.
+ */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, iterpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "foo", original, iterpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "bar", original, iterpool));
+ SVN_ERR(svn_fs_change_node_prop(txn_root, "foo", "prop",
+ svn_string_create(original, iterpool),
+ iterpool));
+ SVN_ERR(svn_fs_change_node_prop(txn_root, "bar", "prop",
+ svn_string_create(original, iterpool),
+ iterpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, iterpool));
+ SVN_TEST_ASSERT(rev == 4);
+ svn_pool_clear(iterpool);
+
+ /* Perform all comparisons listed in TO_COMPARE. */
+ for (i = 0; to_compare[i].rev1 > 0; ++i)
+ {
+ svn_boolean_t text_different;
+ svn_boolean_t text_changed;
+ svn_boolean_t props_different;
+ svn_boolean_t props_changed;
+
+ svn_pool_clear(iterpool);
+ SVN_ERR(svn_fs_revision_root(&root1, fs, to_compare[i].rev1, iterpool));
+ SVN_ERR(svn_fs_revision_root(&root2, fs, to_compare[i].rev2, iterpool));
+
+ /* Compare node texts. */
+ SVN_ERR(svn_fs_contents_different(&text_different,
+ root1, to_compare[i].path1,
+ root2, to_compare[i].path2,
+ iterpool));
+ SVN_ERR(svn_fs_contents_changed(&text_changed,
+ root1, to_compare[i].path1,
+ root2, to_compare[i].path2,
+ iterpool));
+
+ /* Compare properties. */
+ SVN_ERR(svn_fs_props_different(&props_different,
+ root1, to_compare[i].path1,
+ root2, to_compare[i].path2,
+ iterpool));
+ SVN_ERR(svn_fs_props_changed(&props_changed,
+ root1, to_compare[i].path1,
+ root2, to_compare[i].path2,
+ iterpool));
+
+ /* Check results. */
+ SVN_TEST_ASSERT(text_different == to_compare[i].different);
+ SVN_TEST_ASSERT(props_different == to_compare[i].different);
+
+ switch (to_compare[i].changed)
+ {
+ case svn_tristate_true:
+ SVN_TEST_ASSERT(text_changed);
+ SVN_TEST_ASSERT(props_changed);
+ break;
+
+ case svn_tristate_false:
+ SVN_TEST_ASSERT(!text_changed);
+ SVN_TEST_ASSERT(!props_changed);
+ break;
+
+ default:
+ break;
+ }
+ }
+
+ /* Check how svn_fs_contents_different() and svn_fs_contents_changed()
+ handle invalid paths. */
+ SVN_ERR(svn_fs_revision_root(&root1, fs, 1, iterpool));
+ SVN_TEST_ASSERT_ANY_ERROR(
+ svn_fs_contents_changed(&changed, root1, "/", root1, "/", iterpool));
+ SVN_TEST_ASSERT_ANY_ERROR(
+ svn_fs_contents_different(&changed, root1, "/", root1, "/", iterpool));
+
+ SVN_TEST_ASSERT_ANY_ERROR(
+ svn_fs_contents_changed(&changed, root1, "/non-existent", root1,
+ "/non-existent", iterpool));
+ SVN_TEST_ASSERT_ANY_ERROR(
+ svn_fs_contents_different(&changed, root1, "/non-existent", root1,
+ "/non-existent", iterpool));
+
+ svn_pool_destroy(iterpool);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_path_change_create(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_root_t *root;
+ const svn_fs_id_t *id;
+ svn_fs_path_change2_t *change;
+
+ /* Build an empty test repo ... */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-path-change-create",
+ opts, pool));
+
+ /* ... just to give us a valid ID. */
+ SVN_ERR(svn_fs_revision_root(&root, fs, 0, pool));
+ SVN_ERR(svn_fs_node_id(&id, root, "", pool));
+
+ /* Do what we came here for. */
+ change = svn_fs_path_change2_create(id, svn_fs_path_change_replace, pool);
+
+ SVN_TEST_ASSERT(change);
+ SVN_TEST_ASSERT(change->node_rev_id == id);
+ SVN_TEST_ASSERT(change->change_kind == svn_fs_path_change_replace);
+
+ /* All other fields should be "empty" / "unused". */
+ SVN_TEST_ASSERT(change->node_kind == svn_node_none);
+
+ SVN_TEST_ASSERT(change->text_mod == FALSE);
+ SVN_TEST_ASSERT(change->prop_mod == FALSE);
+ SVN_TEST_ASSERT(change->mergeinfo_mod == svn_tristate_unknown);
+
+ SVN_TEST_ASSERT(change->copyfrom_known == FALSE);
+ SVN_TEST_ASSERT(change->copyfrom_rev == SVN_INVALID_REVNUM);
+ SVN_TEST_ASSERT(change->copyfrom_path == NULL);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_node_created_info(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *root;
+ svn_revnum_t rev;
+ int i;
+ apr_pool_t *iterpool = svn_pool_create(pool);
+
+ /* Test vectors. */
+ struct
+ {
+ svn_revnum_t rev;
+ const char *path;
+ svn_revnum_t crev;
+ const char *cpath;
+ } to_check[] =
+ {
+ /* New noderev only upon modification. */
+ { 1, "A/B/E/beta", 1, "/A/B/E/beta" },
+ { 2, "A/B/E/beta", 1, "/A/B/E/beta" },
+ { 3, "A/B/E/beta", 3, "/A/B/E/beta" },
+ { 4, "A/B/E/beta", 3, "/A/B/E/beta" },
+
+ /* Lazily copied node. */
+ { 2, "Z/B/E/beta", 1, "/A/B/E/beta" },
+ { 3, "Z/B/E/beta", 1, "/A/B/E/beta" },
+ { 4, "Z/B/E/beta", 4, "/Z/B/E/beta" },
+
+ /* Bubble-up upon sub-tree change. */
+ { 2, "Z", 2, "/Z" },
+ { 3, "Z", 2, "/Z" },
+ { 4, "Z", 4, "/Z" },
+
+ { 0 }
+ };
+
+ /* Start with a new repo and the greek tree in rev 1. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-node-created-path",
+ opts, pool));
+
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, iterpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, iterpool));
+ SVN_ERR(test_commit_txn(&rev, txn, NULL, iterpool));
+ svn_pool_clear(iterpool);
+
+ /* r2: copy a subtree */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, iterpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool));
+ SVN_ERR(svn_fs_revision_root(&root, fs, rev, iterpool));
+ SVN_ERR(svn_fs_copy(root, "A", txn_root, "Z", iterpool));
+ SVN_ERR(test_commit_txn(&rev, txn, NULL, iterpool));
+ svn_pool_clear(iterpool);
+
+ /* r3: touch node in copy source */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, iterpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/B/E/beta", "new", iterpool));
+ SVN_ERR(test_commit_txn(&rev, txn, NULL, iterpool));
+ svn_pool_clear(iterpool);
+
+ /* r4: touch same relative node in copy target */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, iterpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "Z/B/E/beta", "new", iterpool));
+ SVN_ERR(test_commit_txn(&rev, txn, NULL, iterpool));
+ svn_pool_clear(iterpool);
+
+ /* Now ask for some 'node created' info. */
+ for (i = 0; to_check[i].rev > 0; ++i)
+ {
+ svn_revnum_t crev;
+ const char *cpath;
+
+ svn_pool_clear(iterpool);
+
+ /* Get created path and rev. */
+ SVN_ERR(svn_fs_revision_root(&root, fs, to_check[i].rev, iterpool));
+ SVN_ERR(svn_fs_node_created_path(&cpath, root, to_check[i].path,
+ iterpool));
+ SVN_ERR(svn_fs_node_created_rev(&crev, root, to_check[i].path,
+ iterpool));
+
+ /* Compare the results with our expectations. */
+ SVN_TEST_STRING_ASSERT(cpath, to_check[i].cpath);
+
+ if (crev != to_check[i].crev)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "created rev mismatch for %s@%ld:\n"
+ " expected '%ld'\n"
+ " found '%ld",
+ to_check[i].path,
+ to_check[i].rev,
+ to_check[i].crev,
+ crev);
+ }
+
+ svn_pool_destroy(iterpool);
+
+ return SVN_NO_ERROR;
+}
+
+/* Implements svn_test_driver2_t.
+ * Check that svn_fs_print_modules() lists the module which provides the
+ * FS back-end selected by OPTS->fs_type; skip for unknown back-ends. */
+static svn_error_t *
+test_print_modules(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ const char *expected, *module_name;
+ svn_stringbuf_t *modules = svn_stringbuf_create_empty(pool);
+
+ /* Name of the providing module */
+ if (strcmp(opts->fs_type, SVN_FS_TYPE_FSX) == 0)
+ module_name = "fs_x";
+ else if (strcmp(opts->fs_type, SVN_FS_TYPE_FSFS) == 0)
+ module_name = "fs_fs";
+ else if (strcmp(opts->fs_type, SVN_FS_TYPE_BDB) == 0)
+ module_name = "fs_base";
+ else
+ return svn_error_createf(SVN_ERR_TEST_SKIPPED, NULL,
+ "don't know the module name for %s",
+ opts->fs_type);
+
+ SVN_ERR(svn_fs_print_modules(modules, pool));
+
+ /* The requested FS type must be listed amongst the available modules.
+ * Match on the "* NAME : " prefix that svn_fs_print_modules emits. */
+ expected = apr_psprintf(pool, "* %s : ", module_name);
+ SVN_TEST_ASSERT(strstr(modules->data, expected));
+
+ return SVN_NO_ERROR;
+}
+
+/* Baton to be used with process_file_contents. */
+typedef struct process_file_contents_baton_t
+{
+ /* Expected file contents (NUL-terminated). */
+ const char *contents;
+ /* Set to TRUE once the callback has actually been invoked. */
+ svn_boolean_t processed;
+} process_file_contents_baton_t;
+
+/* Implements svn_fs_process_contents_func_t.
+ * We flag the BATON as "processed" and compare the CONTENTS we've got to
+ * what we expect through the BATON.
+ */
+static svn_error_t *
+process_file_contents(const unsigned char *contents,
+ apr_size_t len,
+ void *baton,
+ apr_pool_t *scratch_pool)
+{
+ process_file_contents_baton_t *b = baton;
+
+ /* LEN is a raw byte count with no terminating NUL, so compare it
+ * against strlen() of the expected text before comparing the bytes. */
+ SVN_TEST_ASSERT(strlen(b->contents) == len);
+ SVN_TEST_ASSERT(memcmp(b->contents, contents, len) == 0);
+ b->processed = TRUE;
+
+ return SVN_NO_ERROR;
+}
+
+/* Implements svn_test_driver2_t.
+ * Commit the greek tree, prime the fulltext cache by streaming every file,
+ * then exercise svn_fs_try_process_file_contents(): whenever it reports
+ * SUCCESS, the zero-copy callback must have run and seen the expected text.
+ * NOTE(review): "processsing" in the name is a typo, but renaming would
+ * also require updating the test table entry — left as-is. */
+static svn_error_t *
+test_zero_copy_processsing(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *root;
+ svn_revnum_t rev;
+ const struct svn_test__tree_entry_t *node;
+ apr_pool_t *iterpool = svn_pool_create(pool);
+
+ /* Start with a new repo and the greek tree in rev 1. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-zero-copy-processing",
+ opts, pool));
+
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, iterpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, iterpool));
+ SVN_ERR(test_commit_txn(&rev, txn, NULL, iterpool));
+ svn_pool_clear(iterpool);
+
+ SVN_ERR(svn_fs_revision_root(&root, fs, rev, pool));
+
+ /* Prime the full-text cache by reading all file contents. */
+ for (node = svn_test__greek_tree_nodes; node->path; node++)
+ if (node->contents)
+ {
+ svn_stream_t *stream;
+ svn_pool_clear(iterpool);
+
+ SVN_ERR(svn_fs_file_contents(&stream, root, node->path, iterpool));
+ SVN_ERR(svn_stream_copy3(stream, svn_stream_buffered(iterpool),
+ NULL, NULL, iterpool));
+ }
+
+ /* Now, try to get the data directly from cache
+ * (if the backend has caches). */
+ for (node = svn_test__greek_tree_nodes; node->path; node++)
+ if (node->contents)
+ {
+ svn_boolean_t success;
+
+ process_file_contents_baton_t baton;
+ baton.contents = node->contents;
+ baton.processed = FALSE;
+
+ svn_pool_clear(iterpool);
+
+ SVN_ERR(svn_fs_try_process_file_contents(&success, root, node->path,
+ process_file_contents, &baton,
+ iterpool));
+ /* SUCCESS and the callback's "processed" flag must agree: either
+ * the data came straight from cache and was checked, or neither. */
+ SVN_TEST_ASSERT(success == baton.processed);
+ }
+
+ svn_pool_destroy(iterpool);
+
+ return SVN_NO_ERROR;
+}
+
+/* Implements svn_test_driver2_t.
+ * Check that svn_fs_dir_optimal_order() returns a permutation of the
+ * entries of directory "A": same count, no duplicates, same objects. */
+static svn_error_t *
+test_dir_optimal_order(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *root;
+ svn_revnum_t rev;
+ apr_hash_t *unordered;
+ apr_array_header_t *ordered;
+ int i;
+ apr_hash_index_t *hi;
+
+ /* Create a new repo and the greek tree in rev 1. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-dir-optimal-order",
+ opts, pool));
+
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+ SVN_ERR(test_commit_txn(&rev, txn, NULL, pool));
+
+ SVN_ERR(svn_fs_revision_root(&root, fs, rev, pool));
+
+ /* Call the API function we are interested in. */
+ SVN_ERR(svn_fs_dir_entries(&unordered, root, "A", pool));
+ SVN_ERR(svn_fs_dir_optimal_order(&ordered, root, unordered, pool, pool));
+
+ /* Verify that all entries are returned. */
+ SVN_TEST_ASSERT(ordered->nelts == apr_hash_count(unordered));
+ for (hi = apr_hash_first(pool, unordered); hi; hi = apr_hash_next(hi))
+ {
+ svn_boolean_t found = FALSE;
+ const char *name = apr_hash_this_key(hi);
+
+ /* Compare hash -> array because the array might contain the same
+ * entry more than once. Since that can't happen in the hash, doing
+ * it in this direction ensures ORDERED won't contain duplicates.
+ */
+ for (i = 0; !found && i < ordered->nelts; ++i)
+ {
+ svn_fs_dirent_t *item = APR_ARRAY_IDX(ordered, i, svn_fs_dirent_t*);
+ if (strcmp(item->name, name) == 0)
+ {
+ found = TRUE;
+ /* Same dirent object, not merely an equal name. */
+ SVN_TEST_ASSERT(item == apr_hash_this_val(hi));
+ }
+ }
+
+ SVN_TEST_ASSERT(found);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Implements svn_test_driver2_t.
+ * Check that svn_fs_info_config_files() reports only existing files that
+ * live inside the repository directory. */
+static svn_error_t *
+test_config_files(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ apr_array_header_t *files;
+ int i;
+ const char *repo_name = "test-repo-config-files";
+
+ /* Create an empty filesystem and get its config files. */
+ SVN_ERR(svn_test__create_fs(&fs, repo_name, opts, pool));
+ SVN_ERR(svn_fs_info_config_files(&files, fs, pool, pool));
+
+ /* All files should exist and be below the repo. */
+ for (i = 0; i < files->nelts; ++i)
+ {
+ svn_node_kind_t kind;
+ const char *path = APR_ARRAY_IDX(files, i, const char*);
+
+ SVN_ERR(svn_io_check_path(path, &kind, pool));
+
+ SVN_TEST_ASSERT(kind == svn_node_file);
+ SVN_TEST_ASSERT(svn_dirent_is_ancestor(repo_name, path));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Implements svn_test_driver2_t.
+ * Exercise svn_fs_get_file_delta_stream() three ways: against an empty
+ * source, between consecutive revisions, and in reverse. Each delta is
+ * applied via svn_txdelta_apply() and the result compared to the known
+ * file contents. */
+static svn_error_t *
+test_delta_file_stream(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *root1, *root2;
+ svn_revnum_t rev;
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ const char *old_content = "some content";
+ const char *new_content = "some more content";
+ svn_txdelta_window_handler_t delta_handler;
+ void *delta_baton;
+ svn_txdelta_stream_t *delta_stream;
+ svn_stringbuf_t *source = svn_stringbuf_create_empty(pool);
+ svn_stringbuf_t *dest = svn_stringbuf_create_empty(pool);
+
+ /* Create a new repo. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-delta-file-stream",
+ opts, pool));
+
+ /* Revision 1: create a file. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "foo", pool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "foo", old_content, pool));
+ SVN_ERR(test_commit_txn(&rev, txn, NULL, pool));
+
+ /* Revision 2: modify the file. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "foo", new_content, pool));
+ SVN_ERR(test_commit_txn(&rev, txn, NULL, pool));
+
+ SVN_ERR(svn_fs_revision_root(&root1, fs, 1, pool));
+ SVN_ERR(svn_fs_revision_root(&root2, fs, 2, pool));
+
+ /* Test 1: Get delta against empty target (NULL source root/path). */
+ SVN_ERR(svn_fs_get_file_delta_stream(&delta_stream,
+ NULL, NULL, root1, "foo", subpool));
+
+ svn_stringbuf_setempty(source);
+ svn_stringbuf_setempty(dest);
+
+ svn_txdelta_apply(svn_stream_from_stringbuf(source, subpool),
+ svn_stream_from_stringbuf(dest, subpool),
+ NULL, NULL, subpool, &delta_handler, &delta_baton);
+ SVN_ERR(svn_txdelta_send_txstream(delta_stream,
+ delta_handler,
+ delta_baton,
+ subpool));
+ SVN_TEST_STRING_ASSERT(old_content, dest->data);
+ svn_pool_clear(subpool);
+
+ /* Test 2: Get delta against previous version. */
+ SVN_ERR(svn_fs_get_file_delta_stream(&delta_stream,
+ root1, "foo", root2, "foo", subpool));
+
+ svn_stringbuf_set(source, old_content);
+ svn_stringbuf_setempty(dest);
+
+ svn_txdelta_apply(svn_stream_from_stringbuf(source, subpool),
+ svn_stream_from_stringbuf(dest, subpool),
+ NULL, NULL, subpool, &delta_handler, &delta_baton);
+ SVN_ERR(svn_txdelta_send_txstream(delta_stream,
+ delta_handler,
+ delta_baton,
+ subpool));
+ SVN_TEST_STRING_ASSERT(new_content, dest->data);
+ svn_pool_clear(subpool);
+
+ /* Test 3: Get reverse delta (r2 -> r1). */
+ SVN_ERR(svn_fs_get_file_delta_stream(&delta_stream,
+ root2, "foo", root1, "foo", subpool));
+
+ svn_stringbuf_set(source, new_content);
+ svn_stringbuf_setempty(dest);
+
+ svn_txdelta_apply(svn_stream_from_stringbuf(source, subpool),
+ svn_stream_from_stringbuf(dest, subpool),
+ NULL, NULL, subpool, &delta_handler, &delta_baton);
+ SVN_ERR(svn_txdelta_send_txstream(delta_stream,
+ delta_handler,
+ delta_baton,
+ subpool));
+ SVN_TEST_STRING_ASSERT(old_content, dest->data);
+
+ svn_pool_destroy(subpool);
+
+ return SVN_NO_ERROR;
+}
+
+/* Implements svn_test_driver2_t.
+ * Smoke-test the svn_fs_merge() plumbing: a non-conflicting txn merges
+ * cleanly, a conflicting one fails with SVN_ERR_FS_CONFLICT. */
+static svn_error_t *
+test_fs_merge(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *root0, *root1;
+ svn_revnum_t rev;
+
+ /* Very basic test for svn_fs_merge because all the other interesting
+ * cases get tested implicitly with concurrent txn / commit tests.
+ * This API is just a thin layer around the internal merge function
+ * and we simply check that the plumbing between them works.
+ */
+
+ /* Create a new repo. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-fs-merge",
+ opts, pool));
+ SVN_ERR(svn_fs_revision_root(&root0, fs, 0, pool));
+
+ /* Revision 1: create a file. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "foo", pool));
+ SVN_ERR(test_commit_txn(&rev, txn, NULL, pool));
+ SVN_ERR(svn_fs_revision_root(&root1, fs, rev, pool));
+
+ /* Merge-able txn: create another file. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "bar", pool));
+
+ SVN_ERR(svn_fs_merge(NULL, root1, "/", txn_root, "/", root0, "/", pool));
+
+ /* Not merge-able: create the same file again. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "foo", pool));
+
+ SVN_TEST_ASSERT_ERROR(svn_fs_merge(NULL, root1, "/", txn_root, "/", root0,
+ "/", pool), SVN_ERR_FS_CONFLICT);
+
+ return SVN_NO_ERROR;
+}
+
+/* Implements svn_test_driver2_t.
+ * FSFS only: create one repository with default config and one with
+ * custom settings, then verify that svn_fs_info() reflects the settings
+ * that were written to disk. */
+static svn_error_t *
+test_fsfs_config_opts(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ apr_hash_t *fs_config;
+ svn_fs_t *fs;
+ const svn_fs_info_placeholder_t *fs_info;
+ const svn_fs_fsfs_info_t *fsfs_info;
+ const char *dir_name = "test-repo-fsfs-config-opts";
+ const char *repo_name_default = "test-repo-fsfs-config-opts/default";
+ const char *repo_name_custom = "test-repo-fsfs-config-opts/custom";
+
+ /* Bail (with SKIP) on known-untestable scenarios */
+ if (strcmp(opts->fs_type, SVN_FS_TYPE_FSFS) != 0)
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "this will test FSFS repositories only");
+
+ /* Remove the test directory from previous runs. */
+ SVN_ERR(svn_io_remove_dir2(dir_name, TRUE, NULL, NULL, pool));
+
+ /* Create the test directory and add it to the test cleanup list. */
+ SVN_ERR(svn_io_dir_make(dir_name, APR_OS_DEFAULT, pool));
+ svn_test_add_dir_cleanup(dir_name);
+
+ /* Create an FSFS filesystem with default config.*/
+ fs_config = apr_hash_make(pool);
+ svn_hash_sets(fs_config, SVN_FS_CONFIG_FS_TYPE, SVN_FS_TYPE_FSFS);
+ SVN_ERR(svn_fs_create(&fs, repo_name_default, fs_config, pool));
+
+ /* Re-open FS to test the data on disk. */
+ SVN_ERR(svn_fs_open2(&fs, repo_name_default, NULL, pool, pool));
+
+ SVN_ERR(svn_fs_info(&fs_info, fs, pool, pool));
+ SVN_TEST_STRING_ASSERT(fs_info->fs_type, SVN_FS_TYPE_FSFS);
+ /* Safe downcast: the fs_type check above guarantees FSFS info. */
+ fsfs_info = (const void *) fs_info;
+
+ /* Check FSFS specific info. Don't check the SHARD_SIZE, because it depends
+ * on a compile-time constant and may be overridden. */
+ SVN_TEST_ASSERT(fsfs_info->log_addressing);
+ SVN_TEST_ASSERT(fsfs_info->min_unpacked_rev == 0);
+
+ /* Create an FSFS filesystem with custom settings: disabled log-addressing
+ * and custom shard size (123). */
+ fs_config = apr_hash_make(pool);
+ svn_hash_sets(fs_config, SVN_FS_CONFIG_FS_TYPE, SVN_FS_TYPE_FSFS);
+ svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_LOG_ADDRESSING, "false");
+ svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_SHARD_SIZE, "123");
+ SVN_ERR(svn_fs_create(&fs, repo_name_custom, fs_config, pool));
+
+ /* Re-open FS to test the data on disk. */
+ SVN_ERR(svn_fs_open2(&fs, repo_name_custom, NULL, pool, pool));
+
+ SVN_ERR(svn_fs_info(&fs_info, fs, pool, pool));
+ SVN_TEST_STRING_ASSERT(fs_info->fs_type, SVN_FS_TYPE_FSFS);
+ fsfs_info = (const void *) fs_info;
+
+ /* Check FSFS specific info, including the SHARD_SIZE. */
+ SVN_TEST_ASSERT(fsfs_info->log_addressing == FALSE);
+ SVN_TEST_ASSERT(fsfs_info->shard_size == 123);
+ SVN_TEST_ASSERT(fsfs_info->min_unpacked_rev == 0);
+
+ return SVN_NO_ERROR;
+}
+
+/* Implements svn_test_driver2_t.
+ * Verify that a txn root may be destroyed after the svn_fs_t it was
+ * built upon, i.e. destruction order of the two objects is arbitrary. */
+static svn_error_t *
+test_txn_pool_lifetime(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ /* Technically, the FS API makes no assumption on the lifetime of logically
+ * dependent objects. In particular, a txn root object may get destroyed
+ * after the FS object that it has been built upon. Actual data access is
+ * implied to be invalid without a valid svn_fs_t.
+ *
+ * This test verifies that at least the destruction order of those two
+ * objects is arbitrary.
+ */
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+
+ /* We will allocate FS in FS_POOL. Using a separate allocator makes
+ * sure that we actually free the memory when destroying the pool.
+ */
+ apr_allocator_t *fs_allocator = svn_pool_create_allocator(FALSE);
+ apr_pool_t *fs_pool = apr_allocator_owner_get(fs_allocator);
+
+ /* Create a new repo. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-pool-lifetime",
+ opts, fs_pool));
+
+ /* Create a TXN_ROOT referencing FS. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Destroy FS. Depending on the actual allocator implementation,
+ * these memory pages becomes inaccessible. */
+ svn_pool_destroy(fs_pool);
+
+ /* Unclean implementations will try to access FS and may segfault here. */
+ svn_fs_close_root(txn_root);
+
+ return SVN_NO_ERROR;
+}
+
+/* Implements svn_test_driver2_t.
+ * Regression test: while one file's text stream is open for writing in a
+ * txn, modifying a second file must fail with
+ * SVN_ERR_FS_REP_BEING_WRITTEN, even after the txn is reopened; closing
+ * the first stream lifts the restriction. */
+static svn_error_t *
+test_modify_txn_being_written(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ /* FSFS has a limitation (and check) that only one file can be
+ * modified in TXN at time: see r861812 and svn_fs_apply_text() docstring.
+ * This is regression test for this behavior. */
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ const char *txn_name;
+ svn_fs_root_t *txn_root;
+ svn_stream_t *foo_contents;
+ svn_stream_t *bar_contents;
+
+ /* Bail (with success) on known-untestable scenarios */
+ if (strcmp(opts->fs_type, SVN_FS_TYPE_BDB) == 0)
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "this will not test BDB repositories");
+
+ /* Create a new repo. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-modify-txn-being-written",
+ opts, pool));
+
+ /* Create a TXN_ROOT referencing FS. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_name(&txn_name, txn, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Make file /foo and open for writing.*/
+ SVN_ERR(svn_fs_make_file(txn_root, "/foo", pool));
+ SVN_ERR(svn_fs_apply_text(&foo_contents, txn_root, "/foo", NULL, pool));
+
+ /* Attempt to modify another file '/bar' -- FSFS doesn't allow this. */
+ SVN_ERR(svn_fs_make_file(txn_root, "/bar", pool));
+ SVN_TEST_ASSERT_ERROR(
+ svn_fs_apply_text(&bar_contents, txn_root, "/bar", NULL, pool),
+ SVN_ERR_FS_REP_BEING_WRITTEN);
+
+ /* Reopen TXN. */
+ SVN_ERR(svn_fs_open_txn(&txn, fs, txn_name, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Check that file '/bar' still cannot be modified */
+ SVN_TEST_ASSERT_ERROR(
+ svn_fs_apply_text(&bar_contents, txn_root, "/bar", NULL, pool),
+ SVN_ERR_FS_REP_BEING_WRITTEN);
+
+ /* Close file '/foo'. */
+ SVN_ERR(svn_stream_close(foo_contents));
+
+ /* Now file '/bar' can be modified. */
+ SVN_ERR(svn_fs_apply_text(&bar_contents, txn_root, "/bar", NULL, pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* Implements svn_test_driver2_t.
+ * Regression test for issue 4554: create a file whose text is identical
+ * to an earlier property representation, then check that the reported
+ * file length is correct despite rep-sharing. */
+static svn_error_t *
+test_prop_and_text_rep_sharing_collision(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ /* Regression test for issue 4554: Wrong file length with PLAIN
+ * representations in FSFS. */
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ svn_fs_root_t *rev_root;
+ svn_revnum_t new_rev;
+ svn_filesize_t length;
+ const char *testdir = "test-repo-prop-and-text-rep-sharing-collision";
+
+ /* Create a new repo. */
+ SVN_ERR(svn_test__create_fs(&fs, testdir, opts, pool));
+
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ /* Set node property for the root. */
+ SVN_ERR(svn_fs_change_node_prop(txn_root, "/", "prop",
+ svn_string_create("value", pool),
+ pool));
+
+ /* Commit revision r1. */
+ SVN_ERR(test_commit_txn(&new_rev, txn, NULL, pool));
+
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 1, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Create file with same contents as property representation. */
+ SVN_ERR(svn_fs_make_file(txn_root, "/foo", pool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "/foo",
+ "K 4\n"
+ "prop\n"
+ "V 5\n"
+ "value\n"
+ "END\n", pool));
+
+ /* Commit revision r2. */
+ SVN_ERR(test_commit_txn(&new_rev, txn, NULL, pool));
+
+ /* Check that FS reports correct length for the file (23). */
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, 2, pool));
+ SVN_ERR(svn_fs_file_length(&length, rev_root, "/foo", pool));
+
+ SVN_TEST_ASSERT(length == 23);
+ return SVN_NO_ERROR;
+}
+
+/* Implements svn_test_driver2_t.
+ * Internal txn properties (SVN_FS__PROP_TXN_*) must be invisible to the
+ * public property API and rejected by all attempts to set, delete, or
+ * batch-change them. */
+static svn_error_t *
+test_internal_txn_props(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_string_t *val;
+ svn_prop_t prop;
+ svn_prop_t internal_prop;
+ apr_array_header_t *props;
+ apr_hash_t *proplist;
+ svn_error_t *err;
+
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-internal-txn-props",
+ opts, pool));
+ /* Begin a txn with all internal-property-backed flags enabled. */
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, 0,
+ SVN_FS_TXN_CHECK_LOCKS |
+ SVN_FS_TXN_CHECK_OOD |
+ SVN_FS_TXN_CLIENT_DATE, pool));
+
+ /* Ensure that we cannot read internal transaction properties. */
+ SVN_ERR(svn_fs_txn_prop(&val, txn, SVN_FS__PROP_TXN_CHECK_LOCKS, pool));
+ SVN_TEST_ASSERT(!val);
+ SVN_ERR(svn_fs_txn_prop(&val, txn, SVN_FS__PROP_TXN_CHECK_OOD, pool));
+ SVN_TEST_ASSERT(!val);
+ SVN_ERR(svn_fs_txn_prop(&val, txn, SVN_FS__PROP_TXN_CLIENT_DATE, pool));
+ SVN_TEST_ASSERT(!val);
+
+ /* The only visible property is the revision date. */
+ SVN_ERR(svn_fs_txn_proplist(&proplist, txn, pool));
+ SVN_TEST_ASSERT(apr_hash_count(proplist) == 1);
+ val = svn_hash_gets(proplist, SVN_PROP_REVISION_DATE);
+ SVN_TEST_ASSERT(val);
+
+ /* We also cannot change or discard them. */
+ val = svn_string_create("Ooops!", pool);
+
+ err = svn_fs_change_txn_prop(txn, SVN_FS__PROP_TXN_CHECK_LOCKS, val, pool);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_INCORRECT_PARAMS);
+ err = svn_fs_change_txn_prop(txn, SVN_FS__PROP_TXN_CHECK_LOCKS, NULL, pool);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_INCORRECT_PARAMS);
+ err = svn_fs_change_txn_prop(txn, SVN_FS__PROP_TXN_CHECK_OOD, val, pool);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_INCORRECT_PARAMS);
+ err = svn_fs_change_txn_prop(txn, SVN_FS__PROP_TXN_CHECK_OOD, NULL, pool);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_INCORRECT_PARAMS);
+ err = svn_fs_change_txn_prop(txn, SVN_FS__PROP_TXN_CLIENT_DATE, val, pool);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_INCORRECT_PARAMS);
+ err = svn_fs_change_txn_prop(txn, SVN_FS__PROP_TXN_CLIENT_DATE, NULL, pool);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_INCORRECT_PARAMS);
+
+ /* A batch change containing one internal property must fail as a whole. */
+ prop.name = "foo";
+ prop.value = svn_string_create("bar", pool);
+ internal_prop.name = SVN_FS__PROP_TXN_CHECK_LOCKS;
+ internal_prop.value = svn_string_create("Ooops!", pool);
+
+ props = apr_array_make(pool, 2, sizeof(svn_prop_t));
+ APR_ARRAY_PUSH(props, svn_prop_t) = prop;
+ APR_ARRAY_PUSH(props, svn_prop_t) = internal_prop;
+
+ err = svn_fs_change_txn_props(txn, props, pool);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_INCORRECT_PARAMS);
+
+ return SVN_NO_ERROR;
+}
+
+/* A freeze function that expects an 'svn_error_t *' baton, and returns it. */
+/* This function implements svn_fs_freeze_func_t.
+ * Passing SVN_NO_ERROR (i.e. NULL) as the baton makes the freeze succeed;
+ * passing a real error makes svn_fs_freeze() propagate that error. */
+static svn_error_t *
+freeze_func(void *baton, apr_pool_t *pool)
+{
+ return baton;
+}
+
+/* Implements svn_test_driver2_t.
+ * Freeze/unfreeze the FS (once successfully, once with a failing freeze
+ * callback) and verify that commits still work afterwards, both on the
+ * same FS instance and on a freshly opened one. */
+static svn_error_t *
+freeze_and_commit(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ svn_revnum_t new_rev = 0;
+ apr_pool_t *subpool = svn_pool_create(pool);
+ const char *repo_name = "test-repo-freeze-and-commit";
+
+ if (!strcmp(opts->fs_type, "bdb"))
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "this will not test BDB repositories");
+
+ SVN_ERR(svn_test__create_fs(&fs, repo_name, opts, subpool));
+
+ /* This test used to FAIL with an SQLite error since svn_fs_freeze()
+ * wouldn't unlock rep-cache.db. Therefore, part of the role of creating
+ * the Greek tree is to create a rep-cache.db, in order to test that
+ * svn_fs_freeze() unlocks it. */
+
+ /* r1: Commit the Greek tree. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, new_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, subpool));
+ SVN_ERR(test_commit_txn(&new_rev, txn, NULL, subpool));
+
+ /* Freeze and unfreeze.  SVN_NO_ERROR as baton => freeze_func succeeds. */
+ SVN_ERR(svn_fs_freeze(fs, freeze_func, SVN_NO_ERROR, pool));
+
+ /* Freeze again, but have freeze_func fail. */
+ {
+ svn_error_t *err = svn_error_create(APR_EGENERAL, NULL, NULL);
+ SVN_TEST_ASSERT_ERROR(svn_fs_freeze(fs, freeze_func, err, pool),
+ err->apr_err);
+ }
+
+ /* Make some commit using same FS instance. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, new_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_change_node_prop(txn_root, "", "temperature",
+ svn_string_create("310.05", pool),
+ pool));
+ SVN_ERR(test_commit_txn(&new_rev, txn, NULL, pool));
+
+ /* Re-open FS and make another commit. */
+ SVN_ERR(svn_fs_open(&fs, repo_name, NULL, subpool));
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, new_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_change_node_prop(txn_root, "/", "temperature",
+ svn_string_create("451", pool),
+ pool));
+ SVN_ERR(test_commit_txn(&new_rev, txn, NULL, pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* Number of changes in a revision.
+ * Should be > 100 to span multiple blocks.
+ * (Used by verify_added_files_list and test_large_changed_paths_list;
+ * #undef'd again after those tests.) */
+#define CHANGES_COUNT 1017
+
+/* Check that REVISION in FS reports the expected changes: exactly
+ * CHANGES_COUNT paths "/file-0" .. "/file-<CHANGES_COUNT-1>", each
+ * reported once and with change kind svn_fs_path_change_add.
+ * Temporary allocations go into sub-pools of SCRATCH_POOL. */
+static svn_error_t *
+verify_added_files_list(svn_fs_t *fs,
+ svn_revnum_t revision,
+ apr_pool_t *scratch_pool)
+{
+ int i;
+ svn_fs_root_t *root;
+ apr_hash_t *changed_paths;
+ svn_fs_path_change_iterator_t *iterator;
+ svn_fs_path_change3_t *change;
+ apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+
+ /* Collect changes and test that no path gets reported twice. */
+ SVN_ERR(svn_fs_revision_root(&root, fs, revision, scratch_pool));
+ SVN_ERR(svn_fs_paths_changed3(&iterator, root, scratch_pool, scratch_pool));
+
+ changed_paths = apr_hash_make(scratch_pool);
+ SVN_ERR(svn_fs_path_change_get(&change, iterator));
+ while (change)
+ {
+ /* CHANGE->path is a counted string; copy it so it can serve as a
+ * NUL-terminated hash key that outlives the iterator step. */
+ const char *path = apr_pstrmemdup(scratch_pool, change->path.data,
+ change->path.len);
+ SVN_TEST_ASSERT(change->change_kind == svn_fs_path_change_add);
+ SVN_TEST_ASSERT(!apr_hash_get(changed_paths, path, change->path.len));
+
+ apr_hash_set(changed_paths, path, change->path.len, path);
+ SVN_ERR(svn_fs_path_change_get(&change, iterator));
+ }
+
+ /* Verify that we've got exactly all paths that we added. */
+ SVN_TEST_ASSERT(CHANGES_COUNT == apr_hash_count(changed_paths));
+ for (i = 0; i < CHANGES_COUNT; ++i)
+ {
+ const char *file_name;
+ svn_pool_clear(iterpool);
+
+ file_name = apr_psprintf(iterpool, "/file-%d", i);
+ SVN_TEST_ASSERT(svn_hash_gets(changed_paths, file_name));
+ }
+
+ /* Fix: release the iteration pool.  The original returned without
+ * destroying ITERPOOL, leaking it into SCRATCH_POOL until the caller's
+ * cleanup — every sibling test here destroys its iterpool explicitly. */
+ svn_pool_destroy(iterpool);
+
+ return SVN_NO_ERROR;
+}
+
+/* Implements svn_test_driver2_t.
+ * Commit a revision with CHANGES_COUNT added files and verify the
+ * changed-paths list twice — once from disk and once from cache. */
+static svn_error_t *
+test_large_changed_paths_list(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ int i;
+ svn_revnum_t rev = 0;
+ apr_pool_t *iterpool = svn_pool_create(pool);
+ const char *repo_name = "test-repo-changed-paths-list";
+
+ SVN_ERR(svn_test__create_fs(&fs, repo_name, opts, pool));
+
+ /* r1: Add many empty files - just to amass a long list of changes. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ for (i = 0; i < CHANGES_COUNT; ++i)
+ {
+ const char *file_name;
+ svn_pool_clear(iterpool);
+
+ file_name = apr_psprintf(iterpool, "/file-%d", i);
+ SVN_ERR(svn_fs_make_file(txn_root, file_name, iterpool));
+ }
+
+ SVN_ERR(test_commit_txn(&rev, txn, NULL, pool));
+
+ /* Now, read the change list.
+ * Do it twice to cover cached data as well. */
+ svn_pool_clear(iterpool);
+ SVN_ERR(verify_added_files_list(fs, rev, iterpool));
+ svn_pool_clear(iterpool);
+ SVN_ERR(verify_added_files_list(fs, rev, iterpool));
+ svn_pool_destroy(iterpool);
+
+ return SVN_NO_ERROR;
+}
+
+#undef CHANGES_COUNT
+
+/* Implements svn_test_driver2_t.
+ * Hold a shared SQLite read-transaction on rep-cache.db while committing;
+ * the commit must fail post-processing with SVN_ERR_SQLITE_BUSY yet still
+ * produce the new revision, and the FS must remain usable afterwards. */
+static svn_error_t *
+commit_with_locked_rep_cache(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ svn_revnum_t new_rev;
+ svn_sqlite__db_t *sdb;
+ svn_error_t *err;
+ const char *fs_path;
+ const char *statements[] = { "SELECT MAX(revision) FROM rep_cache", NULL };
+
+ if (strcmp(opts->fs_type, SVN_FS_TYPE_BDB) == 0)
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "this will not test BDB repositories");
+
+ if (opts->server_minor_version && (opts->server_minor_version < 6))
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "pre-1.6 SVN doesn't support FSFS rep-sharing");
+
+ fs_path = "test-repo-commit-with-locked-rep-cache";
+ SVN_ERR(svn_test__create_fs(&fs, fs_path, opts, pool));
+
+ /* r1: Add a file. */
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, 0, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "/foo", pool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "/foo", "a", pool));
+ SVN_ERR(test_commit_txn(&new_rev, txn, NULL, pool));
+ SVN_TEST_INT_ASSERT(new_rev, 1);
+
+ /* Begin a new transaction based on r1. */
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, 1, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "/foo", "b", pool));
+
+ /* Obtain a shared lock on the rep-cache.db by starting a new read
+ * transaction. */
+ SVN_ERR(svn_sqlite__open(&sdb,
+ svn_dirent_join(fs_path, "rep-cache.db", pool),
+ svn_sqlite__mode_readonly, statements, 0, NULL,
+ 0, pool, pool));
+ SVN_ERR(svn_sqlite__begin_transaction(sdb));
+ SVN_ERR(svn_sqlite__exec_statements(sdb, 0));
+
+ /* Attempt to commit fs transaction. This should result in a commit
+ * post-processing error due to us still holding the shared lock on the
+ * rep-cache.db. */
+ err = svn_fs_commit_txn(NULL, &new_rev, txn, pool);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_SQLITE_BUSY);
+ /* The commit itself succeeded: r2 exists despite the rep-cache error. */
+ SVN_TEST_INT_ASSERT(new_rev, 2);
+
+ /* Release the shared lock. */
+ SVN_ERR(svn_sqlite__finish_transaction(sdb, SVN_NO_ERROR));
+ SVN_ERR(svn_sqlite__close(sdb));
+
+ /* Try an operation that reads from rep-cache.db.
+ *
+ * XFAIL: Around r1740802, this call was producing an error due to the
+ * svn_fs_t keeping an unusable db connection (and associated file
+ * locks) within it.
+ */
+ SVN_ERR(svn_fs_verify(fs_path, NULL, 0, SVN_INVALID_REVNUM, NULL, NULL,
+ NULL, NULL, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Implements svn_test_driver2_t.
+ * Start streaming a multi-window file from the fulltext cache, clear the
+ * membuffer cache mid-stream, and check that the read can continue from
+ * disk without error. */
+static svn_error_t *
+test_cache_clear_during_stream(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *rev_root;
+ svn_revnum_t new_rev;
+ const char *fs_path;
+ apr_pool_t *iterpool = svn_pool_create(pool);
+ apr_pool_t *subpool = svn_pool_create(pool);
+ svn_txdelta_window_handler_t consumer_func;
+ void *consumer_baton;
+ int i;
+ svn_stream_t *stream;
+ svn_stringbuf_t *buf;
+
+
+ fs_path = "test-repo-cache_clear_during_stream";
+ SVN_ERR(svn_test__create_fs(&fs, fs_path, opts, pool));
+
+ /* r1: Add a file. */
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, 0, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "/foo", pool));
+
+ /* Make the file large enough to span multiple txdelta windows.
+ * Just to be sure, make it not too uniform to keep self-txdelta at bay. */
+ SVN_ERR(svn_fs_apply_textdelta(&consumer_func, &consumer_baton,
+ txn_root, "/foo", NULL, NULL, subpool));
+ stream = svn_txdelta_target_push(consumer_func, consumer_baton,
+ svn_stream_empty(subpool), subpool);
+ for (i = 0; i < 10000; ++ i)
+ {
+ svn_string_t *text;
+
+ svn_pool_clear(iterpool);
+ text = svn_string_createf(iterpool, "some dummy text - %d\n", i);
+ SVN_ERR(svn_stream_write(stream, text->data, &text->len));
+ }
+
+ SVN_ERR(svn_stream_close(stream));
+ svn_pool_destroy(subpool);
+
+ SVN_ERR(test_commit_txn(&new_rev, txn, NULL, pool));
+ SVN_TEST_INT_ASSERT(new_rev, 1);
+
+ /* Read the file once to populate the fulltext cache. */
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, 1, pool));
+ SVN_ERR(svn_fs_file_contents(&stream, rev_root, "/foo", pool));
+ SVN_ERR(svn_test__stream_to_string(&buf, stream, pool));
+
+ /* Start reading it again from cache, clear the cache and continue.
+ * Make sure we read more than one txdelta window before clearing
+ * the cache. That gives the FS backend a chance to skip windows
+ * when continuing the read from disk. */
+ SVN_ERR(svn_fs_file_contents(&stream, rev_root, "/foo", pool));
+ /* BUF already holds the full text, so its buffer is large enough. */
+ buf->len = 2 * SVN_STREAM_CHUNK_SIZE;
+ SVN_ERR(svn_stream_read_full(stream, buf->data, &buf->len));
+ SVN_ERR(svn_cache__membuffer_clear(svn_cache__get_global_membuffer_cache()));
+ SVN_ERR(svn_test__stream_to_string(&buf, stream, pool));
+
+ svn_pool_destroy(iterpool);
+
+ return SVN_NO_ERROR;
+}
+
+/* Implements svn_test_driver2_t.
+ * Copy another repository's rep-cache.db into place so rep-sharing would
+ * match on checksum alone, then verify that writing colliding contents
+ * fails with SVN_ERR_FS_AMBIGUOUS_CHECKSUM_REP instead of silently
+ * sharing the wrong representation. */
+static svn_error_t *
+test_rep_sharing_strict_content_check(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ svn_revnum_t new_rev;
+ const char *fs_path, *fs_path2;
+ apr_pool_t *subpool = svn_pool_create(pool);
+ svn_error_t *err;
+
+ /* Bail (with success) on known-untestable scenarios */
+ if (strcmp(opts->fs_type, SVN_FS_TYPE_BDB) == 0)
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "BDB repositories don't support rep-sharing");
+
+ /* Create 2 repos with same structure & size but different contents */
+ fs_path = "test-rep-sharing-strict-content-check1";
+ fs_path2 = "test-rep-sharing-strict-content-check2";
+
+ SVN_ERR(svn_test__create_fs(&fs, fs_path, opts, subpool));
+
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, 0, 0, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_fs_make_file(txn_root, "/foo", subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "foo", "quite bad", subpool));
+ SVN_ERR(test_commit_txn(&new_rev, txn, NULL, subpool));
+ SVN_TEST_INT_ASSERT(new_rev, 1);
+
+ SVN_ERR(svn_test__create_fs(&fs, fs_path2, opts, subpool));
+
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, 0, 0, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_fs_make_file(txn_root, "foo", subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "foo", "very good", subpool));
+ SVN_ERR(test_commit_txn(&new_rev, txn, NULL, subpool));
+ SVN_TEST_INT_ASSERT(new_rev, 1);
+
+ /* Close both repositories. */
+ svn_pool_clear(subpool);
+
+ /* Doctor the first repo such that it uses the wrong rep-cache. */
+ SVN_ERR(svn_io_copy_file(svn_relpath_join(fs_path2, "rep-cache.db", pool),
+ svn_relpath_join(fs_path, "rep-cache.db", pool),
+ FALSE, pool));
+
+ /* Changing the file contents such that rep-sharing would kick in if
+ the file contents was not properly compared. */
+ SVN_ERR(svn_fs_open2(&fs, fs_path, NULL, subpool, subpool));
+
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, 1, 0, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ err = svn_test__set_file_contents(txn_root, "foo", "very good", subpool);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_FS_AMBIGUOUS_CHECKSUM_REP);
+
+ svn_pool_destroy(subpool);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+closest_copy_test_svn_4677(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *rev_root, *croot;
+ svn_revnum_t after_rev;
+ const char *cpath;
+ apr_pool_t *spool = svn_pool_create(pool);
+
+ /* Prepare a filesystem. */
+ SVN_ERR(svn_test__create_fs(&fs, "test-repo-svn-4677",
+ opts, pool));
+
+ /* In first txn, create file A/foo. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, spool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "A", spool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/foo", spool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, spool));
+ svn_pool_clear(spool);
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, after_rev, spool));
+
+ /* Move A to B, and commit. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, after_rev, spool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+ SVN_ERR(svn_fs_copy(rev_root, "A", txn_root, "B", spool));
+ SVN_ERR(svn_fs_delete(txn_root, "A", spool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, spool));
+ svn_pool_clear(spool);
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, after_rev, spool));
+
+ /* Replace file B/foo with directory B/foo, add B/foo/bar, and commit. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, after_rev, spool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+ SVN_ERR(svn_fs_delete(txn_root, "B/foo", spool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "B/foo", spool));
+ SVN_ERR(svn_fs_make_file(txn_root, "B/foo/bar", spool));
+ SVN_ERR(test_commit_txn(&after_rev, txn, NULL, spool));
+ svn_pool_clear(spool);
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, after_rev, spool));
+
+ /* B/foo/bar has been copied.
+ Issue 4677 was caused by returning an error in this situation. */
+ SVN_ERR(svn_fs_closest_copy(&croot, &cpath, rev_root, "B/foo/bar", spool));
+ SVN_TEST_ASSERT(cpath == NULL);
+ SVN_TEST_ASSERT(croot == NULL);
+
+ return SVN_NO_ERROR;
+}
+
+/* ------------------------------------------------------------------------ */
+
+/* The test table. */
+
+static int max_threads = 8;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_OPTS_PASS(trivial_transaction,
+ "begin a txn, check its name, then close it"),
+ SVN_TEST_OPTS_PASS(reopen_trivial_transaction,
+ "open an existing transaction by name"),
+ SVN_TEST_OPTS_PASS(create_file_transaction,
+ "begin a txn, get the txn root, and add a file"),
+ SVN_TEST_OPTS_PASS(verify_txn_list,
+ "create 2 txns, list them, and verify the list"),
+ SVN_TEST_OPTS_PASS(txn_names_are_not_reused,
+ "check that transaction names are not reused"),
+ SVN_TEST_OPTS_PASS(write_and_read_file,
+ "write and read a file's contents"),
+ SVN_TEST_OPTS_PASS(almostmedium_file_integrity,
+ "create and modify almostmedium file"),
+ SVN_TEST_OPTS_PASS(medium_file_integrity,
+ "create and modify medium file"),
+ SVN_TEST_OPTS_PASS(large_file_integrity,
+ "create and modify large file"),
+ SVN_TEST_OPTS_PASS(create_mini_tree_transaction,
+ "test basic file and subdirectory creation"),
+ SVN_TEST_OPTS_PASS(create_greek_tree_transaction,
+ "make The Official Subversion Test Tree"),
+ SVN_TEST_OPTS_PASS(list_directory,
+ "fill a directory, then list it"),
+ SVN_TEST_OPTS_PASS(revision_props,
+ "set and get some revision properties"),
+ SVN_TEST_OPTS_PASS(transaction_props,
+ "set/get txn props, commit, validate new rev props"),
+ SVN_TEST_OPTS_PASS(node_props,
+ "set and get some node properties"),
+ SVN_TEST_OPTS_PASS(delete_mutables,
+ "delete mutable nodes from directories"),
+ SVN_TEST_OPTS_PASS(delete,
+ "delete nodes tree"),
+ SVN_TEST_OPTS_PASS(fetch_youngest_rev,
+ "fetch the youngest revision from a filesystem"),
+ SVN_TEST_OPTS_PASS(basic_commit,
+ "basic commit"),
+ SVN_TEST_OPTS_PASS(test_tree_node_validation,
+ "testing tree validation helper"),
+ SVN_TEST_OPTS_PASS(merging_commit, "merging commit"),
+ SVN_TEST_OPTS_PASS(copy_test,
+ "copying and tracking copy history"),
+ SVN_TEST_OPTS_PASS(commit_date,
+ "commit datestamps"),
+ SVN_TEST_OPTS_PASS(check_old_revisions,
+ "check old revisions"),
+ SVN_TEST_OPTS_PASS(check_all_revisions,
+ "after each commit, check all revisions"),
+ SVN_TEST_OPTS_PASS(check_root_revision,
+ "ensure accurate storage of root node"),
+ SVN_TEST_OPTS_PASS(test_node_created_rev,
+ "svn_fs_node_created_rev test"),
+ SVN_TEST_OPTS_PASS(check_related,
+ "test svn_fs_check_related"),
+ SVN_TEST_OPTS_PASS(branch_test,
+ "test complex copies (branches)"),
+ SVN_TEST_OPTS_PASS(verify_checksum,
+ "test checksums"),
+ SVN_TEST_OPTS_PASS(closest_copy_test,
+ "calculating closest history-affecting copies"),
+ SVN_TEST_OPTS_PASS(root_revisions,
+ "svn_fs_root_t (base) revisions"),
+ SVN_TEST_OPTS_PASS(unordered_txn_dirprops,
+ "test dir prop preservation in unordered txns"),
+ SVN_TEST_OPTS_PASS(set_uuid,
+ "test svn_fs_set_uuid"),
+ SVN_TEST_OPTS_PASS(node_origin_rev,
+ "test svn_fs_node_origin_rev"),
+ SVN_TEST_OPTS_PASS(small_file_integrity,
+ "create and modify small file"),
+ SVN_TEST_OPTS_PASS(node_history,
+ "test svn_fs_node_history"),
+ SVN_TEST_OPTS_PASS(delete_fs,
+ "test svn_fs_delete_fs"),
+ SVN_TEST_OPTS_PASS(filename_trailing_newline,
+ "filenames with trailing \\n might be rejected"),
+ SVN_TEST_OPTS_PASS(test_fs_info_format,
+ "test svn_fs_info_format"),
+ SVN_TEST_OPTS_PASS(commit_timestamp,
+ "commit timestamp"),
+ SVN_TEST_OPTS_PASS(test_compat_version,
+ "test svn_fs__compatible_version"),
+ SVN_TEST_OPTS_PASS(dir_prop_merge,
+ "test merge directory properties"),
+ SVN_TEST_OPTS_PASS(upgrade_while_committing,
+ "upgrade while committing"),
+ SVN_TEST_OPTS_PASS(test_paths_changed,
+ "test svn_fs_paths_changed"),
+ SVN_TEST_OPTS_PASS(test_delete_replaced_paths_changed,
+ "test deletion after replace in changed paths list"),
+ SVN_TEST_OPTS_PASS(purge_txn_test,
+ "test purging transactions"),
+ SVN_TEST_OPTS_PASS(compare_contents,
+ "compare contents of different nodes"),
+ SVN_TEST_OPTS_PASS(test_path_change_create,
+ "test svn_fs_path_change2_create"),
+ SVN_TEST_OPTS_PASS(test_node_created_info,
+ "test FS 'node created' info"),
+ SVN_TEST_OPTS_PASS(test_print_modules,
+ "test FS module listing"),
+ SVN_TEST_OPTS_PASS(test_zero_copy_processsing,
+ "test zero copy file processing"),
+ SVN_TEST_OPTS_PASS(test_dir_optimal_order,
+ "test svn_fs_dir_optimal_order"),
+ SVN_TEST_OPTS_PASS(test_config_files,
+ "get configuration files"),
+ SVN_TEST_OPTS_PASS(test_delta_file_stream,
+ "get a delta stream on a file"),
+ SVN_TEST_OPTS_PASS(test_fs_merge,
+ "get merging txns with newer revisions"),
+ SVN_TEST_OPTS_PASS(test_fsfs_config_opts,
+ "test creating FSFS repository with different opts"),
+ SVN_TEST_OPTS_PASS(test_txn_pool_lifetime,
+ "test pool lifetime dependencies with txn roots"),
+ SVN_TEST_OPTS_PASS(test_modify_txn_being_written,
+ "test modify txn being written"),
+ SVN_TEST_OPTS_PASS(test_prop_and_text_rep_sharing_collision,
+ "test property and text rep-sharing collision"),
+ SVN_TEST_OPTS_PASS(test_internal_txn_props,
+ "test setting and getting internal txn props"),
+ SVN_TEST_OPTS_PASS(check_txn_related,
+ "test svn_fs_check_related for transactions"),
+ SVN_TEST_OPTS_PASS(freeze_and_commit,
+ "freeze and commit"),
+ SVN_TEST_OPTS_PASS(test_large_changed_paths_list,
+ "test reading a large changed paths list"),
+ SVN_TEST_OPTS_PASS(commit_with_locked_rep_cache,
+ "test commit with locked rep-cache"),
+ SVN_TEST_OPTS_PASS(test_cache_clear_during_stream,
+ "test clearing the cache while streaming a rep"),
+ SVN_TEST_OPTS_PASS(test_rep_sharing_strict_content_check,
+ "test rep-sharing on content rather than SHA1"),
+ SVN_TEST_OPTS_PASS(closest_copy_test_svn_4677,
+ "test issue SVN-4677 regression"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_fs/locks-test.c b/subversion/tests/libsvn_fs/locks-test.c
new file mode 100644
index 0000000..6251f1b
--- /dev/null
+++ b/subversion/tests/libsvn_fs/locks-test.c
@@ -0,0 +1,1256 @@
+/* locks-test.c --- tests for the filesystem locking functions
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <string.h>
+#include <apr_pools.h>
+#include <apr_time.h>
+
+#include "../svn_test.h"
+
+#include "svn_error.h"
+#include "svn_fs.h"
+#include "svn_hash.h"
+
+#include "../svn_test_fs.h"
+
+
+/*-----------------------------------------------------------------*/
+
+/** Helper functions **/
+
+/* Implementations of the svn_fs_get_locks_callback_t interface and
+ baton, for verifying expected output from svn_fs_get_locks(). */
+
+struct get_locks_baton_t
+{
+ apr_hash_t *locks;
+};
+
+static svn_error_t *
+get_locks_callback(void *baton,
+ svn_lock_t *lock,
+ apr_pool_t *pool)
+{
+ struct get_locks_baton_t *b = baton;
+ apr_pool_t *hash_pool = apr_hash_pool_get(b->locks);
+ svn_string_t *lock_path = svn_string_create(lock->path, hash_pool);
+
+ if (!apr_hash_get(b->locks, lock_path->data, lock_path->len))
+ {
+ apr_hash_set(b->locks, lock_path->data, lock_path->len,
+ svn_lock_dup(lock, hash_pool));
+ return SVN_NO_ERROR;
+ }
+ else
+ {
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Lock for path '%s' is being reported twice.",
+ lock->path);
+ }
+}
+
+/* A factory function. */
+
+static struct get_locks_baton_t *
+make_get_locks_baton(apr_pool_t *pool)
+{
+ struct get_locks_baton_t *baton = apr_pcalloc(pool, sizeof(*baton));
+ baton->locks = apr_hash_make(pool);
+ return baton;
+}
+
+
+/* And verification function(s). */
+
+static svn_error_t *
+verify_matching_lock_paths(struct get_locks_baton_t *baton,
+ const char *expected_paths[],
+ apr_size_t num_expected_paths,
+ apr_pool_t *pool)
+{
+ apr_size_t i;
+ if (num_expected_paths != apr_hash_count(baton->locks))
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Unexpected number of locks.");
+ for (i = 0; i < num_expected_paths; i++)
+ {
+ const char *path = expected_paths[i];
+ if (! apr_hash_get(baton->locks, path, APR_HASH_KEY_STRING))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Missing lock for path '%s'", path);
+ }
+ return SVN_NO_ERROR;
+}
+
+
+/* Create a filesystem in a directory called NAME, and populate it with
+ * the standard Greek tree. Set *FS_P to the new filesystem object and
+ * *NEWREV_P to the head revision number. Unwanted outputs may be NULL. */
+static svn_error_t *
+create_greek_fs(svn_fs_t **fs_p,
+ svn_revnum_t *newrev_p,
+ const char *name,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ const char *conflict;
+ svn_revnum_t newrev;
+
+ /* Prepare a filesystem and a new txn. */
+ SVN_ERR(svn_test__create_fs(&fs, name, opts, pool));
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, 0, SVN_FS_TXN_CHECK_LOCKS, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Create the greek tree and commit it. */
+ SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+ SVN_ERR(svn_fs_commit_txn(&conflict, &newrev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(newrev));
+
+ if (fs_p)
+ *fs_p = fs;
+ if (newrev_p)
+ *newrev_p = newrev;
+ return SVN_NO_ERROR;
+}
+
+
+/*-----------------------------------------------------------------*/
+
+/** The actual lock-tests called by `make check` **/
+
+
+
+/* Test that we can create a lock--nothing more. */
+static svn_error_t *
+lock_only(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_access_t *access;
+ svn_lock_t *mylock;
+
+ SVN_ERR(create_greek_fs(&fs, NULL, "test-repo-lock-only",
+ opts, pool));
+
+ /* We are now 'bubba'. */
+ SVN_ERR(svn_fs_create_access(&access, "bubba", pool));
+ SVN_ERR(svn_fs_set_access(fs, access));
+
+ /* Lock /A/D/G/rho. */
+ SVN_ERR(svn_fs_lock(&mylock, fs, "/A/D/G/rho", NULL, "", 0, 0,
+ SVN_INVALID_REVNUM, FALSE, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+
+
+
+/* Test that we can create, fetch, and destroy a lock. It exercises
+ each of the five public fs locking functions. */
+static svn_error_t *
+lookup_lock_by_path(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_access_t *access;
+ svn_lock_t *mylock, *somelock;
+
+ SVN_ERR(create_greek_fs(&fs, NULL, "test-repo-lookup-lock-by-path",
+ opts, pool));
+
+ /* We are now 'bubba'. */
+ SVN_ERR(svn_fs_create_access(&access, "bubba", pool));
+ SVN_ERR(svn_fs_set_access(fs, access));
+
+ /* Lock /A/D/G/rho. */
+ SVN_ERR(svn_fs_lock(&mylock, fs, "/A/D/G/rho", NULL, "", 0, 0,
+ SVN_INVALID_REVNUM, FALSE, pool));
+
+ /* Can we look up the lock by path? */
+ SVN_ERR(svn_fs_get_lock(&somelock, fs, "/A/D/G/rho", pool));
+ if ((! somelock) || (strcmp(somelock->token, mylock->token) != 0))
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Couldn't look up a lock by pathname.");
+
+ return SVN_NO_ERROR;
+}
+
+/* Test that we can create a lock outside of the fs and attach it to a
+ path. */
+static svn_error_t *
+attach_lock(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_access_t *access;
+ svn_lock_t *somelock;
+ svn_lock_t *mylock;
+ const char *token;
+
+ SVN_ERR(create_greek_fs(&fs, NULL, "test-repo-attach-lock",
+ opts, pool));
+
+ /* We are now 'bubba'. */
+ SVN_ERR(svn_fs_create_access(&access, "bubba", pool));
+ SVN_ERR(svn_fs_set_access(fs, access));
+
+ SVN_ERR(svn_fs_generate_lock_token(&token, fs, pool));
+ SVN_ERR(svn_fs_lock(&mylock, fs, "/A/D/G/rho", token,
+ "This is a comment. Yay comment!", 0,
+ apr_time_now() + apr_time_from_sec(3),
+ SVN_INVALID_REVNUM, FALSE, pool));
+
+ /* Can we look up the lock by path? */
+ SVN_ERR(svn_fs_get_lock(&somelock, fs, "/A/D/G/rho", pool));
+ if ((! somelock) || (strcmp(somelock->token, mylock->token) != 0))
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Couldn't look up a lock by pathname.");
+
+ /* Unlock /A/D/G/rho, and verify that it's gone. */
+ SVN_ERR(svn_fs_unlock(fs, mylock->path, mylock->token, 0, pool));
+ SVN_ERR(svn_fs_get_lock(&somelock, fs, "/A/D/G/rho", pool));
+ if (somelock)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Removed a lock, but it's still there.");
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Test that we can get all locks under a directory. */
+static svn_error_t *
+get_locks(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_access_t *access;
+ svn_lock_t *mylock;
+ struct get_locks_baton_t *get_locks_baton;
+ apr_size_t i, num_expected_paths;
+
+ SVN_ERR(create_greek_fs(&fs, NULL, "test-repo-get-locks",
+ opts, pool));
+
+ /* We are now 'bubba'. */
+ SVN_ERR(svn_fs_create_access(&access, "bubba", pool));
+ SVN_ERR(svn_fs_set_access(fs, access));
+
+ /* Lock our paths; verify from "/". */
+ {
+ static const char *expected_paths[] = {
+ "/A/D/G/pi",
+ "/A/D/G/rho",
+ "/A/D/G/tau",
+ "/A/D/H/psi",
+ "/A/D/H/chi",
+ "/A/D/H/omega",
+ "/A/B/E/alpha",
+ "/A/B/E/beta",
+ };
+ num_expected_paths = sizeof(expected_paths) / sizeof(const char *);
+ for (i = 0; i < num_expected_paths; i++)
+ {
+ SVN_ERR(svn_fs_lock(&mylock, fs, expected_paths[i], NULL, "", 0, 0,
+ SVN_INVALID_REVNUM, FALSE, pool));
+ }
+ get_locks_baton = make_get_locks_baton(pool);
+ SVN_ERR(svn_fs_get_locks(fs, "", get_locks_callback,
+ get_locks_baton, pool));
+ SVN_ERR(verify_matching_lock_paths(get_locks_baton, expected_paths,
+ num_expected_paths, pool));
+ }
+
+ /* Verify from "/A/B". */
+ {
+ static const char *expected_paths[] = {
+ "/A/B/E/alpha",
+ "/A/B/E/beta",
+ };
+ num_expected_paths = sizeof(expected_paths) / sizeof(const char *);
+ get_locks_baton = make_get_locks_baton(pool);
+ SVN_ERR(svn_fs_get_locks(fs, "A/B", get_locks_callback,
+ get_locks_baton, pool));
+ SVN_ERR(verify_matching_lock_paths(get_locks_baton, expected_paths,
+ num_expected_paths, pool));
+ }
+
+ /* Verify from "/A/D". */
+ {
+ static const char *expected_paths[] = {
+ "/A/D/G/pi",
+ "/A/D/G/rho",
+ "/A/D/G/tau",
+ "/A/D/H/psi",
+ "/A/D/H/chi",
+ "/A/D/H/omega",
+ };
+ num_expected_paths = sizeof(expected_paths) / sizeof(const char *);
+ get_locks_baton = make_get_locks_baton(pool);
+ SVN_ERR(svn_fs_get_locks(fs, "A/D", get_locks_callback,
+ get_locks_baton, pool));
+ SVN_ERR(verify_matching_lock_paths(get_locks_baton, expected_paths,
+ num_expected_paths, pool));
+ }
+
+ /* Verify from "/A/D/G". */
+ {
+ static const char *expected_paths[] = {
+ "/A/D/G/pi",
+ "/A/D/G/rho",
+ "/A/D/G/tau",
+ };
+ num_expected_paths = sizeof(expected_paths) / sizeof(const char *);
+ get_locks_baton = make_get_locks_baton(pool);
+ SVN_ERR(svn_fs_get_locks(fs, "A/D/G", get_locks_callback,
+ get_locks_baton, pool));
+ SVN_ERR(verify_matching_lock_paths(get_locks_baton, expected_paths,
+ num_expected_paths, pool));
+ }
+
+ /* Verify from "/A/D/H/omega". */
+ {
+ static const char *expected_paths[] = {
+ "/A/D/H/omega",
+ };
+ num_expected_paths = sizeof(expected_paths) / sizeof(const char *);
+ get_locks_baton = make_get_locks_baton(pool);
+ SVN_ERR(svn_fs_get_locks(fs, "A/D/H/omega", get_locks_callback,
+ get_locks_baton, pool));
+ SVN_ERR(verify_matching_lock_paths(get_locks_baton, expected_paths,
+ num_expected_paths, pool));
+ }
+
+ /* Verify from "/iota" (which wasn't locked... tricky...). */
+ {
+ static const char *expected_paths[] = { 0 };
+ num_expected_paths = 0;
+ get_locks_baton = make_get_locks_baton(pool);
+ SVN_ERR(svn_fs_get_locks(fs, "iota", get_locks_callback,
+ get_locks_baton, pool));
+ SVN_ERR(verify_matching_lock_paths(get_locks_baton, expected_paths,
+ num_expected_paths, pool));
+ }
+
+ /* A path that is longer and alphabetically earlier than some locked
+ paths, this exercises the r1205848 BDB lock code. */
+ {
+ static const char *expected_paths[] = { 0 };
+ num_expected_paths = 0;
+ get_locks_baton = make_get_locks_baton(pool);
+ SVN_ERR(svn_fs_get_locks(fs, "A/D/H/ABCDEFGHIJKLMNOPQR", get_locks_callback,
+ get_locks_baton, pool));
+ SVN_ERR(verify_matching_lock_paths(get_locks_baton, expected_paths,
+ num_expected_paths, pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Test that we can create, fetch, and destroy a lock. It exercises
+ each of the five public fs locking functions. */
+static svn_error_t *
+basic_lock(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_access_t *access;
+ svn_lock_t *mylock, *somelock;
+
+ SVN_ERR(create_greek_fs(&fs, NULL, "test-repo-basic-lock",
+ opts, pool));
+
+ /* We are now 'bubba'. */
+ SVN_ERR(svn_fs_create_access(&access, "bubba", pool));
+ SVN_ERR(svn_fs_set_access(fs, access));
+
+ /* Lock /A/D/G/rho. */
+ SVN_ERR(svn_fs_lock(&mylock, fs, "/A/D/G/rho", NULL, "", 0, 0,
+ SVN_INVALID_REVNUM, FALSE, pool));
+
+ /* Can we look up the lock by path? */
+ SVN_ERR(svn_fs_get_lock(&somelock, fs, "/A/D/G/rho", pool));
+ if ((! somelock) || (strcmp(somelock->token, mylock->token) != 0))
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Couldn't look up a lock by pathname.");
+
+ /* Unlock /A/D/G/rho, and verify that it's gone. */
+ SVN_ERR(svn_fs_unlock(fs, mylock->path, mylock->token, 0, pool));
+ SVN_ERR(svn_fs_get_lock(&somelock, fs, "/A/D/G/rho", pool));
+ if (somelock)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Removed a lock, but it's still there.");
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* Test that locks are enforced -- specifically that both a username
+ and token are required to make use of the lock. */
+static svn_error_t *
+lock_credentials(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ const char *conflict;
+ svn_revnum_t newrev;
+ svn_fs_access_t *access;
+ svn_lock_t *mylock;
+ svn_error_t *err;
+
+ SVN_ERR(create_greek_fs(&fs, &newrev, "test-repo-lock-credentials",
+ opts, pool));
+
+ /* We are now 'bubba'. */
+ SVN_ERR(svn_fs_create_access(&access, "bubba", pool));
+ SVN_ERR(svn_fs_set_access(fs, access));
+
+ /* Lock /A/D/G/rho. */
+ SVN_ERR(svn_fs_lock(&mylock, fs, "/A/D/G/rho", NULL, "", 0, 0,
+ SVN_INVALID_REVNUM, FALSE, pool));
+
+ /* Push the proper lock-token into the fs access context. */
+ SVN_ERR(svn_fs_access_add_lock_token(access, mylock->token));
+
+ /* Make a new transaction and change rho. */
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, newrev, SVN_FS_TXN_CHECK_LOCKS, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "/A/D/G/rho",
+ "new contents", pool));
+
+ /* We are no longer 'bubba'. We're nobody. */
+ SVN_ERR(svn_fs_set_access(fs, NULL));
+
+ /* Try to commit the file change. Should fail, because we're nobody. */
+ err = svn_fs_commit_txn(&conflict, &newrev, txn, pool);
+ SVN_TEST_ASSERT(! SVN_IS_VALID_REVNUM(newrev));
+ if (! err)
+ return svn_error_create
+ (SVN_ERR_TEST_FAILED, NULL,
+ "Uhoh, able to commit locked file without any fs username.");
+ svn_error_clear(err);
+
+ /* We are now 'hortense'. */
+ SVN_ERR(svn_fs_create_access(&access, "hortense", pool));
+ SVN_ERR(svn_fs_set_access(fs, access));
+
+ /* Try to commit the file change. Should fail, because we're 'hortense'. */
+ err = svn_fs_commit_txn(&conflict, &newrev, txn, pool);
+ SVN_TEST_ASSERT(! SVN_IS_VALID_REVNUM(newrev));
+ if (! err)
+ return svn_error_create
+ (SVN_ERR_TEST_FAILED, NULL,
+ "Uhoh, able to commit locked file as non-owner.");
+ svn_error_clear(err);
+
+ /* Be 'bubba' again. */
+ SVN_ERR(svn_fs_create_access(&access, "bubba", pool));
+ SVN_ERR(svn_fs_set_access(fs, access));
+
+ /* Try to commit the file change. Should fail, because there's no token. */
+ err = svn_fs_commit_txn(&conflict, &newrev, txn, pool);
+ SVN_TEST_ASSERT(! SVN_IS_VALID_REVNUM(newrev));
+ if (! err)
+ return svn_error_create
+ (SVN_ERR_TEST_FAILED, NULL,
+ "Uhoh, able to commit locked file with no lock token.");
+ svn_error_clear(err);
+
+ /* Push the proper lock-token into the fs access context. */
+ SVN_ERR(svn_fs_access_add_lock_token(access, mylock->token));
+
+ /* Commit should now succeed. */
+ SVN_ERR(svn_fs_commit_txn(&conflict, &newrev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(newrev));
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* Test that locks are enforced at commit time. Somebody might lock
+ something behind your back, right before you run
+ svn_fs_commit_txn(). Also, this test verifies that recursive
+   lock-checks on directories are working properly. */
+static svn_error_t *
+final_lock_check(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ const char *conflict;
+ svn_revnum_t newrev;
+ svn_fs_access_t *access;
+ svn_lock_t *mylock;
+ svn_error_t *err;
+
+ SVN_ERR(create_greek_fs(&fs, &newrev, "test-repo-final-lock-check",
+ opts, pool));
+
+ /* Make a new transaction and delete "/A" */
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, newrev, SVN_FS_TXN_CHECK_LOCKS, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_delete(txn_root, "/A", pool));
+
+ /* Become 'bubba' and lock "/A/D/G/rho". */
+ SVN_ERR(svn_fs_create_access(&access, "bubba", pool));
+ SVN_ERR(svn_fs_set_access(fs, access));
+ SVN_ERR(svn_fs_lock(&mylock, fs, "/A/D/G/rho", NULL, "", 0, 0,
+ SVN_INVALID_REVNUM, FALSE, pool));
+
+ /* We are no longer 'bubba'. We're nobody. */
+ SVN_ERR(svn_fs_set_access(fs, NULL));
+
+ /* Try to commit the transaction. Should fail, because a child of
+ the deleted directory is locked by someone else. */
+ err = svn_fs_commit_txn(&conflict, &newrev, txn, pool);
+ SVN_TEST_ASSERT(! SVN_IS_VALID_REVNUM(newrev));
+ if (! err)
+ return svn_error_create
+ (SVN_ERR_TEST_FAILED, NULL,
+ "Uhoh, able to commit dir deletion when a child is locked.");
+ svn_error_clear(err);
+
+ /* Supply correct username and token; commit should work. */
+ SVN_ERR(svn_fs_set_access(fs, access));
+ SVN_ERR(svn_fs_access_add_lock_token(access, mylock->token));
+ SVN_ERR(svn_fs_commit_txn(&conflict, &newrev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(newrev));
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* If a directory's child is locked by someone else, we should still
+ be able to commit a propchange on the directory. */
+static svn_error_t *
+lock_dir_propchange(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ const char *conflict;
+ svn_revnum_t newrev;
+ svn_fs_access_t *access;
+ svn_lock_t *mylock;
+
+ SVN_ERR(create_greek_fs(&fs, &newrev, "test-repo-lock-dir-propchange",
+ opts, pool));
+
+ /* Become 'bubba' and lock "/A/D/G/rho". */
+ SVN_ERR(svn_fs_create_access(&access, "bubba", pool));
+ SVN_ERR(svn_fs_set_access(fs, access));
+ SVN_ERR(svn_fs_lock(&mylock, fs, "/A/D/G/rho", NULL, "", 0, 0,
+ SVN_INVALID_REVNUM, FALSE, pool));
+
+ /* We are no longer 'bubba'. We're nobody. */
+ SVN_ERR(svn_fs_set_access(fs, NULL));
+
+ /* Make a new transaction and make a propchange on "/A" */
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, newrev, SVN_FS_TXN_CHECK_LOCKS, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_change_node_prop(txn_root, "/A",
+ "foo", svn_string_create("bar", pool),
+ pool));
+
+ /* Commit should succeed; this means we're doing a non-recursive
+ lock-check on directory, rather than a recursive one. */
+ SVN_ERR(svn_fs_commit_txn(&conflict, &newrev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(newrev));
+
+ return SVN_NO_ERROR;
+}
+
+/* Test that locks auto-expire correctly. */
+static svn_error_t *
+lock_expiration(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ const char *conflict;
+ svn_revnum_t newrev;
+ svn_fs_access_t *access;
+ svn_lock_t *mylock;
+ svn_error_t *err;
+ struct get_locks_baton_t *get_locks_baton;
+
+ SVN_ERR(create_greek_fs(&fs, &newrev, "test-repo-lock-expiration",
+ opts, pool));
+
+ /* Make a new transaction and change rho. */
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, newrev, SVN_FS_TXN_CHECK_LOCKS, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "/A/D/G/rho",
+ "new contents", pool));
+
+ /* We are now 'bubba'. */
+ SVN_ERR(svn_fs_create_access(&access, "bubba", pool));
+ SVN_ERR(svn_fs_set_access(fs, access));
+
+ /* Lock /A/D/G/rho, with an expiration 2 seconds from now. */
+ SVN_ERR(svn_fs_lock(&mylock, fs, "/A/D/G/rho", NULL, "", 0,
+ apr_time_now() + apr_time_from_sec(2),
+ SVN_INVALID_REVNUM, FALSE, pool));
+
+ /* Become nobody. */
+ SVN_ERR(svn_fs_set_access(fs, NULL));
+
+ /* Try to commit. Should fail because we're 'nobody', and the lock
+ hasn't expired yet. */
+ err = svn_fs_commit_txn(&conflict, &newrev, txn, pool);
+ SVN_TEST_ASSERT(! SVN_IS_VALID_REVNUM(newrev));
+ if (! err)
+ return svn_error_create
+ (SVN_ERR_TEST_FAILED, NULL,
+ "Uhoh, able to commit a file that has a non-expired lock.");
+ svn_error_clear(err);
+
+  /* Check that the lock is there, by getting it via the path's parent. */
+ {
+ static const char *expected_paths [] = {
+ "/A/D/G/rho"
+ };
+ apr_size_t num_expected_paths = (sizeof(expected_paths)
+ / sizeof(expected_paths[0]));
+ get_locks_baton = make_get_locks_baton(pool);
+ SVN_ERR(svn_fs_get_locks(fs, "/A/D/G", get_locks_callback,
+ get_locks_baton, pool));
+ SVN_ERR(verify_matching_lock_paths(get_locks_baton, expected_paths,
+ num_expected_paths, pool));
+ }
+
+  /* Sleep past the 2-second expiration, so the lock auto-expires.
+     Anonymous commit should then succeed. */
+ apr_sleep(apr_time_from_sec(3));
+
+ /* Verify that the lock auto-expired even in the recursive case. */
+ {
+ static const char *expected_paths [] = { 0 };
+ apr_size_t num_expected_paths = 0;
+ get_locks_baton = make_get_locks_baton(pool);
+ SVN_ERR(svn_fs_get_locks(fs, "/A/D/G", get_locks_callback,
+ get_locks_baton, pool));
+ SVN_ERR(verify_matching_lock_paths(get_locks_baton, expected_paths,
+ num_expected_paths, pool));
+ }
+
+ SVN_ERR(svn_fs_commit_txn(&conflict, &newrev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(newrev));
+
+ return SVN_NO_ERROR;
+}
+
+/* Test that a lock can be broken, stolen, or refreshed */
+static svn_error_t *
+lock_break_steal_refresh(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_access_t *access;
+ svn_lock_t *mylock, *somelock;
+
+ SVN_ERR(create_greek_fs(&fs, NULL, "test-repo-steal-refresh",
+ opts, pool));
+
+ /* Become 'bubba' and lock "/A/D/G/rho". */
+ SVN_ERR(svn_fs_create_access(&access, "bubba", pool));
+ SVN_ERR(svn_fs_set_access(fs, access));
+ SVN_ERR(svn_fs_lock(&mylock, fs, "/A/D/G/rho", NULL, "", 0, 0,
+ SVN_INVALID_REVNUM, FALSE, pool));
+
+ /* Become 'hortense' and break bubba's lock, then verify it's gone. */
+ SVN_ERR(svn_fs_create_access(&access, "hortense", pool));
+ SVN_ERR(svn_fs_set_access(fs, access));
+ SVN_ERR(svn_fs_unlock(fs, mylock->path, mylock->token,
+ 1 /* FORCE BREAK */, pool));
+ SVN_ERR(svn_fs_get_lock(&somelock, fs, "/A/D/G/rho", pool));
+ if (somelock)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Tried to break a lock, but it's still there.");
+
+ /* As hortense, create a new lock, and verify that we own it. */
+ SVN_ERR(svn_fs_lock(&mylock, fs, "/A/D/G/rho", NULL, "", 0, 0,
+ SVN_INVALID_REVNUM, FALSE, pool));
+ SVN_ERR(svn_fs_get_lock(&somelock, fs, "/A/D/G/rho", pool));
+ if (strcmp(somelock->owner, mylock->owner) != 0)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Made a lock, but we don't seem to own it.");
+
+ /* As bubba, steal hortense's lock, creating a new one that expires. */
+ SVN_ERR(svn_fs_create_access(&access, "bubba", pool));
+ SVN_ERR(svn_fs_set_access(fs, access));
+ SVN_ERR(svn_fs_lock(&mylock, fs, "/A/D/G/rho", NULL, "", 0,
+ apr_time_now() + apr_time_from_sec(300), /* 5 min. */
+ SVN_INVALID_REVNUM,
+ TRUE /* FORCE STEAL */,
+ pool));
+ SVN_ERR(svn_fs_get_lock(&somelock, fs, "/A/D/G/rho", pool));
+ if (strcmp(somelock->owner, mylock->owner) != 0)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Made a lock, but we don't seem to own it.");
+ if (! somelock->expiration_date)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Made expiring lock, but seems not to expire.");
+
+ /* Refresh the lock, so that it never expires. */
+ SVN_ERR(svn_fs_lock(&somelock, fs, somelock->path, somelock->token,
+ somelock->comment, 0, 0,
+ SVN_INVALID_REVNUM,
+ TRUE /* FORCE STEAL */,
+ pool));
+ SVN_ERR(svn_fs_get_lock(&somelock, fs, "/A/D/G/rho", pool));
+ if (somelock->expiration_date)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+                            "Made non-expiring lock, but it expires.");
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Test that svn_fs_lock() and svn_fs_attach_lock() can do
+   out-of-dateness checks. */
+static svn_error_t *
+lock_out_of_date(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ const char *conflict;
+ svn_revnum_t newrev;
+ svn_fs_access_t *access;
+ svn_lock_t *mylock;
+ svn_error_t *err;
+
+ SVN_ERR(create_greek_fs(&fs, &newrev, "test-repo-lock-out-of-date",
+ opts, pool));
+
+ /* Commit a small change to /A/D/G/rho, creating revision 2. */
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, newrev, SVN_FS_TXN_CHECK_LOCKS, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "/A/D/G/rho",
+ "new contents", pool));
+ SVN_ERR(svn_fs_commit_txn(&conflict, &newrev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(newrev));
+
+ /* We are now 'bubba'. */
+ SVN_ERR(svn_fs_create_access(&access, "bubba", pool));
+ SVN_ERR(svn_fs_set_access(fs, access));
+
+ /* Try to lock /A/D/G/rho, but claim that we still have r1 of the file. */
+ err = svn_fs_lock(&mylock, fs, "/A/D/G/rho", NULL, "", 0, 0, 1, FALSE, pool);
+ if (! err)
+ return svn_error_create
+ (SVN_ERR_TEST_FAILED, NULL,
+ "Uhoh, able to lock an out-of-date file.");
+ svn_error_clear(err);
+
+ /* Attempt lock again, this time claiming to have r2. */
+ SVN_ERR(svn_fs_lock(&mylock, fs, "/A/D/G/rho", NULL, "", 0,
+ 0, 2, FALSE, pool));
+
+ /* 'Refresh' the lock, claiming to have r1... should fail. */
+ err = svn_fs_lock(&mylock, fs, mylock->path,
+ mylock->token, mylock->comment, 0,
+ apr_time_now() + apr_time_from_sec(50),
+ 1,
+ TRUE /* FORCE STEAL */,
+ pool);
+ if (! err)
+ return svn_error_create
+ (SVN_ERR_TEST_FAILED, NULL,
+ "Uhoh, able to refresh a lock on an out-of-date file.");
+ svn_error_clear(err);
+
+ return SVN_NO_ERROR;
+}
+
+struct lock_result_t {
+ const svn_lock_t *lock;
+ svn_error_t *fs_err;
+};
+
+static svn_error_t *
+expect_lock(const char *path,
+ apr_hash_t *results,
+ svn_fs_t *fs,
+ apr_pool_t *scratch_pool)
+{
+ svn_lock_t *lock;
+ struct lock_result_t *result = svn_hash_gets(results, path);
+
+ SVN_TEST_ASSERT(result && result->lock && !result->fs_err);
+ SVN_ERR(svn_fs_get_lock(&lock, fs, path, scratch_pool));
+ SVN_TEST_ASSERT(lock);
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+expect_error(const char *path,
+ apr_hash_t *results,
+ svn_fs_t *fs,
+ apr_pool_t *scratch_pool)
+{
+ svn_lock_t *lock;
+ struct lock_result_t *result = svn_hash_gets(results, path);
+
+ SVN_TEST_ASSERT(result && !result->lock && result->fs_err);
+ svn_error_clear(result->fs_err);
+ SVN_ERR(svn_fs_get_lock(&lock, fs, path, scratch_pool));
+ SVN_TEST_ASSERT(!lock);
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+expect_unlock(const char *path,
+ apr_hash_t *results,
+ svn_fs_t *fs,
+ apr_pool_t *scratch_pool)
+{
+ svn_lock_t *lock;
+ struct lock_result_t *result = svn_hash_gets(results, path);
+
+ SVN_TEST_ASSERT(result && !result->fs_err);
+ SVN_ERR(svn_fs_get_lock(&lock, fs, path, scratch_pool));
+ SVN_TEST_ASSERT(!lock);
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+expect_unlock_error(const char *path,
+ apr_hash_t *results,
+ svn_fs_t *fs,
+ apr_pool_t *scratch_pool)
+{
+ svn_lock_t *lock;
+ struct lock_result_t *result = svn_hash_gets(results, path);
+
+ SVN_TEST_ASSERT(result && result->fs_err);
+ svn_error_clear(result->fs_err);
+ SVN_ERR(svn_fs_get_lock(&lock, fs, path, scratch_pool));
+ SVN_TEST_ASSERT(lock);
+ return SVN_NO_ERROR;
+}
+
+struct lock_many_baton_t {
+ apr_hash_t *results;
+ apr_pool_t *pool;
+ int count;
+};
+
+/* Implements svn_fs_lock_callback_t. */
+static svn_error_t *
+lock_many_cb(void *lock_baton,
+ const char *path,
+ const svn_lock_t *lock,
+ svn_error_t *fs_err,
+ apr_pool_t *pool)
+{
+ struct lock_many_baton_t *b = lock_baton;
+ struct lock_result_t *result = apr_palloc(b->pool,
+ sizeof(struct lock_result_t));
+
+ result->lock = lock;
+ result->fs_err = svn_error_dup(fs_err);
+ svn_hash_sets(b->results, apr_pstrdup(b->pool, path), result);
+
+ if (b->count)
+ if (!--(b->count))
+ return svn_error_create(SVN_ERR_FS_GENERAL, NULL, "lock_many_cb");
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+lock_multiple_paths(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *root, *txn_root;
+ const char *conflict;
+ svn_revnum_t newrev;
+ svn_fs_access_t *access;
+ svn_fs_lock_target_t *target;
+ struct lock_many_baton_t baton;
+ apr_hash_t *lock_paths, *unlock_paths;
+ apr_hash_index_t *hi;
+
+ SVN_ERR(create_greek_fs(&fs, &newrev, "test-lock-multiple-paths",
+ opts, pool));
+ SVN_ERR(svn_fs_create_access(&access, "bubba", pool));
+ SVN_ERR(svn_fs_set_access(fs, access));
+ SVN_ERR(svn_fs_revision_root(&root, fs, newrev, pool));
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, newrev, SVN_FS_TXN_CHECK_LOCKS, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "/A/BB", pool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "/A/BBB", pool));
+ SVN_ERR(svn_fs_copy(root, "/A/mu", txn_root, "/A/BB/mu", pool));
+ SVN_ERR(svn_fs_copy(root, "/A/mu", txn_root, "/A/BBB/mu", pool));
+ SVN_ERR(svn_fs_commit_txn(&conflict, &newrev, txn, pool));
+
+ baton.results = apr_hash_make(pool);
+ baton.pool = pool;
+ baton.count = 0;
+ lock_paths = apr_hash_make(pool);
+ unlock_paths = apr_hash_make(pool);
+ target = svn_fs_lock_target_create(NULL, newrev, pool);
+
+ svn_hash_sets(lock_paths, "/A/B/E/alpha", target);
+ svn_hash_sets(lock_paths, "/A/B/E/beta", target);
+ svn_hash_sets(lock_paths, "/A/B/E/zulu", target);
+ svn_hash_sets(lock_paths, "/A/BB/mu", target);
+ svn_hash_sets(lock_paths, "/A/BBB/mu", target);
+ svn_hash_sets(lock_paths, "/A/D/G/pi", target);
+ svn_hash_sets(lock_paths, "/A/D/G/rho", target);
+ svn_hash_sets(lock_paths, "/A/mu", target);
+ svn_hash_sets(lock_paths, "/X/zulu", target);
+
+ /* Lock some paths. */
+ apr_hash_clear(baton.results);
+ SVN_ERR(svn_fs_lock_many(fs, lock_paths, "comment", 0, 0, 0,
+ lock_many_cb, &baton,
+ pool, pool));
+
+ SVN_ERR(expect_lock("/A/B/E/alpha", baton.results, fs, pool));
+ SVN_ERR(expect_lock("/A/B/E/beta", baton.results, fs, pool));
+ SVN_ERR(expect_error("/A/B/E/zulu", baton.results, fs, pool));
+ SVN_ERR(expect_lock("/A/BB/mu", baton.results, fs, pool));
+ SVN_ERR(expect_lock("/A/BBB/mu", baton.results, fs, pool));
+ SVN_ERR(expect_lock("/A/D/G/pi", baton.results, fs, pool));
+ SVN_ERR(expect_lock("/A/D/G/rho", baton.results, fs, pool));
+ SVN_ERR(expect_lock("/A/mu", baton.results, fs, pool));
+ SVN_ERR(expect_error("/X/zulu", baton.results, fs, pool));
+
+ /* Unlock without force and wrong tokens. */
+ for (hi = apr_hash_first(pool, lock_paths); hi; hi = apr_hash_next(hi))
+ svn_hash_sets(unlock_paths, apr_hash_this_key(hi), "wrong-token");
+ apr_hash_clear(baton.results);
+ SVN_ERR(svn_fs_unlock_many(fs, unlock_paths, FALSE, lock_many_cb, &baton,
+ pool, pool));
+
+ SVN_ERR(expect_unlock_error("/A/B/E/alpha", baton.results, fs, pool));
+ SVN_ERR(expect_unlock_error("/A/B/E/beta", baton.results, fs, pool));
+ SVN_ERR(expect_error("/A/B/E/zulu", baton.results, fs, pool));
+ SVN_ERR(expect_unlock_error("/A/BB/mu", baton.results, fs, pool));
+ SVN_ERR(expect_unlock_error("/A/BBB/mu", baton.results, fs, pool));
+ SVN_ERR(expect_unlock_error("/A/D/G/pi", baton.results, fs, pool));
+ SVN_ERR(expect_unlock_error("/A/D/G/rho", baton.results, fs, pool));
+ SVN_ERR(expect_unlock_error("/A/mu", baton.results, fs, pool));
+ SVN_ERR(expect_error("/X/zulu", baton.results, fs, pool));
+
+ /* Force unlock. */
+ for (hi = apr_hash_first(pool, lock_paths); hi; hi = apr_hash_next(hi))
+ svn_hash_sets(unlock_paths, apr_hash_this_key(hi), "");
+ apr_hash_clear(baton.results);
+ SVN_ERR(svn_fs_unlock_many(fs, unlock_paths, TRUE, lock_many_cb, &baton,
+ pool, pool));
+
+ SVN_ERR(expect_unlock("/A/B/E/alpha", baton.results, fs, pool));
+ SVN_ERR(expect_unlock("/A/B/E/beta", baton.results, fs, pool));
+ SVN_ERR(expect_error("/A/B/E/zulu", baton.results, fs, pool));
+ SVN_ERR(expect_unlock("/A/BB/mu", baton.results, fs, pool));
+ SVN_ERR(expect_unlock("/A/BBB/mu", baton.results, fs, pool));
+ SVN_ERR(expect_unlock("/A/D/G/pi", baton.results, fs, pool));
+ SVN_ERR(expect_unlock("/A/D/G/rho", baton.results, fs, pool));
+ SVN_ERR(expect_unlock("/A/mu", baton.results, fs, pool));
+ SVN_ERR(expect_error("/X/zulu", baton.results, fs, pool));
+
+ /* Lock again. */
+ apr_hash_clear(baton.results);
+ SVN_ERR(svn_fs_lock_many(fs, lock_paths, "comment", 0, 0, 0,
+ lock_many_cb, &baton,
+ pool, pool));
+
+ SVN_ERR(expect_lock("/A/B/E/alpha", baton.results, fs, pool));
+ SVN_ERR(expect_lock("/A/B/E/beta", baton.results, fs, pool));
+ SVN_ERR(expect_error("/A/B/E/zulu", baton.results, fs, pool));
+ SVN_ERR(expect_lock("/A/BB/mu", baton.results, fs, pool));
+ SVN_ERR(expect_lock("/A/BBB/mu", baton.results, fs, pool));
+ SVN_ERR(expect_lock("/A/D/G/pi", baton.results, fs, pool));
+ SVN_ERR(expect_lock("/A/D/G/rho", baton.results, fs, pool));
+ SVN_ERR(expect_lock("/A/mu", baton.results, fs, pool));
+ SVN_ERR(expect_error("/X/zulu", baton.results, fs, pool));
+
+ /* Unlock without force. */
+ for (hi = apr_hash_first(pool, baton.results); hi; hi = apr_hash_next(hi))
+ {
+ struct lock_result_t *result = apr_hash_this_val(hi);
+ svn_hash_sets(unlock_paths, apr_hash_this_key(hi),
+ result->lock ? result->lock->token : "non-existent-token");
+ }
+ apr_hash_clear(baton.results);
+ SVN_ERR(svn_fs_unlock_many(fs, unlock_paths, FALSE, lock_many_cb, &baton,
+ pool, pool));
+
+ SVN_ERR(expect_unlock("/A/B/E/alpha", baton.results, fs, pool));
+ SVN_ERR(expect_unlock("/A/B/E/beta", baton.results, fs, pool));
+ SVN_ERR(expect_error("/A/B/E/zulu", baton.results, fs, pool));
+ SVN_ERR(expect_unlock("/A/BB/mu", baton.results, fs, pool));
+ SVN_ERR(expect_unlock("/A/BBB/mu", baton.results, fs, pool));
+ SVN_ERR(expect_unlock("/A/D/G/pi", baton.results, fs, pool));
+ SVN_ERR(expect_unlock("/A/D/G/rho", baton.results, fs, pool));
+ SVN_ERR(expect_unlock("/A/mu", baton.results, fs, pool));
+ SVN_ERR(expect_error("/X/zulu", baton.results, fs, pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+lock_cb_error(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_revnum_t newrev;
+ svn_fs_access_t *access;
+ svn_fs_lock_target_t *target;
+ struct lock_many_baton_t baton;
+ apr_hash_t *lock_paths, *unlock_paths;
+ svn_lock_t *lock;
+
+ SVN_ERR(create_greek_fs(&fs, &newrev, "test-lock-cb-error", opts, pool));
+ SVN_ERR(svn_fs_create_access(&access, "bubba", pool));
+ SVN_ERR(svn_fs_set_access(fs, access));
+
+ baton.results = apr_hash_make(pool);
+ baton.pool = pool;
+ baton.count = 1;
+ lock_paths = apr_hash_make(pool);
+ unlock_paths = apr_hash_make(pool);
+ target = svn_fs_lock_target_create(NULL, newrev, pool);
+
+ svn_hash_sets(lock_paths, "/A/B/E/alpha", target);
+ svn_hash_sets(lock_paths, "/A/B/E/beta", target);
+
+ apr_hash_clear(baton.results);
+ SVN_TEST_ASSERT_ERROR(svn_fs_lock_many(fs, lock_paths, "comment", 0, 0, 0,
+ lock_many_cb, &baton,
+ pool, pool),
+ SVN_ERR_FS_GENERAL);
+
+ SVN_TEST_ASSERT(apr_hash_count(baton.results) == 1);
+ SVN_TEST_ASSERT(svn_hash_gets(baton.results, "/A/B/E/alpha")
+ || svn_hash_gets(baton.results, "/A/B/E/beta"));
+ SVN_ERR(svn_fs_get_lock(&lock, fs, "/A/B/E/alpha", pool));
+ SVN_TEST_ASSERT(lock);
+ svn_hash_sets(unlock_paths, "/A/B/E/alpha", lock->token);
+ SVN_ERR(svn_fs_get_lock(&lock, fs, "/A/B/E/beta", pool));
+ SVN_TEST_ASSERT(lock);
+ svn_hash_sets(unlock_paths, "/A/B/E/beta", lock->token);
+
+ baton.count = 1;
+ apr_hash_clear(baton.results);
+ SVN_TEST_ASSERT_ERROR(svn_fs_unlock_many(fs, unlock_paths, FALSE,
+ lock_many_cb, &baton,
+ pool, pool),
+ SVN_ERR_FS_GENERAL);
+
+ SVN_TEST_ASSERT(apr_hash_count(baton.results) == 1);
+ SVN_TEST_ASSERT(svn_hash_gets(baton.results, "/A/B/E/alpha")
+ || svn_hash_gets(baton.results, "/A/B/E/beta"));
+
+ SVN_ERR(svn_fs_get_lock(&lock, fs, "/A/B/E/alpha", pool));
+ SVN_TEST_ASSERT(!lock);
+ SVN_ERR(svn_fs_get_lock(&lock, fs, "/A/B/E/beta", pool));
+ SVN_TEST_ASSERT(!lock);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+obtain_write_lock_failure(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_revnum_t newrev;
+ svn_fs_access_t *access;
+ svn_fs_lock_target_t *target;
+ struct lock_many_baton_t baton;
+ apr_hash_t *lock_paths, *unlock_paths;
+
+ /* The test makes sense only for FSFS. */
+ if (strcmp(opts->fs_type, SVN_FS_TYPE_FSFS) != 0
+ && strcmp(opts->fs_type, SVN_FS_TYPE_FSX) != 0)
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "this will test FSFS/FSX repositories only");
+
+ SVN_ERR(create_greek_fs(&fs, &newrev, "test-obtain-write-lock-failure",
+ opts, pool));
+ SVN_ERR(svn_fs_create_access(&access, "bubba", pool));
+ SVN_ERR(svn_fs_set_access(fs, access));
+
+ /* Make a read only 'write-lock' file. This prevents any write operations
+ from being executed. */
+ SVN_ERR(svn_io_set_file_read_only("test-obtain-write-lock-failure/write-lock",
+ FALSE, pool));
+
+ baton.results = apr_hash_make(pool);
+ baton.pool = pool;
+ baton.count = 0;
+
+ /* Trying to lock some paths. We don't really care about error; the test
+ shouldn't crash. */
+ target = svn_fs_lock_target_create(NULL, newrev, pool);
+ lock_paths = apr_hash_make(pool);
+ svn_hash_sets(lock_paths, "/iota", target);
+ svn_hash_sets(lock_paths, "/A/mu", target);
+
+ apr_hash_clear(baton.results);
+ SVN_TEST_ASSERT_ANY_ERROR(svn_fs_lock_many(fs, lock_paths, "comment", 0, 0, 0,
+ lock_many_cb, &baton, pool, pool));
+
+ /* Trying to unlock some paths. We don't really care about error; the test
+ shouldn't crash. */
+ unlock_paths = apr_hash_make(pool);
+ svn_hash_sets(unlock_paths, "/iota", "");
+ svn_hash_sets(unlock_paths, "/A/mu", "");
+
+ apr_hash_clear(baton.results);
+ SVN_TEST_ASSERT_ANY_ERROR(svn_fs_unlock_many(fs, unlock_paths, TRUE,
+ lock_many_cb, &baton, pool,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+parent_and_child_lock(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_access_t *access;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *root;
+ const char *conflict;
+ svn_revnum_t newrev;
+ svn_lock_t *lock;
+ struct get_locks_baton_t *get_locks_baton;
+ apr_size_t num_expected_paths;
+
+ SVN_ERR(svn_test__create_fs(&fs, "test-parent-and-child-lock", opts, pool));
+ SVN_ERR(svn_fs_create_access(&access, "bubba", pool));
+ SVN_ERR(svn_fs_set_access(fs, access));
+
+ /* Make a file '/A'. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+ SVN_ERR(svn_fs_make_file(root, "/A", pool));
+ SVN_ERR(svn_fs_commit_txn(&conflict, &newrev, txn, pool));
+
+ /* Obtain a lock on '/A'. */
+ SVN_ERR(svn_fs_lock(&lock, fs, "/A", NULL, NULL, FALSE, 0, newrev, FALSE,
+ pool));
+
+ /* Add a lock token to FS access context. */
+ SVN_ERR(svn_fs_access_add_lock_token(access, lock->token));
+
+ /* Make some weird change: replace file '/A' by a directory with a
+ child. Issue 2507 means that the result is that the directory /A
+ remains locked. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, newrev, pool));
+ SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+ SVN_ERR(svn_fs_delete(root, "/A", pool));
+ SVN_ERR(svn_fs_make_dir(root, "/A", pool));
+ SVN_ERR(svn_fs_make_file(root, "/A/b", pool));
+ SVN_ERR(svn_fs_commit_txn(&conflict, &newrev, txn, pool));
+
+ /* Obtain a lock on '/A/b'. Issue 2507 means that the lock index
+ for / refers to both /A and /A/b, and that the lock index for /A
+ refers to /A/b. */
+ SVN_ERR(svn_fs_lock(&lock, fs, "/A/b", NULL, NULL, FALSE, 0, newrev, FALSE,
+ pool));
+
+ /* Verify the locked paths. The lock for /A/b should not be reported
+ twice even though issue 2507 means we access the index for / and
+ the index for /A both of which refer to /A/b. */
+ {
+ static const char *expected_paths[] = {
+ "/A",
+ "/A/b",
+ };
+ num_expected_paths = sizeof(expected_paths) / sizeof(const char *);
+ get_locks_baton = make_get_locks_baton(pool);
+ SVN_ERR(svn_fs_get_locks(fs, "/", get_locks_callback,
+ get_locks_baton, pool));
+ SVN_ERR(verify_matching_lock_paths(get_locks_baton, expected_paths,
+ num_expected_paths, pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* ------------------------------------------------------------------------ */
+
+/* The test table. */
+
+static int max_threads = 2;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_OPTS_PASS(lock_expiration,
+ "test that locks can expire"),
+ SVN_TEST_OPTS_PASS(lock_only,
+ "lock only"),
+ SVN_TEST_OPTS_PASS(lookup_lock_by_path,
+ "lookup lock by path"),
+ SVN_TEST_OPTS_PASS(attach_lock,
+ "attach lock"),
+ SVN_TEST_OPTS_PASS(get_locks,
+ "get locks"),
+ SVN_TEST_OPTS_PASS(basic_lock,
+ "basic locking"),
+ SVN_TEST_OPTS_PASS(lock_credentials,
+ "test that locking requires proper credentials"),
+ SVN_TEST_OPTS_PASS(final_lock_check,
+ "test that locking is enforced in final commit step"),
+ SVN_TEST_OPTS_PASS(lock_dir_propchange,
+ "dir propchange can be committed with locked child"),
+ SVN_TEST_OPTS_PASS(lock_break_steal_refresh,
+ "breaking, stealing, refreshing a lock"),
+ SVN_TEST_OPTS_PASS(lock_out_of_date,
+ "check out-of-dateness before locking"),
+ SVN_TEST_OPTS_PASS(lock_multiple_paths,
+ "lock multiple paths"),
+ SVN_TEST_OPTS_PASS(lock_cb_error,
+ "lock callback error"),
+ SVN_TEST_OPTS_PASS(obtain_write_lock_failure,
+ "lock/unlock when 'write-lock' couldn't be obtained"),
+ SVN_TEST_OPTS_PASS(parent_and_child_lock,
+ "lock parent and its child"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_fs_base/changes-test.c b/subversion/tests/libsvn_fs_base/changes-test.c
new file mode 100644
index 0000000..a637203
--- /dev/null
+++ b/subversion/tests/libsvn_fs_base/changes-test.c
@@ -0,0 +1,926 @@
+/* changes-test.c --- test `changes' interfaces
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdlib.h>
+#include <stdarg.h>
+#include <string.h>
+#include <stdio.h>
+
+#include <apr.h>
+
+#include "../svn_test.h"
+
+#include "svn_pools.h"
+#include "svn_error.h"
+#include "private/svn_skel.h"
+
+#include "../svn_test_fs.h"
+#include "../../libsvn_fs_base/util/fs_skels.h"
+#include "../../libsvn_fs_base/bdb/changes-table.h"
+
+
+
+/* Helper functions/variables. */
+static const char *standard_txns[]
+ = { "0", "1", "2", "3", "4", "5", "6" };
+static const char *standard_changes[19][6]
+ /* KEY PATH NODEREVID KIND TEXT PROP */
+ = { { "0", "/foo", "1.0.0", "add", 0, 0 },
+ { "0", "/foo", "1.0.0", "modify", 0, "1" },
+ { "0", "/bar", "2.0.0", "add", 0, 0 },
+ { "0", "/bar", "2.0.0", "modify", "1", 0 },
+ { "0", "/bar", "2.0.0", "modify", 0, "1" },
+ { "0", "/baz", "3.0.0", "add", 0, 0 },
+ { "0", "/baz", "3.0.0", "modify", "1", 0 },
+ { "1", "/foo", "1.0.1", "modify", "1", 0 },
+ { "2", "/foo", "1.0.2", "modify", 0, "1" },
+ { "2", "/bar", "2.0.2", "modify", "1", 0 },
+ { "3", "/baz", "3.0.3", "modify", "1", 0 },
+ { "4", "/fob", "4.0.4", "add", 0, 0 },
+ { "4", "/fob", "4.0.4", "modify", "1", 0 },
+ { "5", "/baz", "3.0.3", "delete", 0, 0 },
+ { "5", "/baz", "5.0.5", "add", 0, "1" },
+ { "5", "/baz", "5.0.5", "modify", "1", 0 },
+ { "6", "/fob", "4.0.6", "modify", "1", 0 },
+ { "6", "/fob", "4.0.6", "reset", 0, 0 },
+ { "6", "/fob", "4.0.6", "modify", 0, "1" } };
+
+
+static svn_fs_path_change_kind_t string_to_kind(const char *str)
+{
+ if (strcmp(str, "add") == 0)
+ return svn_fs_path_change_add;
+ if (strcmp(str, "delete") == 0)
+ return svn_fs_path_change_delete;
+ if (strcmp(str, "replace") == 0)
+ return svn_fs_path_change_replace;
+ if (strcmp(str, "modify") == 0)
+ return svn_fs_path_change_modify;
+ if (strcmp(str, "reset") == 0)
+ return svn_fs_path_change_reset;
+ return 0;
+}
+
+
+/* Common args structure for several different txn_body_* functions. */
+struct changes_args
+{
+ svn_fs_t *fs;
+ const char *key;
+ change_t *change;
+ apr_array_header_t *raw_changes;
+ apr_hash_t *changes;
+};
+
+
+static svn_error_t *
+txn_body_changes_add(void *baton, trail_t *trail)
+{
+ struct changes_args *b = baton;
+ return svn_fs_bdb__changes_add(b->fs, b->key, b->change,
+ trail, trail->pool);
+}
+
+
+static svn_error_t *
+add_standard_changes(svn_fs_t *fs,
+ apr_pool_t *pool)
+{
+ int i;
+ struct changes_args args;
+ int num_changes = sizeof(standard_changes) / sizeof(const char *) / 6;
+
+ for (i = 0; i < num_changes; i++)
+ {
+ change_t change;
+
+ /* Set up the current change item. */
+ change.path = standard_changes[i][1];
+ change.noderev_id = svn_fs_parse_id(standard_changes[i][2],
+ strlen(standard_changes[i][2]),
+ pool);
+ change.kind = string_to_kind(standard_changes[i][3]);
+ change.text_mod = standard_changes[i][4] ? 1 : 0;
+ change.prop_mod = standard_changes[i][5] ? 1 : 0;
+
+ /* Set up transaction baton. */
+ args.fs = fs;
+ args.key = standard_changes[i][0];
+ args.change = &change;
+
+ /* Write new changes to the changes table. */
+ SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_changes_add, &args,
+ TRUE, pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+txn_body_changes_fetch_raw(void *baton, trail_t *trail)
+{
+ struct changes_args *b = baton;
+ return svn_fs_bdb__changes_fetch_raw(&(b->raw_changes), b->fs, b->key,
+ trail, trail->pool);
+}
+
+
+static svn_error_t *
+txn_body_changes_fetch(void *baton, trail_t *trail)
+{
+ struct changes_args *b = baton;
+ return svn_fs_bdb__changes_fetch(&(b->changes), b->fs, b->key,
+ trail, trail->pool);
+}
+
+
+static svn_error_t *
+txn_body_changes_delete(void *baton, trail_t *trail)
+{
+ struct changes_args *b = baton;
+ return svn_fs_bdb__changes_delete(b->fs, b->key, trail, trail->pool);
+}
+
+
+
+/* The tests. */
+
+static svn_error_t *
+changes_add(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+
+ /* Create a new fs and repos */
+ SVN_ERR(svn_test__create_bdb_fs(&fs, "test-repo-changes-add", opts,
+ pool));
+
+ /* Add the standard slew of changes. */
+ SVN_ERR(add_standard_changes(fs, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+changes_fetch_raw(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ int i;
+ int num_txns = sizeof(standard_txns) / sizeof(const char *);
+ int cur_change_index = 0;
+ struct changes_args args;
+
+ /* Create a new fs and repos */
+ SVN_ERR(svn_test__create_bdb_fs(&fs, "test-repo-changes-fetch-raw", opts,
+ pool));
+
+ /* First, verify that we can request changes for an arbitrary key
+ without error. */
+ args.fs = fs;
+ args.key = "blahbliggityblah";
+ SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_changes_fetch_raw, &args,
+ FALSE, pool));
+ if ((! args.raw_changes) || (args.raw_changes->nelts))
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "expected empty changes array");
+
+ /* Add the standard slew of changes. */
+ SVN_ERR(add_standard_changes(fs, pool));
+
+ /* For each transaction, fetch that transaction's changes, and
+ compare those changes against the standard changes list. Order
+ matters throughout all the changes code, so we shouldn't have to
+ worry about ordering of the arrays. */
+ for (i = 0; i < num_txns; i++)
+ {
+ const char *txn_id = standard_txns[i];
+ int j;
+
+ /* Setup the trail baton. */
+ args.fs = fs;
+ args.key = txn_id;
+
+ /* And get those changes. */
+ SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_changes_fetch_raw,
+ &args, FALSE, pool));
+ if (! args.raw_changes)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "got no changes for key '%s'", txn_id);
+
+ for (j = 0; j < args.raw_changes->nelts; j++)
+ {
+ svn_string_t *noderev_id;
+ svn_fs_path_change_kind_t kind;
+ change_t *change = APR_ARRAY_IDX(args.raw_changes, j, change_t *);
+ int mod_bit = 0;
+
+ /* Verify that the TXN_ID matches. */
+ if (strcmp(standard_changes[cur_change_index][0], txn_id))
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "missing some changes for key '%s'", txn_id);
+
+ /* Verify that the PATH matches. */
+ if (strcmp(standard_changes[cur_change_index][1], change->path))
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "paths differ in change for key '%s'", txn_id);
+
+ /* Verify that the NODE-REV-ID matches. */
+ noderev_id = svn_fs_unparse_id(change->noderev_id, pool);
+ if (strcmp(standard_changes[cur_change_index][2], noderev_id->data))
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "node revision ids differ in change for key '%s'", txn_id);
+
+ /* Verify that the change KIND matches. */
+ kind = string_to_kind(standard_changes[cur_change_index][3]);
+ if (kind != change->kind)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "change kinds differ in change for key '%s'", txn_id);
+
+ /* Verify that the change TEXT-MOD bit matches. */
+ mod_bit = standard_changes[cur_change_index][4] ? 1 : 0;
+ if (mod_bit != change->text_mod)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "change text-mod bits differ in change for key '%s'", txn_id);
+
+ /* Verify that the change PROP-MOD bit matches. */
+ mod_bit = standard_changes[cur_change_index][5] ? 1 : 0;
+ if (mod_bit != change->prop_mod)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "change prop-mod bits differ in change for key '%s'", txn_id);
+
+ cur_change_index++;
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+changes_delete(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ int i;
+ int num_txns = sizeof(standard_txns) / sizeof(const char *);
+ struct changes_args args;
+
+ /* Create a new fs and repos */
+ SVN_ERR(svn_test__create_bdb_fs(&fs, "test-repo-changes-delete", opts,
+ pool));
+
+ /* Add the standard slew of changes. */
+ SVN_ERR(add_standard_changes(fs, pool));
+
+ /* Now, delete all the changes we know about, verifying their removal. */
+ for (i = 0; i < num_txns; i++)
+ {
+ args.fs = fs;
+ args.key = standard_txns[i];
+ SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_changes_delete,
+ &args, FALSE, pool));
+ args.changes = 0;
+ SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_changes_fetch_raw,
+ &args, FALSE, pool));
+ if ((! args.raw_changes) || (args.raw_changes->nelts))
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "expected empty changes array for txn '%s'", args.key);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+static apr_hash_t *
+get_ideal_changes(const char *txn_id,
+ apr_pool_t *pool)
+{
+ apr_hash_t *ideal = apr_hash_make(pool);
+ svn_fs_path_change_t *change;
+ if (strcmp(txn_id, "0") == 0)
+ {
+ change = apr_palloc(pool, sizeof(*change));
+ change->node_rev_id = svn_fs_parse_id("1.0.0", 5, pool);
+ change->change_kind = svn_fs_path_change_add;
+ change->text_mod = 0;
+ change->prop_mod = 1;
+ apr_hash_set(ideal, "/foo", APR_HASH_KEY_STRING, change);
+
+ change = apr_palloc(pool, sizeof(*change));
+ change->node_rev_id = svn_fs_parse_id("2.0.0", 5, pool);
+ change->change_kind = svn_fs_path_change_add;
+ change->text_mod = 1;
+ change->prop_mod = 1;
+ apr_hash_set(ideal, "/bar", APR_HASH_KEY_STRING, change);
+
+ change = apr_palloc(pool, sizeof(*change));
+ change->node_rev_id = svn_fs_parse_id("3.0.0", 5, pool);
+ change->change_kind = svn_fs_path_change_add;
+ change->text_mod = 1;
+ change->prop_mod = 0;
+ apr_hash_set(ideal, "/baz", APR_HASH_KEY_STRING, change);
+ }
+ if (strcmp(txn_id, "1") == 0)
+ {
+ change = apr_palloc(pool, sizeof(*change));
+ change->node_rev_id = svn_fs_parse_id("1.0.1", 5, pool);
+ change->change_kind = svn_fs_path_change_modify;
+ change->text_mod = 1;
+ change->prop_mod = 0;
+ apr_hash_set(ideal, "/foo", APR_HASH_KEY_STRING, change);
+ }
+ if (strcmp(txn_id, "2") == 0)
+ {
+ change = apr_palloc(pool, sizeof(*change));
+ change->node_rev_id = svn_fs_parse_id("1.0.2", 5, pool);
+ change->change_kind = svn_fs_path_change_modify;
+ change->text_mod = 0;
+ change->prop_mod = 1;
+ apr_hash_set(ideal, "/foo", APR_HASH_KEY_STRING, change);
+
+ change = apr_palloc(pool, sizeof(*change));
+ change->node_rev_id = svn_fs_parse_id("2.0.2", 5, pool);
+ change->change_kind = svn_fs_path_change_modify;
+ change->text_mod = 1;
+ change->prop_mod = 0;
+ apr_hash_set(ideal, "/bar", APR_HASH_KEY_STRING, change);
+ }
+ if (strcmp(txn_id, "3") == 0)
+ {
+ change = apr_palloc(pool, sizeof(*change));
+ change->node_rev_id = svn_fs_parse_id("3.0.3", 5, pool);
+ change->change_kind = svn_fs_path_change_modify;
+ change->text_mod = 1;
+ change->prop_mod = 0;
+ apr_hash_set(ideal, "/baz", APR_HASH_KEY_STRING, change);
+ }
+ if (strcmp(txn_id, "4") == 0)
+ {
+ change = apr_palloc(pool, sizeof(*change));
+ change->node_rev_id = svn_fs_parse_id("4.0.4", 5, pool);
+ change->change_kind = svn_fs_path_change_add;
+ change->text_mod = 1;
+ change->prop_mod = 0;
+ apr_hash_set(ideal, "/fob", APR_HASH_KEY_STRING, change);
+ }
+ if (strcmp(txn_id, "5") == 0)
+ {
+ change = apr_palloc(pool, sizeof(*change));
+ change->node_rev_id = svn_fs_parse_id("5.0.5", 5, pool);
+ change->change_kind = svn_fs_path_change_replace;
+ change->text_mod = 1;
+ change->prop_mod = 1;
+ apr_hash_set(ideal, "/baz", APR_HASH_KEY_STRING, change);
+ }
+ if (strcmp(txn_id, "6") == 0)
+ {
+ change = apr_palloc(pool, sizeof(*change));
+ change->node_rev_id = svn_fs_parse_id("4.0.6", 5, pool);
+ change->change_kind = svn_fs_path_change_modify;
+ change->text_mod = 0;
+ change->prop_mod = 1;
+ apr_hash_set(ideal, "/fob", APR_HASH_KEY_STRING, change);
+ }
+ return ideal;
+}
+
+
+static svn_error_t *
+compare_changes(apr_hash_t *ideals,
+ apr_hash_t *changes,
+ const svn_test_opts_t *opts,
+ const char *txn_id,
+ apr_pool_t *pool)
+{
+ apr_hash_index_t *hi;
+
+ for (hi = apr_hash_first(pool, ideals); hi; hi = apr_hash_next(hi))
+ {
+ const void *key;
+ void *val;
+ svn_fs_path_change_t *ideal_change, *change;
+ const char *path;
+
+ /* KEY will be the path, VAL the change. */
+ apr_hash_this(hi, &key, NULL, &val);
+ path = (const char *) key;
+ ideal_change = val;
+
+ /* Now get the change that refers to PATH in the actual
+ changes hash. */
+ change = apr_hash_get(changes, path, APR_HASH_KEY_STRING);
+ if (! change)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "missing expected change for path '%s' in txn_id '%s'",
+ path, txn_id);
+
+ /* Verify that the NODE-REV-ID matches. */
+ if (svn_fs_compare_ids(change->node_rev_id,
+ ideal_change->node_rev_id))
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "node revision ids differ in change for key '%s'", txn_id);
+
+ /* Verify that the change KIND matches. */
+ if (change->change_kind != ideal_change->change_kind)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "change kinds differ in change for key '%s'", txn_id);
+
+ /* Verify that the change TEXT-MOD bit matches. */
+ if (change->text_mod != ideal_change->text_mod)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "change text-mod bits differ in change for key '%s'", txn_id);
+
+ /* Verify that the change PROP-MOD bit matches. */
+ if (change->prop_mod != ideal_change->prop_mod)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "change prop-mod bits differ in change for key '%s'", txn_id);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Verify compressed-changes fetching: an unknown key must yield an
+ empty (but non-NULL) hash, and each standard transaction's fetched
+ changes must match the precomputed "ideal" changes exactly (same
+ count, then field-by-field via compare_changes()). */
+static svn_error_t *
+changes_fetch(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ int i;
+ int num_txns = sizeof(standard_txns) / sizeof(const char *);
+ struct changes_args args;
+
+ /* Create a new fs and repos */
+ SVN_ERR(svn_test__create_bdb_fs(&fs, "test-repo-changes-fetch", opts,
+ pool));
+
+ /* First, verify that we can request changes for an arbitrary key
+ without error. */
+ args.fs = fs;
+ args.key = "blahbliggityblah";
+ SVN_ERR(svn_fs_base__retry_txn(fs, txn_body_changes_fetch, &args,
+ FALSE, pool));
+ /* A bogus key must produce an empty hash, not a NULL one. */
+ if ((! args.changes) || (apr_hash_count(args.changes)))
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "expected empty changes hash");
+
+ /* Add the standard slew of changes. */
+ SVN_ERR(add_standard_changes(fs, pool));
+
+ /* For each transaction, fetch that transaction's changes, and
+ compare those changes against our ideal compressed changes
+ hash. */
+ for (i = 0; i < num_txns; i++)
+ {
+ const char *txn_id = standard_txns[i];
+ apr_hash_t *ideals;
+
+ /* Get the ideal changes hash. */
+ ideals = get_ideal_changes(txn_id, pool);
+
+ /* Setup the trail baton. */
+ args.fs = fs;
+ args.key = txn_id;
+
+ /* And get those changes via in the internal interface, and
+ verify that they are accurate. */
+ SVN_ERR(svn_fs_base__retry_txn(fs, txn_body_changes_fetch, &args,
+ FALSE, pool));
+ if (! args.changes)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "got no changes for key '%s'", txn_id);
+ /* Cheap size check first; compare_changes() does the deep check. */
+ if (apr_hash_count(ideals) != apr_hash_count(args.changes))
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "unexpected number of changes for key '%s'", txn_id);
+ SVN_ERR(compare_changes(ideals, args.changes, opts, txn_id, pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Verify that fetching a transaction's compressed changes folds away
+ redundant entries: changes made beneath a directory that is later
+ deleted (revision 2) or copy-replaced (revision 4) must not appear,
+ leaving only the top-most delete/replace/add records. */
+static svn_error_t *
+changes_fetch_ordering(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_revnum_t youngest_rev = 0;
+ const char *txn_name;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *rev_root;
+ struct changes_args args;
+ apr_pool_t *subpool = svn_pool_create(pool);
+ apr_hash_index_t *hi;
+
+ /* Create a new fs and repos */
+ SVN_ERR(svn_test__create_bdb_fs
+ (&fs, "test-repo-changes-fetch-ordering", opts,
+ pool));
+
+ /*** REVISION 1: Make some files and dirs. ***/
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ {
+ static svn_test__txn_script_command_t script_entries[] = {
+ { 'a', "dir1", 0 },
+ { 'a', "file1", "This is the file 'file1'.\n" },
+ { 'a', "dir1/file2", "This is the file 'file2'.\n" },
+ { 'a', "dir1/file3", "This is the file 'file3'.\n" },
+ { 'a', "dir1/file4", "This is the file 'file4'.\n" },
+ };
+ SVN_ERR(svn_test__txn_script_exec(txn_root, script_entries, 5, subpool));
+ }
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+
+ /*** REVISION 2: Delete and add some stuff, non-depth-first. ***/
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ /* Don't use subpool, txn_name is used after subpool is cleared */
+ SVN_ERR(svn_fs_txn_name(&txn_name, txn, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ {
+ static svn_test__txn_script_command_t script_entries[] = {
+ { 'd', "file1", "This is the file 'file1'.\n" },
+ { 'd', "dir1/file2", "This is the file 'file2'.\n" },
+ { 'd', "dir1/file3", "This is the file 'file3'.\n" },
+ /* NOTE(review): file5's payload says 'file4' -- looks like an
+ upstream copy/paste; the content appears irrelevant here since
+ the checks below only look at change kinds and paths. */
+ { 'a', "dir1/file5", "This is the file 'file4'.\n" },
+ { 'a', "dir1/dir2", 0 },
+ { 'd', "dir1", 0 },
+ { 'a', "dir3", 0 },
+ };
+ SVN_ERR(svn_test__txn_script_exec(txn_root, script_entries, 7, subpool));
+ }
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+
+ /*** TEST: We should have only three changes, the deletion of 'file1'
+ the deletion of 'dir1', and the addition of 'dir3'. ***/
+ args.fs = fs;
+ args.key = txn_name;
+ SVN_ERR(svn_fs_base__retry_txn(fs, txn_body_changes_fetch, &args,
+ FALSE, subpool));
+ if ((! args.changes) || (apr_hash_count(args.changes) != 3))
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "expected changes");
+ for (hi = apr_hash_first(subpool, args.changes);
+ hi; hi = apr_hash_next(hi))
+ {
+ const void *key;
+ void *val;
+ svn_fs_path_change_t *change;
+
+ /* KEY will be the path, VAL the change. */
+ apr_hash_this(hi, &key, NULL, &val);
+ change = val;
+
+ /* Accept exactly the three expected (kind, path) pairs; anything
+ else means compression left a redundant change behind. */
+ if ((change->change_kind == svn_fs_path_change_add)
+ && (strcmp(key, "/dir3") == 0))
+ ;
+ else if ((change->change_kind == svn_fs_path_change_delete)
+ && ((strcmp(key, "/dir1") == 0)
+ || (strcmp(key, "/file1") == 0)))
+ ;
+ else
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "got wrong changes");
+ }
+
+ /*** REVISION 3: Do the same stuff as in revision 1. ***/
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ {
+ static svn_test__txn_script_command_t script_entries[] = {
+ { 'a', "dir1", 0 },
+ { 'a', "file1", "This is the file 'file1'.\n" },
+ { 'a', "dir1/file2", "This is the file 'file2'.\n" },
+ { 'a', "dir1/file3", "This is the file 'file3'.\n" },
+ { 'a', "dir1/file4", "This is the file 'file4'.\n" },
+ };
+ SVN_ERR(svn_test__txn_script_exec(txn_root, script_entries, 5, subpool));
+ }
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+
+ /*** REVISION 4: Do the same stuff as in revision 2, but use a copy
+ overwrite of the top directory (instead of a delete) to test
+ that the 'replace' change type works, too. (And add 'dir4'
+ instead of 'dir3', since 'dir3' still exists). ***/
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ /* Don't use subpool, txn_name is used after subpool is cleared */
+ SVN_ERR(svn_fs_txn_name(&txn_name, txn, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, 1, subpool));
+ {
+ static svn_test__txn_script_command_t script_entries[] = {
+ { 'd', "file1", "This is the file 'file1'.\n" },
+ { 'd', "dir1/file2", "This is the file 'file2'.\n" },
+ { 'd', "dir1/file3", "This is the file 'file3'.\n" },
+ { 'a', "dir1/file5", "This is the file 'file4'.\n" },
+ { 'a', "dir1/dir2", 0 },
+ };
+ SVN_ERR(svn_test__txn_script_exec(txn_root, script_entries, 5, subpool));
+ /* Copying rev 1's dir1 over the modified dir1 produces a 'replace'. */
+ SVN_ERR(svn_fs_copy(rev_root, "dir1", txn_root, "dir1", subpool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "dir4", subpool));
+ }
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+
+ /*** TEST: We should have only three changes, the deletion of 'file1'
+ the replacement of 'dir1', and the addition of 'dir4'. ***/
+ args.fs = fs;
+ args.key = txn_name;
+ SVN_ERR(svn_fs_base__retry_txn(fs, txn_body_changes_fetch, &args,
+ FALSE, subpool));
+ if ((! args.changes) || (apr_hash_count(args.changes) != 3))
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "expected changes");
+ for (hi = apr_hash_first(subpool, args.changes);
+ hi; hi = apr_hash_next(hi))
+ {
+ const void *key;
+ void *val;
+ svn_fs_path_change_t *change;
+
+ /* KEY will be the path, VAL the change. */
+ apr_hash_this(hi, &key, NULL, &val);
+ change = val;
+
+ if ((change->change_kind == svn_fs_path_change_add)
+ && (strcmp(key, "/dir4") == 0))
+ ;
+ else if ((change->change_kind == svn_fs_path_change_replace)
+ && (strcmp(key, "/dir1") == 0))
+ ;
+ else if ((change->change_kind == svn_fs_path_change_delete)
+ && (strcmp(key, "/file1") == 0))
+ ;
+ else
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "got wrong changes");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Verify that reading back deliberately-invalid change sequences fails
+ with SVN_ERR_FS_CORRUPT. Three bad sequences are exercised, each
+ written raw via txn_body_changes_add and then fetched (compressed)
+ via txn_body_changes_fetch:
+ 1. a path's node-rev-id switches mid-sequence;
+ 2. a non-add change follows a delete of the same path;
+ 3. an add follows a previous non-delete change on the same path. */
+static svn_error_t *
+changes_bad_sequences(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ apr_pool_t *subpool = svn_pool_create(pool);
+ svn_error_t *err;
+
+ /* Create a new fs and repos */
+ SVN_ERR(svn_test__create_bdb_fs
+ (&fs, "test-repo-changes-bad-sequences", opts,
+ pool));
+
+ /* Test changes bogus because a path's node-rev-ID changed
+ unexpectedly. */
+ svn_pool_clear(subpool);
+ {
+ /* TEXT/PROP columns: any non-NULL string means "bit set". */
+ static const char *bogus_changes[][6]
+ /* KEY PATH NODEREVID KIND TEXT PROP */
+ = { { "x", "/foo", "1.0.0", "add", 0 , 0 },
+ { "x", "/foo", "1.0.0", "modify", 0 , "1" },
+ { "x", "/foo", "2.0.0", "modify", "1", "1" } };
+ int num_changes = sizeof(bogus_changes) / sizeof(const char *) / 6;
+ struct changes_args args;
+ int i;
+
+ for (i = 0; i < num_changes; i++)
+ {
+ change_t change;
+
+ /* Set up the current change item. */
+ change.path = bogus_changes[i][1];
+ change.noderev_id = svn_fs_parse_id(bogus_changes[i][2],
+ strlen(bogus_changes[i][2]),
+ subpool);
+ change.kind = string_to_kind(bogus_changes[i][3]);
+ change.text_mod = bogus_changes[i][4] ? 1 : 0;
+ change.prop_mod = bogus_changes[i][5] ? 1 : 0;
+
+ /* Set up transaction baton. */
+ args.fs = fs;
+ args.key = "x";
+ args.change = &change;
+
+ /* Write new changes to the changes table. */
+ SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_changes_add, &args,
+ TRUE, subpool));
+ }
+
+ /* Now read 'em back, looking for an error. */
+ args.fs = fs;
+ args.key = "x";
+ err = svn_fs_base__retry_txn(args.fs, txn_body_changes_fetch, &args,
+ TRUE, subpool);
+ if (!err)
+ {
+ return svn_error_create(SVN_ERR_TEST_FAILED, 0,
+ "Expected SVN_ERR_FS_CORRUPT, got no error.");
+ }
+ else if (err->apr_err != SVN_ERR_FS_CORRUPT)
+ {
+ return svn_error_create(SVN_ERR_TEST_FAILED, err,
+ "Expected SVN_ERR_FS_CORRUPT, got a different error.");
+ }
+ else
+ {
+ svn_error_clear(err);
+ }
+
+ /* Post-test cleanup. */
+ SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_changes_delete, &args,
+ TRUE, subpool));
+ }
+
+ /* Test changes bogus because there's a change other than an
+ add-type changes on a deleted path. */
+ svn_pool_clear(subpool);
+ {
+ static const char *bogus_changes[][6]
+ /* KEY PATH NODEREVID KIND TEXT PROP */
+ = { { "x", "/foo", "1.0.0", "delete", 0 , 0 },
+ { "x", "/foo", "1.0.0", "modify", "1", 0 } };
+ int num_changes = sizeof(bogus_changes) / sizeof(const char *) / 6;
+ struct changes_args args;
+ int i;
+
+ for (i = 0; i < num_changes; i++)
+ {
+ change_t change;
+
+ /* Set up the current change item. */
+ change.path = bogus_changes[i][1];
+ change.noderev_id = svn_fs_parse_id(bogus_changes[i][2],
+ strlen(bogus_changes[i][2]),
+ subpool);
+ change.kind = string_to_kind(bogus_changes[i][3]);
+ change.text_mod = bogus_changes[i][4] ? 1 : 0;
+ change.prop_mod = bogus_changes[i][5] ? 1 : 0;
+
+ /* Set up transaction baton. */
+ args.fs = fs;
+ args.key = "x";
+ args.change = &change;
+
+ /* Write new changes to the changes table. */
+ SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_changes_add, &args,
+ TRUE, subpool));
+ }
+
+ /* Now read 'em back, looking for an error. */
+ args.fs = fs;
+ args.key = "x";
+ err = svn_fs_base__retry_txn(args.fs, txn_body_changes_fetch, &args,
+ TRUE, subpool);
+ if (!err)
+ {
+ return svn_error_create(SVN_ERR_TEST_FAILED, 0,
+ "Expected SVN_ERR_FS_CORRUPT, got no error.");
+ }
+ else if (err->apr_err != SVN_ERR_FS_CORRUPT)
+ {
+ return svn_error_create(SVN_ERR_TEST_FAILED, err,
+ "Expected SVN_ERR_FS_CORRUPT, got a different error.");
+ }
+ else
+ {
+ svn_error_clear(err);
+ }
+
+ /* Post-test cleanup. */
+ SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_changes_delete, &args,
+ TRUE, subpool));
+ }
+
+ /* Test changes bogus because there's an add on a path that's got
+ previous non-delete changes on it. */
+ svn_pool_clear(subpool);
+ {
+ static const char *bogus_changes[][6]
+ /* KEY PATH NODEREVID KIND TEXT PROP */
+ = { { "x", "/foo", "1.0.0", "modify", "1", 0 },
+ { "x", "/foo", "1.0.0", "add", "1", 0 } };
+ int num_changes = sizeof(bogus_changes) / sizeof(const char *) / 6;
+ struct changes_args args;
+ int i;
+
+ for (i = 0; i < num_changes; i++)
+ {
+ change_t change;
+
+ /* Set up the current change item. */
+ change.path = bogus_changes[i][1];
+ change.noderev_id = svn_fs_parse_id(bogus_changes[i][2],
+ strlen(bogus_changes[i][2]),
+ subpool);
+ change.kind = string_to_kind(bogus_changes[i][3]);
+ change.text_mod = bogus_changes[i][4] ? 1 : 0;
+ change.prop_mod = bogus_changes[i][5] ? 1 : 0;
+
+ /* Set up transaction baton. */
+ args.fs = fs;
+ args.key = "x";
+ args.change = &change;
+
+ /* Write new changes to the changes table. */
+ SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_changes_add, &args,
+ TRUE, subpool));
+ }
+
+ /* Now read 'em back, looking for an error. */
+ args.fs = fs;
+ args.key = "x";
+ err = svn_fs_base__retry_txn(args.fs, txn_body_changes_fetch, &args,
+ TRUE, subpool);
+ if (!err)
+ {
+ return svn_error_create(SVN_ERR_TEST_FAILED, 0,
+ "Expected SVN_ERR_FS_CORRUPT, got no error.");
+ }
+ else if (err->apr_err != SVN_ERR_FS_CORRUPT)
+ {
+ return svn_error_create(SVN_ERR_TEST_FAILED, err,
+ "Expected SVN_ERR_FS_CORRUPT, got a different error.");
+ }
+ else
+ {
+ svn_error_clear(err);
+ }
+
+ /* Post-test cleanup. */
+ SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_changes_delete, &args,
+ TRUE, subpool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* The test table. */
+
+/* NOTE(review): consumed by the test harness via SVN_TEST_MAIN;
+ presumably caps parallel test execution -- confirm in the harness. */
+static int max_threads = 4;
+
+/* Tests exported to the harness: each entry pairs a test function with
+ its description; the list is delimited by SVN_TEST_NULL sentinels. */
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_OPTS_PASS(changes_add,
+ "add changes to the changes table"),
+ SVN_TEST_OPTS_PASS(changes_fetch_raw,
+ "fetch raw changes from the changes table"),
+ SVN_TEST_OPTS_PASS(changes_delete,
+ "delete changes from the changes table"),
+ SVN_TEST_OPTS_PASS(changes_fetch,
+ "fetch compressed changes from the changes table"),
+ SVN_TEST_OPTS_PASS(changes_fetch_ordering,
+ "verify ordered-ness of fetched compressed changes"),
+ SVN_TEST_OPTS_PASS(changes_bad_sequences,
+ "verify that bad change sequences raise errors"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_fs_base/fs-base-test.c b/subversion/tests/libsvn_fs_base/fs-base-test.c
new file mode 100644
index 0000000..20d4d63
--- /dev/null
+++ b/subversion/tests/libsvn_fs_base/fs-base-test.c
@@ -0,0 +1,1552 @@
+/* fs-base-test.c --- tests for the filesystem
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdlib.h>
+#include <string.h>
+#include <apr_pools.h>
+
+#include "../svn_test.h"
+
+#include "svn_pools.h"
+#include "svn_time.h"
+#include "svn_string.h"
+#include "svn_fs.h"
+
+#include "../svn_test_fs.h"
+
+#include "../../libsvn_fs_base/id.h"
+#include "../../libsvn_fs_base/trail.h"
+#include "../../libsvn_fs_base/bdb/txn-table.h"
+#include "../../libsvn_fs_base/bdb/nodes-table.h"
+#include "../../libsvn_fs_base/key-gen.h"
+
+#include "private/svn_fs_util.h"
+#include "../../libsvn_delta/delta.h"
+
+/* Point the string *PS at the NUL-terminated C string S and record its
+ length (no copy is made; S is evaluated twice). */
+#define SET_STR(ps, s) ((ps)->data = (s), (ps)->len = strlen(s))
+
+
+/*-----------------------------------------------------------------*/
+
+/** The actual fs-tests called by `make check` **/
+
+/* Create a filesystem. Smoke test: passes iff a fresh BDB-backed
+ repository can be created without error. */
+static svn_error_t *
+create_berkeley_filesystem(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+
+ /* Create and close a repository. */
+ SVN_ERR(svn_test__create_bdb_fs(&fs, "test-repo-create-berkeley", opts,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Generic Berkeley DB error handler function: print ERRPFX (if any,
+ else nothing) followed by MSG to stderr. */
+static void
+berkeley_error_handler(const char *errpfx, char *msg)
+{
+ fprintf(stderr, "%s%s\n", errpfx ? errpfx : "", msg);
+}
+
+
+/* Open an existing filesystem. Creates a repository with one fs
+ object, then re-opens the same on-disk repository through a second,
+ independent fs object and installs a BDB error callback on it. */
+static svn_error_t *
+open_berkeley_filesystem(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs, *fs2;
+
+ /* Create and close a repository (using fs). */
+ SVN_ERR(svn_test__create_bdb_fs(&fs, "test-repo-open-berkeley", opts,
+ pool));
+
+ /* Create a different fs object, and use it to re-open the
+ repository again. */
+ SVN_ERR(svn_test__fs_new(&fs2, pool));
+ SVN_ERR(svn_fs_open_berkeley(fs2, "test-repo-open-berkeley"));
+
+ /* Provide a handler for Berkeley DB error messages. */
+ SVN_ERR(svn_fs_set_berkeley_errcall(fs2, berkeley_error_handler));
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Set *PRESENT to true if entry NAME is present in directory PATH
+ under ROOT, else set *PRESENT to false. *PRESENT is always set on
+ success; errors from listing the directory are propagated. */
+static svn_error_t *
+check_entry(svn_fs_root_t *root,
+ const char *path,
+ const char *name,
+ svn_boolean_t *present,
+ apr_pool_t *pool)
+{
+ apr_hash_t *entries;
+ svn_fs_dirent_t *ent;
+
+ /* Presence == NAME appears as a key in PATH's entries hash. */
+ SVN_ERR(svn_fs_dir_entries(&entries, root, path, pool));
+ ent = apr_hash_get(entries, name, APR_HASH_KEY_STRING);
+
+ if (ent)
+ *present = TRUE;
+ else
+ *present = FALSE;
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Return an error if entry NAME is absent in directory PATH under ROOT.
+ Thin assertion wrapper around check_entry(). */
+static svn_error_t *
+check_entry_present(svn_fs_root_t *root, const char *path,
+ const char *name, apr_pool_t *pool)
+{
+ svn_boolean_t present = FALSE;
+ SVN_ERR(check_entry(root, path, name, &present, pool));
+
+ if (! present)
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "entry \"%s\" absent when it should be present", name);
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Return an error if entry NAME is present in directory PATH under ROOT.
+ Inverse assertion of check_entry_present(). */
+static svn_error_t *
+check_entry_absent(svn_fs_root_t *root, const char *path,
+ const char *name, apr_pool_t *pool)
+{
+ svn_boolean_t present = TRUE;
+ SVN_ERR(check_entry(root, path, name, &present, pool));
+
+ if (present)
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "entry \"%s\" present when it should be absent", name);
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Baton for txn_body_check_id(). */
+struct check_id_args
+{
+ svn_fs_t *fs; /* in: filesystem to query */
+ const svn_fs_id_t *id; /* in: node revision id to look up */
+ svn_boolean_t present; /* out: TRUE iff ID exists in FS */
+};
+
+
+/* Trail body: set BATON->present according to whether BATON->id names
+ an existing node revision in BATON->fs. SVN_ERR_FS_ID_NOT_FOUND is
+ translated to present == FALSE (and cleared); any other lookup error
+ is wrapped and returned. */
+static svn_error_t *
+txn_body_check_id(void *baton, trail_t *trail)
+{
+ struct check_id_args *args = baton;
+ node_revision_t *noderev;
+ svn_error_t *err;
+
+ err = svn_fs_bdb__get_node_revision(&noderev, args->fs, args->id,
+ trail, trail->pool);
+
+ if (err && (err->apr_err == SVN_ERR_FS_ID_NOT_FOUND))
+ args->present = FALSE;
+ else if (! err)
+ args->present = TRUE;
+ else
+ {
+ svn_string_t *id_str = svn_fs_unparse_id(args->id, trail->pool);
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, err,
+ "error looking for node revision id \"%s\"", id_str->data);
+ }
+ /* Clears the ID_NOT_FOUND error (no-op when err is NULL). */
+ svn_error_clear(err);
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Set *PRESENT to true if node revision ID is present in filesystem
+ FS, else set *PRESENT to false. Runs the lookup inside a retryable
+ BDB transaction; args.present is filled in by txn_body_check_id. */
+static svn_error_t *
+check_id(svn_fs_t *fs, const svn_fs_id_t *id, svn_boolean_t *present,
+ apr_pool_t *pool)
+{
+ struct check_id_args args;
+
+ args.id = id;
+ args.fs = fs;
+ SVN_ERR(svn_fs_base__retry_txn(fs, txn_body_check_id, &args, TRUE, pool));
+
+ if (args.present)
+ *present = TRUE;
+ else
+ *present = FALSE;
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Return error if node revision ID is not present in FS.
+ Assertion wrapper around check_id(); the error message includes the
+ unparsed id for diagnostics. */
+static svn_error_t *
+check_id_present(svn_fs_t *fs, const svn_fs_id_t *id, apr_pool_t *pool)
+{
+ svn_boolean_t present = FALSE;
+ SVN_ERR(check_id(fs, id, &present, pool));
+
+ if (! present)
+ {
+ svn_string_t *id_str = svn_fs_unparse_id(id, pool);
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "node revision id \"%s\" absent when should be present",
+ id_str->data);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Return error if node revision ID is present in FS.
+ Inverse assertion of check_id_present(). */
+static svn_error_t *
+check_id_absent(svn_fs_t *fs, const svn_fs_id_t *id, apr_pool_t *pool)
+{
+ svn_boolean_t present = TRUE;
+ SVN_ERR(check_id(fs, id, &present, pool));
+
+ if (present)
+ {
+ svn_string_t *id_str = svn_fs_unparse_id(id, pool);
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "node revision id \"%s\" present when should be absent",
+ id_str->data);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Test that aborting a Subversion transaction works.
+
+ Builds Greek trees in two open txns, aborts one, and checks that
+ only the aborted txn's node revisions vanish, that the aborted txn
+ cannot be re-opened, that txn names are not recycled, and that
+ aborting an already-committed txn fails.
+
+ NOTE: This function tests internal filesystem interfaces, not just
+ the public filesystem interface. */
+static svn_error_t *
+abort_txn(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn1, *txn2;
+ svn_fs_root_t *txn1_root, *txn2_root;
+ const char *txn1_name, *txn2_name;
+
+ /* Prepare two txns to receive the Greek tree. */
+ SVN_ERR(svn_test__create_bdb_fs(&fs, "test-repo-abort-txn", opts,
+ pool));
+ SVN_ERR(svn_fs_begin_txn(&txn1, fs, 0, pool));
+ SVN_ERR(svn_fs_begin_txn(&txn2, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn1_root, txn1, pool));
+ SVN_ERR(svn_fs_txn_root(&txn2_root, txn2, pool));
+
+ /* Save their names for later. */
+ SVN_ERR(svn_fs_txn_name(&txn1_name, txn1, pool));
+ SVN_ERR(svn_fs_txn_name(&txn2_name, txn2, pool));
+
+ /* Create greek trees in them. */
+ SVN_ERR(svn_test__create_greek_tree(txn1_root, pool));
+ SVN_ERR(svn_test__create_greek_tree(txn2_root, pool));
+
+ /* The test is to abort txn2, while leaving txn1.
+ *
+ * After we abort txn2, we make sure that a) all of its nodes
+ * disappeared from the database, and b) none of txn1's nodes
+ * disappeared.
+ *
+ * Finally, we create a third txn, and check that the name it got is
+ * different from the names of txn1 and txn2.
+ */
+
+ {
+ /* Yes, I really am this paranoid. */
+
+ /* IDs for every file in the standard Greek Tree. */
+ const svn_fs_id_t
+ *t1_root_id, *t2_root_id,
+ *t1_iota_id, *t2_iota_id,
+ *t1_A_id, *t2_A_id,
+ *t1_mu_id, *t2_mu_id,
+ *t1_B_id, *t2_B_id,
+ *t1_lambda_id, *t2_lambda_id,
+ *t1_E_id, *t2_E_id,
+ *t1_alpha_id, *t2_alpha_id,
+ *t1_beta_id, *t2_beta_id,
+ *t1_F_id, *t2_F_id,
+ *t1_C_id, *t2_C_id,
+ *t1_D_id, *t2_D_id,
+ *t1_gamma_id, *t2_gamma_id,
+ *t1_H_id, *t2_H_id,
+ *t1_chi_id, *t2_chi_id,
+ *t1_psi_id, *t2_psi_id,
+ *t1_omega_id, *t2_omega_id,
+ *t1_G_id, *t2_G_id,
+ *t1_pi_id, *t2_pi_id,
+ *t1_rho_id, *t2_rho_id,
+ *t1_tau_id, *t2_tau_id;
+
+ /* Snapshot every node-rev id in both txns BEFORE the abort. */
+ SVN_ERR(svn_fs_node_id(&t1_root_id, txn1_root, "", pool));
+ SVN_ERR(svn_fs_node_id(&t2_root_id, txn2_root, "", pool));
+ SVN_ERR(svn_fs_node_id(&t1_iota_id, txn1_root, "iota", pool));
+ SVN_ERR(svn_fs_node_id(&t2_iota_id, txn2_root, "iota", pool));
+ SVN_ERR(svn_fs_node_id(&t1_A_id, txn1_root, "/A", pool));
+ SVN_ERR(svn_fs_node_id(&t2_A_id, txn2_root, "/A", pool));
+ SVN_ERR(svn_fs_node_id(&t1_mu_id, txn1_root, "/A/mu", pool));
+ SVN_ERR(svn_fs_node_id(&t2_mu_id, txn2_root, "/A/mu", pool));
+ SVN_ERR(svn_fs_node_id(&t1_B_id, txn1_root, "/A/B", pool));
+ SVN_ERR(svn_fs_node_id(&t2_B_id, txn2_root, "/A/B", pool));
+ SVN_ERR(svn_fs_node_id(&t1_lambda_id, txn1_root, "/A/B/lambda", pool));
+ SVN_ERR(svn_fs_node_id(&t2_lambda_id, txn2_root, "/A/B/lambda", pool));
+ SVN_ERR(svn_fs_node_id(&t1_E_id, txn1_root, "/A/B/E", pool));
+ SVN_ERR(svn_fs_node_id(&t2_E_id, txn2_root, "/A/B/E", pool));
+ SVN_ERR(svn_fs_node_id(&t1_alpha_id, txn1_root, "/A/B/E/alpha", pool));
+ SVN_ERR(svn_fs_node_id(&t2_alpha_id, txn2_root, "/A/B/E/alpha", pool));
+ SVN_ERR(svn_fs_node_id(&t1_beta_id, txn1_root, "/A/B/E/beta", pool));
+ SVN_ERR(svn_fs_node_id(&t2_beta_id, txn2_root, "/A/B/E/beta", pool));
+ SVN_ERR(svn_fs_node_id(&t1_F_id, txn1_root, "/A/B/F", pool));
+ SVN_ERR(svn_fs_node_id(&t2_F_id, txn2_root, "/A/B/F", pool));
+ SVN_ERR(svn_fs_node_id(&t1_C_id, txn1_root, "/A/C", pool));
+ SVN_ERR(svn_fs_node_id(&t2_C_id, txn2_root, "/A/C", pool));
+ SVN_ERR(svn_fs_node_id(&t1_D_id, txn1_root, "/A/D", pool));
+ SVN_ERR(svn_fs_node_id(&t2_D_id, txn2_root, "/A/D", pool));
+ SVN_ERR(svn_fs_node_id(&t1_gamma_id, txn1_root, "/A/D/gamma", pool));
+ SVN_ERR(svn_fs_node_id(&t2_gamma_id, txn2_root, "/A/D/gamma", pool));
+ SVN_ERR(svn_fs_node_id(&t1_H_id, txn1_root, "/A/D/H", pool));
+ SVN_ERR(svn_fs_node_id(&t2_H_id, txn2_root, "/A/D/H", pool));
+ SVN_ERR(svn_fs_node_id(&t1_chi_id, txn1_root, "/A/D/H/chi", pool));
+ SVN_ERR(svn_fs_node_id(&t2_chi_id, txn2_root, "/A/D/H/chi", pool));
+ SVN_ERR(svn_fs_node_id(&t1_psi_id, txn1_root, "/A/D/H/psi", pool));
+ SVN_ERR(svn_fs_node_id(&t2_psi_id, txn2_root, "/A/D/H/psi", pool));
+ SVN_ERR(svn_fs_node_id(&t1_omega_id, txn1_root, "/A/D/H/omega", pool));
+ SVN_ERR(svn_fs_node_id(&t2_omega_id, txn2_root, "/A/D/H/omega", pool));
+ SVN_ERR(svn_fs_node_id(&t1_G_id, txn1_root, "/A/D/G", pool));
+ SVN_ERR(svn_fs_node_id(&t2_G_id, txn2_root, "/A/D/G", pool));
+ SVN_ERR(svn_fs_node_id(&t1_pi_id, txn1_root, "/A/D/G/pi", pool));
+ SVN_ERR(svn_fs_node_id(&t2_pi_id, txn2_root, "/A/D/G/pi", pool));
+ SVN_ERR(svn_fs_node_id(&t1_rho_id, txn1_root, "/A/D/G/rho", pool));
+ SVN_ERR(svn_fs_node_id(&t2_rho_id, txn2_root, "/A/D/G/rho", pool));
+ SVN_ERR(svn_fs_node_id(&t1_tau_id, txn1_root, "/A/D/G/tau", pool));
+ SVN_ERR(svn_fs_node_id(&t2_tau_id, txn2_root, "/A/D/G/tau", pool));
+
+ /* Abort just txn2. */
+ SVN_ERR(svn_fs_abort_txn(txn2, pool));
+
+ /* Now test that all the nodes in txn2 at the time of the abort
+ * are gone, but all of the ones in txn1 are still there.
+ */
+
+ /* Check that every node rev in t2 has vanished from the fs. */
+ SVN_ERR(check_id_absent(fs, t2_root_id, pool));
+ SVN_ERR(check_id_absent(fs, t2_iota_id, pool));
+ SVN_ERR(check_id_absent(fs, t2_A_id, pool));
+ SVN_ERR(check_id_absent(fs, t2_mu_id, pool));
+ SVN_ERR(check_id_absent(fs, t2_B_id, pool));
+ SVN_ERR(check_id_absent(fs, t2_lambda_id, pool));
+ SVN_ERR(check_id_absent(fs, t2_E_id, pool));
+ SVN_ERR(check_id_absent(fs, t2_alpha_id, pool));
+ SVN_ERR(check_id_absent(fs, t2_beta_id, pool));
+ SVN_ERR(check_id_absent(fs, t2_F_id, pool));
+ SVN_ERR(check_id_absent(fs, t2_C_id, pool));
+ SVN_ERR(check_id_absent(fs, t2_D_id, pool));
+ SVN_ERR(check_id_absent(fs, t2_gamma_id, pool));
+ SVN_ERR(check_id_absent(fs, t2_H_id, pool));
+ SVN_ERR(check_id_absent(fs, t2_chi_id, pool));
+ SVN_ERR(check_id_absent(fs, t2_psi_id, pool));
+ SVN_ERR(check_id_absent(fs, t2_omega_id, pool));
+ SVN_ERR(check_id_absent(fs, t2_G_id, pool));
+ SVN_ERR(check_id_absent(fs, t2_pi_id, pool));
+ SVN_ERR(check_id_absent(fs, t2_rho_id, pool));
+ SVN_ERR(check_id_absent(fs, t2_tau_id, pool));
+
+ /* Check that every node rev in t1 is still in the fs. */
+ SVN_ERR(check_id_present(fs, t1_root_id, pool));
+ SVN_ERR(check_id_present(fs, t1_iota_id, pool));
+ SVN_ERR(check_id_present(fs, t1_A_id, pool));
+ SVN_ERR(check_id_present(fs, t1_mu_id, pool));
+ SVN_ERR(check_id_present(fs, t1_B_id, pool));
+ SVN_ERR(check_id_present(fs, t1_lambda_id, pool));
+ SVN_ERR(check_id_present(fs, t1_E_id, pool));
+ SVN_ERR(check_id_present(fs, t1_alpha_id, pool));
+ SVN_ERR(check_id_present(fs, t1_beta_id, pool));
+ SVN_ERR(check_id_present(fs, t1_F_id, pool));
+ SVN_ERR(check_id_present(fs, t1_C_id, pool));
+ SVN_ERR(check_id_present(fs, t1_D_id, pool));
+ SVN_ERR(check_id_present(fs, t1_gamma_id, pool));
+ SVN_ERR(check_id_present(fs, t1_H_id, pool));
+ SVN_ERR(check_id_present(fs, t1_chi_id, pool));
+ SVN_ERR(check_id_present(fs, t1_psi_id, pool));
+ SVN_ERR(check_id_present(fs, t1_omega_id, pool));
+ SVN_ERR(check_id_present(fs, t1_G_id, pool));
+ SVN_ERR(check_id_present(fs, t1_pi_id, pool));
+ SVN_ERR(check_id_present(fs, t1_rho_id, pool));
+ SVN_ERR(check_id_present(fs, t1_tau_id, pool));
+ }
+
+ /* Test that txn2 itself is gone, by trying to open it. */
+ {
+ svn_fs_txn_t *txn2_again;
+ svn_error_t *err;
+
+ err = svn_fs_open_txn(&txn2_again, fs, txn2_name, pool);
+ if (err && (err->apr_err != SVN_ERR_FS_NO_SUCH_TRANSACTION))
+ {
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, err,
+ "opening non-existent txn got wrong error");
+ }
+ else if (! err)
+ {
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "opening non-existent txn failed to get error");
+ }
+ svn_error_clear(err);
+ }
+
+ /* Test that txn names are not recycled, by opening a new txn. */
+ {
+ svn_fs_txn_t *txn3;
+ const char *txn3_name;
+
+ SVN_ERR(svn_fs_begin_txn(&txn3, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_name(&txn3_name, txn3, pool));
+
+ if ((strcmp(txn3_name, txn2_name) == 0)
+ || (strcmp(txn3_name, txn1_name) == 0))
+ {
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "txn name \"%s\" was recycled", txn3_name);
+ }
+ }
+
+ /* Test that aborting a txn that's already committed fails. */
+ {
+ svn_fs_txn_t *txn4;
+ const char *txn4_name;
+ svn_revnum_t new_rev;
+ const char *conflict;
+ svn_error_t *err;
+
+ SVN_ERR(svn_fs_begin_txn(&txn4, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_name(&txn4_name, txn4, pool));
+ SVN_ERR(svn_fs_commit_txn(&conflict, &new_rev, txn4, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(new_rev));
+ /* Aborting after commit must fail with TRANSACTION_NOT_MUTABLE. */
+ err = svn_fs_abort_txn(txn4, pool);
+ if (! err)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, NULL,
+ "expected error trying to abort a committed txn; got none");
+ else if (err->apr_err != SVN_ERR_FS_TRANSACTION_NOT_MUTABLE)
+ return svn_error_create
+ (SVN_ERR_FS_GENERAL, err,
+ "got an unexpected error trying to abort a committed txn");
+ else
+ svn_error_clear(err);
+ }
+ return SVN_NO_ERROR;
+}
+
+
+/* This tests deleting of mutable nodes. We build a tree in a
+ * transaction, then try to delete various items in the tree. We
+ * never commit the tree, so every entry being deleted points to a
+ * mutable node.
+ *
+ * ### todo: this test was written before commits worked. It might
+ * now be worthwhile to combine it with delete().
+ */
+static svn_error_t *
+delete_mutables(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ svn_error_t *err;
+
+ /* Prepare a txn to receive the greek tree. */
+ SVN_ERR(svn_test__create_bdb_fs(&fs, "test-repo-del-from-dir", opts,
+ pool));
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Create the greek tree. */
+ SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+
+ /* Baby, it's time to test like you've never tested before. We do
+ * the following, in this order:
+ *
+ * 1. Delete a single file somewhere, succeed.
+ * 2. Delete two files of three, then make sure the third remains.
+ * 3. Delete the third and last file.
+ * 4. Try again to delete the dir, succeed.
+ * 5. Delete one of the natively empty dirs, succeed.
+ * 6. Try to delete root, fail.
+ * 7. Try to delete a top-level file, succeed.
+ *
+ * Specifically, that's:
+ *
+ * 1. Delete A/D/gamma.
+ * 2. Delete A/D/G/pi, A/D/G/rho.
+ * 3. Delete A/D/G/tau.
+ * 4. Try again to delete A/D/G, succeed.
+ * 5. Delete A/C.
+ * 6. Try to delete /, fail.
+ * 7. Try to delete iota, succeed.
+ *
+ * Before and after each deletion or attempted deletion, we probe
+ * the affected directory, to make sure everything is as it should
+ * be.
+ */
+
+ /* 1 */
+ {
+ const svn_fs_id_t *gamma_id;
+ SVN_ERR(svn_fs_node_id(&gamma_id, txn_root, "A/D/gamma", pool));
+
+ SVN_ERR(check_entry_present(txn_root, "A/D", "gamma", pool));
+ SVN_ERR(check_id_present(fs, gamma_id, pool));
+
+ SVN_ERR(svn_fs_delete(txn_root, "A/D/gamma", pool));
+
+ SVN_ERR(check_entry_absent(txn_root, "A/D", "gamma", pool));
+ SVN_ERR(check_id_absent(fs, gamma_id, pool));
+ }
+
+ /* 2 */
+ {
+ const svn_fs_id_t *pi_id, *rho_id, *tau_id;
+ SVN_ERR(svn_fs_node_id(&pi_id, txn_root, "A/D/G/pi", pool));
+ SVN_ERR(svn_fs_node_id(&rho_id, txn_root, "A/D/G/rho", pool));
+ SVN_ERR(svn_fs_node_id(&tau_id, txn_root, "A/D/G/tau", pool));
+
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "pi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "rho", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "tau", pool));
+ SVN_ERR(check_id_present(fs, pi_id, pool));
+ SVN_ERR(check_id_present(fs, rho_id, pool));
+ SVN_ERR(check_id_present(fs, tau_id, pool));
+
+ SVN_ERR(svn_fs_delete(txn_root, "A/D/G/pi", pool));
+
+ SVN_ERR(check_entry_absent(txn_root, "A/D/G", "pi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "rho", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "tau", pool));
+ SVN_ERR(check_id_absent(fs, pi_id, pool));
+ SVN_ERR(check_id_present(fs, rho_id, pool));
+ SVN_ERR(check_id_present(fs, tau_id, pool));
+
+ SVN_ERR(svn_fs_delete(txn_root, "A/D/G/rho", pool));
+
+ SVN_ERR(check_entry_absent(txn_root, "A/D/G", "pi", pool));
+ SVN_ERR(check_entry_absent(txn_root, "A/D/G", "rho", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "tau", pool));
+ SVN_ERR(check_id_absent(fs, pi_id, pool));
+ SVN_ERR(check_id_absent(fs, rho_id, pool));
+ SVN_ERR(check_id_present(fs, tau_id, pool));
+ }
+
+ /* 3 */
+ {
+ const svn_fs_id_t *tau_id;
+ SVN_ERR(svn_fs_node_id(&tau_id, txn_root, "A/D/G/tau", pool));
+
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "tau", pool));
+ SVN_ERR(check_id_present(fs, tau_id, pool));
+
+ SVN_ERR(svn_fs_delete(txn_root, "A/D/G/tau", pool));
+
+ SVN_ERR(check_entry_absent(txn_root, "A/D/G", "tau", pool));
+ SVN_ERR(check_id_absent(fs, tau_id, pool));
+ }
+
+ /* 4 */
+ {
+ const svn_fs_id_t *G_id;
+ SVN_ERR(svn_fs_node_id(&G_id, txn_root, "A/D/G", pool));
+
+ SVN_ERR(check_entry_present(txn_root, "A/D", "G", pool));
+ SVN_ERR(check_id_present(fs, G_id, pool));
+
+ SVN_ERR(svn_fs_delete(txn_root, "A/D/G", pool)); /* succeed */
+
+ SVN_ERR(check_entry_absent(txn_root, "A/D", "G", pool));
+ SVN_ERR(check_id_absent(fs, G_id, pool));
+ }
+
+ /* 5 */
+ {
+ const svn_fs_id_t *C_id;
+ SVN_ERR(svn_fs_node_id(&C_id, txn_root, "A/C", pool));
+
+ SVN_ERR(check_entry_present(txn_root, "A", "C", pool));
+ SVN_ERR(check_id_present(fs, C_id, pool));
+
+ SVN_ERR(svn_fs_delete(txn_root, "A/C", pool));
+
+ SVN_ERR(check_entry_absent(txn_root, "A", "C", pool));
+ SVN_ERR(check_id_absent(fs, C_id, pool));
+ }
+
+ /* 6 */
+ {
+ const svn_fs_id_t *root_id;
+ SVN_ERR(svn_fs_node_id(&root_id, txn_root, "", pool));
+
+ err = svn_fs_delete(txn_root, "", pool);
+
+ if (err && (err->apr_err != SVN_ERR_FS_ROOT_DIR))
+ {
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, err,
+ "deleting root directory got wrong error");
+ }
+ else if (! err)
+ {
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "deleting root directory failed to get error");
+ }
+ svn_error_clear(err);
+
+ SVN_ERR(check_id_present(fs, root_id, pool));
+ }
+
+ /* 7 */
+ {
+ const svn_fs_id_t *iota_id;
+ SVN_ERR(svn_fs_node_id(&iota_id, txn_root, "iota", pool));
+
+ SVN_ERR(check_entry_present(txn_root, "", "iota", pool));
+ SVN_ERR(check_id_present(fs, iota_id, pool));
+
+ SVN_ERR(svn_fs_delete(txn_root, "iota", pool));
+
+ SVN_ERR(check_entry_absent(txn_root, "", "iota", pool));
+ SVN_ERR(check_id_absent(fs, iota_id, pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* This tests deleting in general.
+ *
+ * ### todo: this test was written after (and independently of)
+ * delete_mutables(). It might be worthwhile to combine them.
+ */
+static svn_error_t *
+delete(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ svn_revnum_t new_rev;
+
+ /* This function tests 5 cases:
+ *
+ * 1. Delete mutable file.
+ * 2. Delete mutable directory.
+ * 3. Delete mutable directory with immutable nodes.
+ * 4. Delete immutable file.
+ * 5. Delete immutable directory.
+ */
+
+ /* Prepare a txn to receive the greek tree. */
+ SVN_ERR(svn_test__create_bdb_fs(&fs, "test-repo-del-tree", opts,
+ pool));
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Create the greek tree. */
+ SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+
+ /* 1. Delete mutable file. */
+ {
+ const svn_fs_id_t *iota_id, *gamma_id;
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "A", 0 },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/C", 0 },
+ { "A/B/F", 0 },
+ { "A/D", 0 },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/H", 0 },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" },
+ { "A/D/H/omega", "This is the file 'omega'.\n" }
+ };
+
+    /* Check that the node revision IDs are present (before deletion). */
+ SVN_ERR(svn_fs_node_id(&iota_id, txn_root, "iota", pool));
+ SVN_ERR(svn_fs_node_id(&gamma_id, txn_root, "A/D/gamma", pool));
+
+ SVN_ERR(check_entry_present(txn_root, "", "iota", pool));
+ SVN_ERR(check_id_present(fs, iota_id, pool));
+ SVN_ERR(check_id_present(fs, gamma_id, pool));
+
+ /* Try deleting mutable files. */
+ SVN_ERR(svn_fs_delete(txn_root, "iota", pool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/D/gamma", pool));
+ SVN_ERR(check_entry_absent(txn_root, "", "iota", pool));
+ SVN_ERR(check_entry_absent(txn_root, "A/D", "gamma", pool));
+ SVN_ERR(check_id_absent(fs, iota_id, pool));
+ SVN_ERR(check_id_absent(fs, gamma_id, pool));
+
+ /* Validate the tree. */
+ SVN_ERR(svn_test__validate_tree(txn_root, expected_entries, 18, pool));
+ }
+ /* Abort transaction. */
+ SVN_ERR(svn_fs_abort_txn(txn, pool));
+
+ /* 2. Delete mutable directory. */
+
+ /* Prepare a txn to receive the greek tree. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Create the greek tree. */
+ SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+
+ {
+ const svn_fs_id_t *A_id, *mu_id, *B_id, *lambda_id, *E_id, *alpha_id,
+ *beta_id, *F_id, *C_id, *D_id, *gamma_id, *H_id, *chi_id,
+ *psi_id, *omega_id, *G_id, *pi_id, *rho_id, *tau_id;
+
+    /* Check that the node revision IDs are present (before deletion). */
+ SVN_ERR(svn_fs_node_id(&A_id, txn_root, "/A", pool));
+ SVN_ERR(check_entry_present(txn_root, "", "A", pool));
+ SVN_ERR(svn_fs_node_id(&mu_id, txn_root, "/A/mu", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "mu", pool));
+ SVN_ERR(svn_fs_node_id(&B_id, txn_root, "/A/B", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "B", pool));
+ SVN_ERR(svn_fs_node_id(&lambda_id, txn_root, "/A/B/lambda", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B", "lambda", pool));
+ SVN_ERR(svn_fs_node_id(&E_id, txn_root, "/A/B/E", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B", "E", pool));
+ SVN_ERR(svn_fs_node_id(&alpha_id, txn_root, "/A/B/E/alpha", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B/E", "alpha", pool));
+ SVN_ERR(svn_fs_node_id(&beta_id, txn_root, "/A/B/E/beta", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B/E", "beta", pool));
+ SVN_ERR(svn_fs_node_id(&F_id, txn_root, "/A/B/F", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B", "F", pool));
+ SVN_ERR(svn_fs_node_id(&C_id, txn_root, "/A/C", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "C", pool));
+ SVN_ERR(svn_fs_node_id(&D_id, txn_root, "/A/D", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "D", pool));
+ SVN_ERR(svn_fs_node_id(&gamma_id, txn_root, "/A/D/gamma", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "gamma", pool));
+ SVN_ERR(svn_fs_node_id(&H_id, txn_root, "/A/D/H", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "H", pool));
+ SVN_ERR(svn_fs_node_id(&chi_id, txn_root, "/A/D/H/chi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/H", "chi", pool));
+ SVN_ERR(svn_fs_node_id(&psi_id, txn_root, "/A/D/H/psi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/H", "psi", pool));
+ SVN_ERR(svn_fs_node_id(&omega_id, txn_root, "/A/D/H/omega", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/H", "omega", pool));
+ SVN_ERR(svn_fs_node_id(&G_id, txn_root, "/A/D/G", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "G", pool));
+ SVN_ERR(svn_fs_node_id(&pi_id, txn_root, "/A/D/G/pi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "pi", pool));
+ SVN_ERR(svn_fs_node_id(&rho_id, txn_root, "/A/D/G/rho", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "rho", pool));
+ SVN_ERR(svn_fs_node_id(&tau_id, txn_root, "/A/D/G/tau", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "tau", pool));
+
+ /* Try deleting a mutable empty dir. */
+ SVN_ERR(svn_fs_delete(txn_root, "A/C", pool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/B/F", pool));
+ SVN_ERR(check_entry_absent(txn_root, "A", "C", pool));
+ SVN_ERR(check_entry_absent(txn_root, "A/B", "F", pool));
+ SVN_ERR(check_id_absent(fs, C_id, pool));
+ SVN_ERR(check_id_absent(fs, F_id, pool));
+
+ /* Now delete a mutable non-empty dir. */
+ SVN_ERR(svn_fs_delete(txn_root, "A", pool));
+ SVN_ERR(check_entry_absent(txn_root, "", "A", pool));
+ SVN_ERR(check_id_absent(fs, A_id, pool));
+ SVN_ERR(check_id_absent(fs, mu_id, pool));
+ SVN_ERR(check_id_absent(fs, B_id, pool));
+ SVN_ERR(check_id_absent(fs, lambda_id, pool));
+ SVN_ERR(check_id_absent(fs, E_id, pool));
+ SVN_ERR(check_id_absent(fs, alpha_id, pool));
+ SVN_ERR(check_id_absent(fs, beta_id, pool));
+ SVN_ERR(check_id_absent(fs, D_id, pool));
+ SVN_ERR(check_id_absent(fs, gamma_id, pool));
+ SVN_ERR(check_id_absent(fs, H_id, pool));
+ SVN_ERR(check_id_absent(fs, chi_id, pool));
+ SVN_ERR(check_id_absent(fs, psi_id, pool));
+ SVN_ERR(check_id_absent(fs, omega_id, pool));
+ SVN_ERR(check_id_absent(fs, G_id, pool));
+ SVN_ERR(check_id_absent(fs, pi_id, pool));
+ SVN_ERR(check_id_absent(fs, rho_id, pool));
+ SVN_ERR(check_id_absent(fs, tau_id, pool));
+
+ /* Validate the tree. */
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "iota", "This is the file 'iota'.\n" } };
+ SVN_ERR(svn_test__validate_tree(txn_root, expected_entries, 1, pool));
+ }
+ }
+
+ /* Abort transaction. */
+ SVN_ERR(svn_fs_abort_txn(txn, pool));
+
+ /* 3. Delete mutable directory with immutable nodes. */
+
+ /* Prepare a txn to receive the greek tree. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Create the greek tree. */
+ SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+
+ /* Commit the greek tree. */
+ SVN_ERR(svn_fs_commit_txn(NULL, &new_rev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(new_rev));
+
+ /* Create new transaction. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, new_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ {
+ const svn_fs_id_t *A_id, *mu_id, *B_id, *lambda_id, *E_id, *alpha_id,
+ *beta_id, *F_id, *C_id, *D_id, *gamma_id, *H_id, *chi_id,
+ *psi_id, *omega_id, *G_id, *pi_id, *rho_id, *tau_id, *sigma_id;
+
+ /* Create A/D/G/sigma. This makes all components of A/D/G
+ mutable. */
+ SVN_ERR(svn_fs_make_file(txn_root, "A/D/G/sigma", pool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/G/sigma",
+ "This is another file 'sigma'.\n", pool));
+
+ /* Check that mutable node-revision-IDs are removed and immutable
+ ones still exist. */
+ SVN_ERR(svn_fs_node_id(&A_id, txn_root, "/A", pool));
+ SVN_ERR(check_entry_present(txn_root, "", "A", pool));
+ SVN_ERR(svn_fs_node_id(&mu_id, txn_root, "/A/mu", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "mu", pool));
+ SVN_ERR(svn_fs_node_id(&B_id, txn_root, "/A/B", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "B", pool));
+ SVN_ERR(svn_fs_node_id(&lambda_id, txn_root, "/A/B/lambda", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B", "lambda", pool));
+ SVN_ERR(svn_fs_node_id(&E_id, txn_root, "/A/B/E", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B", "E", pool));
+ SVN_ERR(svn_fs_node_id(&alpha_id, txn_root, "/A/B/E/alpha", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B/E", "alpha", pool));
+ SVN_ERR(svn_fs_node_id(&beta_id, txn_root, "/A/B/E/beta", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B/E", "beta", pool));
+ SVN_ERR(svn_fs_node_id(&F_id, txn_root, "/A/B/F", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B", "F", pool));
+ SVN_ERR(svn_fs_node_id(&C_id, txn_root, "/A/C", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "C", pool));
+ SVN_ERR(svn_fs_node_id(&D_id, txn_root, "/A/D", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "D", pool));
+ SVN_ERR(svn_fs_node_id(&gamma_id, txn_root, "/A/D/gamma", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "gamma", pool));
+ SVN_ERR(svn_fs_node_id(&H_id, txn_root, "/A/D/H", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "H", pool));
+ SVN_ERR(svn_fs_node_id(&chi_id, txn_root, "/A/D/H/chi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/H", "chi", pool));
+ SVN_ERR(svn_fs_node_id(&psi_id, txn_root, "/A/D/H/psi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/H", "psi", pool));
+ SVN_ERR(svn_fs_node_id(&omega_id, txn_root, "/A/D/H/omega", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/H", "omega", pool));
+ SVN_ERR(svn_fs_node_id(&G_id, txn_root, "/A/D/G", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "G", pool));
+ SVN_ERR(svn_fs_node_id(&pi_id, txn_root, "/A/D/G/pi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "pi", pool));
+ SVN_ERR(svn_fs_node_id(&rho_id, txn_root, "/A/D/G/rho", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "rho", pool));
+ SVN_ERR(svn_fs_node_id(&tau_id, txn_root, "/A/D/G/tau", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "tau", pool));
+ SVN_ERR(svn_fs_node_id(&sigma_id, txn_root, "/A/D/G/sigma", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "sigma", pool));
+
+ /* Delete "A" */
+ SVN_ERR(svn_fs_delete(txn_root, "A", pool));
+ SVN_ERR(check_entry_absent(txn_root, "", "A", pool));
+ SVN_ERR(check_id_absent(fs, A_id, pool));
+ SVN_ERR(check_id_present(fs, mu_id, pool));
+ SVN_ERR(check_id_present(fs, B_id, pool));
+ SVN_ERR(check_id_present(fs, lambda_id, pool));
+ SVN_ERR(check_id_present(fs, E_id, pool));
+ SVN_ERR(check_id_present(fs, alpha_id, pool));
+ SVN_ERR(check_id_present(fs, beta_id, pool));
+ SVN_ERR(check_id_present(fs, F_id, pool));
+ SVN_ERR(check_id_present(fs, C_id, pool));
+ SVN_ERR(check_id_absent(fs, D_id, pool));
+ SVN_ERR(check_id_present(fs, gamma_id, pool));
+ SVN_ERR(check_id_present(fs, H_id, pool));
+ SVN_ERR(check_id_present(fs, chi_id, pool));
+ SVN_ERR(check_id_present(fs, psi_id, pool));
+ SVN_ERR(check_id_present(fs, omega_id, pool));
+ SVN_ERR(check_id_absent(fs, G_id, pool));
+ SVN_ERR(check_id_present(fs, pi_id, pool));
+ SVN_ERR(check_id_present(fs, rho_id, pool));
+ SVN_ERR(check_id_present(fs, tau_id, pool));
+ SVN_ERR(check_id_absent(fs, sigma_id, pool));
+
+ /* Validate the tree. */
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "iota", "This is the file 'iota'.\n" }
+ };
+
+ SVN_ERR(svn_test__validate_tree(txn_root, expected_entries, 1, pool));
+ }
+ }
+
+ /* Abort transaction. */
+ SVN_ERR(svn_fs_abort_txn(txn, pool));
+
+ /* 4. Delete immutable file. */
+
+ /* Create new transaction. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, new_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ {
+ const svn_fs_id_t *iota_id, *gamma_id;
+
+ /* Check nodes revision ID is present. */
+ SVN_ERR(svn_fs_node_id(&iota_id, txn_root, "iota", pool));
+ SVN_ERR(svn_fs_node_id(&gamma_id, txn_root, "A/D/gamma", pool));
+ SVN_ERR(check_entry_present(txn_root, "", "iota", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "gamma", pool));
+ SVN_ERR(check_id_present(fs, iota_id, pool));
+ SVN_ERR(check_id_present(fs, gamma_id, pool));
+
+ /* Delete some files. */
+ SVN_ERR(svn_fs_delete(txn_root, "iota", pool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/D/gamma", pool));
+ SVN_ERR(check_entry_absent(txn_root, "", "iota", pool));
+    SVN_ERR(check_entry_absent(txn_root, "A/D", "gamma", pool));
+ SVN_ERR(check_id_present(fs, iota_id, pool));
+ SVN_ERR(check_id_present(fs, gamma_id, pool));
+
+ /* Validate the tree. */
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "A", 0 },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/D", 0 },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/H", 0 },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" },
+ { "A/D/H/omega", "This is the file 'omega'.\n" }
+ };
+ SVN_ERR(svn_test__validate_tree(txn_root, expected_entries, 18, pool));
+ }
+ }
+
+ /* Abort transaction. */
+ SVN_ERR(svn_fs_abort_txn(txn, pool));
+
+ /* 5. Delete immutable directory. */
+
+ /* Create new transaction. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, new_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ {
+ const svn_fs_id_t *A_id, *mu_id, *B_id, *lambda_id, *E_id, *alpha_id,
+ *beta_id, *F_id, *C_id, *D_id, *gamma_id, *H_id, *chi_id,
+ *psi_id, *omega_id, *G_id, *pi_id, *rho_id, *tau_id;
+
+ /* Check nodes revision ID is present. */
+ SVN_ERR(svn_fs_node_id(&A_id, txn_root, "/A", pool));
+ SVN_ERR(check_entry_present(txn_root, "", "A", pool));
+ SVN_ERR(svn_fs_node_id(&mu_id, txn_root, "/A/mu", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "mu", pool));
+ SVN_ERR(svn_fs_node_id(&B_id, txn_root, "/A/B", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "B", pool));
+ SVN_ERR(svn_fs_node_id(&lambda_id, txn_root, "/A/B/lambda", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B", "lambda", pool));
+ SVN_ERR(svn_fs_node_id(&E_id, txn_root, "/A/B/E", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B", "E", pool));
+ SVN_ERR(svn_fs_node_id(&alpha_id, txn_root, "/A/B/E/alpha", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B/E", "alpha", pool));
+ SVN_ERR(svn_fs_node_id(&beta_id, txn_root, "/A/B/E/beta", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B/E", "beta", pool));
+ SVN_ERR(svn_fs_node_id(&F_id, txn_root, "/A/B/F", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/B", "F", pool));
+ SVN_ERR(svn_fs_node_id(&C_id, txn_root, "/A/C", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "C", pool));
+ SVN_ERR(svn_fs_node_id(&D_id, txn_root, "/A/D", pool));
+ SVN_ERR(check_entry_present(txn_root, "A", "D", pool));
+ SVN_ERR(svn_fs_node_id(&gamma_id, txn_root, "/A/D/gamma", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "gamma", pool));
+ SVN_ERR(svn_fs_node_id(&H_id, txn_root, "/A/D/H", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "H", pool));
+ SVN_ERR(svn_fs_node_id(&chi_id, txn_root, "/A/D/H/chi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/H", "chi", pool));
+ SVN_ERR(svn_fs_node_id(&psi_id, txn_root, "/A/D/H/psi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/H", "psi", pool));
+ SVN_ERR(svn_fs_node_id(&omega_id, txn_root, "/A/D/H/omega", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/H", "omega", pool));
+ SVN_ERR(svn_fs_node_id(&G_id, txn_root, "/A/D/G", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D", "G", pool));
+ SVN_ERR(svn_fs_node_id(&pi_id, txn_root, "/A/D/G/pi", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "pi", pool));
+ SVN_ERR(svn_fs_node_id(&rho_id, txn_root, "/A/D/G/rho", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "rho", pool));
+ SVN_ERR(svn_fs_node_id(&tau_id, txn_root, "/A/D/G/tau", pool));
+ SVN_ERR(check_entry_present(txn_root, "A/D/G", "tau", pool));
+
+ /* Delete "A" */
+ SVN_ERR(svn_fs_delete(txn_root, "A", pool));
+ SVN_ERR(check_entry_absent(txn_root, "", "A", pool));
+ SVN_ERR(check_id_present(fs, A_id, pool));
+ SVN_ERR(check_id_present(fs, mu_id, pool));
+ SVN_ERR(check_id_present(fs, B_id, pool));
+ SVN_ERR(check_id_present(fs, lambda_id, pool));
+ SVN_ERR(check_id_present(fs, E_id, pool));
+ SVN_ERR(check_id_present(fs, alpha_id, pool));
+ SVN_ERR(check_id_present(fs, beta_id, pool));
+ SVN_ERR(check_id_present(fs, F_id, pool));
+ SVN_ERR(check_id_present(fs, C_id, pool));
+ SVN_ERR(check_id_present(fs, D_id, pool));
+ SVN_ERR(check_id_present(fs, gamma_id, pool));
+ SVN_ERR(check_id_present(fs, H_id, pool));
+ SVN_ERR(check_id_present(fs, chi_id, pool));
+ SVN_ERR(check_id_present(fs, psi_id, pool));
+ SVN_ERR(check_id_present(fs, omega_id, pool));
+ SVN_ERR(check_id_present(fs, G_id, pool));
+ SVN_ERR(check_id_present(fs, pi_id, pool));
+ SVN_ERR(check_id_present(fs, rho_id, pool));
+ SVN_ERR(check_id_present(fs, tau_id, pool));
+
+ /* Validate the tree. */
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "iota", "This is the file 'iota'.\n" }
+ };
+ SVN_ERR(svn_test__validate_tree(txn_root, expected_entries, 1, pool));
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+canonicalize_abspath(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ apr_size_t i;
+ const char *paths[21][2] =
+ /* in out */
+ { { NULL, NULL },
+ { "", "/" },
+ { "/", "/" },
+ { "//", "/" },
+ { "///", "/" },
+ { "foo", "/foo" },
+ { "foo/", "/foo" },
+ { "foo//", "/foo" },
+ { "/foo", "/foo" },
+ { "/foo/", "/foo" },
+ { "/foo//", "/foo" },
+ { "//foo//", "/foo" },
+ { "foo/bar", "/foo/bar" },
+ { "foo/bar/", "/foo/bar" },
+ { "foo/bar//", "/foo/bar" },
+ { "foo//bar", "/foo/bar" },
+ { "foo//bar/", "/foo/bar" },
+ { "foo//bar//", "/foo/bar" },
+ { "/foo//bar//", "/foo/bar" },
+ { "//foo//bar//", "/foo/bar" },
+ { "///foo///bar///baz///", "/foo/bar/baz" },
+ };
+
+ for (i = 0; i < (sizeof(paths) / 2 / sizeof(const char *)); i++)
+ {
+ const char *input = paths[i][0];
+ const char *output = paths[i][1];
+ const char *actual = svn_fs__canonicalize_abspath(input, pool);
+
+ if ((! output) && (! actual))
+ continue;
+ if ((! output) && actual)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "expected NULL path; got '%s'", actual);
+ if (output && (! actual))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "expected '%s' path; got NULL", output);
+ if (strcmp(output, actual))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "expected '%s' path; got '%s'",
+ output, actual);
+ }
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+create_within_copy(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ apr_pool_t *spool = svn_pool_create(pool);
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *rev_root;
+ svn_revnum_t youngest_rev = 0;
+
+ /* Create a filesystem and repository. */
+ SVN_ERR(svn_test__create_bdb_fs(&fs, "test-repo-create-within-copy", opts,
+ pool));
+
+ /*** Revision 1: Create the greek tree in revision. ***/
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, spool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, spool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(spool);
+
+ /*** Revision 2: Copy A/D to A/D3 ***/
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_copy(rev_root, "A/D", txn_root, "A/D3", spool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, spool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(spool);
+
+ /*** Revision 3: Copy A/D/G to A/D/G2 ***/
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_copy(rev_root, "A/D/G", txn_root, "A/D/G2", spool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, spool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(spool);
+
+ /*** Revision 4: Copy A/D to A/D2 and create up and I in the existing
+ A/D/G2, in the new A/D2, and in the nested, new A/D2/G2 ***/
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_copy(rev_root, "A/D", txn_root, "A/D2", spool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "A/D/G2/I", spool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/D/G2/up", spool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "A/D2/I", spool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/D2/up", spool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "A/D2/G2/I", spool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/D2/G2/up", spool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, spool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(spool);
+
+ /*** Revision 5: Create A/D3/down and A/D3/J ***/
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/D3/down", spool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "A/D3/J", spool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, spool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(spool);
+
+ {
+ /* New items should have same CopyID as their parent */
+ const char *pathgroup[4][3] =
+ {
+ { "A/D/G2",
+ "A/D/G2/I",
+ "A/D/G2/up" },
+ { "A/D2",
+ "A/D2/I",
+ "A/D2/up" },
+ { "A/D2/G2",
+ "A/D2/G2/I",
+ "A/D2/G2/up" },
+ { "A/D3",
+ "A/D3/down",
+ "A/D3/J" }
+ };
+ int i;
+
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, spool));
+
+ for (i = 0; i < 4; i++)
+ {
+ const svn_fs_id_t *lead_id;
+ const char *lead_copy_id;
+ int j;
+
+ /* Get the FSIdentifier for the first path in each group... */
+ SVN_ERR(svn_fs_node_id(&lead_id, rev_root, pathgroup[i][0], spool));
+ lead_copy_id = svn_fs_base__id_copy_id(lead_id);
+
+ for (j = 1; j < 3; j++)
+ {
+ const svn_fs_id_t *id;
+ const char *copy_id;
+
+ /* ... and make sure the other members of the group have
+ the same copy_id component as the 'lead' member. */
+
+ SVN_ERR(svn_fs_node_id(&id, rev_root, pathgroup[i][j], spool));
+ copy_id = svn_fs_base__id_copy_id(id);
+
+ if (strcmp(copy_id, lead_copy_id) != 0)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "'%s' id: expected copy_id '%s'; got copy_id '%s'",
+ pathgroup[i][j], lead_copy_id, copy_id);
+ }
+ }
+ svn_pool_clear(spool);
+ }
+
+ svn_pool_destroy(spool);
+ return SVN_NO_ERROR;
+}
+
+
+/* Test the skip delta support by committing so many changes to a file
+ * that some of its older revisions become reachable by skip deltas,
+ * then try retrieving those revisions.
+ */
+static svn_error_t *
+skip_deltas(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *rev_root;
+ apr_pool_t *subpool = svn_pool_create(pool);
+ svn_revnum_t youngest_rev = 0;
+ const char *one_line = "This is a line in file 'f'.\n";
+ svn_stringbuf_t *f = svn_stringbuf_create(one_line, pool);
+
+ /* Create a filesystem and repository. */
+ SVN_ERR(svn_test__create_bdb_fs(&fs, "test-repo-skip-deltas", opts,
+ pool));
+
+ /* Create the file. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_fs_make_file(txn_root, "f", subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "f", f->data, subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ SVN_ERR(svn_fs_deltify_revision(fs, youngest_rev, subpool));
+ svn_pool_clear(subpool);
+
+ /* Now, commit changes to the file 128 times. */
+ while (youngest_rev <= 128)
+ {
+ /* Append another line to the ever-growing file contents. */
+ svn_stringbuf_appendcstr(f, one_line);
+
+ /* Commit the new contents. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "f", f->data, subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ SVN_ERR(svn_fs_deltify_revision(fs, youngest_rev, subpool));
+ svn_pool_clear(subpool);
+ }
+
+ /* Now go back and check revision 1. */
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, 1, pool));
+ SVN_ERR(svn_test__get_file_contents(rev_root, "f", &f, pool));
+ if (strcmp(one_line, f->data) != 0)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "Wrong contents. Expected:\n '%s'\nGot:\n '%s'\n",
+ one_line, f->data);
+
+ svn_pool_destroy(subpool);
+ return SVN_NO_ERROR;
+}
+
+
+/* Trail-ish helpers for redundant_copy(). */
+struct get_txn_args
+{
+ transaction_t **txn;
+ const char *txn_name;
+ svn_fs_t *fs;
+};
+
+static svn_error_t *
+txn_body_get_txn(void *baton, trail_t *trail)
+{
+ struct get_txn_args *args = baton;
+ return svn_fs_bdb__get_txn(args->txn, args->fs, args->txn_name,
+ trail, trail->pool);
+}
+
+
+static svn_error_t *
+redundant_copy(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ const char *txn_name;
+ transaction_t *transaction;
+ svn_fs_root_t *txn_root, *rev_root;
+ const svn_fs_id_t *old_D_id, *new_D_id;
+ svn_revnum_t youngest_rev = 0;
+ struct get_txn_args args;
+
+ /* Create a filesystem and repository. */
+ SVN_ERR(svn_test__create_bdb_fs(&fs, "test-repo-redundant-copy", opts,
+ pool));
+
+ /* Create the greek tree in revision 1. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+ /* In a transaction, copy A to Z. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, pool));
+ SVN_ERR(svn_fs_txn_name(&txn_name, txn, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, pool));
+ SVN_ERR(svn_fs_copy(rev_root, "A", txn_root, "Z", pool));
+
+ /* Now, examine the transaction. There should have been only one
+ copy there. */
+ args.fs = fs;
+ args.txn_name = txn_name;
+ args.txn = &transaction;
+ SVN_ERR(svn_fs_base__retry_txn(fs, txn_body_get_txn, &args, FALSE, pool));
+ if (transaction->copies->nelts != 1)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Expected 1 copy; got %d",
+ transaction->copies->nelts);
+
+ /* Get the node-rev-id for A/D (the reason will be clear a little later). */
+ SVN_ERR(svn_fs_node_id(&old_D_id, txn_root, "A/D", pool));
+
+  /* Now copy A/D/G to Z/D/G. */
+ SVN_ERR(svn_fs_copy(rev_root, "A/D/G", txn_root, "Z/D/G", pool));
+
+ /* Now, examine the transaction. There should still only have been
+ one copy operation that "took". */
+ SVN_ERR(svn_fs_base__retry_txn(fs, txn_body_get_txn, &args, FALSE, pool));
+ if (transaction->copies->nelts != 1)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Expected only 1 copy; got %d",
+ transaction->copies->nelts);
+
+ /* Finally, check the node-rev-id for "Z/D" -- it should never have
+ been made mutable (since the second copy should not have taken
+ place). */
+ SVN_ERR(svn_fs_node_id(&new_D_id, txn_root, "A/D", pool));
+ if (! svn_string_compare(svn_fs_unparse_id(old_D_id, pool),
+ svn_fs_unparse_id(new_D_id, pool)))
+ return svn_error_create
+ (SVN_ERR_TEST_FAILED, NULL,
+ "Expected equivalent node-rev-ids; got differing ones");
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+orphaned_textmod_change(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ apr_pool_t *subpool = svn_pool_create(pool);
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *root;
+ svn_revnum_t youngest_rev = 0;
+ svn_txdelta_window_handler_t wh_func;
+ void *wh_baton;
+ apr_hash_t *changed_paths;
+
+ /* Create a filesystem and repository. */
+ SVN_ERR(svn_test__create_bdb_fs(&fs, "test-repo-orphaned-changes", opts,
+ pool));
+
+ /* Revision 1: Create and commit the greek tree. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, subpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+
+ /* Revision 2: Start to change "iota", but don't complete the work. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_fs_apply_textdelta
+ (&wh_func, &wh_baton, txn_root, "iota", NULL, NULL, subpool));
+
+ /* Don't send any delta windows, but do commit the transaction.
+ According to the FS API docs, this is not a legal codepath. But
+ this requirement on the API was added *after* its BDB
+ implementation, and the BDB backend can't enforce compliance with
+ the additional API rules in this case. So we are really just
+ testing that misbehaving callers don't introduce more damage to
+ the repository than they have to. */
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+
+ /* Fetch changed paths for the youngest revision. We should find none. */
+ SVN_ERR(svn_fs_revision_root(&root, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_paths_changed(&changed_paths, root, subpool));
+ if (apr_hash_count(changed_paths) != 0)
+ {
+ svn_fs_path_change_t *change = apr_hash_get(changed_paths, "/iota",
+ APR_HASH_KEY_STRING);
+ if (change && change->text_mod)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Got unexpected textmods changed path "
+ "for 'iota'");
+ else
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Got non-empty changed paths hash where empty "
+ "one expected");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+key_test(apr_pool_t *pool)
+{
+ int i;
+ const char *keys[9][2] = {
+ { "0", "1" },
+ { "9", "a" },
+ { "zzzzz", "100000" },
+ { "z000000zzzzzz", "z000001000000" },
+ { "97hnq33jx2a", "97hnq33jx2b" },
+ { "97hnq33jx2z", "97hnq33jx30" },
+ { "999", "99a" },
+ { "a9z", "aa0" },
+ { "z", "10" }
+ };
+
+ for (i = 0; i < 9; i++)
+ {
+ char gen_key[MAX_KEY_SIZE];
+ const char *orig_key = keys[i][0];
+ const char *next_key = keys[i][1];
+ apr_size_t len, olen;
+
+ len = strlen(orig_key);
+ olen = len;
+
+ svn_fs_base__next_key(orig_key, &len, gen_key);
+ if (! (((len == olen) || (len == (olen + 1)))
+ && (strlen(next_key) == len)
+ && (strcmp(next_key, gen_key) == 0)))
+ {
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "failed to increment key \"%s\" correctly\n"
+ " expected: %s\n"
+ " actual: %s",
+ orig_key, next_key, gen_key);
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* ------------------------------------------------------------------------ */
+
+/* The test table. */
+
+static int max_threads = 2;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_OPTS_PASS(create_berkeley_filesystem,
+ "svn_fs_create_berkeley"),
+ SVN_TEST_OPTS_PASS(open_berkeley_filesystem,
+ "open an existing Berkeley DB filesystem"),
+ SVN_TEST_OPTS_PASS(delete_mutables,
+ "delete mutable nodes from directories"),
+ SVN_TEST_OPTS_PASS(delete,
+ "delete nodes tree"),
+ SVN_TEST_OPTS_PASS(abort_txn,
+ "abort a transaction"),
+ SVN_TEST_OPTS_PASS(create_within_copy,
+ "create new items within a copied directory"),
+ SVN_TEST_OPTS_PASS(canonicalize_abspath,
+ "test svn_fs__canonicalize_abspath"),
+ SVN_TEST_OPTS_PASS(skip_deltas,
+ "test skip deltas"),
+ SVN_TEST_OPTS_PASS(redundant_copy,
+ "ensure no-op for redundant copies"),
+ SVN_TEST_OPTS_PASS(orphaned_textmod_change,
+ "test for orphaned textmod changed paths"),
+ SVN_TEST_PASS2(key_test,
+ "testing sequential alphanumeric key generation"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_fs_base/strings-reps-test.c b/subversion/tests/libsvn_fs_base/strings-reps-test.c
new file mode 100644
index 0000000..34e829c
--- /dev/null
+++ b/subversion/tests/libsvn_fs_base/strings-reps-test.c
@@ -0,0 +1,759 @@
+/* strings-reps-test.c --- test `strings' and `representations' interfaces
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdlib.h>
+#include <stdarg.h>
+#include <string.h>
+#include <stdio.h>
+
+#include <apr.h>
+
+#include "svn_error.h"
+#include "private/svn_skel.h"
+
+#include "../svn_test.h"
+#include "../svn_test_fs.h"
+#include "../../libsvn_fs/fs-loader.h"
+#include "../../libsvn_fs_base/fs.h"
+#include "../../libsvn_fs_base/util/fs_skels.h"
+#include "../../libsvn_fs_base/bdb/strings-table.h"
+#include "../../libsvn_fs_base/bdb/reps-table.h"
+
+
+
+/*-----------------------------------------------------------------*/
+/* Helper functions and batons for reps-table testing. */
+/* Baton for the txn_body_* representation helpers below.  FS is the
+   filesystem to operate on; KEY addresses a row in the `representations'
+   table (input for write/read/delete, output for write-new); SKEL holds
+   the representation skel being written or read back. */
+struct rep_args
+{
+  const char *key;
+  svn_fs_t *fs;
+  svn_skel_t *skel;
+};
+
+
+/* Trail body: parse BATON->skel into a representation_t and append it
+   to the reps table as a brand new row, returning the newly chosen key
+   in BATON->key. */
+static svn_error_t *
+txn_body_write_new_rep(void *baton, trail_t *trail)
+{
+  struct rep_args *b = (struct rep_args *) baton;
+  representation_t *rep;
+  SVN_ERR(svn_fs_base__parse_representation_skel(&rep, b->skel,
+                                                 trail->pool));
+  return svn_fs_bdb__write_new_rep(&(b->key), b->fs, rep, trail, trail->pool);
+}
+
+
+/* Trail body: parse BATON->skel and (over)write it at the existing key
+   BATON->key. */
+static svn_error_t *
+txn_body_write_rep(void *baton, trail_t *trail)
+{
+  struct rep_args *b = (struct rep_args *) baton;
+  representation_t *rep;
+  SVN_ERR(svn_fs_base__parse_representation_skel(&rep, b->skel,
+                                                 trail->pool));
+  return svn_fs_bdb__write_rep(b->fs, b->key, rep, trail, trail->pool);
+}
+
+
+/* Trail body: read the representation stored under BATON->key and
+   unparse it into BATON->skel, using the filesystem's format number. */
+static svn_error_t *
+txn_body_read_rep(void *baton, trail_t *trail)
+{
+  struct rep_args *b = (struct rep_args *) baton;
+  representation_t *rep;
+  base_fs_data_t *bfd = b->fs->fsap_data;
+  SVN_ERR(svn_fs_bdb__read_rep(&rep, b->fs, b->key, trail, trail->pool));
+  return svn_fs_base__unparse_representation_skel(&(b->skel), rep,
+                                                  bfd->format, trail->pool);
+}
+
+
+/* Trail body: delete the representation stored under BATON->key. */
+static svn_error_t *
+txn_body_delete_rep(void *baton, trail_t *trail)
+{
+  struct rep_args *b = (struct rep_args *) baton;
+  return svn_fs_bdb__delete_rep(b->fs, b->key, trail, trail->pool);
+}
+
+
+
+/* Representation Table Test functions. */
+
+/* Test: write one brand-new representation and check that a (non-NULL)
+   key is handed back for it. */
+static svn_error_t *
+write_new_rep(const svn_test_opts_t *opts,
+              apr_pool_t *pool)
+{
+  struct rep_args args;
+  const char *rep = "((fulltext 0 ) a83t2Z0q)";
+  svn_fs_t *fs;
+
+  /* Create a new fs and repos */
+  SVN_ERR(svn_test__create_bdb_fs
+          (&fs, "test-repo-write-new-rep", opts,
+           pool));
+
+  /* Set up transaction baton */
+  args.fs = fs;
+  args.skel = svn_skel__parse(rep, strlen(rep), pool);
+  args.key = NULL;
+
+  /* Write new rep to reps table. */
+  SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_write_new_rep, &args,
+                                 FALSE, pool));
+
+  /* The write-new helper must have filled in a fresh key. */
+  if (args.key == NULL)
+    return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+                            "error writing new representation");
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Test: write a new representation, then overwrite it in place under
+   the same key. */
+static svn_error_t *
+write_rep(const svn_test_opts_t *opts,
+          apr_pool_t *pool)
+{
+  struct rep_args new_args;
+  struct rep_args args;
+  const char *new_rep = "((fulltext 0 ) a83t2Z0q)";
+  const char *rep = "((fulltext 0 ) kfogel31337)";
+  svn_fs_t *fs;
+
+  /* Create a new fs and repos */
+  SVN_ERR(svn_test__create_bdb_fs
+          (&fs, "test-repo-write-rep", opts,
+           pool));
+
+  /* Set up transaction baton */
+  new_args.fs = fs;
+  new_args.skel = svn_skel__parse(new_rep, strlen(new_rep), pool);
+  new_args.key = NULL;
+
+  /* Write new rep to reps table. */
+  SVN_ERR(svn_fs_base__retry_txn(new_args.fs, txn_body_write_new_rep,
+                                 &new_args, FALSE, pool));
+
+  /* Make sure we got a valid key. */
+  if (new_args.key == NULL)
+    return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+                            "error writing new representation");
+
+  /* Set up transaction baton for re-writing reps. */
+  args.fs = new_args.fs;
+  args.skel = svn_skel__parse(rep, strlen(rep), pool);
+  args.key = new_args.key;
+
+  /* Overwrite first rep in reps table. */
+  SVN_ERR(svn_fs_base__retry_txn(new_args.fs, txn_body_write_rep, &args,
+                                 FALSE, pool));
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Test: write a rep whose skel carries no checksum, read it back,
+   overwrite it, and read it back again.  Reading is expected to splice
+   a (zeroed) md5 checksum into skels that lacked one, which is why the
+   expected "*_after" strings below contain fake checksum bytes. */
+static svn_error_t *
+read_rep(const svn_test_opts_t *opts,
+         apr_pool_t *pool)
+{
+  struct rep_args new_args;
+  struct rep_args args;
+  struct rep_args read_args;
+  svn_stringbuf_t *skel_data;
+  svn_fs_t *fs;
+
+  const char *rep = "((fulltext 0 ) kfogel31337)";
+  const char *new_rep_before = "((fulltext 0 ) a83t2Z0)";
+
+  /* This test also tests the introduction of checksums into skels that
+     didn't have them. */
+
+  /* Get writeable strings.  (These deliberately omit the closing ')',
+     so the memcmp checks below are prefix comparisons.) */
+  char *rep_after = apr_pstrdup
+    (pool, "((fulltext 0 (md5 16 XXXXXXXXXXXXXXXX)) kfogel31337");
+  char *new_rep_after = apr_pstrdup
+    (pool, "((fulltext 0 (md5 16 XXXXXXXXXXXXXXXX)) a83t2Z0");
+  size_t rep_after_len = strlen(rep_after);
+  size_t new_rep_after_len = strlen(new_rep_after);
+
+  /* Replace the fake fake checksums with the real fake checksums.
+     And someday, when checksums are actually calculated, we can
+     replace the real fake checksums with real real checksums.
+     (Note: the lengths were taken above, before the 'X' placeholders
+     are turned into NUL bytes here.) */
+  {
+    char *p;
+
+    for (p = rep_after; *p; p++)
+      if (*p == 'X')
+        *p = '\0';
+
+    for (p = new_rep_after; *p; p++)
+      if (*p == 'X')
+        *p = '\0';
+  }
+
+  /* Create a new fs and repos */
+  SVN_ERR(svn_test__create_bdb_fs
+          (&fs, "test-repo-read-rep", opts,
+           pool));
+
+  /* Set up transaction baton */
+  new_args.fs = fs;
+  new_args.skel = svn_skel__parse(new_rep_before, strlen(new_rep_before),
+                                  pool);
+  new_args.key = NULL;
+
+  /* Write new rep to reps table. */
+  SVN_ERR(svn_fs_base__retry_txn(new_args.fs, txn_body_write_new_rep,
+                                 &new_args, FALSE, pool));
+
+  /* Make sure we got a valid key. */
+  if (new_args.key == NULL)
+    return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+                            "error writing new representation");
+
+  /* Read the new rep back from the reps table. */
+  read_args.fs = new_args.fs;
+  read_args.skel = NULL;
+  read_args.key = new_args.key;
+  SVN_ERR(svn_fs_base__retry_txn(new_args.fs, txn_body_read_rep, &read_args,
+                                 FALSE, pool));
+
+  /* Make sure the skel matches. */
+  if (! read_args.skel)
+    return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+                            "error reading new representation");
+
+  skel_data = svn_skel__unparse(read_args.skel, pool);
+  if (memcmp(skel_data->data, new_rep_after, new_rep_after_len) != 0)
+    return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+                             "representation corrupted (first check)");
+
+  /* Set up transaction baton for re-writing reps. */
+  args.fs = new_args.fs;
+  args.skel = svn_skel__parse(rep, strlen(rep), pool);
+  args.key = new_args.key;
+
+  /* Overwrite first rep in reps table. */
+  SVN_ERR(svn_fs_base__retry_txn(new_args.fs, txn_body_write_rep, &args,
+                                 FALSE, pool));
+
+  /* Read the new rep back from the reps table (using the same FS and
+     key as the first read...let's make sure this thing didn't get
+     written to the wrong place). */
+  read_args.skel = NULL;
+  SVN_ERR(svn_fs_base__retry_txn(new_args.fs, txn_body_read_rep, &read_args,
+                                 FALSE, pool));
+
+  /* Make sure the skel matches. */
+  if (! read_args.skel)
+    return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+                            "error reading new representation");
+
+  skel_data = svn_skel__unparse(read_args.skel, pool);
+  if (memcmp(skel_data->data, rep_after, rep_after_len) != 0)
+    return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+                             "representation corrupted (second check)");
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Test: write a rep, delete it, then confirm that reading it back
+   fails. */
+static svn_error_t *
+delete_rep(const svn_test_opts_t *opts,
+           apr_pool_t *pool)
+{
+  struct rep_args new_args;
+  struct rep_args delete_args;
+  struct rep_args read_args;
+  const char *new_rep = "((fulltext 0 ) a83t2Z0q)";
+  svn_fs_t *fs;
+  svn_error_t *err;
+
+  /* Create a new fs and repos */
+  SVN_ERR(svn_test__create_bdb_fs
+          (&fs, "test-repo-delete-rep", opts,
+           pool));
+
+  /* Set up transaction baton */
+  new_args.fs = fs;
+  new_args.skel = svn_skel__parse(new_rep, strlen(new_rep), pool);
+  new_args.key = NULL;
+
+  /* Write new rep to reps table. */
+  SVN_ERR(svn_fs_base__retry_txn(new_args.fs, txn_body_write_new_rep,
+                                 &new_args, FALSE, pool));
+
+  /* Make sure we got a valid key. */
+  if (new_args.key == NULL)
+    return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+                            "error writing new representation");
+
+  /* Delete the rep we just wrote. */
+  delete_args.fs = new_args.fs;
+  delete_args.key = new_args.key;
+  SVN_ERR(svn_fs_base__retry_txn(new_args.fs, txn_body_delete_rep,
+                                 &delete_args, FALSE, pool));
+
+  /* Try to read the new rep back from the reps table. */
+  read_args.fs = new_args.fs;
+  read_args.skel = NULL;
+  read_args.key = new_args.key;
+  err = svn_fs_base__retry_txn(new_args.fs, txn_body_read_rep, &read_args,
+                               FALSE, pool);
+
+  /* We better have an error... */
+  if ((! err) && (read_args.skel))
+    return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+                            "error deleting representation");
+  svn_error_clear(err);
+
+  return SVN_NO_ERROR;
+}
+
+
+/* ------------------------------------------------------------------- */
+/* Helper functions and batons for strings-table testing. */
+
+/* Check that the string stored under KEY in FS has length EXPECTED_LEN
+   and contents EXPECTED_TEXT.  The length is verified twice: once via
+   svn_fs_bdb__string_size(), and once by reading the whole record back
+   in 100-byte chunks.  Runs inside TRAIL; allocates in TRAIL->pool. */
+static svn_error_t *
+verify_expected_record(svn_fs_t *fs,
+                       const char *key,
+                       const char *expected_text,
+                       apr_size_t expected_len,
+                       trail_t *trail)
+{
+  apr_size_t size;
+  char buf[100];
+  svn_stringbuf_t *text;
+  svn_filesize_t offset = 0;
+  svn_filesize_t string_size;
+
+  /* Check the string size. */
+  SVN_ERR(svn_fs_bdb__string_size(&string_size, fs, key,
+                                  trail, trail->pool));
+  if (string_size > SVN_MAX_OBJECT_SIZE)
+    return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+                             "record size is too large "
+                             "(got %" SVN_FILESIZE_T_FMT ", "
+                             "limit is %" APR_SIZE_T_FMT ")",
+                             string_size, SVN_MAX_OBJECT_SIZE);
+  size = (apr_size_t) string_size;
+  if (size != expected_len)
+    return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+                             "record has unexpected size "
+                             "(got %" APR_SIZE_T_FMT ", "
+                             "expected %" APR_SIZE_T_FMT ")",
+                             size, expected_len);
+
+  /* Read the string back in 100-byte chunks. */
+  text = svn_stringbuf_create_empty(trail->pool);
+  while (1)
+    {
+      size = sizeof(buf);
+      SVN_ERR(svn_fs_bdb__string_read(fs, key, buf, offset, &size,
+                                      trail, trail->pool));
+      if (size == 0)
+        break;
+      svn_stringbuf_appendbytes(text, buf, size);
+      offset += size;
+    }
+
+  /* Check the size and contents of the read data.  Report TEXT->LEN
+     here: SIZE is always zero at this point, because the loop above
+     only exits once a read returns zero bytes. */
+  if (text->len != expected_len)
+    return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+                             "record read returned unexpected size "
+                             "(got %" APR_SIZE_T_FMT ", "
+                             "expected %" APR_SIZE_T_FMT ")",
+                             text->len, expected_len);
+  if (memcmp(expected_text, text->data, expected_len))
+    return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+                            "record read returned unexpected data");
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Baton for the txn_body_* string helpers below.  FS is the filesystem
+   to operate on; KEY addresses a row in the `strings' table; TEXT/LEN
+   describe the data to append or, for verification, the expected
+   contents. */
+struct string_args
+{
+  svn_fs_t *fs;
+  const char *key;
+  const char *text;
+  apr_size_t len;
+};
+
+
+/* Trail body: check that the string at B->key has contents B->text and
+   length B->len. */
+static svn_error_t *
+txn_body_verify_string(void *baton, trail_t *trail)
+{
+  struct string_args *b = (struct string_args *) baton;
+  return verify_expected_record(b->fs, b->key, b->text, b->len, trail);
+}
+
+
+/* Trail body: append B->len bytes of B->text to the string at B->key.
+   If B->key is NULL, a new string is created and its key returned in
+   B->key. */
+static svn_error_t *
+txn_body_string_append(void *baton, trail_t *trail)
+{
+  struct string_args *b = (struct string_args *) baton;
+  return svn_fs_bdb__string_append(b->fs, &(b->key), b->len,
+                                   b->text, trail, trail->pool);
+}
+
+
+/* Trail body: truncate the string at B->key to zero length. */
+static svn_error_t *
+txn_body_string_clear(void *baton, trail_t *trail)
+{
+  struct string_args *b = (struct string_args *) baton;
+  return svn_fs_bdb__string_clear(b->fs, b->key, trail, trail->pool);
+}
+
+
+/* Trail body: delete the string at B->key entirely. */
+static svn_error_t *
+txn_body_string_delete(void *baton, trail_t *trail)
+{
+  struct string_args *b = (struct string_args *) baton;
+  return svn_fs_bdb__string_delete(b->fs, b->key, trail, trail->pool);
+}
+
+
+/* Trail body: store the size of the string at B->key into B->len,
+   erroring out if it exceeds SVN_MAX_OBJECT_SIZE. */
+static svn_error_t *
+txn_body_string_size(void *baton, trail_t *trail)
+{
+  struct string_args *b = (struct string_args *) baton;
+  svn_filesize_t string_size;
+  SVN_ERR(svn_fs_bdb__string_size(&string_size, b->fs, b->key,
+                                  trail, trail->pool));
+  if (string_size > SVN_MAX_OBJECT_SIZE)
+    return svn_error_createf
+      (SVN_ERR_FS_GENERAL, NULL,
+       "txn_body_string_size: string size is too large "
+       "(got %" SVN_FILESIZE_T_FMT ", limit is %" APR_SIZE_T_FMT ")",
+       string_size, SVN_MAX_OBJECT_SIZE);
+  b->len = (apr_size_t) string_size;
+  return SVN_NO_ERROR;
+}
+
+
+/* Trail body: append as above, then deliberately return an error so
+   that the enclosing trail (and hence the append) is aborted. */
+static svn_error_t *
+txn_body_string_append_fail(void *baton, trail_t *trail)
+{
+  struct string_args *b = (struct string_args *) baton;
+  SVN_ERR(svn_fs_bdb__string_append(b->fs, &(b->key), b->len,
+                                    b->text, trail, trail->pool));
+  return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+                          "la dee dah, la dee day...");
+}
+
+/* Trail body: copy the string at B->key to a brand new string,
+   returning the new string's key in B->key. */
+static svn_error_t *
+txn_body_string_copy(void *baton, trail_t *trail)
+{
+  struct string_args *b = (struct string_args *) baton;
+  return svn_fs_bdb__string_copy(b->fs, &(b->key), b->key,
+                                 trail, trail->pool);
+}
+
+
+/* Fixture data: three paragraphs of prose (from "Alice's Adventures in
+   Wonderland") used as strings-table contents in the tests below. */
+static const char *bigstring1 =
+"     Alice opened the door and found that it led into a small\n"
+"passage, not much larger than a rat-hole:  she knelt down and\n"
+"looked along the passage into the loveliest garden you ever saw.\n"
+"How she longed to get out of that dark hall, and wander about\n"
+"among those beds of bright flowers and those cool fountains, but\n"
+"she could not even get her head though the doorway; 'and even if\n"
+"my head would go through,' thought poor Alice, 'it would be of\n"
+"very little use without my shoulders.  Oh, how I wish\n"
+"I could shut up like a telescope!  I think I could, if I only\n"
+"know how to begin.'  For, you see, so many out-of-the-way things\n"
+"had happened lately, that Alice had begun to think that very few\n"
+"things indeed were really impossible.";
+
+static const char *bigstring2 =
+"     There seemed to be no use in waiting by the little door, so she\n"
+"went back to the table, half hoping she might find another key on\n"
+"it, or at any rate a book of rules for shutting people up like\n"
+"telescopes:  this time she found a little bottle on it, ('which\n"
+"certainly was not here before,' said Alice,) and round the neck\n"
+"of the bottle was a paper label, with the words 'DRINK ME'\n"
+"beautifully printed on it in large letters.";
+
+static const char *bigstring3 =
+"     It was all very well to say 'Drink me,' but the wise little\n"
+"Alice was not going to do THAT in a hurry.  'No, I'll look\n"
+"first,' she said, 'and see whether it's marked \"poison\" or not';\n"
+"for she had read several nice little histories about children who\n"
+"had got burnt, and eaten up by wild beasts and other unpleasant\n"
+"things, all because they WOULD not remember the simple rules\n"
+"their friends had taught them:  such as, that a red-hot poker\n"
+"will burn you if you hold it too long; and that if you cut your\n"
+"finger VERY deeply with a knife, it usually bleeds; and she had\n"
+"never forgotten that, if you drink much from a bottle marked\n"
+"'poison,' it is almost certain to disagree with you, sooner or\n"
+"later.";
+
+
+/* Test: drive the strings-table helpers through a write / append /
+   clear / append / delete cycle, verifying size and contents after
+   every step. */
+static svn_error_t *
+test_strings(const svn_test_opts_t *opts,
+             apr_pool_t *pool)
+{
+  struct string_args args;
+  svn_fs_t *fs;
+  svn_stringbuf_t *string;
+
+  /* Create a new fs and repos */
+  SVN_ERR(svn_test__create_bdb_fs
+          (&fs, "test-repo-test-strings", opts,
+           pool));
+
+  /* The plan (after each step below, verify the size and contents of
+     the string):
+
+     1. Write a new string (string1).
+     2. Append string2 to the string.
+     3. Clear the string.
+     4. Append string3 to the (now empty) string.
+     5. Delete the string, then verify that a size query fails with
+        SVN_ERR_FS_NO_SUCH_STRING.
+  */
+
+  /* 1. Write a new string (string1). */
+  args.fs = fs;
+  args.key = NULL;
+  args.text = bigstring1;
+  args.len = strlen(bigstring1);
+  SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_string_append, &args,
+                                 FALSE, pool));
+
+  /* Make sure a key was returned. */
+  if (! args.key)
+    return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+                            "write of new string failed to return new key");
+
+  /* Verify record's size and contents. */
+  SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_verify_string, &args,
+                                 FALSE, pool));
+
+  /* 2. Append a second string to our first one. */
+  args.text = bigstring2;
+  args.len = strlen(bigstring2);
+  SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_string_append, &args,
+                                 FALSE, pool));
+
+  /* Verify record's size and contents. */
+  string = svn_stringbuf_create(bigstring1, pool);
+  svn_stringbuf_appendcstr(string, bigstring2);
+  args.text = string->data;
+  args.len = string->len;
+  SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_verify_string, &args,
+                                 FALSE, pool));
+
+  /* 3. Clear the record */
+  SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_string_clear, &args,
+                                 FALSE, pool));
+
+  /* Verify record's size and contents. */
+  args.text = "";
+  args.len = 0;
+  SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_verify_string, &args,
+                                 FALSE, pool));
+
+  /* 4. Append a third string to our first one. */
+  args.text = bigstring3;
+  args.len = strlen(bigstring3);
+  SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_string_append, &args,
+                                 FALSE, pool));
+
+  /* Verify record's size and contents. */
+  SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_verify_string, &args,
+                                 FALSE, pool));
+
+  /* 5. Delete our record...she's served us well. */
+  SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_string_delete, &args,
+                                 FALSE, pool));
+
+  /* Now, we expect a size request on this record to fail with
+     SVN_ERR_FS_NO_SUCH_STRING. */
+  {
+    svn_error_t *err = svn_fs_base__retry_txn(args.fs, txn_body_string_size,
+                                              &args, FALSE, pool);
+
+    if (! err)
+      return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+                              "query unexpectedly successful");
+    if (err->apr_err != SVN_ERR_FS_NO_SUCH_STRING)
+      return svn_error_create(SVN_ERR_FS_GENERAL, err,
+                              "query failed with unexpected error");
+    svn_error_clear(err);
+  }
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Test: appending a NULL text buffer of length 0 must succeed (creating
+   a new, empty string). */
+static svn_error_t *
+write_null_string(const svn_test_opts_t *opts,
+                  apr_pool_t *pool)
+{
+  struct string_args args;
+  svn_fs_t *fs;
+
+  /* Create a new fs and repos */
+  SVN_ERR(svn_test__create_bdb_fs
+          (&fs, "test-repo-write-null-string", opts,
+           pool));
+
+  /* NULL key requests a brand new string; NULL/0 text is the edge case
+     under test. */
+  args.fs = fs;
+  args.key = NULL;
+  args.text = NULL;
+  args.len = 0;
+  SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_string_append, &args,
+                                 FALSE, pool));
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Test: write string1; then, inside a trail that is deliberately
+   aborted, append string2; finally verify the stored record is still
+   exactly string1. */
+static svn_error_t *
+abort_string(const svn_test_opts_t *opts,
+             apr_pool_t *pool)
+{
+  struct string_args args, args2;
+  svn_fs_t *fs;
+
+  /* Create a new fs and repos */
+  SVN_ERR(svn_test__create_bdb_fs
+          (&fs, "test-repo-abort-string", opts,
+           pool));
+
+  /* The plan:
+
+     1. Write a new string (string1).
+     2. Overwrite string1 with string2, but then ABORT the transaction.
+     3. Read string to make sure it is still string1.
+  */
+
+  /* 1. Write a new string (string1). */
+  args.fs = fs;
+  args.key = NULL;
+  args.text = bigstring1;
+  args.len = strlen(bigstring1);
+  SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_string_append, &args,
+                                 FALSE, pool));
+
+  /* Make sure a key was returned. */
+  if (! args.key)
+    return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+                            "write of new string failed to return new key");
+
+  /* Verify record's size and contents. */
+  SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_verify_string, &args,
+                                 FALSE, pool));
+
+  /* 2. Append a second string to our first one, inside a trail that is
+     aborted on purpose. */
+  args2.fs = fs;
+  args2.key = args.key;
+  args2.text = bigstring2;
+  args2.len = strlen(bigstring2);
+  {
+    svn_error_t *err;
+
+    /* This function is *supposed* to fail with SVN_ERR_TEST_FAILED */
+    err = svn_fs_base__retry_txn(args.fs, txn_body_string_append_fail,
+                                 &args2, FALSE, pool);
+    if ((! err) || (err->apr_err != SVN_ERR_TEST_FAILED))
+      return svn_error_create(SVN_ERR_TEST_FAILED, err,
+                              "failed to intentionally abort a trail");
+    svn_error_clear(err);
+  }
+
+  /* 3. Verify that record's size and contents are still that of string1 */
+  SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_verify_string, &args,
+                                 FALSE, pool));
+
+  return SVN_NO_ERROR;
+}
+
+/* Test: write string1, copy it via txn_body_string_copy, and verify
+   that the copy got a different key but identical contents. */
+static svn_error_t *
+copy_string(const svn_test_opts_t *opts,
+            apr_pool_t *pool)
+{
+  struct string_args args;
+  svn_fs_t *fs;
+  const char *old_key;
+
+  /* Create a new fs and repos */
+  SVN_ERR(svn_test__create_bdb_fs
+          (&fs, "test-repo-copy-string", opts,
+           pool));
+
+  /* Write a new string (string1). */
+  args.fs = fs;
+  args.key = NULL;
+  args.text = bigstring1;
+  args.len = strlen(bigstring1);
+  SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_string_append, &args,
+                                 FALSE, pool));
+
+  /* Make sure a key was returned. */
+  if (! (old_key = args.key))
+    return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+                            "write of new string failed to return new key");
+
+  /* Now copy that string into a new location. */
+  SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_string_copy, &args,
+                                 FALSE, pool));
+
+  /* Make sure a different key was returned. */
+  if ((! args.key) || (! strcmp(old_key, args.key)))
+    return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+                            "copy of string failed to return new key");
+
+  /* Verify record's size and contents. */
+  SVN_ERR(svn_fs_base__retry_txn(args.fs, txn_body_verify_string, &args,
+                                 FALSE, pool));
+
+  return SVN_NO_ERROR;
+}
+
+
+
+/* The test table. */
+
+/* Run up to 3 of these tests in parallel (0 would mean "no limit";
+   cf. the sibling test suites). */
+static int max_threads = 3;
+
+/* The table of tests exported to the test harness; delimited by
+   SVN_TEST_NULL sentinels at both ends. */
+static struct svn_test_descriptor_t test_funcs[] =
+  {
+    SVN_TEST_NULL,
+    SVN_TEST_OPTS_PASS(write_new_rep,
+                       "write a new rep, get a new key back"),
+    SVN_TEST_OPTS_PASS(write_rep,
+                       "write a new rep, then overwrite it"),
+    SVN_TEST_OPTS_PASS(read_rep,
+                       "write and overwrite a new rep; confirm with reads"),
+    SVN_TEST_OPTS_PASS(delete_rep,
+                       "write, then delete, a new rep; confirm deletion"),
+    SVN_TEST_OPTS_PASS(test_strings,
+                       "test many strings table functions together"),
+    SVN_TEST_OPTS_PASS(write_null_string,
+                       "write a null string"),
+    SVN_TEST_OPTS_PASS(abort_string,
+                       "write a string, then abort during an overwrite"),
+    SVN_TEST_OPTS_PASS(copy_string,
+                       "create and copy a string"),
+    SVN_TEST_NULL
+  };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_fs_fs/fs-fs-fuzzy-test.c b/subversion/tests/libsvn_fs_fs/fs-fs-fuzzy-test.c
new file mode 100644
index 0000000..acc31fb
--- /dev/null
+++ b/subversion/tests/libsvn_fs_fs/fs-fs-fuzzy-test.c
@@ -0,0 +1,394 @@
+/* fs-fs-fuzzy-test.c --- fuzzing tests for the FSFS filesystem
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdlib.h>
+#include <string.h>
+#include <apr_pools.h>
+
+#include "../svn_test.h"
+#include "../../libsvn_fs_fs/fs.h"
+#include "../../libsvn_fs_fs/fs_fs.h"
+#include "../../libsvn_fs_fs/rev_file.h"
+
+#include "svn_hash.h"
+#include "svn_pools.h"
+#include "svn_props.h"
+#include "svn_fs.h"
+#include "private/svn_string_private.h"
+#include "private/svn_string_private.h"
+
+#include "../svn_test_fs.h"
+
+
+
+/*** Helper Functions ***/
+
+/* We won't log or malfunction() upon errors. */
+/* Warning callback that deliberately does nothing, so that the warnings
+   provoked by fuzzed corruption do not interrupt the test run (it is
+   installed via svn_fs_set_warning_func below). */
+static void
+dont_filter_warnings(void *baton, svn_error_t *err)
+{
+  return;
+}
+
+
+/*** Test core code ***/
+
+/* Verify that a modification of any single byte in REVISION of FS at
+ * REPO_NAME using MODIFIER with BATON will be detected. */
+static svn_error_t *
+fuzzing_1_byte_1_rev(const char *repo_name,
+                     svn_fs_t *fs,
+                     svn_revnum_t revision,
+                     unsigned char (* modifier)(unsigned char c, void *baton),
+                     void *baton,
+                     apr_pool_t *pool)
+{
+  svn_repos_t *repos;
+  apr_hash_t *fs_config;
+  svn_fs_fs__revision_file_t *rev_file;
+  apr_off_t filesize = 0, offset;
+  apr_off_t i;
+  unsigned char footer_len;
+
+  apr_pool_t *iterpool = svn_pool_create(pool);
+
+  /* Open the revision file for modification. */
+  SVN_ERR(svn_fs_fs__open_pack_or_rev_file_writable(&rev_file, fs, revision,
+                                                    pool, iterpool));
+  SVN_ERR(svn_fs_fs__auto_read_footer(rev_file));
+  SVN_ERR(svn_io_file_seek(rev_file->file, APR_END, &filesize, iterpool));
+
+  /* The file's very last byte gives the rev footer's length; remember it
+     so we can tell below whether a mutated offset lies within the
+     footer. */
+  offset = filesize - 1;
+  SVN_ERR(svn_io_file_seek(rev_file->file, APR_SET, &offset, iterpool));
+  SVN_ERR(svn_io_file_getc((char *)&footer_len, rev_file->file, iterpool));
+
+  /* We want all the caching we can get.  More importantly, we want to
+     change the cache namespace before each test iteration. */
+  fs_config = apr_hash_make(pool);
+  svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_DELTAS, "1");
+  svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_FULLTEXTS, "1");
+  svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_NODEPROPS, "1");
+  svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_REVPROPS, "2");
+  svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_BLOCK_READ, "0");
+
+  /* Manipulate all bytes one at a time. */
+  for (i = 0; i < filesize; ++i)
+    {
+      svn_error_t *err = SVN_NO_ERROR;
+
+      /* Read byte */
+      unsigned char c_old, c_new;
+      SVN_ERR(svn_io_file_seek(rev_file->file, APR_SET, &i, iterpool));
+      SVN_ERR(svn_io_file_getc((char *)&c_old, rev_file->file, iterpool));
+
+      /* What to replace it with. Skip if there is no change. */
+      c_new = modifier(c_old, baton);
+      if (c_new == c_old)
+        continue;
+
+      /* Modify / corrupt the data. */
+      SVN_ERR(svn_io_file_seek(rev_file->file, APR_SET, &i, iterpool));
+      SVN_ERR(svn_io_file_putc((char)c_new, rev_file->file, iterpool));
+      SVN_ERR(svn_io_file_flush(rev_file->file, iterpool));
+
+      /* Make sure we use a different namespace for the caches during
+         this iteration. */
+      svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_NS,
+                    svn_uuid_generate(iterpool));
+      SVN_ERR(svn_repos_open3(&repos, repo_name, fs_config, iterpool, iterpool));
+      svn_fs_set_warning_func(svn_repos_fs(repos), dont_filter_warnings, NULL);
+
+      /* This shall detect the corruption and return an error. */
+      err = svn_repos_verify_fs3(repos, revision, revision, FALSE, FALSE,
+                                 NULL, NULL, NULL, NULL, NULL, NULL,
+                                 iterpool);
+
+      /* Case-only changes in checksum digests are not an error.
+       * We allow upper case chars to be used in MD5 checksums in all other
+       * places, thus restricting them here would be inconsistent. */
+      if (   i >= filesize - footer_len /* Within footer */
+          && c_old >= 'a' && c_old <= 'f' /* 'a' to 'f', only appear
+                                             in checksum digests */
+          && c_new == c_old - 'a' + 'A') /* respective upper case */
+        {
+          if (err)
+            {
+              /* Let us know where we were too strict ... */
+              printf("Detected case change in checksum digest at offset 0x%"
+                     APR_UINT64_T_HEX_FMT " (%" APR_OFF_T_FMT ") in r%ld: "
+                     "%c -> %c\n", (apr_uint64_t)i, i, revision, c_old, c_new);
+
+              SVN_ERR(err);
+            }
+        }
+      else if (!err)
+        {
+          /* Let us know where we miss changes ... */
+          printf("Undetected mod at offset 0x%"APR_UINT64_T_HEX_FMT
+                 " (%"APR_OFF_T_FMT") in r%ld: 0x%02x -> 0x%02x\n",
+                 (apr_uint64_t)i, i, revision, c_old, c_new);
+
+          SVN_TEST_ASSERT(err);
+        }
+
+      svn_error_clear(err);
+
+      /* Undo the corruption. */
+      SVN_ERR(svn_io_file_seek(rev_file->file, APR_SET, &i, iterpool));
+      SVN_ERR(svn_io_file_putc((char)c_old, rev_file->file, iterpool));
+
+      svn_pool_clear(iterpool);
+    }
+
+  svn_pool_destroy(iterpool);
+
+  return SVN_NO_ERROR;
+}
+
+/* Create a greek repo with OPTS at REPO_NAME. Verify that a modification
+ * of any single byte using MODIFIER with BATON will be detected. */
+static svn_error_t *
+fuzzing_1_byte_test(const svn_test_opts_t *opts,
+                    const char *repo_name,
+                    unsigned char (* modifier)(unsigned char c, void *baton),
+                    void *baton,
+                    apr_pool_t *pool)
+{
+  svn_repos_t *repos;
+  svn_fs_t *fs;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *txn_root;
+  svn_revnum_t rev;
+  svn_revnum_t i;
+
+  apr_pool_t *iterpool = svn_pool_create(pool);
+
+  /* Bail (with success) on known-untestable scenarios */
+  if (strcmp(opts->fs_type, "fsfs") != 0)
+    return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+                            "this will test FSFS repositories only");
+  /* Create a filesystem */
+  SVN_ERR(svn_test__create_repos(&repos, repo_name, opts, pool));
+  fs = svn_repos_fs(repos);
+
+  /* Revision 1 (one and only revision): the Greek tree */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+  SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(rev));
+
+  /* Fuzz every revision file, r0 included. */
+  for (i = 0; i <= rev; ++i)
+    {
+      svn_pool_clear(iterpool);
+      SVN_ERR(fuzzing_1_byte_1_rev(repo_name, fs, i, modifier, baton,
+                                   iterpool));
+    }
+
+  svn_pool_destroy(iterpool);
+
+  return SVN_NO_ERROR;
+}
+
+/* Modifier function to be used with fuzzing_set_byte_test.
+ * We return the fixed char value given as *BATON. */
+static unsigned char
+set_byte(unsigned char c, void *baton)
+{
+  return *(const unsigned char *)baton;
+}
+
+/* Run the fuzzing test setting any byte in the repo to all values MIN to
+ * MAX-1. */
+static svn_error_t *
+fuzzing_set_byte_test(const svn_test_opts_t *opts,
+                      int min,
+                      int max,
+                      apr_pool_t *pool)
+{
+  apr_pool_t *iterpool = svn_pool_create(pool);
+  unsigned i = 0;
+  for (i = min; i < max; ++i)
+    {
+      unsigned char c = i;
+      const char *repo_name;
+      svn_pool_clear(iterpool);
+
+      /* The name encodes only MIN/MAX, so every iteration (re)uses the
+         same repository name. */
+      repo_name = apr_psprintf(iterpool, "test-repo-fuzzing_set_byte_%d_%d",
+                               min, max);
+      SVN_ERR(fuzzing_1_byte_test(opts, repo_name, set_byte, &c, iterpool));
+    }
+
+  svn_pool_destroy(iterpool);
+  return SVN_NO_ERROR;
+}
+
+
+
+/*** Tests ***/
+
+/* ------------------------------------------------------------------------ */
+
+/* Modifier: flip every bit of C. */
+static unsigned char
+invert_byte(unsigned char c, void *baton)
+{
+  return ~c;
+}
+
+static svn_error_t *
+fuzzing_invert_byte_test(const svn_test_opts_t *opts,
+                         apr_pool_t *pool)
+{
+  SVN_ERR(fuzzing_1_byte_test(opts, "test-repo-fuzzing_invert_byte",
+                              invert_byte, NULL, pool));
+
+  return SVN_NO_ERROR;
+}
+
+/* ------------------------------------------------------------------------ */
+
+/* Modifier: add one to C (0xff wraps to 0x00 via unsigned char
+   conversion). */
+static unsigned char
+increment_byte(unsigned char c, void *baton)
+{
+  return c + 1;
+}
+
+static svn_error_t *
+fuzzing_increment_byte_test(const svn_test_opts_t *opts,
+                            apr_pool_t *pool)
+{
+  SVN_ERR(fuzzing_1_byte_test(opts, "test-repo-fuzzing_increment_byte",
+                              increment_byte, NULL, pool));
+
+  return SVN_NO_ERROR;
+}
+
+/* ------------------------------------------------------------------------ */
+
+/* Modifier: subtract one from C (0x00 wraps to 0xff). */
+static unsigned char
+decrement_byte(unsigned char c, void *baton)
+{
+  return c - 1;
+}
+
+static svn_error_t *
+fuzzing_decrement_byte_test(const svn_test_opts_t *opts,
+                            apr_pool_t *pool)
+{
+  SVN_ERR(fuzzing_1_byte_test(opts, "test-repo-fuzzing_decrement_byte",
+                              decrement_byte, NULL, pool));
+
+  return SVN_NO_ERROR;
+}
+
+/* ------------------------------------------------------------------------ */
+
+/* Modifier: replace C with 0 (note: 0 bytes themselves are skipped by
+   the caller since the modifier returns the old value unchanged). */
+static unsigned char
+null_byte(unsigned char c, void *baton)
+{
+  return 0;
+}
+
+static svn_error_t *
+fuzzing_null_byte_test(const svn_test_opts_t *opts,
+                       apr_pool_t *pool)
+{
+  SVN_ERR(fuzzing_1_byte_test(opts, "test-repo-fuzzing_null_byte",
+                              null_byte, NULL, pool));
+
+  return SVN_NO_ERROR;
+}
+
+/* ------------------------------------------------------------------------ */
+
+/* Generator macro: define a test function covering byte values N to M-1 */
+#define FUZZING_SET_BYTE_TEST_N(N,M)\
+  static svn_error_t * \
+  fuzzing_set_byte_test_ ##N(const svn_test_opts_t *opts, \
+                             apr_pool_t *pool) \
+  { \
+    return svn_error_trace(fuzzing_set_byte_test(opts, N, M, pool)); \
+  }
+
+/* Add the test function declared above to the test_funcs array. */
+#define TEST_FUZZING_SET_BYTE_TEST_N(N,M)\
+  SVN_TEST_OPTS_PASS(fuzzing_set_byte_test_ ##N, \
+                     "set any byte to any value between " #N " and " #M)
+
+/* Declare tests that will cover all possible byte values.
+   NOTE: each instantiation here must have a matching
+   TEST_FUZZING_SET_BYTE_TEST_N() entry in test_funcs below. */
+FUZZING_SET_BYTE_TEST_N(0,16)
+FUZZING_SET_BYTE_TEST_N(16,32)
+FUZZING_SET_BYTE_TEST_N(32,48)
+FUZZING_SET_BYTE_TEST_N(48,64)
+FUZZING_SET_BYTE_TEST_N(64,80)
+FUZZING_SET_BYTE_TEST_N(80,96)
+FUZZING_SET_BYTE_TEST_N(96,112)
+FUZZING_SET_BYTE_TEST_N(112,128)
+FUZZING_SET_BYTE_TEST_N(128,144)
+FUZZING_SET_BYTE_TEST_N(144,160)
+FUZZING_SET_BYTE_TEST_N(160,176)
+FUZZING_SET_BYTE_TEST_N(176,192)
+FUZZING_SET_BYTE_TEST_N(192,208)
+FUZZING_SET_BYTE_TEST_N(208,224)
+FUZZING_SET_BYTE_TEST_N(224,240)
+FUZZING_SET_BYTE_TEST_N(240,256)
+
+
+/* The test table. */
+
+/* Allow for any number of tests to run in parallel. */
+static int max_threads = 0;
+
+/* The table of tests exported to the test harness; delimited by
+   SVN_TEST_NULL sentinels at both ends. */
+static struct svn_test_descriptor_t test_funcs[] =
+  {
+    SVN_TEST_NULL,
+    SVN_TEST_OPTS_PASS(fuzzing_invert_byte_test,
+                       "fuzzing: invert any byte"),
+    SVN_TEST_OPTS_PASS(fuzzing_increment_byte_test,
+                       "fuzzing: increment any byte"),
+    SVN_TEST_OPTS_PASS(fuzzing_decrement_byte_test,
+                       "fuzzing: decrement any byte"),
+    SVN_TEST_OPTS_PASS(fuzzing_null_byte_test,
+                       "fuzzing: set any byte to 0"),
+
+    /* Register generated tests.  Keep this list in sync with the
+       FUZZING_SET_BYTE_TEST_N() instantiations above. */
+    TEST_FUZZING_SET_BYTE_TEST_N(0,16),
+    TEST_FUZZING_SET_BYTE_TEST_N(16,32),
+    TEST_FUZZING_SET_BYTE_TEST_N(32,48),
+    TEST_FUZZING_SET_BYTE_TEST_N(48,64),
+    TEST_FUZZING_SET_BYTE_TEST_N(64,80),
+    TEST_FUZZING_SET_BYTE_TEST_N(80,96),
+    TEST_FUZZING_SET_BYTE_TEST_N(96,112),
+    TEST_FUZZING_SET_BYTE_TEST_N(112,128),
+    TEST_FUZZING_SET_BYTE_TEST_N(128,144),
+    TEST_FUZZING_SET_BYTE_TEST_N(144,160),
+    TEST_FUZZING_SET_BYTE_TEST_N(160,176),
+    TEST_FUZZING_SET_BYTE_TEST_N(176,192),
+    TEST_FUZZING_SET_BYTE_TEST_N(192,208),
+    TEST_FUZZING_SET_BYTE_TEST_N(208,224),
+    TEST_FUZZING_SET_BYTE_TEST_N(224,240),
+    TEST_FUZZING_SET_BYTE_TEST_N(240,256),
+
+    SVN_TEST_NULL
+  };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_fs_fs/fs-fs-pack-test.c b/subversion/tests/libsvn_fs_fs/fs-fs-pack-test.c
new file mode 100644
index 0000000..7a6e910
--- /dev/null
+++ b/subversion/tests/libsvn_fs_fs/fs-fs-pack-test.c
@@ -0,0 +1,1948 @@
+/* fs-fs-pack-test.c --- tests for the FSFS filesystem
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdlib.h>
+#include <string.h>
+#include <apr_pools.h>
+
+#include "../svn_test.h"
+#include "../../libsvn_fs/fs-loader.h"
+#include "../../libsvn_fs_fs/fs.h"
+#include "../../libsvn_fs_fs/fs_fs.h"
+#include "../../libsvn_fs_fs/low_level.h"
+#include "../../libsvn_fs_fs/pack.h"
+#include "../../libsvn_fs_fs/util.h"
+
+#include "svn_hash.h"
+#include "svn_pools.h"
+#include "svn_props.h"
+#include "svn_fs.h"
+#include "private/svn_string_private.h"
+
+#include "../svn_test_fs.h"
+
+
+
+/*** Helper Functions ***/
+
+static void
+ignore_fs_warnings(void *baton, svn_error_t *err)
+{
+#ifdef SVN_DEBUG
+ SVN_DBG(("Ignoring FS warning %s\n",
+ svn_error_symbolic_name(err ? err->apr_err : 0)));
+#endif
+ return;
+}
+
+/* Return the expected contents of "iota" in revision REV. */
+static const char *
+get_rev_contents(svn_revnum_t rev, apr_pool_t *pool)
+{
+ /* Toss in a bunch of magic numbers for spice. */
+ apr_int64_t num = ((rev * 1234353 + 4358) * 4583 + ((rev % 4) << 1)) / 42;
+ return apr_psprintf(pool, "%" APR_INT64_T_FMT "\n", num);
+}
+
+struct pack_notify_baton
+{
+ apr_int64_t expected_shard;
+ svn_fs_pack_notify_action_t expected_action;
+};
+
+static svn_error_t *
+pack_notify(void *baton,
+ apr_int64_t shard,
+ svn_fs_pack_notify_action_t action,
+ apr_pool_t *pool)
+{
+ struct pack_notify_baton *pnb = baton;
+
+ SVN_TEST_ASSERT(shard == pnb->expected_shard);
+ SVN_TEST_ASSERT(action == pnb->expected_action);
+
+ /* Update expectations. */
+ switch (action)
+ {
+ case svn_fs_pack_notify_start:
+ pnb->expected_action = svn_fs_pack_notify_end;
+ break;
+
+ case svn_fs_pack_notify_end:
+ pnb->expected_action = svn_fs_pack_notify_start;
+ pnb->expected_shard++;
+ break;
+
+ default:
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Unknown notification action when packing");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+#define R1_LOG_MSG "Let's serf"
+
+/* Create a filesystem in DIR. Set the shard size to SHARD_SIZE and create
+ NUM_REVS number of revisions (in addition to r0). Use POOL for
+ allocations. After this function successfully completes, the filesystem's
+ youngest revision number will be NUM_REVS. */
+static svn_error_t *
+create_non_packed_filesystem(const char *dir,
+ const svn_test_opts_t *opts,
+ svn_revnum_t num_revs,
+ int shard_size,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ const char *conflict;
+ svn_revnum_t after_rev;
+ apr_pool_t *subpool = svn_pool_create(pool);
+ apr_pool_t *iterpool;
+ apr_hash_t *fs_config;
+
+ /* Bail (with success) on known-untestable scenarios */
+ if (strcmp(opts->fs_type, "fsfs") != 0)
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "this will test FSFS repositories only");
+
+ if (opts->server_minor_version && (opts->server_minor_version < 6))
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "pre-1.6 SVN doesn't support FSFS packing");
+
+ fs_config = apr_hash_make(pool);
+ svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_SHARD_SIZE,
+ apr_itoa(pool, shard_size));
+
+ /* Create a filesystem. */
+ SVN_ERR(svn_test__create_fs2(&fs, dir, opts, fs_config, subpool));
+
+ /* Revision 1: the Greek tree */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, subpool));
+ SVN_ERR(svn_fs_change_txn_prop(txn, SVN_PROP_REVISION_LOG,
+ svn_string_create(R1_LOG_MSG, pool),
+ pool));
+ SVN_ERR(svn_fs_commit_txn(&conflict, &after_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(after_rev));
+
+ /* Revisions 2 thru NUM_REVS-1: content tweaks to "iota". */
+ iterpool = svn_pool_create(subpool);
+ while (after_rev < num_revs)
+ {
+ svn_pool_clear(iterpool);
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, after_rev, iterpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "iota",
+ get_rev_contents(after_rev + 1,
+ iterpool),
+ iterpool));
+ SVN_ERR(svn_fs_commit_txn(&conflict, &after_rev, txn, iterpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(after_rev));
+ }
+ svn_pool_destroy(iterpool);
+ svn_pool_destroy(subpool);
+
+ /* Done */
+ return SVN_NO_ERROR;
+}
+
+/* Create a packed filesystem in DIR. Set the shard size to
+ SHARD_SIZE and create NUM_REVS number of revisions (in addition to
+ r0). Use POOL for allocations. After this function successfully
+ completes, the filesystem's youngest revision number will be the
+ same as NUM_REVS. */
+static svn_error_t *
+create_packed_filesystem(const char *dir,
+ const svn_test_opts_t *opts,
+ svn_revnum_t num_revs,
+ int shard_size,
+ apr_pool_t *pool)
+{
+ struct pack_notify_baton pnb;
+
+ /* Create the repo and fill it. */
+ SVN_ERR(create_non_packed_filesystem(dir, opts, num_revs, shard_size,
+ pool));
+
+ /* Now pack the FS */
+ pnb.expected_shard = 0;
+ pnb.expected_action = svn_fs_pack_notify_start;
+ return svn_fs_pack(dir, pack_notify, &pnb, NULL, NULL, pool);
+}
+
+/* Create a packed FSFS filesystem for revprop tests at REPO_NAME with
+ * MAX_REV revisions and the given SHARD_SIZE and OPTS. Return it in *FS.
+ * Use POOL for allocations.
+ */
+static svn_error_t *
+prepare_revprop_repo(svn_fs_t **fs,
+ const char *repo_name,
+ svn_revnum_t max_rev,
+ int shard_size,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ const char *conflict;
+ svn_revnum_t after_rev;
+ apr_pool_t *subpool;
+
+ /* Create the packed FS and open it. */
+ SVN_ERR(create_packed_filesystem(repo_name, opts, max_rev, shard_size, pool));
+ SVN_ERR(svn_fs_open2(fs, repo_name, NULL, pool, pool));
+
+ subpool = svn_pool_create(pool);
+ /* Do a commit to trigger packing. */
+ SVN_ERR(svn_fs_begin_txn(&txn, *fs, max_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "iota", "new-iota", subpool));
+ SVN_ERR(svn_fs_commit_txn(&conflict, &after_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(after_rev));
+ svn_pool_destroy(subpool);
+
+ /* Pack the repository. */
+ SVN_ERR(svn_fs_pack(repo_name, NULL, NULL, NULL, NULL, pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* For revision REV, return a short log message allocated in POOL.
+ */
+static svn_string_t *
+default_log(svn_revnum_t rev, apr_pool_t *pool)
+{
+ return svn_string_createf(pool, "Default message for rev %ld", rev);
+}
+
+/* For revision REV, return a long log message allocated in POOL.
+ */
+static svn_string_t *
+large_log(svn_revnum_t rev, apr_size_t length, apr_pool_t *pool)
+{
+ svn_stringbuf_t *temp = svn_stringbuf_create_ensure(100000, pool);
+ int i, count = (int)(length - 50) / 6;
+
+ svn_stringbuf_appendcstr(temp, "A ");
+ for (i = 0; i < count; ++i)
+ svn_stringbuf_appendcstr(temp, "very, ");
+
+ svn_stringbuf_appendcstr(temp,
+ apr_psprintf(pool, "very long message for rev %ld, indeed", rev));
+
+ return svn_stringbuf__morph_into_string(temp);
+}
+
+/* For revision REV, return a long log message allocated in POOL.
+ */
+static svn_string_t *
+huge_log(svn_revnum_t rev, apr_pool_t *pool)
+{
+ return large_log(rev, 90000, pool);
+}
+
+
+/*** Tests ***/
+
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-fsfs-pack"
+#define SHARD_SIZE 7
+#define MAX_REV 53
+static svn_error_t *
+pack_filesystem(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ int i;
+ svn_node_kind_t kind;
+ const char *path;
+ char buf[80];
+ apr_file_t *file;
+ apr_size_t len;
+
+ SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE,
+ pool));
+
+ /* Check to see that the pack files exist, and that the rev directories
+ don't. */
+ for (i = 0; i < (MAX_REV + 1) / SHARD_SIZE; i++)
+ {
+ path = svn_dirent_join_many(pool, REPO_NAME, "revs",
+ apr_psprintf(pool, "%d.pack", i / SHARD_SIZE),
+ "pack", SVN_VA_NULL);
+
+ /* These files should exist. */
+ SVN_ERR(svn_io_check_path(path, &kind, pool));
+ if (kind != svn_node_file)
+ return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+ "Expected pack file '%s' not found", path);
+
+ if (opts->server_minor_version && (opts->server_minor_version < 9))
+ {
+ path = svn_dirent_join_many(pool, REPO_NAME, "revs",
+ apr_psprintf(pool, "%d.pack", i / SHARD_SIZE),
+ "manifest", SVN_VA_NULL);
+ SVN_ERR(svn_io_check_path(path, &kind, pool));
+ if (kind != svn_node_file)
+ return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+ "Expected manifest file '%s' not found",
+ path);
+ }
+
+ /* This directory should not exist. */
+ path = svn_dirent_join_many(pool, REPO_NAME, "revs",
+ apr_psprintf(pool, "%d", i / SHARD_SIZE),
+ SVN_VA_NULL);
+ SVN_ERR(svn_io_check_path(path, &kind, pool));
+ if (kind != svn_node_none)
+ return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+ "Unexpected directory '%s' found", path);
+ }
+
+ /* Ensure the min-unpacked-rev jives with the above operations. */
+ SVN_ERR(svn_io_file_open(&file,
+ svn_dirent_join(REPO_NAME, PATH_MIN_UNPACKED_REV,
+ pool),
+ APR_READ | APR_BUFFERED, APR_OS_DEFAULT, pool));
+ len = sizeof(buf);
+ SVN_ERR(svn_io_read_length_line(file, buf, &len, pool));
+ SVN_ERR(svn_io_file_close(file, pool));
+ if (SVN_STR_TO_REV(buf) != (MAX_REV / SHARD_SIZE) * SHARD_SIZE)
+ return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+ "Bad '%s' contents", PATH_MIN_UNPACKED_REV);
+
+ /* Finally, make sure the final revision directory does exist. */
+ path = svn_dirent_join_many(pool, REPO_NAME, "revs",
+ apr_psprintf(pool, "%d", (i / SHARD_SIZE) + 1),
+ SVN_VA_NULL);
+ SVN_ERR(svn_io_check_path(path, &kind, pool));
+ if (kind != svn_node_none)
+ return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+ "Expected directory '%s' not found", path);
+
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef SHARD_SIZE
+#undef MAX_REV
+
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-fsfs-pack-even"
+#define SHARD_SIZE 4
+#define MAX_REV 11
+static svn_error_t *
+pack_even_filesystem(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_node_kind_t kind;
+ const char *path;
+
+ SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE,
+ pool));
+
+ path = svn_dirent_join_many(pool, REPO_NAME, "revs", "2.pack", SVN_VA_NULL);
+ SVN_ERR(svn_io_check_path(path, &kind, pool));
+ if (kind != svn_node_dir)
+ return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+ "Packing did not complete as expected");
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef SHARD_SIZE
+#undef MAX_REV
+
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-read-packed-fs"
+#define SHARD_SIZE 5
+#define MAX_REV 11
+static svn_error_t *
+read_packed_fs(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_stream_t *rstream;
+ svn_stringbuf_t *rstring;
+ svn_revnum_t i;
+
+ SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, pool));
+ SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, pool, pool));
+
+ for (i = 1; i < (MAX_REV + 1); i++)
+ {
+ svn_fs_root_t *rev_root;
+ svn_stringbuf_t *sb;
+
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, i, pool));
+ SVN_ERR(svn_fs_file_contents(&rstream, rev_root, "iota", pool));
+ SVN_ERR(svn_test__stream_to_string(&rstring, rstream, pool));
+
+ if (i == 1)
+ sb = svn_stringbuf_create("This is the file 'iota'.\n", pool);
+ else
+ sb = svn_stringbuf_create(get_rev_contents(i, pool), pool);
+
+ if (! svn_stringbuf_compare(rstring, sb))
+ return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+ "Bad data in revision %ld.", i);
+ }
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef SHARD_SIZE
+#undef MAX_REV
+
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-commit-packed-fs"
+#define SHARD_SIZE 5
+#define MAX_REV 10
+static svn_error_t *
+commit_packed_fs(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ const char *conflict;
+ svn_revnum_t after_rev;
+
+ /* Create the packed FS and open it. */
+ SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, 5, pool));
+ SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, pool, pool));
+
+ /* Now do a commit. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, MAX_REV, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "iota",
+ "How much better is it to get wisdom than gold! and to get "
+ "understanding rather to be chosen than silver!", pool));
+ SVN_ERR(svn_fs_commit_txn(&conflict, &after_rev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(after_rev));
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef MAX_REV
+#undef SHARD_SIZE
+
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-get-set-revprop-packed-fs"
+#define SHARD_SIZE 4
+#define MAX_REV 10
+static svn_error_t *
+get_set_revprop_packed_fs(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_string_t *prop_value;
+
+ /* Create the packed FS and open it. */
+ SVN_ERR(prepare_revprop_repo(&fs, REPO_NAME, MAX_REV, SHARD_SIZE, opts,
+ pool));
+
+ /* Try to get revprop for revision 0
+ * (non-packed due to special handling). */
+ SVN_ERR(svn_fs_revision_prop(&prop_value, fs, 0, SVN_PROP_REVISION_AUTHOR,
+ pool));
+
+ /* Try to change revprop for revision 0
+ * (non-packed due to special handling). */
+ SVN_ERR(svn_fs_change_rev_prop(fs, 0, SVN_PROP_REVISION_AUTHOR,
+ svn_string_create("tweaked-author", pool),
+ pool));
+
+ /* verify */
+ SVN_ERR(svn_fs_revision_prop(&prop_value, fs, 0, SVN_PROP_REVISION_AUTHOR,
+ pool));
+ SVN_TEST_STRING_ASSERT(prop_value->data, "tweaked-author");
+
+ /* Try to get packed revprop for revision 5. */
+ SVN_ERR(svn_fs_revision_prop(&prop_value, fs, 5, SVN_PROP_REVISION_AUTHOR,
+ pool));
+
+ /* Try to change packed revprop for revision 5. */
+ SVN_ERR(svn_fs_change_rev_prop(fs, 5, SVN_PROP_REVISION_AUTHOR,
+ svn_string_create("tweaked-author2", pool),
+ pool));
+
+ /* verify */
+ SVN_ERR(svn_fs_revision_prop(&prop_value, fs, 5, SVN_PROP_REVISION_AUTHOR,
+ pool));
+ SVN_TEST_STRING_ASSERT(prop_value->data, "tweaked-author2");
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef MAX_REV
+#undef SHARD_SIZE
+
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-get-set-large-revprop-packed-fs"
+#define SHARD_SIZE 4
+#define MAX_REV 11
+static svn_error_t *
+get_set_large_revprop_packed_fs(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_string_t *prop_value;
+ svn_revnum_t rev;
+
+ /* Create the packed FS and open it. */
+ SVN_ERR(prepare_revprop_repo(&fs, REPO_NAME, MAX_REV, SHARD_SIZE, opts,
+ pool));
+
+ /* Set commit messages to different, large values that fill the pack
+ * files but do not exceed the pack size limit. */
+ for (rev = 0; rev <= MAX_REV; ++rev)
+ SVN_ERR(svn_fs_change_rev_prop(fs, rev, SVN_PROP_REVISION_LOG,
+ large_log(rev, 1000, pool),
+ pool));
+
+ /* verify */
+ for (rev = 0; rev <= MAX_REV; ++rev)
+ {
+ SVN_ERR(svn_fs_revision_prop(&prop_value, fs, rev,
+ SVN_PROP_REVISION_LOG, pool));
+ SVN_TEST_STRING_ASSERT(prop_value->data,
+ large_log(rev, 1000, pool)->data);
+ }
+
+ /* Put a larger revprop into the last, some middle and the first revision
+ * of a pack. This should cause the packs to split in the middle. */
+ SVN_ERR(svn_fs_change_rev_prop(fs, 3, SVN_PROP_REVISION_LOG,
+ /* rev 0 is not packed */
+ large_log(3, 2400, pool),
+ pool));
+ SVN_ERR(svn_fs_change_rev_prop(fs, 5, SVN_PROP_REVISION_LOG,
+ large_log(5, 1500, pool),
+ pool));
+ SVN_ERR(svn_fs_change_rev_prop(fs, 8, SVN_PROP_REVISION_LOG,
+ large_log(8, 1500, pool),
+ pool));
+
+ /* verify */
+ for (rev = 0; rev <= MAX_REV; ++rev)
+ {
+ SVN_ERR(svn_fs_revision_prop(&prop_value, fs, rev,
+ SVN_PROP_REVISION_LOG, pool));
+
+ if (rev == 3)
+ SVN_TEST_STRING_ASSERT(prop_value->data,
+ large_log(rev, 2400, pool)->data);
+ else if (rev == 5 || rev == 8)
+ SVN_TEST_STRING_ASSERT(prop_value->data,
+ large_log(rev, 1500, pool)->data);
+ else
+ SVN_TEST_STRING_ASSERT(prop_value->data,
+ large_log(rev, 1000, pool)->data);
+ }
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef MAX_REV
+#undef SHARD_SIZE
+
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-get-set-huge-revprop-packed-fs"
+#define SHARD_SIZE 4
+#define MAX_REV 10
+static svn_error_t *
+get_set_huge_revprop_packed_fs(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_string_t *prop_value;
+ svn_revnum_t rev;
+
+ /* Create the packed FS and open it. */
+ SVN_ERR(prepare_revprop_repo(&fs, REPO_NAME, MAX_REV, SHARD_SIZE, opts,
+ pool));
+
+ /* Set commit messages to different values */
+ for (rev = 0; rev <= MAX_REV; ++rev)
+ SVN_ERR(svn_fs_change_rev_prop(fs, rev, SVN_PROP_REVISION_LOG,
+ default_log(rev, pool),
+ pool));
+
+ /* verify */
+ for (rev = 0; rev <= MAX_REV; ++rev)
+ {
+ SVN_ERR(svn_fs_revision_prop(&prop_value, fs, rev,
+ SVN_PROP_REVISION_LOG, pool));
+ SVN_TEST_STRING_ASSERT(prop_value->data, default_log(rev, pool)->data);
+ }
+
+ /* Put a huge revprop into the last, some middle and the first revision
+ * of a pack. They will cause the pack files to split accordingly. */
+ SVN_ERR(svn_fs_change_rev_prop(fs, 3, SVN_PROP_REVISION_LOG,
+ huge_log(3, pool),
+ pool));
+ SVN_ERR(svn_fs_change_rev_prop(fs, 5, SVN_PROP_REVISION_LOG,
+ huge_log(5, pool),
+ pool));
+ SVN_ERR(svn_fs_change_rev_prop(fs, 8, SVN_PROP_REVISION_LOG,
+ huge_log(8, pool),
+ pool));
+
+ /* verify */
+ for (rev = 0; rev <= MAX_REV; ++rev)
+ {
+ SVN_ERR(svn_fs_revision_prop(&prop_value, fs, rev,
+ SVN_PROP_REVISION_LOG, pool));
+
+ if (rev == 3 || rev == 5 || rev == 8)
+ SVN_TEST_STRING_ASSERT(prop_value->data,
+ huge_log(rev, pool)->data);
+ else
+ SVN_TEST_STRING_ASSERT(prop_value->data,
+ default_log(rev, pool)->data);
+ }
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef MAX_REV
+#undef SHARD_SIZE
+
+/* ------------------------------------------------------------------------ */
+/* Regression test for issue #3571 (fsfs 'svnadmin recover' expects
+ youngest revprop to be outside revprops.db). */
+#define REPO_NAME "test-repo-recover-fully-packed"
+#define SHARD_SIZE 4
+#define MAX_REV 7
+static svn_error_t *
+recover_fully_packed(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ apr_pool_t *subpool;
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ const char *conflict;
+ svn_revnum_t after_rev;
+ svn_error_t *err;
+
+ /* Create a packed FS for which every revision will live in a pack
+ digest file, and then recover it. */
+ SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, pool));
+ SVN_ERR(svn_fs_recover(REPO_NAME, NULL, NULL, pool));
+
+ /* Add another revision, re-pack, re-recover. */
+ subpool = svn_pool_create(pool);
+ SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, subpool, subpool));
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, MAX_REV, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/mu", "new-mu", subpool));
+ SVN_ERR(svn_fs_commit_txn(&conflict, &after_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(after_rev));
+ svn_pool_destroy(subpool);
+ SVN_ERR(svn_fs_pack(REPO_NAME, NULL, NULL, NULL, NULL, pool));
+ SVN_ERR(svn_fs_recover(REPO_NAME, NULL, NULL, pool));
+
+ /* Now, delete the youngest revprop file, and recover again. This
+ time we want to see an error! */
+ SVN_ERR(svn_io_remove_file2(
+ svn_dirent_join_many(pool, REPO_NAME, PATH_REVPROPS_DIR,
+ apr_psprintf(pool, "%ld/%ld",
+ after_rev / SHARD_SIZE,
+ after_rev),
+ SVN_VA_NULL),
+ FALSE, pool));
+ err = svn_fs_recover(REPO_NAME, NULL, NULL, pool);
+ if (! err)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Expected SVN_ERR_FS_CORRUPT error; got none");
+ if (err->apr_err != SVN_ERR_FS_CORRUPT)
+ return svn_error_create(SVN_ERR_TEST_FAILED, err,
+ "Expected SVN_ERR_FS_CORRUPT error; got:");
+ svn_error_clear(err);
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef MAX_REV
+#undef SHARD_SIZE
+
+/* ------------------------------------------------------------------------ */
+/* Regression test for issue #4320 (fsfs file-hinting fails when reading a rep
+ from the transaction that is committing rev = SHARD_SIZE). */
+#define REPO_NAME "test-repo-file-hint-at-shard-boundary"
+#define SHARD_SIZE 4
+#define MAX_REV (SHARD_SIZE - 1)
+static svn_error_t *
+file_hint_at_shard_boundary(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ apr_pool_t *subpool;
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ const char *file_contents;
+ svn_stringbuf_t *retrieved_contents;
+ svn_error_t *err = SVN_NO_ERROR;
+
+ /* Create a packed FS and MAX_REV revisions */
+ SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, pool));
+
+ /* Reopen the filesystem */
+ subpool = svn_pool_create(pool);
+ SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, subpool, subpool));
+
+ /* Revision = SHARD_SIZE */
+ file_contents = get_rev_contents(SHARD_SIZE, subpool);
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, MAX_REV, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "iota", file_contents,
+ subpool));
+
+ /* Retrieve the file. */
+ SVN_ERR(svn_test__get_file_contents(txn_root, "iota", &retrieved_contents,
+ subpool));
+ if (strcmp(retrieved_contents->data, file_contents))
+ {
+ err = svn_error_create(SVN_ERR_TEST_FAILED, err,
+ "Retrieved incorrect contents from iota.");
+ }
+
+ /* Close the repo. */
+ svn_pool_destroy(subpool);
+
+ return err;
+}
+#undef REPO_NAME
+#undef MAX_REV
+#undef SHARD_SIZE
+
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-fsfs-info"
+#define SHARD_SIZE 3
+#define MAX_REV 5
+static svn_error_t *
+test_info(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ const svn_fs_fsfs_info_t *fsfs_info;
+ const svn_fs_info_placeholder_t *info;
+
+ SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE,
+ pool));
+
+ SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, pool, pool));
+ SVN_ERR(svn_fs_info(&info, fs, pool, pool));
+ info = svn_fs_info_dup(info, pool, pool);
+
+ SVN_TEST_STRING_ASSERT(opts->fs_type, info->fs_type);
+
+ /* Bail (with success) on known-untestable scenarios */
+ if (strcmp(opts->fs_type, "fsfs") != 0)
+ return SVN_NO_ERROR;
+
+ fsfs_info = (const void *)info;
+ if (opts->server_minor_version && (opts->server_minor_version < 6))
+ {
+ SVN_TEST_ASSERT(fsfs_info->shard_size == 0);
+ SVN_TEST_ASSERT(fsfs_info->min_unpacked_rev == 0);
+ }
+ else
+ {
+ SVN_TEST_ASSERT(fsfs_info->shard_size == SHARD_SIZE);
+ SVN_TEST_ASSERT(fsfs_info->min_unpacked_rev
+ == (MAX_REV + 1) / SHARD_SIZE * SHARD_SIZE);
+ }
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef SHARD_SIZE
+#undef MAX_REV
+
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-fsfs-pack-shard-size-one"
+#define SHARD_SIZE 1
+#define MAX_REV 4
+static svn_error_t *
+pack_shard_size_one(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_string_t *propval;
+ svn_fs_t *fs;
+
+ SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE,
+ pool));
+ SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, pool, pool));
+ /* whitebox: revprop packing special-cases r0, which causes
+ (start_rev==1, end_rev==0) in pack_revprops_shard(). So test that. */
+ SVN_ERR(svn_fs_revision_prop(&propval, fs, 1, SVN_PROP_REVISION_LOG, pool));
+ SVN_TEST_STRING_ASSERT(propval->data, R1_LOG_MSG);
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef SHARD_SIZE
+#undef MAX_REV
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-get_set_multiple_huge_revprops_packed_fs"
+#define SHARD_SIZE 4
+#define MAX_REV 9
+static svn_error_t *
+get_set_multiple_huge_revprops_packed_fs(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_string_t *prop_value;
+ svn_revnum_t rev;
+
+ /* Create the packed FS and open it. */
+ SVN_ERR(prepare_revprop_repo(&fs, REPO_NAME, MAX_REV, SHARD_SIZE, opts,
+ pool));
+
+ /* Set commit messages to different values */
+ for (rev = 0; rev <= MAX_REV; ++rev)
+ SVN_ERR(svn_fs_change_rev_prop(fs, rev, SVN_PROP_REVISION_LOG,
+ default_log(rev, pool),
+ pool));
+
+ /* verify */
+ for (rev = 0; rev <= MAX_REV; ++rev)
+ {
+ SVN_ERR(svn_fs_revision_prop(&prop_value, fs, rev,
+ SVN_PROP_REVISION_LOG, pool));
+ SVN_TEST_STRING_ASSERT(prop_value->data, default_log(rev, pool)->data);
+ }
+
+ /* Put a huge revprop into revision 1 and 2. */
+ SVN_ERR(svn_fs_change_rev_prop(fs, 1, SVN_PROP_REVISION_LOG,
+ huge_log(1, pool),
+ pool));
+ SVN_ERR(svn_fs_change_rev_prop(fs, 2, SVN_PROP_REVISION_LOG,
+ huge_log(2, pool),
+ pool));
+ SVN_ERR(svn_fs_change_rev_prop(fs, 5, SVN_PROP_REVISION_LOG,
+ huge_log(5, pool),
+ pool));
+ SVN_ERR(svn_fs_change_rev_prop(fs, 6, SVN_PROP_REVISION_LOG,
+ huge_log(6, pool),
+ pool));
+
+ /* verify */
+ for (rev = 0; rev <= MAX_REV; ++rev)
+ {
+ SVN_ERR(svn_fs_revision_prop(&prop_value, fs, rev,
+ SVN_PROP_REVISION_LOG, pool));
+
+ if (rev == 1 || rev == 2 || rev == 5 || rev == 6)
+ SVN_TEST_STRING_ASSERT(prop_value->data,
+ huge_log(rev, pool)->data);
+ else
+ SVN_TEST_STRING_ASSERT(prop_value->data,
+ default_log(rev, pool)->data);
+ }
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef MAX_REV
+#undef SHARD_SIZE
+
+/* ------------------------------------------------------------------------ */
+#define SHARD_SIZE 4
+static svn_error_t *
+upgrade_txns_to_log_addressing(const svn_test_opts_t *opts,
+ const char *repo_name,
+ svn_revnum_t max_rev,
+ svn_boolean_t upgrade_before_txns,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_revnum_t rev;
+ apr_array_header_t *txns;
+ apr_array_header_t *txn_names;
+ int i, k;
+ svn_test_opts_t temp_opts;
+ svn_fs_root_t *root;
+ apr_pool_t *iterpool = svn_pool_create(pool);
+
+ static const char * const paths[SHARD_SIZE][2]
+ = {
+ { "A/mu", "A/B/lambda" },
+ { "A/B/E/alpha", "A/D/H/psi" },
+ { "A/D/gamma", "A/B/E/beta" },
+ { "A/D/G/pi", "A/D/G/rho" }
+ };
+
+ /* Bail (with success) on known-untestable scenarios */
+ if ((strcmp(opts->fs_type, "fsfs") != 0)
+ || (opts->server_minor_version && (opts->server_minor_version < 9)))
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "pre-1.9 SVN doesn't support log addressing");
+
+ /* Create the packed FS in phys addressing format and open it. */
+ temp_opts = *opts;
+ temp_opts.server_minor_version = 8;
+ SVN_ERR(prepare_revprop_repo(&fs, repo_name, max_rev, SHARD_SIZE,
+ &temp_opts, pool));
+
+ if (upgrade_before_txns)
+ {
+ /* upgrade to final repo format (using log addressing) and re-open */
+ SVN_ERR(svn_fs_upgrade2(repo_name, NULL, NULL, NULL, NULL, pool));
+ SVN_ERR(svn_fs_open2(&fs, repo_name, svn_fs_config(fs, pool), pool,
+ pool));
+ }
+
+ /* Create 4 concurrent transactions */
+ txns = apr_array_make(pool, SHARD_SIZE, sizeof(svn_fs_txn_t *));
+ txn_names = apr_array_make(pool, SHARD_SIZE, sizeof(const char *));
+ for (i = 0; i < SHARD_SIZE; ++i)
+ {
+ svn_fs_txn_t *txn;
+ const char *txn_name;
+
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, max_rev, pool));
+ APR_ARRAY_PUSH(txns, svn_fs_txn_t *) = txn;
+
+ SVN_ERR(svn_fs_txn_name(&txn_name, txn, pool));
+ APR_ARRAY_PUSH(txn_names, const char *) = txn_name;
+ }
+
+ /* Let all txns touch at least 2 files.
+ * Thus, the addressing data of at least one representation in the txn
+ * will differ between addressing modes. */
+ for (i = 0; i < SHARD_SIZE; ++i)
+ {
+ svn_fs_txn_t *txn = APR_ARRAY_IDX(txns, i, svn_fs_txn_t *);
+ SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+
+ for (k = 0; k < 2; ++k)
+ {
+ svn_stream_t *stream;
+ const char *file_path = paths[i][k];
+ svn_pool_clear(iterpool);
+
+ SVN_ERR(svn_fs_apply_text(&stream, root, file_path, NULL, iterpool));
+ SVN_ERR(svn_stream_printf(stream, iterpool,
+ "This is file %s in txn %d",
+ file_path, i));
+ SVN_ERR(svn_stream_close(stream));
+ }
+ }
+
+ if (!upgrade_before_txns)
+ {
+ /* upgrade to final repo format (using log addressing) and re-open */
+ SVN_ERR(svn_fs_upgrade2(repo_name, NULL, NULL, NULL, NULL, pool));
+ SVN_ERR(svn_fs_open2(&fs, repo_name, svn_fs_config(fs, pool), pool,
+ pool));
+ }
+
+ /* Commit all transactions
+ * (in reverse order to make things more interesting) */
+ for (i = SHARD_SIZE - 1; i >= 0; --i)
+ {
+ svn_fs_txn_t *txn;
+ const char *txn_name = APR_ARRAY_IDX(txn_names, i, const char *);
+ svn_pool_clear(iterpool);
+
+ SVN_ERR(svn_fs_open_txn(&txn, fs, txn_name, iterpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, iterpool));
+ }
+
+ /* Further changes to fill the shard */
+
+ SVN_ERR(svn_fs_youngest_rev(&rev, fs, pool));
+ SVN_TEST_ASSERT(rev == SHARD_SIZE + max_rev + 1);
+
+ while ((rev + 1) % SHARD_SIZE)
+ {
+ svn_fs_txn_t *txn;
+ if (rev % SHARD_SIZE == 0)
+ break;
+
+ svn_pool_clear(iterpool);
+
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, iterpool));
+ SVN_ERR(svn_fs_txn_root(&root, txn, iterpool));
+ SVN_ERR(svn_test__set_file_contents(root, "iota",
+ get_rev_contents(rev + 1, iterpool),
+ iterpool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, iterpool));
+ }
+
+ /* Make sure to close all file handles etc. from the last iteration */
+
+ svn_pool_clear(iterpool);
+
+ /* Pack repo to verify that old and new shard get packed according to
+ their respective addressing mode */
+
+ SVN_ERR(svn_fs_pack(repo_name, NULL, NULL, NULL, NULL, pool));
+
+ /* verify that our changes got in */
+
+ SVN_ERR(svn_fs_revision_root(&root, fs, rev, pool));
+ for (i = 0; i < SHARD_SIZE; ++i)
+ {
+ for (k = 0; k < 2; ++k)
+ {
+ svn_stream_t *stream;
+ const char *file_path = paths[i][k];
+ svn_string_t *string;
+ const char *expected;
+
+ svn_pool_clear(iterpool);
+
+ SVN_ERR(svn_fs_file_contents(&stream, root, file_path, iterpool));
+ SVN_ERR(svn_string_from_stream(&string, stream, iterpool, iterpool));
+
+ expected = apr_psprintf(pool,"This is file %s in txn %d",
+ file_path, i);
+ SVN_TEST_STRING_ASSERT(string->data, expected);
+ }
+ }
+
+ /* verify that the indexes are consistent, we calculated the correct
+ low-level checksums etc. */
+ SVN_ERR(svn_fs_verify(repo_name, NULL,
+ SVN_INVALID_REVNUM, SVN_INVALID_REVNUM,
+ NULL, NULL, NULL, NULL, pool));
+ for (; rev >= 0; --rev)
+ {
+ svn_pool_clear(iterpool);
+ SVN_ERR(svn_fs_revision_root(&root, fs, rev, iterpool));
+ SVN_ERR(svn_fs_verify_root(root, iterpool));
+ }
+
+ return SVN_NO_ERROR;
+}
+#undef SHARD_SIZE
+
+#define REPO_NAME "test-repo-upgrade_new_txns_to_log_addressing"
+#define MAX_REV 8
+static svn_error_t *
+upgrade_new_txns_to_log_addressing(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ SVN_ERR(upgrade_txns_to_log_addressing(opts, REPO_NAME, MAX_REV, TRUE,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef MAX_REV
+
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-upgrade_old_txns_to_log_addressing"
+#define MAX_REV 8
+/* Variant driver: run the shared upgrade scenario with the flag set to
+ * FALSE, i.e. the "old txns" case (txns started before svnadmin upgrade). */
+static svn_error_t *
+upgrade_old_txns_to_log_addressing(const svn_test_opts_t *opts,
+                                   apr_pool_t *pool)
+{
+  /* Delegate to the shared implementation; propagate its error, if any,
+     directly to the test harness. */
+  return upgrade_txns_to_log_addressing(opts, REPO_NAME, MAX_REV, FALSE,
+                                        pool);
+}
+
+#undef REPO_NAME
+#undef MAX_REV
+
+/* ------------------------------------------------------------------------ */
+
+#define REPO_NAME "test-repo-metadata_checksumming"
+/* Check that FSFS format 7+ detects corrupted metadata: flip one byte of
+ * the r0 rev file on disk, then verify that the plain read path still
+ * succeeds (no checksum coverage there) while the block-read path raises
+ * SVN_ERR_CHECKSUM_MISMATCH. */
+static svn_error_t *
+metadata_checksumming(const svn_test_opts_t *opts,
+                      apr_pool_t *pool)
+{
+  svn_fs_t *fs;
+  const char *repo_path, *r0_path;
+  apr_hash_t *fs_config = apr_hash_make(pool);
+  svn_stringbuf_t *r0;
+  svn_fs_root_t *root;
+  apr_hash_t *dir;
+
+  /* Skip this test unless we are FSFS f7+ */
+  if ((strcmp(opts->fs_type, "fsfs") != 0)
+      || (opts->server_minor_version && (opts->server_minor_version < 9)))
+    return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+                            "pre-1.9 SVN doesn't checksum metadata");
+
+  /* Create the file system to fiddle with. */
+  SVN_ERR(svn_test__create_fs(&fs, REPO_NAME, opts, pool));
+  repo_path = svn_fs_path(fs, pool);
+
+  /* Manipulate the data on disk.
+   * (change id from '0.0.*' to '1.0.*')
+   * NOTE(review): offset 21 assumes the exact r0 layout produced by
+   * svn_test__create_fs for this format — confirm if the writer changes. */
+  r0_path = svn_dirent_join_many(pool, repo_path, "revs", "0", "0",
+                                 SVN_VA_NULL);
+  SVN_ERR(svn_stringbuf_from_file2(&r0, r0_path, pool));
+  r0->data[21] = '1';
+  SVN_ERR(svn_io_remove_file2(r0_path, FALSE, pool));
+  SVN_ERR(svn_io_file_create_bytes(r0_path, r0->data, r0->len, pool));
+
+  /* Reading the corrupted data on the normal code path triggers no error.
+   * Use a separate namespace to avoid simply reading data from cache. */
+  svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_NS,
+                svn_uuid_generate(pool));
+  SVN_ERR(svn_fs_open2(&fs, repo_path, fs_config, pool, pool));
+  SVN_ERR(svn_fs_revision_root(&root, fs, 0, pool));
+  SVN_ERR(svn_fs_dir_entries(&dir, root, "/", pool));
+
+  /* The block-read code path uses the P2L index information and compares
+   * low-level checksums. Again, separate cache namespace. */
+  svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_NS,
+                svn_uuid_generate(pool));
+  svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_BLOCK_READ, "1");
+  SVN_ERR(svn_fs_open2(&fs, repo_path, fs_config, pool, pool));
+  SVN_ERR(svn_fs_revision_root(&root, fs, 0, pool));
+  SVN_TEST_ASSERT_ERROR(svn_fs_dir_entries(&dir, root, "/", pool),
+                        SVN_ERR_CHECKSUM_MISMATCH);
+
+  return SVN_NO_ERROR;
+}
+
+#undef REPO_NAME
+
+/* ------------------------------------------------------------------------ */
+
+#define REPO_NAME "test-repo-revprop_caching_on_off"
+/* Verify that a revprop change made through one svn_fs_t object becomes
+ * visible through a second svn_fs_t object on the same repository, when
+ * the second one has revprop caching enabled. */
+static svn_error_t *
+revprop_caching_on_off(const svn_test_opts_t *opts,
+                       apr_pool_t *pool)
+{
+  svn_fs_t *fs1;
+  svn_fs_t *fs2;
+  apr_hash_t *fs_config;
+  svn_string_t *value;
+  /* svn_fs_change_rev_prop2() wants a non-const pointer-to-pointer for the
+     old value; this intermediate exists only to satisfy that signature. */
+  const svn_string_t *another_value_for_avoiding_warnings_from_a_broken_api;
+  const svn_string_t *new_value = svn_string_create("new", pool);
+
+  /* This test targets FSFS revprop caching only. */
+  if (strcmp(opts->fs_type, "fsfs") != 0)
+    return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, NULL);
+
+  /* Open two filesystem objects, enable revision property caching
+   * in one of them. */
+  SVN_ERR(svn_test__create_fs(&fs1, REPO_NAME, opts, pool));
+
+  fs_config = apr_hash_make(pool);
+  svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_REVPROPS, "1");
+
+  SVN_ERR(svn_fs_open2(&fs2, svn_fs_path(fs1, pool), fs_config, pool, pool));
+
+  /* With inefficient named atomics, the filesystem will output a warning
+     and disable the revprop caching, but we still would like to test
+     these cases. Ignore the warning(s). */
+  svn_fs_set_warning_func(fs2, ignore_fs_warnings, NULL);
+
+  /* Change r0's svn:date via FS1, passing the old value read through FS2. */
+  SVN_ERR(svn_fs_revision_prop(&value, fs2, 0, "svn:date", pool));
+  another_value_for_avoiding_warnings_from_a_broken_api = value;
+  SVN_ERR(svn_fs_change_rev_prop2(
+            fs1, 0, "svn:date",
+            &another_value_for_avoiding_warnings_from_a_broken_api,
+            new_value, pool));
+
+  /* Expect the change to be visible through both objects.*/
+  SVN_ERR(svn_fs_revision_prop(&value, fs1, 0, "svn:date", pool));
+  SVN_TEST_STRING_ASSERT(value->data, "new");
+
+  SVN_ERR(svn_fs_revision_prop(&value, fs2, 0, "svn:date", pool));
+  SVN_TEST_STRING_ASSERT(value->data, "new");
+
+  return SVN_NO_ERROR;
+}
+
+#undef REPO_NAME
+
+/* ------------------------------------------------------------------------ */
+
+/* Exercise svn_fs_fs__id_txn_parse(), in particular overflow detection in
+ * the revision-number element of a txn ID.  The overflow boundary depends
+ * on sizeof(long), hence the LONG_MAX dispatch below.
+ *
+ * Note: a previous revision declared "#define LONG_MAX_STR #LONG_MAX"
+ * here; it was unused, and the '#' stringize operator is only valid in
+ * function-like macros, so it has been removed. */
+static svn_error_t *
+id_parser_test(const svn_test_opts_t *opts,
+               apr_pool_t *pool)
+{
+  /* Verify the revision number parser (e.g. first element of a txn ID) */
+  svn_fs_fs__id_part_t id_part;
+  SVN_ERR(svn_fs_fs__id_txn_parse(&id_part, "0-0"));
+
+#if LONG_MAX == 2147483647L
+  /* 32-bit 'long': LONG_MAX itself must parse ... */
+  SVN_ERR(svn_fs_fs__id_txn_parse(&id_part, "2147483647-0"));
+
+  /* Trigger all sorts of overflow conditions. */
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "2147483648-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "21474836470-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "21474836479-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "4294967295-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "4294967296-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "4294967304-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "4294967305-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "42949672950-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "42949672959-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+
+  /* 0x120000000 = 4831838208.
+   * 483183820 < 10*483183820 mod 2^32 = 536870904 */
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "4831838208-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+#else
+  /* 64-bit 'long': LONG_MAX itself must parse ... */
+  SVN_ERR(svn_fs_fs__id_txn_parse(&id_part, "9223372036854775807-0"));
+
+  /* Trigger all sorts of overflow conditions. */
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part,
+                                                "9223372036854775808-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part,
+                                                "92233720368547758070-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part,
+                                                "92233720368547758079-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part,
+                                                "18446744073709551615-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part,
+                                                "18446744073709551616-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part,
+                                                "18446744073709551624-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part,
+                                                "18446744073709551625-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part,
+                                                "184467440737095516150-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part,
+                                                "184467440737095516159-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+
+  /* 0x12000000000000000 = 20752587082923245568.
+   * 2075258708292324556 < 10*2075258708292324556 mod 2^32 = 2305843009213693944 */
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part,
+                                                "20752587082923245568-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+#endif
+
+  /* Invalid characters */
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "2e4-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+  SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "2-4-0"),
+                        SVN_ERR_FS_MALFORMED_TXN_ID);
+
+  return SVN_NO_ERROR;
+}
+
+/* ------------------------------------------------------------------------ */
+
+#define REPO_NAME "test-repo-plain_0_length"
+
+/* Callback for svn_fs_fs__dump_index: append a copy of ENTRY, allocated
+ * in the collecting array's pool, to the apr_array_header_t passed as
+ * BATON.  SCRATCH_POOL is unused. */
+static svn_error_t *
+receive_index(const svn_fs_fs__p2l_entry_t *entry,
+              void *baton,
+              apr_pool_t *scratch_pool)
+{
+  apr_array_header_t *collected = baton;
+  svn_fs_fs__p2l_entry_t *copy;
+
+  /* The ENTRY we receive is only valid for this call - duplicate it. */
+  copy = apr_pmemdup(collected->pool, entry, sizeof(*entry));
+  APR_ARRAY_PUSH(collected, svn_fs_fs__p2l_entry_t *) = copy;
+
+  return SVN_NO_ERROR;
+}
+
+/* Return the offset of the first occurrence of SUBSTRING in REV_CONTENTS,
+ * or APR_SIZE_MAX if there is none (including the case that SUBSTRING is
+ * longer than REV_CONTENTS). */
+static apr_size_t
+stringbuf_find(svn_stringbuf_t *rev_contents,
+               const char *substring)
+{
+  apr_size_t i;
+  apr_size_t len = strlen(substring);
+
+  /* Loop on "i + len <= total" rather than "i < total - len + 1": the
+     latter wraps around (apr_size_t is unsigned) when LEN exceeds the
+     buffer length, which would make the loop read past the buffer end. */
+  for (i = 0; i + len <= rev_contents->len; ++i)
+    if (!memcmp(rev_contents->data + i, substring, len))
+      return i;
+
+  return APR_SIZE_MAX;
+}
+
+/* Regression test for issue #4554: a file whose DATA_REP has been redirected
+ * to a (non-empty) PLAIN props rep with expanded_size == 0 must still report
+ * its correct length.  The rev file is patched by hand to create that state. */
+static svn_error_t *
+plain_0_length(const svn_test_opts_t *opts,
+               apr_pool_t *pool)
+{
+  svn_fs_t *fs;
+  fs_fs_data_t *ffd;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *root;
+  svn_revnum_t rev;
+  const char *rev_path;
+  svn_stringbuf_t *rev_contents;
+  apr_hash_t *fs_config;
+  svn_filesize_t file_length;
+  apr_size_t offset;
+
+  /* FSFS-specific on-disk format manipulation below. */
+  if (strcmp(opts->fs_type, "fsfs") != 0)
+    return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, NULL);
+
+  /* Create a repo that does not deltify properties and does not share reps
+     on its own - makes it easier to do that later by hand. */
+  SVN_ERR(svn_test__create_fs(&fs, REPO_NAME, opts, pool));
+  ffd = fs->fsap_data;
+  ffd->deltify_properties = FALSE;
+  ffd->rep_sharing_allowed = FALSE;
+
+  /* Create one file node with matching contents and property reps. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+  SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+  SVN_ERR(svn_fs_make_file(root, "foo", pool));
+  SVN_ERR(svn_test__set_file_contents(root, "foo", "END\n", pool));
+  SVN_ERR(svn_fs_change_node_prop(root, "foo", "x", NULL, pool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+
+  /* Redirect text rep to props rep. */
+  rev_path = svn_fs_fs__path_rev_absolute(fs, rev, pool);
+  SVN_ERR(svn_stringbuf_from_file2(&rev_contents, rev_path, pool));
+
+  /* Locate the noderev header inside the raw rev file; if the format does
+     not contain one (offset == APR_SIZE_MAX), leave the file unpatched. */
+  offset = stringbuf_find(rev_contents, "id: ");
+  if (offset != APR_SIZE_MAX)
+    {
+      node_revision_t *noderev;
+      svn_stringbuf_t *noderev_str;
+
+      /* Read the noderev. */
+      svn_stream_t *stream = svn_stream_from_stringbuf(rev_contents, pool);
+      SVN_ERR(svn_stream_skip(stream, offset));
+      SVN_ERR(svn_fs_fs__read_noderev(&noderev, stream, pool, pool));
+      SVN_ERR(svn_stream_close(stream));
+
+      /* Tweak the DATA_REP. */
+      noderev->data_rep->revision = noderev->prop_rep->revision;
+      noderev->data_rep->item_index = noderev->prop_rep->item_index;
+      noderev->data_rep->size = noderev->prop_rep->size;
+      noderev->data_rep->expanded_size = 0;
+
+      /* Serialize it back. */
+      noderev_str = svn_stringbuf_create_empty(pool);
+      stream = svn_stream_from_stringbuf(noderev_str, pool);
+      SVN_ERR(svn_fs_fs__write_noderev(stream, noderev, ffd->format,
+                                       svn_fs_fs__fs_supports_mergeinfo(fs),
+                                       pool));
+      SVN_ERR(svn_stream_close(stream));
+
+      /* Patch the revision contents
+         NOTE(review): this assumes the re-serialized noderev is no longer
+         than the original span it overwrites - confirm for format changes. */
+      memcpy(rev_contents->data + offset, noderev_str->data, noderev_str->len);
+    }
+
+  SVN_ERR(svn_io_write_atomic2(rev_path, rev_contents->data,
+                               rev_contents->len, NULL, FALSE,
+                               pool));
+
+  if (svn_fs_fs__use_log_addressing(fs))
+    {
+      /* Refresh index data (checksums). */
+      apr_array_header_t *entries = apr_array_make(pool, 4, sizeof(void *));
+      SVN_ERR(svn_fs_fs__dump_index(fs, rev, receive_index, entries,
+                                    NULL, NULL, pool));
+      SVN_ERR(svn_fs_fs__load_index(fs, rev, entries, pool));
+    }
+
+  /* Create an independent FS instances with separate caches etc. */
+  fs_config = apr_hash_make(pool);
+  svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_NS,
+                svn_uuid_generate(pool));
+  SVN_ERR(svn_fs_open2(&fs, REPO_NAME, fs_config, pool, pool));
+
+  /* Now, check that we get the correct file length. */
+  SVN_ERR(svn_fs_revision_root(&root, fs, rev, pool));
+  SVN_ERR(svn_fs_file_length(&file_length, root, "foo", pool));
+
+  /* "END\n" is 4 bytes long despite expanded_size having been zeroed. */
+  SVN_TEST_ASSERT(file_length == 4);
+
+  return SVN_NO_ERROR;
+}
+
+#undef REPO_NAME
+
+/* ------------------------------------------------------------------------ */
+
+#define REPO_NAME "test-repo-rep_sharing_effectiveness"
+
+/* Return the number of (possibly overlapping) occurrences of NEEDLE
+ * within STRING. */
+static int
+count_substring(svn_stringbuf_t *string,
+                const char *needle)
+{
+  const apr_size_t needle_len = strlen(needle);
+  apr_size_t offset = 0;
+  int occurrences = 0;
+
+  /* Slide a window of NEEDLE_LEN bytes over STRING. */
+  while (offset + needle_len <= string->len)
+    {
+      if (memcmp(string->data + offset, needle, needle_len) == 0)
+        ++occurrences;
+      ++offset;
+    }
+
+  return occurrences;
+}
+
+/* Set *COUNT to the number of representation headers ("PLAIN" plus
+ * "DELTA") found in the on-disk rev file of REVISION in FS.  Perform
+ * allocations in POOL. */
+static svn_error_t *
+count_representations(int *count,
+                      svn_fs_t *fs,
+                      svn_revnum_t revision,
+                      apr_pool_t *pool)
+{
+  const char *rev_path;
+  svn_stringbuf_t *rev_contents;
+
+  /* Read the raw revision file and scan it for rep headers. */
+  rev_path = svn_fs_fs__path_rev_absolute(fs, revision, pool);
+  SVN_ERR(svn_stringbuf_from_file2(&rev_contents, rev_path, pool));
+
+  *count = count_substring(rev_contents, "PLAIN")
+         + count_substring(rev_contents, "DELTA");
+
+  return SVN_NO_ERROR;
+}
+
+/* Return S concatenated with itself 128 times (7 doublings), allocated in
+   POOL.  This makes the string big enough for deltification etc. to kick
+   in. */
+static const char*
+multiply_string(const char *s,
+                apr_pool_t *pool)
+{
+  svn_stringbuf_t *result = svn_stringbuf_create(s, pool);
+  int doublings = 0;
+
+  /* Each iteration appends the buffer to itself, doubling its length. */
+  while (doublings++ < 7)
+    svn_stringbuf_insert(result, result->len, result->data, result->len);
+
+  return result->data;
+}
+
+/* Verify that rep-sharing de-duplicates identical file contents, both
+ * across revisions (classic rep-sharing) and within a single revision,
+ * by counting the PLAIN/DELTA rep headers left in each rev file. */
+static svn_error_t *
+rep_sharing_effectiveness(const svn_test_opts_t *opts,
+                          apr_pool_t *pool)
+{
+  svn_fs_t *fs;
+  fs_fs_data_t *ffd;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *root;
+  svn_revnum_t rev;
+  const char *hello_str = multiply_string("Hello, ", pool);
+  const char *world_str = multiply_string("World!", pool);
+  const char *goodbye_str = multiply_string("Goodbye!", pool);
+
+  /* Rep sharing is an FSFS feature. */
+  if (strcmp(opts->fs_type, "fsfs") != 0)
+    return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, NULL);
+
+  /* Create a repo and explicitly enable rep sharing. */
+  SVN_ERR(svn_test__create_fs(&fs, REPO_NAME, opts, pool));
+
+  ffd = fs->fsap_data;
+  if (ffd->format < SVN_FS_FS__MIN_REP_SHARING_FORMAT)
+    return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, NULL);
+
+  ffd->rep_sharing_allowed = TRUE;
+
+  /* Revision 1: create 2 files with different content. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+  SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+  SVN_ERR(svn_fs_make_file(root, "foo", pool));
+  SVN_ERR(svn_test__set_file_contents(root, "foo", hello_str, pool));
+  SVN_ERR(svn_fs_make_file(root, "bar", pool));
+  SVN_ERR(svn_test__set_file_contents(root, "bar", world_str, pool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+
+  /* Revision 2: modify a file to match another file's r1 content and
+     add another with the same content.
+     (classic rep-sharing). */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, pool));
+  SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+  SVN_ERR(svn_test__set_file_contents(root, "foo", world_str, pool));
+  SVN_ERR(svn_fs_make_file(root, "baz", pool));
+  SVN_ERR(svn_test__set_file_contents(root, "baz", hello_str, pool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+
+  /* Revision 3: modify all files to some new, identical content and add
+     another with the same content.
+     (in-revision rep-sharing). */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, pool));
+  SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+  SVN_ERR(svn_test__set_file_contents(root, "foo", goodbye_str, pool));
+  SVN_ERR(svn_test__set_file_contents(root, "bar", goodbye_str, pool));
+  SVN_ERR(svn_test__set_file_contents(root, "baz", goodbye_str, pool));
+  SVN_ERR(svn_fs_make_file(root, "qux", pool));
+  SVN_ERR(svn_test__set_file_contents(root, "qux", goodbye_str, pool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+
+  /* Verify revision contents.
+     The table lists the seed strings; the actual file contents are the
+     multiply_string() expansions of those seeds. */
+  {
+    const struct {
+      svn_revnum_t revision;
+      const char *file;
+      const char *contents;
+    } expected[] = {
+      { 1, "foo", "Hello, " },
+      { 1, "bar", "World!" },
+      { 2, "foo", "World!" },
+      { 2, "bar", "World!" },
+      { 2, "baz", "Hello, " },
+      { 3, "foo", "Goodbye!" },
+      { 3, "bar", "Goodbye!" },
+      { 3, "baz", "Goodbye!" },
+      { 3, "qux", "Goodbye!" },
+      { SVN_INVALID_REVNUM, NULL, NULL }
+    };
+
+    int i;
+    apr_pool_t *iterpool = svn_pool_create(pool);
+    for (i = 0; SVN_IS_VALID_REVNUM(expected[i].revision); ++i)
+      {
+        svn_stringbuf_t *str;
+
+        SVN_ERR(svn_fs_revision_root(&root, fs, expected[i].revision,
+                                     iterpool));
+        SVN_ERR(svn_test__get_file_contents(root, expected[i].file, &str,
+                                            iterpool));
+
+        SVN_TEST_STRING_ASSERT(str->data,
+                               multiply_string(expected[i].contents,
+                                               iterpool));
+      }
+
+    svn_pool_destroy(iterpool);
+  }
+
+  /* Verify that rep sharing eliminated most reps. */
+  {
+    /* Number of expected representations (including the root directory). */
+    const int expected[] = { 1, 3, 1, 2 } ;
+
+    svn_revnum_t i;
+    apr_pool_t *iterpool = svn_pool_create(pool);
+    for (i = 0; i <= rev; ++i)
+      {
+        int count;
+        SVN_ERR(count_representations(&count, fs, i, iterpool));
+        SVN_TEST_ASSERT(count == expected[i]);
+      }
+
+    svn_pool_destroy(iterpool);
+  }
+
+  return SVN_NO_ERROR;
+}
+
+#undef REPO_NAME
+
+/* ------------------------------------------------------------------------ */
+
+#define REPO_NAME "test-repo-delta_chain_with_plain"
+
+/* Regression test for issue #4577 (fixed in r1676667): a delta chain whose
+ * base is a shared PLAIN rep must remain readable after the file grew past
+ * the end of the rev file that contains the PLAIN rep. */
+static svn_error_t *
+delta_chain_with_plain(const svn_test_opts_t *opts,
+                       apr_pool_t *pool)
+{
+  svn_fs_t *fs;
+  fs_fs_data_t *ffd;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *root;
+  svn_revnum_t rev;
+  svn_stringbuf_t *prop_value, *contents, *contents2, *hash_rep;
+  int i;
+  apr_hash_t *fs_config, *props;
+
+  /* FSFS-specific rep internals below. */
+  if (strcmp(opts->fs_type, "fsfs") != 0)
+    return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, NULL);
+
+  /* Reproducing issue #4577 without the r1676667 fix is much harder in 1.9+
+   * than it was in 1.8.  The reason is that 1.9+ won't deltify small reps
+   * nor against small reps.  So, we must construct relatively large PLAIN
+   * and DELTA reps.
+   *
+   * The idea is to construct a PLAIN prop rep, make a file share that as
+   * its text rep, grow the file considerably (to make the PLAIN rep later
+   * read beyond EOF) and then replace it entirely with another longish
+   * contents.
+   */
+
+  /* Create a repo and explicitly enable rep sharing. */
+  SVN_ERR(svn_test__create_fs(&fs, REPO_NAME, opts, pool));
+
+  ffd = fs->fsap_data;
+  if (ffd->format < SVN_FS_FS__MIN_REP_SHARING_FORMAT)
+    return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, NULL);
+
+  ffd->rep_sharing_allowed = TRUE;
+
+  /* Make sure all props are stored as PLAIN reps. */
+  ffd->deltify_properties = FALSE;
+
+  /* Construct various content strings.
+   * Note that props need to be shorter than the file contents. */
+  prop_value = svn_stringbuf_create("prop", pool);
+  for (i = 0; i < 10; ++i)
+    svn_stringbuf_appendstr(prop_value, prop_value);
+
+  contents = svn_stringbuf_create("Some text.", pool);
+  for (i = 0; i < 10; ++i)
+    svn_stringbuf_appendstr(contents, contents);
+
+  contents2 = svn_stringbuf_create("Totally new!", pool);
+  for (i = 0; i < 10; ++i)
+    svn_stringbuf_appendstr(contents2, contents2);
+
+  /* Revision 1: create a property rep. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+  SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+  SVN_ERR(svn_fs_change_node_prop(root, "/", "p",
+                                  svn_string_create(prop_value->data, pool),
+                                  pool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+
+  /* Revision 2: create a file that shares the text rep with the PLAIN
+   * property rep from r1. */
+  props = apr_hash_make(pool);
+  svn_hash_sets(props, "p", svn_string_create(prop_value->data, pool));
+
+  hash_rep = svn_stringbuf_create_empty(pool);
+  /* svn_hash_write2() returns an svn_error_t *; don't discard it silently
+     (an ignored failure would leak the error object and hide the cause). */
+  SVN_ERR(svn_hash_write2(props,
+                          svn_stream_from_stringbuf(hash_rep, pool), "END",
+                          pool));
+
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, pool));
+  SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+  SVN_ERR(svn_fs_make_file(root, "foo", pool));
+  SVN_ERR(svn_test__set_file_contents(root, "foo", hash_rep->data, pool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+
+  /* Revision 3: modify the file contents to a long-ish full text
+   * (~10kByte, longer than the r1 revision file). */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, pool));
+  SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+  SVN_ERR(svn_test__set_file_contents(root, "foo", contents->data, pool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+
+  /* Revision 4: replace file contents to something disjoint from r3. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, pool));
+  SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+  SVN_ERR(svn_test__set_file_contents(root, "foo", contents2->data, pool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+
+  /* Getting foo@4 must work.  To make sure we actually read from disk,
+   * use a new FS instance with disjoint caches. */
+  fs_config = apr_hash_make(pool);
+  svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_NS,
+                svn_uuid_generate(pool));
+  SVN_ERR(svn_fs_open2(&fs, REPO_NAME, fs_config, pool, pool));
+
+  SVN_ERR(svn_fs_revision_root(&root, fs, rev, pool));
+  SVN_ERR(svn_test__get_file_contents(root, "foo", &contents, pool));
+  SVN_TEST_STRING_ASSERT(contents->data, contents2->data);
+
+  return SVN_NO_ERROR;
+}
+
+#undef REPO_NAME
+
+/* ------------------------------------------------------------------------ */
+
+#define REPO_NAME "test-repo-compare_0_length_rep"
+
+/* Check that svn_fs_contents_different() treats "no rep", "empty PLAIN rep"
+ * and "empty DELTA rep" as mutually equal, and as different from non-empty
+ * reps.  Empty PLAIN reps can only be produced by old formats, hence the
+ * create-as-format-1-then-upgrade dance. */
+static svn_error_t *
+compare_0_length_rep(const svn_test_opts_t *opts,
+                     apr_pool_t *pool)
+{
+  svn_fs_t *fs;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *root;
+  svn_revnum_t rev;
+  int i, k;
+  apr_hash_t *fs_config;
+
+  /* Test expectations. */
+#define no_rep_file "no-rep"
+#define empty_plain_file "empty-plain"
+#define plain_file "plain"
+#define empty_delta_file "empty-delta"
+#define delta_file "delta"
+
+  enum { COUNT = 5 };
+  const char *file_names[COUNT] = { no_rep_file,
+                                    empty_plain_file,
+                                    plain_file,
+                                    empty_delta_file,
+                                    delta_file };
+
+  /* equal[i][k] == 1 iff file i and file k must compare as identical. */
+  int equal[COUNT][COUNT] = { { 1, 1, 0, 1, 0 },
+                              { 1, 1, 0, 1, 0 },
+                              { 0, 0, 1, 0, 1 },
+                              { 1, 1, 0, 1, 0 },
+                              { 0, 0, 1, 0, 1 } };
+
+  /* Well, this club is FSFS only ... */
+  if (strcmp(opts->fs_type, "fsfs") != 0)
+    return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, NULL);
+
+  /* We want to check that whether NULL reps, empty PLAIN reps and empty
+   * DELTA reps are all considered equal, yet different from non-empty reps.
+   *
+   * Because we can't create empty PLAIN reps with recent formats anymore,
+   * some format selection & upgrade gymnastics is needed. */
+
+  /* Create a format 1 repository.
+   * This one does not support DELTA reps, so all is PLAIN. */
+  fs_config = apr_hash_make(pool);
+  svn_hash_sets(fs_config, SVN_FS_CONFIG_PRE_1_4_COMPATIBLE, "x");
+  SVN_ERR(svn_test__create_fs2(&fs, REPO_NAME, opts, fs_config, pool));
+
+  /* Revision 1, create 3 files:
+   * One with no rep, one with an empty rep and a non-empty one. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+  SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+  SVN_ERR(svn_fs_make_file(root, no_rep_file, pool));
+  SVN_ERR(svn_fs_make_file(root, empty_plain_file, pool));
+  SVN_ERR(svn_test__set_file_contents(root, empty_plain_file, "", pool));
+  SVN_ERR(svn_fs_make_file(root, plain_file, pool));
+  SVN_ERR(svn_test__set_file_contents(root, plain_file, "x", pool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+
+  /* Upgrade the file system format. */
+  SVN_ERR(svn_fs_upgrade2(REPO_NAME, NULL, NULL, NULL, NULL, pool));
+  SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, pool, pool));
+
+  /* Revision 2, create two more files:
+   * a file with an empty DELTA rep and a non-empty one. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, pool));
+  SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+  SVN_ERR(svn_fs_make_file(root, empty_delta_file, pool));
+  SVN_ERR(svn_test__set_file_contents(root, empty_delta_file, "", pool));
+  SVN_ERR(svn_fs_make_file(root, delta_file, pool));
+  SVN_ERR(svn_test__set_file_contents(root, delta_file, "x", pool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+
+  /* Now compare.
+     (DIFFERENT is a boolean, EQUAL is 0/1, so "!=" asserts they agree.) */
+  SVN_ERR(svn_fs_revision_root(&root, fs, rev, pool));
+  for (i = 0; i < COUNT; ++i)
+    for (k = 0; k < COUNT; ++k)
+      {
+        svn_boolean_t different;
+        SVN_ERR(svn_fs_contents_different(&different, root, file_names[i],
+                                          root, file_names[k], pool));
+        SVN_TEST_ASSERT(different != equal[i][k]);
+      }
+
+  return SVN_NO_ERROR;
+}
+
+#undef REPO_NAME
+
+/* ------------------------------------------------------------------------ */
+/* Verify that the format 7 pack logic works even if we can't fit all index
+   metadata into memory. */
+#define REPO_NAME "test-repo-pack-with-limited-memory"
+#define SHARD_SIZE 4
+#define MAX_REV (2 * SHARD_SIZE - 1)
+static svn_error_t *
+pack_with_limited_memory(const svn_test_opts_t *opts,
+                         apr_pool_t *pool)
+{
+  apr_size_t max_mem;
+  apr_pool_t *iterpool = svn_pool_create(pool);
+
+  /* Bail (with success) on known-untestable scenarios */
+  if (opts->server_minor_version && (opts->server_minor_version < 9))
+    return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+                            "pre-1.9 SVN doesn't support reordering packs");
+
+  /* Run with an increasing memory allowance such that we cover all
+     splitting scenarios.  Growing by 1.5x per iteration keeps the number
+     of runs small while still hitting many different budget sizes. */
+  for (max_mem = 350; max_mem < 8000; max_mem += max_mem / 2)
+    {
+      const char *dir;
+      svn_fs_t *fs;
+
+      svn_pool_clear(iterpool);
+
+      /* Create a filesystem.
+         Use a per-budget directory name so the runs don't interfere. */
+      dir = apr_psprintf(iterpool, "%s-%d", REPO_NAME, (int)max_mem);
+      SVN_ERR(create_non_packed_filesystem(dir, opts, MAX_REV, SHARD_SIZE,
+                                           iterpool));
+
+      /* Pack it with a narrow memory budget. */
+      SVN_ERR(svn_fs_open2(&fs, dir, NULL, iterpool, iterpool));
+      SVN_ERR(svn_fs_fs__pack(fs, max_mem, NULL, NULL, NULL, NULL,
+                              iterpool));
+
+      /* To be sure: Verify that we didn't break the repo. */
+      SVN_ERR(svn_fs_verify(dir, NULL, 0, MAX_REV, NULL, NULL, NULL, NULL,
+                            iterpool));
+    }
+
+  svn_pool_destroy(iterpool);
+
+  return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef MAX_REV
+#undef SHARD_SIZE
+
+/* ------------------------------------------------------------------------ */
+
+#define REPO_NAME "test-repo-large_delta_against_plain"
+
+/* Regression test for issue #4658: reconstructing a deltified property rep
+ * larger than two txdelta windows whose delta base is a PLAIN rep must
+ * yield the exact original value. */
+static svn_error_t *
+large_delta_against_plain(const svn_test_opts_t *opts,
+                          apr_pool_t *pool)
+{
+  svn_fs_t *fs;
+  fs_fs_data_t *ffd;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *root;
+  svn_revnum_t rev;
+  svn_stringbuf_t *prop_value;
+  svn_string_t *prop_read;
+  int i;
+  apr_hash_t *fs_config;
+
+  /* FSFS-specific rep internals below. */
+  if (strcmp(opts->fs_type, "fsfs") != 0)
+    return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, NULL);
+
+  /* Create the repo and tweak its private state directly. */
+  SVN_ERR(svn_test__create_fs(&fs, REPO_NAME, opts, pool));
+  ffd = fs->fsap_data;
+
+  /* Make sure all props are stored as PLAIN reps. */
+  ffd->deltify_properties = FALSE;
+
+  /* Construct a property larger than 2 txdelta windows. */
+  prop_value = svn_stringbuf_create("prop", pool);
+  while (prop_value->len <= 2 * 102400)
+    svn_stringbuf_appendstr(prop_value, prop_value);
+
+  /* Revision 1: create a property rep. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+  SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+  SVN_ERR(svn_fs_change_node_prop(root, "/", "p",
+                                  svn_string_create(prop_value->data, pool),
+                                  pool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+
+  /* Now, store them as DELTA reps. */
+  ffd->deltify_properties = TRUE;
+
+  /* Construct a property larger than 2 txdelta windows, distinct from the
+   * previous one but with a matching "tail". */
+  prop_value = svn_stringbuf_create("blob", pool);
+  while (prop_value->len <= 2 * 102400)
+    svn_stringbuf_appendstr(prop_value, prop_value);
+  for (i = 0; i < 100; ++i)
+    svn_stringbuf_appendcstr(prop_value, "prop");
+
+  /* Revision 2: modify the property.
+     (Base the txn on REV - the head we just committed - instead of a
+     hard-coded revision number, keeping this robust against edits.) */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, pool));
+  SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+  SVN_ERR(svn_fs_change_node_prop(root, "/", "p",
+                                  svn_string_create(prop_value->data, pool),
+                                  pool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+
+  /* Reconstructing the property deltified must work.  To make sure we
+   * actually read from disk, use a new FS instance with disjoint caches. */
+  fs_config = apr_hash_make(pool);
+  svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_NS,
+                svn_uuid_generate(pool));
+  SVN_ERR(svn_fs_open2(&fs, REPO_NAME, fs_config, pool, pool));
+
+  SVN_ERR(svn_fs_revision_root(&root, fs, rev, pool));
+  SVN_ERR(svn_fs_node_prop(&prop_read, root, "/", "p", pool));
+  SVN_TEST_STRING_ASSERT(prop_read->data, prop_value->data);
+
+  return SVN_NO_ERROR;
+}
+
+#undef REPO_NAME
+
+
+
+/* The test table.  */
+
+/* Maximum number of worker threads the harness may use to run these tests
+   concurrently (presumably consumed by SVN_TEST_MAIN - confirm against
+   svn_test_main.c). */
+static int max_threads = 4;
+
+/* One descriptor per test above, terminated by SVN_TEST_NULL sentinels. */
+static struct svn_test_descriptor_t test_funcs[] =
+  {
+    SVN_TEST_NULL,
+    SVN_TEST_OPTS_PASS(pack_filesystem,
+                       "pack a FSFS filesystem"),
+    SVN_TEST_OPTS_PASS(pack_even_filesystem,
+                       "pack FSFS where revs % shard = 0"),
+    SVN_TEST_OPTS_PASS(read_packed_fs,
+                       "read from a packed FSFS filesystem"),
+    SVN_TEST_OPTS_PASS(commit_packed_fs,
+                       "commit to a packed FSFS filesystem"),
+    SVN_TEST_OPTS_PASS(get_set_revprop_packed_fs,
+                       "get/set revprop while packing FSFS filesystem"),
+    SVN_TEST_OPTS_PASS(get_set_large_revprop_packed_fs,
+                       "get/set large packed revprops in FSFS"),
+    SVN_TEST_OPTS_PASS(get_set_huge_revprop_packed_fs,
+                       "get/set huge packed revprops in FSFS"),
+    SVN_TEST_OPTS_PASS(recover_fully_packed,
+                       "recover a fully packed filesystem"),
+    SVN_TEST_OPTS_PASS(file_hint_at_shard_boundary,
+                       "test file hint at shard boundary"),
+    SVN_TEST_OPTS_PASS(test_info,
+                       "test svn_fs_info"),
+    SVN_TEST_OPTS_PASS(pack_shard_size_one,
+                       "test packing with shard size = 1"),
+    SVN_TEST_OPTS_PASS(get_set_multiple_huge_revprops_packed_fs,
+                       "set multiple huge revprops in packed FSFS"),
+    SVN_TEST_OPTS_PASS(upgrade_new_txns_to_log_addressing,
+                       "upgrade txns to log addressing in shared FSFS"),
+    SVN_TEST_OPTS_PASS(upgrade_old_txns_to_log_addressing,
+                       "upgrade txns started before svnadmin upgrade"),
+    SVN_TEST_OPTS_PASS(metadata_checksumming,
+                       "metadata checksums being checked"),
+    SVN_TEST_OPTS_PASS(revprop_caching_on_off,
+                       "change revprops with enabled and disabled caching"),
+    SVN_TEST_OPTS_PASS(id_parser_test,
+                       "id parser test"),
+    SVN_TEST_OPTS_PASS(plain_0_length,
+                       "file with 0 expanded-length, issue #4554"),
+    SVN_TEST_OPTS_PASS(rep_sharing_effectiveness,
+                       "rep-sharing effectiveness"),
+    SVN_TEST_OPTS_PASS(delta_chain_with_plain,
+                       "delta chains starting with PLAIN, issue #4577"),
+    SVN_TEST_OPTS_PASS(compare_0_length_rep,
+                       "compare empty PLAIN and non-existent reps"),
+    SVN_TEST_OPTS_PASS(pack_with_limited_memory,
+                       "pack with limited memory for metadata"),
+    SVN_TEST_OPTS_PASS(large_delta_against_plain,
+                       "large deltas against PLAIN, issue #4658"),
+    SVN_TEST_NULL
+  };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_fs_fs/fs-fs-private-test.c b/subversion/tests/libsvn_fs_fs/fs-fs-private-test.c
new file mode 100644
index 0000000..9ca0805
--- /dev/null
+++ b/subversion/tests/libsvn_fs_fs/fs-fs-private-test.c
@@ -0,0 +1,441 @@
+/* fs-fs-private-test.c --- tests FSFS's private API
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdlib.h>
+#include <string.h>
+
+#include "../svn_test.h"
+
+#include "svn_hash.h"
+#include "svn_pools.h"
+#include "svn_props.h"
+#include "svn_fs.h"
+
+#include "private/svn_string_private.h"
+#include "private/svn_fs_fs_private.h"
+#include "private/svn_subr_private.h"
+
+#include "../../libsvn_fs_fs/index.h"
+
+#include "../svn_test_fs.h"
+
+
+
+/* Utility functions */
+
+/* Create a repo under REPO_NAME using OPTS. Allocate the repository in
+ * RESULT_POOL and return it in *REPOS. Set *REV to the revision containing
+ * the Greek tree addition. Use SCRATCH_POOL for temporary allocations.
+ */
+static svn_error_t *
+create_greek_repo(svn_repos_t **repos,
+ svn_revnum_t *rev,
+ const svn_test_opts_t *opts,
+ const char *repo_name,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+
+ /* Create a filesystem */
+ SVN_ERR(svn_test__create_repos(repos, repo_name, opts, result_pool));
+ fs = svn_repos_fs(*repos);
+
+ /* Add the Greek tree */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, scratch_pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, scratch_pool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, scratch_pool));
+ SVN_ERR(svn_fs_commit_txn(NULL, rev, txn, scratch_pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(*rev));
+
+ return SVN_NO_ERROR;
+}
+
+
+/* ------------------------------------------------------------------------ */
+
+#define REPO_NAME "test-repo-get-repo-stats-test"
+
+static svn_error_t *
+verify_representation_stats(const svn_fs_fs__representation_stats_t *stats,
+ apr_uint64_t expected_count)
+{
+ /* Small items, no packing (but inefficiency due to packing attempt). */
+ SVN_TEST_ASSERT(stats->total.count == expected_count);
+ SVN_TEST_ASSERT( stats->total.packed_size >= 10 * expected_count
+ && stats->total.packed_size <= 1000 * expected_count);
+ /* Expect the packed size to be sane, keeping in mind that it might
+ * be less or more than the expanded size due differences in the
+ * compression algorithms or options such as directory deltification. */
+ SVN_TEST_ASSERT(stats->total.packed_size <= 2 * stats->total.expanded_size);
+ SVN_TEST_ASSERT( stats->total.overhead_size >= 5 * expected_count
+ && stats->total.overhead_size <= 100 * expected_count);
+
+ /* Rep sharing has no effect on the Greek tree. */
+ SVN_TEST_ASSERT(stats->total.count == stats->uniques.count);
+ SVN_TEST_ASSERT(stats->total.packed_size == stats->uniques.packed_size);
+ SVN_TEST_ASSERT(stats->total.expanded_size == stats->uniques.expanded_size);
+ SVN_TEST_ASSERT(stats->total.overhead_size == stats->uniques.overhead_size);
+
+ SVN_TEST_ASSERT(stats->shared.count == 0);
+ SVN_TEST_ASSERT(stats->shared.packed_size == 0);
+ SVN_TEST_ASSERT(stats->shared.expanded_size == 0);
+ SVN_TEST_ASSERT(stats->shared.overhead_size == 0);
+
+ /* No rep sharing. */
+ SVN_TEST_ASSERT(stats->references == stats->total.count);
+ SVN_TEST_ASSERT(stats->expanded_size == stats->total.expanded_size);
+
+ /* Reasonable delta chain lengths */
+ SVN_TEST_ASSERT( stats->chain_len >= stats->total.count
+ && stats->chain_len <= 5 * stats->total.count);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+verify_node_stats(const svn_fs_fs__node_stats_t *node_stats,
+ apr_uint64_t expected_count)
+{
+ SVN_TEST_ASSERT(node_stats->count == expected_count);
+ SVN_TEST_ASSERT( node_stats->size > 100 * node_stats->count
+ && node_stats->size < 1000 * node_stats->count);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+verify_large_change(const svn_fs_fs__large_change_info_t *change,
+ svn_revnum_t revision)
+{
+ if (change->revision == SVN_INVALID_REVNUM)
+ {
+ /* Unused entry due to the Greek tree being small. */
+ SVN_TEST_ASSERT(change->path->len == 0);
+ SVN_TEST_ASSERT(change->size == 0);
+ }
+ else if (strcmp(change->path->data, "/") == 0)
+ {
+ /* The root folder nodes are always there, i.e. aren't in the
+ * Greek tree "do add" list. */
+ SVN_TEST_ASSERT( SVN_IS_VALID_REVNUM(change->revision)
+ && change->revision <= revision);
+ }
+ else
+ {
+ const struct svn_test__tree_entry_t *node;
+ for (node = svn_test__greek_tree_nodes; node->path; node++)
+ if (strcmp(node->path, change->path->data + 1) == 0)
+ {
+ SVN_TEST_ASSERT(change->revision == revision);
+
+ /* When checking content sizes, keep in mind the optional
+ * SVNDIFF overhead.*/
+ if (node->contents)
+ SVN_TEST_ASSERT( change->size >= strlen(node->contents)
+ && change->size <= 12 + strlen(node->contents));
+
+ return SVN_NO_ERROR;
+ }
+
+ SVN_TEST_ASSERT(!"Change is part of Greek tree");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+verify_histogram(const svn_fs_fs__histogram_t *histogram)
+{
+ apr_uint64_t sum_count = 0;
+ apr_uint64_t sum_size = 0;
+
+ int i;
+ for (i = 0; i < 64; ++i)
+ {
+ svn_fs_fs__histogram_line_t line = histogram->lines[i];
+
+ if (i > 10 || i < 1)
+ SVN_TEST_ASSERT(line.sum == 0 && line.count == 0);
+ else
+ SVN_TEST_ASSERT( line.sum >= (line.count << (i-1))
+ && line.sum <= (line.count << i));
+
+ sum_count += line.count;
+ sum_size += line.sum;
+ }
+
+ SVN_TEST_ASSERT(histogram->total.count == sum_count);
+ SVN_TEST_ASSERT(histogram->total.sum == sum_size);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+get_repo_stats(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_repos_t *repos;
+ svn_revnum_t rev;
+ apr_size_t i;
+ svn_fs_fs__stats_t *stats;
+ svn_fs_fs__extension_info_t *extension_info;
+
+ /* Bail (with success) on known-untestable scenarios */
+ if (strcmp(opts->fs_type, "fsfs") != 0)
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "this will test FSFS repositories only");
+
+ /* Create a filesystem */
+ SVN_ERR(create_greek_repo(&repos, &rev, opts, REPO_NAME, pool, pool));
+
+ /* Gather statistics info on that repo. */
+ SVN_ERR(svn_fs_fs__get_stats(&stats, svn_repos_fs(repos), NULL, NULL,
+ NULL, NULL, pool, pool));
+
+ /* Check that the stats make sense. */
+ SVN_TEST_ASSERT(stats->total_size > 1000 && stats->total_size < 10000);
+ SVN_TEST_ASSERT(stats->revision_count == 2);
+ SVN_TEST_ASSERT(stats->change_count == 20);
+ SVN_TEST_ASSERT(stats->change_len > 500 && stats->change_len < 2000);
+
+ /* Check representation stats. */
+ SVN_ERR(verify_representation_stats(&stats->total_rep_stats, 20));
+ SVN_ERR(verify_representation_stats(&stats->file_rep_stats, 12));
+ SVN_ERR(verify_representation_stats(&stats->dir_rep_stats, 8));
+ SVN_ERR(verify_representation_stats(&stats->file_prop_rep_stats, 0));
+ SVN_ERR(verify_representation_stats(&stats->dir_prop_rep_stats, 0));
+
+ /* Check node stats against rep stats. */
+ SVN_ERR(verify_node_stats(&stats->total_node_stats, 22));
+ SVN_ERR(verify_node_stats(&stats->file_node_stats, 12));
+ SVN_ERR(verify_node_stats(&stats->dir_node_stats, 10));
+
+ /* Check largest changes. */
+ SVN_TEST_ASSERT(stats->largest_changes->count == 64);
+ SVN_TEST_ASSERT(stats->largest_changes->min_size == 0);
+
+ for (i = 0; i < stats->largest_changes->count; ++i)
+ SVN_ERR(verify_large_change(stats->largest_changes->changes[i], rev));
+
+ /* Check histograms. */
+ SVN_ERR(verify_histogram(&stats->rep_size_histogram));
+ SVN_ERR(verify_histogram(&stats->node_size_histogram));
+ SVN_ERR(verify_histogram(&stats->added_rep_size_histogram));
+ SVN_ERR(verify_histogram(&stats->added_node_size_histogram));
+ SVN_ERR(verify_histogram(&stats->unused_rep_histogram));
+ SVN_ERR(verify_histogram(&stats->file_histogram));
+ SVN_ERR(verify_histogram(&stats->file_rep_histogram));
+ SVN_ERR(verify_histogram(&stats->file_prop_histogram));
+ SVN_ERR(verify_histogram(&stats->file_prop_rep_histogram));
+ SVN_ERR(verify_histogram(&stats->dir_histogram));
+ SVN_ERR(verify_histogram(&stats->dir_rep_histogram));
+ SVN_ERR(verify_histogram(&stats->dir_prop_histogram));
+ SVN_ERR(verify_histogram(&stats->dir_prop_rep_histogram));
+
+ /* No file in the Greek tree has an externsion */
+ SVN_TEST_ASSERT(apr_hash_count(stats->by_extension) == 1);
+ extension_info = svn_hash_gets(stats->by_extension, "(none)");
+ SVN_TEST_ASSERT(extension_info);
+
+ SVN_ERR(verify_histogram(&extension_info->rep_histogram));
+ SVN_ERR(verify_histogram(&extension_info->node_histogram));
+
+ return SVN_NO_ERROR;
+}
+
+#undef REPO_NAME
+
+/* ------------------------------------------------------------------------ */
+
+#define REPO_NAME "test-repo-dump-index-test"
+
+typedef struct dump_baton_t
+{
+ /* Number of callback invocations so far */
+ int invocations;
+
+ /* Rev file location we expect to be reported next */
+ apr_off_t offset;
+
+ /* All items must be from this revision. */
+ svn_revnum_t revision;
+
+ /* Track the item numbers we have already seen. */
+ svn_bit_array__t *numbers_seen;
+} dump_baton_t;
+
+static svn_error_t *
+dump_index_entry(const svn_fs_fs__p2l_entry_t *entry,
+ void *baton_p,
+ apr_pool_t *scratch_pool)
+{
+ dump_baton_t *baton = baton_p;
+
+ /* Count invocations. */
+ baton->invocations++;
+
+ /* We expect a report of contiguous non-empty items. */
+ SVN_TEST_ASSERT(entry->offset == baton->offset);
+ SVN_TEST_ASSERT(entry->size > 0 && entry->size < 1000);
+ baton->offset += entry->size;
+
+ /* Type must be valid. */
+ SVN_TEST_ASSERT( entry->type > SVN_FS_FS__ITEM_TYPE_UNUSED
+ && entry->type <= SVN_FS_FS__ITEM_TYPE_CHANGES);
+
+ /* We expect all items to be from the specified revision. */
+ SVN_TEST_ASSERT(entry->item.revision == baton->revision);
+
+ /* Item numnber must be plausibly small and unique. */
+ SVN_TEST_ASSERT(entry->item.number < 100);
+ SVN_TEST_ASSERT(!svn_bit_array__get(baton->numbers_seen,
+ (apr_size_t)entry->item.number));
+ svn_bit_array__set(baton->numbers_seen, (apr_size_t)entry->item.number, 1);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+dump_index(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_repos_t *repos;
+ svn_revnum_t rev;
+ dump_baton_t baton;
+
+ /* Bail (with success) on known-untestable scenarios */
+ if (strcmp(opts->fs_type, "fsfs") != 0)
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "this will test FSFS repositories only");
+
+ if (opts->server_minor_version && (opts->server_minor_version < 9))
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "pre-1.9 SVN doesn't have FSFS indexes");
+
+ /* Create a filesystem */
+ SVN_ERR(create_greek_repo(&repos, &rev, opts, REPO_NAME, pool, pool));
+
+ /* Read the index data for REV from that repo. */
+ baton.invocations = 0;
+ baton.offset = 0;
+ baton.revision = rev;
+ baton.numbers_seen = svn_bit_array__create(100, pool);
+ SVN_ERR(svn_fs_fs__dump_index(svn_repos_fs(repos), rev, dump_index_entry,
+ &baton, NULL, NULL, pool));
+
+ /* Check that we've got all data (20 noderevs + 20 reps + 1 changes list). */
+ SVN_TEST_ASSERT(baton.invocations == 41);
+
+ return SVN_NO_ERROR;
+}
+
+#undef REPO_NAME
+
+/* ------------------------------------------------------------------------ */
+
+static svn_error_t *
+receive_index(const svn_fs_fs__p2l_entry_t *entry,
+ void *baton,
+ apr_pool_t *scratch_pool)
+{
+ apr_array_header_t *entries = baton;
+ APR_ARRAY_PUSH(entries, svn_fs_fs__p2l_entry_t *)
+ = apr_pmemdup(entries->pool, entry, sizeof(*entry));
+
+ return SVN_NO_ERROR;
+}
+
+#define REPO_NAME "test-repo-load-index-test"
+
+static svn_error_t *
+load_index(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_repos_t *repos;
+ svn_revnum_t rev;
+ apr_array_header_t *entries = apr_array_make(pool, 41, sizeof(void *));
+ apr_array_header_t *alt_entries = apr_array_make(pool, 1, sizeof(void *));
+ svn_fs_fs__p2l_entry_t entry;
+
+ /* Bail (with success) on known-untestable scenarios */
+ if (strcmp(opts->fs_type, "fsfs") != 0)
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "this will test FSFS repositories only");
+
+ if (opts->server_minor_version && (opts->server_minor_version < 9))
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "pre-1.9 SVN doesn't have FSFS indexes");
+
+ /* Create a filesystem */
+ SVN_ERR(create_greek_repo(&repos, &rev, opts, REPO_NAME, pool, pool));
+
+ /* Read the original index contents for REV in ENTRIES. */
+ SVN_ERR(svn_fs_fs__dump_index(svn_repos_fs(repos), rev, receive_index,
+ entries, NULL, NULL, pool));
+
+ /* Replace it with an index that declares the whole revision contents as
+ * "unused". */
+ entry = *APR_ARRAY_IDX(entries, entries->nelts-1, svn_fs_fs__p2l_entry_t *);
+ entry.size += entry.offset;
+ entry.offset = 0;
+ entry.type = SVN_FS_FS__ITEM_TYPE_UNUSED;
+ entry.item.number = SVN_FS_FS__ITEM_INDEX_UNUSED;
+ entry.item.revision = SVN_INVALID_REVNUM;
+ APR_ARRAY_PUSH(alt_entries, svn_fs_fs__p2l_entry_t *) = &entry;
+
+ SVN_ERR(svn_fs_fs__load_index(svn_repos_fs(repos), rev, alt_entries, pool));
+ SVN_TEST_ASSERT_ERROR(svn_repos_verify_fs3(repos, rev, rev, FALSE, FALSE,
+ NULL, NULL, NULL, NULL, NULL,
+ NULL, pool),
+ SVN_ERR_FS_INDEX_CORRUPTION);
+
+ /* Restore the original index. */
+ SVN_ERR(svn_fs_fs__load_index(svn_repos_fs(repos), rev, entries, pool));
+ SVN_ERR(svn_repos_verify_fs3(repos, rev, rev, FALSE, FALSE, NULL, NULL,
+ NULL, NULL, NULL, NULL, pool));
+
+ return SVN_NO_ERROR;
+}
+
+#undef REPO_NAME
+
+
+
+/* The test table. */
+
+static int max_threads = 0;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_OPTS_PASS(get_repo_stats,
+ "get statistics on a FSFS filesystem"),
+ SVN_TEST_OPTS_PASS(dump_index,
+ "dump the P2L index"),
+ SVN_TEST_OPTS_PASS(load_index,
+ "load the P2L index"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_fs_x/fs-x-pack-test.c b/subversion/tests/libsvn_fs_x/fs-x-pack-test.c
new file mode 100644
index 0000000..3f4dda0
--- /dev/null
+++ b/subversion/tests/libsvn_fs_x/fs-x-pack-test.c
@@ -0,0 +1,969 @@
+/* fs-x-pack-test.c --- tests for the FSX filesystem
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdlib.h>
+#include <string.h>
+#include <apr_pools.h>
+
+#include "../svn_test.h"
+#include "../../libsvn_fs_x/batch_fsync.h"
+#include "../../libsvn_fs_x/fs.h"
+#include "../../libsvn_fs_x/reps.h"
+
+#include "svn_pools.h"
+#include "svn_props.h"
+#include "svn_fs.h"
+#include "private/svn_string_private.h"
+
+#include "../svn_test_fs.h"
+
+
+
+/*** Helper Functions ***/
+
+/* Write the format number and maximum number of files per directory
+ to a new format file in PATH, overwriting a previously existing
+ file. Use POOL for temporary allocation.
+
+ (This implementation is largely stolen from libsvn_fs_fs/fs_fs.c.) */
+static svn_error_t *
+write_format(const char *path,
+ int format,
+ int max_files_per_dir,
+ apr_pool_t *pool)
+{
+ const char *contents;
+
+ path = svn_dirent_join(path, "format", pool);
+ SVN_TEST_ASSERT(max_files_per_dir > 0);
+
+ contents = apr_psprintf(pool,
+ "%d\n"
+ "layout sharded %d\n",
+ format, max_files_per_dir);
+
+ SVN_ERR(svn_io_write_atomic2(path, contents, strlen(contents),
+ NULL /* copy perms */, FALSE, pool));
+
+ /* And set the perms to make it read only */
+ return svn_io_set_file_read_only(path, FALSE, pool);
+}
+
+/* Return the expected contents of "iota" in revision REV. */
+static const char *
+get_rev_contents(svn_revnum_t rev, apr_pool_t *pool)
+{
+ /* Toss in a bunch of magic numbers for spice. */
+ apr_int64_t num = ((rev * 1234353 + 4358) * 4583 + ((rev % 4) << 1)) / 42;
+ return apr_psprintf(pool, "%" APR_INT64_T_FMT "\n", num);
+}
+
+struct pack_notify_baton
+{
+ apr_int64_t expected_shard;
+ svn_fs_pack_notify_action_t expected_action;
+};
+
+static svn_error_t *
+pack_notify(void *baton,
+ apr_int64_t shard,
+ svn_fs_pack_notify_action_t action,
+ apr_pool_t *pool)
+{
+ struct pack_notify_baton *pnb = baton;
+
+ SVN_TEST_ASSERT(shard == pnb->expected_shard);
+ SVN_TEST_ASSERT(action == pnb->expected_action);
+
+ /* Update expectations. */
+ switch (action)
+ {
+ case svn_fs_pack_notify_start:
+ pnb->expected_action = svn_fs_pack_notify_end;
+ break;
+
+ case svn_fs_pack_notify_end:
+ pnb->expected_action = svn_fs_pack_notify_start;
+ pnb->expected_shard++;
+ break;
+
+ default:
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Unknown notification action when packing");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+#define R1_LOG_MSG "Let's serf"
+
+/* Create a packed filesystem in DIR. Set the shard size to
+ SHARD_SIZE and create NUM_REVS number of revisions (in addition to
+ r0). Use POOL for allocations. After this function successfully
+ completes, the filesystem's youngest revision number will be the
+ same as NUM_REVS. */
+static svn_error_t *
+create_packed_filesystem(const char *dir,
+ const svn_test_opts_t *opts,
+ int num_revs,
+ int shard_size,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ const char *conflict;
+ svn_revnum_t after_rev;
+ apr_pool_t *subpool = svn_pool_create(pool);
+ struct pack_notify_baton pnb;
+ apr_pool_t *iterpool;
+ int version;
+
+ /* Bail (with success) on known-untestable scenarios */
+ if (strcmp(opts->fs_type, "fsx") != 0)
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "this will test FSX repositories only");
+
+ if (opts->server_minor_version && (opts->server_minor_version < 9))
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "pre-1.9 SVN doesn't support FSX");
+
+ /* Create a filesystem, then close it */
+ SVN_ERR(svn_test__create_fs(&fs, dir, opts, subpool));
+ svn_pool_destroy(subpool);
+
+ subpool = svn_pool_create(pool);
+
+ /* Rewrite the format file */
+ SVN_ERR(svn_io_read_version_file(&version,
+ svn_dirent_join(dir, "format", subpool),
+ subpool));
+ SVN_ERR(write_format(dir, version, shard_size, subpool));
+
+ /* Reopen the filesystem */
+ SVN_ERR(svn_fs_open2(&fs, dir, NULL, subpool, subpool));
+
+ /* Revision 1: the Greek tree */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, subpool));
+ SVN_ERR(svn_fs_change_txn_prop(txn, SVN_PROP_REVISION_LOG,
+ svn_string_create(R1_LOG_MSG, pool),
+ pool));
+ SVN_ERR(svn_fs_commit_txn(&conflict, &after_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(after_rev));
+
+ /* Revisions 2 thru NUM_REVS-1: content tweaks to "iota". */
+ iterpool = svn_pool_create(subpool);
+ while (after_rev < num_revs)
+ {
+ svn_pool_clear(iterpool);
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, after_rev, iterpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "iota",
+ get_rev_contents(after_rev + 1,
+ iterpool),
+ iterpool));
+ SVN_ERR(svn_fs_commit_txn(&conflict, &after_rev, txn, iterpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(after_rev));
+ }
+ svn_pool_destroy(iterpool);
+ svn_pool_destroy(subpool);
+
+ /* Now pack the FS */
+ pnb.expected_shard = 0;
+ pnb.expected_action = svn_fs_pack_notify_start;
+ return svn_fs_pack(dir, pack_notify, &pnb, NULL, NULL, pool);
+}
+
+/* Create a packed FSFS filesystem for revprop tests at REPO_NAME with
+ * MAX_REV revisions and the given SHARD_SIZE and OPTS. Return it in *FS.
+ * Use POOL for allocations.
+ */
+static svn_error_t *
+prepare_revprop_repo(svn_fs_t **fs,
+ const char *repo_name,
+ int max_rev,
+ int shard_size,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ const char *conflict;
+ svn_revnum_t after_rev;
+ apr_pool_t *subpool;
+
+ /* Create the packed FS and open it. */
+ SVN_ERR(create_packed_filesystem(repo_name, opts, max_rev, shard_size, pool));
+ SVN_ERR(svn_fs_open2(fs, repo_name, NULL, pool, pool));
+
+ subpool = svn_pool_create(pool);
+ /* Do a commit to trigger packing. */
+ SVN_ERR(svn_fs_begin_txn(&txn, *fs, max_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "iota", "new-iota", subpool));
+ SVN_ERR(svn_fs_commit_txn(&conflict, &after_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(after_rev));
+ svn_pool_destroy(subpool);
+
+ /* Pack the repository. */
+ SVN_ERR(svn_fs_pack(repo_name, NULL, NULL, NULL, NULL, pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* For revision REV, return a short log message allocated in POOL.
+ */
+static svn_string_t *
+default_log(svn_revnum_t rev, apr_pool_t *pool)
+{
+ return svn_string_createf(pool, "Default message for rev %ld", rev);
+}
+
+/* For revision REV, return a long log message allocated in POOL.
+ */
+static svn_string_t *
+large_log(svn_revnum_t rev, apr_size_t length, apr_pool_t *pool)
+{
+ svn_stringbuf_t *temp = svn_stringbuf_create_ensure(100000, pool);
+ int i, count = (int)(length - 50) / 6;
+
+ svn_stringbuf_appendcstr(temp, "A ");
+ for (i = 0; i < count; ++i)
+ svn_stringbuf_appendcstr(temp, "very, ");
+
+ svn_stringbuf_appendcstr(temp,
+ apr_psprintf(pool, "very long message for rev %ld, indeed", rev));
+
+ return svn_stringbuf__morph_into_string(temp);
+}
+
+/* For revision REV, return a long log message allocated in POOL.
+ */
+static svn_string_t *
+huge_log(svn_revnum_t rev, apr_pool_t *pool)
+{
+ return large_log(rev, 90000, pool);
+}
+
+
+/*** Tests ***/
+
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-fsx-pack"
+#define SHARD_SIZE 7
+#define MAX_REV 53
+static svn_error_t *
+pack_filesystem(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ int i;
+ svn_node_kind_t kind;
+ const char *path;
+ char buf[80];
+ apr_file_t *file;
+ apr_size_t len;
+
+ SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE,
+ pool));
+
+ /* Check to see that the pack files exist, and that the rev directories
+ don't. */
+ for (i = 0; i < (MAX_REV + 1) / SHARD_SIZE; i++)
+ {
+ path = svn_dirent_join_many(pool, REPO_NAME, "revs",
+ apr_psprintf(pool, "%d.pack", i / SHARD_SIZE),
+ "pack", SVN_VA_NULL);
+
+ /* This file should exist. */
+ SVN_ERR(svn_io_check_path(path, &kind, pool));
+ if (kind != svn_node_file)
+ return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+ "Expected pack file '%s' not found", path);
+
+ /* This directory should not exist. */
+ path = svn_dirent_join_many(pool, REPO_NAME, "revs",
+ apr_psprintf(pool, "%d", i / SHARD_SIZE),
+ SVN_VA_NULL);
+ SVN_ERR(svn_io_check_path(path, &kind, pool));
+ if (kind != svn_node_none)
+ return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+ "Unexpected directory '%s' found", path);
+ }
+
+ /* Ensure the min-unpacked-rev jives with the above operations. */
+ SVN_ERR(svn_io_file_open(&file,
+ svn_dirent_join(REPO_NAME, PATH_MIN_UNPACKED_REV,
+ pool),
+ APR_READ | APR_BUFFERED, APR_OS_DEFAULT, pool));
+ len = sizeof(buf);
+ SVN_ERR(svn_io_read_length_line(file, buf, &len, pool));
+ SVN_ERR(svn_io_file_close(file, pool));
+ if (SVN_STR_TO_REV(buf) != (MAX_REV / SHARD_SIZE) * SHARD_SIZE)
+ return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+ "Bad '%s' contents", PATH_MIN_UNPACKED_REV);
+
+ /* Finally, make sure the final revision directory does exist. */
+ path = svn_dirent_join_many(pool, REPO_NAME, "revs",
+ apr_psprintf(pool, "%d", (i / SHARD_SIZE) + 1),
+ SVN_VA_NULL);
+ SVN_ERR(svn_io_check_path(path, &kind, pool));
+ if (kind != svn_node_none)
+ return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+ "Expected directory '%s' not found", path);
+
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef SHARD_SIZE
+#undef MAX_REV
+
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-fsx-pack-even"
+#define SHARD_SIZE 4
+#define MAX_REV 11
+static svn_error_t *
+pack_even_filesystem(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_node_kind_t kind;
+ const char *path;
+
+ SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE,
+ pool));
+
+ path = svn_dirent_join_many(pool, REPO_NAME, "revs", "2.pack", SVN_VA_NULL);
+ SVN_ERR(svn_io_check_path(path, &kind, pool));
+ if (kind != svn_node_dir)
+ return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+ "Packing did not complete as expected");
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef SHARD_SIZE
+#undef MAX_REV
+
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-read-packed-fs"
+#define SHARD_SIZE 5
+#define MAX_REV 11
+static svn_error_t *
+read_packed_fs(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_stream_t *rstream;
+ svn_stringbuf_t *rstring;
+ svn_revnum_t i;
+
+ SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, pool));
+ SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, pool, pool));
+
+ for (i = 1; i < (MAX_REV + 1); i++)
+ {
+ svn_fs_root_t *rev_root;
+ svn_stringbuf_t *sb;
+
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, i, pool));
+ SVN_ERR(svn_fs_file_contents(&rstream, rev_root, "iota", pool));
+ SVN_ERR(svn_test__stream_to_string(&rstring, rstream, pool));
+
+ if (i == 1)
+ sb = svn_stringbuf_create("This is the file 'iota'.\n", pool);
+ else
+ sb = svn_stringbuf_create(get_rev_contents(i, pool), pool);
+
+ if (! svn_stringbuf_compare(rstring, sb))
+ return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+ "Bad data in revision %ld.", i);
+ }
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef SHARD_SIZE
+#undef MAX_REV
+
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-commit-packed-fs"
+#define SHARD_SIZE 5
+#define MAX_REV 10
+static svn_error_t *
+commit_packed_fs(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ const char *conflict;
+ svn_revnum_t after_rev;
+
+ /* Create the packed FS and open it. */
+ SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, 5, pool));
+ SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, pool, pool));
+
+ /* Now do a commit. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, MAX_REV, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "iota",
+ "How much better is it to get wisdom than gold! and to get "
+ "understanding rather to be chosen than silver!", pool));
+ SVN_ERR(svn_fs_commit_txn(&conflict, &after_rev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(after_rev));
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef MAX_REV
+#undef SHARD_SIZE
+
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-get-set-revprop-packed-fs"
+#define SHARD_SIZE 4
+#define MAX_REV 10
+static svn_error_t *
+get_set_revprop_packed_fs(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_string_t *prop_value;
+
+ /* Create the packed FS and open it. */
+ SVN_ERR(prepare_revprop_repo(&fs, REPO_NAME, MAX_REV, SHARD_SIZE, opts,
+ pool));
+
+ /* Try to get revprop for revision 0
+ * (non-packed due to special handling). */
+ SVN_ERR(svn_fs_revision_prop(&prop_value, fs, 0, SVN_PROP_REVISION_AUTHOR,
+ pool));
+
+ /* Try to change revprop for revision 0
+ * (non-packed due to special handling). */
+ SVN_ERR(svn_fs_change_rev_prop(fs, 0, SVN_PROP_REVISION_AUTHOR,
+ svn_string_create("tweaked-author", pool),
+ pool));
+
+ /* verify */
+ SVN_ERR(svn_fs_revision_prop(&prop_value, fs, 0, SVN_PROP_REVISION_AUTHOR,
+ pool));
+ SVN_TEST_STRING_ASSERT(prop_value->data, "tweaked-author");
+
+ /* Try to get packed revprop for revision 5. */
+ SVN_ERR(svn_fs_revision_prop(&prop_value, fs, 5, SVN_PROP_REVISION_AUTHOR,
+ pool));
+
+ /* Try to change packed revprop for revision 5. */
+ SVN_ERR(svn_fs_change_rev_prop(fs, 5, SVN_PROP_REVISION_AUTHOR,
+ svn_string_create("tweaked-author2", pool),
+ pool));
+
+ /* verify */
+ SVN_ERR(svn_fs_revision_prop(&prop_value, fs, 5, SVN_PROP_REVISION_AUTHOR,
+ pool));
+ SVN_TEST_STRING_ASSERT(prop_value->data, "tweaked-author2");
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef MAX_REV
+#undef SHARD_SIZE
+
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-get-set-large-revprop-packed-fs"
+#define SHARD_SIZE 4
+#define MAX_REV 11
+static svn_error_t *
+get_set_large_revprop_packed_fs(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_string_t *prop_value;
+ svn_revnum_t rev;
+
+ /* Create the packed FS and open it. */
+ SVN_ERR(prepare_revprop_repo(&fs, REPO_NAME, MAX_REV, SHARD_SIZE, opts,
+ pool));
+
+ /* Set commit messages to different, large values that fill the pack
+ * files but do not exceed the pack size limit. */
+ for (rev = 0; rev <= MAX_REV; ++rev)
+ SVN_ERR(svn_fs_change_rev_prop(fs, rev, SVN_PROP_REVISION_LOG,
+ large_log(rev, 15000, pool),
+ pool));
+
+ /* verify */
+ for (rev = 0; rev <= MAX_REV; ++rev)
+ {
+ SVN_ERR(svn_fs_revision_prop(&prop_value, fs, rev,
+ SVN_PROP_REVISION_LOG, pool));
+ SVN_TEST_STRING_ASSERT(prop_value->data,
+ large_log(rev, 15000, pool)->data);
+ }
+
+ /* Put a larger revprop into the last, some middle and the first revision
+ * of a pack. This should cause the packs to split in the middle. */
+ SVN_ERR(svn_fs_change_rev_prop(fs, 3, SVN_PROP_REVISION_LOG,
+ /* rev 0 is not packed */
+ large_log(3, 37000, pool),
+ pool));
+ SVN_ERR(svn_fs_change_rev_prop(fs, 5, SVN_PROP_REVISION_LOG,
+ large_log(5, 25000, pool),
+ pool));
+ SVN_ERR(svn_fs_change_rev_prop(fs, 8, SVN_PROP_REVISION_LOG,
+ large_log(8, 25000, pool),
+ pool));
+
+ /* verify */
+ for (rev = 0; rev <= MAX_REV; ++rev)
+ {
+ SVN_ERR(svn_fs_revision_prop(&prop_value, fs, rev,
+ SVN_PROP_REVISION_LOG, pool));
+
+ if (rev == 3)
+ SVN_TEST_STRING_ASSERT(prop_value->data,
+ large_log(rev, 37000, pool)->data);
+ else if (rev == 5 || rev == 8)
+ SVN_TEST_STRING_ASSERT(prop_value->data,
+ large_log(rev, 25000, pool)->data);
+ else
+ SVN_TEST_STRING_ASSERT(prop_value->data,
+ large_log(rev, 15000, pool)->data);
+ }
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef MAX_REV
+#undef SHARD_SIZE
+
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-get-set-huge-revprop-packed-fs"
+#define SHARD_SIZE 4
+#define MAX_REV 10
+static svn_error_t *
+get_set_huge_revprop_packed_fs(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ svn_string_t *prop_value;
+ svn_revnum_t rev;
+
+ /* Create the packed FS and open it. */
+ SVN_ERR(prepare_revprop_repo(&fs, REPO_NAME, MAX_REV, SHARD_SIZE, opts,
+ pool));
+
+ /* Set commit messages to different values */
+ for (rev = 0; rev <= MAX_REV; ++rev)
+ SVN_ERR(svn_fs_change_rev_prop(fs, rev, SVN_PROP_REVISION_LOG,
+ default_log(rev, pool),
+ pool));
+
+ /* verify */
+ for (rev = 0; rev <= MAX_REV; ++rev)
+ {
+ SVN_ERR(svn_fs_revision_prop(&prop_value, fs, rev,
+ SVN_PROP_REVISION_LOG, pool));
+ SVN_TEST_STRING_ASSERT(prop_value->data, default_log(rev, pool)->data);
+ }
+
+ /* Put a huge revprop into the last, some middle and the first revision
+ * of a pack. They will cause the pack files to split accordingly. */
+ SVN_ERR(svn_fs_change_rev_prop(fs, 3, SVN_PROP_REVISION_LOG,
+ huge_log(3, pool),
+ pool));
+ SVN_ERR(svn_fs_change_rev_prop(fs, 5, SVN_PROP_REVISION_LOG,
+ huge_log(5, pool),
+ pool));
+ SVN_ERR(svn_fs_change_rev_prop(fs, 8, SVN_PROP_REVISION_LOG,
+ huge_log(8, pool),
+ pool));
+
+ /* verify */
+ for (rev = 0; rev <= MAX_REV; ++rev)
+ {
+ SVN_ERR(svn_fs_revision_prop(&prop_value, fs, rev,
+ SVN_PROP_REVISION_LOG, pool));
+
+ if (rev == 3 || rev == 5 || rev == 8)
+ SVN_TEST_STRING_ASSERT(prop_value->data,
+ huge_log(rev, pool)->data);
+ else
+ SVN_TEST_STRING_ASSERT(prop_value->data,
+ default_log(rev, pool)->data);
+ }
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef MAX_REV
+#undef SHARD_SIZE
+
+/* ------------------------------------------------------------------------ */
+/* Regression test for issue #3571 (fsfs 'svnadmin recover' expects
+ youngest revprop to be outside revprops.db). */
+#define REPO_NAME "test-repo-recover-fully-packed"
+#define SHARD_SIZE 4
+#define MAX_REV 7
+static svn_error_t *
+recover_fully_packed(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ apr_pool_t *subpool;
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ const char *conflict;
+ svn_revnum_t after_rev;
+ svn_error_t *err;
+
+ /* Create a packed FS for which every revision will live in a pack
+ digest file, and then recover it. */
+ SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, pool));
+ SVN_ERR(svn_fs_recover(REPO_NAME, NULL, NULL, pool));
+
+ /* Add another revision, re-pack, re-recover. */
+ subpool = svn_pool_create(pool);
+ SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, subpool, subpool));
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, MAX_REV, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/mu", "new-mu", subpool));
+ SVN_ERR(svn_fs_commit_txn(&conflict, &after_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(after_rev));
+ svn_pool_destroy(subpool);
+ SVN_ERR(svn_fs_pack(REPO_NAME, NULL, NULL, NULL, NULL, pool));
+ SVN_ERR(svn_fs_recover(REPO_NAME, NULL, NULL, pool));
+
+ /* Now, delete the youngest revprop file, and recover again. This
+ time we want to see an error! */
+ SVN_ERR(svn_io_remove_file2(
+ svn_dirent_join_many(pool, REPO_NAME, PATH_REVS_DIR,
+ apr_psprintf(pool, "%ld/p%ld",
+ after_rev / SHARD_SIZE,
+ after_rev),
+ SVN_VA_NULL),
+ FALSE, pool));
+ err = svn_fs_recover(REPO_NAME, NULL, NULL, pool);
+ if (! err)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Expected SVN_ERR_FS_CORRUPT error; got none");
+ if (err->apr_err != SVN_ERR_FS_CORRUPT)
+ return svn_error_create(SVN_ERR_TEST_FAILED, err,
+ "Expected SVN_ERR_FS_CORRUPT error; got:");
+ svn_error_clear(err);
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef MAX_REV
+#undef SHARD_SIZE
+
+/* ------------------------------------------------------------------------ */
+/* Regression test for issue #4320 (fsfs file-hinting fails when reading a rep
+ from the transaction that is commiting rev = SHARD_SIZE). */
+#define REPO_NAME "test-repo-file-hint-at-shard-boundary"
+#define SHARD_SIZE 4
+#define MAX_REV (SHARD_SIZE - 1)
+static svn_error_t *
+file_hint_at_shard_boundary(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ apr_pool_t *subpool;
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ const char *file_contents;
+ svn_stringbuf_t *retrieved_contents;
+ svn_error_t *err = SVN_NO_ERROR;
+
+ /* Create a packed FS and MAX_REV revisions */
+ SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, pool));
+
+ /* Reopen the filesystem */
+ subpool = svn_pool_create(pool);
+ SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, subpool, subpool));
+
+ /* Revision = SHARD_SIZE */
+ file_contents = get_rev_contents(SHARD_SIZE, subpool);
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, MAX_REV, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "iota", file_contents,
+ subpool));
+
+ /* Retrieve the file. */
+ SVN_ERR(svn_test__get_file_contents(txn_root, "iota", &retrieved_contents,
+ subpool));
+ if (strcmp(retrieved_contents->data, file_contents))
+ {
+ err = svn_error_create(SVN_ERR_TEST_FAILED, err,
+ "Retrieved incorrect contents from iota.");
+ }
+
+ /* Close the repo. */
+ svn_pool_destroy(subpool);
+
+ return err;
+}
+#undef REPO_NAME
+#undef MAX_REV
+#undef SHARD_SIZE
+
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-fsx-info"
+#define SHARD_SIZE 3
+#define MAX_REV 5
+static svn_error_t *
+test_info(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs;
+ const svn_fs_fsx_info_t *fsx_info;
+ const svn_fs_info_placeholder_t *info;
+
+ SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE,
+ pool));
+
+ SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, pool, pool));
+ SVN_ERR(svn_fs_info(&info, fs, pool, pool));
+ info = svn_fs_info_dup(info, pool, pool);
+
+ SVN_TEST_STRING_ASSERT(opts->fs_type, info->fs_type);
+
+ /* Bail (with success) on known-untestable scenarios */
+ if (strcmp(opts->fs_type, "fsx") != 0)
+ return SVN_NO_ERROR;
+
+ fsx_info = (const void *)info;
+ SVN_TEST_ASSERT(fsx_info->shard_size == SHARD_SIZE);
+ SVN_TEST_ASSERT(fsx_info->min_unpacked_rev
+ == (MAX_REV + 1) / SHARD_SIZE * SHARD_SIZE);
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef SHARD_SIZE
+#undef MAX_REV
+
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-fsx-rev-container"
+#define SHARD_SIZE 3
+#define MAX_REV 5
+static svn_error_t *
+test_reps(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_fs_t *fs = NULL;
+ svn_fs_x__reps_builder_t *builder;
+ svn_fs_x__reps_t *container;
+ svn_stringbuf_t *serialized;
+ svn_stream_t *stream;
+ svn_stringbuf_t *contents = svn_stringbuf_create_ensure(10000, pool);
+ int i;
+
+ for (i = 0; i < 10000; ++i)
+ {
+ int v, s = 0;
+ for (v = i; v > 0; v /= 10)
+ s += v % 10;
+
+ svn_stringbuf_appendbyte(contents, (char)(s + ' '));
+ }
+
+ SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE,
+ pool));
+
+ SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, pool, pool));
+
+ builder = svn_fs_x__reps_builder_create(fs, pool);
+ for (i = 10000; i > 10; --i)
+ {
+ apr_size_t idx;
+ svn_string_t string;
+ string.data = contents->data;
+ string.len = i;
+
+ SVN_ERR(svn_fs_x__reps_add(&idx, builder, &string));
+ }
+
+ serialized = svn_stringbuf_create_empty(pool);
+ stream = svn_stream_from_stringbuf(serialized, pool);
+ SVN_ERR(svn_fs_x__write_reps_container(stream, builder, pool));
+
+ SVN_ERR(svn_stream_reset(stream));
+ SVN_ERR(svn_fs_x__read_reps_container(&container, stream, pool, pool));
+ SVN_ERR(svn_stream_close(stream));
+
+ return SVN_NO_ERROR;
+}
+
+#undef REPO_NAME
+#undef SHARD_SIZE
+#undef MAX_REV
+
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-fsx-pack-shard-size-one"
+#define SHARD_SIZE 1
+#define MAX_REV 4
+static svn_error_t *
+pack_shard_size_one(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_string_t *propval;
+ svn_fs_t *fs;
+
+ SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE,
+ pool));
+ SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, pool, pool));
+ /* whitebox: revprop packing special-cases r0, which causes
+ (start_rev==1, end_rev==0) in pack_revprops_shard(). So test that. */
+ SVN_ERR(svn_fs_revision_prop(&propval, fs, 1, SVN_PROP_REVISION_LOG, pool));
+ SVN_TEST_STRING_ASSERT(propval->data, R1_LOG_MSG);
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+#undef SHARD_SIZE
+#undef MAX_REV
+/* ------------------------------------------------------------------------ */
+#define REPO_NAME "test-repo-fsx-batch-fsync"
+static svn_error_t *
+test_batch_fsync(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ const char *abspath;
+ svn_fs_x__batch_fsync_t *batch;
+ int i;
+
+ /* Disable this test for non FSX backends because it has no relevance to
+ * them. */
+ if (strcmp(opts->fs_type, "fsx") != 0)
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "this will test FSX repositories only");
+
+ /* Create an empty working directory and let it be cleaned up by the test
+ * harness. */
+ SVN_ERR(svn_dirent_get_absolute(&abspath, REPO_NAME, pool));
+
+ SVN_ERR(svn_io_remove_dir2(abspath, TRUE, NULL, NULL, pool));
+ SVN_ERR(svn_io_make_dir_recursively(abspath, pool));
+ svn_test_add_dir_cleanup(abspath);
+
+ /* Initialize infrastructure with a pool that lives as long as this
+ * application. */
+ SVN_ERR(svn_fs_x__batch_fsync_init(pool));
+
+ /* We use and re-use the same batch object throughout this test. */
+ SVN_ERR(svn_fs_x__batch_fsync_create(&batch, TRUE, pool));
+
+ /* The working directory is new. */
+ SVN_ERR(svn_fs_x__batch_fsync_new_path(batch, abspath, pool));
+
+ /* 1st run: Has to fire up worker threads etc. */
+ for (i = 0; i < 10; ++i)
+ {
+ apr_file_t *file;
+ const char *path = svn_dirent_join(abspath,
+ apr_psprintf(pool, "file%i", i),
+ pool);
+ apr_size_t len = strlen(path);
+
+ SVN_ERR(svn_fs_x__batch_fsync_open_file(&file, batch, path, pool));
+
+ SVN_ERR(svn_io_file_write(file, path, &len, pool));
+ }
+
+ SVN_ERR(svn_fs_x__batch_fsync_run(batch, pool));
+
+ /* 2nd run: Running a batch must leave the container in an empty,
+ * re-usable state. Hence, try to re-use it. */
+ for (i = 0; i < 10; ++i)
+ {
+ apr_file_t *file;
+ const char *path = svn_dirent_join(abspath,
+ apr_psprintf(pool, "new%i", i),
+ pool);
+ apr_size_t len = strlen(path);
+
+ SVN_ERR(svn_fs_x__batch_fsync_open_file(&file, batch, path, pool));
+
+ SVN_ERR(svn_io_file_write(file, path, &len, pool));
+ }
+
+ SVN_ERR(svn_fs_x__batch_fsync_run(batch, pool));
+
+ /* 3rd run: Schedule but don't execute. POOL cleanup shall not fail. */
+ for (i = 0; i < 10; ++i)
+ {
+ apr_file_t *file;
+ const char *path = svn_dirent_join(abspath,
+ apr_psprintf(pool, "another%i", i),
+ pool);
+ apr_size_t len = strlen(path);
+
+ SVN_ERR(svn_fs_x__batch_fsync_open_file(&file, batch, path, pool));
+
+ SVN_ERR(svn_io_file_write(file, path, &len, pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+#undef REPO_NAME
+/* ------------------------------------------------------------------------ */
+
+/* The test table. */
+
+static int max_threads = 4;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_OPTS_PASS(pack_filesystem,
+ "pack a FSX filesystem"),
+ SVN_TEST_OPTS_PASS(pack_even_filesystem,
+ "pack FSX where revs % shard = 0"),
+ SVN_TEST_OPTS_PASS(read_packed_fs,
+ "read from a packed FSX filesystem"),
+ SVN_TEST_OPTS_PASS(commit_packed_fs,
+ "commit to a packed FSX filesystem"),
+ SVN_TEST_OPTS_PASS(get_set_revprop_packed_fs,
+ "get/set revprop while packing FSX filesystem"),
+ SVN_TEST_OPTS_PASS(get_set_large_revprop_packed_fs,
+ "get/set large packed revprops in FSX"),
+ SVN_TEST_OPTS_PASS(get_set_huge_revprop_packed_fs,
+ "get/set huge packed revprops in FSX"),
+ SVN_TEST_OPTS_PASS(recover_fully_packed,
+ "recover a fully packed filesystem"),
+ SVN_TEST_OPTS_PASS(file_hint_at_shard_boundary,
+ "test file hint at shard boundary"),
+ SVN_TEST_OPTS_PASS(test_info,
+ "test svn_fs_info"),
+ SVN_TEST_OPTS_PASS(test_reps,
+ "test representations container"),
+ SVN_TEST_OPTS_PASS(pack_shard_size_one,
+ "test packing with shard size = 1"),
+ SVN_TEST_OPTS_PASS(test_batch_fsync,
+ "test batch fsync"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_fs_x/string-table-test.c b/subversion/tests/libsvn_fs_x/string-table-test.c
new file mode 100644
index 0000000..2633bfa
--- /dev/null
+++ b/subversion/tests/libsvn_fs_x/string-table-test.c
@@ -0,0 +1,318 @@
+/* string-table-test.c --- tests for string tables
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "../svn_test.h"
+#include "../../libsvn_fs_x/string_table.h"
+#include "svn_pools.h"
+#include "svn_sorts.h"
+
+/* Some tests use this list of strings as is. They are all "short strings"
+ * in the terminology of string tables. We use them also as an input to
+ * generate strings of arbitrary length.
+ */
+enum { STRING_COUNT = 12 };
+static const char *basic_strings[STRING_COUNT] =
+ {
+ "some string",
+ "this is another string",
+ "this is a duplicate",
+ "some longer string",
+ "this is a very long string",
+ "and here is another",
+ "this is a duplicate",
+ "/some/path/to/a/dir",
+ "/some/path/to/a/file",
+ "/some/other/dir",
+ "/some/other/file",
+ ""
+ };
+
+/* Generate a string of exactly LEN chars (plus terminating NUL). KEY is
+ * an arbitrary integer that will be transformed into a character sequence
+ * using entries of BASIC_STRINGS. The result will be allocated in POOL.
+ */
+static svn_stringbuf_t *
+generate_string(apr_uint64_t key, apr_size_t len, apr_pool_t *pool)
+{
+ svn_stringbuf_t *result = svn_stringbuf_create_ensure(len, pool);
+ apr_uint64_t temp = key;
+ apr_uint64_t run = 0;
+
+ while (len)
+ {
+ apr_size_t idx;
+ apr_size_t add_len;
+
+ if (temp == 0)
+ {
+ temp = key;
+ run++;
+ }
+
+ idx = (temp + run) % STRING_COUNT;
+ temp /= STRING_COUNT;
+
+ add_len = strlen(basic_strings[idx]);
+ add_len = MIN(len, add_len);
+
+ svn_stringbuf_appendbytes(result, basic_strings[idx], add_len);
+ len -= add_len;
+ }
+
+ return result;
+}
+
+static svn_error_t *
+store_and_load_table(string_table_t **table, apr_pool_t *pool)
+{
+ svn_stringbuf_t *stream_buffer = svn_stringbuf_create_empty(pool);
+ svn_stream_t *stream;
+
+ stream = svn_stream_from_stringbuf(stream_buffer, pool);
+ SVN_ERR(svn_fs_x__write_string_table(stream, *table, pool));
+ SVN_ERR(svn_stream_close(stream));
+
+ *table = NULL;
+
+ stream = svn_stream_from_stringbuf(stream_buffer, pool);
+ SVN_ERR(svn_fs_x__read_string_table(table, stream, pool, pool));
+ SVN_ERR(svn_stream_close(stream));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+create_empty_table_body(svn_boolean_t do_load_store,
+ apr_pool_t *pool)
+{
+ string_table_builder_t *builder
+ = svn_fs_x__string_table_builder_create(pool);
+ string_table_t *table
+ = svn_fs_x__string_table_create(builder, pool);
+
+ SVN_TEST_STRING_ASSERT(svn_fs_x__string_table_get(table, 0, NULL, pool), "");
+
+ if (do_load_store)
+ SVN_ERR(store_and_load_table(&table, pool));
+
+ SVN_TEST_STRING_ASSERT(svn_fs_x__string_table_get(table, 0, NULL, pool), "");
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+short_string_table_body(svn_boolean_t do_load_store,
+ apr_pool_t *pool)
+{
+ apr_size_t indexes[STRING_COUNT] = { 0 };
+
+ string_table_builder_t *builder;
+ string_table_t *table;
+ int i;
+
+ builder = svn_fs_x__string_table_builder_create(pool);
+ for (i = 0; i < STRING_COUNT; ++i)
+ indexes[i] = svn_fs_x__string_table_builder_add(builder, basic_strings[i], 0);
+
+ table = svn_fs_x__string_table_create(builder, pool);
+ if (do_load_store)
+ SVN_ERR(store_and_load_table(&table, pool));
+
+ SVN_TEST_ASSERT(indexes[2] == indexes[6]);
+ for (i = 0; i < STRING_COUNT; ++i)
+ {
+ apr_size_t len;
+ const char *string
+ = svn_fs_x__string_table_get(table, indexes[i], &len, pool);
+
+ SVN_TEST_STRING_ASSERT(string, basic_strings[i]);
+ SVN_TEST_ASSERT(len == strlen(string));
+ SVN_TEST_ASSERT(len == strlen(basic_strings[i]));
+ }
+
+ SVN_TEST_STRING_ASSERT(svn_fs_x__string_table_get(table, STRING_COUNT,
+ NULL, pool), "");
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+large_string_table_body(svn_boolean_t do_load_store,
+ apr_pool_t *pool)
+{
+ enum { COUNT = 10 };
+
+ svn_stringbuf_t *strings[COUNT] = { 0 };
+ apr_size_t indexes[COUNT] = { 0 };
+
+ string_table_builder_t *builder;
+ string_table_t *table;
+ int i;
+
+ builder = svn_fs_x__string_table_builder_create(pool);
+ for (i = 0; i < COUNT; ++i)
+ {
+ strings[i] = generate_string(APR_UINT64_C(0x1234567876543210) * (i + 1),
+ 73000 + 1000 * i, pool);
+ indexes[i] = svn_fs_x__string_table_builder_add(builder,
+ strings[i]->data,
+ strings[i]->len);
+ }
+
+ table = svn_fs_x__string_table_create(builder, pool);
+ if (do_load_store)
+ SVN_ERR(store_and_load_table(&table, pool));
+
+ for (i = 0; i < COUNT; ++i)
+ {
+ apr_size_t len;
+ const char *string
+ = svn_fs_x__string_table_get(table, indexes[i], &len, pool);
+
+ SVN_TEST_STRING_ASSERT(string, strings[i]->data);
+ SVN_TEST_ASSERT(len == strlen(string));
+ SVN_TEST_ASSERT(len == strings[i]->len);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+many_strings_table_body(svn_boolean_t do_load_store,
+ apr_pool_t *pool)
+{
+ /* cause multiple sub-tables (6 to be exact) to be created */
+ enum { COUNT = 100 };
+
+ svn_stringbuf_t *strings[COUNT] = { 0 };
+ apr_size_t indexes[COUNT] = { 0 };
+
+ string_table_builder_t *builder;
+ string_table_t *table;
+ int i;
+
+ builder = svn_fs_x__string_table_builder_create(pool);
+ for (i = 0; i < COUNT; ++i)
+ {
+ strings[i] = generate_string(APR_UINT64_C(0x1234567876543210) * (i + 1),
+ (i * i) % 23000, pool);
+ indexes[i] = svn_fs_x__string_table_builder_add(builder,
+ strings[i]->data,
+ strings[i]->len);
+ }
+
+ table = svn_fs_x__string_table_create(builder, pool);
+ if (do_load_store)
+ SVN_ERR(store_and_load_table(&table, pool));
+
+ for (i = 0; i < COUNT; ++i)
+ {
+ apr_size_t len;
+ const char *string
+ = svn_fs_x__string_table_get(table, indexes[i], &len, pool);
+
+ SVN_TEST_STRING_ASSERT(string, strings[i]->data);
+ SVN_TEST_ASSERT(len == strlen(string));
+ SVN_TEST_ASSERT(len == strings[i]->len);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+create_empty_table(apr_pool_t *pool)
+{
+ return svn_error_trace(create_empty_table_body(FALSE, pool));
+}
+
+static svn_error_t *
+short_string_table(apr_pool_t *pool)
+{
+ return svn_error_trace(short_string_table_body(FALSE, pool));
+}
+
+static svn_error_t *
+large_string_table(apr_pool_t *pool)
+{
+ return svn_error_trace(large_string_table_body(FALSE, pool));
+}
+
+static svn_error_t *
+many_strings_table(apr_pool_t *pool)
+{
+ return svn_error_trace(many_strings_table_body(FALSE, pool));
+}
+
+static svn_error_t *
+store_load_short_string_table(apr_pool_t *pool)
+{
+ return svn_error_trace(short_string_table_body(TRUE, pool));
+}
+
+static svn_error_t *
+store_load_large_string_table(apr_pool_t *pool)
+{
+ return svn_error_trace(large_string_table_body(TRUE, pool));
+}
+
+static svn_error_t *
+store_load_empty_table(apr_pool_t *pool)
+{
+ return svn_error_trace(create_empty_table_body(TRUE, pool));
+}
+
+static svn_error_t *
+store_load_many_strings_table(apr_pool_t *pool)
+{
+ return svn_error_trace(many_strings_table_body(TRUE, pool));
+}
+
+
+/* ------------------------------------------------------------------------ */
+
+/* The test table. */
+
+static int max_threads = 4;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(create_empty_table,
+ "create an empty string table"),
+ SVN_TEST_PASS2(short_string_table,
+ "string table with short strings only"),
+ SVN_TEST_PASS2(large_string_table,
+ "string table with large strings only"),
+ SVN_TEST_PASS2(many_strings_table,
+ "string table with many strings"),
+ SVN_TEST_PASS2(store_load_empty_table,
+ "store and load an empty string table"),
+ SVN_TEST_PASS2(store_load_short_string_table,
+ "store and load table with short strings only"),
+ SVN_TEST_PASS2(store_load_large_string_table,
+ "store and load table with large strings only"),
+ SVN_TEST_PASS2(store_load_many_strings_table,
+ "store and load string table with many strings"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_ra/ra-test.c b/subversion/tests/libsvn_ra/ra-test.c
new file mode 100644
index 0000000..061b19a
--- /dev/null
+++ b/subversion/tests/libsvn_ra/ra-test.c
@@ -0,0 +1,1826 @@
+/*
+ * ra-local-test.c : basic tests for the RA LOCAL library
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+
+
+#include <apr_general.h>
+#include <apr_pools.h>
+#include <apr_file_io.h>
+#include <assert.h>
+
+#include "svn_error.h"
+#include "svn_delta.h"
+#include "svn_ra.h"
+#include "svn_time.h"
+#include "svn_pools.h"
+#include "svn_cmdline.h"
+#include "svn_dirent_uri.h"
+#include "svn_hash.h"
+
+#include "../svn_test.h"
+#include "../svn_test_fs.h"
+#include "../../libsvn_ra_local/ra_local.h"
+
+/*-------------------------------------------------------------------*/
+
+/** Helper routines. **/
+
+
+static svn_error_t *
+make_and_open_repos(svn_ra_session_t **session,
+ const char *repos_name,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ const char *url;
+ svn_ra_callbacks2_t *cbtable;
+
+ SVN_ERR(svn_ra_create_callbacks(&cbtable, pool));
+ SVN_ERR(svn_test__init_auth_baton(&cbtable->auth_baton, pool));
+
+ SVN_ERR(svn_test__create_repos2(NULL, &url, NULL, repos_name, opts,
+ pool, pool));
+ SVN_ERR(svn_ra_initialize(pool));
+
+ SVN_ERR(svn_ra_open4(session, NULL, url, NULL, cbtable, NULL, NULL, pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* Commit some simple changes */
+static svn_error_t *
+commit_changes(svn_ra_session_t *session,
+ apr_pool_t *pool)
+{
+ apr_hash_t *revprop_table = apr_hash_make(pool);
+ const svn_delta_editor_t *editor;
+ void *edit_baton;
+ const char *repos_root_url;
+ void *root_baton, *dir_baton;
+
+ SVN_ERR(svn_ra_get_commit_editor3(session, &editor, &edit_baton,
+ revprop_table,
+ NULL, NULL, NULL, TRUE, pool));
+ SVN_ERR(svn_ra_get_repos_root2(session, &repos_root_url, pool));
+
+ SVN_ERR(editor->open_root(edit_baton, SVN_INVALID_REVNUM,
+ pool, &root_baton));
+ /* copy root-dir@0 to A@1 */
+ SVN_ERR(editor->add_directory("A", root_baton, repos_root_url, 0,
+ pool, &dir_baton));
+ SVN_ERR(editor->close_directory(dir_baton, pool));
+ SVN_ERR(editor->close_directory(root_baton, pool));
+ SVN_ERR(editor->close_edit(edit_baton, pool));
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+commit_tree(svn_ra_session_t *session,
+ apr_pool_t *pool)
+{
+ apr_hash_t *revprop_table = apr_hash_make(pool);
+ const svn_delta_editor_t *editor;
+ void *edit_baton;
+ const char *repos_root_url;
+ void *root_baton, *A_baton, *B_baton, *file_baton;
+
+ SVN_ERR(svn_ra_get_commit_editor3(session, &editor, &edit_baton,
+ revprop_table,
+ NULL, NULL, NULL, TRUE, pool));
+ SVN_ERR(svn_ra_get_repos_root2(session, &repos_root_url, pool));
+
+ SVN_ERR(editor->open_root(edit_baton, SVN_INVALID_REVNUM,
+ pool, &root_baton));
+ SVN_ERR(editor->add_directory("A", root_baton, NULL, SVN_INVALID_REVNUM,
+ pool, &A_baton));
+ SVN_ERR(editor->add_directory("A/B", A_baton, NULL, SVN_INVALID_REVNUM,
+ pool, &B_baton));
+ SVN_ERR(editor->add_file("A/B/f", B_baton, NULL, SVN_INVALID_REVNUM,
+ pool, &file_baton));
+ SVN_ERR(editor->close_file(file_baton, NULL, pool));
+ SVN_ERR(editor->add_file("A/B/g", B_baton, NULL, SVN_INVALID_REVNUM,
+ pool, &file_baton));
+ SVN_ERR(editor->close_file(file_baton, NULL, pool));
+ SVN_ERR(editor->close_directory(B_baton, pool));
+ SVN_ERR(editor->add_directory("A/BB", A_baton, NULL, SVN_INVALID_REVNUM,
+ pool, &B_baton));
+ SVN_ERR(editor->add_file("A/BB/f", B_baton, NULL, SVN_INVALID_REVNUM,
+ pool, &file_baton));
+ SVN_ERR(editor->close_file(file_baton, NULL, pool));
+ SVN_ERR(editor->add_file("A/BB/g", B_baton, NULL, SVN_INVALID_REVNUM,
+ pool, &file_baton));
+ SVN_ERR(editor->close_file(file_baton, NULL, pool));
+ SVN_ERR(editor->close_directory(B_baton, pool));
+ SVN_ERR(editor->close_directory(A_baton, pool));
+ SVN_ERR(editor->close_directory(root_baton, pool));
+ SVN_ERR(editor->close_edit(edit_baton, pool));
+ return SVN_NO_ERROR;
+}
+
+/* Baton for opening tunnels */
+typedef struct tunnel_baton_t
+{
+ int magic; /* TUNNEL_MAGIC */
+ int open_count;
+ svn_boolean_t last_check;
+} tunnel_baton_t;
+
+#define TUNNEL_MAGIC 0xF00DF00F
+
+/* Baton for closing a specific tunnel */
+typedef struct close_baton_t
+{
+ int magic;
+ tunnel_baton_t *tb;
+ apr_proc_t *proc;
+} close_baton_t;
+
+#define CLOSE_MAGIC 0x1BADBAD1
+
+static svn_boolean_t
+check_tunnel(void *tunnel_baton, const char *tunnel_name)
+{
+ tunnel_baton_t *b = tunnel_baton;
+
+ if (b->magic != TUNNEL_MAGIC)
+ abort();
+
+ b->last_check = (0 == strcmp(tunnel_name, "test"));
+ return b->last_check;
+}
+
+static void
+close_tunnel(void *tunnel_context, void *tunnel_baton);
+
+static svn_error_t *
+open_tunnel(svn_stream_t **request, svn_stream_t **response,
+ svn_ra_close_tunnel_func_t *close_func, void **close_baton,
+ void *tunnel_baton,
+ const char *tunnel_name, const char *user,
+ const char *hostname, int port,
+ svn_cancel_func_t cancel_func, void *cancel_baton,
+ apr_pool_t *pool)
+{
+ svn_node_kind_t kind;
+ apr_proc_t *proc;
+ apr_procattr_t *attr;
+ apr_status_t status;
+ const char *args[] = { "svnserve", "-t", "-r", ".", NULL };
+ const char *svnserve;
+ tunnel_baton_t *b = tunnel_baton;
+ close_baton_t *cb;
+
+ SVN_TEST_ASSERT(b->magic == TUNNEL_MAGIC);
+
+ SVN_ERR(svn_dirent_get_absolute(&svnserve, "../../svnserve/svnserve", pool));
+#ifdef WIN32
+ svnserve = apr_pstrcat(pool, svnserve, ".exe", SVN_VA_NULL);
+#endif
+ SVN_ERR(svn_io_check_path(svnserve, &kind, pool));
+ if (kind != svn_node_file)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Could not find svnserve at %s",
+ svn_dirent_local_style(svnserve, pool));
+
+ status = apr_procattr_create(&attr, pool);
+ if (status == APR_SUCCESS)
+ status = apr_procattr_io_set(attr, 1, 1, 0);
+ if (status == APR_SUCCESS)
+ status = apr_procattr_cmdtype_set(attr, APR_PROGRAM);
+ proc = apr_palloc(pool, sizeof(*proc));
+ if (status == APR_SUCCESS)
+ status = apr_proc_create(proc,
+ svn_dirent_local_style(svnserve, pool),
+ args, NULL, attr, pool);
+ if (status != APR_SUCCESS)
+ return svn_error_wrap_apr(status, "Could not run svnserve");
+ apr_pool_note_subprocess(pool, proc, APR_KILL_NEVER);
+
+ /* APR pipe objects inherit by default. But we don't want the
+ * tunnel agent's pipes held open by future child processes
+ * (such as other ra_svn sessions), so turn that off. */
+ apr_file_inherit_unset(proc->in);
+ apr_file_inherit_unset(proc->out);
+
+ cb = apr_pcalloc(pool, sizeof(*cb));
+ cb->magic = CLOSE_MAGIC;
+ cb->tb = b;
+ cb->proc = proc;
+
+ *request = svn_stream_from_aprfile2(proc->in, FALSE, pool);
+ *response = svn_stream_from_aprfile2(proc->out, FALSE, pool);
+ *close_func = close_tunnel;
+ *close_baton = cb;
+ ++b->open_count;
+ return SVN_NO_ERROR;
+}
+
+static void
+close_tunnel(void *tunnel_context, void *tunnel_baton)
+{
+ close_baton_t *b = tunnel_context;
+
+ if (b->magic != CLOSE_MAGIC)
+ abort();
+ if (--b->tb->open_count == 0)
+ {
+ apr_status_t child_exit_status;
+ int child_exit_code;
+ apr_exit_why_e child_exit_why;
+
+ SVN_TEST_ASSERT_NO_RETURN(0 == apr_file_close(b->proc->in));
+ SVN_TEST_ASSERT_NO_RETURN(0 == apr_file_close(b->proc->out));
+
+ child_exit_status =
+ apr_proc_wait(b->proc, &child_exit_code, &child_exit_why, APR_WAIT);
+
+ SVN_TEST_ASSERT_NO_RETURN(child_exit_status == APR_CHILD_DONE);
+ SVN_TEST_ASSERT_NO_RETURN(child_exit_code == 0);
+ SVN_TEST_ASSERT_NO_RETURN(child_exit_why == APR_PROC_EXIT);
+ }
+}
+
+
+
+
+/*-------------------------------------------------------------------*/
+
+/** The tests **/
+
+/* Baton for gls_receiver(). */
+struct gls_receiver_baton_t
+{
+ apr_array_header_t *segments;
+ apr_pool_t *pool;
+};
+
+/* Receive a location segment and append it to BATON.segments. */
+static svn_error_t *
+gls_receiver(svn_location_segment_t *segment,
+ void *baton,
+ apr_pool_t *pool)
+{
+ struct gls_receiver_baton_t *b = baton;
+
+ APR_ARRAY_PUSH(b->segments, svn_location_segment_t *)
+ = svn_location_segment_dup(segment, b->pool);
+ return SVN_NO_ERROR;
+}
+
+/* Test svn_ra_get_location_segments(). */
+static svn_error_t *
+location_segments_test(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_ra_session_t *session;
+ apr_array_header_t *segments
+ = apr_array_make(pool, 1, sizeof(svn_location_segment_t *));
+ struct gls_receiver_baton_t b;
+ const char *path = "A";
+ svn_revnum_t peg_revision = 1;
+ svn_location_segment_t *seg;
+
+ b.segments = segments;
+ b.pool = pool;
+
+ SVN_ERR(make_and_open_repos(&session,
+ "test-repo-locsegs", opts,
+ pool));
+
+ /* ### This currently tests only a small subset of what's possible. */
+ SVN_ERR(commit_changes(session, pool));
+ SVN_ERR(svn_ra_get_location_segments(session, path, peg_revision,
+ SVN_INVALID_REVNUM, SVN_INVALID_REVNUM,
+ gls_receiver, &b, pool));
+ SVN_TEST_ASSERT(segments->nelts == 2);
+ seg = APR_ARRAY_IDX(segments, 0, svn_location_segment_t *);
+ SVN_TEST_STRING_ASSERT(seg->path, "A");
+ SVN_TEST_ASSERT(seg->range_start == 1);
+ SVN_TEST_ASSERT(seg->range_end == 1);
+ seg = APR_ARRAY_IDX(segments, 1, svn_location_segment_t *);
+ SVN_TEST_STRING_ASSERT(seg->path, "");
+ SVN_TEST_ASSERT(seg->range_start == 0);
+ SVN_TEST_ASSERT(seg->range_end == 0);
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Test ra_svn tunnel callbacks. */
+
+/* Verify that svn_ra_open4() fails for an unregistered tunnel scheme
+   (svn+foo://) with SVN_ERR_RA_CANNOT_CREATE_SESSION, and that the
+   check_tunnel callback was consulted: b->last_check is primed to TRUE
+   here and is expected to be reset by check_tunnel() (defined earlier
+   in this file -- confirm its recording behavior there). */
+static svn_error_t *
+check_tunnel_callback_test(const svn_test_opts_t *opts,
+                           apr_pool_t *pool)
+{
+  tunnel_baton_t *b = apr_pcalloc(pool, sizeof(*b));
+  svn_ra_callbacks2_t *cbtable;
+  svn_ra_session_t *session;
+
+  b->magic = TUNNEL_MAGIC;
+
+  SVN_ERR(svn_ra_create_callbacks(&cbtable, pool));
+  cbtable->check_tunnel_func = check_tunnel;
+  cbtable->open_tunnel_func = open_tunnel;
+  cbtable->tunnel_baton = b;
+  SVN_ERR(svn_cmdline_create_auth_baton2(&cbtable->auth_baton,
+                                         TRUE /* non_interactive */,
+                                         "jrandom", "rayjandom",
+                                         NULL,
+                                         TRUE /* no_auth_cache */,
+                                         FALSE /* trust_server_cert */,
+                                         FALSE, FALSE, FALSE, FALSE,
+                                         NULL, NULL, NULL, pool));
+
+  b->last_check = TRUE;
+  SVN_TEST_ASSERT_ERROR(svn_ra_open4(&session, NULL,
+                                     "svn+foo://localhost/no-repo",
+                                     NULL, cbtable, NULL, NULL, pool),
+                        SVN_ERR_RA_CANNOT_CREATE_SESSION);
+  SVN_TEST_ASSERT(!b->last_check);
+  return SVN_NO_ERROR;
+}
+
+/* Open a session over the svn+test:// tunnel scheme and check that the
+   tunnel callbacks actually ran: b->last_check must be set and
+   b->open_count must be positive after svn_ra_open4(); destroying the
+   session's pool is expected to close the tunnel again (open_count
+   back to 0 -- see open_tunnel()/its close handler earlier in this
+   file for the counting). */
+static svn_error_t *
+tunnel_callback_test(const svn_test_opts_t *opts,
+                     apr_pool_t *pool)
+{
+  tunnel_baton_t *b = apr_pcalloc(pool, sizeof(*b));
+  apr_pool_t *scratch_pool = svn_pool_create(pool);
+  const char *url;
+  svn_ra_callbacks2_t *cbtable;
+  svn_ra_session_t *session;
+  const char tunnel_repos_name[] = "test-repo-tunnel";
+
+  b->magic = TUNNEL_MAGIC;
+
+  SVN_ERR(svn_test__create_repos(NULL, tunnel_repos_name, opts, scratch_pool));
+
+  /* Immediately close the repository to avoid race condition with svnserve
+     (and then the cleanup code) with BDB when our pool is cleared. */
+  svn_pool_clear(scratch_pool);
+
+  url = apr_pstrcat(pool, "svn+test://localhost/", tunnel_repos_name,
+                    SVN_VA_NULL);
+  SVN_ERR(svn_ra_create_callbacks(&cbtable, pool));
+  cbtable->check_tunnel_func = check_tunnel;
+  cbtable->open_tunnel_func = open_tunnel;
+  cbtable->tunnel_baton = b;
+  SVN_ERR(svn_cmdline_create_auth_baton2(&cbtable->auth_baton,
+                                         TRUE /* non_interactive */,
+                                         "jrandom", "rayjandom",
+                                         NULL,
+                                         TRUE /* no_auth_cache */,
+                                         FALSE /* trust_server_cert */,
+                                         FALSE, FALSE, FALSE, FALSE,
+                                         NULL, NULL, NULL, pool));
+
+  b->last_check = FALSE;
+  SVN_ERR(svn_ra_open4(&session, NULL, url, NULL, cbtable, NULL, NULL,
+                       scratch_pool));
+  SVN_TEST_ASSERT(b->last_check);
+  SVN_TEST_ASSERT(b->open_count > 0);
+  /* Destroying the session pool should tear the tunnel down. */
+  svn_pool_destroy(scratch_pool);
+  SVN_TEST_ASSERT(b->open_count == 0);
+  return SVN_NO_ERROR;
+}
+
+/* Outcome of one lock/unlock operation as delivered to lock_cb():
+   LOCK is a duplicate of the acquired lock (or NULL), ERR the error
+   the RA layer reported for the path (or NULL). */
+struct lock_result_t {
+  svn_lock_t *lock;
+  svn_error_t *err;
+};
+
+/* Baton for lock_cb(): RESULTS maps path -> struct lock_result_t *;
+   POOL is the (long-lived) pool all stored copies are made in. */
+struct lock_baton_t {
+  apr_hash_t *results;
+  apr_pool_t *pool;
+};
+
+/* Implements svn_ra_lock_callback_t.  Records the outcome for PATH in
+   BATON (a struct lock_baton_t *), duplicating LOCK and PATH into the
+   baton's longer-lived pool.
+   NOTE(review): RA_ERR is stored as-is, neither svn_error_dup()'ed nor
+   cleared -- this assumes it outlives the callback and that whoever
+   reads RESULTS takes care of it; confirm against svn_ra_lock(). */
+static svn_error_t *
+lock_cb(void *baton,
+        const char *path,
+        svn_boolean_t do_lock,
+        const svn_lock_t *lock,
+        svn_error_t *ra_err,
+        apr_pool_t *pool)
+{
+  struct lock_baton_t *b = baton;
+  struct lock_result_t *result = apr_palloc(b->pool,
+                                            sizeof(struct lock_result_t));
+
+  result->lock = svn_lock_dup(lock, b->pool);
+  result->err = ra_err;
+
+  svn_hash_sets(b->results, apr_pstrdup(b->pool, path), result);
+
+  return SVN_NO_ERROR;
+}
+
+/* Assert that RESULTS (from lock_cb) records a successful lock of PATH
+   and that SESSION also reports a lock present on PATH. */
+static svn_error_t *
+expect_lock(const char *path,
+            apr_hash_t *results,
+            svn_ra_session_t *session,
+            apr_pool_t *scratch_pool)
+{
+  svn_lock_t *lock;
+  struct lock_result_t *result = svn_hash_gets(results, path);
+
+  SVN_TEST_ASSERT(result && result->lock && !result->err);
+  SVN_ERR(svn_ra_get_lock(session, &lock, path, scratch_pool));
+  SVN_TEST_ASSERT(lock);
+  return SVN_NO_ERROR;
+}
+
+/* Assert that locking PATH failed: RESULTS records an error and no
+   lock, and SESSION reports no lock on PATH either. */
+static svn_error_t *
+expect_error(const char *path,
+             apr_hash_t *results,
+             svn_ra_session_t *session,
+             apr_pool_t *scratch_pool)
+{
+  svn_lock_t *lock;
+  struct lock_result_t *result = svn_hash_gets(results, path);
+
+  SVN_TEST_ASSERT(result && result->err);
+  SVN_TEST_ASSERT(!result->lock);
+  /* RA layers shouldn't report SVN_ERR_FS_NOT_FOUND */
+  SVN_ERR(svn_ra_get_lock(session, &lock, path, scratch_pool));
+
+  SVN_TEST_ASSERT(!lock);
+  return SVN_NO_ERROR;
+}
+
+/* Assert that PATH was unlocked without error (per RESULTS) and that
+   SESSION no longer reports a lock on it. */
+static svn_error_t *
+expect_unlock(const char *path,
+              apr_hash_t *results,
+              svn_ra_session_t *session,
+              apr_pool_t *scratch_pool)
+{
+  svn_lock_t *lock;
+  struct lock_result_t *result = svn_hash_gets(results, path);
+
+  SVN_TEST_ASSERT(result && !result->err);
+  SVN_ERR(svn_ra_get_lock(session, &lock, path, scratch_pool));
+  SVN_TEST_ASSERT(!lock);
+  return SVN_NO_ERROR;
+}
+
+/* Assert that unlocking PATH failed (per RESULTS) and that the lock is
+   therefore still present on the server. */
+static svn_error_t *
+expect_unlock_error(const char *path,
+                    apr_hash_t *results,
+                    svn_ra_session_t *session,
+                    apr_pool_t *scratch_pool)
+{
+  svn_lock_t *lock;
+  struct lock_result_t *result = svn_hash_gets(results, path);
+
+  SVN_TEST_ASSERT(result && result->err);
+  SVN_ERR(svn_ra_get_lock(session, &lock, path, scratch_pool));
+  SVN_TEST_ASSERT(lock);
+  return SVN_NO_ERROR;
+}
+
+/* Test svn_ra_lock() / svn_ra_unlock(): lock a mix of existing and
+   non-existing paths, then unlock with wrong tokens, with force, and
+   with the correct tokens. */
+static svn_error_t *
+lock_test(const svn_test_opts_t *opts,
+          apr_pool_t *pool)
+{
+  svn_ra_session_t *session;
+  apr_hash_t *lock_targets = apr_hash_make(pool);
+  apr_hash_t *unlock_targets = apr_hash_make(pool);
+  svn_revnum_t rev = 1;
+  struct lock_result_t *result;
+  struct lock_baton_t baton;
+  apr_hash_index_t *hi;
+
+  SVN_ERR(make_and_open_repos(&session, "test-repo-lock", opts, pool));
+  SVN_ERR(commit_tree(session, pool));
+
+  baton.results = apr_hash_make(pool);
+  baton.pool = pool;
+
+  /* All targets claim r1 as the current revision.  The "z" paths are
+     presumably absent from the tree built by commit_tree(), which is
+     why expect_error() is used for them below -- confirm there. */
+  svn_hash_sets(lock_targets, "A/B/f", &rev);
+  svn_hash_sets(lock_targets, "A/B/g", &rev);
+  svn_hash_sets(lock_targets, "A/B/z", &rev);
+  svn_hash_sets(lock_targets, "A/BB/f", &rev);
+  svn_hash_sets(lock_targets, "X/z", &rev);
+
+  /* Lock some paths. */
+  SVN_ERR(svn_ra_lock(session, lock_targets, "foo", FALSE, lock_cb, &baton,
+                      pool));
+
+  SVN_ERR(expect_lock("A/B/f", baton.results, session, pool));
+  SVN_ERR(expect_lock("A/B/g", baton.results, session, pool));
+  SVN_ERR(expect_error("A/B/z", baton.results, session, pool));
+  SVN_ERR(expect_lock("A/BB/f", baton.results, session, pool));
+  SVN_ERR(expect_error("X/z", baton.results, session, pool));
+
+  /* Unlock without force and wrong lock tokens */
+  for (hi = apr_hash_first(pool, lock_targets); hi; hi = apr_hash_next(hi))
+    svn_hash_sets(unlock_targets, apr_hash_this_key(hi), "wrong-token");
+  apr_hash_clear(baton.results);
+  SVN_ERR(svn_ra_unlock(session, unlock_targets, FALSE, lock_cb, &baton, pool));
+
+  SVN_ERR(expect_unlock_error("A/B/f", baton.results, session, pool));
+  SVN_ERR(expect_unlock_error("A/B/g", baton.results, session, pool));
+  SVN_ERR(expect_error("A/B/z", baton.results, session, pool));
+  SVN_ERR(expect_unlock_error("A/BB/f", baton.results, session, pool));
+  SVN_ERR(expect_error("X/z", baton.results, session, pool));
+
+  /* Force unlock */
+  for (hi = apr_hash_first(pool, lock_targets); hi; hi = apr_hash_next(hi))
+    svn_hash_sets(unlock_targets, apr_hash_this_key(hi), "");
+  apr_hash_clear(baton.results);
+  SVN_ERR(svn_ra_unlock(session, unlock_targets, TRUE, lock_cb, &baton, pool));
+
+  SVN_ERR(expect_unlock("A/B/f", baton.results, session, pool));
+  SVN_ERR(expect_unlock("A/B/g", baton.results, session, pool));
+  SVN_ERR(expect_error("A/B/z", baton.results, session, pool));
+  SVN_ERR(expect_unlock("A/BB/f", baton.results, session, pool));
+  SVN_ERR(expect_error("X/z", baton.results, session, pool));
+
+  /* Lock again. */
+  apr_hash_clear(baton.results);
+  SVN_ERR(svn_ra_lock(session, lock_targets, "foo", FALSE, lock_cb, &baton,
+                      pool));
+
+  SVN_ERR(expect_lock("A/B/f", baton.results, session, pool));
+  SVN_ERR(expect_lock("A/B/g", baton.results, session, pool));
+  SVN_ERR(expect_error("A/B/z", baton.results, session, pool));
+  SVN_ERR(expect_lock("A/BB/f", baton.results, session, pool));
+  SVN_ERR(expect_error("X/z", baton.results, session, pool));
+
+  /* Unlock with the correct tokens gathered from the lock results. */
+  for (hi = apr_hash_first(pool, baton.results); hi; hi = apr_hash_next(hi))
+    {
+      result = apr_hash_this_val(hi);
+      svn_hash_sets(unlock_targets, apr_hash_this_key(hi),
+                    result->lock ? result->lock->token : "non-existent-token");
+    }
+  apr_hash_clear(baton.results);
+  SVN_ERR(svn_ra_unlock(session, unlock_targets, FALSE, lock_cb, &baton, pool));
+
+  SVN_ERR(expect_unlock("A/B/f", baton.results, session, pool));
+  SVN_ERR(expect_unlock("A/B/g", baton.results, session, pool));
+  SVN_ERR(expect_error("A/B/z", baton.results, session, pool));
+  SVN_ERR(expect_unlock("A/BB/f", baton.results, session, pool));
+  SVN_ERR(expect_error("X/z", baton.results, session, pool));
+
+  return SVN_NO_ERROR;
+}
+
+/* Test svn_ra_get_dir2(). */
+static svn_error_t *
+get_dir_test(const svn_test_opts_t *opts,
+             apr_pool_t *pool)
+{
+  svn_ra_session_t *session;
+  apr_hash_t *dirents;
+  svn_dirent_t *ent;
+
+  SVN_ERR(make_and_open_repos(&session, "test-get-dir", opts, pool));
+  SVN_ERR(commit_tree(session, pool));
+
+  /* This call used to block on ra-svn for 1.8.0...r1656713 */
+  SVN_TEST_ASSERT_ERROR(svn_ra_get_dir2(session, &dirents, NULL, NULL,
+                                        "non/existing/relpath", 1,
+                                        SVN_DIRENT_KIND, pool),
+                        SVN_ERR_FS_NOT_FOUND);
+
+  /* Test fetching SVN_DIRENT_SIZE without SVN_DIRENT_KIND. */
+  SVN_ERR(svn_ra_get_dir2(session, &dirents, NULL, NULL, "", 1,
+                          SVN_DIRENT_SIZE, pool));
+  SVN_TEST_INT_ASSERT(apr_hash_count(dirents), 1);
+  ent = svn_hash_gets(dirents, "A");
+  SVN_TEST_ASSERT(ent);
+
+#if 0
+  /* ra_serf returns SVN_INVALID_SIZE instead of the documented zero
+   * for directories. */
+  SVN_TEST_INT_ASSERT(ent->size, 0);
+#endif
+
+  return SVN_NO_ERROR;
+}
+
+/* Implements svn_commit_callback2_t for commit_callback_failure().
+   Sanity-checks the commit info for r1 and then deliberately returns
+   SVN_ERR_CANCELLED so the caller can verify that an error raised by
+   the commit callback propagates out of close_edit(). */
+static svn_error_t *
+commit_callback_with_failure(const svn_commit_info_t *info,
+                             void *baton,
+                             apr_pool_t *scratch_pool)
+{
+  apr_time_t timetemp;
+
+  SVN_TEST_ASSERT(info != NULL);
+  SVN_TEST_STRING_ASSERT(info->author, "jrandom");
+  SVN_TEST_STRING_ASSERT(info->post_commit_err, NULL);
+
+  /* The commit date must parse and be non-zero. */
+  SVN_ERR(svn_time_from_cstring(&timetemp, info->date, scratch_pool));
+  SVN_TEST_ASSERT(timetemp != 0);
+  SVN_TEST_ASSERT(info->repos_root != NULL);
+  SVN_TEST_ASSERT(info->revision == 1);
+
+  return svn_error_create(SVN_ERR_CANCELLED, NULL, NULL);
+}
+
+/* Commit r1 while the commit callback fails: close_edit() must relay
+   the callback's SVN_ERR_CANCELLED even though the commit itself went
+   through on the server side. */
+static svn_error_t *
+commit_callback_failure(const svn_test_opts_t *opts,
+                        apr_pool_t *pool)
+{
+  svn_ra_session_t *ra_session;
+  const svn_delta_editor_t *editor;
+  void *edit_baton;
+  void *root_baton;
+  SVN_ERR(make_and_open_repos(&ra_session, "commit_cb_failure", opts, pool));
+
+  SVN_ERR(svn_ra_get_commit_editor3(ra_session, &editor, &edit_baton,
+                                    apr_hash_make(pool), commit_callback_with_failure,
+                                    NULL, NULL, FALSE, pool));
+
+  SVN_ERR(editor->open_root(edit_baton, 0, pool, &root_baton));
+  SVN_ERR(editor->change_dir_prop(root_baton, "A",
+                                  svn_string_create("B", pool), pool));
+  SVN_ERR(editor->close_directory(root_baton, pool));
+  SVN_TEST_ASSERT_ERROR(editor->close_edit(edit_baton, pool),
+                        SVN_ERR_CANCELLED);
+
+  /* This is what users should do if close_edit fails... Except that in this
+     case the commit actually succeeded. */
+  SVN_ERR(editor->abort_edit(edit_baton, pool));
+  return SVN_NO_ERROR;
+}
+
+/* Driving a commit editor against a base revision above HEAD must fail
+   with SVN_ERR_FS_NO_SUCH_REVISION; different RA layers report the
+   error at different stages of the drive, so each step is tried until
+   one fails. */
+static svn_error_t *
+base_revision_above_youngest(const svn_test_opts_t *opts,
+                             apr_pool_t *pool)
+{
+  svn_ra_session_t *ra_session;
+  const svn_delta_editor_t *editor;
+  void *edit_baton;
+  void *root_baton;
+  svn_error_t *err;
+  SVN_ERR(make_and_open_repos(&ra_session, "base_revision_above_youngest",
+                              opts, pool));
+
+  SVN_ERR(svn_ra_get_commit_editor3(ra_session, &editor, &edit_baton,
+                                    apr_hash_make(pool), NULL,
+                                    NULL, NULL, FALSE, pool));
+
+  /* r1 doesn't exist, but we say we want to apply changes against this
+     revision to see how the ra layers behave.
+
+     Some will see an error directly on open_root, others in a later
+     state. */
+
+  /* ra-local and http pre-v2 will see the error here */
+  err = editor->open_root(edit_baton, 1, pool, &root_baton);
+
+  if (!err)
+    err = editor->change_dir_prop(root_baton, "A",
+                                  svn_string_create("B", pool), pool);
+
+  /* http v2 will notice it here (PROPPATCH) */
+  if (!err)
+    err = editor->close_directory(root_baton, pool);
+
+  /* ra svn only notes it at some later point. Typically here */
+  if (!err)
+    err = editor->close_edit(edit_baton, pool);
+
+  SVN_TEST_ASSERT_ERROR(err,
+                        SVN_ERR_FS_NO_SUCH_REVISION);
+
+  SVN_ERR(editor->abort_edit(edit_baton, pool));
+  return SVN_NO_ERROR;
+}
+
+/* delete_entry() given a revision above HEAD must fail with
+   SVN_ERR_FS_NO_SUCH_REVISION.  First commits r1 (adding /A), then
+   tries to delete A@2 while HEAD is still r1. */
+static svn_error_t *
+delete_revision_above_youngest(const svn_test_opts_t *opts,
+                               apr_pool_t *pool)
+{
+  svn_ra_session_t *ra_session;
+  const svn_delta_editor_t *editor;
+  svn_error_t *err;
+  void *edit_baton;
+
+  SVN_ERR(make_and_open_repos(&ra_session, "delete_revision_above_youngest",
+                              opts, pool));
+
+  SVN_ERR(svn_ra_get_commit_editor3(ra_session, &editor, &edit_baton,
+                                    apr_hash_make(pool), NULL,
+                                    NULL, NULL, FALSE, pool));
+
+  /* Commit r1: add directory A. */
+  {
+    void *root_baton;
+    void *dir_baton;
+
+    SVN_ERR(editor->open_root(edit_baton, 0, pool, &root_baton));
+    SVN_ERR(editor->add_directory("A", root_baton, NULL, SVN_INVALID_REVNUM,
+                                  pool, &dir_baton));
+    SVN_ERR(editor->close_directory(dir_baton, pool));
+    SVN_ERR(editor->close_directory(root_baton, pool));
+    SVN_ERR(editor->close_edit(edit_baton, pool));
+  }
+
+  SVN_ERR(svn_ra_get_commit_editor3(ra_session, &editor, &edit_baton,
+                                    apr_hash_make(pool), NULL,
+                                    NULL, NULL, FALSE, pool));
+
+  {
+    void *root_baton;
+    SVN_ERR(editor->open_root(edit_baton, 1, pool, &root_baton));
+
+    /* Now we supply r2, while HEAD is r1 */
+    err = editor->delete_entry("A", 2, root_baton, pool);
+
+    if (!err)
+      err = editor->close_edit(edit_baton, pool);
+
+    SVN_TEST_ASSERT_ERROR(err,
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+    SVN_ERR(editor->abort_edit(edit_baton, pool));
+  }
+  return SVN_NO_ERROR;
+}
+
+/* Stub svn_log_entry_receiver_t: ignores every log entry. */
+static svn_error_t *
+stub_log_receiver(void *baton,
+                  svn_log_entry_t *entry,
+                  apr_pool_t *scratch_pool)
+{
+  return SVN_NO_ERROR;
+}
+
+/* Stub svn_location_segment_receiver_t: ignores every segment. */
+static svn_error_t *
+stub_segment_receiver(svn_location_segment_t *segment,
+                      void *baton,
+                      apr_pool_t *scratch_pool)
+{
+  return SVN_NO_ERROR;
+}
+/* Stub svn_file_rev_handler_t: discards all data, installing the no-op
+   text-delta window handler when the driver asks for one. */
+static svn_error_t *
+stub_file_rev_handler(void *baton,
+                      const char *path,
+                      svn_revnum_t rev,
+                      apr_hash_t *rev_props,
+                      svn_boolean_t result_of_merge,
+                      svn_txdelta_window_handler_t *delta_handler,
+                      void **delta_baton,
+                      apr_array_header_t *prop_diffs,
+                      apr_pool_t *pool)
+{
+  if (delta_handler)
+    *delta_handler = svn_delta_noop_window_handler;
+
+  return SVN_NO_ERROR;
+}
+
+/* Baton for store_lock_result(): the apr_err of the last reported
+   lock/unlock error, or APR_SUCCESS when the operation succeeded. */
+struct lock_stub_baton_t
+{
+  apr_status_t result_code;
+};
+
+/* Implements svn_ra_lock_callback_t.  Stores only the error status of
+   the (last) reported result in BATON (a struct lock_stub_baton_t *). */
+static svn_error_t *
+store_lock_result(void *baton,
+                  const char *path,
+                  svn_boolean_t do_lock,
+                  const svn_lock_t *lock,
+                  svn_error_t *ra_err,
+                  apr_pool_t *pool)
+{
+  struct lock_stub_baton_t *b = baton;
+
+  b->result_code = ra_err ? ra_err->apr_err : APR_SUCCESS;
+  return SVN_NO_ERROR;
+}
+
+/* Revision-start callback for svn_ra_replay_range(): hands back the
+   no-op default editor so replayed revisions are simply drained. */
+static svn_error_t *
+replay_range_rev_start(svn_revnum_t revision,
+                       void *replay_baton,
+                       const svn_delta_editor_t **editor,
+                       void **edit_baton,
+                       apr_hash_t *rev_props,
+                       apr_pool_t *pool)
+{
+  *editor = svn_delta_default_editor(pool);
+  *edit_baton = NULL;
+  return SVN_NO_ERROR;
+}
+
+/* Revision-finish callback for svn_ra_replay_range(): nothing to do. */
+static svn_error_t *
+replay_range_rev_end(svn_revnum_t revision,
+                     void *replay_baton,
+                     const svn_delta_editor_t *editor,
+                     void *edit_baton,
+                     apr_hash_t *rev_props,
+                     apr_pool_t *pool)
+{
+  return SVN_NO_ERROR;
+}
+
+/* Drive a broad range of RA APIs with out-of-range (r2, while HEAD is
+   r1) and SVN_INVALID_REVNUM revisions and check the error each one
+   reports.  The repository gets exactly one commit (r1: A with file
+   A/iota, plus B, C, D). */
+static svn_error_t *
+ra_revision_errors(const svn_test_opts_t *opts,
+                   apr_pool_t *pool)
+{
+  svn_ra_session_t *ra_session;
+  const svn_delta_editor_t *editor;
+  svn_error_t *err;
+  void *edit_baton;
+
+  /* This function DOESN'T use a scratch/iter pool between requests...
+
+     That has a reason: some ra layers (e.g. Serf) are sensitive to
+     reusing the same pool. In that case they may produce bad results
+     that they wouldn't do (as often) when the pool wasn't reused.
+
+     If the amount of memory used gets too big we should probably split
+     this test... as the reuse already discovered a few issues that
+     are now resolved in ra_serf.
+   */
+  SVN_ERR(make_and_open_repos(&ra_session, "ra_revision_errors",
+                              opts, pool));
+
+  SVN_ERR(svn_ra_get_commit_editor3(ra_session, &editor, &edit_baton,
+                                    apr_hash_make(pool), NULL,
+                                    NULL, NULL, FALSE, pool));
+
+  /* Commit r1: directories A (with file A/iota), B, C and D. */
+  {
+    void *root_baton;
+    void *dir_baton;
+    void *file_baton;
+
+    SVN_ERR(editor->open_root(edit_baton, 0, pool, &root_baton));
+    SVN_ERR(editor->add_directory("A", root_baton, NULL, SVN_INVALID_REVNUM,
+                                  pool, &dir_baton));
+    SVN_ERR(editor->add_file("A/iota", dir_baton, NULL, SVN_INVALID_REVNUM,
+                             pool, &file_baton));
+    SVN_ERR(editor->close_file(file_baton, NULL, pool));
+    SVN_ERR(editor->close_directory(dir_baton, pool));
+    SVN_ERR(editor->add_directory("B", root_baton, NULL, SVN_INVALID_REVNUM,
+                                  pool, &dir_baton));
+    SVN_ERR(editor->close_directory(dir_baton, pool));
+    SVN_ERR(editor->add_directory("C", root_baton, NULL, SVN_INVALID_REVNUM,
+                                  pool, &dir_baton));
+    SVN_ERR(editor->close_directory(dir_baton, pool));
+    SVN_ERR(editor->add_directory("D", root_baton, NULL, SVN_INVALID_REVNUM,
+                                  pool, &dir_baton));
+    SVN_ERR(editor->close_directory(dir_baton, pool));
+    SVN_ERR(editor->close_directory(root_baton, pool));
+    SVN_ERR(editor->close_edit(edit_baton, pool));
+  }
+
+  /* Update to a target revision above HEAD. */
+  {
+    const svn_ra_reporter3_t *reporter;
+    void *report_baton;
+
+    err = svn_ra_do_update3(ra_session, &reporter, &report_baton,
+                            2, "", svn_depth_infinity, FALSE, FALSE,
+                            svn_delta_default_editor(pool), NULL,
+                            pool, pool);
+
+    if (!err)
+      err = reporter->set_path(report_baton, "", 0, svn_depth_infinity, FALSE,
+                               NULL, pool);
+
+    if (!err)
+      err = reporter->finish_report(report_baton, pool);
+
+    SVN_TEST_ASSERT_ERROR(err, SVN_ERR_FS_NO_SUCH_REVISION);
+  }
+
+  /* Report a base revision above HEAD. */
+  {
+    const svn_ra_reporter3_t *reporter;
+    void *report_baton;
+
+    err = svn_ra_do_update3(ra_session, &reporter, &report_baton,
+                            1, "", svn_depth_infinity, FALSE, FALSE,
+                            svn_delta_default_editor(pool), NULL,
+                            pool, pool);
+
+    if (!err)
+      err = reporter->set_path(report_baton, "", 2, svn_depth_infinity, FALSE,
+                               NULL, pool);
+
+    if (!err)
+      err = reporter->finish_report(report_baton, pool);
+
+    SVN_TEST_ASSERT_ERROR(err, SVN_ERR_FS_NO_SUCH_REVISION);
+  }
+
+  /* The same report with valid revisions must succeed. */
+  {
+    const svn_ra_reporter3_t *reporter;
+    void *report_baton;
+
+    err = svn_ra_do_update3(ra_session, &reporter, &report_baton,
+                            1, "", svn_depth_infinity, FALSE, FALSE,
+                            svn_delta_default_editor(pool), NULL,
+                            pool, pool);
+
+    if (!err)
+      err = reporter->set_path(report_baton, "", 0, svn_depth_infinity, FALSE,
+                               NULL, pool);
+
+    if (!err)
+      err = reporter->finish_report(report_baton, pool);
+
+    SVN_ERR(err);
+  }
+
+  {
+    svn_revnum_t revision;
+
+    /* A timestamp before the repository existed resolves to r0... */
+    SVN_ERR(svn_ra_get_dated_revision(ra_session, &revision,
+                                      apr_time_now() - apr_time_from_sec(3600),
+                                      pool));
+
+    SVN_TEST_ASSERT(revision == 0);
+
+    /* ...and a future timestamp resolves to HEAD (r1). */
+    SVN_ERR(svn_ra_get_dated_revision(ra_session, &revision,
+                                      apr_time_now() + apr_time_from_sec(3600),
+                                      pool));
+
+    SVN_TEST_ASSERT(revision == 1);
+  }
+
+  {
+    /* SVN_INVALID_REVNUM is protected by assert in ra loader */
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_change_rev_prop2(ra_session,
+                                                  2,
+                                                  "bad", NULL,
+                                                  svn_string_create("value",
+                                                                    pool),
+                                                  pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+  }
+
+  {
+    apr_hash_t *props;
+    svn_string_t *value;
+
+    /* SVN_INVALID_REVNUM is protected by assert in ra loader */
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_rev_proplist(ra_session, 2, &props, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_rev_prop(ra_session, 2, "bad", &value, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+  }
+
+  {
+    apr_hash_t *props;
+    svn_string_t *value;
+
+    /* NOTE(review): this block repeats the previous one verbatim.
+       Possibly a deliberate "ask again after the failure" check on the
+       session, possibly copy-paste -- confirm upstream intent before
+       removing. */
+
+    /* SVN_INVALID_REVNUM is protected by assert in ra loader */
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_rev_proplist(ra_session, 2, &props, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_rev_prop(ra_session, 2, "bad", &value, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+  }
+
+  {
+    svn_revnum_t fetched;
+    apr_hash_t *props;
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_file(ra_session, "A", 1,
+                                          svn_stream_empty(pool), &fetched,
+                                          &props, pool),
+                          SVN_ERR_FS_NOT_FILE);
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_file(ra_session, "A/iota", 2,
+                                          svn_stream_empty(pool), &fetched,
+                                          &props, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_file(ra_session, "Z", 1,
+                                          svn_stream_empty(pool), &fetched,
+                                          &props, pool),
+                          SVN_ERR_FS_NOT_FOUND);
+
+    SVN_ERR(svn_ra_get_file(ra_session, "A/iota", SVN_INVALID_REVNUM,
+                            svn_stream_empty(pool), &fetched,
+                            &props, pool));
+    SVN_TEST_ASSERT(fetched == 1);
+  }
+
+  {
+    svn_revnum_t fetched;
+    apr_hash_t *dirents;
+    apr_hash_t *props;
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_dir2(ra_session, &dirents, &fetched,
+                                          &props, "A/iota", 1,
+                                          SVN_DIRENT_ALL, pool),
+                          SVN_ERR_FS_NOT_DIRECTORY);
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_dir2(ra_session, &dirents, &fetched,
+                                          &props, "A", 2,
+                                          SVN_DIRENT_ALL, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_dir2(ra_session, &dirents, &fetched,
+                                          &props, "Z", 1,
+                                          SVN_DIRENT_ALL, pool),
+                          SVN_ERR_FS_NOT_FOUND);
+
+    SVN_ERR(svn_ra_get_dir2(ra_session, &dirents, &fetched,
+                            &props, "A", SVN_INVALID_REVNUM,
+                            SVN_DIRENT_ALL, pool));
+    SVN_TEST_ASSERT(fetched == 1);
+    SVN_TEST_ASSERT(apr_hash_count(dirents) == 1);
+  }
+
+  {
+    svn_mergeinfo_catalog_t catalog;
+    apr_array_header_t *paths = apr_array_make(pool, 1, sizeof(const char*));
+    APR_ARRAY_PUSH(paths, const char *) = "A";
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_mergeinfo(ra_session, &catalog, paths,
+                                               2, svn_mergeinfo_inherited,
+                                               FALSE, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_mergeinfo(ra_session, &catalog, paths,
+                                               0, svn_mergeinfo_inherited,
+                                               FALSE, pool),
+                          SVN_ERR_FS_NOT_FOUND);
+
+    SVN_ERR(svn_ra_get_mergeinfo(ra_session, &catalog, paths,
+                                 SVN_INVALID_REVNUM, svn_mergeinfo_inherited,
+                                 FALSE, pool));
+  }
+
+  {
+    apr_array_header_t *paths = apr_array_make(pool, 1, sizeof(const char*));
+    APR_ARRAY_PUSH(paths, const char *) = "A";
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_log2(ra_session, paths, 0, 2, -1,
+                                          FALSE, FALSE, FALSE, NULL,
+                                          stub_log_receiver, NULL, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_log2(ra_session, paths, 2, 0, -1,
+                                          FALSE, FALSE, FALSE, NULL,
+                                          stub_log_receiver, NULL, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_log2(ra_session, paths,
+                                          SVN_INVALID_REVNUM, 2, -1,
+                                          FALSE, FALSE, FALSE, NULL,
+                                          stub_log_receiver, NULL, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_log2(ra_session, paths,
+                                          2, SVN_INVALID_REVNUM, -1,
+                                          FALSE, FALSE, FALSE, NULL,
+                                          stub_log_receiver, NULL, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+  }
+
+  {
+    svn_node_kind_t kind;
+    SVN_TEST_ASSERT_ERROR(svn_ra_check_path(ra_session, "A", 2, &kind, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+    SVN_ERR(svn_ra_check_path(ra_session, "A", SVN_INVALID_REVNUM, &kind,
+                              pool));
+
+    SVN_TEST_ASSERT(kind == svn_node_dir);
+  }
+
+  {
+    svn_dirent_t *dirent;
+    apr_array_header_t *paths = apr_array_make(pool, 1, sizeof(const char*));
+    APR_ARRAY_PUSH(paths, const char *) = "A";
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_stat(ra_session, "A", 2, &dirent, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+    SVN_ERR(svn_ra_stat(ra_session, "A", SVN_INVALID_REVNUM, &dirent,
+                        pool));
+
+    SVN_TEST_ASSERT(dirent->kind == svn_node_dir);
+  }
+
+  {
+    apr_hash_t *locations;
+    apr_array_header_t *revisions = apr_array_make(pool, 2, sizeof(svn_revnum_t));
+    APR_ARRAY_PUSH(revisions, svn_revnum_t) = 1;
+
+    /* SVN_INVALID_REVNUM as passed revision doesn't work */
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_locations(ra_session, &locations, "A", 2,
+                                               revisions, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+    APR_ARRAY_PUSH(revisions, svn_revnum_t) = 7;
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_locations(ra_session, &locations, "A", 1,
+                                               revisions, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+    /* Putting SVN_INVALID_REVNUM in the array doesn't marshal properly in svn://
+     */
+  }
+
+  {
+    /* peg_rev -> SVN_INVALID_REVNUM -> youngest
+       start_rev -> SVN_INVALID_REVNUM -> peg_rev
+       end_rev -> SVN_INVALID_REVNUM -> 0 */
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_location_segments(ra_session, "A",
+                                                       2, 1, 0,
+                                                       stub_segment_receiver,
+                                                       NULL, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_location_segments(ra_session, "A",
+                                                       SVN_INVALID_REVNUM,
+                                                       2, 0,
+                                                       stub_segment_receiver,
+                                                       NULL, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_location_segments(ra_session, "A",
+                                                       SVN_INVALID_REVNUM,
+                                                       SVN_INVALID_REVNUM,
+                                                       2,
+                                                       stub_segment_receiver,
+                                                       NULL, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+    SVN_ERR(svn_ra_get_location_segments(ra_session, "A",
+                                         SVN_INVALID_REVNUM,
+                                         SVN_INVALID_REVNUM,
+                                         SVN_INVALID_REVNUM,
+                                         stub_segment_receiver,
+                                         NULL, pool));
+  }
+
+  {
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_file_revs2(ra_session, "A/iota", 2, 0,
+                                                FALSE, stub_file_rev_handler,
+                                                NULL, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_file_revs2(ra_session, "A/iota", 0, 2,
+                                                FALSE, stub_file_rev_handler,
+                                                NULL, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_file_revs2(ra_session, "A", 1, 1,
+                                                FALSE, stub_file_rev_handler,
+                                                NULL, pool),
+                          SVN_ERR_FS_NOT_FILE);
+  }
+
+  {
+    apr_hash_t *locks = apr_hash_make(pool);
+    svn_revnum_t rev = 2;
+    struct lock_stub_baton_t lr = {0};
+
+    svn_hash_sets(locks, "A/iota", &rev);
+
+    SVN_ERR(svn_ra_lock(ra_session, locks, "comment", FALSE,
+                        store_lock_result, &lr, pool));
+    SVN_TEST_ASSERT(lr.result_code == SVN_ERR_FS_NO_SUCH_REVISION);
+
+    rev = 0;
+    SVN_ERR(svn_ra_lock(ra_session, locks, "comment", FALSE,
+                        store_lock_result, &lr, pool));
+    SVN_TEST_ASSERT(lr.result_code == SVN_ERR_FS_OUT_OF_DATE);
+
+    svn_hash_sets(locks, "A/iota", NULL);
+    svn_hash_sets(locks, "A", &rev);
+    rev = SVN_INVALID_REVNUM;
+    SVN_ERR(svn_ra_lock(ra_session, locks, "comment", FALSE,
+                        store_lock_result, &lr, pool));
+    SVN_TEST_ASSERT(lr.result_code == SVN_ERR_FS_NOT_FILE);
+  }
+
+  {
+    apr_hash_t *locks = apr_hash_make(pool);
+    struct lock_stub_baton_t lr = {0};
+
+    svn_hash_sets(locks, "A/iota", "no-token");
+
+    SVN_ERR(svn_ra_unlock(ra_session, locks, FALSE,
+                          store_lock_result, &lr, pool));
+    SVN_TEST_ASSERT(lr.result_code == SVN_ERR_FS_NO_SUCH_LOCK);
+
+
+    svn_hash_sets(locks, "A/iota", NULL);
+    svn_hash_sets(locks, "A", "no-token");
+    SVN_ERR(svn_ra_unlock(ra_session, locks, FALSE,
+                          store_lock_result, &lr, pool));
+    SVN_TEST_ASSERT(lr.result_code == SVN_ERR_FS_NO_SUCH_LOCK);
+  }
+
+  {
+    svn_lock_t *lock;
+    SVN_ERR(svn_ra_get_lock(ra_session, &lock, "A", pool));
+    SVN_TEST_ASSERT(lock == NULL);
+  }
+
+  {
+    SVN_TEST_ASSERT_ERROR(svn_ra_replay(ra_session, 2, 0, TRUE,
+                                        svn_delta_default_editor(pool), NULL,
+                                        pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+    /* Simply assumes everything is there */
+    SVN_ERR(svn_ra_replay(ra_session, 1, 2, TRUE,
+                          svn_delta_default_editor(pool), NULL,
+                          pool));
+  }
+
+  {
+    SVN_TEST_ASSERT_ERROR(svn_ra_replay_range(ra_session, 1, 2, 0,
+                                              TRUE,
+                                              replay_range_rev_start,
+                                              replay_range_rev_end, NULL,
+                                              pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+    /* NOTE(review): comment below looks copy-pasted from the replay
+       case above -- here an error IS expected for start rev 2. */
+    /* Simply assumes everything is there */
+    SVN_TEST_ASSERT_ERROR(svn_ra_replay_range(ra_session, 2, 2, 0,
+                                              TRUE,
+                                              replay_range_rev_start,
+                                              replay_range_rev_end, NULL,
+                                              pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+  }
+
+  {
+    svn_revnum_t del_rev;
+
+    /* ### Explicitly documented to not return an FS or RA error???? */
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_deleted_rev(ra_session, "Z", 2, 1,
+                                                 &del_rev, pool),
+                          SVN_ERR_CLIENT_BAD_REVISION);
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_deleted_rev(ra_session, "Z",
+                                                 SVN_INVALID_REVNUM, 2,
+                                                 &del_rev, pool),
+                          SVN_ERR_CLIENT_BAD_REVISION);
+
+  }
+
+  {
+    apr_array_header_t *iprops;
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_inherited_props(ra_session, &iprops,
+                                                     "A", 2, pool, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_inherited_props(ra_session, &iprops,
+                                                     "A", SVN_INVALID_REVNUM,
+                                                     pool, pool),
+                          SVN_ERR_FS_NO_SUCH_REVISION);
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_inherited_props(ra_session, &iprops,
+                                                     "Z", 1,
+                                                     pool, pool),
+                          SVN_ERR_FS_NOT_FOUND);
+  }
+
+  return SVN_NO_ERROR;
+}
+/* svn_log_entry_receiver_t that always returns SVN_ERR_CEASE_INVOCATION
+   so the driver stops early. */
+static svn_error_t *
+error_log_receiver(void *baton,
+                   svn_log_entry_t *entry,
+                   apr_pool_t *scratch_pool)
+{
+  return svn_error_create(SVN_ERR_CEASE_INVOCATION, NULL, NULL);
+}
+
+/* svn_location_segment_receiver_t that always returns
+   SVN_ERR_CEASE_INVOCATION so the driver stops early. */
+static svn_error_t *
+error_segment_receiver(svn_location_segment_t *segment,
+                       void *baton,
+                       apr_pool_t *scratch_pool)
+{
+  return svn_error_create(SVN_ERR_CEASE_INVOCATION, NULL, NULL);
+}
+
+
+/* Verify that an error raised by a receiver callback
+   (SVN_ERR_CEASE_INVOCATION here) is passed through by the RA layer
+   and does not break the session for later requests. */
+static svn_error_t *
+errors_from_callbacks(const svn_test_opts_t *opts,
+                      apr_pool_t *pool)
+{
+  svn_ra_session_t *ra_session;
+  const svn_delta_editor_t *editor;
+  void *edit_baton;
+
+  /* This function DOESN'T use a scratch/iter pool between requests...
+
+     That has a reason: some ra layers (e.g. Serf) are sensitive to
+     reusing the same pool. In that case they may produce bad results
+     that they wouldn't do (as often) when the pool wasn't reused.
+
+     If the amount of memory used gets too big we should probably split
+     this test... as the reuse already discovered a few issues that
+     are now resolved in ra_serf.
+   */
+  SVN_ERR(make_and_open_repos(&ra_session, "errors_from_callbacks",
+                              opts, pool));
+
+  SVN_ERR(svn_ra_get_commit_editor3(ra_session, &editor, &edit_baton,
+                                    apr_hash_make(pool), NULL,
+                                    NULL, NULL, FALSE, pool));
+
+  /* Commit r1: A (with A/iota), B, C, D. */
+  {
+    void *root_baton;
+    void *dir_baton;
+    void *file_baton;
+
+    SVN_ERR(editor->open_root(edit_baton, 0, pool, &root_baton));
+    SVN_ERR(editor->add_directory("A", root_baton, NULL, SVN_INVALID_REVNUM,
+                                  pool, &dir_baton));
+    SVN_ERR(editor->add_file("A/iota", dir_baton, NULL, SVN_INVALID_REVNUM,
+                             pool, &file_baton));
+    SVN_ERR(editor->close_file(file_baton, NULL, pool));
+    SVN_ERR(editor->close_directory(dir_baton, pool));
+    SVN_ERR(editor->add_directory("B", root_baton, NULL, SVN_INVALID_REVNUM,
+                                  pool, &dir_baton));
+    SVN_ERR(editor->close_directory(dir_baton, pool));
+    SVN_ERR(editor->add_directory("C", root_baton, NULL, SVN_INVALID_REVNUM,
+                                  pool, &dir_baton));
+    SVN_ERR(editor->close_directory(dir_baton, pool));
+    SVN_ERR(editor->add_directory("D", root_baton, NULL, SVN_INVALID_REVNUM,
+                                  pool, &dir_baton));
+    SVN_ERR(editor->close_directory(dir_baton, pool));
+    SVN_ERR(editor->close_directory(root_baton, pool));
+    SVN_ERR(editor->close_edit(edit_baton, pool));
+  }
+
+  SVN_ERR(svn_ra_get_commit_editor3(ra_session, &editor, &edit_baton,
+                                    apr_hash_make(pool), NULL,
+                                    NULL, NULL, FALSE, pool));
+
+  /* Commit r2: change a property on A and A/iota so both appear in the
+     history walked below. */
+  {
+    void *root_baton;
+    void *dir_baton;
+    void *file_baton;
+
+    SVN_ERR(editor->open_root(edit_baton, 1, pool, &root_baton));
+    SVN_ERR(editor->open_directory("A", root_baton, 1, pool, &dir_baton));
+    SVN_ERR(editor->open_file("A/iota", dir_baton, 1, pool, &file_baton));
+
+    SVN_ERR(editor->change_file_prop(file_baton, "A", svn_string_create("B",
+                                                                        pool),
+                                     pool));
+
+    SVN_ERR(editor->close_file(file_baton, NULL, pool));
+
+    SVN_ERR(editor->change_dir_prop(dir_baton, "A", svn_string_create("B",
+                                                                      pool),
+                                    pool));
+    SVN_ERR(editor->close_directory(dir_baton, pool));
+    SVN_ERR(editor->close_directory(root_baton, pool));
+    SVN_ERR(editor->close_edit(edit_baton, pool));
+  }
+
+  {
+    apr_array_header_t *paths = apr_array_make(pool, 1, sizeof(const char*));
+    APR_ARRAY_PUSH(paths, const char *) = "A/iota";
+
+    /* Note that ra_svn performs OK for SVN_ERR_CEASE_INVOCATION, but any
+       other error will make it break the ra session for further operations */
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_log2(ra_session, paths, 2, 0, -1,
+                                          FALSE, FALSE, FALSE, NULL,
+                                          error_log_receiver, NULL, pool),
+                          SVN_ERR_CEASE_INVOCATION);
+  }
+
+  {
+    /* Note that ra_svn performs OK for SVN_ERR_CEASE_INVOCATION, but any
+       other error will make it break the ra session for further operations */
+
+    SVN_TEST_ASSERT_ERROR(svn_ra_get_location_segments(ra_session, "A/iota",
+                                                       2, 2, 0,
+                                                       error_segment_receiver,
+                                                       NULL, pool),
+                          SVN_ERR_CEASE_INVOCATION);
+  }
+
+  /* And a final check to see if the ra session is still ok */
+  {
+    svn_node_kind_t kind;
+
+    SVN_ERR(svn_ra_check_path(ra_session, "A", 2, &kind, pool));
+
+    SVN_TEST_ASSERT(kind == svn_node_dir);
+  }
+  return SVN_NO_ERROR;
+}
+
+/* Regression test: listing a directory with many copied children must
+   still deliver dirent data.  Creates /trunk and /tags in r1, commits
+   50 tags of trunk@1, then fetches the "tags" directory with
+   SVN_DIRENT_ALL.
+
+   Fix over the previous revision: the per-iteration commit drives now
+   allocate the revprop hash and root baton in ITERPOOL (cleared each
+   loop) instead of the long-lived POOL, so memory stays bounded and
+   matches the iterpool usage of every sibling editor call; ITERPOOL is
+   also destroyed once it is no longer needed. */
+static svn_error_t *
+ra_list_has_props(const svn_test_opts_t *opts,
+                  apr_pool_t *pool)
+{
+  svn_ra_session_t *ra_session;
+  const svn_delta_editor_t *editor;
+  apr_pool_t *iterpool = svn_pool_create(pool);
+  int i;
+  void *edit_baton;
+  const char *trunk_url;
+
+  SVN_ERR(make_and_open_repos(&ra_session, "ra_list_has_props",
+                              opts, pool));
+
+  SVN_ERR(svn_ra_get_commit_editor3(ra_session, &editor, &edit_baton,
+                                    apr_hash_make(pool), NULL,
+                                    NULL, NULL, FALSE, iterpool));
+
+  /* Create initial layout: r1 adds /trunk and /tags. */
+  {
+    void *root_baton;
+    void *dir_baton;
+
+    /* The root baton is only used during this drive, so ITERPOOL
+       suffices (the editor itself lives in ITERPOOL too). */
+    SVN_ERR(editor->open_root(edit_baton, 0, iterpool, &root_baton));
+    SVN_ERR(editor->add_directory("trunk", root_baton, NULL, SVN_INVALID_REVNUM,
+                                  iterpool, &dir_baton));
+    SVN_ERR(editor->close_directory(dir_baton, iterpool));
+    SVN_ERR(editor->add_directory("tags", root_baton, NULL, SVN_INVALID_REVNUM,
+                                  iterpool, &dir_baton));
+    SVN_ERR(editor->close_directory(dir_baton, iterpool));
+    SVN_ERR(editor->close_directory(root_baton, iterpool));
+    SVN_ERR(editor->close_edit(edit_baton, iterpool));
+  }
+
+  SVN_ERR(svn_ra_get_repos_root2(ra_session, &trunk_url, pool));
+  trunk_url = svn_path_url_add_component2(trunk_url, "trunk", pool);
+
+  /* Create a few tags. Using a value like 8000 will take too long for a normal
+     testrun, but produces more realistic problems */
+  for (i = 0; i < 50; i++)
+    {
+      void *root_baton;
+      void *tags_baton;
+      void *dir_baton;
+
+      svn_pool_clear(iterpool);
+
+      /* All per-revision allocations go into ITERPOOL; everything the
+         editor hands back is dead after close_edit() below. */
+      SVN_ERR(svn_ra_get_commit_editor3(ra_session, &editor, &edit_baton,
+                                        apr_hash_make(iterpool), NULL,
+                                        NULL, NULL, FALSE, iterpool));
+
+      SVN_ERR(editor->open_root(edit_baton, i+1, iterpool, &root_baton));
+      SVN_ERR(editor->open_directory("tags", root_baton, i+1, iterpool,
+                                     &tags_baton));
+      SVN_ERR(editor->add_directory(apr_psprintf(iterpool, "tags/T%05d", i+1),
+                                    tags_baton, trunk_url, 1, iterpool,
+                                    &dir_baton));
+
+      SVN_ERR(editor->close_directory(dir_baton, iterpool));
+      SVN_ERR(editor->close_directory(tags_baton, iterpool));
+      SVN_ERR(editor->close_directory(root_baton, iterpool));
+      SVN_ERR(editor->close_edit(edit_baton, iterpool));
+    }
+
+  svn_pool_destroy(iterpool);
+
+  /* Fetch every dirent field for the now well-populated directory. */
+  {
+    apr_hash_t *dirents;
+    svn_revnum_t fetched_rev;
+    apr_hash_t *props;
+
+    SVN_ERR(svn_ra_get_dir2(ra_session, &dirents, &fetched_rev, &props,
+                            "tags", SVN_INVALID_REVNUM,
+                            SVN_DIRENT_ALL, pool));
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test ra_svn tunnel editor handling, including polling. */
+
+static svn_error_t *
+tunnel_run_checkout(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ tunnel_baton_t *b = apr_pcalloc(pool, sizeof(*b));
+ apr_pool_t *scratch_pool = svn_pool_create(pool);
+ const char *url;
+ svn_ra_callbacks2_t *cbtable;
+ svn_ra_session_t *session;
+ const char tunnel_repos_name[] = "test-run_checkout";
+ const svn_ra_reporter3_t *reporter;
+ void *report_baton;
+
+ b->magic = TUNNEL_MAGIC;
+
+ SVN_ERR(svn_test__create_repos(NULL, tunnel_repos_name, opts, scratch_pool));
+
+ /* Immediately close the repository to avoid race condition with svnserve
+ (and then the cleanup code) with BDB when our pool is cleared. */
+ svn_pool_clear(scratch_pool);
+
+ url = apr_pstrcat(pool, "svn+test://localhost/", tunnel_repos_name,
+ SVN_VA_NULL);
+ SVN_ERR(svn_ra_create_callbacks(&cbtable, pool));
+ cbtable->check_tunnel_func = check_tunnel;
+ cbtable->open_tunnel_func = open_tunnel;
+ cbtable->tunnel_baton = b;
+ SVN_ERR(svn_cmdline_create_auth_baton2(&cbtable->auth_baton,
+ TRUE /* non_interactive */,
+ "jrandom", "rayjandom",
+ NULL,
+ TRUE /* no_auth_cache */,
+ FALSE /* trust_server_cert */,
+ FALSE, FALSE, FALSE, FALSE,
+ NULL, NULL, NULL, pool));
+
+ b->last_check = FALSE;
+
+ SVN_ERR(svn_ra_open4(&session, NULL, url, NULL, cbtable, NULL, NULL,
+ scratch_pool));
+
+ SVN_ERR(commit_changes(session, pool));
+
+ SVN_ERR(svn_ra_do_update3(session,
+ &reporter, &report_baton,
+ 1, "",
+ svn_depth_infinity, FALSE, FALSE,
+ svn_delta_default_editor(pool), NULL,
+ pool, pool));
+
+ SVN_ERR(reporter->set_path(report_baton, "", 0, svn_depth_infinity, FALSE,
+ NULL, pool));
+
+ SVN_ERR(reporter->finish_report(report_baton, pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* Implements svn_log_entry_receiver_t for commit_empty_last_change */
+static svn_error_t *
+AA_receiver(void *baton,
+ svn_log_entry_t *log_entry,
+ apr_pool_t *pool)
+{
+ svn_log_changed_path2_t *p;
+ apr_hash_index_t *hi;
+
+ SVN_TEST_ASSERT(log_entry->changed_paths2 != NULL);
+ SVN_TEST_ASSERT(apr_hash_count(log_entry->changed_paths2) == 1);
+
+ hi = apr_hash_first(pool, log_entry->changed_paths2);
+
+ SVN_TEST_STRING_ASSERT(apr_hash_this_key(hi), "/AA");
+ p = apr_hash_this_val(hi);
+ SVN_TEST_STRING_ASSERT(p->copyfrom_path, "/A");
+ SVN_TEST_INT_ASSERT(p->copyfrom_rev, 3);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+commit_empty_last_change(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_ra_session_t *session;
+ apr_hash_t *revprop_table = apr_hash_make(pool);
+ const svn_delta_editor_t *editor;
+ void *edit_baton;
+ const char *repos_root_url;
+ void *root_baton, *aa_baton;
+ apr_pool_t *tmp_pool = svn_pool_create(pool);
+ svn_dirent_t *dirent;
+ int i;
+
+ SVN_ERR(make_and_open_repos(&session,
+ "commit_empty_last_change", opts,
+ pool));
+
+ SVN_ERR(commit_changes(session, tmp_pool));
+
+ SVN_ERR(svn_ra_get_repos_root2(session, &repos_root_url, pool));
+ for (i = 0; i < 2; i++)
+ {
+ svn_pool_clear(tmp_pool);
+
+ SVN_ERR(svn_ra_get_commit_editor3(session, &editor, &edit_baton,
+ revprop_table,
+ NULL, NULL, NULL, TRUE, tmp_pool));
+
+ SVN_ERR(editor->open_root(edit_baton, 1, tmp_pool, &root_baton));
+ SVN_ERR(editor->close_directory(root_baton, tmp_pool));
+ SVN_ERR(editor->close_edit(edit_baton, tmp_pool));
+
+ SVN_ERR(svn_ra_stat(session, "", 2+i, &dirent, tmp_pool));
+
+ SVN_TEST_ASSERT(dirent != NULL);
+ SVN_TEST_STRING_ASSERT(dirent->last_author, "jrandom");
+
+      /* BDB used to only update last_changed on the repos_root when there
+ was an actual change. Now all filesystems behave in the same way */
+ SVN_TEST_INT_ASSERT(dirent->created_rev, 2+i);
+ }
+
+ svn_pool_clear(tmp_pool);
+
+ SVN_ERR(svn_ra_get_commit_editor3(session, &editor, &edit_baton,
+ revprop_table,
+ NULL, NULL, NULL, TRUE, tmp_pool));
+
+ SVN_ERR(editor->open_root(edit_baton, 1, tmp_pool, &root_baton));
+ SVN_ERR(editor->add_directory("AA", root_baton,
+ svn_path_url_add_component2(repos_root_url,
+ "A", tmp_pool),
+ 3, tmp_pool,
+ &aa_baton));
+ SVN_ERR(editor->close_directory(aa_baton, tmp_pool));
+ SVN_ERR(editor->close_directory(root_baton, tmp_pool));
+ SVN_ERR(editor->close_edit(edit_baton, tmp_pool));
+
+ svn_pool_clear(tmp_pool);
+
+ {
+ apr_array_header_t *paths = apr_array_make(tmp_pool, 1, sizeof(const char*));
+ APR_ARRAY_PUSH(paths, const char *) = "AA";
+
+ SVN_ERR(svn_ra_get_log2(session, paths, 4, 4, 1, TRUE, FALSE, FALSE, NULL,
+ AA_receiver, NULL, tmp_pool));
+ }
+
+ svn_pool_destroy(tmp_pool);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+commit_locked_file(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ const char *url;
+ svn_ra_callbacks2_t *cbtable;
+ svn_ra_session_t *session;
+ const svn_delta_editor_t *editor;
+ void *edit_baton;
+ void *root_baton;
+ void *file_baton;
+ struct lock_result_t *lock_result;
+ apr_hash_t *lock_tokens;
+ svn_txdelta_window_handler_t handler;
+ void *handler_baton;
+ svn_revnum_t fetched_rev;
+ apr_hash_t *fetched_props;
+ const svn_string_t *propval;
+
+ SVN_ERR(svn_test__create_repos2(NULL, &url, NULL,
+ "test-repo-commit-locked-file-test",
+ opts, pool, pool));
+
+ SVN_ERR(svn_ra_initialize(pool));
+ SVN_ERR(svn_ra_create_callbacks(&cbtable, pool));
+ SVN_ERR(svn_test__init_auth_baton(&cbtable->auth_baton, pool));
+
+ SVN_ERR(svn_ra_open4(&session, NULL, url, NULL, cbtable,
+ NULL, NULL, pool));
+ SVN_ERR(svn_ra_get_commit_editor3(session, &editor, &edit_baton,
+ apr_hash_make(pool),
+ NULL, NULL, NULL, TRUE, pool));
+ /* Add a file. */
+ SVN_ERR(editor->open_root(edit_baton, SVN_INVALID_REVNUM,
+ pool, &root_baton));
+ SVN_ERR(editor->add_file("file", root_baton, NULL, SVN_INVALID_REVNUM,
+ pool, &file_baton));
+ SVN_ERR(editor->close_file(file_baton, NULL, pool));
+ SVN_ERR(editor->close_directory(root_baton, pool));
+ SVN_ERR(editor->close_edit(edit_baton, pool));
+
+ /* Acquire a lock on this file. */
+ {
+ struct lock_baton_t baton = {0};
+ svn_revnum_t rev = 1;
+ apr_hash_t *lock_targets;
+
+ baton.results = apr_hash_make(pool);
+ baton.pool = pool;
+
+ lock_targets = apr_hash_make(pool);
+ svn_hash_sets(lock_targets, "file", &rev);
+ SVN_ERR(svn_ra_lock(session, lock_targets, "comment", FALSE,
+ lock_cb, &baton, pool));
+
+ SVN_ERR(expect_lock("file", baton.results, session, pool));
+ lock_result = svn_hash_gets(baton.results, "file");
+ }
+
+ /* Open a new session using the file parent's URL. */
+ SVN_ERR(svn_ra_open4(&session, NULL, url, NULL, cbtable,
+ NULL, NULL, pool));
+
+ /* Create a new commit editor supplying our lock token. */
+ lock_tokens = apr_hash_make(pool);
+ svn_hash_sets(lock_tokens, "file", lock_result->lock->token);
+ SVN_ERR(svn_ra_get_commit_editor3(session, &editor, &edit_baton,
+ apr_hash_make(pool), NULL, NULL,
+ lock_tokens, TRUE, pool));
+ /* Edit the locked file. */
+ SVN_ERR(editor->open_root(edit_baton, SVN_INVALID_REVNUM,
+ pool, &root_baton));
+ SVN_ERR(editor->open_file("file", root_baton, SVN_INVALID_REVNUM, pool,
+ &file_baton));
+ SVN_ERR(editor->apply_textdelta(file_baton, NULL, pool, &handler,
+ &handler_baton));
+ SVN_ERR(svn_txdelta_send_string(svn_string_create("A", pool),
+ handler, handler_baton, pool));
+ SVN_ERR(editor->close_file(file_baton, NULL, pool));
+ SVN_ERR(editor->close_directory(root_baton, pool));
+ SVN_ERR(editor->close_edit(edit_baton, pool));
+
+ /* Check the result. */
+ SVN_ERR(svn_ra_get_file(session, "file", SVN_INVALID_REVNUM, NULL,
+ &fetched_rev, NULL, pool));
+ SVN_TEST_INT_ASSERT((int) fetched_rev, 2);
+
+ /* Change property of the locked file. */
+ SVN_ERR(svn_ra_get_commit_editor3(session, &editor, &edit_baton,
+ apr_hash_make(pool), NULL, NULL,
+ lock_tokens, TRUE, pool));
+ SVN_ERR(editor->open_root(edit_baton, SVN_INVALID_REVNUM,
+ pool, &root_baton));
+ SVN_ERR(editor->open_file("file", root_baton, SVN_INVALID_REVNUM, pool,
+ &file_baton));
+ SVN_ERR(editor->change_file_prop(file_baton, "propname",
+ svn_string_create("propval", pool),
+ pool));
+ SVN_ERR(editor->close_file(file_baton, NULL, pool));
+ SVN_ERR(editor->close_directory(root_baton, pool));
+ SVN_ERR(editor->close_edit(edit_baton, pool));
+
+ /* Check the result. */
+ SVN_ERR(svn_ra_get_file(session, "file", SVN_INVALID_REVNUM, NULL,
+ &fetched_rev, &fetched_props, pool));
+ SVN_TEST_INT_ASSERT((int) fetched_rev, 3);
+ propval = svn_hash_gets(fetched_props, "propname");
+ SVN_TEST_ASSERT(propval);
+ SVN_TEST_STRING_ASSERT(propval->data, "propval");
+
+ return SVN_NO_ERROR;
+}
+
+
+/* The test table. */
+
+static int max_threads = 4;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_OPTS_PASS(location_segments_test,
+ "test svn_ra_get_location_segments"),
+ SVN_TEST_OPTS_PASS(check_tunnel_callback_test,
+ "test ra_svn tunnel callback check"),
+ SVN_TEST_OPTS_PASS(tunnel_callback_test,
+ "test ra_svn tunnel creation callbacks"),
+ SVN_TEST_OPTS_PASS(lock_test,
+ "lock multiple paths"),
+ SVN_TEST_OPTS_PASS(get_dir_test,
+ "test ra_get_dir2"),
+ SVN_TEST_OPTS_PASS(commit_callback_failure,
+ "commit callback failure"),
+ SVN_TEST_OPTS_PASS(base_revision_above_youngest,
+ "base revision newer than youngest"),
+ SVN_TEST_OPTS_PASS(delete_revision_above_youngest,
+ "delete revision newer than youngest"),
+ SVN_TEST_OPTS_PASS(ra_revision_errors,
+ "check how ra functions handle bad revisions"),
+ SVN_TEST_OPTS_PASS(errors_from_callbacks,
+ "check how ra layers handle errors from callbacks"),
+ SVN_TEST_OPTS_PASS(ra_list_has_props,
+ "check list has_props performance"),
+ SVN_TEST_OPTS_PASS(tunnel_run_checkout,
+ "verify checkout over a tunnel"),
+ SVN_TEST_OPTS_PASS(commit_empty_last_change,
+ "check how last change applies to empty commit"),
+ SVN_TEST_OPTS_PASS(commit_locked_file,
+ "check commit editor for a locked file"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_ra_local/ra-local-test.c b/subversion/tests/libsvn_ra_local/ra-local-test.c
new file mode 100644
index 0000000..98695bc
--- /dev/null
+++ b/subversion/tests/libsvn_ra_local/ra-local-test.c
@@ -0,0 +1,302 @@
+/*
+ * ra-local-test.c : basic tests for the RA LOCAL library
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+
+
+#include <apr_general.h>
+#include <apr_pools.h>
+
+#define SVN_DEPRECATED
+
+#include "svn_error.h"
+#include "svn_delta.h"
+#include "svn_ra.h"
+#include "svn_client.h"
+
+#include "../svn_test.h"
+#include "../svn_test_fs.h"
+#include "../../libsvn_ra_local/ra_local.h"
+
+/*-------------------------------------------------------------------*/
+
+/** Helper routines. **/
+
+
+static svn_error_t *
+make_and_open_local_repos(svn_ra_session_t **session,
+ const char *repos_name,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_repos_t *repos;
+ const char *url;
+ svn_ra_callbacks2_t *cbtable;
+
+ SVN_ERR(svn_ra_create_callbacks(&cbtable, pool));
+
+ SVN_ERR(svn_test__create_repos(&repos, repos_name, opts, pool));
+ SVN_ERR(svn_ra_initialize(pool));
+
+ SVN_ERR(svn_uri_get_file_url_from_dirent(&url, repos_name, pool));
+
+ SVN_ERR(svn_ra_open3(session,
+ url,
+ NULL,
+ cbtable,
+ NULL,
+ NULL,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+/*-------------------------------------------------------------------*/
+
+/** The tests **/
+
+/* Open an RA session to a local repository. */
+static svn_error_t *
+open_ra_session(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_ra_session_t *session;
+
+ SVN_ERR(make_and_open_local_repos(&session,
+ "test-repo-open", opts, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Discover the youngest revision in a repository. */
+static svn_error_t *
+get_youngest_rev(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_ra_session_t *session;
+ svn_revnum_t latest_rev;
+
+ SVN_ERR(make_and_open_local_repos(&session,
+ "test-repo-getrev", opts,
+ pool));
+
+ /* Get the youngest revision and make sure it's 0. */
+ SVN_ERR(svn_ra_get_latest_revnum(session, &latest_rev, pool));
+
+ if (latest_rev != 0)
+ return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+ "youngest rev isn't 0!");
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Helper function. Run svn_ra_local__split_URL with interest only in
+ the return error code */
+static apr_status_t
+try_split_url(const char *url, apr_pool_t *pool)
+{
+ svn_repos_t *repos;
+ const char *repos_path, *fs_path;
+ svn_error_t *err;
+ apr_status_t apr_err;
+
+ err = svn_ra_local__split_URL(&repos, &repos_path, &fs_path, url, pool);
+
+ if (! err)
+ return APR_SUCCESS;
+
+ apr_err = err->apr_err;
+ svn_error_clear(err);
+ return apr_err;
+}
+
+
+static svn_error_t *
+split_url_syntax(apr_pool_t *pool)
+{
+ apr_status_t apr_err;
+
+ /* TEST 1: Make sure we can recognize bad URLs (this should not
+ require a filesystem) */
+
+ /* Use `blah' for scheme instead of `file' */
+ apr_err = try_split_url("blah:///bin/svn", pool);
+ if (apr_err != SVN_ERR_RA_ILLEGAL_URL)
+ return svn_error_create
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_ra_local__split_URL failed to catch bad URL (scheme)");
+
+ /* Use only a hostname, with no path */
+ apr_err = try_split_url("file://hostname", pool);
+ if (apr_err != SVN_ERR_RA_ILLEGAL_URL)
+ return svn_error_create
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_ra_local__split_URL failed to catch bad URL (no path)");
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+split_url_bad_host(apr_pool_t *pool)
+{
+ apr_status_t apr_err;
+
+ /* Give a hostname other than `' or `localhost' */
+ apr_err = try_split_url("file://myhost/repos/path", pool);
+ if (apr_err != SVN_ERR_RA_ILLEGAL_URL)
+ return svn_error_create
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_ra_local__split_URL failed to catch bad URL (hostname)");
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+split_url_host(apr_pool_t *pool)
+{
+ apr_status_t apr_err;
+
+ /* Make sure we *don't* fuss about a good URL (note that this URL
+ still doesn't point to an existing versioned resource) */
+ apr_err = try_split_url("file:///repos/path", pool);
+ if (apr_err == SVN_ERR_RA_ILLEGAL_URL)
+ return svn_error_create
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_ra_local__split_URL cried foul about a good URL (no hostname)");
+
+ apr_err = try_split_url("file://localhost/repos/path", pool);
+ if (apr_err == SVN_ERR_RA_ILLEGAL_URL)
+ return svn_error_create
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_ra_local__split_URL cried foul about a good URL (localhost)");
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Helper function. Creates a repository in the current working
+   directory named REPOS_PATH, then assembles a URL that points to that
+ FS, plus additional cruft (IN_REPOS_PATH) that theoretically refers to a
+ versioned resource in that repository. Finally, it runs this URL
+ through svn_ra_local__split_URL to verify that it accurately
+ separates the filesystem path and the repository path cruft.
+
+ If IN_REPOS_PATH is NULL, we'll split the root URL and verify our
+   parts that way (noting that the in-repos-path that results should
+ be "/"). */
+static svn_error_t *
+check_split_url(const char *repos_path,
+ const char *in_repos_path,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_repos_t *repos;
+ const char *url, *root_url, *repos_part, *in_repos_part;
+
+ /* Create a filesystem and repository */
+ SVN_ERR(svn_test__create_repos(&repos, repos_path, opts, pool));
+
+ SVN_ERR(svn_uri_get_file_url_from_dirent(&root_url, repos_path, pool));
+ if (in_repos_path)
+ url = apr_pstrcat(pool, root_url, in_repos_path, SVN_VA_NULL);
+ else
+ url = root_url;
+
+ /* Run this URL through our splitter... */
+ SVN_ERR(svn_ra_local__split_URL(&repos, &repos_part, &in_repos_part,
+ url, pool));
+
+ /* We better see the REPOS_PART looking just like our ROOT_URL. And
+ we better see in the IN_REPOS_PART either exactly the same as the
+ IN_REPOS_PATH provided us, or "/" if we weren't provided an
+ IN_REPOS_PATH. */
+ if ((strcmp(repos_part, root_url) == 0)
+ && ((in_repos_path && (strcmp(in_repos_part, in_repos_path) == 0))
+ || ((! in_repos_path) && (strcmp(in_repos_part, "/") == 0))))
+ return SVN_NO_ERROR;
+
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_ra_local__split_URL failed to properly split the URL\n"
+ "%s\n%s\n%s\n%s",
+ repos_part, root_url, in_repos_part,
+ in_repos_path ? in_repos_path : "(null)");
+}
+
+
+static svn_error_t *
+split_url_test(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ /* TEST 2: Given well-formed URLs, make sure that we can correctly
+ find where the filesystem portion of the path ends and the
+ in-repository path begins. */
+ SVN_ERR(check_split_url("test-repo-split-fs1",
+ "/trunk/foobar/quux.c",
+ opts,
+ pool));
+ SVN_ERR(check_split_url("test-repo-split-fs2",
+ "/alpha/beta/gamma/delta/epsilon/zeta/eta/theta",
+ opts,
+ pool));
+ SVN_ERR(check_split_url("test-repo-split-fs3",
+ NULL,
+ opts,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* The test table. */
+
+#if defined(WIN32) || defined(__CYGWIN__)
+#define HAS_UNC_HOST 1
+#else
+#define HAS_UNC_HOST 0
+#endif
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_OPTS_PASS(open_ra_session,
+ "open an ra session to a local repository"),
+ SVN_TEST_OPTS_PASS(get_youngest_rev,
+ "get the youngest revision in a repository"),
+ SVN_TEST_PASS2(split_url_syntax,
+ "svn_ra_local__split_URL: syntax validation"),
+ SVN_TEST_SKIP2(split_url_bad_host, HAS_UNC_HOST,
+ "svn_ra_local__split_URL: invalid host names"),
+ SVN_TEST_PASS2(split_url_host,
+ "svn_ra_local__split_URL: valid host names"),
+ SVN_TEST_OPTS_PASS(split_url_test,
+ "test svn_ra_local__split_URL correctness"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_repos/authz-test.c b/subversion/tests/libsvn_repos/authz-test.c
new file mode 100644
index 0000000..6ee2448
--- /dev/null
+++ b/subversion/tests/libsvn_repos/authz-test.c
@@ -0,0 +1,495 @@
+/* authz-test.c --- tests for authorization system
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <apr_fnmatch.h>
+
+#include "svn_pools.h"
+#include "svn_iter.h"
+#include "svn_hash.h"
+#include "private/svn_subr_private.h"
+
+#include "../../libsvn_repos/authz.h"
+
+#include "../svn_test.h"
+
+/* Used to terminate lines in large multi-line string literals. */
+#define NL APR_EOL_STR
+
+static svn_error_t *
+print_group_member(void *baton,
+ const void *key, apr_ssize_t klen, void *val,
+ apr_pool_t *pool)
+{
+ svn_boolean_t *first = baton;
+ const char *member = key;
+ printf("%s%s", (*first ? "" : ", "), member);
+ *first = FALSE;
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+print_group(void *baton,
+ const void *key, apr_ssize_t klen, void *val,
+ apr_pool_t *pool)
+{
+ const char *group = key;
+ apr_hash_t *members = val;
+ svn_boolean_t first = TRUE;
+ svn_error_t *err;
+
+ printf(" %s = ", group);
+ err = svn_iter_apr_hash(NULL, members,
+ print_group_member, &first, pool);
+ printf("\n");
+
+ return err;
+}
+
+
+static const char *
+access_string(authz_access_t access)
+{
+ switch (access & authz_access_write)
+ {
+ case authz_access_none: return ""; break;
+ case authz_access_read_flag: return "r"; break;
+ case authz_access_write_flag: return "w"; break;
+ default:
+ return "rw";
+ }
+}
+
+static svn_error_t *
+print_repos_rights(void *baton,
+ const void *key, apr_ssize_t klen,
+ void *val,
+ apr_pool_t *pool)
+{
+ const char *repos = key;
+ authz_rights_t *rights = val;
+ printf(" %s = all:%s some:%s\n", repos,
+ access_string(rights->min_access),
+ access_string(rights->max_access));
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+print_user_rights(void *baton, const void *key, apr_ssize_t klen,
+ void *val,
+ apr_pool_t *pool)
+{
+ authz_global_rights_t *gr = val;
+
+ printf(" %s\n", gr->user);
+ printf(" [all] = all:%s some:%s\n",
+ access_string(gr->all_repos_rights.min_access),
+ access_string(gr->all_repos_rights.max_access));
+ printf(" [any] = all:%s some:%s\n",
+ access_string(gr->any_repos_rights.min_access),
+ access_string(gr->any_repos_rights.max_access));
+ SVN_ERR(svn_iter_apr_hash(NULL, gr->per_repos_rights,
+ print_repos_rights, NULL, pool));
+ return SVN_NO_ERROR;
+}
+
+static const char*
+rule_string(authz_rule_t* rule, apr_pool_t *pool)
+{
+ svn_stringbuf_t *str;
+ int i;
+
+ if (rule->len == 0)
+ return "/";
+
+ str = svn_stringbuf_create_empty(pool);
+
+ for (i = 0; i < rule->len; ++i)
+ {
+ authz_rule_segment_t *segment = &rule->path[i];
+
+ switch(segment->kind)
+ {
+ case authz_rule_any_segment:
+ svn_stringbuf_appendcstr(str, "/*");
+ break;
+
+ case authz_rule_any_recursive:
+ svn_stringbuf_appendcstr(str, "/**");
+ break;
+
+ case authz_rule_prefix:
+ svn_stringbuf_appendcstr(str, "/#");
+ svn_stringbuf_appendcstr(str, segment->pattern.data);
+ svn_stringbuf_appendbyte(str, '*');
+ break;
+
+ case authz_rule_suffix:
+ svn_stringbuf_appendcstr(str, "/#*");
+ svn_stringbuf_appendcstr(str, segment->pattern.data);
+ svn_authz__reverse_string(
+ str->data + str->len - segment->pattern.len,
+ segment->pattern.len);
+ break;
+
+ case authz_rule_fnmatch:
+ svn_stringbuf_appendcstr(str, "/%");
+ svn_stringbuf_appendcstr(str, segment->pattern.data);
+ break;
+
+ default: /* literal */
+ svn_stringbuf_appendcstr(str, "//");
+ svn_stringbuf_appendcstr(str, segment->pattern.data);
+ }
+ }
+ return str->data;
+}
+
+
+static svn_boolean_t
+has_glob(authz_rule_t* rule)
+{
+ int i;
+ for (i = 0; i < rule->len; ++i)
+ {
+ authz_rule_segment_t *segment = &rule->path[i];
+ if (segment->kind != authz_rule_literal)
+ return TRUE;
+ }
+ return FALSE;
+}
+
+
+static svn_error_t *
+test_authz_parse(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ const char *srcdir;
+ const char *rules_path;
+ apr_file_t *rules_file;
+ svn_stream_t *rules;
+ const char *groups_path;
+ apr_file_t *groups_file;
+ svn_stream_t *groups;
+ authz_full_t *authz;
+ apr_hash_t *groupdefs = svn_hash__make(pool);
+ int i;
+
+ const char *check_user = "wunga";
+ const char *check_repo = "bloop";
+ authz_rights_t global_rights;
+ svn_boolean_t global_explicit;
+
+
+ SVN_ERR(svn_test_get_srcdir(&srcdir, opts, pool));
+ rules_path = svn_dirent_join(srcdir, "authz.rules", pool);
+ groups_path = svn_dirent_join(srcdir, "authz.groups", pool);
+
+ SVN_ERR(svn_io_file_open(&rules_file, rules_path,
+ APR_READ, APR_OS_DEFAULT,
+ pool));
+ rules = svn_stream_from_aprfile2(rules_file, FALSE, pool);
+ SVN_ERR(svn_io_file_open(&groups_file, groups_path,
+ APR_READ, APR_OS_DEFAULT,
+ pool));
+ groups = svn_stream_from_aprfile2(groups_file, FALSE, pool);
+ SVN_ERR(svn_authz__parse(&authz, rules, groups, pool, pool));
+
+ printf("Access check for ('%s', '%s')\n", check_user, check_repo);
+
+ global_explicit = svn_authz__get_global_rights(&global_rights, authz,
+ check_user, check_repo);
+ printf("Global rights: min=%s, max=%s (%s)\n\n",
+ access_string(global_rights.min_access),
+ access_string(global_rights.max_access),
+ (global_explicit ? "explicit" : "implicit"));
+
+ printf("[rules]\n");
+ for (i = 0; i < authz->acls->nelts; ++i)
+ {
+ authz_acl_t *acl = &APR_ARRAY_IDX(authz->acls, i, authz_acl_t);
+ const authz_access_t all_access =
+ (acl->anon_access & acl->authn_access);
+ authz_access_t access;
+ svn_boolean_t has_access =
+ svn_authz__get_acl_access(&access, acl, check_user, check_repo);
+ int j;
+
+ printf("%s%s%s Sequence: %d\n"
+ " Repository: [%s]\n"
+ " Rule: %s[%s]\n",
+ (has_access ? "Match = " : ""),
+ (has_access ? access_string(access) : ""),
+ (has_access ? "\n" : ""),
+ acl->sequence_number,
+ acl->rule.repos,
+ (has_glob(&acl->rule) ? "glob:" : " "),
+ rule_string(&acl->rule, pool));
+
+ if (acl->has_anon_access && acl->has_authn_access)
+ printf(" * = %s\n", access_string(all_access));
+
+ if (acl->has_anon_access
+ && (acl->anon_access & ~all_access) != svn_authz_none)
+ printf(" $anonymous = %s\n",
+ access_string(acl->anon_access));
+
+ if (acl->has_authn_access
+ && (acl->authn_access & ~all_access) != svn_authz_none)
+ printf(" $authenticated = %s\n",
+ access_string(acl->authn_access));
+
+ for (j = 0; j < acl->user_access->nelts; ++j)
+ {
+ authz_ace_t *ace = &APR_ARRAY_IDX(acl->user_access, j, authz_ace_t);
+ printf(" %c%s = %s\n",
+ (ace->inverted ? '~' : ' '),
+ ace->name, access_string(ace->access));
+ if (ace->members)
+ svn_hash_sets(groupdefs, ace->name, ace->members);
+ }
+ printf("\n\n");
+ }
+
+ printf("[groups]\n");
+ SVN_ERR(svn_iter_apr_hash(NULL, groupdefs,
+ print_group, NULL, pool));
+ printf("\n\n");
+
+ printf("[users]\n");
+ if (authz->has_anon_rights)
+ print_user_rights(NULL, NULL, 0, &authz->anon_rights, pool);
+ if (authz->has_authn_rights)
+ print_user_rights(NULL, NULL, 0, &authz->authn_rights, pool);
+ SVN_ERR(svn_iter_apr_hash(NULL, authz->user_rights,
+ print_user_rights, NULL, pool));
+ printf("\n\n");
+
+ return SVN_NO_ERROR;
+}
+
+typedef struct global_right_text_case_t
+{
+ const char *repos;
+ const char *user;
+ authz_rights_t rights;
+ svn_boolean_t found;
+} global_right_text_case_t;
+
+static svn_error_t *
+run_global_rights_tests(const char *contents,
+ const global_right_text_case_t *test_cases,
+ apr_pool_t *pool)
+{
+ svn_authz_t *authz;
+
+ svn_stringbuf_t *buffer = svn_stringbuf_create(contents, pool);
+ svn_stream_t *stream = svn_stream_from_stringbuf(buffer, pool);
+ SVN_ERR(svn_repos_authz_parse(&authz, stream, NULL, pool));
+
+ for (; test_cases->repos; ++test_cases)
+ {
+ authz_rights_t rights = { authz_access_write, authz_access_none };
+ svn_boolean_t found = svn_authz__get_global_rights(&rights, authz->full,
+ test_cases->user,
+ test_cases->repos);
+
+ printf("%s %s {%d %d} %d => {%d %d} %d\n",
+ test_cases->repos, test_cases->user,
+ test_cases->rights.min_access, test_cases->rights.max_access,
+ test_cases->found, rights.min_access, rights.max_access, found);
+ SVN_TEST_ASSERT(found == test_cases->found);
+ SVN_TEST_ASSERT(rights.min_access == test_cases->rights.min_access);
+ SVN_TEST_ASSERT(rights.max_access == test_cases->rights.max_access);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_global_rights(apr_pool_t *pool)
+{
+ const char* authz1 =
+ "[/public]" NL
+ "* = r" NL
+ "" NL
+ "[greek:/A]" NL
+ "userA = rw" NL
+ "" NL
+ "[repo:/A]" NL
+ "userA = r" NL
+ "" NL
+ "[repo:/B]" NL
+ "userA = rw" NL
+ "" NL
+ "[greek:/B]" NL
+ "userB = rw" NL;
+
+ const global_right_text_case_t test_cases1[] =
+ {
+ /* Everyone may get read access b/c there might be a "/public" path. */
+ { "", "", { authz_access_none, authz_access_read }, TRUE },
+ { "", "userA", { authz_access_none, authz_access_read }, TRUE },
+ { "", "userB", { authz_access_none, authz_access_read }, TRUE },
+ { "", "userC", { authz_access_none, authz_access_read }, TRUE },
+
+ /* Two users do even get write access on some paths in "greek".
+ * The root always defaults to n/a due to the default rule. */
+ { "greek", "", { authz_access_none, authz_access_read }, FALSE },
+ { "greek", "userA", { authz_access_none, authz_access_write }, TRUE },
+ { "greek", "userB", { authz_access_none, authz_access_write }, TRUE },
+ { "greek", "userC", { authz_access_none, authz_access_read }, FALSE },
+
+    /* One user has write access to some paths in "repo". */
+ { "repo", "", { authz_access_none, authz_access_read }, FALSE },
+ { "repo", "userA", { authz_access_none, authz_access_write }, TRUE },
+ { "repo", "userB", { authz_access_none, authz_access_read }, FALSE },
+ { "repo", "userC", { authz_access_none, authz_access_read }, FALSE },
+
+ /* For unknown repos, we default to the global settings. */
+ { "X", "", { authz_access_none, authz_access_read }, FALSE },
+ { "X", "userA", { authz_access_none, authz_access_read }, FALSE },
+ { "X", "userB", { authz_access_none, authz_access_read }, FALSE },
+ { "X", "userC", { authz_access_none, authz_access_read }, FALSE },
+
+ { NULL }
+ };
+
+ const char* authz2 =
+ "[/]" NL
+ "userA = r" NL
+ "" NL
+ "[/public]" NL
+ "userB = rw" NL
+ "" NL
+ "[repo:/]" NL
+ "userA = rw" NL;
+
+ const global_right_text_case_t test_cases2[] =
+ {
+ /* Everyone may get read access b/c there might be a "/public" path. */
+ { "", "", { authz_access_none, authz_access_none }, TRUE },
+ { "", "userA", { authz_access_none, authz_access_read }, TRUE },
+ { "", "userB", { authz_access_none, authz_access_write }, TRUE },
+ { "", "userC", { authz_access_none, authz_access_none }, TRUE },
+
+ /* Two users do even get write access on some paths in "greek".
+ * The root always defaults to n/a due to the default rule. */
+ { "greek", "", { authz_access_none, authz_access_none }, FALSE },
+ { "greek", "userA", { authz_access_none, authz_access_read }, FALSE },
+ { "greek", "userB", { authz_access_none, authz_access_write }, FALSE },
+ { "greek", "userC", { authz_access_none, authz_access_none }, FALSE },
+
+ { NULL }
+ };
+
+ const char* authz3 =
+ "[/]" NL
+ "userA = r" NL
+ "" NL
+ "[greek:/public]" NL
+ "userB = rw" NL
+ "" NL
+ "[repo:/users]" NL
+ "$authenticated = rw" NL;
+
+ const global_right_text_case_t test_cases3[] =
+ {
+ /* Everyone may get read access b/c there might be a "/public" path. */
+ { "", "", { authz_access_none, authz_access_none }, TRUE },
+ { "", "userA", { authz_access_none, authz_access_read }, TRUE },
+ { "", "userB", { authz_access_none, authz_access_none }, TRUE },
+ { "", "userC", { authz_access_none, authz_access_none }, TRUE },
+
+ /* Two users do even get write access on some paths in "greek".
+ * The root always defaults to n/a due to the default rule. */
+ { "greek", "", { authz_access_none, authz_access_none }, FALSE },
+ { "greek", "userA", { authz_access_none, authz_access_read }, FALSE },
+ { "greek", "userB", { authz_access_none, authz_access_write }, TRUE },
+ { "greek", "userC", { authz_access_none, authz_access_none }, FALSE },
+
+      /* All authenticated users get write access on some paths in "repo".
+       * The root always defaults to n/a due to the default rule. */
+ { "repo", "", { authz_access_none, authz_access_none }, FALSE },
+ { "repo", "userA", { authz_access_none, authz_access_write }, TRUE },
+ { "repo", "userB", { authz_access_none, authz_access_write }, TRUE },
+ { "repo", "userC", { authz_access_none, authz_access_write }, TRUE },
+
+ { NULL }
+ };
+
+ SVN_ERR(run_global_rights_tests(authz1, test_cases1, pool));
+ SVN_ERR(run_global_rights_tests(authz2, test_cases2, pool));
+ SVN_ERR(run_global_rights_tests(authz3, test_cases3, pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+issue_4741_groups(apr_pool_t *pool)
+{
+ const char rules[] =
+ "[groups]" NL
+ "g1 = userA" NL
+ "g2 = userB" NL
+ "g = @g1, @g2" NL
+ "" NL
+ "[/]" NL
+ "* =" NL
+ "@g = rw" NL
+ ;
+
+ svn_stringbuf_t *buf = svn_stringbuf_create(rules, pool);
+ svn_stream_t *stream = svn_stream_from_stringbuf(buf, pool);
+ svn_authz_t *authz;
+ svn_boolean_t access_granted;
+
+ SVN_ERR(svn_repos_authz_parse(&authz, stream, NULL, pool));
+
+ SVN_ERR(svn_repos_authz_check_access(authz, "repo", "/", "userA",
+ svn_authz_write, &access_granted,
+ pool));
+ SVN_TEST_ASSERT(access_granted == TRUE);
+
+ SVN_ERR(svn_repos_authz_check_access(authz, "repo", "/", "userB",
+ svn_authz_write, &access_granted,
+ pool));
+ SVN_TEST_ASSERT(access_granted == TRUE);
+
+ return SVN_NO_ERROR;
+}
+
+static int max_threads = 4;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_OPTS_PASS(test_authz_parse,
+ "test svn_authz__parse"),
+ SVN_TEST_PASS2(test_global_rights,
+ "test svn_authz__get_global_rights"),
+ SVN_TEST_PASS2(issue_4741_groups,
+ "issue 4741 groups"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_repos/authz.groups b/subversion/tests/libsvn_repos/authz.groups
new file mode 100644
index 0000000..006ae2b
--- /dev/null
+++ b/subversion/tests/libsvn_repos/authz.groups
@@ -0,0 +1,4 @@
+[groups]
+x = a
+y = b, @z
+z = c, @x
diff --git a/subversion/tests/libsvn_repos/authz.rules b/subversion/tests/libsvn_repos/authz.rules
new file mode 100644
index 0000000..fb56f49
--- /dev/null
+++ b/subversion/tests/libsvn_repos/authz.rules
@@ -0,0 +1,18 @@
+[/]
+~$anonymous = rw
+~$authenticated = r
+&x = r
+luser = rw
+
+[bloop:/]
+@x = r
+@y = rw
+
+[:glob:bloop:/*a\bc/def*/**/*/**/ghi?jkl/**/**/mno\/**]
+$anonymous =
+~luser =
+~@x = r
+luser = rw
+
+[aliases]
+x = luser
diff --git a/subversion/tests/libsvn_repos/dir-delta-editor.c b/subversion/tests/libsvn_repos/dir-delta-editor.c
new file mode 100644
index 0000000..b843a81
--- /dev/null
+++ b/subversion/tests/libsvn_repos/dir-delta-editor.c
@@ -0,0 +1,314 @@
+/*
+ * dir-delta-editor.c: a `dummy' editor implementation for testing
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* ==================================================================== */
+
+
+
+
+#include <stdio.h>
+
+#include <apr_pools.h>
+#include <apr_file_io.h>
+
+#define SVN_DEPRECATED
+
+#include "svn_types.h"
+#include "svn_error.h"
+#include "svn_path.h"
+#include "svn_delta.h"
+#include "svn_fs.h"
+
+#include "../svn_test.h"
+#include "dir-delta-editor.h"
+
+struct edit_baton
+{
+ svn_fs_t *fs;
+ svn_fs_root_t *txn_root;
+ const char *root_path;
+ apr_pool_t *pool;
+};
+
+struct dir_baton
+{
+ struct edit_baton *edit_baton;
+ const char *full_path;
+};
+
+
+struct file_baton
+{
+ struct edit_baton *edit_baton;
+ const char *path;
+};
+
+
+
+static svn_error_t *
+test_delete_entry(const char *path,
+ svn_revnum_t revision,
+ void *parent_baton,
+ apr_pool_t *pool)
+{
+ struct dir_baton *pb = parent_baton;
+
+ /* Construct the full path of this entry and delete it from the txn. */
+ return svn_fs_delete(pb->edit_baton->txn_root,
+ svn_path_join(pb->edit_baton->root_path, path, pool),
+ pool);
+}
+
+
+static svn_error_t *
+test_open_root(void *edit_baton,
+ svn_revnum_t base_revision,
+ apr_pool_t *dir_pool,
+ void **root_baton)
+{
+ struct dir_baton *db = apr_pcalloc(dir_pool, sizeof(*db));
+ struct edit_baton *eb = edit_baton;
+
+ db->full_path = eb->root_path;
+ db->edit_baton = edit_baton;
+
+ *root_baton = db;
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_open_directory(const char *path,
+ void *parent_baton,
+ svn_revnum_t base_revision,
+ apr_pool_t *dir_pool,
+ void **child_baton)
+{
+ struct dir_baton *pb = parent_baton;
+ struct edit_baton *eb = pb->edit_baton;
+ struct dir_baton *db = apr_pcalloc(dir_pool, sizeof(*db));
+ svn_fs_root_t *rev_root = NULL;
+
+ /* Construct the full path of the new directory */
+ db->full_path = svn_path_join(eb->root_path, path, eb->pool);
+ db->edit_baton = eb;
+
+ SVN_ERR(svn_fs_revision_root(&rev_root, eb->fs, base_revision, dir_pool));
+ SVN_ERR(svn_fs_revision_link(rev_root, eb->txn_root, db->full_path,
+ dir_pool));
+
+ *child_baton = db;
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_add_directory(const char *path,
+ void *parent_baton,
+ const char *copyfrom_path,
+ svn_revnum_t copyfrom_revision,
+ apr_pool_t *dir_pool,
+ void **child_baton)
+{
+ struct dir_baton *pb = parent_baton;
+ struct edit_baton *eb = pb->edit_baton;
+ struct dir_baton *db = apr_pcalloc(dir_pool, sizeof(*db));
+
+ /* Construct the full path of the new directory */
+ db->full_path = svn_path_join(eb->root_path, path, eb->pool);
+ db->edit_baton = eb;
+
+ if (copyfrom_path) /* add with history */
+ {
+ svn_fs_root_t *rev_root = NULL;
+
+ SVN_ERR(svn_fs_revision_root(&rev_root,
+ eb->fs,
+ copyfrom_revision,
+ dir_pool));
+
+ SVN_ERR(svn_fs_copy(rev_root,
+ copyfrom_path,
+ eb->txn_root,
+ db->full_path,
+ dir_pool));
+ }
+ else /* add without history */
+ SVN_ERR(svn_fs_make_dir(eb->txn_root, db->full_path, dir_pool));
+
+ *child_baton = db;
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_open_file(const char *path,
+ void *parent_baton,
+ svn_revnum_t base_revision,
+ apr_pool_t *file_pool,
+ void **file_baton)
+{
+ struct dir_baton *pb = parent_baton;
+ struct edit_baton *eb = pb->edit_baton;
+ struct file_baton *fb = apr_pcalloc(file_pool, sizeof(*fb));
+ svn_fs_root_t *rev_root = NULL;
+
+ /* Fill in the file baton. */
+ fb->path = svn_path_join(eb->root_path, path, eb->pool);
+ fb->edit_baton = eb;
+
+ SVN_ERR(svn_fs_revision_root(&rev_root, eb->fs, base_revision, file_pool));
+ SVN_ERR(svn_fs_revision_link(rev_root, eb->txn_root, fb->path, file_pool));
+
+ *file_baton = fb;
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_add_file(const char *path,
+ void *parent_baton,
+ const char *copyfrom_path,
+ svn_revnum_t copyfrom_revision,
+ apr_pool_t *file_pool,
+ void **file_baton)
+{
+ struct dir_baton *db = parent_baton;
+ struct edit_baton *eb = db->edit_baton;
+ struct file_baton *fb = apr_pcalloc(file_pool, sizeof(*fb));
+
+ /* Fill in the file baton. */
+ fb->path = svn_path_join(eb->root_path, path, eb->pool);
+ fb->edit_baton = eb;
+
+ if (copyfrom_path) /* add with history */
+ {
+ svn_fs_root_t *rev_root = NULL;
+
+ SVN_ERR(svn_fs_revision_root(&rev_root,
+ eb->fs,
+ copyfrom_revision,
+ file_pool));
+
+ SVN_ERR(svn_fs_copy(rev_root,
+ copyfrom_path,
+ eb->txn_root,
+ fb->path,
+ file_pool));
+ }
+ else /* add without history */
+ SVN_ERR(svn_fs_make_file(eb->txn_root, fb->path, file_pool));
+
+ *file_baton = fb;
+ return SVN_NO_ERROR;
+}
+
+
+
+static svn_error_t *
+test_apply_textdelta(void *file_baton,
+ const char *base_checksum,
+ apr_pool_t *pool,
+ svn_txdelta_window_handler_t *handler,
+ void **handler_baton)
+{
+ struct file_baton *fb = file_baton;
+
+ return svn_fs_apply_textdelta(handler, handler_baton,
+ fb->edit_baton->txn_root,
+ fb->path,
+ base_checksum,
+ NULL,
+ pool);
+}
+
+
+static svn_error_t *
+test_change_file_prop(void *file_baton,
+ const char *name, const svn_string_t *value,
+ apr_pool_t *pool)
+{
+ struct file_baton *fb = file_baton;
+
+ return svn_fs_change_node_prop(fb->edit_baton->txn_root,
+ fb->path, name, value, pool);
+}
+
+
+static svn_error_t *
+test_change_dir_prop(void *parent_baton,
+ const char *name, const svn_string_t *value,
+ apr_pool_t *pool)
+{
+ struct dir_baton *db = parent_baton;
+ struct edit_baton *eb = db->edit_baton;
+
+ /* Construct the full path of this entry and change the property. */
+ return svn_fs_change_node_prop(eb->txn_root, db->full_path,
+ name, value, pool);
+}
+
+
+/*---------------------------------------------------------------*/
+
+
+
+svn_error_t *
+dir_delta_get_editor(const svn_delta_editor_t **editor,
+ void **edit_baton,
+ svn_fs_t *fs,
+ svn_fs_root_t *txn_root,
+ const char *path,
+ apr_pool_t *pool)
+{
+ svn_delta_editor_t *my_editor;
+ struct edit_baton *my_edit_baton;
+
+ /* Wondering why we don't include test_close_directory,
+ test_close_file, test_absent_directory, and test_absent_file
+ here...? -kfogel, 3 Nov 2003 */
+
+ /* Set up the editor. */
+ my_editor = svn_delta_default_editor(pool);
+ my_editor->open_root = test_open_root;
+ my_editor->delete_entry = test_delete_entry;
+ my_editor->add_directory = test_add_directory;
+ my_editor->open_directory = test_open_directory;
+ my_editor->add_file = test_add_file;
+ my_editor->open_file = test_open_file;
+ my_editor->apply_textdelta = test_apply_textdelta;
+ my_editor->change_file_prop = test_change_file_prop;
+ my_editor->change_dir_prop = test_change_dir_prop;
+
+ /* Set up the edit baton. */
+ my_edit_baton = apr_pcalloc(pool, sizeof(*my_edit_baton));
+ my_edit_baton->root_path = apr_pstrdup(pool, path);
+ my_edit_baton->pool = pool;
+ my_edit_baton->fs = fs;
+ my_edit_baton->txn_root = txn_root;
+
+ *editor = my_editor;
+ *edit_baton = my_edit_baton;
+
+ return SVN_NO_ERROR;
+}
diff --git a/subversion/tests/libsvn_repos/dir-delta-editor.h b/subversion/tests/libsvn_repos/dir-delta-editor.h
new file mode 100644
index 0000000..5c498f8
--- /dev/null
+++ b/subversion/tests/libsvn_repos/dir-delta-editor.h
@@ -0,0 +1,68 @@
+/*
+ * dir-delta-editor.h: a `dummy' editor implementation for testing
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* ==================================================================== */
+
+
+
+#ifndef SVN_TEST__DIR_DELTA_EDITOR_H
+#define SVN_TEST__DIR_DELTA_EDITOR_H
+
+#include <stdio.h>
+
+#include <apr_pools.h>
+
+#include "svn_types.h"
+#include "svn_error.h"
+#include "svn_delta.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+/* Get an editor that will edit an FS transaction via the libsvn_fs API.
+ *
+ * Set *EDITOR and *EDIT_BATON to a new editor that edits the subtree at
+ * PATH of the existing, open transaction TXN_ROOT in filesystem FS.
+ *
+ * Note: Related but more complex functions in the regular API include
+ * svn_fs__editor_create_for() and svn_repos_get_commit_editor*().
+ *
+ * Note: The only connection with dir-deltas is that a test for dir-deltas
+ * was the first user of this editor.
+ */
+svn_error_t *
+dir_delta_get_editor(const svn_delta_editor_t **editor,
+ void **edit_baton,
+ svn_fs_t *fs,
+ svn_fs_root_t *txn_root,
+ const char *path,
+ apr_pool_t *pool);
+
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+#endif /* SVN_TEST__DIR_DELTA_EDITOR_H */
diff --git a/subversion/tests/libsvn_repos/dump-load-test.c b/subversion/tests/libsvn_repos/dump-load-test.c
new file mode 100644
index 0000000..6940c85
--- /dev/null
+++ b/subversion/tests/libsvn_repos/dump-load-test.c
@@ -0,0 +1,290 @@
+/* dump-load-test.c --- tests for dumping and loading repositories
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdlib.h>
+#include <string.h>
+#include <apr_pools.h>
+
+#include "svn_pools.h"
+#include "svn_error.h"
+#include "svn_fs.h"
+#include "svn_repos.h"
+#include "private/svn_repos_private.h"
+
+#include "../svn_test.h"
+#include "../svn_test_fs.h"
+
+
+
+/* Test dumping in the presence of the property PROP_NAME:PROP_VAL.
+ * Return the dumped data in *DUMP_DATA_P (if DUMP_DATA_P is not null).
+ * REPOS is an empty repository.
+ * See svn_repos_dump_fs3() for START_REV, END_REV, NOTIFY_FUNC, NOTIFY_BATON.
+ */
+static svn_error_t *
+test_dump_bad_props(svn_stringbuf_t **dump_data_p,
+ svn_repos_t *repos,
+ const char *prop_name,
+ const svn_string_t *prop_val,
+ svn_revnum_t start_rev,
+ svn_revnum_t end_rev,
+ svn_repos_notify_func_t notify_func,
+ void *notify_baton,
+ apr_pool_t *pool)
+{
+ const char *test_path = "/bar";
+ svn_fs_t *fs = svn_repos_fs(repos);
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ svn_revnum_t youngest_rev = 0;
+ svn_stringbuf_t *dump_data = svn_stringbuf_create_empty(pool);
+ svn_stream_t *stream = svn_stream_from_stringbuf(dump_data, pool);
+ const char *expected_str;
+
+ /* Revision 1: Any commit will do, here */
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, youngest_rev, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_dir(txn_root, test_path , pool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+ /* Revision 2: Add the bad property */
+ SVN_ERR(svn_fs_begin_txn2(&txn, fs, youngest_rev, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_change_node_prop(txn_root, test_path , prop_name, prop_val,
+ pool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+ /* Test that a dump completes without error. */
+ SVN_ERR(svn_repos_dump_fs4(repos, stream, start_rev, end_rev,
+ FALSE, FALSE, TRUE, TRUE,
+ notify_func, notify_baton,
+ NULL, NULL, NULL, NULL,
+ pool));
+ svn_stream_close(stream);
+
+ /* Check that the property appears in the dump data */
+ expected_str = apr_psprintf(pool, "K %d\n%s\n"
+ "V %d\n%s\n"
+ "PROPS-END\n",
+ (int)strlen(prop_name), prop_name,
+ (int)prop_val->len, prop_val->data);
+ SVN_TEST_ASSERT(strstr(dump_data->data, expected_str));
+
+ if (dump_data_p)
+ *dump_data_p = dump_data;
+ return SVN_NO_ERROR;
+}
+
+/* Test loading in the presence of the property PROP_NAME:PROP_VAL.
+ * Load data from DUMP_DATA.
+ * REPOS is an empty repository.
+ */
+static svn_error_t *
+test_load_bad_props(svn_stringbuf_t *dump_data,
+ svn_repos_t *repos,
+ const char *prop_name,
+ const svn_string_t *prop_val,
+ const char *parent_fspath,
+ svn_boolean_t validate_props,
+ svn_repos_notify_func_t notify_func,
+ void *notify_baton,
+ apr_pool_t *pool)
+{
+ const char *test_path = apr_psprintf(pool, "%s%s",
+ parent_fspath ? parent_fspath : "",
+ "/bar");
+ svn_stream_t *stream = svn_stream_from_stringbuf(dump_data, pool);
+ svn_fs_t *fs;
+ svn_fs_root_t *rev_root;
+ svn_revnum_t youngest_rev;
+ svn_string_t *loaded_prop_val;
+
+ SVN_ERR(svn_repos_load_fs6(repos, stream,
+ SVN_INVALID_REVNUM, SVN_INVALID_REVNUM,
+ svn_repos_load_uuid_default,
+ parent_fspath,
+ FALSE, FALSE, /*use_*_commit_hook*/
+ validate_props,
+ FALSE /*ignore_dates*/,
+ FALSE /*normalize_props*/,
+ notify_func, notify_baton,
+ NULL, NULL, /*cancellation*/
+ pool));
+ svn_stream_close(stream);
+
+ /* Check the loaded property */
+ fs = svn_repos_fs(repos);
+ SVN_ERR(svn_fs_youngest_rev(&youngest_rev, fs, pool));
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, pool));
+ SVN_ERR(svn_fs_node_prop(&loaded_prop_val,
+ rev_root, test_path, prop_name, pool));
+ SVN_TEST_ASSERT(svn_string_compare(loaded_prop_val, prop_val));
+ return SVN_NO_ERROR;
+}
+
+/* Notification receiver for test_dump_r0_mergeinfo(). This does not
+ need to do anything, it just needs to exist.
+ */
+static void
+dump_r0_mergeinfo_notifier(void *baton,
+ const svn_repos_notify_t *notify,
+ apr_pool_t *scratch_pool)
+{
+}
+
+/* Regression test for the 'dump' part of issue #4476 "Mergeinfo
+ containing r0 makes svnsync and svnadmin dump fail". */
+static svn_error_t *
+test_dump_r0_mergeinfo(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ const char *prop_name = "svn:mergeinfo";
+ const svn_string_t *bad_mergeinfo = svn_string_create("/foo:0", pool);
+ svn_repos_t *repos;
+
+ SVN_ERR(svn_test__create_repos(&repos, "test-repo-dump-r0-mergeinfo",
+ opts, pool));
+ /* In order to exercise the
+ functionality under test -- that is, in order for the dump to try to
+ parse the mergeinfo it is dumping -- the dump must start from a
+ revision greater than 1 and must take a notification callback. */
+ SVN_ERR(test_dump_bad_props(NULL, repos,
+ prop_name, bad_mergeinfo,
+ 2, SVN_INVALID_REVNUM,
+ dump_r0_mergeinfo_notifier, NULL,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+static void
+load_r0_mergeinfo_notifier(void *baton,
+ const svn_repos_notify_t *notify,
+ apr_pool_t *scratch_pool)
+{
+ svn_boolean_t *had_mergeinfo_warning = baton;
+
+ if (notify->action == svn_repos_notify_warning)
+ {
+ if (notify->warning == svn_repos_notify_warning_invalid_mergeinfo)
+ {
+ *had_mergeinfo_warning = TRUE;
+ }
+ }
+}
+
+/* Regression test for the 'load' part of issue #4476 "Mergeinfo
+ * containing r0 makes svnsync and svnadmin dump fail".
+ *
+ * Bad mergeinfo should not prevent loading a backup, at least when we do not
+ * require mergeinfo revision numbers or paths to be adjusted during loading.
+ */
+static svn_error_t *
+test_load_r0_mergeinfo(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ const char *prop_name = "svn:mergeinfo";
+ const svn_string_t *prop_val = svn_string_create("/foo:0", pool);
+ svn_stringbuf_t *dump_data = svn_stringbuf_create_empty(pool);
+
+ /* Produce a dump file containing bad mergeinfo */
+ {
+ svn_repos_t *repos;
+
+ SVN_ERR(svn_test__create_repos(&repos, "test-repo-load-r0-mi-1",
+ opts, pool));
+ SVN_ERR(test_dump_bad_props(&dump_data, repos,
+ prop_name, prop_val,
+ SVN_INVALID_REVNUM, SVN_INVALID_REVNUM,
+ NULL, NULL, pool));
+ }
+
+ /* Test loading without validating properties: should warn and succeed */
+ {
+ svn_repos_t *repos;
+ svn_boolean_t had_mergeinfo_warning = FALSE;
+
+ SVN_ERR(svn_test__create_repos(&repos, "test-repo-load-r0-mi-2",
+ opts, pool));
+
+ /* Without changing revision numbers or paths */
+ SVN_ERR(test_load_bad_props(dump_data, repos,
+ prop_name, prop_val,
+ NULL /*parent_dir*/, FALSE /*validate_props*/,
+ load_r0_mergeinfo_notifier, &had_mergeinfo_warning,
+ pool));
+ SVN_TEST_ASSERT(had_mergeinfo_warning);
+
+ /* With changing revision numbers and/or paths (by loading the same data
+ again, on top of existing revisions, into subdirectory 'bar') */
+ had_mergeinfo_warning = FALSE;
+ SVN_ERR(test_load_bad_props(dump_data, repos,
+ prop_name, prop_val,
+ "/bar", FALSE /*validate_props*/,
+ load_r0_mergeinfo_notifier, &had_mergeinfo_warning,
+ pool));
+ SVN_TEST_ASSERT(had_mergeinfo_warning);
+ }
+
+ /* Test loading with validating properties: should return an error */
+ {
+ svn_repos_t *repos;
+
+ SVN_ERR(svn_test__create_repos(&repos, "test-repo-load-r0-mi-3",
+ opts, pool));
+
+ /* Without changing revision numbers or paths */
+ SVN_TEST_ASSERT_ANY_ERROR(test_load_bad_props(dump_data, repos,
+ prop_name, prop_val,
+ NULL /*parent_dir*/, TRUE /*validate_props*/,
+ NULL, NULL,
+ pool));
+
+ /* With changing revision numbers and/or paths (by loading the same data
+ again, on top of existing revisions, into subdirectory 'bar') */
+ SVN_TEST_ASSERT_ANY_ERROR(test_load_bad_props(dump_data, repos,
+ prop_name, prop_val,
+ "/bar", TRUE /*validate_props*/,
+ NULL, NULL,
+ pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* The test table. */
+
+static int max_threads = 4;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_OPTS_PASS(test_dump_r0_mergeinfo,
+ "test dumping with r0 mergeinfo"),
+ SVN_TEST_OPTS_PASS(test_load_r0_mergeinfo,
+ "test loading with r0 mergeinfo"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_repos/repos-test.c b/subversion/tests/libsvn_repos/repos-test.c
new file mode 100644
index 0000000..8d5472c
--- /dev/null
+++ b/subversion/tests/libsvn_repos/repos-test.c
@@ -0,0 +1,4543 @@
+/* repos-test.c --- tests for the repository library (libsvn_repos)
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdlib.h>
+#include <string.h>
+
+#include <apr_pools.h>
+#include <apr_time.h>
+
+#include "../svn_test.h"
+
+#include "svn_pools.h"
+#include "svn_error.h"
+#include "svn_fs.h"
+#include "svn_hash.h"
+#include "svn_repos.h"
+#include "svn_path.h"
+#include "svn_delta.h"
+#include "svn_config.h"
+#include "svn_props.h"
+#include "svn_sorts.h"
+#include "svn_version.h"
+#include "private/svn_repos_private.h"
+#include "private/svn_dep_compat.h"
+
+/* be able to look into svn_config_t */
+#include "../../libsvn_subr/config_impl.h"
+
+#include "../svn_test_fs.h"
+
+#include "dir-delta-editor.h"
+
+/* Used to terminate lines in large multi-line string literals. */
+#define NL APR_EOL_STR
+
+/* Compare strings, like strcmp but either or both may be NULL which
+ * compares equal to NULL and not equal to any non-NULL string. */
+static int
+strcmp_null(const char *s1, const char *s2)
+{
+ if (s1 && s2)
+ return strcmp(s1, s2);
+ else if (s1 || s2)
+ return 1;
+ else
+ return 0;
+}
+
+
+
+static svn_error_t *
+dir_deltas(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_repos_t *repos;
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *revision_root;
+ svn_revnum_t youngest_rev;
+ void *edit_baton;
+ const svn_delta_editor_t *editor;
+ svn_test__tree_t expected_trees[8];
+ int revision_count = 0;
+ int i, j;
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ /* The Test Plan
+
+ The filesystem function svn_repos_dir_delta2 exists to drive an
+ editor in such a way that given a source tree S and a target tree
+ T, that editor manipulation will transform S into T, insomuch as
+ directories and files, and their contents and properties, go.
+ The general notion of the test plan will be to create pairs of
+ trees (S, T), and an editor that edits a copy of tree S, run them
+ through svn_repos_dir_delta2, and then verify that the edited copy of
+ S is identical to T when it is all said and done. */
+
+ /* Create a filesystem and repository. */
+ SVN_ERR(svn_test__create_repos(&repos, "test-repo-dir-deltas",
+ opts, pool));
+ fs = svn_repos_fs(repos);
+ expected_trees[revision_count].num_entries = 0;
+ expected_trees[revision_count++].entries = 0;
+
+ /* Prepare a txn to receive the greek tree. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, subpool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+ /***********************************************************************/
+ /* REVISION 1 */
+ /***********************************************************************/
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "iota", "This is the file 'iota'.\n" },
+ { "A", 0 },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/C", 0 },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/H", 0 },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" },
+ { "A/D/H/omega", "This is the file 'omega'.\n" }
+ };
+ expected_trees[revision_count].entries = expected_entries;
+ expected_trees[revision_count].num_entries = 20;
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs,
+ youngest_rev, subpool));
+ SVN_ERR(svn_test__validate_tree
+ (revision_root, expected_trees[revision_count].entries,
+ expected_trees[revision_count].num_entries, subpool));
+ revision_count++;
+ }
+ svn_pool_clear(subpool);
+
+ /* Make a new txn based on the youngest revision, make some changes,
+ and commit those changes (which makes a new youngest
+ revision). */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ {
+ static svn_test__txn_script_command_t script_entries[] = {
+ { 'a', "A/delta", "This is the file 'delta'.\n" },
+ { 'a', "A/epsilon", "This is the file 'epsilon'.\n" },
+ { 'a', "A/B/Z", 0 },
+ { 'a', "A/B/Z/zeta", "This is the file 'zeta'.\n" },
+ { 'd', "A/C", 0 },
+ { 'd', "A/mu", "" },
+ { 'd', "A/D/G/tau", "" },
+ { 'd', "A/D/H/omega", "" },
+ { 'e', "iota", "Changed file 'iota'.\n" },
+ { 'e', "A/D/G/rho", "Changed file 'rho'.\n" }
+ };
+ SVN_ERR(svn_test__txn_script_exec(txn_root, script_entries, 10,
+ subpool));
+ }
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+ /***********************************************************************/
+ /* REVISION 2 */
+ /***********************************************************************/
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "iota", "Changed file 'iota'.\n" },
+ { "A", 0 },
+ { "A/delta", "This is the file 'delta'.\n" },
+ { "A/epsilon", "This is the file 'epsilon'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/B/Z", 0 },
+ { "A/B/Z/zeta", "This is the file 'zeta'.\n" },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "Changed file 'rho'.\n" },
+ { "A/D/H", 0 },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" }
+ };
+ expected_trees[revision_count].entries = expected_entries;
+ expected_trees[revision_count].num_entries = 20;
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs,
+ youngest_rev, subpool));
+ SVN_ERR(svn_test__validate_tree
+ (revision_root, expected_trees[revision_count].entries,
+ expected_trees[revision_count].num_entries, subpool));
+ revision_count++;
+ }
+ svn_pool_clear(subpool);
+
+ /* Make a new txn based on the youngest revision, make some changes,
+ and commit those changes (which makes a new youngest
+ revision). */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ {
+ static svn_test__txn_script_command_t script_entries[] = {
+ { 'a', "A/mu", "Re-added file 'mu'.\n" },
+ { 'a', "A/D/H/omega", 0 }, /* re-add omega as directory! */
+ { 'd', "iota", "" },
+ { 'e', "A/delta", "This is the file 'delta'.\nLine 2.\n" }
+ };
+ SVN_ERR(svn_test__txn_script_exec(txn_root, script_entries, 4, subpool));
+ }
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+ /***********************************************************************/
+ /* REVISION 3 */
+ /***********************************************************************/
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "A", 0 },
+ { "A/delta", "This is the file 'delta'.\nLine 2.\n" },
+ { "A/epsilon", "This is the file 'epsilon'.\n" },
+ { "A/mu", "Re-added file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/B/Z", 0 },
+ { "A/B/Z/zeta", "This is the file 'zeta'.\n" },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "Changed file 'rho'.\n" },
+ { "A/D/H", 0 },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" },
+ { "A/D/H/omega", 0 }
+ };
+ expected_trees[revision_count].entries = expected_entries;
+ expected_trees[revision_count].num_entries = 21;
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs,
+ youngest_rev, subpool));
+ SVN_ERR(svn_test__validate_tree
+ (revision_root, expected_trees[revision_count].entries,
+ expected_trees[revision_count].num_entries, subpool));
+ revision_count++;
+ }
+ svn_pool_clear(subpool);
+
+ /* Make a new txn based on the youngest revision, make some changes,
+ and commit those changes (which makes a new youngest
+ revision). */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_copy(revision_root, "A/D/G",
+ txn_root, "A/D/G2",
+ subpool));
+ SVN_ERR(svn_fs_copy(revision_root, "A/epsilon",
+ txn_root, "A/B/epsilon",
+ subpool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+ /***********************************************************************/
+ /* REVISION 4 */
+ /***********************************************************************/
+ {
+ static svn_test__tree_entry_t expected_entries[] = {
+ /* path, contents (0 = dir) */
+ { "A", 0 },
+ { "A/delta", "This is the file 'delta'.\nLine 2.\n" },
+ { "A/epsilon", "This is the file 'epsilon'.\n" },
+ { "A/mu", "Re-added file 'mu'.\n" },
+ { "A/B", 0 },
+ { "A/B/epsilon", "This is the file 'epsilon'.\n" },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", 0 },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", 0 },
+ { "A/B/Z", 0 },
+ { "A/B/Z/zeta", "This is the file 'zeta'.\n" },
+ { "A/D", 0 },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", 0 },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "Changed file 'rho'.\n" },
+ { "A/D/G2", 0 },
+ { "A/D/G2/pi", "This is the file 'pi'.\n" },
+ { "A/D/G2/rho", "Changed file 'rho'.\n" },
+ { "A/D/H", 0 },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" },
+ { "A/D/H/omega", 0 }
+ };
+ expected_trees[revision_count].entries = expected_entries;
+ expected_trees[revision_count].num_entries = 25;
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs,
+ youngest_rev, pool));
+ SVN_ERR(svn_test__validate_tree
+ (revision_root, expected_trees[revision_count].entries,
+ expected_trees[revision_count].num_entries, subpool));
+ revision_count++;
+ }
+ svn_pool_clear(subpool);
+
+ /* THE BIG IDEA: Now that we have a collection of revisions, let's
+ first make sure that given any two revisions, we can get the
+ right delta between them. We'll do this by selecting our two
+ revisions, R1 and R2, basing a transaction off R1, deltafying the
+ txn with respect to R2, and then making sure our final txn looks
+ exactly like R2. This should work regardless of the
+ chronological order in which R1 and R2 were created. */
+ for (i = 0; i < revision_count; i++)
+ {
+ for (j = 0; j < revision_count; j++)
+ {
+ /* Prepare a txn that will receive the changes from
+ svn_repos_dir_delta2 */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, i, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+
+ /* Get the editor that will be modifying our transaction. */
+ SVN_ERR(dir_delta_get_editor(&editor,
+ &edit_baton,
+ fs,
+ txn_root,
+ "",
+ subpool));
+
+ /* Here's the kicker...do the directory delta. */
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs, j, subpool));
+ SVN_ERR(svn_repos_dir_delta2(txn_root,
+ "",
+ "",
+ revision_root,
+ "",
+ editor,
+ edit_baton,
+ NULL,
+ NULL,
+ TRUE,
+ svn_depth_infinity,
+ FALSE,
+ FALSE,
+ subpool));
+
+ /* Hopefully at this point our transaction has been modified
+ to look exactly like our latest revision. We'll check
+ that. */
+ SVN_ERR(svn_test__validate_tree
+ (txn_root, expected_trees[j].entries,
+ expected_trees[j].num_entries, subpool));
+
+ /* We don't really want to do anything with this
+ transaction...so we'll abort it (good for software, bad
+ bad bad for society). */
+ svn_error_clear(svn_fs_abort_txn(txn, subpool));
+ svn_pool_clear(subpool);
+ }
+ }
+
+ svn_pool_destroy(subpool);
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Regression test for the svn_repos node-tree editor: copy a
+ directory and delete paths underneath the copy in the same commit,
+ then verify the generated svn_repos_node_t tree records both
+ deletions. Implements svn_test_driver2_t; OPTS selects the FS
+ back-end and POOL is used for all long-lived allocation. */
+static svn_error_t *
+node_tree_delete_under_copy(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_repos_t *repos;
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *revision_root, *revision_2_root;
+ svn_revnum_t youngest_rev;
+ void *edit_baton;
+ const svn_delta_editor_t *editor;
+ svn_repos_node_t *tree;
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ /* Create a filesystem and repository. */
+ SVN_ERR(svn_test__create_repos(&repos, "test-repo-del-under-copy",
+ opts, pool));
+ fs = svn_repos_fs(repos);
+
+ /* Prepare a txn to receive the greek tree. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+
+ /* Create and commit the greek tree (revision 1). */
+ SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+ /* Now, commit again, this time after copying a directory, and then
+ deleting some paths under that directory. */
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs, youngest_rev, pool));
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_copy(revision_root, "A", txn_root, "Z", pool));
+ SVN_ERR(svn_fs_delete(txn_root, "Z/D/G/rho", pool));
+ SVN_ERR(svn_fs_delete(txn_root, "Z/D/H", pool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+ /* Now, we run the node_tree editor code, and see that a) it doesn't
+ bomb out, and b) that our nodes are all good. */
+ SVN_ERR(svn_fs_revision_root(&revision_2_root, fs, youngest_rev, pool));
+ SVN_ERR(svn_repos_node_editor(&editor, &edit_baton, repos,
+ revision_root, revision_2_root,
+ pool, subpool));
+ SVN_ERR(svn_repos_replay2(revision_2_root, "", SVN_INVALID_REVNUM, FALSE,
+ editor, edit_baton, NULL, NULL, subpool));
+
+ /* Get the root of the generated tree, and cleanup our mess.
+ NOTE(review): TREE is dereferenced after SUBPOOL is destroyed, so
+ the node tree must live in POOL -- confirm against the
+ svn_repos_node_editor contract. */
+ tree = svn_repos_node_from_baton(edit_baton);
+ svn_pool_destroy(subpool);
+
+ /* See that we got what we expected (fortunately, svn_repos_replay
+ drives editor paths in a predictable fashion!). */
+
+ /* First the shape of the tree: one child chain /Z/D/G/rho plus a
+ sibling /Z/D/H. */
+ if (! (tree /* / */
+ && tree->child /* /Z */
+ && tree->child->child /* /Z/D */
+ && tree->child->child->child /* /Z/D/G */
+ && tree->child->child->child->child /* /Z/D/G/rho */
+ && tree->child->child->child->sibling)) /* /Z/D/H */
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Generated node tree is bogus.");
+
+ /* ...then the names, node kinds and 'D'(elete) actions. */
+ if (! ((strcmp(tree->name, "") == 0)
+ && (strcmp(tree->child->name, "Z") == 0)
+ && (strcmp(tree->child->child->name, "D") == 0)
+ && (strcmp(tree->child->child->child->name, "G") == 0)
+ && ((strcmp(tree->child->child->child->child->name, "rho") == 0)
+ && (tree->child->child->child->child->kind == svn_node_file)
+ && (tree->child->child->child->child->action == 'D'))
+ && ((strcmp(tree->child->child->child->sibling->name, "H") == 0)
+ && (tree->child->child->child->sibling->kind == svn_node_dir)
+ && (tree->child->child->child->sibling->action == 'D'))))
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Generated node tree is bogus.");
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Helper for revisions_changed(). */
+/* Helper for revisions_changed(). Return a string, allocated in
+ POOL, of the form "Got: { R R ... } Expected: { R R ... }" listing
+ the revisions in REVS_GOT (which may be NULL) followed by the
+ NUM_REVS_EXPECTED revisions in REVS_EXPECTED. Used to build
+ human-readable failure messages. */
+static const char *
+print_chrevs(const apr_array_header_t *revs_got,
+ int num_revs_expected,
+ const svn_revnum_t *revs_expected,
+ apr_pool_t *pool)
+{
+ int i;
+ const char *outstr;
+ svn_revnum_t rev;
+
+ outstr = apr_psprintf(pool, "Got: { ");
+ if (revs_got)
+ {
+ for (i = 0; i < revs_got->nelts; i++)
+ {
+ rev = APR_ARRAY_IDX(revs_got, i, svn_revnum_t);
+ outstr = apr_pstrcat(pool,
+ outstr,
+ apr_psprintf(pool, "%ld ", rev),
+ SVN_VA_NULL);
+ }
+ }
+ outstr = apr_pstrcat(pool, outstr, "} Expected: { ", SVN_VA_NULL);
+ for (i = 0; i < num_revs_expected; i++)
+ {
+ outstr = apr_pstrcat(pool,
+ outstr,
+ apr_psprintf(pool, "%ld ",
+ revs_expected[i]),
+ SVN_VA_NULL);
+ }
+ return apr_pstrcat(pool, outstr, "}", SVN_VA_NULL);
+}
+
+
+/* Implements the svn_repos_history_func_t interface. Append each
+ reported history REVISION to the apr_array_header_t of
+ svn_revnum_t which is BATON. PATH and POOL are unused. */
+static svn_error_t *
+history_to_revs_array(void *baton,
+ const char *path,
+ svn_revnum_t revision,
+ apr_pool_t *pool)
+{
+ apr_array_header_t *revs_array = baton;
+ APR_ARRAY_PUSH(revs_array, svn_revnum_t) = revision;
+ return SVN_NO_ERROR;
+}
+
+/* One expected-result row for revisions_changed(): the revisions in
+ which PATH changed, listed newest-first (matching the order in
+ which they are compared against svn_repos_history() output). */
+struct revisions_changed_results
+{
+ const char *path; /* repository path under test */
+ int num_revs; /* number of valid entries in revs_changed */
+ svn_revnum_t revs_changed[11]; /* expected revisions, newest first */
+};
+
+
+/* Build a ten-revision history over a greek tree (content edits, a
+ move of A/D to A/Z and back, and a copy of A/D/G to A/D/Q), then
+ verify that svn_repos_history() reports exactly the expected
+ changed-revision set for every path present in HEAD.
+ Implements svn_test_driver2_t. */
+static svn_error_t *
+revisions_changed(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ apr_pool_t *spool = svn_pool_create(pool);
+ svn_repos_t *repos;
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *rev_root;
+ svn_revnum_t youngest_rev = 0;
+
+ /* Create a filesystem and repository. */
+ SVN_ERR(svn_test__create_repos(&repos, "test-repo-revisions-changed",
+ opts, pool));
+ fs = svn_repos_fs(repos);
+
+ /*** Testing Algorithm ***
+
+ 1. Create a greek tree in revision 1.
+ 2. Make a series of new revisions, changing a file here and file
+ there.
+ 3. Loop over each path in each revision, verifying that we get
+ the right revisions-changed array back from the filesystem.
+ */
+
+ /* Create the greek tree in revision 1. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, spool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, spool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(spool);
+
+ /* Revision 2 - mu, alpha, omega */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/mu", "2", spool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/B/E/alpha", "2", spool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/H/omega", "2", spool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, spool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(spool);
+
+ /* Revision 3 - iota, lambda, psi, omega */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "iota", "3", spool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/B/lambda", "3", spool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/H/psi", "3", spool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/H/omega", "3", spool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, spool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(spool);
+
+ /* Revision 4 - iota, beta, gamma, pi, rho */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "iota", "4", spool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/B/E/beta", "4", spool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/gamma", "4", spool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/G/pi", "4", spool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/G/rho", "4", spool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, spool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(spool);
+
+ /* Revision 5 - mu, alpha, tau, chi */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/mu", "5", spool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/B/E/alpha", "5", spool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/G/tau", "5", spool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/H/chi", "5", spool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, spool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(spool);
+
+ /* Revision 6 - move A/D to A/Z (copy + delete) */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_copy(rev_root, "A/D", txn_root, "A/Z", spool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/D", spool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, spool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(spool);
+
+ /* Revision 7 - edit A/Z/G/pi */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/Z/G/pi", "7", spool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, spool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(spool);
+
+ /* Revision 8 - move A/Z back to A/D, edit iota */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_copy(rev_root, "A/Z", txn_root, "A/D", spool));
+ SVN_ERR(svn_fs_delete(txn_root, "A/Z", spool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "iota", "8", spool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, spool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(spool);
+
+ /* Revision 9 - copy A/D/G to A/D/Q */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_copy(rev_root, "A/D/G", txn_root, "A/D/Q", spool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, spool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(spool);
+
+ /* Revision 10 - edit A/D/Q/pi and A/D/Q/rho */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, spool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, spool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/Q/pi", "10", spool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/Q/rho", "10", spool));
+ SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn, spool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(spool);
+
+ /* Now, it's time to verify our results. */
+ {
+ int j;
+ /* Number, and list of, changed revisions for each path. Note
+ that for now, bubble-up in directories causes the directory to
+ appear changed though no entries were added or removed, and no
+ property mods occurred. Also note that this matrix represents
+ only the final state of the paths existing in HEAD of the
+ repository.
+
+ Notice for each revision, you can glance down that revision's
+ column in this table and see all the paths modified directly or
+ via bubble-up. */
+ static const struct revisions_changed_results test_data[25] = {
+ /* path, num, revisions changed... */
+ { "", 11, { 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0 } },
+ { "iota", 4, { 8, 4, 3, 1 } },
+ { "A", 10, { 10, 9, 8, 7, 6, 5, 4, 3, 2, 1 } },
+ { "A/mu", 3, { 5, 2, 1 } },
+ { "A/B", 5, { 5, 4, 3, 2, 1 } },
+ { "A/B/lambda", 2, { 3, 1 } },
+ { "A/B/E", 4, { 5, 4, 2, 1 } },
+ { "A/B/E/alpha", 3, { 5, 2, 1 } },
+ { "A/B/E/beta", 2, { 4, 1 } },
+ { "A/B/F", 1, { 1 } },
+ { "A/C", 1, { 1 } },
+ { "A/D", 10, { 10, 9, 8, 7, 6, 5, 4, 3, 2, 1 } },
+ { "A/D/gamma", 4, { 8, 6, 4, 1 } },
+ { "A/D/G", 6, { 8, 7, 6, 5, 4, 1 } },
+ { "A/D/G/pi", 5, { 8, 7, 6, 4, 1 } },
+ { "A/D/G/rho", 4, { 8, 6, 4, 1 } },
+ { "A/D/G/tau", 4, { 8, 6, 5, 1 } },
+ { "A/D/Q", 8, { 10, 9, 8, 7, 6, 5, 4, 1 } },
+ { "A/D/Q/pi", 7, { 10, 9, 8, 7, 6, 4, 1 } },
+ { "A/D/Q/rho", 6, { 10, 9, 8, 6, 4, 1 } },
+ { "A/D/Q/tau", 5, { 9, 8, 6, 5, 1 } },
+ { "A/D/H", 6, { 8, 6, 5, 3, 2, 1 } },
+ { "A/D/H/chi", 4, { 8, 6, 5, 1 } },
+ { "A/D/H/psi", 4, { 8, 6, 3, 1 } },
+ { "A/D/H/omega", 5, { 8, 6, 3, 2, 1 } }
+ };
+
+ /* Now, for each path in the revision, get its changed-revisions
+ array and compare the array to the static results above.
+ (The loop bound 25 matches the size of TEST_DATA.) */
+ for (j = 0; j < 25; j++)
+ {
+ int i;
+ const char *path = test_data[j].path;
+ int num_revs = test_data[j].num_revs;
+ const svn_revnum_t *revs_changed = test_data[j].revs_changed;
+ apr_array_header_t *revs = apr_array_make(spool, 10,
+ sizeof(svn_revnum_t));
+
+ SVN_ERR(svn_repos_history(fs, path, history_to_revs_array, revs,
+ 0, youngest_rev, TRUE, spool));
+
+ /* Are we at least looking at the right number of returned
+ revisions? */
+ if ((! revs) || (revs->nelts != num_revs))
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "Changed revisions differ from expected for '%s'\n%s",
+ path, print_chrevs(revs, num_revs, revs_changed, spool));
+
+ /* Do the revisions lists match up exactly? */
+ for (i = 0; i < num_revs; i++)
+ {
+ svn_revnum_t rev = APR_ARRAY_IDX(revs, i, svn_revnum_t);
+ if (rev != revs_changed[i])
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "Changed revisions differ from expected for '%s'\n%s",
+ path, print_chrevs(revs, num_revs, revs_changed, spool));
+ }
+
+ /* Clear the per-iteration subpool. */
+ svn_pool_clear(spool);
+ }
+ }
+
+ /* Destroy the subpool. */
+ svn_pool_destroy(spool);
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* A (revision, path) pair for the node-location tests. Arrays of
+ these are terminated by an entry whose rev is 0. */
+struct locations_info
+{
+ svn_revnum_t rev; /* revision number; 0 terminates an array */
+ const char *path; /* expected location of the node in REV */
+};
+
+/* Check that LOCATIONS (a hash mapping svn_revnum_t * keys to
+ const char * paths, as returned by svn_repos_trace_node_locations)
+ contains everything in INFO and nothing more. INFO is an array
+ terminated by an entry with rev == 0. Return an
+ SVN_ERR_TEST_FAILED error describing the first mismatch, or
+ SVN_NO_ERROR when the two agree exactly. */
+static svn_error_t *
+check_locations_info(apr_hash_t *locations, const struct locations_info *info)
+{
+ unsigned int i;
+ for (i = 0; info->rev != 0; ++i, ++info)
+ {
+ const char *p = apr_hash_get(locations, &info->rev, sizeof(info->rev));
+ if (!p)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Missing path for revision %ld", info->rev);
+ /* Fixed typo in the error message: "Pth" -> "Path". */
+ if (strcmp(p, info->path) != 0)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Path mismatch for rev %ld", info->rev);
+ }
+
+ /* I counts the entries we matched; any surplus hash entries are
+ locations the caller did not expect. */
+ if (apr_hash_count(locations) > i)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Returned locations contain too many elements.");
+
+ return SVN_NO_ERROR;
+}
+
+/* Check that all locations in INFO exist in FS for PATH and
+ PEG_REVISION, and that no extra locations are reported. INFO is
+ an array terminated by an entry with rev == 0; POOL is used for
+ all temporary allocation. */
+static svn_error_t *
+check_locations(svn_fs_t *fs, struct locations_info *info,
+ const char *path, svn_revnum_t peg_revision,
+ apr_pool_t *pool)
+{
+ apr_array_header_t *a = apr_array_make(pool, 0, sizeof(svn_revnum_t));
+ apr_hash_t *h;
+ struct locations_info *iter;
+
+ /* Collect the revisions we want traced. */
+ for (iter = info; iter->rev != 0; ++iter)
+ APR_ARRAY_PUSH(a, svn_revnum_t) = iter->rev;
+
+ SVN_ERR(svn_repos_trace_node_locations(fs, &h, path, peg_revision, a,
+ NULL, NULL, pool));
+ SVN_ERR(check_locations_info(h, info));
+
+ return SVN_NO_ERROR;
+}
+
+/* Test svn_repos_trace_node_locations() across a simple file copy,
+ with and without a leading slash on the queried path.
+ Implements svn_test_driver2_t. */
+static svn_error_t *
+node_locations(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ apr_pool_t *subpool = svn_pool_create(pool);
+ svn_repos_t *repos;
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *root;
+ svn_revnum_t youngest_rev;
+
+ /* Create the repository with a Greek tree. */
+ SVN_ERR(svn_test__create_repos(&repos, "test-repo-node-locations",
+ opts, pool));
+ fs = svn_repos_fs(repos);
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, subpool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+
+ /* Copy a file, committing revision 2. NOTE(review): the comment
+ originally said "Move", but /A/mu is never deleted, and the txn
+ is based on r0 rather than the youngest revision -- presumably
+ intentional for this test; confirm before changing. */
+ SVN_ERR(svn_fs_revision_root(&root, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_fs_copy(root, "/A/mu", txn_root, "/mu.new", subpool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ {
+ struct locations_info info[] =
+ {
+ { 1, "/A/mu" },
+ { 2, "/mu.new" },
+ { 0 }
+ };
+
+ /* Test this twice, once with a leading slash, once without,
+ because we know that the "without" form has caused us trouble
+ in the past. */
+ SVN_ERR(check_locations(fs, info, "/mu.new", 2, pool));
+ SVN_ERR(check_locations(fs, info, "mu.new", 2, pool));
+ }
+ svn_pool_clear(subpool);
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Test svn_repos_trace_node_locations() on a path created by a copy
+ (/foo copied to /bar, /bar/baz added after the copy): /bar/baz
+ must trace back only to revision 2, where it first appeared.
+ Implements svn_test_driver2_t. */
+static svn_error_t *
+node_locations2(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ apr_pool_t *subpool = svn_pool_create(pool);
+ svn_repos_t *repos;
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *root;
+ svn_revnum_t youngest_rev = 0;
+
+ /* Create the repository. */
+ SVN_ERR(svn_test__create_repos(&repos, "test-repo-node-locations2",
+ opts, pool));
+ fs = svn_repos_fs(repos);
+
+ /* Revision 1: Add a directory /foo */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "/foo", subpool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+
+ /* Revision 2: Copy /foo to /bar, and add /bar/baz */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_fs_revision_root(&root, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_copy(root, "/foo", txn_root, "/bar", subpool));
+ SVN_ERR(svn_fs_make_file(txn_root, "/bar/baz", subpool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+
+ /* Revision 3: Modify /bar/baz */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "/bar/baz", "brrt", subpool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+
+ /* Revision 4: Modify /bar/baz again */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "/bar/baz", "bzzz", subpool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+
+ /* Now, check locations: /bar/baz exists in r2-r4 and should trace
+ to /bar/baz in r2 and r3 but not to any r1 path. (SUBPOOL is
+ reclaimed when POOL is destroyed.) */
+ {
+ struct locations_info info[] =
+ {
+ { 3, "/bar/baz" },
+ { 2, "/bar/baz" },
+ { 0 }
+ };
+ SVN_ERR(check_locations(fs, info, "/bar/baz", youngest_rev, pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* Testing the reporter. */
+
+/* Functions for an editor that will catch removal of defunct locks. */
+
+/* The main editor baton for the defunct-lock-removal test editor. */
+typedef struct rmlocks_baton_t {
+ apr_hash_t *removed; /* paths whose lock token was removed (path -> marker) */
+ apr_pool_t *pool; /* long-lived pool; file paths are duplicated into it */
+} rmlocks_baton_t;
+
+/* The per-file baton for the defunct-lock-removal test editor. */
+typedef struct rmlocks_file_baton_t {
+ rmlocks_baton_t *main_baton; /* back-pointer to the edit baton */
+ const char *path; /* file path, copied into main_baton->pool */
+} rmlocks_file_baton_t;
+
+/* An svn_delta_editor_t function (open_file). Allocate a file baton
+ recording PATH (duplicated into the main baton's pool so it
+ outlives FILE_POOL) and the enclosing edit baton, which arrives
+ here as PARENT_BATON because open_root/open_directory pass the
+ edit baton through unchanged. */
+static svn_error_t *
+rmlocks_open_file(const char *path,
+ void *parent_baton,
+ svn_revnum_t base_revision,
+ apr_pool_t *file_pool,
+ void **file_baton)
+{
+ rmlocks_file_baton_t *fb = apr_palloc(file_pool, sizeof(*fb));
+ rmlocks_baton_t *b = parent_baton;
+
+ fb->main_baton = b;
+ fb->path = apr_pstrdup(b->pool, path);
+
+ *file_baton = fb;
+
+ return SVN_NO_ERROR;
+}
+
+/* An svn_delta_editor_t function (change_file_prop). When the
+ reporter removes a defunct lock it sets the entry lock-token
+ property to NULL; record that removal in the main baton, failing
+ if the property is set to a non-NULL value or removed twice for
+ the same path. */
+static svn_error_t *
+rmlocks_change_prop(void *file_baton,
+ const char *name,
+ const svn_string_t *value,
+ apr_pool_t *pool)
+{
+ rmlocks_file_baton_t *fb = file_baton;
+
+ if (strcmp(name, SVN_PROP_ENTRY_LOCK_TOKEN) == 0)
+ {
+ if (value != NULL)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Value for lock-token property not NULL");
+
+ /* We only want it removed once. */
+ if (apr_hash_get(fb->main_baton->removed, fb->path,
+ APR_HASH_KEY_STRING) != NULL)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Lock token for '%s' already removed",
+ fb->path);
+
+ /* Mark as removed (any non-NULL value works as the marker). */
+ apr_hash_set(fb->main_baton->removed, fb->path, APR_HASH_KEY_STRING,
+ (void *)1);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* An svn_delta_editor_t function (open_root). Pass the edit baton
+ through as the root directory baton. */
+static svn_error_t *
+rmlocks_open_root(void *edit_baton,
+ svn_revnum_t base_revision,
+ apr_pool_t *dir_pool,
+ void **root_baton)
+{
+ *root_baton = edit_baton;
+ return SVN_NO_ERROR;
+}
+
+/* An svn_delta_editor_t function (open_directory). Pass the parent
+ baton (ultimately the edit baton) through unchanged. */
+static svn_error_t *
+rmlocks_open_directory(const char *path,
+ void *parent_baton,
+ svn_revnum_t base_revision,
+ apr_pool_t *pool,
+ void **dir_baton)
+{
+ *dir_baton = parent_baton;
+ return SVN_NO_ERROR;
+}
+
+/* Create an svn_delta editor and baton, stored in *EDITOR and
+ *EDIT_BATON, that records in the hash *REMOVED each path whose
+ lock-token property is removed during an edit drive. The editor,
+ its baton and *REMOVED are all allocated in POOL. */
+static svn_error_t *
+create_rmlocks_editor(svn_delta_editor_t **editor,
+ void **edit_baton,
+ apr_hash_t **removed,
+ apr_pool_t *pool)
+{
+ rmlocks_baton_t *baton = apr_palloc(pool, sizeof(*baton));
+
+ /* Create the editor: start from the default no-op editor and
+ override only the callbacks this test cares about. */
+ *editor = svn_delta_default_editor(pool);
+ (*editor)->open_root = rmlocks_open_root;
+ (*editor)->open_directory = rmlocks_open_directory;
+ (*editor)->open_file = rmlocks_open_file;
+ (*editor)->change_file_prop = rmlocks_change_prop;
+
+ /* Initialize the baton. */
+ baton->removed = apr_hash_make(pool);
+ baton->pool = pool;
+ *edit_baton = baton;
+
+ *removed = baton->removed;
+
+ return SVN_NO_ERROR;
+}
+
+/* Check that HASH contains an entry for every path in the
+ NULL-terminated array SPEC, and no entries beyond those. Return
+ an SVN_ERR_TEST_FAILED error on the first discrepancy. */
+static svn_error_t *
+rmlocks_check(const char **spec, apr_hash_t *hash)
+{
+ apr_size_t n = 0;
+
+ for (; *spec; ++spec, ++n)
+ {
+ if (! apr_hash_get(hash, *spec, APR_HASH_KEY_STRING))
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "Lock token for '%s' should have been removed", *spec);
+ }
+
+ /* N entries matched; a larger hash means extra, unexpected removals. */
+ if (n < apr_hash_count(hash))
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Lock token for one or more paths unexpectedly "
+ "removed");
+ return SVN_NO_ERROR;
+}
+
+/* Test that defunct locks (broken or stolen since the client got its
+ tokens) are removed by the reporter, while still-valid locks are
+ left alone. Implements svn_test_driver2_t. */
+static svn_error_t *
+rmlocks(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_repos_t *repos;
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ apr_pool_t *subpool = svn_pool_create(pool);
+ svn_revnum_t youngest_rev;
+ svn_delta_editor_t *editor;
+ void *edit_baton, *report_baton;
+ svn_lock_t *l1, *l2, *l3, *l4;
+ svn_fs_access_t *fs_access;
+ apr_hash_t *removed;
+
+ /* Create a filesystem and repository. */
+ SVN_ERR(svn_test__create_repos(&repos, "test-repo-rmlocks",
+ opts, pool));
+ fs = svn_repos_fs(repos);
+
+ /* Prepare a txn to receive the greek tree. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, subpool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+
+ /* Locks require a username; act as "user1". */
+ SVN_ERR(svn_fs_create_access(&fs_access, "user1", pool));
+ SVN_ERR(svn_fs_set_access(fs, fs_access));
+
+ /* Lock some files, break a lock, steal another and check that those get
+ removed. */
+ {
+ /* A/mu's lock is broken and A/D/gamma's is stolen below, so only
+ their stale tokens should be removed; iota's lock stays valid. */
+ const char *expected [] = { "A/mu", "A/D/gamma", NULL };
+
+ SVN_ERR(svn_fs_lock(&l1, fs, "/iota", NULL, NULL, 0, 0, youngest_rev,
+ FALSE, subpool));
+ SVN_ERR(svn_fs_lock(&l2, fs, "/A/mu", NULL, NULL, 0, 0, youngest_rev,
+ FALSE, subpool));
+ SVN_ERR(svn_fs_lock(&l3, fs, "/A/D/gamma", NULL, NULL, 0, 0, youngest_rev,
+ FALSE, subpool));
+
+ /* Break l2. */
+ SVN_ERR(svn_fs_unlock(fs, "/A/mu", NULL, TRUE, subpool));
+
+ /* Steal l3 from ourselves. */
+ SVN_ERR(svn_fs_lock(&l4, fs, "/A/D/gamma", NULL, NULL, 0, 0, youngest_rev,
+ TRUE, subpool));
+
+ /* Create the editor. */
+ SVN_ERR(create_rmlocks_editor(&editor, &edit_baton, &removed, subpool));
+
+ /* Report what we have, presenting the (now partly defunct) tokens
+ l1, l2 and l3. */
+ SVN_ERR(svn_repos_begin_report3(&report_baton, 1, repos, "/", "", NULL,
+ FALSE, svn_depth_infinity, FALSE, FALSE,
+ editor, edit_baton, NULL, NULL, 1024,
+ subpool));
+ SVN_ERR(svn_repos_set_path3(report_baton, "", 1,
+ svn_depth_infinity,
+ FALSE, NULL, subpool));
+ SVN_ERR(svn_repos_set_path3(report_baton, "iota", 1,
+ svn_depth_infinity,
+ FALSE, l1->token, subpool));
+ SVN_ERR(svn_repos_set_path3(report_baton, "A/mu", 1,
+ svn_depth_infinity,
+ FALSE, l2->token, subpool));
+ SVN_ERR(svn_repos_set_path3(report_baton, "A/D/gamma", 1,
+ svn_depth_infinity,
+ FALSE, l3->token, subpool));
+
+ /* End the report. */
+ SVN_ERR(svn_repos_finish_report(report_baton, pool));
+
+ /* And check that the edit did what we wanted. */
+ SVN_ERR(rmlocks_check(expected, removed));
+ }
+
+ svn_pool_destroy(subpool);
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* Helper for the authz tests. Set *AUTHZ_P to a parsed
+ representation of AUTHZ_CONTENTS, using POOL for all allocation.
+ If DISK is TRUE, write the contents to a temp file and load them
+ via svn_repos_authz_read(); if FALSE, write them to a buffered
+ stream and load them via svn_repos_authz_parse(). Both routes
+ must accept the same input, which is what the callers exercise. */
+static svn_error_t *
+authz_get_handle(svn_authz_t **authz_p, const char *authz_contents,
+ svn_boolean_t disk, apr_pool_t *pool)
+{
+ if (disk)
+ {
+ const char *authz_file_path;
+
+ /* Create a temporary file. */
+ SVN_ERR_W(svn_io_write_unique(&authz_file_path, NULL,
+ authz_contents, strlen(authz_contents),
+ svn_io_file_del_on_pool_cleanup, pool),
+ "Writing temporary authz file");
+
+ /* Read the authz configuration back and start testing. */
+ SVN_ERR_W(svn_repos_authz_read(authz_p, authz_file_path, TRUE, pool),
+ "Opening test authz file");
+
+ /* Done with the file. */
+ SVN_ERR_W(svn_io_remove_file(authz_file_path, pool),
+ "Removing test authz file");
+ }
+ else
+ {
+ svn_stream_t *stream;
+
+ stream = svn_stream_buffered(pool);
+ SVN_ERR_W(svn_stream_puts(stream, authz_contents),
+ "Writing authz contents to stream");
+
+ SVN_ERR_W(svn_repos_authz_parse(authz_p, stream, NULL, pool),
+ "Parsing the authz contents");
+
+ SVN_ERR_W(svn_stream_close(stream),
+ "Closing the stream");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* One authz query for authz_check_access(): does USER have REQUIRED
+ access to PATH in REPO_NAME? EXPECTED is the verdict the authz
+ engine should return. Arrays of these end with a sentinel whose
+ path is NULL and required is svn_authz_none. */
+struct check_access_tests {
+ const char *path; /* path to query; NULL in the sentinel entry */
+ const char *repo_name; /* repository name the rule applies to */
+ const char *user; /* username, or NULL for anonymous */
+ const svn_repos_authz_access_t required; /* access being requested */
+ const svn_boolean_t expected; /* should access be granted? */
+};
+
+/* Helper for the authz tests. Run every query in TESTS (terminated
+ by the NULL/svn_authz_none sentinel) against AUTHZ_CFG and return
+ an SVN_ERR_TEST_FAILED error describing the first query whose
+ grant/deny verdict differs from the expected one. POOL is used
+ for temporary allocation. */
+static svn_error_t *
+authz_check_access(svn_authz_t *authz_cfg,
+ const struct check_access_tests *tests,
+ apr_pool_t *pool)
+{
+ int i;
+ svn_boolean_t access_granted;
+
+ /* Loop over the test array and test each case. */
+ for (i = 0; !(tests[i].path == NULL
+ && tests[i].required == svn_authz_none); i++)
+ {
+ SVN_ERR(svn_repos_authz_check_access(authz_cfg,
+ tests[i].repo_name,
+ tests[i].path,
+ tests[i].user,
+ tests[i].required,
+ &access_granted, pool));
+
+ if (access_granted != tests[i].expected)
+ {
+ /* Spell out the failing query in full: verdict, access kind,
+ repository, path and user. */
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Authz incorrectly %s %s%s access "
+ "to %s%s%s for user %s",
+ access_granted ?
+ "grants" : "denies",
+ tests[i].required
+ & svn_authz_recursive ?
+ "recursive " : "",
+ tests[i].required
+ & svn_authz_read ?
+ "read" : "write",
+ tests[i].repo_name ?
+ tests[i].repo_name : "",
+ tests[i].repo_name ?
+ ":" : "",
+ tests[i].path,
+ tests[i].user ?
+ tests[i].user : "-")
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Test that authz is giving out the right authorizations. */
+static svn_error_t *
+authz(apr_pool_t *pool)
+{
+ const char *contents;
+ svn_authz_t *authz_cfg;
+ svn_error_t *err;
+ svn_boolean_t access_granted;
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ /* Definition of the paths to test and expected replies for each. */
+ struct check_access_tests test_set[] = {
+ /* Test that read rules are correctly used. */
+ { "/A", "greek", NULL, svn_authz_read, TRUE },
+ { "/iota", "greek", NULL, svn_authz_read, FALSE },
+ /* Test that write rules are correctly used. */
+ { "/A", "greek", "plato", svn_authz_write, TRUE },
+ { "/A", "greek", NULL, svn_authz_write, FALSE },
+ /* Test that pan-repository rules are found and used. */
+ { "/A/B/lambda", "greek", "plato", svn_authz_read, TRUE },
+ { "/A/B/lambda", "greek", NULL, svn_authz_read, FALSE },
+ /* Test that authz uses parent path ACLs if no rule for the path
+ exists. */
+ { "/A/C", "greek", NULL, svn_authz_read, TRUE },
+ /* Test that recursive access requests take into account the rules
+ of subpaths. */
+ { "/A/D", "greek", "plato", svn_authz_read | svn_authz_recursive, TRUE },
+ { "/A/D", "greek", NULL, svn_authz_read | svn_authz_recursive, FALSE },
+ /* Test global write access lookups. */
+ { NULL, "greek", "plato", svn_authz_read, TRUE },
+ { NULL, "greek", NULL, svn_authz_write, FALSE },
+ /* Sentinel */
+ { NULL, NULL, NULL, svn_authz_none, FALSE }
+ };
+
+ /* The test logic:
+ *
+ * 1. Perform various access tests on a set of authz rules. Each
+ * test has a known outcome and tests different aspects of authz,
+ * such as inheriting parent-path authz, pan-repository rules or
+ * recursive access. 'plato' is our friendly neighborhood user with
+ * more access rights than other anonymous philosophers.
+ *
+ * 2. Load an authz file containing a cyclic dependency in groups
+ * and another containing a reference to an undefined group. Verify
+ * that svn_repos_authz_read fails to load both and returns an
+ * "invalid configuration" error.
+ *
+ * 3. Regression test for a bug in how recursion is handled in
+ * authz. The bug was that paths not under the parent path
+ * requested were being considered during the determination of
+ * access rights (eg. a rule for /dir2 matched during a lookup for
+ * /dir), due to incomplete tests on path relations.
+ */
+
+ /* The authz rules for the phase 1 tests. */
+ contents =
+ "[greek:/A]" NL
+ "* = r" NL
+ "plato = rw" NL
+ "" NL
+ "[greek:/iota]" NL
+ "* =" NL
+ "" NL
+ "[/A/B/lambda]" NL
+ "plato = r" NL
+ "* =" NL
+ "" NL
+ "[greek:/A/D]" NL
+ "plato = r" NL
+ "* = r" NL
+ "" NL
+ "[greek:/A/D/G]" NL
+ "plato = r" NL
+ "* =" NL
+ "" NL
+ "[greek:/A/B/E/beta]" NL
+ "* =" NL
+ "" NL
+ "[/nowhere]" NL
+ "nobody = r" NL
+ "" NL;
+
+ /* Load the test authz rules. */
+ SVN_ERR(authz_get_handle(&authz_cfg, contents, FALSE, subpool));
+
+ /* Loop over the test array and test each case. */
+ SVN_ERR(authz_check_access(authz_cfg, test_set, subpool));
+
+ /* Repeat the previous test on disk */
+ SVN_ERR(authz_get_handle(&authz_cfg, contents, TRUE, subpool));
+ SVN_ERR(authz_check_access(authz_cfg, test_set, subpool));
+
+ /* The authz rules for the phase 2 tests, first case (cyclic
+ dependency). */
+ contents =
+ "[groups]" NL
+ "slaves = cooks,scribes,@gladiators" NL
+ "gladiators = equites,thraces,@slaves" NL
+ "" NL
+ "[greek:/A]" NL
+ "@slaves = r" NL;
+
+ /* Load the test authz rules and check that group cycles are
+ reported. */
+ err = authz_get_handle(&authz_cfg, contents, FALSE, subpool);
+ if (!err || err->apr_err != SVN_ERR_AUTHZ_INVALID_CONFIG)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, err,
+ "Got %s error instead of expected "
+ "SVN_ERR_AUTHZ_INVALID_CONFIG",
+ err ? "unexpected" : "no");
+ svn_error_clear(err);
+
+ /* The authz rules for the phase 2 tests, second case (missing group
+ definition). */
+ contents =
+ "[greek:/A]" NL
+ "@senate = r" NL;
+
+ /* Check that references to undefined groups are reported. */
+ err = authz_get_handle(&authz_cfg, contents, FALSE, subpool);
+ if (!err || err->apr_err != SVN_ERR_AUTHZ_INVALID_CONFIG)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, err,
+ "Got %s error instead of expected "
+ "SVN_ERR_AUTHZ_INVALID_CONFIG",
+ err ? "unexpected" : "no");
+ svn_error_clear(err);
+
+ /* The authz rules for the phase 3 tests */
+ contents =
+ "[/]" NL
+ "* = rw" NL
+ "" NL
+ "[greek:/dir2/secret]" NL
+ "* =" NL;
+
+ /* Load the test authz rules. */
+ SVN_ERR(authz_get_handle(&authz_cfg, contents, FALSE, subpool));
+
+ /* Verify that the rule on /dir2/secret doesn't affect this
+ request */
+ SVN_ERR(svn_repos_authz_check_access(authz_cfg, "greek",
+ "/dir", NULL,
+ (svn_authz_read
+ | svn_authz_recursive),
+ &access_granted, subpool));
+ if (!access_granted)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Regression: incomplete ancestry test "
+ "for recursive access lookup.");
+
+ /* The authz rules for the phase 4 tests */
+ contents =
+ "[greek:/dir2//secret]" NL
+ "* =" NL;
+ SVN_TEST_ASSERT_ERROR(authz_get_handle(&authz_cfg, contents, FALSE, subpool),
+ SVN_ERR_AUTHZ_INVALID_CONFIG);
+
+ /* Verify that the rule on /dir2/secret doesn't affect this
+ request */
+ SVN_ERR(svn_repos_authz_check_access(authz_cfg, "greek",
+ "/dir", NULL,
+ (svn_authz_read
+ | svn_authz_recursive),
+ &access_granted, subpool));
+ if (!access_granted)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Regression: incomplete ancestry test "
+ "for recursive access lookup.");
+
+ /* That's a wrap! */
+ svn_pool_destroy(subpool);
+ return SVN_NO_ERROR;
+}
+
+/* Test the supported authz wildcard variants. */
+static svn_error_t *
+test_authz_wildcards(apr_pool_t *pool)
+{
+  svn_authz_t *authz_cfg;
+
+  /* Some non-trivially overlapping wildcard rules, covering all types
+   * of wildcards: "any", "any-var", "prefix", "postfix" and "complex".
+   *
+   * Note that the rules are not in 1:1 correspondence to that enumeration.
+   * The sections below are referred to as "rule 1" .. "rule 6", top to
+   * bottom, by the expectations table that follows.
+   */
+  const char *contents =
+    "[:glob:/**/G]" NL
+    "* = r" NL
+    "" NL
+    "[:glob:/A/*/G]" NL
+    "* =" NL
+    "" NL
+    "[:glob:/A/**/*a*]" NL
+    "* = r" NL
+    "" NL
+    "[:glob:/**/*a]" NL
+    "* = rw" NL
+    "" NL
+    "[:glob:/A/**/g*]" NL
+    "* =" NL
+    "" NL
+    "[:glob:/**/lambda]" NL
+    "* = rw" NL;
+
+  /* Definition of the paths to test and expected replies for each. */
+  struct check_access_tests test_set[] = {
+    /* Test that read rules are correctly used. */
+    { "/", NULL, NULL, svn_authz_read, FALSE }, /* default */
+    { "/iota", NULL, NULL, svn_authz_write, TRUE }, /* rule 4 */
+    { "/A", NULL, NULL, svn_authz_read, FALSE }, /* inherited */
+    { "/A/mu", NULL, NULL, svn_authz_read, FALSE }, /* inherited */
+    { "/A/B", NULL, NULL, svn_authz_read, FALSE }, /* inherited */
+    { "/A/B/lambda", NULL, NULL, svn_authz_write, TRUE }, /* rule 6 */
+    { "/A/B/E", NULL, NULL, svn_authz_read, FALSE }, /* inherited */
+    { "/A/B/E/alpha", NULL, NULL, svn_authz_write, TRUE }, /* rule 4 */
+    { "/A/B/E/beta", NULL, NULL, svn_authz_write, TRUE }, /* rule 4 */
+    { "/A/B/F", NULL, NULL, svn_authz_read, FALSE }, /* inherited */
+    { "/A/C", NULL, NULL, svn_authz_read, FALSE }, /* inherited */
+    { "/A/D", NULL, NULL, svn_authz_read, FALSE }, /* inherited */
+    { "/A/D/gamma", NULL, NULL, svn_authz_read, FALSE }, /* rule 5 */
+    { "/A/D/G", NULL, NULL, svn_authz_read, FALSE }, /* rule 2 */
+    { "/A/D/G/pi", NULL, NULL, svn_authz_read, FALSE }, /* inherited */
+    { "/A/D/G/rho", NULL, NULL, svn_authz_read, FALSE }, /* inherited */
+    { "/A/D/G/tau", NULL, NULL, svn_authz_read, TRUE }, /* rule 3 */
+    { "/A/D/G/tau", NULL, NULL, svn_authz_write, FALSE }, /* rule 3 */
+    { "/A/D/H", NULL, NULL, svn_authz_read, FALSE }, /* inherited */
+    { "/A/D/H/chi", NULL, NULL, svn_authz_read, FALSE }, /* inherited */
+    { "/A/D/H/psi", NULL, NULL, svn_authz_read, FALSE }, /* inherited */
+    { "/A/D/H/omega", NULL, NULL, svn_authz_write, TRUE }, /* rule 4 */
+    /* Non-greek tree paths: */
+    { "/A/G", NULL, NULL, svn_authz_read, TRUE }, /* rule 1 */
+    { "/A/G", NULL, NULL, svn_authz_write, FALSE }, /* rule 1 */
+    { "/A/G/G", NULL, NULL, svn_authz_read, FALSE }, /* rule 2 */
+    { "/G", NULL, NULL, svn_authz_read, TRUE }, /* rule 1 */
+    { "/G", NULL, NULL, svn_authz_write, FALSE }, /* rule 1 */
+    { "/Y/G", NULL, NULL, svn_authz_read, TRUE }, /* rule 1 */
+    { "/Y/G", NULL, NULL, svn_authz_write, FALSE }, /* rule 1 */
+    { "/X/Z/G", NULL, NULL, svn_authz_read, TRUE }, /* rule 1 */
+    { "/X/Z/G", NULL, NULL, svn_authz_write, FALSE }, /* rule 1 */
+    /* Rule 5 prevents recursive access anywhere below /A. */
+    { "/", NULL, NULL, svn_authz_read | svn_authz_recursive, FALSE },
+    { "/iota", NULL, NULL, svn_authz_read | svn_authz_recursive, TRUE },
+    { "/iota", NULL, NULL, svn_authz_write | svn_authz_recursive, FALSE },
+    { "/A", NULL, NULL, svn_authz_read | svn_authz_recursive, FALSE },
+    { "/A/mu", NULL, NULL, svn_authz_read | svn_authz_recursive, FALSE },
+    { "/A/B", NULL, NULL, svn_authz_read | svn_authz_recursive, FALSE },
+    { "/A/B/lambda", NULL, NULL, svn_authz_read | svn_authz_recursive, FALSE },
+    { "/A/B/E", NULL, NULL, svn_authz_read | svn_authz_recursive, FALSE },
+    { "/A/B/E/alpha", NULL, NULL, svn_authz_read | svn_authz_recursive, FALSE },
+    { "/A/B/E/beta", NULL, NULL, svn_authz_read | svn_authz_recursive, FALSE },
+    { "/A/B/F", NULL, NULL, svn_authz_read | svn_authz_recursive, FALSE },
+    { "/A/C", NULL, NULL, svn_authz_read | svn_authz_recursive, FALSE },
+    { "/A/D", NULL, NULL, svn_authz_read | svn_authz_recursive, FALSE },
+    { "/A/D/gamma", NULL, NULL, svn_authz_read | svn_authz_recursive, FALSE },
+    { "/A/D/G", NULL, NULL, svn_authz_read | svn_authz_recursive, FALSE },
+    { "/A/D/G/pi", NULL, NULL, svn_authz_read | svn_authz_recursive, FALSE },
+    { "/A/D/G/rho", NULL, NULL, svn_authz_read | svn_authz_recursive, FALSE },
+    { "/A/D/G/tau", NULL, NULL, svn_authz_read | svn_authz_recursive, FALSE },
+    { "/A/D/H", NULL, NULL, svn_authz_read | svn_authz_recursive, FALSE },
+    { "/A/D/H/chi", NULL, NULL, svn_authz_read | svn_authz_recursive, FALSE },
+    { "/A/D/H/psi", NULL, NULL, svn_authz_read | svn_authz_recursive, FALSE },
+    { "/A/D/H/omega", NULL, NULL, svn_authz_read | svn_authz_recursive, FALSE },
+    /* Sentinel */
+    { NULL, NULL, NULL, svn_authz_none, FALSE }
+  };
+
+  /* Load the test authz rules. */
+  SVN_ERR(authz_get_handle(&authz_cfg, contents, FALSE, pool));
+
+  /* Loop over the test array and test each case. */
+  SVN_ERR(authz_check_access(authz_cfg, test_set, pool));
+
+  return SVN_NO_ERROR;
+}
+
+/* Test the authz performance with wildcard rules. */
+static svn_error_t *
+test_authz_wildcard_performance(apr_pool_t *pool)
+{
+  svn_authz_t *authz_cfg;
+  svn_boolean_t access_granted;
+  int i, k;
+  apr_time_t start, end;
+
+  /* Some non-trivially overlapping wildcard rules, covering all types
+   * of wildcards: "any", "any-var", "prefix", "postfix" and "complex".
+   */
+  const char *contents =
+    "[:glob:greek:/A/*/G]" NL
+    "* =" NL
+    "" NL
+    "[:glob:greek:/A/**/*a*]" NL
+    "* = r" NL
+    "" NL
+    "[:glob:greek:/**/*a]" NL
+    "* = rw" NL
+    "" NL
+    "[:glob:greek:/A/**/g*]" NL
+    "* =" NL
+    "" NL
+    "[:glob:greek:/**/lambda]" NL
+    "* = rw" NL;
+
+  /* Load the test authz rules. */
+  SVN_ERR(authz_get_handle(&authz_cfg, contents, FALSE, pool));
+
+  start = apr_time_now();
+  /* I iterates over the requested access masks 1 .. 3, i.e. every
+   * non-empty combination of read and write bits. */
+  for (k = 0; k < 100000; ++k)
+    for (i = 1; i < 4; ++i)
+      {
+        const char **path;
+        const char *paths[] =
+          { "/iota",
+            "/A",
+            "/A/mu",
+            "/A/B",
+            "/A/B/lambda",
+            "/A/B/E",
+            "/A/B/E/alpha",
+            "/A/B/E/beta",
+            "/A/B/F",
+            "/A/C",
+            "/A/D",
+            "/A/D/gamma",
+            "/A/D/G",
+            "/A/D/G/pi",
+            "/A/D/G/rho",
+            "/A/D/G/tau",
+            "/A/D/H",
+            "/A/D/H/chi",
+            "/A/D/H/psi",
+            "/A/D/H/omega",
+            NULL
+          };
+
+        for (path = paths; *path; ++path)
+          SVN_ERR(svn_repos_authz_check_access(authz_cfg, "greek",
+                                               *path, NULL, i,
+                                               &access_granted, pool));
+      }
+
+  end = apr_time_now();
+  /* K and I deliberately carry their post-loop values here:
+   * k * (i - 1) * 20 is the total number of access checks performed
+   * (20 paths per inner iteration).  "musecs" = microseconds. */
+  printf("%"APR_TIME_T_FMT" musecs\n", end - start);
+  printf("%"APR_TIME_T_FMT" checks / sec\n",
+         (k * (i - 1) * 20 * 1000000l) / (end - start));
+
+  return SVN_NO_ERROR;
+}
+
+/* Test that the latest definition wins, regardless of whether the ":glob:"
+ * prefix has been given. */
+static svn_error_t *
+test_authz_prefixes(apr_pool_t *pool)
+{
+  svn_authz_t *authz_cfg;
+  apr_pool_t *iterpool = svn_pool_create(pool);
+  int i, combi;
+
+  /* Set all rights at some folder and replace them again. Make sure to
+   * cover the "/" b/c that already has an implicit rule, so we overwrite
+   * it twice. Each section header carries 3 string placeholders: the
+   * optional glob support marker, the repository name and the path. */
+  const char *contents_format =
+    "[%s%s%s]" NL
+    "* = r" NL
+    "plato = rw" NL
+    "" NL
+    "[%s%s%s]" NL
+    "* =" NL
+    "plato = r" NL;
+
+  /* The paths on which to apply this test. */
+  enum { PATH_COUNT = 2 };
+  const char *test_paths[PATH_COUNT] = { "/", "/A" };
+
+  /* Definition of the paths to test and expected replies for each. */
+  struct check_access_tests test_set1[] = {
+    /* Test that read rules are correctly used. */
+    { "", "greek", NULL, svn_authz_read, FALSE },
+    /* Test that write rules are correctly used. */
+    { "", "greek", "plato", svn_authz_read, TRUE },
+    { "", "greek", "plato", svn_authz_write, FALSE },
+    /* Sentinel */
+    { NULL, NULL, NULL, svn_authz_none, FALSE }
+  };
+
+  /* To be used when global rules are specified after per-repos rules.
+   * In that case, the global rules still win. */
+  struct check_access_tests test_set2[] = {
+    /* Test that read rules are correctly used. */
+    { "", "greek", NULL, svn_authz_read, TRUE },
+    { "", "greek", NULL, svn_authz_write, FALSE },
+    /* Test that write rules are correctly used. */
+    { "", "greek", "plato", svn_authz_read, TRUE },
+    { "", "greek", "plato", svn_authz_write, TRUE },
+    /* Sentinel */
+    { NULL, NULL, NULL, svn_authz_none, FALSE }
+  };
+
+  /* There is a total of 16 combinations of authz content.
+   * Bits 0 and 1 toggle the ":glob:" prefix on the 1st / 2nd section,
+   * bit 2 selects which of the two sections is repository-specific and
+   * bit 3 picks the starting test path. */
+  for (combi = 0; combi < 16; ++combi)
+    {
+      const char *contents;
+      const char *glob1 = (combi & 1) ? ":glob:" : "";
+      const char *glob2 = (combi & 2) ? ":glob:" : "";
+      const char *repo1 = (combi & 4) ? "greek:" : "";
+      const char *repo2 = (combi & 4) ? "" : "greek:";
+      const char *test_path = test_paths[combi / 8];
+      struct check_access_tests *test_set = (combi & 4) ? test_set2 : test_set1;
+
+      /* Create and parse the authz rules. */
+      svn_pool_clear(iterpool);
+      contents = apr_psprintf(iterpool, contents_format,
+                              glob1, repo1, test_path,
+                              glob2, repo2, test_path);
+      SVN_ERR(authz_get_handle(&authz_cfg, contents, FALSE, iterpool));
+
+      /* iterate over all test paths */
+      for (i = combi / 8; i < PATH_COUNT; ++i)
+        {
+          /* Set the path for all test cases to the current test path. */
+          struct check_access_tests *test;
+          for (test = test_set; test->path != NULL; ++test)
+            test->path = test_paths[i];
+
+          /* Loop over the test array and test each case. */
+          SVN_ERR(authz_check_access(authz_cfg, test_set, iterpool));
+        }
+    }
+
+  /* That's a wrap! */
+  svn_pool_destroy(iterpool);
+  return SVN_NO_ERROR;
+}
+
+/* Test that recursive access checks are correctly overridden by wildcard
+ * rules on sub-paths: rights granted on a path may be extended, restricted
+ * or only partially replaced further down the tree. */
+static svn_error_t *
+test_authz_recursive_override(apr_pool_t *pool)
+{
+  svn_authz_t *authz_cfg;
+
+  /* Pairs of rules: a rule on a specific path plus a wildcard rule below
+   * it.  /A restricts (rw -> r), /B extends (none -> rw), while /C and /D
+   * replace rights only on parts of their sub-trees, so no recursive
+   * right can be granted at /C or /D themselves. */
+  const char *contents =
+    "[:glob:/A/B]" NL
+    "plato = rw" NL
+    "" NL
+    "[:glob:/A/**]" NL
+    "plato = r" NL
+    "" NL
+    "[:glob:/B/C]" NL
+    "plato =" NL
+    "" NL
+    "[:glob:/B/**]" NL
+    "plato = rw" NL
+    "" NL
+    "[:glob:/C/D]" NL
+    "plato = rw" NL
+    "" NL
+    "[:glob:/C/**/E]" NL
+    "plato = r" NL
+    "" NL
+    "[:glob:/D/E]" NL
+    "plato = r" NL
+    "" NL
+    "[:glob:/D/**/F]" NL
+    "plato = rw" NL;
+
+  /* Definition of the paths to test and expected replies for each. */
+  struct check_access_tests test_set[] = {
+    /* The root shall not be affected -> defaults to "no access". */
+    { "/", NULL, "plato", svn_authz_read, FALSE },
+    /* Recursive restriction of rights shall work. */
+    { "/A", NULL, "plato", svn_authz_read | svn_authz_recursive, TRUE },
+    { "/A", NULL, "plato", svn_authz_write | svn_authz_recursive, FALSE },
+    /* Recursive extension of rights shall work. */
+    { "/B", NULL, "plato", svn_authz_read | svn_authz_recursive, TRUE },
+    { "/B", NULL, "plato", svn_authz_write | svn_authz_recursive, TRUE },
+    /* Partial replacements shall not result in recursive rights. */
+    { "/C", NULL, "plato", svn_authz_read | svn_authz_recursive, FALSE },
+    { "/C/D", NULL, "plato", svn_authz_read | svn_authz_recursive, TRUE },
+    { "/C/D", NULL, "plato", svn_authz_write | svn_authz_recursive, FALSE },
+    { "/D", NULL, "plato", svn_authz_read | svn_authz_recursive, FALSE },
+    { "/D/E", NULL, "plato", svn_authz_read | svn_authz_recursive, TRUE },
+    { "/D/E", NULL, "plato", svn_authz_write | svn_authz_recursive, FALSE },
+    /* Sentinel */
+    { NULL, NULL, NULL, svn_authz_none, FALSE }
+  };
+
+  SVN_ERR(authz_get_handle(&authz_cfg, contents, FALSE, pool));
+
+  /* Loop over the test array and test each case. */
+  SVN_ERR(authz_check_access(authz_cfg, test_set, pool));
+
+  /* That's a wrap! */
+  return SVN_NO_ERROR;
+}
+
+/* Exercise various combinations of wildcard pattern rules, plain path
+ * rules and group definitions, including the distinction between rules
+ * that merely "match" a path and rules that "apply" to it. */
+static svn_error_t *
+test_authz_pattern_tests(apr_pool_t *pool)
+{
+  svn_authz_t *authz_cfg;
+
+  /* Rules will be considered for recursive access checks irrespective of
+   * whether the respective paths actually do exist. */
+  const char *contents =
+    "[:glob:/**/Yeti]" NL
+    "plato = r" NL
+    "" NL
+    "[/]" NL
+    "plato = r" NL
+    "" NL
+    "[/trunk]" NL
+    "plato = rw" NL;
+
+  /* Definition of the paths to test and expected replies for each. */
+  struct check_access_tests test_set[] = {
+    /* We have no recursive write access anywhere. */
+    { "/", NULL, "plato", svn_authz_read | svn_authz_recursive, TRUE },
+    { "/", NULL, "plato", svn_authz_write | svn_authz_recursive, FALSE },
+    { "/trunk", NULL, "plato", svn_authz_read | svn_authz_recursive, TRUE },
+    { "/trunk", NULL, "plato", svn_authz_write | svn_authz_recursive, FALSE },
+
+    /* We do have ordinary write access to anything under /trunk that is
+     * not a Yeti. */
+    { "/trunk", NULL, "plato", svn_authz_write, TRUE },
+    { "/trunk/A/B/C", NULL, "plato", svn_authz_write, TRUE },
+
+    /* We don't have write access to Yetis. */
+    { "/trunk/A/B/C/Yeti", NULL, "plato", svn_authz_write, FALSE },
+    { "/trunk/Yeti", NULL, "plato", svn_authz_write, FALSE },
+    { "/Yeti", NULL, "plato", svn_authz_write, FALSE },
+    /* Sentinel */
+    { NULL, NULL, NULL, svn_authz_none, FALSE }
+  };
+
+  /* Global override via "**" and selective override for a specific path. */
+  const char *contents2 =
+    "[:glob:/X]" NL
+    "user1 =" NL
+    "" NL
+    "[:glob:/X/**]" NL
+    "user1 = rw" NL
+    "user2 = rw" NL
+    "" NL
+    "[:glob:/X/Y/Z]" NL
+    "user2 =" NL;
+
+  /* Definition of the paths to test and expected replies for each. */
+  struct check_access_tests test_set2[] = {
+    /* No access at the root */
+    { "/", NULL, "user1", svn_authz_read, FALSE },
+    { "/", NULL, "user2", svn_authz_read, FALSE },
+
+    /* User 1 has recursive write access anywhere. */
+    { "/X", NULL, "user1", svn_authz_write | svn_authz_recursive, TRUE },
+    { "/X/Y", NULL, "user1", svn_authz_read | svn_authz_recursive, TRUE },
+    { "/X/Y/Z", NULL, "user1", svn_authz_read | svn_authz_recursive, TRUE },
+
+    /* User 2 only has recursive read access to X/Y/Z. */
+    { "/X", NULL, "user1", svn_authz_read | svn_authz_recursive, TRUE },
+    { "/X", NULL, "user2", svn_authz_write | svn_authz_recursive, FALSE },
+    { "/X/Y", NULL, "user2", svn_authz_write | svn_authz_recursive, FALSE },
+    { "/X/Y/Z", NULL, "user2", svn_authz_write | svn_authz_recursive, FALSE },
+
+    /* However, user2 has ordinary write access X and recursive write access
+     * to anything not in X/Y/Z. */
+    { "/X", NULL, "user2", svn_authz_write, TRUE },
+    { "/X/A", NULL, "user2", svn_authz_write | svn_authz_recursive, TRUE },
+    { "/X/Y/A", NULL, "user2", svn_authz_write | svn_authz_recursive, TRUE },
+
+    /* Sentinel */
+    { NULL, NULL, NULL, svn_authz_none, FALSE }
+  };
+
+  /* Global patterns vs. global path rules. */
+  const char *contents3 =
+    "[groups]" NL
+    "Team1 = user1" NL
+    "Team2 = user1, user2" NL
+    "" NL
+    "[/]" NL
+    "* =" NL
+    "" NL
+    "[:glob:Repo1:/**/folder*]" NL
+    "@Team1 = rw" NL
+    "" NL
+    "[Repo2:/]" NL
+    "@Team2 = r" NL;
+
+  /* Definition of the paths to test and expected replies for each. */
+  struct check_access_tests test_set3[] = {
+    /* No access at the root of Repo1 (inherited from global settings) */
+    { "/", "Repo1", "user1", svn_authz_read, FALSE },
+    { "/", "Repo1", "user2", svn_authz_read, FALSE },
+
+    /* r/o access for both users at the root of Repo2 */
+    { "/", "Repo2", "user1", svn_authz_read, TRUE },
+    { "/", "Repo2", "user2", svn_authz_read, TRUE },
+    { "/", "Repo2", "user1", svn_authz_write, FALSE },
+    { "/", "Repo2", "user2", svn_authz_write, FALSE },
+
+    /* user1 has recursive write access (b/c there are no further rules
+     * restricting the access once granted at the parent) wherever there is
+     * a "folder..." in the path, while user2 has no access at all. */
+    { "/folder_1", "Repo1", "user1",
+      svn_authz_write | svn_authz_recursive, TRUE },
+    { "/folder_1", "Repo1", "user2", svn_authz_read, FALSE },
+    { "/1_folder", "Repo1", "user1", svn_authz_read, FALSE },
+    { "/foo/bar/folder_2/random", "Repo1", "user1",
+      svn_authz_write | svn_authz_recursive, TRUE },
+    { "/foo/bar/folder_2/random", "Repo1", "user2", svn_authz_read, FALSE },
+    { "/foo/bar/2_folder/random", "Repo1", "user1", svn_authz_read, FALSE },
+    { "/foo/bar/folder", "Repo1", "user1",
+      svn_authz_write | svn_authz_recursive, TRUE },
+    { "/foo/bar/folder", "Repo1", "user2", svn_authz_read, FALSE },
+
+    /* Doesn't quite match the pattern: */
+    { "/foo/bar/folde", "Repo1", "user1", svn_authz_read, FALSE },
+    { "/foo/bar/folde", "Repo1", "user2", svn_authz_read, FALSE },
+
+    /* Sentinel */
+    { NULL, NULL, NULL, svn_authz_none, FALSE }
+  };
+
+  /* Illustrate the difference between "matching" rule and "applying" rule.
+   * "*" only _matches_ a single level and will _apply_ to sub-paths only
+   * if no other rule _applies_.  The "**" rule applies to all paths in
+   * trunk and will only be eclipsed for members of team1 and then only for
+   * the first sub-level. */
+  const char *contents4 =
+    "[groups]" NL
+    "team1 = user1, user3" NL
+    "team2 = user2, user3" NL
+    "" NL
+    "[:glob:Repo1:/trunk/**]" NL
+    "@team2 = rw" NL
+    "" NL
+    "[:glob:Repo1:/trunk/*]" NL
+    "@team1 = r" NL;
+
+  /* Definition of the paths to test and expected replies for each. */
+  struct check_access_tests test_set4[] = {
+    /* Team2 has r/w access to /trunk */
+    { "/trunk", "Repo1", "user1", svn_authz_read, FALSE },
+    { "/trunk", "Repo1", "user2", svn_authz_write, TRUE },
+    { "/trunk", "Repo1", "user3", svn_authz_write, TRUE },
+
+    /* At the first sub-level, team1 has only read access;
+     * the remainder of team2 has write access. */
+    { "/trunk/A", "Repo1", "user1", svn_authz_read, TRUE },
+    { "/trunk/A", "Repo1", "user3", svn_authz_read, TRUE },
+    { "/trunk/A", "Repo1", "user1", svn_authz_write, FALSE },
+    { "/trunk/A", "Repo1", "user2", svn_authz_write, TRUE },
+    { "/trunk/A", "Repo1", "user3", svn_authz_write, FALSE },
+
+    /* At the second sub-level, team2 has full write access;
+     * the remainder of team1 has still r/o access. */
+    { "/trunk/A/B", "Repo1", "user2",
+      svn_authz_write | svn_authz_recursive, TRUE },
+    { "/trunk/A/B", "Repo1", "user3",
+      svn_authz_write | svn_authz_recursive, TRUE },
+    { "/trunk/A/B", "Repo1", "user1", svn_authz_read, TRUE },
+    { "/trunk/A/B", "Repo1", "user1", svn_authz_write, FALSE },
+
+    /* Sentinel */
+    { NULL, NULL, NULL, svn_authz_none, FALSE }
+  };
+
+  /* Verify that the rules are applied as expected. */
+  SVN_ERR(authz_get_handle(&authz_cfg, contents, FALSE, pool));
+  SVN_ERR(authz_check_access(authz_cfg, test_set, pool));
+
+  SVN_ERR(authz_get_handle(&authz_cfg, contents2, FALSE, pool));
+  SVN_ERR(authz_check_access(authz_cfg, test_set2, pool));
+
+  SVN_ERR(authz_get_handle(&authz_cfg, contents3, FALSE, pool));
+  SVN_ERR(authz_check_access(authz_cfg, test_set3, pool));
+
+  SVN_ERR(authz_get_handle(&authz_cfg, contents4, FALSE, pool));
+  SVN_ERR(authz_check_access(authz_cfg, test_set4, pool));
+
+  /* That's a wrap! */
+  return SVN_NO_ERROR;
+}
+
+
+/* Test in-repo authz paths */
+static svn_error_t *
+in_repo_authz(const svn_test_opts_t *opts,
+              apr_pool_t *pool)
+{
+  svn_repos_t *repos;
+  svn_fs_t *fs;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *txn_root;
+  svn_revnum_t youngest_rev;
+  svn_authz_t *authz_cfg;
+  const char *authz_contents;
+  const char *repos_root;
+  const char *repos_url;
+  const char *authz_url;
+  const char *noent_authz_url;
+  svn_error_t *err;
+  struct check_access_tests test_set[] = {
+    /* reads */
+    { "/A", NULL, NULL, svn_authz_read, FALSE },
+    { "/A", NULL, "plato", svn_authz_read, TRUE },
+    { "/A", NULL, "socrates", svn_authz_read, TRUE },
+    /* writes */
+    { "/A", NULL, NULL, svn_authz_write, FALSE },
+    { "/A", NULL, "socrates", svn_authz_write, FALSE },
+    { "/A", NULL, "plato", svn_authz_write, TRUE },
+    /* Sentinel */
+    { NULL, NULL, NULL, svn_authz_none, FALSE }
+  };
+
+  /* Test plan:
+   * Create an authz file and put it in the repository.
+   * Verify it can be read with a relative URL.
+   * Verify it can be read with an absolute URL.
+   * Verify non-existent path does not error out when must_exist is FALSE.
+   * Verify non-existent path does error out when must_exist is TRUE.
+   * Verify that an http:// URL produces an error.
+   * Verify that an svn:// URL produces an error.
+   *
+   * NOTE(review): only the absolute file URL case is exercised below; the
+   * relative-URL step of the plan has no matching check in this function.
+   */
+
+  /* What we'll put in the authz file, it's simple since we're not testing
+   * the parsing, just that we got what we expected. */
+  authz_contents =
+    "" NL
+    "" NL
+    "[/]" NL
+    "plato = rw" NL
+    "socrates = r";
+
+  /* Create a filesystem and repository. */
+  SVN_ERR(svn_test__create_repos(&repos, "test-repo-in-repo-authz",
+                                 opts, pool));
+  fs = svn_repos_fs(repos);
+
+  /* Commit the authz file to the repo. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+  SVN_ERR(svn_fs_make_file(txn_root, "authz", pool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "authz", authz_contents,
+                                      pool));
+  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+  repos_root = svn_repos_path(repos, pool);
+  SVN_ERR(svn_uri_get_file_url_from_dirent(&repos_url, repos_root, pool));
+  authz_url = svn_path_url_add_component2(repos_url, "authz", pool);
+  noent_authz_url = svn_path_url_add_component2(repos_url, "A/authz", pool);
+
+  /* absolute file URL. */
+  SVN_ERR(svn_repos_authz_read2(&authz_cfg, authz_url, NULL, TRUE, pool));
+  SVN_ERR(authz_check_access(authz_cfg, test_set, pool));
+
+  /* Non-existent path in the repo with must_exist set to FALSE */
+  SVN_ERR(svn_repos_authz_read2(&authz_cfg, noent_authz_url, NULL,
+                                FALSE, pool));
+
+  /* Non-existent path in the repo with must_exist set to TRUE */
+  err = svn_repos_authz_read2(&authz_cfg, noent_authz_url, NULL, TRUE, pool);
+  if (!err || err->apr_err != SVN_ERR_ILLEGAL_TARGET)
+    return svn_error_createf(SVN_ERR_TEST_FAILED, err,
+                             "Got %s error instead of expected "
+                             "SVN_ERR_ILLEGAL_TARGET",
+                             err ? "unexpected" : "no");
+  svn_error_clear(err);
+
+  /* http:// URL which is unsupported */
+  err = svn_repos_authz_read2(&authz_cfg, "http://example.com/repo/authz",
+                              NULL, TRUE, pool);
+  if (!err || err->apr_err != SVN_ERR_RA_ILLEGAL_URL)
+    return svn_error_createf(SVN_ERR_TEST_FAILED, err,
+                             "Got %s error instead of expected "
+                             "SVN_ERR_RA_ILLEGAL_URL",
+                             err ? "unexpected" : "no");
+  svn_error_clear(err);
+
+  /* svn:// URL which is unsupported */
+  err = svn_repos_authz_read2(&authz_cfg, "svn://example.com/repo/authz",
+                              NULL, TRUE, pool);
+  if (!err || err->apr_err != SVN_ERR_RA_ILLEGAL_URL)
+    return svn_error_createf(SVN_ERR_TEST_FAILED, err,
+                             "Got %s error instead of expected "
+                             "SVN_ERR_RA_ILLEGAL_URL",
+                             err ? "unexpected" : "no");
+  svn_error_clear(err);
+
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Test in-repo authz with global groups. */
+static svn_error_t *
+in_repo_groups_authz(const svn_test_opts_t *opts,
+                     apr_pool_t *pool)
+{
+  svn_repos_t *repos;
+  svn_fs_t *fs;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *txn_root;
+  svn_revnum_t youngest_rev;
+  svn_authz_t *authz_cfg;
+  const char *groups_contents;
+  const char *authz_contents;
+  const char *repos_root;
+  const char *repos_url;
+  const char *groups_url;
+  const char *noent_groups_url;
+  const char *authz_url;
+  const char *empty_authz_url;
+  const char *noent_authz_url;
+  svn_error_t *err;
+  struct check_access_tests test_set[] = {
+    /* reads */
+    { "/A", NULL, NULL, svn_authz_read, FALSE },
+    { "/A", NULL, "plato", svn_authz_read, TRUE },
+    { "/A", NULL, "socrates", svn_authz_read, TRUE },
+    { "/A", NULL, "solon", svn_authz_read, TRUE },
+    { "/A", NULL, "ephialtes", svn_authz_read, TRUE },
+    /* writes */
+    { "/A", NULL, NULL, svn_authz_write, FALSE },
+    { "/A", NULL, "plato", svn_authz_write, FALSE },
+    { "/A", NULL, "socrates", svn_authz_write, FALSE },
+    { "/A", NULL, "solon", svn_authz_write, TRUE },
+    { "/A", NULL, "ephialtes", svn_authz_write, TRUE },
+    /* Sentinel */
+    { NULL, NULL, NULL, svn_authz_none, FALSE }
+  };
+
+  /* Test plan:
+   * 1. Create an authz file, a global groups file and an empty authz file,
+   *    put all these files in the repository.  The empty authz file is
+   *    required to perform the non-existent path checks (4-7) --
+   *    otherwise we would get the authz validation error due to undefined
+   *    groups.
+   * 2. Verify that the groups file can be read with a relative URL.
+   * 3. Verify that the groups file can be read with an absolute URL.
+   * 4. Verify that non-existent groups file path does not error out when
+   *    must_exist is FALSE.
+   * 5. Same as (4), but when both authz and groups file paths do
+   *    not exist.
+   * 6. Verify that non-existent path for the groups file does error out when
+   *    must_exist is TRUE.
+   * 7. Verify that an http:// URL produces an error.
+   * 8. Verify that an svn:// URL produces an error.
+   */
+
+  /* What we'll put in the authz and groups files, it's simple since
+   * we're not testing the parsing, just that we got what we expected. */
+
+  groups_contents =
+    "[groups]" NL
+    "philosophers = plato, socrates" NL
+    "senate = solon, ephialtes" NL
+    "" NL;
+
+  authz_contents =
+    "[/]" NL
+    "@senate = rw" NL
+    "@philosophers = r" NL
+    "" NL;
+
+  /* Create a filesystem and repository. */
+  SVN_ERR(svn_test__create_repos(&repos,
+                                 "test-repo-in-repo-global-groups-authz",
+                                 opts, pool));
+  fs = svn_repos_fs(repos);
+
+  /* Commit the authz, empty authz and groups files to the repo. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+  SVN_ERR(svn_fs_make_file(txn_root, "groups", pool));
+  SVN_ERR(svn_fs_make_file(txn_root, "authz", pool));
+  SVN_ERR(svn_fs_make_file(txn_root, "empty-authz", pool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "groups",
+                                      groups_contents, pool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "authz",
+                                      authz_contents, pool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "empty-authz", "", pool));
+  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+  /* Calculate URLs */
+  repos_root = svn_repos_path(repos, pool);
+  SVN_ERR(svn_uri_get_file_url_from_dirent(&repos_url, repos_root, pool));
+  authz_url = svn_path_url_add_component2(repos_url, "authz", pool);
+  empty_authz_url = svn_path_url_add_component2(repos_url, "empty-authz", pool);
+  noent_authz_url = svn_path_url_add_component2(repos_url, "A/authz", pool);
+  groups_url = svn_path_url_add_component2(repos_url, "groups", pool);
+  noent_groups_url = svn_path_url_add_component2(repos_url, "A/groups", pool);
+
+
+  /* absolute file URLs. */
+  SVN_ERR(svn_repos_authz_read2(&authz_cfg, authz_url, groups_url, TRUE, pool));
+  SVN_ERR(authz_check_access(authz_cfg, test_set, pool));
+
+  /* Non-existent path for the groups file with must_exist
+   * set to FALSE */
+  SVN_ERR(svn_repos_authz_read2(&authz_cfg, empty_authz_url, noent_groups_url,
+                                FALSE, pool));
+
+  /* Non-existent paths for both the authz and the groups files
+   * with must_exist set to FALSE */
+  SVN_ERR(svn_repos_authz_read2(&authz_cfg, noent_authz_url, noent_groups_url,
+                                FALSE, pool));
+
+  /* Non-existent path for the groups file with must_exist
+   * set to TRUE */
+  err = svn_repos_authz_read2(&authz_cfg, empty_authz_url, noent_groups_url,
+                              TRUE, pool);
+  if (!err || err->apr_err != SVN_ERR_ILLEGAL_TARGET)
+    return svn_error_createf(SVN_ERR_TEST_FAILED, err,
+                             "Got %s error instead of expected "
+                             "SVN_ERR_ILLEGAL_TARGET",
+                             err ? "unexpected" : "no");
+  svn_error_clear(err);
+
+  /* http:// URL which is unsupported */
+  err = svn_repos_authz_read2(&authz_cfg, empty_authz_url,
+                              "http://example.com/repo/groups",
+                              TRUE, pool);
+  if (!err || err->apr_err != SVN_ERR_RA_ILLEGAL_URL)
+    return svn_error_createf(SVN_ERR_TEST_FAILED, err,
+                             "Got %s error instead of expected "
+                             "SVN_ERR_RA_ILLEGAL_URL",
+                             err ? "unexpected" : "no");
+  svn_error_clear(err);
+
+  /* svn:// URL which is unsupported.  (This used to pass an http:// URL
+   * by copy-paste mistake, leaving the svn:// scheme untested.) */
+  err = svn_repos_authz_read2(&authz_cfg, empty_authz_url,
+                              "svn://example.com/repo/groups",
+                              TRUE, pool);
+  if (!err || err->apr_err != SVN_ERR_RA_ILLEGAL_URL)
+    return svn_error_createf(SVN_ERR_TEST_FAILED, err,
+                             "Got %s error instead of expected "
+                             "SVN_ERR_RA_ILLEGAL_URL",
+                             err ? "unexpected" : "no");
+  svn_error_clear(err);
+
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Helper for the groups_authz test.  Set *AUTHZ_P to a representation of
+   AUTHZ_CONTENTS in conjunction with GROUPS_CONTENTS, using POOL for
+   temporary allocation.  If DISK is TRUE, write the contents to temporary
+   files and use svn_repos_authz_read2() to get the data; if FALSE, write
+   the data to buffered streams and use svn_repos_authz_parse(). */
+static svn_error_t *
+authz_groups_get_handle(svn_authz_t **authz_p,
+                        const char *authz_contents,
+                        const char *groups_contents,
+                        svn_boolean_t disk,
+                        apr_pool_t *pool)
+{
+  if (disk)
+    {
+      const char *authz_file_path;
+      const char *groups_file_path;
+
+      /* Create temporary files. */
+      SVN_ERR_W(svn_io_write_unique(&authz_file_path, NULL,
+                                    authz_contents,
+                                    strlen(authz_contents),
+                                    svn_io_file_del_on_pool_cleanup, pool),
+                "Writing temporary authz file");
+      SVN_ERR_W(svn_io_write_unique(&groups_file_path, NULL,
+                                    groups_contents,
+                                    strlen(groups_contents),
+                                    svn_io_file_del_on_pool_cleanup, pool),
+                "Writing temporary groups file");
+
+      /* Read the authz configuration back and start testing. */
+      SVN_ERR_W(svn_repos_authz_read2(authz_p, authz_file_path,
+                                      groups_file_path, TRUE, pool),
+                "Opening test authz and groups files");
+
+      /* Done with the files. */
+      SVN_ERR_W(svn_io_remove_file(authz_file_path, pool),
+                "Removing test authz file");
+      SVN_ERR_W(svn_io_remove_file(groups_file_path, pool),
+                "Removing test groups file");
+    }
+  else
+    {
+      svn_stream_t *stream;
+      svn_stream_t *groups_stream;
+
+      /* Create the streams. */
+      stream = svn_stream_buffered(pool);
+      groups_stream = svn_stream_buffered(pool);
+
+      SVN_ERR_W(svn_stream_puts(stream, authz_contents),
+                "Writing authz contents to stream");
+      SVN_ERR_W(svn_stream_puts(groups_stream, groups_contents),
+                "Writing groups contents to stream");
+
+      /* Read the authz configuration from the streams and start testing. */
+      SVN_ERR_W(svn_repos_authz_parse(authz_p, stream, groups_stream, pool),
+                "Parsing the authz and groups contents");
+
+      /* Done with the streams. */
+      SVN_ERR_W(svn_stream_close(stream),
+                "Closing the authz stream");
+      SVN_ERR_W(svn_stream_close(groups_stream),
+                "Closing the groups stream");
+    }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test authz with global groups.  Each scenario is checked twice: once
+   for configurations read from disk and once for configurations parsed
+   from in-memory streams (see authz_groups_get_handle). */
+static svn_error_t *
+groups_authz(const svn_test_opts_t *opts,
+             apr_pool_t *pool)
+{
+  svn_authz_t *authz_cfg;
+  const char *authz_contents;
+  const char *groups_contents;
+
+  struct check_access_tests test_set1[] = {
+    /* reads */
+    { "/A", "greek", NULL, svn_authz_read, FALSE },
+    { "/A", "greek", "plato", svn_authz_read, TRUE },
+    { "/A", "greek", "demetrius", svn_authz_read, TRUE },
+    { "/A", "greek", "galenos", svn_authz_read, TRUE },
+    { "/A", "greek", "pamphilos", svn_authz_read, FALSE },
+    /* writes */
+    { "/A", "greek", NULL, svn_authz_write, FALSE },
+    { "/A", "greek", "plato", svn_authz_write, TRUE },
+    { "/A", "greek", "demetrius", svn_authz_write, FALSE },
+    { "/A", "greek", "galenos", svn_authz_write, FALSE },
+    { "/A", "greek", "pamphilos", svn_authz_write, FALSE },
+    /* Sentinel */
+    { NULL, NULL, NULL, svn_authz_none, FALSE }
+  };
+
+  struct check_access_tests test_set2[] = {
+    /* reads */
+    { "/A", "greek", NULL, svn_authz_read, FALSE },
+    { "/A", "greek", "socrates", svn_authz_read, FALSE },
+    { "/B", "greek", NULL, svn_authz_read, FALSE},
+    { "/B", "greek", "socrates", svn_authz_read, TRUE },
+    /* writes */
+    { "/A", "greek", NULL, svn_authz_write, FALSE },
+    { "/A", "greek", "socrates", svn_authz_write, FALSE },
+    { "/B", "greek", NULL, svn_authz_write, FALSE},
+    { "/B", "greek", "socrates", svn_authz_write, TRUE },
+    /* Sentinel */
+    { NULL, NULL, NULL, svn_authz_none, FALSE }
+  };
+
+  /* Test plan:
+   * 1. Ensure that a simple setup with global groups and access rights in
+   *    two separate files works as expected.
+   * 2. Verify that access rights written in the global groups file are
+   *    discarded and affect nothing in authorization terms.
+   * 3. Verify that local groups in the authz file are prohibited in
+   *    conjunction with global groups (and that a configuration error is
+   *    reported in this scenario).
+   * 4. Ensure that group cycles in the global groups file are reported.
+   *
+   * All checks are performed twice -- for the configurations stored on disk
+   * and in memory.  See authz_groups_get_handle.
+   */
+
+  groups_contents =
+    "[groups]" NL
+    "slaves = pamphilos,@gladiators" NL
+    "gladiators = demetrius,galenos" NL
+    "philosophers = plato" NL
+    "" NL;
+
+  authz_contents =
+    "[greek:/A]" NL
+    "@slaves = " NL
+    "@gladiators = r" NL
+    "@philosophers = rw" NL
+    "" NL;
+
+  SVN_ERR(authz_groups_get_handle(&authz_cfg, authz_contents,
+                                  groups_contents, TRUE, pool));
+
+  SVN_ERR(authz_check_access(authz_cfg, test_set1, pool));
+
+  SVN_ERR(authz_groups_get_handle(&authz_cfg, authz_contents,
+                                  groups_contents, FALSE, pool));
+
+  SVN_ERR(authz_check_access(authz_cfg, test_set1, pool));
+
+  /* Access rights in the global groups file are forbidden. */
+  groups_contents =
+    "[groups]" NL
+    "philosophers = socrates" NL
+    "" NL
+    "[greek:/A]" NL
+    "@philosophers = rw" NL
+    "" NL;
+
+  authz_contents =
+    "[greek:/B]" NL
+    "@philosophers = rw" NL
+    "" NL;
+
+  SVN_TEST_ASSERT_ERROR(
+      authz_groups_get_handle(&authz_cfg, authz_contents,
+                              groups_contents, TRUE, pool),
+      SVN_ERR_AUTHZ_INVALID_CONFIG);
+  SVN_TEST_ASSERT_ERROR(
+      authz_groups_get_handle(&authz_cfg, authz_contents,
+                              groups_contents, FALSE, pool),
+      SVN_ERR_AUTHZ_INVALID_CONFIG);
+
+  groups_contents =
+    "[groups]" NL
+    "philosophers = socrates" NL
+    "" NL;
+  SVN_ERR(authz_groups_get_handle(&authz_cfg, authz_contents,
+                                  groups_contents, TRUE, pool));
+
+  SVN_ERR(authz_check_access(authz_cfg, test_set2, pool));
+
+  SVN_ERR(authz_groups_get_handle(&authz_cfg, authz_contents,
+                                  groups_contents, FALSE, pool));
+
+  SVN_ERR(authz_check_access(authz_cfg, test_set2, pool));
+
+  /* Local groups cannot be used in conjunction with global groups. */
+  groups_contents =
+    "[groups]" NL
+    "slaves = maximus" NL
+    "" NL;
+
+  authz_contents =
+    "[greek:/A]" NL
+    "@slaves = " NL
+    "@kings = rw" NL
+    "" NL
+    "[groups]" NL
+    /* That's an epic story of the slave who tried to become a king. */
+    "kings = maximus" NL
+    "" NL;
+
+  SVN_TEST_ASSERT_ERROR(
+      authz_groups_get_handle(&authz_cfg, authz_contents,
+                              groups_contents, TRUE, pool),
+      SVN_ERR_AUTHZ_INVALID_CONFIG);
+  SVN_TEST_ASSERT_ERROR(
+      authz_groups_get_handle(&authz_cfg, authz_contents,
+                              groups_contents, FALSE, pool),
+      SVN_ERR_AUTHZ_INVALID_CONFIG);
+
+  /* Ensure that group cycles are reported. */
+  groups_contents =
+    "[groups]" NL
+    "slaves = cooks,scribes,@gladiators" NL
+    "gladiators = equites,thraces,@slaves" NL
+    "" NL;
+
+  authz_contents =
+    "[greek:/A]" NL
+    "@slaves = r" NL
+    "" NL;
+
+  SVN_TEST_ASSERT_ERROR(
+      authz_groups_get_handle(&authz_cfg, authz_contents,
+                              groups_contents, TRUE, pool),
+      SVN_ERR_AUTHZ_INVALID_CONFIG);
+  SVN_TEST_ASSERT_ERROR(
+      authz_groups_get_handle(&authz_cfg, authz_contents,
+                              groups_contents, FALSE, pool),
+      SVN_ERR_AUTHZ_INVALID_CONFIG);
+
+  return SVN_NO_ERROR;
+}
+
+/* Callback for the commit editor tests that relays requests to authz:
+   checks whether user "plato" has REQUIRED access to PATH in repository
+   "test", consulting the authz configuration passed in BATON and
+   setting *ALLOWED accordingly. */
+static svn_error_t *
+commit_authz_cb(svn_repos_authz_access_t required,
+                svn_boolean_t *allowed,
+                svn_fs_root_t *root,
+                const char *path,
+                void *baton,
+                apr_pool_t *pool)
+{
+  svn_authz_t *authz = baton;
+
+  /* ROOT is not needed for the access check. */
+  return svn_repos_authz_check_access(authz, "test", path, "plato",
+                                      required, allowed, pool);
+}
+
+
+
+/* The kinds of commit-editor operations test_path_authz() can perform. */
+enum action_t {
+  A_DELETE,
+  A_ADD_FILE,
+  A_ADD_DIR,
+  A_CHANGE_FILE_PROP
+};
+/* One editor action to run against the test repository, together with
+   the expected authz outcome. */
+struct authz_path_action_t
+{
+  /* Which editor call to make. */
+  enum action_t action;
+  /* The repository path the action targets. */
+  const char *path;
+  /* TRUE iff the action is expected to fail with
+     SVN_ERR_AUTHZ_UNWRITABLE (see test_path_authz). */
+  svn_boolean_t authz_error_expected;
+  /* Optional copyfrom source URL for add actions; NULL for plain adds
+     (and for the other action kinds). */
+  const char *copyfrom_path;
+};
+
+/* Return the appropriate dir baton for the parent of PATH in *DIR_BATON,
+   allocated in POOL.  Starts from ROOT_BATON and opens every intermediate
+   directory via EDITOR; the final path component itself is not opened. */
+static svn_error_t *
+get_dir_baton(void **dir_baton,
+              const char *path,
+              const svn_delta_editor_t *editor,
+              void *root_baton,
+              apr_pool_t *pool)
+{
+  apr_array_header_t *components = svn_path_decompose(path, pool);
+  const char *opened_path = "";
+  int i;
+
+  *dir_baton = root_baton;
+  for (i = 0; i < components->nelts - 1; i++)
+    {
+      const char *component = APR_ARRAY_IDX(components, i, const char *);
+
+      opened_path = svn_path_join(opened_path, component, pool);
+      SVN_ERR(editor->open_directory(opened_path, *dir_baton,
+                                     SVN_INVALID_REVNUM, pool, dir_baton));
+    }
+
+  return SVN_NO_ERROR;
+}
+
+/* Return the appropriate file baton for PATH in *FILE_BATON, allocated
+   in POOL.  Opens all intermediate directories below ROOT_BATON via
+   get_dir_baton(), then opens the file itself through EDITOR. */
+static svn_error_t *
+get_file_baton(void **file_baton,
+               const char *path,
+               const svn_delta_editor_t *editor,
+               void *root_baton,
+               apr_pool_t *pool)
+{
+  void *parent_baton;
+
+  SVN_ERR(get_dir_baton(&parent_baton, path, editor, root_baton, pool));
+  SVN_ERR(editor->open_file(path, parent_baton, SVN_INVALID_REVNUM, pool,
+                            file_baton));
+
+  return SVN_NO_ERROR;
+}
+
+/* Run PATH_ACTION against REPOS through a freshly created commit editor
+   that consults AUTHZ_FILE (via commit_authz_cb, acting as user "plato"),
+   and verify the outcome: expect SVN_ERR_AUTHZ_UNWRITABLE if and only if
+   PATH_ACTION->authz_error_expected is set.  YOUNGEST_REV is passed as
+   the copyfrom revision for copy-style adds.  Uses SCRATCH_POOL for all
+   temporary allocations. */
+static svn_error_t *
+test_path_authz(svn_repos_t *repos,
+                struct authz_path_action_t *path_action,
+                svn_authz_t *authz_file,
+                svn_revnum_t youngest_rev,
+                apr_pool_t *scratch_pool)
+{
+  void *edit_baton;
+  void *root_baton;
+  void *dir_baton;
+  void *file_baton;
+  void *out_baton;
+  const svn_delta_editor_t *editor;
+  svn_error_t *err;
+  svn_error_t *err2;
+
+  /* Create a new commit editor in which we're going to play with
+     authz */
+  SVN_ERR(svn_repos_get_commit_editor4(&editor, &edit_baton, repos,
+                                       NULL, "file://test", "/",
+                                       "plato", "test commit", NULL,
+                                       NULL, commit_authz_cb, authz_file,
+                                       scratch_pool));
+
+  /* Start fiddling.  First get the root, which is readonly. */
+  SVN_ERR(editor->open_root(edit_baton, 1, scratch_pool, &root_baton));
+
+  /* Fetch the appropriate baton for our action.  This may involve opening
+     intermediate batons, but we only care about the final one for the
+     corresponding action. */
+  if (path_action->action == A_CHANGE_FILE_PROP)
+    SVN_ERR(get_file_baton(&file_baton, path_action->path, editor, root_baton,
+                           scratch_pool));
+  else
+    SVN_ERR(get_dir_baton(&dir_baton, path_action->path, editor, root_baton,
+                          scratch_pool));
+
+  /* Test the appropriate action. */
+  switch (path_action->action)
+    {
+      case A_DELETE:
+        err = editor->delete_entry(path_action->path, SVN_INVALID_REVNUM,
+                                   dir_baton, scratch_pool);
+        break;
+
+      case A_CHANGE_FILE_PROP:
+        err = editor->change_file_prop(file_baton, "svn:test",
+                                       svn_string_create("test", scratch_pool),
+                                       scratch_pool);
+        break;
+
+      case A_ADD_FILE:
+        err = editor->add_file(path_action->path, dir_baton,
+                               path_action->copyfrom_path, youngest_rev,
+                               scratch_pool, &out_baton);
+        break;
+
+      case A_ADD_DIR:
+        err = editor->add_directory(path_action->path, dir_baton,
+                                    path_action->copyfrom_path, youngest_rev,
+                                    scratch_pool, &out_baton);
+        break;
+
+      /* An unknown action is a bug in the test table. */
+      default:
+        SVN_TEST_ASSERT(FALSE);
+    }
+
+  /* Don't worry about closing batons, just abort the edit.  Since errors
+     may be delayed, we need to capture results of the abort as well. */
+  err2 = editor->abort_edit(edit_baton, scratch_pool);
+  if (!err)
+    err = err2;
+  else
+    svn_error_clear(err2);
+
+  /* Check for potential errors. */
+  if (path_action->authz_error_expected)
+    SVN_TEST_ASSERT_ERROR(err, SVN_ERR_AUTHZ_UNWRITABLE);
+  else
+    SVN_ERR(err);
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Test that the commit editor is taking authz into account
+   properly */
+static svn_error_t *
+commit_editor_authz(const svn_test_opts_t *opts,
+                    apr_pool_t *pool)
+{
+  svn_repos_t *repos;
+  svn_fs_t *fs;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *txn_root;
+  svn_revnum_t youngest_rev;
+  svn_authz_t *authz_file;
+  apr_pool_t *iterpool;
+  const char *authz_contents;
+  int i;
+  /* The editor actions to attempt, each with its expected authz outcome;
+     see struct authz_path_action_t.  Entries without a fourth initializer
+     have a NULL copyfrom_path. */
+  struct authz_path_action_t path_actions[] = {
+    { A_DELETE,             "/iota",      TRUE },
+    { A_CHANGE_FILE_PROP,   "/iota",      TRUE },
+    { A_ADD_FILE,           "/alpha",     TRUE },
+    { A_ADD_FILE,           "/alpha",     TRUE,   "file://test/A/B/lambda" },
+    { A_ADD_DIR,            "/I",         TRUE },
+    { A_ADD_DIR,            "/J",         TRUE,   "file://test/A/D" },
+    { A_ADD_FILE,           "/A/alpha",   TRUE },
+    { A_ADD_FILE,           "/A/B/theta", FALSE },
+    { A_DELETE,             "/A/mu",      FALSE },
+    { A_ADD_DIR,            "/A/E",       FALSE },
+    { A_ADD_DIR,            "/A/J",       FALSE,  "file://test/A/D" },
+    { A_DELETE,             "A/D/G",      TRUE },
+    { A_DELETE,             "A/D/H",      FALSE },
+    { A_CHANGE_FILE_PROP,   "A/D/gamma",  FALSE }
+  };
+
+  /* The Test Plan
+   *
+   * We create a greek tree repository, then create a commit editor
+   * and try to perform various operations that will run into authz
+   * callbacks.  Check that all operations are properly
+   * authorized/denied when necessary.  We don't try to be exhaustive
+   * in the kinds of authz lookups.  We just make sure that the editor
+   * replies to the calls in a way that proves it is doing authz
+   * lookups.  Some actions are tested implicitly (such as open_file being
+   * required for change_file_props).
+   *
+   * Note that because of the error handling requirements of the generic
+   * editor API, each operation needs its own editor, which is handled by
+   * a helper function above.
+   */
+
+  /* Create a filesystem and repository. */
+  SVN_ERR(svn_test__create_repos(&repos, "test-repo-commit-authz",
+                                 opts, pool));
+  fs = svn_repos_fs(repos);
+
+  /* Prepare a txn to receive the greek tree. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+  SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+  /* Load the authz rules for the greek tree. */
+  authz_contents =
+    "" NL
+    "" NL
+    "[/]" NL
+    "plato = r" NL
+    "" NL
+    "[/A]" NL
+    "plato = rw" NL
+    "" NL
+    "[/A/alpha]" NL
+    "plato = " NL
+    "" NL
+    "[/A/C]" NL
+    "" NL
+    "plato = " NL
+    "" NL
+    "[/A/D]" NL
+    "plato = rw" NL
+    "" NL
+    "[/A/D/G]" NL
+    "plato = r"; /* No newline at end of file. */
+
+  SVN_ERR(authz_get_handle(&authz_file, authz_contents, FALSE, pool));
+
+  /* Run each action in its own commit editor (see test_path_authz). */
+  iterpool = svn_pool_create(pool);
+  for (i = 0; i < (sizeof(path_actions) / sizeof(struct authz_path_action_t));
+       i++)
+    {
+      svn_pool_clear(iterpool);
+      SVN_ERR(test_path_authz(repos, &path_actions[i], authz_file,
+                              youngest_rev, iterpool));
+    }
+
+  svn_pool_destroy(iterpool);
+
+  return SVN_NO_ERROR;
+}
+
+/* This implements svn_commit_callback2_t.  A no-op: all arguments are
+   ignored, for tests that need a commit callback but do not inspect the
+   commit result. */
+static svn_error_t *
+dummy_commit_cb(const svn_commit_info_t *commit_info,
+                void *baton, apr_pool_t *pool)
+{
+  return SVN_NO_ERROR;
+}
+
+/* Test using explicit txns during a commit. */
+static svn_error_t *
+commit_continue_txn(const svn_test_opts_t *opts,
+                    apr_pool_t *pool)
+{
+  svn_repos_t *repos;
+  svn_fs_t *fs;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *txn_root, *revision_root;
+  svn_revnum_t youngest_rev;
+  void *edit_baton;
+  void *root_baton, *file_baton;
+  const svn_delta_editor_t *editor;
+  apr_pool_t *subpool = svn_pool_create(pool);
+  const char *txn_name;
+
+  /* The Test Plan
+   *
+   * We create a greek tree repository, then create a transaction and
+   * a commit editor from that txn.  We do one change, abort the edit, reopen
+   * the txn and create a new commit editor, do another change and commit.
+   * We check that both changes were done.
+   */
+
+  /* Create a filesystem and repository. */
+  SVN_ERR(svn_test__create_repos(&repos, "test-repo-commit-continue",
+                                 opts, subpool));
+  fs = svn_repos_fs(repos);
+
+  /* Prepare a txn to receive the greek tree. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_test__create_greek_tree(txn_root, subpool));
+  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+  /* Remember the txn name so we can reopen the same txn after aborting
+     the first edit below. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_name(&txn_name, txn, subpool));
+  SVN_ERR(svn_repos_get_commit_editor4(&editor, &edit_baton, repos,
+                                       txn, "file://test", "/",
+                                       "plato", "test commit",
+                                       dummy_commit_cb, NULL, NULL, NULL,
+                                       subpool));
+
+  SVN_ERR(editor->open_root(edit_baton, 1, subpool, &root_baton));
+
+  SVN_ERR(editor->add_file("/f1", root_baton, NULL, SVN_INVALID_REVNUM,
+                           subpool, &file_baton));
+  SVN_ERR(editor->close_file(file_baton, NULL, subpool));
+  /* This should leave the transaction. */
+  SVN_ERR(editor->abort_edit(edit_baton, subpool));
+
+  /* Reopen the transaction. */
+  SVN_ERR(svn_fs_open_txn(&txn, fs, txn_name, subpool));
+  SVN_ERR(svn_repos_get_commit_editor4(&editor, &edit_baton, repos,
+                                       txn, "file://test", "/",
+                                       "plato", "test commit",
+                                       dummy_commit_cb,
+                                       NULL, NULL, NULL,
+                                       subpool));
+
+  SVN_ERR(editor->open_root(edit_baton, 1, subpool, &root_baton));
+
+  SVN_ERR(editor->add_file("/f2", root_baton, NULL, SVN_INVALID_REVNUM,
+                           subpool, &file_baton));
+  SVN_ERR(editor->close_file(file_baton, NULL, subpool));
+
+  /* Finally, commit it. */
+  SVN_ERR(editor->close_edit(edit_baton, subpool));
+
+  /* Check that the edits really happened: both /f1 (from the aborted
+     edit's txn) and /f2 must be present in r2. */
+  {
+    static svn_test__tree_entry_t expected_entries[] = {
+      /* path, contents (0 = dir) */
+      { "iota",        "This is the file 'iota'.\n" },
+      { "A",           0 },
+      { "A/mu",        "This is the file 'mu'.\n" },
+      { "A/B",         0 },
+      { "A/B/lambda",  "This is the file 'lambda'.\n" },
+      { "A/B/E",       0 },
+      { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+      { "A/B/E/beta",  "This is the file 'beta'.\n" },
+      { "A/B/F",       0 },
+      { "A/C",         0 },
+      { "A/D",         0 },
+      { "A/D/gamma",   "This is the file 'gamma'.\n" },
+      { "A/D/G",       0 },
+      { "A/D/G/pi",    "This is the file 'pi'.\n" },
+      { "A/D/G/rho",   "This is the file 'rho'.\n" },
+      { "A/D/G/tau",   "This is the file 'tau'.\n" },
+      { "A/D/H",       0 },
+      { "A/D/H/chi",   "This is the file 'chi'.\n" },
+      { "A/D/H/psi",   "This is the file 'psi'.\n" },
+      { "A/D/H/omega", "This is the file 'omega'.\n" },
+      { "f1",          "" },
+      { "f2",          "" }
+    };
+    SVN_ERR(svn_fs_revision_root(&revision_root, fs,
+                                 2, subpool));
+    SVN_ERR(svn_test__validate_tree
+            (revision_root, expected_entries,
+             sizeof(expected_entries) / sizeof(expected_entries[0]),
+             subpool));
+  }
+
+  svn_pool_destroy(subpool);
+
+  return SVN_NO_ERROR;
+}
+
+
+
+/* A baton for check_location_segments(). */
+struct nls_receiver_baton
+{
+  /* Number of segments received and matched so far; indexes into
+     EXPECTED_SEGMENTS. */
+  int count;
+  /* Array of expected segments, terminated by an entry whose
+     range_end is 0. */
+  const svn_location_segment_t *expected_segments;
+};
+
+/* Return a pretty-printed string representing SEGMENT, of the form
+   "[rSTART-rEND: /PATH]", allocated in POOL.  A NULL path is rendered
+   as "(null)". */
+static const char *
+format_segment(const svn_location_segment_t *segment,
+               apr_pool_t *pool)
+{
+  const char *path_str = segment->path ? segment->path : "(null)";
+
+  return apr_psprintf(pool, "[r%ld-r%ld: /%s]",
+                      segment->range_start, segment->range_end, path_str);
+}
+
+/* A location segment receiver for check_location_segments().
+ * Implements svn_location_segment_receiver_t.  BATON is a struct
+ * nls_receiver_baton; each received SEGMENT is compared against the
+ * next expected segment, and b->count is advanced only on a match. */
+static svn_error_t *
+nls_receiver(svn_location_segment_t *segment,
+             void *baton,
+             apr_pool_t *pool)
+{
+  struct nls_receiver_baton *b = baton;
+  const svn_location_segment_t *expected_segment = b->expected_segments + b->count;
+
+  /* expected_segments->range_end can't be 0, so if we see that, it's
+     our end-of-the-list sentry. */
+  if (! expected_segment->range_end)
+    return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                             "Got unexpected location segment: %s",
+                             format_segment(segment, pool));
+
+  /* All three fields must match; path comparison is NULL-tolerant. */
+  if (expected_segment->range_start != segment->range_start
+      || expected_segment->range_end != segment->range_end
+      || strcmp_null(expected_segment->path, segment->path) != 0)
+    return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                             "Location segments differ\n"
+                             "   Expected location segment: %s\n"
+                             "     Actual location segment: %s",
+                             format_segment(expected_segment, pool),
+                             format_segment(segment, pool));
+  b->count++;
+  return SVN_NO_ERROR;
+}
+
+/* Run a svn_repos_node_location_segments() query with REPOS, PATH, PEG_REV,
+ * START_REV, END_REV.  Check that the result exactly matches the list of
+ * segments EXPECTED_SEGMENTS, which is terminated by an entry with
+ * 'range_end'==0.  (Per-segment matching happens in nls_receiver; this
+ * function additionally verifies that no expected segment was missed.)
+ */
+static svn_error_t *
+check_location_segments(svn_repos_t *repos,
+                        const char *path,
+                        svn_revnum_t peg_rev,
+                        svn_revnum_t start_rev,
+                        svn_revnum_t end_rev,
+                        const svn_location_segment_t *expected_segments,
+                        apr_pool_t *pool)
+{
+  struct nls_receiver_baton b;
+  const svn_location_segment_t *segment;
+
+  /* Run svn_repos_node_location_segments() with a receiver that
+     validates against EXPECTED_SEGMENTS. */
+  b.count = 0;
+  b.expected_segments = expected_segments;
+  SVN_ERR(svn_repos_node_location_segments(repos, path, peg_rev,
+                                           start_rev, end_rev, nls_receiver,
+                                           &b, NULL, NULL, pool));
+
+  /* Make sure we saw all of our expected segments.  (If the
+     'range_end' member of our expected_segments is 0, it's our
+     end-of-the-list sentry.  Otherwise, it's some segment we expect
+     to see.)  If not, raise an error. */
+  segment = expected_segments + b.count;
+  if (segment->range_end)
+    return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                             "Failed to get expected location segment: %s",
+                             format_segment(segment, pool));
+  return SVN_NO_ERROR;
+}
+
+/* Inputs and expected outputs for svn_repos_node_location_segments() tests.
+   An array of these is terminated by an entry with a NULL path (see
+   node_location_segments). */
+typedef struct location_segment_test_t
+{
+  /* Path and peg revision to query */
+  const char *path;
+  svn_revnum_t peg;
+  /* Start (youngest) and end (oldest) revisions to query */
+  svn_revnum_t start;
+  svn_revnum_t end;
+
+  /* Expected segments, terminated by an entry with range_end == 0;
+     room for at most 9 real segments plus the sentinel. */
+  svn_location_segment_t segments[10];
+
+} location_segment_test_t;
+
+/* Build a small copy/delete/restore history for A/D and A/D2, then check
+   svn_repos_node_location_segments() answers for a table of queries. */
+static svn_error_t *
+node_location_segments(const svn_test_opts_t *opts,
+                       apr_pool_t *pool)
+{
+  apr_pool_t *subpool = svn_pool_create(pool);
+  svn_repos_t *repos;
+  svn_fs_t *fs;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *txn_root, *root;
+  svn_revnum_t youngest_rev = 0;
+
+  /* The queries to run; terminated by an entry with a NULL path. */
+  static const location_segment_test_t subtests[] =
+    {
+      { /* Check locations for /@HEAD. */
+        "", SVN_INVALID_REVNUM, SVN_INVALID_REVNUM, SVN_INVALID_REVNUM,
+        {
+          { 0, 7, "" },
+          { 0 }
+        }
+      },
+      { /* Check locations for A/D@HEAD. */
+        "A/D", SVN_INVALID_REVNUM, SVN_INVALID_REVNUM, SVN_INVALID_REVNUM,
+        {
+          { 7, 7, "A/D" },
+          { 3, 6, "A/D2" },
+          { 1, 2, "A/D" },
+          { 0 }
+        }
+      },
+      { /* Check a subset of the locations for A/D@HEAD. */
+        "A/D", SVN_INVALID_REVNUM, 5, 2,
+        {
+          { 3, 5, "A/D2" },
+          { 2, 2, "A/D" },
+          { 0 }
+        },
+      },
+      { /* Check a subset of locations for A/D2@5. */
+        "A/D2", 5, 3, 2,
+        {
+          { 3, 3, "A/D2" },
+          { 2, 2, "A/D" },
+          { 0 }
+        },
+      },
+      { /* Check locations for A/D@6. */
+        "A/D", 6, 6, SVN_INVALID_REVNUM,
+        {
+          { 1, 6, "A/D" },
+          { 0 }
+        },
+      },
+      { /* Check locations for A/D/G@HEAD. */
+        "A/D/G", SVN_INVALID_REVNUM, SVN_INVALID_REVNUM, SVN_INVALID_REVNUM,
+        {
+          { 7, 7, "A/D/G" },
+          { 6, 6, "A/D2/G" },
+          { 5, 5, NULL },
+          { 3, 4, "A/D2/G" },
+          { 1, 2, "A/D/G" },
+          { 0 }
+        },
+      },
+      { /* Check a subset of the locations for A/D/G@HEAD. */
+        "A/D/G", SVN_INVALID_REVNUM, 3, 2,
+        {
+          { 3, 3, "A/D2/G" },
+          { 2, 2, "A/D/G" },
+          { 0 }
+        },
+      },
+      {
+        NULL
+      },
+    };
+  const location_segment_test_t *subtest;
+
+  /* Bail (with success) on known-untestable scenarios */
+  if ((strcmp(opts->fs_type, "bdb") == 0)
+      && (opts->server_minor_version == 4))
+    return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+                            "not supported for BDB in SVN 1.4");
+
+  /* Create the repository. */
+  SVN_ERR(svn_test__create_repos(&repos, "test-repo-node-location-segments",
+                                 opts, pool));
+  fs = svn_repos_fs(repos);
+
+  /* Revision 1: Create the Greek tree.  */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_test__create_greek_tree(txn_root, subpool));
+  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(subpool);
+
+  /* Revision 2: Modify A/D/H/chi and A/B/E/alpha.  */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/H/chi", "2", subpool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/B/E/alpha", "2", subpool));
+  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(subpool);
+
+  /* Revision 3: Copy A/D to A/D2.  */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_fs_revision_root(&root, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_copy(root, "A/D", txn_root, "A/D2", subpool));
+  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(subpool);
+
+  /* Revision 4: Modify A/D/H/chi and A/D2/H/chi.  */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/D/H/chi", "4", subpool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/D2/H/chi", "4", subpool));
+  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(subpool);
+
+  /* Revision 5: Delete A/D2/G.  */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_fs_delete(txn_root, "A/D2/G", subpool));
+  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(subpool);
+
+  /* Revision 6: Restore A/D2/G (from version 4).  */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_fs_revision_root(&root, fs, 4, subpool));
+  SVN_ERR(svn_fs_copy(root, "A/D2/G", txn_root, "A/D2/G", subpool));
+  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(subpool);
+
+  /* Revision 7: Move A/D2 to A/D (replacing it).  */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_fs_revision_root(&root, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_delete(txn_root, "A/D", subpool));
+  SVN_ERR(svn_fs_copy(root, "A/D2", txn_root, "A/D", subpool));
+  SVN_ERR(svn_fs_delete(txn_root, "A/D2", subpool));
+  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(subpool);
+
+  /* Run each query in the subtest table against the history above. */
+  for (subtest = subtests; subtest->path; subtest++)
+    {
+      SVN_ERR(check_location_segments(repos, subtest->path, subtest->peg,
+                                      subtest->start, subtest->end,
+                                      subtest->segments, pool));
+    }
+
+  return SVN_NO_ERROR;
+}
+
+
+
+/* Test that the reporter doesn't send deltas under excluded paths. */
+static svn_error_t *
+reporter_depth_exclude(const svn_test_opts_t *opts,
+                       apr_pool_t *pool)
+{
+  svn_repos_t *repos;
+  svn_fs_t *fs;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *txn_root;
+  apr_pool_t *subpool = svn_pool_create(pool);
+  svn_revnum_t youngest_rev;
+  const svn_delta_editor_t *editor;
+  void *edit_baton, *report_baton;
+  svn_error_t *err;
+
+  SVN_ERR(svn_test__create_repos(&repos, "test-repo-reporter-depth-exclude",
+                                 opts, pool));
+  fs = svn_repos_fs(repos);
+
+  /* Prepare a txn to receive the greek tree (revision 1). */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_test__create_greek_tree(txn_root, subpool));
+  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(subpool);
+
+  /* Revision 2: make a bunch of changes */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  {
+    static svn_test__txn_script_command_t script_entries[] = {
+      { 'e', "iota",      "Changed file 'iota'.\n" },
+      { 'e', "A/D/G/pi",  "Changed file 'pi'.\n" },
+      { 'e', "A/mu",      "Changed file 'mu'.\n" },
+      { 'a', "A/D/foo",    "New file 'foo'.\n" },
+      { 'a', "A/B/bar",    "New file 'bar'.\n" },
+      { 'd', "A/D/H",      NULL },
+      { 'd', "A/B/E/beta", NULL }
+    };
+    SVN_ERR(svn_test__txn_script_exec(txn_root,
+                                      script_entries,
+                                      sizeof(script_entries)/
+                                      sizeof(script_entries[0]),
+                                      subpool));
+  }
+  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(subpool);
+
+  /* Confirm the contents of r2. */
+  {
+    svn_fs_root_t *revision_root;
+    static svn_test__tree_entry_t entries[] = {
+      { "iota",        "Changed file 'iota'.\n" },
+      { "A",           0 },
+      { "A/mu",        "Changed file 'mu'.\n" },
+      { "A/B",         0 },
+      { "A/B/bar",     "New file 'bar'.\n" },
+      { "A/B/lambda",  "This is the file 'lambda'.\n" },
+      { "A/B/E",       0 },
+      { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+      { "A/B/F",       0 },
+      { "A/C",         0 },
+      { "A/D",         0 },
+      { "A/D/foo",     "New file 'foo'.\n" },
+      { "A/D/gamma",   "This is the file 'gamma'.\n" },
+      { "A/D/G",       0 },
+      { "A/D/G/pi",    "Changed file 'pi'.\n" },
+      { "A/D/G/rho",   "This is the file 'rho'.\n" },
+      { "A/D/G/tau",   "This is the file 'tau'.\n" },
+    };
+    SVN_ERR(svn_fs_revision_root(&revision_root, fs,
+                                 youngest_rev, subpool));
+    SVN_ERR(svn_test__validate_tree(revision_root,
+                                    entries,
+                                    sizeof(entries)/sizeof(entries[0]),
+                                    subpool));
+  }
+
+  /* Run an update from r1 to r2, excluding iota and everything under
+     A/D.  Record the editor commands in a temporary txn. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, 1, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(dir_delta_get_editor(&editor, &edit_baton, fs,
+                               txn_root, "", subpool));
+
+  SVN_ERR(svn_repos_begin_report3(&report_baton, 2, repos, "/", "", NULL,
+                                  TRUE, svn_depth_infinity, FALSE, FALSE,
+                                  editor, edit_baton, NULL, NULL, 0,
+                                  subpool));
+  SVN_ERR(svn_repos_set_path3(report_baton, "", 1,
+                              svn_depth_infinity,
+                              FALSE, NULL, subpool));
+  SVN_ERR(svn_repos_set_path3(report_baton, "iota", SVN_INVALID_REVNUM,
+                              svn_depth_exclude,
+                              FALSE, NULL, subpool));
+  SVN_ERR(svn_repos_set_path3(report_baton, "A/D", SVN_INVALID_REVNUM,
+                              svn_depth_exclude,
+                              FALSE, NULL, subpool));
+  SVN_ERR(svn_repos_finish_report(report_baton, subpool));
+
+  /* Confirm the contents of the txn. */
+  /* This should have iota and A/D from r1, and everything else from
+     r2. */
+  {
+    static svn_test__tree_entry_t entries[] = {
+      { "iota",        "This is the file 'iota'.\n" },
+      { "A",           0 },
+      { "A/mu",        "Changed file 'mu'.\n" },
+      { "A/B",         0 },
+      { "A/B/bar",     "New file 'bar'.\n" },
+      { "A/B/lambda",  "This is the file 'lambda'.\n" },
+      { "A/B/E",       0 },
+      { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+      { "A/B/F",       0 },
+      { "A/C",         0 },
+      { "A/D",         0 },
+      { "A/D/gamma",   "This is the file 'gamma'.\n" },
+      { "A/D/G",       0 },
+      { "A/D/G/pi",    "This is the file 'pi'.\n" },
+      { "A/D/G/rho",   "This is the file 'rho'.\n" },
+      { "A/D/G/tau",   "This is the file 'tau'.\n" },
+      { "A/D/H",       0 },
+      { "A/D/H/chi",   "This is the file 'chi'.\n" },
+      { "A/D/H/psi",   "This is the file 'psi'.\n" },
+      { "A/D/H/omega", "This is the file 'omega'.\n" }
+    };
+    SVN_ERR(svn_test__validate_tree(txn_root,
+                                    entries,
+                                    sizeof(entries)/sizeof(entries[0]),
+                                    subpool));
+  }
+
+  /* Clean up after ourselves. */
+  svn_error_clear(svn_fs_abort_txn(txn, subpool));
+  svn_pool_clear(subpool);
+
+  /* Expect an error on an illegal report for r1 to r2.  The illegal
+     sequence is that we exclude A/D, then set_path() below A/D. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, 1, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(dir_delta_get_editor(&editor, &edit_baton, fs,
+                               txn_root, "", subpool));
+
+  SVN_ERR(svn_repos_begin_report3(&report_baton, 2, repos, "/", "", NULL,
+                                  TRUE, svn_depth_infinity, FALSE, FALSE,
+                                  editor, edit_baton, NULL, NULL, 0,
+                                  subpool));
+  SVN_ERR(svn_repos_set_path3(report_baton, "", 1,
+                              svn_depth_infinity,
+                              FALSE, NULL, subpool));
+  SVN_ERR(svn_repos_set_path3(report_baton, "iota", SVN_INVALID_REVNUM,
+                              svn_depth_exclude,
+                              FALSE, NULL, subpool));
+  SVN_ERR(svn_repos_set_path3(report_baton, "A/D", SVN_INVALID_REVNUM,
+                              svn_depth_exclude,
+                              FALSE, NULL, subpool));
+
+  /* This is the illegal call, since A/D was excluded above; the call
+     itself will not error, but finish_report() will.  As of r868172,
+     this delayed error behavior is not actually promised by the
+     reporter API, which merely warns callers not to touch a path
+     underneath a previously excluded path without defining what will
+     happen if they do.  However, it's still useful to test for the
+     error, since the reporter code is sensitive and we'd certainly
+     want to know about it if the behavior were to change. */
+  SVN_ERR(svn_repos_set_path3(report_baton, "A/D/G/pi",
+                              SVN_INVALID_REVNUM,
+                              svn_depth_infinity,
+                              FALSE, NULL, subpool));
+  err = svn_repos_finish_report(report_baton, subpool);
+  if (! err)
+    {
+      return svn_error_createf
+        (SVN_ERR_TEST_FAILED, NULL,
+         "Illegal report of \"A/D/G/pi\" did not error as expected");
+    }
+  else if (err->apr_err != SVN_ERR_FS_NOT_FOUND)
+    {
+      return svn_error_createf
+        (SVN_ERR_TEST_FAILED, err,
+         "Illegal report of \"A/D/G/pi\" got wrong kind of error:");
+    }
+
+  /* Clean up after ourselves. */
+  svn_error_clear(err);
+  svn_error_clear(svn_fs_abort_txn(txn, subpool));
+
+  svn_pool_destroy(subpool);
+
+  return SVN_NO_ERROR;
+}
+
+
+
+/* Test if prop values received by the server are validated.
+ * These tests "send" property values to the server and diagnose the
+ * behaviour.
+ */
+
+/* Helper function that makes an arbitrary change to a given repository
+ * REPOS and runs a commit with a specific revision property set to a
+ * certain value.  The property name, type and value are given in PROP_KEY,
+ * PROP_KLEN and PROP_VAL, as in apr_hash_set(), using a const char* key.
+ *
+ * The FILENAME argument names a file in the test repository to add in
+ * this commit, e.g. "/A/should_fail_1".
+ *
+ * On success, the given file is added to the repository.  So, using
+ * the same name multiple times on the same repository might fail.  Thus,
+ * use different FILENAME arguments for every call to this function
+ * (e.g. "/A/f1", "/A/f2", "/A/f3" etc).
+ */
+static svn_error_t *
+prop_validation_commit_with_revprop(const char *filename,
+                                    const char *prop_key,
+                                    apr_ssize_t prop_klen,
+                                    const svn_string_t *prop_val,
+                                    svn_repos_t *repos,
+                                    apr_pool_t *pool)
+{
+  const svn_delta_editor_t *editor;
+  void *edit_baton;
+  void *root_baton;
+  void *file_baton;
+
+  /* Prepare revision properties */
+  apr_hash_t *revprop_table = apr_hash_make(pool);
+
+  /* Add the requested property */
+  apr_hash_set(revprop_table, prop_key, prop_klen, prop_val);
+
+  /* Set the usual author and log props, unless the caller is testing one
+   * of them.  These two checks used to be chained with 'else if', which
+   * left the log message unset whenever the property under test was
+   * neither the author nor the log; use two independent checks so both
+   * defaults are always filled in. */
+  if (strcmp(prop_key, SVN_PROP_REVISION_AUTHOR) != 0)
+    {
+      apr_hash_set(revprop_table, SVN_PROP_REVISION_AUTHOR,
+                   APR_HASH_KEY_STRING,
+                   svn_string_create("plato", pool));
+    }
+  if (strcmp(prop_key, SVN_PROP_REVISION_LOG) != 0)
+    {
+      apr_hash_set(revprop_table, SVN_PROP_REVISION_LOG,
+                   APR_HASH_KEY_STRING,
+                   svn_string_create("revision log", pool));
+    }
+
+  /* Make an arbitrary change and commit using above values... */
+
+  SVN_ERR(svn_repos_get_commit_editor5(&editor, &edit_baton, repos,
+                                       NULL, "file://test", "/",
+                                       revprop_table,
+                                       NULL, NULL, NULL, NULL, pool));
+
+  SVN_ERR(editor->open_root(edit_baton, 0, pool, &root_baton));
+
+  SVN_ERR(editor->add_file(filename, root_baton, NULL,
+                           SVN_INVALID_REVNUM, pool,
+                           &file_baton));
+
+  SVN_ERR(editor->close_file(file_baton, NULL, pool));
+
+  SVN_ERR(editor->close_directory(root_baton, pool));
+
+  SVN_ERR(editor->close_edit(edit_baton, pool));
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Expect failure of invalid commit in these cases:
+ * - log message contains invalid UTF-8 octet (issue 1796)
+ * - log message contains invalid linefeed style (non-LF) (issue 1796)
+ */
+static svn_error_t *
+prop_validation(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_error_t *err;
+ svn_repos_t *repos;
+ const char non_utf8_string[5] = { 'a', '\xff', 'b', '\n', 0 };
+ const char *non_lf_string = "a\r\nb\n\rc\rd\n";
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ /* Create a filesystem and repository. */
+ SVN_ERR(svn_test__create_repos(&repos, "test-repo-prop-validation",
+ opts, subpool));
+
+
+ /* Test an invalid commit log message: UTF-8 */
+ err = prop_validation_commit_with_revprop
+ ("/non_utf8_log_msg",
+ SVN_PROP_REVISION_LOG, APR_HASH_KEY_STRING,
+ svn_string_create(non_utf8_string, subpool),
+ repos, subpool);
+
+ if (err == SVN_NO_ERROR)
+ return svn_error_create(SVN_ERR_TEST_FAILED, err,
+ "Failed to reject a log with invalid "
+ "UTF-8");
+ else if (err->apr_err != SVN_ERR_BAD_PROPERTY_VALUE)
+ return svn_error_create(SVN_ERR_TEST_FAILED, err,
+ "Expected SVN_ERR_BAD_PROPERTY_VALUE for "
+ "a log with invalid UTF-8, "
+ "got another error.");
+ svn_error_clear(err);
+
+
+ /* Test an invalid commit log message: LF */
+ err = prop_validation_commit_with_revprop
+ ("/non_lf_log_msg",
+ SVN_PROP_REVISION_LOG, APR_HASH_KEY_STRING,
+ svn_string_create(non_lf_string, subpool),
+ repos, subpool);
+
+ if (err == SVN_NO_ERROR)
+ return svn_error_create(SVN_ERR_TEST_FAILED, err,
+ "Failed to reject a log with inconsistent "
+ "line ending style");
+ else if (err->apr_err != SVN_ERR_BAD_PROPERTY_VALUE)
+ return svn_error_create(SVN_ERR_TEST_FAILED, err,
+ "Expected SVN_ERR_BAD_PROPERTY_VALUE for "
+ "a log with inconsistent line ending style, "
+ "got another error.");
+ svn_error_clear(err);
+
+
+ /* Done. */
+ svn_pool_destroy(subpool);
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* Tests for svn_repos_get_logsN() */
+
+/* Log receiver which simply increments the svn_revnum_t counter
+   pointed to by BATON; LOG_ENTRY itself is ignored. */
+static svn_error_t *
+log_receiver(void *baton,
+             svn_log_entry_t *log_entry,
+             apr_pool_t *pool)
+{
+  svn_revnum_t *counter = baton;
+
+  *counter = *counter + 1;
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Commit three revisions on top of a Greek tree and then drive
+ * svn_repos_get_logs4() over every combination of start revision, end
+ * revision and entry limit, verifying that log_receiver() is invoked
+ * exactly the expected number of times.  A start/end value of 0 is
+ * passed as SVN_INVALID_REVNUM, which this test expects to resolve to
+ * the youngest revision. */
+static svn_error_t *
+get_logs(const svn_test_opts_t *opts,
+         apr_pool_t *pool)
+{
+  svn_repos_t *repos;
+  svn_fs_t *fs;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *txn_root;
+  svn_revnum_t start, end, youngest_rev = 0;
+  apr_pool_t *subpool = svn_pool_create(pool);
+
+  /* Create a filesystem and repository. */
+  SVN_ERR(svn_test__create_repos(&repos, "test-repo-get-logs",
+                                 opts, pool));
+  fs = svn_repos_fs(repos);
+
+  /* Revision 1: Add the Greek tree. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_test__create_greek_tree(txn_root, subpool));
+  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+  /* Revision 2: Tweak A/mu and A/B/E/alpha. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/mu",
+                                      "Revision 2", subpool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/B/E/alpha",
+                                      "Revision 2", subpool));
+  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+  /* Revision 3: Tweak A/B/E/alpha and A/B/E/beta. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/B/E/alpha",
+                                      "Revision 3", subpool));
+  SVN_ERR(svn_test__set_file_contents(txn_root, "A/B/E/beta",
+                                      "Revision 3", subpool));
+  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+
+  for (start = 0; start <= youngest_rev; start++)
+    {
+      for (end = 0; end <= youngest_rev; end++)
+        {
+          /* 0 stands for "unspecified"; the effective revision in that
+           * case is the youngest one. */
+          svn_revnum_t start_arg = start ? start : SVN_INVALID_REVNUM;
+          svn_revnum_t end_arg = end ? end : SVN_INVALID_REVNUM;
+          svn_revnum_t eff_start = start ? start : youngest_rev;
+          svn_revnum_t eff_end = end ? end : youngest_rev;
+          int limit;
+          /* Number of revisions in the inclusive range, regardless of
+           * whether the range runs forward or backward. */
+          svn_revnum_t max_logs =
+            MAX(eff_start, eff_end) + 1 - MIN(eff_start, eff_end);
+          svn_revnum_t num_logs;
+
+          /* this may look like it can get in an infinite loop if max_logs
+           * ended up being larger than the size limit can represent.  It
+           * can't, because a negative limit will end up failing to match
+           * the existing number of logs. */
+          for (limit = 0; limit <= max_logs; limit++)
+            {
+              /* A limit of 0 means "no limit": expect all entries. */
+              svn_revnum_t num_expected = limit ? limit : max_logs;
+
+              svn_pool_clear(subpool);
+              num_logs = 0;
+              SVN_ERR(svn_repos_get_logs4(repos, NULL, start_arg, end_arg,
+                                          limit, FALSE, FALSE, FALSE, NULL,
+                                          NULL, NULL, log_receiver, &num_logs,
+                                          subpool));
+              if (num_logs != num_expected)
+                return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                         "Log with start=%ld,end=%ld,limit=%d "
+                                         "returned %ld entries (expected %ld)",
+                                         start_arg, end_arg, limit,
+                                         num_logs, num_expected);
+            }
+        }
+    }
+  svn_pool_destroy(subpool);
+  return SVN_NO_ERROR;
+}
+
+
+/* Tests for svn_repos_get_file_revsN() */
+
+/* One expected result row for the svn_repos_get_file_revs2() tests;
+   compared against the handler arguments in file_rev_handler(). */
+typedef struct file_revs_t {
+  /* Revision this entry describes. */
+  svn_revnum_t rev;
+  /* Expected repository path of the file in REV. */
+  const char *path;
+  /* Expected value of the handler's result_of_merge flag. */
+  svn_boolean_t result_of_merge;
+  /* Expected value of the svn:author revision property. */
+  const char *author;
+} file_revs_t;
+
+/* Look up revision REV in the hash table passed in BATON and verify
+   that PATH, RESULT_OF_MERGE and the author revprop match the expected
+   values recorded there.  Each verified revision is removed from the
+   table, so the caller can check afterwards that every expected
+   revision was seen. */
+static svn_error_t *
+file_rev_handler(void *baton, const char *path, svn_revnum_t rev,
+                 apr_hash_t *rev_props, svn_boolean_t result_of_merge,
+                 svn_txdelta_window_handler_t *delta_handler,
+                 void **delta_baton, apr_array_header_t *prop_diffs,
+                 apr_pool_t *pool)
+{
+  apr_hash_t *expected = baton;
+  file_revs_t *expected_rev = apr_hash_get(expected, &rev, sizeof(rev));
+  const char *author;
+
+  if (!expected_rev)
+    return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                             "Revision rev info not expected for rev %ld "
+                             "from path %s",
+                             rev, path);
+
+  author = svn_prop_get_value(rev_props, SVN_PROP_REVISION_AUTHOR);
+
+  SVN_TEST_STRING_ASSERT(author, expected_rev->author);
+  SVN_TEST_STRING_ASSERT(path, expected_rev->path);
+  SVN_TEST_ASSERT(rev == expected_rev->rev);
+  SVN_TEST_ASSERT(result_of_merge == expected_rev->result_of_merge);
+
+  /* Mark this revision as seen. */
+  apr_hash_set(expected, &rev, sizeof(rev), NULL);
+
+  return SVN_NO_ERROR;
+}
+
+/* Build the standard blame repository (svn_test__create_blame_repository)
+ * and verify svn_repos_get_file_revs2() against hard-coded expectation
+ * tables: forward over /trunk/A/mu and over /branches/1.0.x/A/mu, and
+ * backward (youngest down to 0) over /trunk/A/mu.
+ * NOTE(review): the TRUE/FALSE argument appears to toggle inclusion of
+ * merged revisions -- the reverse pass passes FALSE and only expects the
+ * rows whose result_of_merge is FALSE; confirm against the API docs. */
+static svn_error_t *
+test_get_file_revs(const svn_test_opts_t *opts,
+                   apr_pool_t *pool)
+{
+  svn_repos_t *repos = NULL;
+  svn_fs_t *fs;
+  svn_revnum_t youngest_rev = 0;
+  apr_pool_t *subpool = svn_pool_create(pool);
+  int i;
+
+  /* Expected rows when tracing /trunk/A/mu forward. */
+  file_revs_t trunk_results[] = {
+    { 2, "/trunk/A/mu", FALSE, "initial" },
+    { 3, "/trunk/A/mu", FALSE, "user-trunk" },
+    { 4, "/branches/1.0.x/A/mu", TRUE, "copy" },
+    { 5, "/trunk/A/mu", FALSE, "user-trunk" },
+    { 6, "/branches/1.0.x/A/mu", TRUE, "user-branch" },
+    { 7, "/branches/1.0.x/A/mu", TRUE, "user-merge1" },
+    { 8, "/trunk/A/mu", FALSE, "user-merge2" },
+  };
+  /* Expected rows when tracing /branches/1.0.x/A/mu forward. */
+  file_revs_t branch_results[] = {
+    { 2, "/trunk/A/mu", FALSE, "initial" },
+    { 3, "/trunk/A/mu", FALSE, "user-trunk" },
+    { 4, "/branches/1.0.x/A/mu", FALSE, "copy" },
+    { 5, "/trunk/A/mu", TRUE, "user-trunk" },
+    { 6, "/branches/1.0.x/A/mu", FALSE, "user-branch" },
+    { 7, "/branches/1.0.x/A/mu", FALSE, "user-merge1" },
+  };
+  apr_hash_t *ht_trunk_results = apr_hash_make(subpool);
+  apr_hash_t *ht_branch_results = apr_hash_make(subpool);
+  apr_hash_t *ht_reverse_results = apr_hash_make(subpool);
+
+  /* Index the expectation tables by revision number; file_rev_handler()
+     removes each entry it matches, so an empty hash afterwards means
+     every expected revision was reported exactly once. */
+  for (i = 0; i < sizeof(trunk_results) / sizeof(trunk_results[0]); i++)
+    apr_hash_set(ht_trunk_results, &trunk_results[i].rev,
+                 sizeof(trunk_results[i].rev), &trunk_results[i]);
+
+  for (i = 0; i < sizeof(branch_results) / sizeof(branch_results[0]); i++)
+    apr_hash_set(ht_branch_results, &branch_results[i].rev,
+                 sizeof(branch_results[i].rev), &branch_results[i]);
+
+  /* The reverse pass only expects the non-merge trunk rows. */
+  for (i = 0; i < sizeof(trunk_results) / sizeof(trunk_results[0]); i++)
+    if (!trunk_results[i].result_of_merge)
+      apr_hash_set(ht_reverse_results, &trunk_results[i].rev,
+                   sizeof(trunk_results[i].rev), &trunk_results[i]);
+
+  /* Check for feature support */
+  if (opts->server_minor_version && (opts->server_minor_version < 5))
+    return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+                            "not supported in pre-1.5 SVN");
+
+  /* Create the repository and verify blame results. */
+  SVN_ERR(svn_test__create_blame_repository(&repos, "test-repo-get-filerevs",
+                                            opts, subpool));
+  fs = svn_repos_fs(repos);
+
+  SVN_ERR(svn_fs_youngest_rev(&youngest_rev, fs, subpool));
+
+  /* Verify blame of /trunk/A/mu */
+  SVN_ERR(svn_repos_get_file_revs2(repos, "/trunk/A/mu", 0, youngest_rev,
+                                   TRUE, NULL, NULL,
+                                   file_rev_handler,
+                                   ht_trunk_results,
+                                   subpool));
+  SVN_TEST_ASSERT(apr_hash_count(ht_trunk_results) == 0);
+
+  /* Verify blame of /branches/1.0.x/A/mu */
+  SVN_ERR(svn_repos_get_file_revs2(repos, "/branches/1.0.x/A/mu", 0,
+                                   youngest_rev,
+                                   TRUE, NULL, NULL,
+                                   file_rev_handler,
+                                   ht_branch_results,
+                                   subpool));
+  SVN_TEST_ASSERT(apr_hash_count(ht_branch_results) == 0);
+
+  /* ### TODO: Verify blame of /branches/1.0.x/A/mu in range 6-7 */
+
+  /* Reverse direction: youngest down to 0. */
+  SVN_ERR(svn_repos_get_file_revs2(repos, "/trunk/A/mu", youngest_rev, 0,
+                                   FALSE, NULL, NULL,
+                                   file_rev_handler,
+                                   ht_reverse_results,
+                                   subpool));
+  SVN_TEST_ASSERT(apr_hash_count(ht_reverse_results) == 0);
+
+  svn_pool_destroy(subpool);
+
+  return SVN_NO_ERROR;
+}
+
+/* Issue 4060: check that recursive write access to /A/B/C is granted to
+   user 'ozymandias' under the authz rules below, regardless of whether
+   the repository name is given as a real name, the empty string, or
+   not at all. */
+static svn_error_t *
+issue_4060(const svn_test_opts_t *opts,
+           apr_pool_t *pool)
+{
+  apr_pool_t *subpool = svn_pool_create(pool);
+  svn_authz_t *authz_cfg;
+  svn_boolean_t allowed;
+  int i;
+  /* The same access check is repeated for each of these repos names. */
+  const char *repos_names[3] = { "babylon", "", NULL };
+  const char *authz_contents =
+    "[/A/B]" NL
+    "ozymandias = rw" NL
+    "[/]" NL
+    "ozymandias = r" NL
+    "" NL;
+
+  SVN_ERR(authz_get_handle(&authz_cfg, authz_contents, FALSE, subpool));
+
+  for (i = 0; i < 3; i++)
+    {
+      SVN_ERR(svn_repos_authz_check_access(authz_cfg, repos_names[i],
+                                           "/A/B/C", "ozymandias",
+                                           svn_authz_write
+                                             | svn_authz_recursive,
+                                           &allowed, subpool));
+      SVN_TEST_ASSERT(allowed);
+    }
+
+  svn_pool_destroy(subpool);
+
+  return SVN_NO_ERROR;
+}
+
+/* Test svn_repos_delete(): the repository directory must exist before
+   the call and be gone afterwards. */
+static svn_error_t *
+test_delete_repos(const svn_test_opts_t *opts,
+                  apr_pool_t *pool)
+{
+  const char *repos_path;
+  svn_node_kind_t kind;
+
+  /* Create the repository inside a subpool so that the svn_repos_t is
+     closed again before svn_repos_delete() runs on its path. */
+  {
+    apr_pool_t *subpool = svn_pool_create(pool);
+    svn_repos_t *repos;
+
+    SVN_ERR(svn_test__create_repos(&repos, "test-repo-delete-repos", opts,
+                                   subpool));
+    repos_path = svn_repos_path(repos, pool);
+    svn_pool_destroy(subpool);
+  }
+
+  SVN_ERR(svn_io_check_path(repos_path, &kind, pool));
+  SVN_TEST_ASSERT(kind != svn_node_none);
+
+  SVN_ERR(svn_repos_delete(repos_path, pool));
+
+  SVN_ERR(svn_io_check_path(repos_path, &kind, pool));
+  SVN_TEST_ASSERT(kind == svn_node_none);
+
+  /* Recreate dir so that test cleanup doesn't fail. */
+  SVN_ERR(svn_io_dir_make(repos_path, APR_OS_DEFAULT, pool));
+
+  return SVN_NO_ERROR;
+}
+
+/* Prepare a commit for the filename_with_control_chars() tests: open a
+   commit editor for REPOS and its root directory, returning the editor
+   in *EDITOR_P, its baton in *EDIT_BATON_P and the root directory baton
+   in *ROOT_BATON.  Allocate everything in SCRATCH_POOL. */
+static svn_error_t *
+fwcc_prepare(const svn_delta_editor_t **editor_p,
+             void **edit_baton_p,
+             void **root_baton,
+             svn_repos_t *repos,
+             apr_pool_t *scratch_pool)
+{
+  const svn_delta_editor_t *editor;
+  void *edit_baton;
+
+  /* Checks for control characters are implemented in the commit editor,
+   * not in the FS API, so the commit must go through the editor. */
+  SVN_ERR(svn_repos_get_commit_editor4(&editor, &edit_baton, repos,
+                                       NULL, "file://test", "/",
+                                       "plato", "test commit",
+                                       dummy_commit_cb, NULL, NULL, NULL,
+                                       scratch_pool));
+  SVN_ERR(editor->open_root(edit_baton, 1, scratch_pool, root_baton));
+
+  *editor_p = editor;
+  *edit_baton_p = edit_baton;
+
+  return SVN_NO_ERROR;
+}
+
+/* Related to issue 4340, "filenames containing \n corrupt FSFS
+ * repositories": the commit editor must reject paths that contain
+ * control characters, whether they arrive via a copy, an added file,
+ * or an added directory. */
+static svn_error_t *
+filename_with_control_chars(const svn_test_opts_t *opts,
+                            apr_pool_t *pool)
+{
+  apr_pool_t *subpool = svn_pool_create(pool);
+  svn_repos_t *repos;
+  svn_fs_t *fs;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *txn_root;
+  svn_revnum_t youngest_rev = 0;
+  svn_error_t *err;
+  /* Each of these must be rejected with SVN_ERR_FS_PATH_SYNTAX. */
+  static const char *bad_paths[] = {
+    "/bar\t",
+    "/bar\n",
+    "/\barb\az",
+    "/\x02 baz",
+    NULL,
+  };
+  const char *p;
+  int i;
+  void *edit_baton;
+  void *root_baton;
+  void *out_baton;
+  const svn_delta_editor_t *editor;
+
+  /* Create the repository. */
+  SVN_ERR(svn_test__create_repos(&repos, "test-repo-filename-with-cntrl-chars",
+                                 opts, pool));
+  fs = svn_repos_fs(repos);
+
+  /* Revision 1: Add a directory /foo */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+  SVN_ERR(svn_fs_make_dir(txn_root, "/foo", subpool));
+  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+  SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+  svn_pool_clear(subpool);
+
+  /* Attempt to copy /foo to each bad path P.  This should fail. */
+  for (i = 0; (p = bad_paths[i]) != NULL; i++)
+    {
+      svn_pool_clear(subpool);
+
+      SVN_ERR(fwcc_prepare(&editor, &edit_baton, &root_baton, repos, subpool));
+      err = editor->add_directory(p, root_baton, "/foo", 1, subpool,
+                                  &out_baton);
+      if (!err)
+        err = editor->close_edit(edit_baton, subpool);
+      /* Clean up the (possibly already closed) edit. */
+      svn_error_clear(editor->abort_edit(edit_baton, subpool));
+      SVN_TEST_ASSERT_ERROR(err, SVN_ERR_FS_PATH_SYNTAX);
+    }
+
+  /* Attempt to add a file with each bad path P.  This should fail. */
+  for (i = 0; (p = bad_paths[i]) != NULL; i++)
+    {
+      svn_pool_clear(subpool);
+
+      SVN_ERR(fwcc_prepare(&editor, &edit_baton, &root_baton, repos, subpool));
+      err = editor->add_file(p, root_baton, NULL, SVN_INVALID_REVNUM,
+                             subpool, &out_baton);
+      if (!err)
+        err = editor->close_edit(edit_baton, subpool);
+      svn_error_clear(editor->abort_edit(edit_baton, subpool));
+      SVN_TEST_ASSERT_ERROR(err, SVN_ERR_FS_PATH_SYNTAX);
+    }
+
+  /* Attempt to add a directory with each bad path P.  This should fail. */
+  for (i = 0; (p = bad_paths[i]) != NULL; i++)
+    {
+      svn_pool_clear(subpool);
+
+      SVN_ERR(fwcc_prepare(&editor, &edit_baton, &root_baton, repos, subpool));
+      err = editor->add_directory(p, root_baton, NULL, SVN_INVALID_REVNUM,
+                                  subpool, &out_baton);
+      if (!err)
+        err = editor->close_edit(edit_baton, subpool);
+      svn_error_clear(editor->abort_edit(edit_baton, subpool));
+      SVN_TEST_ASSERT_ERROR(err, SVN_ERR_FS_PATH_SYNTAX);
+    }
+
+  /* The subpool was previously leaked into POOL; destroy it like the
+     other tests in this file do. */
+  svn_pool_destroy(subpool);
+
+  return SVN_NO_ERROR;
+}
+
+/* Check svn_repos_capabilities() and svn_repos_info_format() for
+   repositories created with old (1.3) and newer (1.9) compatibility
+   settings. */
+static svn_error_t *
+test_repos_info(const svn_test_opts_t *opts,
+                apr_pool_t *pool)
+{
+  svn_repos_t *repos;
+  svn_test_opts_t mod_opts = *opts;
+  apr_hash_t *capabilities;
+  svn_version_t *supports_version;
+  svn_version_t v1_0_0 = {1, 0, 0, ""};
+  svn_version_t v1_4_0 = {1, 4, 0, ""};
+  int repos_format;
+
+  /* for repo types that have been around before 1.4 */
+  if (strcmp(opts->fs_type, "fsx") != 0)
+    {
+      /* A 1.3-compatible repository: no capabilities, format 3,
+         supported since 1.0. */
+      mod_opts.server_minor_version = 3;
+      SVN_ERR(svn_test__create_repos(&repos, "test-repo-info-3",
+                                     &mod_opts, pool));
+      SVN_ERR(svn_repos_capabilities(&capabilities, repos, pool, pool));
+      SVN_TEST_ASSERT(apr_hash_count(capabilities) == 0);
+      SVN_ERR(svn_repos_info_format(&repos_format, &supports_version, repos,
+                                    pool, pool));
+      SVN_TEST_ASSERT(repos_format == 3);
+      SVN_TEST_ASSERT(svn_ver_equal(supports_version, &v1_0_0));
+    }
+
+  /* A 1.9-compatible repository: mergeinfo capability, format 5,
+     supported since 1.4. */
+  mod_opts.server_minor_version = 9;
+  SVN_ERR(svn_test__create_repos(&repos, "test-repo-info-9",
+                                 &mod_opts, pool));
+  SVN_ERR(svn_repos_capabilities(&capabilities, repos, pool, pool));
+  SVN_TEST_ASSERT(apr_hash_count(capabilities) == 1);
+  SVN_TEST_ASSERT(svn_hash_gets(capabilities, SVN_REPOS_CAPABILITY_MERGEINFO));
+  SVN_ERR(svn_repos_info_format(&repos_format, &supports_version, repos,
+                                pool, pool));
+  SVN_TEST_ASSERT(repos_format == 5);
+  SVN_TEST_ASSERT(svn_ver_equal(supports_version, &v1_4_0));
+
+  return SVN_NO_ERROR;
+}
+
+/* Exercise svn_repos__config_pool_t: configurations with identical
+ * content must be shared (the test asserts pointer equality of the
+ * parsed CFG->SECTIONS), no matter whether the content comes from the
+ * same file, a second file with the same content, or an in-repository
+ * file; different content must yield a different object.  Also checks
+ * the error behavior for nonexistent config paths. */
+static svn_error_t *
+test_config_pool(const svn_test_opts_t *opts,
+                 apr_pool_t *pool)
+{
+  const char *repo_name = "test-repo-config-pool";
+  svn_repos_t *repos;
+  svn_stringbuf_t *cfg_buffer1, *cfg_buffer2;
+  svn_config_t *cfg;
+  apr_hash_t *sections1, *sections2;
+  int i;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *root, *rev_root;
+  svn_revnum_t rev;
+  const char *repo_root_url;
+  const char *srcdir;
+  svn_error_t *err;
+
+  svn_repos__config_pool_t *config_pool;
+  apr_pool_t *subpool = svn_pool_create(pool);
+
+  const char *wrk_dir = svn_test_data_path("config_pool", pool);
+
+  SVN_ERR(svn_io_make_dir_recursively(wrk_dir, pool));
+
+  /* read all config info through a single config pool. */
+  SVN_ERR(svn_repos__config_pool_create(&config_pool, TRUE, pool));
+
+  /* have two different configurations */
+  SVN_ERR(svn_test_get_srcdir(&srcdir, opts, pool));
+  SVN_ERR(svn_stringbuf_from_file2(
+                        &cfg_buffer1,
+                        svn_dirent_join(srcdir,
+                                        "../libsvn_subr/config-test.cfg",
+                                        pool),
+                        pool));
+  cfg_buffer2 = svn_stringbuf_dup(cfg_buffer1, pool);
+  svn_stringbuf_appendcstr(cfg_buffer2, "\n[more]\nU=\"X\"\n");
+
+  /* write them to 2x2 files: files 1 and 2 share content, as do
+     files 3 and 4. */
+  SVN_ERR(svn_io_write_atomic2(svn_dirent_join(wrk_dir,
+                                               "config-pool-test1.cfg",
+                                               pool),
+                               cfg_buffer1->data, cfg_buffer1->len, NULL,
+                               FALSE, pool));
+  SVN_ERR(svn_io_write_atomic2(svn_dirent_join(wrk_dir,
+                                               "config-pool-test2.cfg",
+                                               pool),
+                               cfg_buffer1->data, cfg_buffer1->len, NULL,
+                               FALSE, pool));
+  SVN_ERR(svn_io_write_atomic2(svn_dirent_join(wrk_dir,
+                                               "config-pool-test3.cfg",
+                                               pool),
+                               cfg_buffer2->data, cfg_buffer2->len, NULL,
+                               FALSE, pool));
+  SVN_ERR(svn_io_write_atomic2(svn_dirent_join(wrk_dir,
+                                               "config-pool-test4.cfg",
+                                               pool),
+                               cfg_buffer2->data, cfg_buffer2->len, NULL,
+                               FALSE, pool));
+
+  /* requesting a config over and over again should return the same
+     (even though it is not being referenced) */
+  sections1 = NULL;
+  for (i = 0; i < 4; ++i)
+    {
+      SVN_ERR(svn_repos__config_pool_get(
+                                    &cfg, config_pool,
+                                    svn_dirent_join(wrk_dir,
+                                                    "config-pool-test1.cfg",
+                                                    pool),
+                                    TRUE, NULL, subpool));
+
+      if (sections1 == NULL)
+        sections1 = cfg->sections;
+      else
+        SVN_TEST_ASSERT(cfg->sections == sections1);
+
+      svn_pool_clear(subpool);
+    }
+
+  /* requesting the same config from another file should return the same
+     (even though it is not being referenced) */
+  for (i = 0; i < 4; ++i)
+    {
+      SVN_ERR(svn_repos__config_pool_get(
+                                    &cfg, config_pool,
+                                    svn_dirent_join(wrk_dir,
+                                                    "config-pool-test2.cfg",
+                                                    pool),
+                                    TRUE, NULL, subpool));
+
+      SVN_TEST_ASSERT(cfg->sections == sections1);
+
+      svn_pool_clear(subpool);
+    }
+
+  /* reading a different configuration should return a different pointer */
+  sections2 = NULL;
+  for (i = 0; i < 2; ++i)
+    {
+      SVN_ERR(svn_repos__config_pool_get(
+                                    &cfg, config_pool,
+                                    svn_dirent_join(wrk_dir,
+                                                    "config-pool-test3.cfg",
+                                                    pool),
+                                    TRUE, NULL, subpool));
+
+      if (sections2 == NULL)
+        sections2 = cfg->sections;
+      else
+        SVN_TEST_ASSERT(cfg->sections == sections2);
+
+      SVN_TEST_ASSERT(sections1 != sections2);
+      svn_pool_clear(subpool);
+    }
+
+  /* create an in-repo config */
+  SVN_ERR(svn_dirent_get_absolute(&repo_root_url, repo_name, pool));
+  SVN_ERR(svn_uri_get_file_url_from_dirent(&repo_root_url, repo_root_url,
+                                           pool));
+
+  SVN_ERR(svn_test__create_repos(&repos, repo_name, opts, pool));
+  SVN_ERR(svn_fs_begin_txn2(&txn, svn_repos_fs(repos), 0, 0, pool));
+  SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+  SVN_ERR(svn_fs_make_dir(root, "dir", pool));
+  SVN_ERR(svn_fs_make_file(root, "dir/config", pool));
+  SVN_ERR(svn_test__set_file_contents(root, "dir/config",
+                                      cfg_buffer1->data, pool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+
+  /* reading the config from the repo should still give cfg1
+     (same content as the file-based configs above) */
+  SVN_ERR(svn_repos__config_pool_get(&cfg, config_pool,
+                                     svn_path_url_add_component2(
+                                                    repo_root_url,
+                                                    "dir/config", pool),
+                                     TRUE, NULL, subpool));
+  SVN_TEST_ASSERT(cfg->sections == sections1);
+  svn_pool_clear(subpool);
+
+  /* create another in-repo config: copy dir (keeping the cfg1 content
+     under another-dir/config), then overwrite dir/config with cfg2 */
+  SVN_ERR(svn_fs_begin_txn2(&txn, svn_repos_fs(repos), rev, 0, pool));
+  SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+  SVN_ERR(svn_fs_revision_root(&rev_root, svn_repos_fs(repos), rev, pool));
+  SVN_ERR(svn_fs_copy(rev_root, "dir", root, "another-dir", pool));
+  SVN_ERR(svn_test__set_file_contents(root, "dir/config",
+                                      cfg_buffer2->data, pool));
+  SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool));
+
+  /* reading the config from the repo should give cfg2 now */
+  SVN_ERR(svn_repos__config_pool_get(&cfg, config_pool,
+                                     svn_path_url_add_component2(
+                                                    repo_root_url,
+                                                    "dir/config", pool),
+                                     TRUE, NULL, subpool));
+  SVN_TEST_ASSERT(cfg->sections == sections2);
+  svn_pool_clear(subpool);
+
+  /* reading the copied config should still give cfg1 */
+  SVN_ERR(svn_repos__config_pool_get(&cfg, config_pool,
+                                     svn_path_url_add_component2(
+                                                    repo_root_url,
+                                                    "another-dir/config",
+                                                    pool),
+                                     TRUE, NULL, subpool));
+  SVN_TEST_ASSERT(cfg->sections == sections1);
+  svn_pool_clear(subpool);
+
+  /* once again: repeated reads.  This triggers a different code path. */
+  SVN_ERR(svn_repos__config_pool_get(&cfg, config_pool,
+                                     svn_path_url_add_component2(
+                                                    repo_root_url,
+                                                    "dir/config", pool),
+                                     TRUE, NULL, subpool));
+  SVN_TEST_ASSERT(cfg->sections == sections2);
+  SVN_ERR(svn_repos__config_pool_get(&cfg, config_pool,
+                                     svn_path_url_add_component2(
+                                                    repo_root_url,
+                                                    "another-dir/config",
+                                                    pool),
+                                     TRUE, NULL, subpool));
+  SVN_TEST_ASSERT(cfg->sections == sections1);
+  svn_pool_clear(subpool);
+
+  /* access paths that don't exist: a URL into the repo yields
+     SVN_ERR_ILLEGAL_TARGET, a missing local file yields ENOENT */
+  SVN_TEST_ASSERT_ERROR(svn_repos__config_pool_get(&cfg, config_pool,
+                          svn_path_url_add_component2(repo_root_url, "X",
+                                                      pool),
+                          TRUE, NULL, subpool),
+                        SVN_ERR_ILLEGAL_TARGET);
+  err = svn_repos__config_pool_get(&cfg, config_pool, "X.cfg", TRUE, NULL,
+                                   subpool);
+  SVN_TEST_ASSERT(err && APR_STATUS_IS_ENOENT(err->apr_err));
+  svn_error_clear(err);
+  svn_pool_clear(subpool);
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Check that svn_repos_fs_type() reports the fs type the repository was
+   created with, both for the freshly created handle and after the
+   repository has been re-opened from disk. */
+static svn_error_t *
+test_repos_fs_type(const svn_test_opts_t *opts,
+                   apr_pool_t *pool)
+{
+  svn_repos_t *repos;
+
+  /* Create test repository and check the reported fs-type. */
+  SVN_ERR(svn_test__create_repos(&repos, "test-repo-repos_fs_type",
+                                 opts, pool));
+  SVN_TEST_STRING_ASSERT(svn_repos_fs_type(repos, pool), opts->fs_type);
+
+  /* Re-open repository and verify fs-type again. */
+  SVN_ERR(svn_repos_open3(&repos, svn_repos_path(repos, pool), NULL,
+                          pool, pool));
+  SVN_TEST_STRING_ASSERT(svn_repos_fs_type(repos, pool), opts->fs_type);
+
+  return SVN_NO_ERROR;
+}
+
+/* Set up an FS access context the old way -- a username plus a lock
+ * token added via svn_fs_access_add_lock_token(), the single-token
+ * variant -- and verify that a commit made under that context still
+ * succeeds with the expected new revision and no conflict.
+ * NOTE(review): the empty pre-commit hook appears to be installed so
+ * that the hook invocation path is exercised during the commit --
+ * confirm against svn_repos_fs_commit_txn()'s hook behavior. */
+static svn_error_t *
+deprecated_access_context_api(const svn_test_opts_t *opts,
+                              apr_pool_t *pool)
+{
+  svn_repos_t *repos;
+  svn_fs_access_t *access;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *root;
+  const char *conflict;
+  svn_revnum_t new_rev;
+  const char *hook;
+
+  /* Create test repository. */
+  SVN_ERR(svn_test__create_repos(&repos,
+                                 "test-repo-deprecated-access-context-api",
+                                 opts, pool));
+
+  /* Set an empty pre-commit hook. */
+#ifdef WIN32
+  /* On Windows the hook must be an executable extension, e.g. .bat. */
+  hook = apr_pstrcat(pool, svn_repos_pre_commit_hook(repos, pool), ".bat",
+                     SVN_VA_NULL);
+  SVN_ERR(svn_io_file_create(hook,
+                             "exit 0" APR_EOL_STR,
+                             pool));
+#else
+  /* On POSIX the hook is a shell script that must be executable. */
+  hook = svn_repos_pre_commit_hook(repos, pool);
+  SVN_ERR(svn_io_file_create(hook,
+                             "#!/bin/sh" APR_EOL_STR "exit 0" APR_EOL_STR,
+                             pool));
+  SVN_ERR(svn_io_set_file_executable(hook, TRUE, FALSE, pool));
+#endif
+
+  /* Set some access context using svn_fs_access_add_lock_token(). */
+  SVN_ERR(svn_fs_create_access(&access, "jrandom", pool));
+  SVN_ERR(svn_fs_access_add_lock_token(access, "opaquelocktoken:abc"));
+  SVN_ERR(svn_fs_set_access(svn_repos_fs(repos), access));
+
+  /* Commit a new revision. */
+  SVN_ERR(svn_repos_fs_begin_txn_for_commit2(&txn, repos, 0,
+                                             apr_hash_make(pool), pool));
+  SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+  SVN_ERR(svn_fs_make_dir(root, "/whatever", pool));
+  SVN_ERR(svn_repos_fs_commit_txn(&conflict, repos, &new_rev, txn, pool));
+
+  /* The commit must succeed cleanly: no conflict, and it creates r1. */
+  SVN_TEST_STRING_ASSERT(conflict, NULL);
+  SVN_TEST_ASSERT(new_rev == 1);
+
+  return SVN_NO_ERROR;
+}
+
+/* Commit three revisions to REPOS: create directory A/T, delete it
+   again, and finally copy SRC -- as it was in the revision before the
+   delete -- to DST.  Allocate in POOL. */
+static svn_error_t *
+mkdir_delete_copy(svn_repos_t *repos,
+                  const char *src,
+                  const char *dst,
+                  apr_pool_t *pool)
+{
+  svn_fs_t *fs = svn_repos_fs(repos);
+  svn_revnum_t head;
+  svn_fs_txn_t *txn;
+  svn_fs_root_t *txn_root, *rev_root;
+
+  SVN_ERR(svn_fs_youngest_rev(&head, fs, pool));
+
+  /* First commit: mkdir A/T. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, head, pool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+  SVN_ERR(svn_fs_make_dir(txn_root, "A/T", pool));
+  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &head, txn, pool));
+
+  /* Second commit: delete A/T again. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, head, pool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+  SVN_ERR(svn_fs_delete(txn_root, "A/T", pool));
+  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &head, txn, pool));
+
+  /* Third commit: copy SRC@(head-1), i.e. from before the delete,
+     to DST. */
+  SVN_ERR(svn_fs_begin_txn(&txn, fs, head, pool));
+  SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+  SVN_ERR(svn_fs_revision_root(&rev_root, fs, head - 1, pool));
+  SVN_ERR(svn_fs_copy(rev_root, src, txn_root, dst, pool));
+  SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &head, txn, pool));
+
+  return SVN_NO_ERROR;
+}
+
+/* Baton for authz_read_func(). */
+struct authz_read_baton_t {
+  /* Records every path the callback was asked about; values are dummy
+     non-NULL pointers. */
+  apr_hash_t *paths;
+  /* Pool used to copy the recorded path keys. */
+  apr_pool_t *pool;
+  /* If non-NULL, access to exactly this path is denied. */
+  const char *deny;
+};
+
+/* Authz callback used with svn_repos_trace_node_locations(): deny
+   access to the single path B->DENY (if set), allow everything else,
+   and record each queried PATH in B->PATHS so the caller can verify
+   which paths were checked. */
+static svn_error_t *
+authz_read_func(svn_boolean_t *allowed,
+                svn_fs_root_t *root,
+                const char *path,
+                void *baton,
+                apr_pool_t *pool)
+{
+  struct authz_read_baton_t *b = baton;
+
+  /* Deny iff PATH is exactly the configured deny path. */
+  *allowed = !(b->deny && strcmp(b->deny, path) == 0);
+
+  svn_hash_sets(b->paths, apr_pstrdup(b->pool, path), (void*)1);
+
+  return SVN_NO_ERROR;
+}
+
+/* Check that the revision->path mapping ACTUAL (keys are pointers to
+ * svn_revnum_t, values are paths) matches EXPECTED exactly, in both
+ * directions, and that every path present in ACTUAL appears in CHECKED,
+ * i.e. was passed to the authz callback. */
+static svn_error_t *
+verify_locations(apr_hash_t *actual,
+                 apr_hash_t *expected,
+                 apr_hash_t *checked,
+                 apr_pool_t *pool)
+{
+  apr_hash_index_t *hi;
+
+  /* Every expected revision must be present with the expected path. */
+  for (hi = apr_hash_first(pool, expected); hi; hi = apr_hash_next(hi))
+    {
+      const svn_revnum_t *rev = apr_hash_this_key(hi);
+      const char *path = apr_hash_get(actual, rev, sizeof(*rev));
+
+      if (!path)
+        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                 "expected %s for %d found (null)",
+                                 (char*)apr_hash_this_val(hi), (int)*rev);
+      else if (strcmp(path, apr_hash_this_val(hi)))
+        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                 "expected %s for %d found %s",
+                                 (char*)apr_hash_this_val(hi), (int)*rev, path);
+
+    }
+
+  /* Conversely, every actual entry must have been expected, and its
+     path must have gone through the authz callback. */
+  for (hi = apr_hash_first(pool, actual); hi; hi = apr_hash_next(hi))
+    {
+      const svn_revnum_t *rev = apr_hash_this_key(hi);
+      const char *path = apr_hash_get(expected, rev, sizeof(*rev));
+
+      if (!path)
+        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                 "found %s for %d expected (null)",
+                                 (char*)apr_hash_this_val(hi), (int)*rev);
+      else if (strcmp(path, apr_hash_this_val(hi)))
+        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                 "found %s for %d expected %s",
+                                 (char*)apr_hash_this_val(hi), (int)*rev, path);
+
+      if (!svn_hash_gets(checked, path))
+        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                 "did not check %s", path);
+    }
+
+  return SVN_NO_ERROR;
+}
+
+/* Record in EXPECTED that PATH is the expected location in revision
+   REV.  Passing a NULL PATH removes any existing expectation for REV
+   (apr_hash_set() deletes the key when given a NULL value).  The hash
+   key is allocated in POOL. */
+static void
+set_expected(apr_hash_t *expected,
+             svn_revnum_t rev,
+             const char *path,
+             apr_pool_t *pool)
+{
+  svn_revnum_t *key = apr_palloc(pool, sizeof(*key));
+
+  *key = rev;
+  apr_hash_set(expected, key, sizeof(*key), path);
+}
+
+static svn_error_t *
+trace_node_locations_authz(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_repos_t *repos;
+ svn_fs_t *fs;
+ svn_revnum_t youngest_rev = 0;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ struct authz_read_baton_t arb;
+ apr_array_header_t *revs = apr_array_make(pool, 10, sizeof(svn_revnum_t));
+ apr_hash_t *locations;
+ apr_hash_t *expected = apr_hash_make(pool);
+ int i;
+
+ /* Create test repository. */
+ SVN_ERR(svn_test__create_repos(&repos, "test-repo-trace-node-locations-authz",
+ opts, pool));
+ fs = svn_repos_fs(repos);
+
+ /* r1 create A */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "A", pool));
+ SVN_ERR(svn_fs_make_file(txn_root, "A/f", pool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "A/f", "foobar", pool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool));
+
+ /* r4 copy A to B */
+ SVN_ERR(mkdir_delete_copy(repos, "A", "B", pool));
+
+ /* r7 copy B to C */
+ SVN_ERR(mkdir_delete_copy(repos, "B", "C", pool));
+
+ /* r10 copy C to D */
+ SVN_ERR(mkdir_delete_copy(repos, "C", "D", pool));
+
+ SVN_ERR(svn_fs_youngest_rev(&youngest_rev, fs, pool));
+ SVN_ERR_ASSERT(youngest_rev == 10);
+
+ arb.paths = apr_hash_make(pool);
+ arb.pool = pool;
+ arb.deny = NULL;
+
+ apr_array_clear(revs);
+ for (i = 0; i <= youngest_rev; ++i)
+ APR_ARRAY_PUSH(revs, svn_revnum_t) = i;
+ set_expected(expected, 10, "/D/f", pool);
+ set_expected(expected, 8, "/C/f", pool);
+ set_expected(expected, 7, "/C/f", pool);
+ set_expected(expected, 5, "/B/f", pool);
+ set_expected(expected, 4, "/B/f", pool);
+ set_expected(expected, 2, "/A/f", pool);
+ set_expected(expected, 1, "/A/f", pool);
+ apr_hash_clear(arb.paths);
+ SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs,
+ authz_read_func, &arb, pool));
+ SVN_ERR(verify_locations(locations, expected, arb.paths, pool));
+
+ apr_array_clear(revs);
+ for (i = 1; i <= youngest_rev; ++i)
+ APR_ARRAY_PUSH(revs, svn_revnum_t) = i;
+ apr_hash_clear(arb.paths);
+ SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs,
+ authz_read_func, &arb, pool));
+ SVN_ERR(verify_locations(locations, expected, arb.paths, pool));
+
+ apr_array_clear(revs);
+ for (i = 2; i <= youngest_rev; ++i)
+ APR_ARRAY_PUSH(revs, svn_revnum_t) = i;
+ set_expected(expected, 1, NULL, pool);
+ apr_hash_clear(arb.paths);
+ SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs,
+ authz_read_func, &arb, pool));
+ SVN_ERR(verify_locations(locations, expected, arb.paths, pool));
+
+ apr_array_clear(revs);
+ for (i = 3; i <= youngest_rev; ++i)
+ APR_ARRAY_PUSH(revs, svn_revnum_t) = i;
+ set_expected(expected, 2, NULL, pool);
+ apr_hash_clear(arb.paths);
+ SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs,
+ authz_read_func, &arb, pool));
+ SVN_ERR(verify_locations(locations, expected, arb.paths, pool));
+
+ apr_array_clear(revs);
+ for (i = 6; i <= youngest_rev; ++i)
+ APR_ARRAY_PUSH(revs, svn_revnum_t) = i;
+ set_expected(expected, 5, NULL, pool);
+ set_expected(expected, 4, NULL, pool);
+ apr_hash_clear(arb.paths);
+ SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs,
+ authz_read_func, &arb, pool));
+ SVN_ERR(verify_locations(locations, expected, arb.paths, pool));
+
+ arb.deny = "/B/f";
+ apr_array_clear(revs);
+ for (i = 0; i <= youngest_rev; ++i)
+ APR_ARRAY_PUSH(revs, svn_revnum_t) = i;
+ apr_hash_clear(arb.paths);
+ SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs,
+ authz_read_func, &arb, pool));
+ SVN_ERR(verify_locations(locations, expected, arb.paths, pool));
+
+ apr_array_clear(revs);
+ for (i = 6; i <= youngest_rev; ++i)
+ APR_ARRAY_PUSH(revs, svn_revnum_t) = i;
+ apr_hash_clear(arb.paths);
+ SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs,
+ authz_read_func, &arb, pool));
+ SVN_ERR(verify_locations(locations, expected, arb.paths, pool));
+
+ APR_ARRAY_PUSH(revs, svn_revnum_t) = 0;
+ apr_hash_clear(arb.paths);
+ SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs,
+ authz_read_func, &arb, pool));
+ SVN_ERR(verify_locations(locations, expected, arb.paths, pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+commit_aborted_txn(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_repos_t *repos;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root;
+ const char *conflict;
+ svn_revnum_t new_rev;
+ svn_revnum_t youngest_rev;
+
+ /* Create a filesystem and repository. */
+ SVN_ERR(svn_test__create_repos(&repos, "test-repo-commit-aborted-txn",
+ opts, pool));
+
+ /* Create and abort the transaction. */
+ SVN_ERR(svn_repos_fs_begin_txn_for_commit2(&txn, repos, 0,
+ apr_hash_make(pool), pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "/A", pool));
+ SVN_ERR(svn_fs_abort_txn(txn, pool));
+
+ /* Committing the aborted transaction should fail. */
+ SVN_TEST_ASSERT_ANY_ERROR(svn_repos_fs_commit_txn(&conflict, repos,
+ &new_rev, txn, pool));
+
+ /* Ensure that output arguments follow svn_repos_fs_commit_txn()'s
+ contract -- NEW_REV should be set to SVN_INVALID_REVNUM and
+ CONFLICT should be NULL. */
+ SVN_TEST_ASSERT(new_rev == SVN_INVALID_REVNUM);
+ SVN_TEST_ASSERT(conflict == NULL);
+
+ /* Re-open repository and verify that it's still empty. */
+ SVN_ERR(svn_repos_open3(&repos, svn_repos_path(repos, pool), NULL,
+ pool, pool));
+ SVN_ERR(svn_fs_youngest_rev(&youngest_rev, svn_repos_fs(repos), pool));
+ SVN_TEST_ASSERT(youngest_rev == 0);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+list_callback(const char *path,
+ svn_dirent_t *dirent,
+ void *baton,
+ apr_pool_t *pool)
+{
+ *(int *)baton += 1;
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_list(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_repos_t *repos;
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *rev_root;
+ svn_revnum_t youngest_rev;
+ int counter = 0;
+ apr_array_header_t *patterns;
+
+ /* Create yet another greek tree repository. */
+ SVN_ERR(svn_test__create_repos(&repos, "test-repo-list", opts, pool));
+ fs = svn_repos_fs(repos);
+
+ /* Prepare a txn to receive the greek tree. */
+ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool));
+ SVN_ERR(svn_test__create_greek_tree(txn_root, pool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+ /* List all nodes under /A that contain an 'a'. */
+
+ patterns = apr_array_make(pool, 1, sizeof(const char *));
+ APR_ARRAY_PUSH(patterns, const char *) = "*a*";
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, pool));
+ SVN_ERR(svn_repos_list(rev_root, "/A", patterns, svn_depth_infinity, FALSE,
+ NULL, NULL, list_callback, &counter, NULL, NULL,
+ pool));
+ SVN_TEST_ASSERT(counter == 6);
+
+ return SVN_NO_ERROR;
+}
+
+/* The test table. */
+
+static int max_threads = 4;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_OPTS_PASS(dir_deltas,
+ "test svn_repos_dir_delta2"),
+ SVN_TEST_OPTS_PASS(node_tree_delete_under_copy,
+ "test deletions under copies in node_tree code"),
+ SVN_TEST_OPTS_PASS(revisions_changed,
+ "test svn_repos_history() (partially)"),
+ SVN_TEST_OPTS_PASS(node_locations,
+ "test svn_repos_node_locations"),
+ SVN_TEST_OPTS_PASS(node_locations2,
+ "test svn_repos_node_locations some more"),
+ SVN_TEST_OPTS_PASS(rmlocks,
+ "test removal of defunct locks"),
+ SVN_TEST_PASS2(authz,
+ "test authz access control"),
+ SVN_TEST_OPTS_PASS(in_repo_authz,
+ "test authz stored in the repo"),
+ SVN_TEST_OPTS_PASS(in_repo_groups_authz,
+ "test authz and global groups stored in the repo"),
+ SVN_TEST_OPTS_PASS(groups_authz,
+ "test authz with global groups"),
+ SVN_TEST_OPTS_PASS(commit_editor_authz,
+ "test authz in the commit editor"),
+ SVN_TEST_OPTS_PASS(commit_continue_txn,
+ "test commit with explicit txn"),
+ SVN_TEST_OPTS_PASS(node_location_segments,
+ "test svn_repos_node_location_segments"),
+ SVN_TEST_OPTS_PASS(reporter_depth_exclude,
+ "test reporter and svn_depth_exclude"),
+ SVN_TEST_OPTS_PASS(prop_validation,
+ "test if revprops are validated by repos"),
+ SVN_TEST_OPTS_PASS(get_logs,
+ "test svn_repos_get_logs ranges and limits"),
+ SVN_TEST_OPTS_PASS(test_get_file_revs,
+ "test svn_repos_get_file_revsN"),
+ SVN_TEST_OPTS_PASS(issue_4060,
+ "test issue 4060"),
+ SVN_TEST_OPTS_PASS(test_delete_repos,
+ "test svn_repos_delete"),
+ SVN_TEST_OPTS_PASS(filename_with_control_chars,
+ "test filenames with control characters"),
+ SVN_TEST_OPTS_PASS(test_repos_info,
+ "test svn_repos_info_*"),
+ SVN_TEST_OPTS_PASS(test_config_pool,
+ "test svn_repos__config_pool_*"),
+ SVN_TEST_OPTS_PASS(test_repos_fs_type,
+ "test test_repos_fs_type"),
+ SVN_TEST_OPTS_PASS(deprecated_access_context_api,
+ "test deprecated access context api"),
+ SVN_TEST_OPTS_PASS(trace_node_locations_authz,
+ "authz for svn_repos_trace_node_locations"),
+ SVN_TEST_OPTS_PASS(commit_aborted_txn,
+ "test committing a previously aborted txn"),
+ SVN_TEST_PASS2(test_authz_prefixes,
+ "test authz prefixes"),
+ SVN_TEST_PASS2(test_authz_recursive_override,
+ "test recursively authz rule override"),
+ SVN_TEST_PASS2(test_authz_pattern_tests,
+ "test various basic authz pattern combinations"),
+ SVN_TEST_PASS2(test_authz_wildcards,
+ "test the different types of authz wildcards"),
+ SVN_TEST_SKIP2(test_authz_wildcard_performance, TRUE,
+ "optional authz wildcard performance test"),
+ SVN_TEST_OPTS_PASS(test_list,
+ "test svn_repos_list"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/auth-test.c b/subversion/tests/libsvn_subr/auth-test.c
new file mode 100644
index 0000000..c184442
--- /dev/null
+++ b/subversion/tests/libsvn_subr/auth-test.c
@@ -0,0 +1,479 @@
+/*
+ * auth-test.c -- test the auth functions
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "svn_auth.h"
+#include "svn_dirent_uri.h"
+#include "svn_private_config.h"
+
+#include "../svn_test.h"
+#include "private/svn_auth_private.h"
+
+static svn_error_t *
+test_platform_specific_auth_providers(apr_pool_t *pool)
+{
+ apr_array_header_t *providers;
+ svn_auth_provider_object_t *provider;
+ int number_of_providers = 0;
+
+ /* Test non-available auth provider */
+ SVN_ERR(svn_auth_get_platform_specific_provider(&provider, "fake", "fake",
+ pool));
+
+ if (provider)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_auth_get_platform_specific_provider('fake', 'fake') should " \
+ "return NULL");
+
+ /* Make sure you get appropriate number of providers when retrieving
+ all auth providers */
+ SVN_ERR(svn_auth_get_platform_specific_client_providers(&providers, NULL,
+ pool));
+
+#if defined(SVN_HAVE_GNOME_KEYRING) || defined(SVN_HAVE_LIBSECRET)
+ number_of_providers += 2;
+#endif
+#ifdef SVN_HAVE_KWALLET
+ number_of_providers += 2;
+#endif
+#ifdef SVN_HAVE_GPG_AGENT
+ number_of_providers += 1;
+#endif
+#ifdef SVN_HAVE_KEYCHAIN_SERVICES
+ number_of_providers += 2;
+#endif
+#if defined(WIN32) && !defined(__MINGW32__)
+ number_of_providers += 4;
+#endif
+ if (providers->nelts != number_of_providers)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_auth_get_platform_specific_client_providers should return " \
+ "an array of %d providers, but returned %d providers",
+ number_of_providers, providers->nelts);
+
+ /* Test Keychain auth providers */
+#ifdef SVN_HAVE_KEYCHAIN_SERVICES
+ svn_auth_get_platform_specific_provider(&provider, "keychain",
+ "simple", pool);
+
+ if (!provider)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_auth_get_platform_specific_provider('keychain', 'simple') "
+ "should not return NULL");
+
+ svn_auth_get_platform_specific_provider(&provider, "keychain",
+ "ssl_client_cert_pw", pool);
+
+ if (!provider)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_auth_get_platform_specific_provider('keychain', " \
+ "'ssl_client_cert_pw') should not return NULL");
+
+ /* Make sure you do not get a Windows auth provider */
+ svn_auth_get_platform_specific_provider(&provider, "windows",
+ "simple", pool);
+
+ if (provider)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_auth_get_platform_specific_provider('windows', 'simple') should " \
+ "return NULL");
+#endif
+
+ /* Test Windows auth providers */
+#if defined(WIN32) && !defined(__MINGW32__)
+ svn_auth_get_platform_specific_provider(&provider, "windows",
+ "simple", pool);
+
+ if (!provider)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_auth_get_platform_specific_provider('windows', 'simple') "
+ "should not return NULL");
+
+
+ svn_auth_get_platform_specific_provider(&provider, "windows",
+ "ssl_client_cert_pw", pool);
+
+ if (!provider)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_auth_get_platform_specific_provider('windows', "
+ "'ssl_client_cert_pw') should not return NULL");
+
+ svn_auth_get_platform_specific_provider(&provider, "windows",
+ "ssl_server_trust", pool);
+
+ if (!provider)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_auth_get_platform_specific_provider('windows', "
+ "'ssl_server_trust') should not return NULL");
+
+ /* Make sure you do not get a Keychain auth provider */
+ svn_auth_get_platform_specific_provider(&provider, "keychain",
+ "simple", pool);
+
+ if (provider)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_auth_get_platform_specific_provider('keychain', 'simple') should " \
+ "return NULL");
+#endif
+
+ /* Test GNOME Keyring auth providers */
+#if defined(SVN_HAVE_GNOME_KEYRING) || defined(SVN_HAVE_LIBSECRET)
+ SVN_ERR(svn_auth_get_platform_specific_provider(&provider, "gnome_keyring",
+ "simple", pool));
+
+ if (!provider)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_auth_get_platform_specific_provider('gnome_keyring', 'simple') "
+ "should not return NULL");
+
+ SVN_ERR(svn_auth_get_platform_specific_provider(&provider, "gnome_keyring",
+ "ssl_client_cert_pw", pool));
+
+ if (!provider)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_auth_get_platform_specific_provider('gnome_keyring', " \
+ "'ssl_client_cert_pw') should not return NULL");
+
+ /* Make sure you do not get a Windows auth provider */
+ SVN_ERR(svn_auth_get_platform_specific_provider(&provider, "windows",
+ "simple", pool));
+
+ if (provider)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_auth_get_platform_specific_provider('windows', 'simple') should " \
+ "return NULL");
+#endif
+
+ /* Test KWallet auth providers */
+#ifdef SVN_HAVE_KWALLET
+ svn_auth_get_platform_specific_provider(&provider, "kwallet",
+ "simple", pool);
+
+ if (!provider)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_auth_get_platform_specific_provider('kwallet', 'simple') "
+ "should not return NULL");
+
+ svn_auth_get_platform_specific_provider(&provider, "kwallet",
+ "ssl_client_cert_pw", pool);
+
+ if (!provider)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_auth_get_platform_specific_provider('kwallet', " \
+ "'ssl_client_cert_pw') should not return NULL");
+
+ /* Make sure you do not get a Windows auth provider */
+ svn_auth_get_platform_specific_provider(&provider, "windows",
+ "simple", pool);
+
+ if (provider)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_auth_get_platform_specific_provider('windows', 'simple') should " \
+ "return NULL");
+#endif
+
+ return SVN_NO_ERROR;
+}
+
+/* Helper for test_auth_clear(). Implements svn_config_auth_walk_func_t */
+static svn_error_t *
+cleanup_callback(svn_boolean_t *delete_cred,
+ void *walk_baton,
+ const char *cred_kind,
+ const char *realmstring,
+ apr_hash_t *cred_hash,
+ apr_pool_t *scratch_pool)
+{
+ svn_auth_baton_t *b = walk_baton;
+
+ SVN_TEST_STRING_ASSERT(cred_kind, SVN_AUTH_CRED_SIMPLE);
+ SVN_TEST_STRING_ASSERT(realmstring, "<http://my.host> My realm");
+
+ SVN_ERR(svn_auth_forget_credentials(b, cred_kind, realmstring, scratch_pool));
+
+ *delete_cred = TRUE;
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_auth_clear(apr_pool_t *pool)
+{
+ const char *auth_dir;
+ svn_auth_provider_object_t *provider;
+ svn_auth_baton_t *baton;
+ apr_array_header_t *providers;
+ void *credentials;
+ svn_auth_cred_simple_t *creds;
+ svn_auth_iterstate_t *state;
+
+ SVN_ERR(svn_dirent_get_absolute(&auth_dir, "", pool));
+ auth_dir = svn_dirent_join(auth_dir, "auth-clear", pool);
+
+ svn_test_add_dir_cleanup(auth_dir);
+
+ SVN_ERR(svn_io_remove_dir2(auth_dir, TRUE, NULL, NULL, pool));
+ SVN_ERR(svn_io_dir_make(auth_dir, APR_OS_DEFAULT, pool));
+
+ svn_auth_get_simple_provider2(&provider, NULL, NULL, pool);
+
+ providers = apr_array_make(pool, 1, sizeof(svn_auth_provider_object_t *));
+ APR_ARRAY_PUSH(providers, svn_auth_provider_object_t *) = provider;
+
+ svn_auth_open(&baton, providers, pool);
+
+ svn_auth_set_parameter(baton, SVN_AUTH_PARAM_DEFAULT_USERNAME, "jrandom");
+ svn_auth_set_parameter(baton, SVN_AUTH_PARAM_DEFAULT_PASSWORD, "rayjandom");
+ svn_auth_set_parameter(baton, SVN_AUTH_PARAM_CONFIG_DIR, auth_dir);
+
+ /* Create the auth subdirs. Without these we can't store passwords */
+ SVN_ERR(svn_config_ensure(auth_dir, pool));
+
+ /* Obtain the default credentials just passed */
+ SVN_ERR(svn_auth_first_credentials(&credentials,
+ &state,
+ SVN_AUTH_CRED_SIMPLE,
+ "<http://my.host> My realm",
+ baton,
+ pool));
+
+ creds = credentials;
+ SVN_TEST_STRING_ASSERT(creds->username, "jrandom");
+ SVN_TEST_ASSERT(creds->may_save);
+
+ /* And tell that they are ok and can be saved */
+ SVN_ERR(svn_auth_save_credentials(state, pool));
+
+ /* Ok, and now we try to remove the credentials */
+ svn_auth_set_parameter(baton, SVN_AUTH_PARAM_DEFAULT_USERNAME, NULL);
+ svn_auth_set_parameter(baton, SVN_AUTH_PARAM_DEFAULT_PASSWORD, NULL);
+
+ /* Are they still in the baton? */
+ SVN_ERR(svn_auth_first_credentials(&credentials,
+ &state,
+ SVN_AUTH_CRED_SIMPLE,
+ "<http://my.host> My realm",
+ baton,
+ pool));
+
+ SVN_TEST_ASSERT(credentials);
+ creds = credentials;
+ SVN_TEST_STRING_ASSERT(creds->username, "jrandom");
+ SVN_TEST_ASSERT(creds->may_save);
+
+ /* Use our walker function to delete credentials (and forget them
+ from the auth baton). */
+ SVN_ERR(svn_config_walk_auth_data(auth_dir, cleanup_callback, baton, pool));
+
+ /* Finally, they should be gone! */
+ SVN_ERR(svn_auth_first_credentials(&credentials,
+ &state,
+ SVN_AUTH_CRED_SIMPLE,
+ "<http://my.host> My realm",
+ baton,
+ pool));
+
+ SVN_TEST_ASSERT(! credentials);
+
+ return SVN_NO_ERROR;
+}
+
+struct plaintext_baton_t
+{
+ int nr_calls;
+ svn_boolean_t may_save;
+};
+static svn_error_t *
+plaintext_prompt_cb(svn_boolean_t *may_save_plaintext,
+ const char *realmstring,
+ void *baton,
+ apr_pool_t *pool)
+{
+ struct plaintext_baton_t *b = baton;
+ b->nr_calls++;
+ *may_save_plaintext = b->may_save;
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_save_cleartext(apr_pool_t *pool)
+{
+#ifndef SVN_DISABLE_PLAINTEXT_PASSWORD_STORAGE
+# define EXPECT_NO_CALLS 0
+# define EXPECT_ONE_CALL 1
+# define EXPECT_TWO_CALLS 2
+#else
+# define EXPECT_NO_CALLS 0
+# define EXPECT_ONE_CALL 0
+# define EXPECT_TWO_CALLS 0
+#endif
+
+ const char *auth_dir;
+ svn_auth_baton_t *baton, *slave;
+ svn_auth_provider_object_t *provider;
+ apr_array_header_t *providers;
+ void *credentials;
+ svn_auth_iterstate_t *state;
+ struct plaintext_baton_t pb = {0, FALSE};
+
+ SVN_ERR(svn_dirent_get_absolute(&auth_dir, "save-cleartext", pool));
+
+ SVN_ERR(svn_io_remove_dir2(auth_dir, TRUE, NULL, NULL, pool));
+ SVN_ERR(svn_io_dir_make(auth_dir, APR_OS_DEFAULT, pool));
+ svn_test_add_dir_cleanup(auth_dir);
+
+ svn_auth_get_simple_provider2(&provider, plaintext_prompt_cb, &pb, pool);
+
+ providers = apr_array_make(pool, 1, sizeof(svn_auth_provider_object_t *));
+ APR_ARRAY_PUSH(providers, svn_auth_provider_object_t *) = provider;
+
+ svn_auth_open(&baton, providers, pool);
+
+ svn_auth_set_parameter(baton, SVN_AUTH_PARAM_DEFAULT_USERNAME, "jrandom");
+ svn_auth_set_parameter(baton, SVN_AUTH_PARAM_DEFAULT_PASSWORD, "rayjandom");
+ svn_auth_set_parameter(baton, SVN_AUTH_PARAM_CONFIG_DIR, auth_dir);
+
+ /* Create the auth subdirs. Without these we can't store passwords */
+ SVN_ERR(svn_config_ensure(auth_dir, pool));
+ pb.nr_calls = 0;
+
+ /* Legacy behavior: Don't ask: Save */
+ SVN_ERR(svn_auth_first_credentials(&credentials, &state,
+ SVN_AUTH_CRED_SIMPLE,
+ "realm-1", baton, pool));
+ SVN_TEST_ASSERT(credentials != NULL);
+ SVN_ERR(svn_auth_save_credentials(state, pool));
+ SVN_TEST_ASSERT(pb.nr_calls == EXPECT_NO_CALLS);
+
+ /* Set to ask */
+ svn_auth_set_parameter(baton, SVN_AUTH_PARAM_STORE_PLAINTEXT_PASSWORDS,
+ SVN_CONFIG_ASK);
+ SVN_ERR(svn_auth_first_credentials(&credentials, &state,
+ SVN_AUTH_CRED_SIMPLE,
+ "realm-2", baton, pool));
+ SVN_TEST_ASSERT(credentials != NULL);
+ SVN_ERR(svn_auth_save_credentials(state, pool));
+ SVN_TEST_ASSERT(pb.nr_calls == EXPECT_ONE_CALL);
+
+ /* Set to true */
+ svn_auth_set_parameter(baton, SVN_AUTH_PARAM_STORE_PLAINTEXT_PASSWORDS,
+ SVN_CONFIG_TRUE);
+ SVN_ERR(svn_auth_first_credentials(&credentials, &state,
+ SVN_AUTH_CRED_SIMPLE,
+ "realm-3", baton, pool));
+ SVN_TEST_ASSERT(credentials != NULL);
+ SVN_ERR(svn_auth_save_credentials(state, pool));
+ SVN_TEST_ASSERT(pb.nr_calls == EXPECT_ONE_CALL);
+
+ /* Set to false */
+ svn_auth_set_parameter(baton, SVN_AUTH_PARAM_STORE_PLAINTEXT_PASSWORDS,
+ SVN_CONFIG_FALSE);
+ SVN_ERR(svn_auth_first_credentials(&credentials, &state,
+ SVN_AUTH_CRED_SIMPLE,
+ "realm-4", baton, pool));
+ SVN_TEST_ASSERT(credentials != NULL);
+ SVN_ERR(svn_auth_save_credentials(state, pool));
+ SVN_TEST_ASSERT(pb.nr_calls == EXPECT_ONE_CALL);
+
+ /* Reset baton...*/
+ svn_auth_set_parameter(baton, SVN_AUTH_PARAM_STORE_PLAINTEXT_PASSWORDS,
+ NULL);
+ pb.nr_calls = 0;
+
+ SVN_ERR(svn_auth__make_session_auth(&slave, baton, NULL, "dummy",
+ pool, pool));
+
+
+ /* Standard behavior after make session auth: */
+ SVN_ERR(svn_auth_first_credentials(&credentials, &state,
+ SVN_AUTH_CRED_SIMPLE,
+ "realm-1a", slave, pool));
+ SVN_TEST_ASSERT(credentials != NULL);
+ SVN_ERR(svn_auth_save_credentials(state, pool));
+ SVN_TEST_ASSERT(pb.nr_calls == EXPECT_ONE_CALL);
+
+ /* Set to ask */
+ svn_auth_set_parameter(slave, SVN_AUTH_PARAM_STORE_PLAINTEXT_PASSWORDS,
+ SVN_CONFIG_ASK);
+ SVN_ERR(svn_auth_first_credentials(&credentials, &state,
+ SVN_AUTH_CRED_SIMPLE,
+ "realm-2a", slave, pool));
+ SVN_TEST_ASSERT(credentials != NULL);
+ SVN_ERR(svn_auth_save_credentials(state, pool));
+ SVN_TEST_ASSERT(pb.nr_calls == EXPECT_TWO_CALLS);
+
+ /* Set to true */
+ svn_auth_set_parameter(slave, SVN_AUTH_PARAM_STORE_PLAINTEXT_PASSWORDS,
+ SVN_CONFIG_TRUE);
+ SVN_ERR(svn_auth_first_credentials(&credentials, &state,
+ SVN_AUTH_CRED_SIMPLE,
+ "realm-3a", slave, pool));
+ SVN_TEST_ASSERT(credentials != NULL);
+ SVN_ERR(svn_auth_save_credentials(state, pool));
+ SVN_TEST_ASSERT(pb.nr_calls == EXPECT_TWO_CALLS);
+
+ /* Set to false */
+ svn_auth_set_parameter(slave, SVN_AUTH_PARAM_STORE_PLAINTEXT_PASSWORDS,
+ SVN_CONFIG_FALSE);
+ SVN_ERR(svn_auth_first_credentials(&credentials, &state,
+ SVN_AUTH_CRED_SIMPLE,
+ "realm-4a", slave, pool));
+ SVN_TEST_ASSERT(credentials != NULL);
+ SVN_ERR(svn_auth_save_credentials(state, pool));
+ SVN_TEST_ASSERT(pb.nr_calls == EXPECT_TWO_CALLS);
+
+
+ return SVN_NO_ERROR;
+}
+
+/* The test table. */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_platform_specific_auth_providers,
+ "test retrieving platform-specific auth providers"),
+ SVN_TEST_PASS2(test_auth_clear,
+ "test svn_auth_clear()"),
+ SVN_TEST_PASS2(test_save_cleartext,
+ "test save cleartext info"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/bit-array-test.c b/subversion/tests/libsvn_subr/bit-array-test.c
new file mode 100644
index 0000000..501f622
--- /dev/null
+++ b/subversion/tests/libsvn_subr/bit-array-test.c
@@ -0,0 +1,168 @@
+/*
+ * bit-array-test.c: a collection of svn_bit_array__* tests
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* ====================================================================
+ To add tests, look toward the bottom of this file.
+
+*/
+
+
+
+#include <stdio.h>
+#include <string.h>
+#include <apr_pools.h>
+
+#include "../svn_test.h"
+
+#include "svn_error.h"
+#include "svn_string.h" /* This includes <apr_*.h> */
+#include "private/svn_subr_private.h"
+
+static svn_error_t *
+test_zero_defaults(apr_pool_t *pool)
+{
+ svn_bit_array__t *array = svn_bit_array__create(0, pool);
+
+ /* Test (default) allocation boundaries */
+ SVN_TEST_ASSERT(svn_bit_array__get(array, 0x7ffff) == 0);
+ SVN_TEST_ASSERT(svn_bit_array__get(array, 0x80000) == 0);
+
+ /* Test address boundaries */
+ SVN_TEST_ASSERT(svn_bit_array__get(array, 0) == 0);
+ SVN_TEST_ASSERT(svn_bit_array__get(array, APR_SIZE_MAX) == 0);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_get_set(apr_pool_t *pool)
+{
+ svn_bit_array__t *array = svn_bit_array__create(0, pool);
+ apr_size_t i, min = 0x7ff00, max = 0x7ff00 + 1025;
+
+ /* All values default to 0. */
+ for (i = min; i < max; ++i)
+ SVN_TEST_ASSERT(svn_bit_array__get(array, i) == 0);
+
+ /* Create a pattern, setting every other bit. Array will also auto-grow. */
+ for (i = min; i < max; ++i)
+ if (i % 2)
+ svn_bit_array__set(array, i, 1);
+
+ /* Verify pattern */
+ for (i = min; i < max; ++i)
+ {
+ if (i % 2)
+ SVN_TEST_ASSERT(svn_bit_array__get(array, i) == TRUE);
+ else
+ SVN_TEST_ASSERT(svn_bit_array__get(array, i) == FALSE);
+ }
+
+ /* Zero the zeros in the pattern -> should be no change. */
+ for (i = min; i < max; ++i)
+ if (i % 2 == 0)
+ svn_bit_array__set(array, i, 0);
+
+ /* Verify pattern */
+ for (i = min; i < max; ++i)
+ {
+ if (i % 2)
+ SVN_TEST_ASSERT(svn_bit_array__get(array, i) == TRUE);
+ else
+ SVN_TEST_ASSERT(svn_bit_array__get(array, i) == FALSE);
+ }
+
+ /* Write an inverted pattern while verifying the old one. */
+ for (i = min; i < max; ++i)
+ {
+ if (i % 2)
+ {
+ SVN_TEST_ASSERT(svn_bit_array__get(array, i) == TRUE);
+ svn_bit_array__set(array, i, FALSE);
+ }
+ else
+ {
+ SVN_TEST_ASSERT(svn_bit_array__get(array, i) == FALSE);
+ svn_bit_array__set(array, i, TRUE);
+ }
+ }
+
+ /* Verify pattern */
+ for (i = min; i < max; ++i)
+ {
+ if (i % 2)
+ SVN_TEST_ASSERT(svn_bit_array__get(array, i) == FALSE);
+ else
+ SVN_TEST_ASSERT(svn_bit_array__get(array, i) == TRUE);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_sparse(apr_pool_t *pool)
+{
+ svn_bit_array__t *array = svn_bit_array__create(0, pool);
+ apr_size_t i, k, min = 0x7ff00, max = 0x7ff00 + 1025, SCALE = 0x10000000;
+
+ /* All values default to 0. */
+ for (i = 0; i < 15; ++i)
+ for (k = i * SCALE + min; k < i * SCALE + max; ++k)
+ SVN_TEST_ASSERT(svn_bit_array__get(array, k) == 0);
+
+ /* Create a pattern, setting every other bit. Array will also auto-grow. */
+ for (i = 0; i < 15; ++i)
+ for (k = i * SCALE + min; k < i * SCALE + max; ++k)
+ if (k % 2)
+ svn_bit_array__set(array, k, 1);
+
+ /* Verify pattern */
+ for (i = 0; i < 15; ++i)
+ for (k = i * SCALE + min; k < i * SCALE + max; ++k)
+ {
+ if (k % 2)
+ SVN_TEST_ASSERT(svn_bit_array__get(array, k) == TRUE);
+ else
+ SVN_TEST_ASSERT(svn_bit_array__get(array, k) == FALSE);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* An array of all test functions */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_zero_defaults,
+ "check entries to default to zero"),
+ SVN_TEST_PASS2(test_get_set,
+ "get / set entries"),
+ SVN_TEST_PASS2(test_sparse,
+ "get / set sparse entries"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/cache-test.c b/subversion/tests/libsvn_subr/cache-test.c
new file mode 100644
index 0000000..dabdf0b
--- /dev/null
+++ b/subversion/tests/libsvn_subr/cache-test.c
@@ -0,0 +1,622 @@
+/*
+ * cache-test.c -- test the in-memory cache
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <apr_general.h>
+#include <apr_lib.h>
+#include <apr_time.h>
+
+#include "svn_pools.h"
+
+#include "private/svn_cache.h"
+#include "svn_private_config.h"
+
+#include "../svn_test.h"
+
+/* Create memcached cache if configured */
+static svn_error_t *
+create_memcache(svn_memcache_t **memcache,
+ const svn_test_opts_t *opts,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ svn_config_t *config = NULL;
+ if (opts->config_file)
+ {
+ SVN_ERR(svn_config_read3(&config, opts->config_file,
+ TRUE, FALSE, FALSE, scratch_pool));
+ }
+ else if (opts->memcached_server)
+ {
+ SVN_ERR(svn_config_create2(&config, FALSE, FALSE, scratch_pool));
+
+ svn_config_set(config, SVN_CACHE_CONFIG_CATEGORY_MEMCACHED_SERVERS,
+ "key" /* some value; ignored*/,
+ opts->memcached_server);
+ }
+
+ if (config)
+ {
+ SVN_ERR(svn_cache__make_memcache_from_config(memcache, config,
+ result_pool, scratch_pool));
+ }
+ else
+ *memcache = NULL;
+
+ return SVN_NO_ERROR;
+}
+
+/* Implements svn_cache__serialize_func_t */
+static svn_error_t *
+serialize_revnum(void **data,
+ apr_size_t *data_len,
+ void *in,
+ apr_pool_t *pool)
+{
+ *data_len = sizeof(svn_revnum_t);
+ *data = apr_pmemdup(pool, in, *data_len);
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Implements svn_cache__deserialize_func_t */
+static svn_error_t *
+deserialize_revnum(void **out,
+ void *data,
+ apr_size_t data_len,
+ apr_pool_t *pool)
+{
+ const svn_revnum_t *in_rev = (const svn_revnum_t *) data;
+ svn_revnum_t *out_rev;
+
+ if (data_len != sizeof(*in_rev))
+ return svn_error_create(SVN_ERR_REVNUM_PARSE_FAILURE, NULL,
+ _("Bad size for revision number in cache"));
+ out_rev = apr_palloc(pool, sizeof(*out_rev));
+ *out_rev = *in_rev;
+ *out = out_rev;
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+basic_cache_test(svn_cache__t *cache,
+ svn_boolean_t size_is_one,
+ apr_pool_t *pool)
+{
+ svn_boolean_t found;
+ svn_revnum_t twenty = 20, thirty = 30, *answer;
+ apr_pool_t *subpool;
+
+ /* We use a subpool for all calls in this test and aggressively
+ * clear it, to try to find any bugs where the cached values aren't
+ * actually saved away in the cache's pools. */
+ subpool = svn_pool_create(pool);
+
+ SVN_ERR(svn_cache__get((void **) &answer, &found, cache, "twenty", subpool));
+ if (found)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "cache found an entry that wasn't there");
+ svn_pool_clear(subpool);
+
+ SVN_ERR(svn_cache__set(cache, "twenty", &twenty, subpool));
+ svn_pool_clear(subpool);
+
+ SVN_ERR(svn_cache__get((void **) &answer, &found, cache, "twenty", subpool));
+ if (! found)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "cache failed to find entry for 'twenty'");
+ if (*answer != 20)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "expected 20 but found '%ld'", *answer);
+ svn_pool_clear(subpool);
+
+ SVN_ERR(svn_cache__set(cache, "thirty", &thirty, subpool));
+ svn_pool_clear(subpool);
+
+ SVN_ERR(svn_cache__get((void **) &answer, &found, cache, "thirty", subpool));
+ if (! found)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "cache failed to find entry for 'thirty'");
+ if (*answer != 30)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "expected 30 but found '%ld'", *answer);
+
+ if (size_is_one)
+ {
+ SVN_ERR(svn_cache__get((void **) &answer, &found, cache, "twenty", subpool));
+ if (found)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "cache found entry for 'twenty' that should have "
+ "expired");
+ }
+ svn_pool_destroy(subpool);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_inprocess_cache_basic(apr_pool_t *pool)
+{
+ svn_cache__t *cache;
+
+ /* Create a cache with just one entry. */
+ SVN_ERR(svn_cache__create_inprocess(&cache,
+ serialize_revnum,
+ deserialize_revnum,
+ APR_HASH_KEY_STRING,
+ 1,
+ 1,
+ TRUE,
+ "",
+ pool));
+
+ return basic_cache_test(cache, TRUE, pool);
+}
+
+static svn_error_t *
+test_memcache_basic(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_cache__t *cache;
+ svn_memcache_t *memcache = NULL;
+ const char *prefix = apr_psprintf(pool,
+ "test_memcache_basic-%" APR_TIME_T_FMT,
+ apr_time_now());
+
+ SVN_ERR(create_memcache(&memcache, opts, pool, pool));
+ if (! memcache)
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "not configured to use memcached");
+
+
+ /* Create a memcache-based cache. */
+ SVN_ERR(svn_cache__create_memcache(&cache,
+ memcache,
+ serialize_revnum,
+ deserialize_revnum,
+ APR_HASH_KEY_STRING,
+ prefix,
+ pool));
+
+ return basic_cache_test(cache, FALSE, pool);
+}
+
+static svn_error_t *
+test_membuffer_cache_basic(apr_pool_t *pool)
+{
+ svn_cache__t *cache;
+ svn_membuffer_t *membuffer;
+
+ SVN_ERR(svn_cache__membuffer_cache_create(&membuffer, 10*1024, 1, 0,
+ TRUE, TRUE, pool));
+
+ /* Create a cache with just one entry. */
+ SVN_ERR(svn_cache__create_membuffer_cache(&cache,
+ membuffer,
+ serialize_revnum,
+ deserialize_revnum,
+ APR_HASH_KEY_STRING,
+ "cache:",
+ SVN_CACHE__MEMBUFFER_DEFAULT_PRIORITY,
+ FALSE,
+ FALSE,
+ pool, pool));
+
+ return basic_cache_test(cache, FALSE, pool);
+}
+
+/* Implements svn_cache__deserialize_func_t */
+static svn_error_t *
+raise_error_deserialize_func(void **out,
+ void *data,
+ apr_size_t data_len,
+ apr_pool_t *pool)
+{
+ return svn_error_create(APR_EGENERAL, NULL, NULL);
+}
+
+/* Implements svn_cache__partial_getter_func_t */
+static svn_error_t *
+raise_error_partial_getter_func(void **out,
+ const void *data,
+ apr_size_t data_len,
+ void *baton,
+ apr_pool_t *result_pool)
+{
+ return svn_error_create(APR_EGENERAL, NULL, NULL);
+}
+
+/* Implements svn_cache__partial_setter_func_t */
+static svn_error_t *
+raise_error_partial_setter_func(void **data,
+ apr_size_t *data_len,
+ void *baton,
+ apr_pool_t *result_pool)
+{
+ return svn_error_create(APR_EGENERAL, NULL, NULL);
+}
+
+static svn_error_t *
+test_membuffer_serializer_error_handling(apr_pool_t *pool)
+{
+ svn_cache__t *cache;
+ svn_membuffer_t *membuffer;
+ svn_revnum_t twenty = 20;
+ svn_boolean_t found;
+ void *val;
+
+ SVN_ERR(svn_cache__membuffer_cache_create(&membuffer, 10*1024, 1, 0,
+ TRUE, TRUE, pool));
+
+ /* Create a cache with just one entry. */
+ SVN_ERR(svn_cache__create_membuffer_cache(&cache,
+ membuffer,
+ serialize_revnum,
+ raise_error_deserialize_func,
+ APR_HASH_KEY_STRING,
+ "cache:",
+ SVN_CACHE__MEMBUFFER_DEFAULT_PRIORITY,
+ FALSE,
+ FALSE,
+ pool, pool));
+
+ SVN_ERR(svn_cache__set(cache, "twenty", &twenty, pool));
+
+ /* Test retrieving data from cache using full getter that
+ always raises an error. */
+ SVN_TEST_ASSERT_ERROR(
+ svn_cache__get(&val, &found, cache, "twenty", pool),
+ APR_EGENERAL);
+
+ /* Test retrieving data from cache using partial getter that
+ always raises an error. */
+ SVN_TEST_ASSERT_ERROR(
+ svn_cache__get_partial(&val, &found, cache, "twenty",
+ raise_error_partial_getter_func,
+ NULL, pool),
+ APR_EGENERAL);
+
+ /* Create a new cache. */
+ SVN_ERR(svn_cache__membuffer_cache_create(&membuffer, 10*1024, 1, 0,
+ TRUE, TRUE, pool));
+ SVN_ERR(svn_cache__create_membuffer_cache(&cache,
+ membuffer,
+ serialize_revnum,
+ deserialize_revnum,
+ APR_HASH_KEY_STRING,
+ "cache:",
+ SVN_CACHE__MEMBUFFER_DEFAULT_PRIORITY,
+ FALSE,
+ FALSE,
+ pool, pool));
+
+ /* Store one entry in cache. */
+ SVN_ERR(svn_cache__set(cache, "twenty", &twenty, pool));
+
+ /* Test setting data in cache using partial setter that
+ always raises an error. */
+ SVN_TEST_ASSERT_ERROR(
+ svn_cache__set_partial(cache, "twenty",
+ raise_error_partial_setter_func,
+ NULL, pool),
+ APR_EGENERAL);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_memcache_long_key(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_cache__t *cache;
+ svn_memcache_t *memcache = NULL;
+ svn_revnum_t fifty = 50, *answer;
+ svn_boolean_t found = FALSE;
+ const char *prefix = apr_psprintf(pool,
+ "test_memcache_long_key-%" APR_TIME_T_FMT,
+ apr_time_now());
+ static const char *long_key =
+ "0123456789" "0123456789" "0123456789" "0123456789" "0123456789" /* 50 */
+ "0123456789" "0123456789" "0123456789" "0123456789" "0123456789" /* 100 */
+ "0123456789" "0123456789" "0123456789" "0123456789" "0123456789" /* 150 */
+ "0123456789" "0123456789" "0123456789" "0123456789" "0123456789" /* 200 */
+ "0123456789" "0123456789" "0123456789" "0123456789" "0123456789" /* 250 */
+ "0123456789" "0123456789" "0123456789" "0123456789" "0123456789" /* 300 */
+ ;
+
+ SVN_ERR(create_memcache(&memcache, opts, pool, pool));
+
+ if (! memcache)
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "not configured to use memcached");
+
+
+ /* Create a memcache-based cache. */
+ SVN_ERR(svn_cache__create_memcache(&cache,
+ memcache,
+ serialize_revnum,
+ deserialize_revnum,
+ APR_HASH_KEY_STRING,
+ prefix,
+ pool));
+
+ SVN_ERR(svn_cache__set(cache, long_key, &fifty, pool));
+ SVN_ERR(svn_cache__get((void **) &answer, &found, cache, long_key, pool));
+
+ if (! found)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "cache failed to find entry for 'fifty'");
+ if (*answer != 50)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "expected 50 but found '%ld'", *answer);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_membuffer_cache_clearing(apr_pool_t *pool)
+{
+ svn_cache__t *cache;
+ svn_membuffer_t *membuffer;
+ svn_boolean_t found;
+ svn_revnum_t *value;
+ svn_revnum_t valueA = 12345;
+ svn_revnum_t valueB = 67890;
+
+ /* Create a simple cache for strings, keyed by strings. */
+ SVN_ERR(svn_cache__membuffer_cache_create(&membuffer, 10*1024, 1, 0,
+ TRUE, TRUE, pool));
+ SVN_ERR(svn_cache__create_membuffer_cache(&cache,
+ membuffer,
+ serialize_revnum,
+ deserialize_revnum,
+ APR_HASH_KEY_STRING,
+ "cache:",
+ SVN_CACHE__MEMBUFFER_DEFAULT_PRIORITY,
+ FALSE,
+ FALSE,
+ pool, pool));
+
+ /* Initially, the cache is empty. */
+ SVN_ERR(svn_cache__get((void **) &value, &found, cache, "key A", pool));
+ SVN_TEST_ASSERT(!found);
+ SVN_ERR(svn_cache__get((void **) &value, &found, cache, "key B", pool));
+ SVN_TEST_ASSERT(!found);
+ SVN_ERR(svn_cache__get((void **) &value, &found, cache, "key C", pool));
+ SVN_TEST_ASSERT(!found);
+
+ /* Add entries. */
+ SVN_ERR(svn_cache__set(cache, "key A", &valueA, pool));
+ SVN_ERR(svn_cache__set(cache, "key B", &valueB, pool));
+
+ /* Added entries should be cached (too small to get evicted already). */
+ SVN_ERR(svn_cache__get((void **) &value, &found, cache, "key A", pool));
+ SVN_TEST_ASSERT(found);
+ SVN_TEST_ASSERT(*value == valueA);
+ SVN_ERR(svn_cache__get((void **) &value, &found, cache, "key B", pool));
+ SVN_TEST_ASSERT(found);
+ SVN_TEST_ASSERT(*value == valueB);
+ SVN_ERR(svn_cache__get((void **) &value, &found, cache, "key C", pool));
+ SVN_TEST_ASSERT(!found);
+
+ /* Clear the cache. */
+ SVN_ERR(svn_cache__membuffer_clear(membuffer));
+
+ /* The cache is empty again. */
+ SVN_ERR(svn_cache__get((void **) &value, &found, cache, "key A", pool));
+ SVN_TEST_ASSERT(!found);
+ SVN_ERR(svn_cache__get((void **) &value, &found, cache, "key B", pool));
+ SVN_TEST_ASSERT(!found);
+ SVN_ERR(svn_cache__get((void **) &value, &found, cache, "key C", pool));
+ SVN_TEST_ASSERT(!found);
+
+ /* But still functional: */
+ SVN_ERR(svn_cache__set(cache, "key B", &valueB, pool));
+ SVN_ERR(svn_cache__has_key(&found, cache, "key A", pool));
+ SVN_TEST_ASSERT(!found);
+ SVN_ERR(svn_cache__has_key(&found, cache, "key B", pool));
+ SVN_TEST_ASSERT(found);
+ SVN_ERR(svn_cache__has_key(&found, cache, "key C", pool));
+ SVN_TEST_ASSERT(!found);
+
+ return SVN_NO_ERROR;
+}
+
+/* Implements svn_iter_apr_hash_cb_t. */
+static svn_error_t *
+null_cache_iter_func(void *baton,
+ const void *key,
+ apr_ssize_t klen,
+ void *val,
+ apr_pool_t *pool)
+{
+ /* shall never be called */
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL, "should not be called");
+}
+
+static svn_error_t *
+test_null_cache(apr_pool_t *pool)
+{
+ svn_boolean_t found, done;
+ int *data = NULL;
+ svn_cache__info_t info;
+
+ svn_cache__t *cache;
+ SVN_ERR(svn_cache__create_null(&cache, "test-dummy", pool));
+
+ /* Can't cache anything. */
+ SVN_TEST_ASSERT(svn_cache__is_cachable(cache, 0) == FALSE);
+ SVN_TEST_ASSERT(svn_cache__is_cachable(cache, 1) == FALSE);
+
+ /* No point in adding data. */
+ SVN_ERR(svn_cache__set(cache, "data", &data, pool));
+ SVN_ERR(svn_cache__get((void **)&data, &found, cache, "data", pool));
+ SVN_TEST_ASSERT(found == FALSE);
+
+ SVN_ERR(svn_cache__has_key(&found, cache, "data", pool));
+ SVN_TEST_ASSERT(found == FALSE);
+
+ /* Iteration "works" but is a no-op. */
+ SVN_ERR(svn_cache__iter(&done, cache, null_cache_iter_func, NULL, pool));
+ SVN_TEST_ASSERT(done);
+
+ /* It shall know its name. */
+ SVN_ERR(svn_cache__get_info(cache, &info, TRUE, pool));
+ SVN_TEST_STRING_ASSERT(info.id, "test-dummy");
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_membuffer_unaligned_string_keys(apr_pool_t *pool)
+{
+ svn_cache__t *cache;
+ svn_membuffer_t *membuffer;
+ svn_revnum_t fifty = 50;
+ svn_revnum_t *answer;
+ svn_boolean_t found = FALSE;
+
+ /* Allocate explicitly to have aligned string and this add one
+ * to have unaligned string.*/
+ const char *aligned_key = apr_pstrdup(pool, "fifty");
+ const char *unaligned_key = apr_pstrdup(pool, "_fifty") + 1;
+ const char *unaligned_prefix = apr_pstrdup(pool, "_cache:") + 1;
+
+ SVN_ERR(svn_cache__membuffer_cache_create(&membuffer, 10*1024, 1, 0,
+ TRUE, TRUE, pool));
+
+ /* Create a cache with just one entry. */
+ SVN_ERR(svn_cache__create_membuffer_cache(
+ &cache, membuffer, serialize_revnum, deserialize_revnum,
+ APR_HASH_KEY_STRING, unaligned_prefix,
+ SVN_CACHE__MEMBUFFER_DEFAULT_PRIORITY, FALSE, FALSE,
+ pool, pool));
+
+ SVN_ERR(svn_cache__set(cache, unaligned_key, &fifty, pool));
+ SVN_ERR(svn_cache__get((void **) &answer, &found, cache, unaligned_key,
+ pool));
+
+ if (! found)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "cache failed to find entry for 'fifty'");
+ if (*answer != 50)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "expected 50 but found '%ld'", *answer);
+
+ /* Make sure that we get proper result when providing aligned key*/
+ SVN_ERR(svn_cache__get((void **) &answer, &found, cache, aligned_key,
+ pool));
+
+ if (! found)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "cache failed to find entry for 'fifty'");
+ if (*answer != 50)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "expected 50 but found '%ld'", *answer);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_membuffer_unaligned_fixed_keys(apr_pool_t *pool)
+{
+ svn_cache__t *cache;
+ svn_membuffer_t *membuffer;
+ svn_revnum_t fifty = 50;
+ svn_revnum_t *answer;
+ svn_boolean_t found = FALSE;
+
+ /* Allocate explicitly to have aligned string and this add one
+ * to have unaligned key.*/
+ const char *aligned_key = apr_pstrdup(pool, "12345678");
+ const char *unaligned_key = apr_pstrdup(pool, "_12345678") + 1;
+ const char *unaligned_prefix = apr_pstrdup(pool, "_cache:") + 1;
+
+ SVN_ERR(svn_cache__membuffer_cache_create(&membuffer, 10*1024, 1, 0,
+ TRUE, TRUE, pool));
+
+ /* Create a cache with just one entry. */
+ SVN_ERR(svn_cache__create_membuffer_cache(
+ &cache, membuffer, serialize_revnum, deserialize_revnum,
+ 8 /* klen*/,
+ unaligned_prefix,
+ SVN_CACHE__MEMBUFFER_DEFAULT_PRIORITY, FALSE, FALSE,
+ pool, pool));
+
+ SVN_ERR(svn_cache__set(cache, unaligned_key, &fifty, pool));
+ SVN_ERR(svn_cache__get((void **) &answer, &found, cache, unaligned_key,
+ pool));
+
+ if (! found)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "cache failed to find entry for '12345678' (unaligned)");
+ if (*answer != 50)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "expected 50 but found '%ld'", *answer);
+
+ /* Make sure that we get proper result when providing aligned key*/
+ SVN_ERR(svn_cache__get((void **) &answer, &found, cache, aligned_key,
+ pool));
+
+ if (! found)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "cache failed to find entry for '12345678' (aligned)");
+ if (*answer != 50)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "expected 50 but found '%ld'", *answer);
+
+ return SVN_NO_ERROR;
+}
+
+
+/* The test table. */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_inprocess_cache_basic,
+ "basic inprocess svn_cache test"),
+ SVN_TEST_OPTS_PASS(test_memcache_basic,
+ "basic memcache svn_cache test"),
+ SVN_TEST_OPTS_PASS(test_memcache_long_key,
+ "memcache svn_cache with very long keys"),
+ SVN_TEST_PASS2(test_membuffer_cache_basic,
+ "basic membuffer svn_cache test"),
+ SVN_TEST_PASS2(test_membuffer_serializer_error_handling,
+ "test for error handling in membuffer svn_cache"),
+ SVN_TEST_PASS2(test_membuffer_cache_clearing,
+ "test clearing a membuffer svn_cache"),
+ SVN_TEST_PASS2(test_null_cache,
+ "basic null svn_cache test"),
+ SVN_TEST_PASS2(test_membuffer_unaligned_string_keys,
+ "test membuffer cache with unaligned string keys"),
+ SVN_TEST_PASS2(test_membuffer_unaligned_fixed_keys,
+ "test membuffer cache with unaligned fixed keys"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/checksum-test.c b/subversion/tests/libsvn_subr/checksum-test.c
new file mode 100644
index 0000000..164e4bf
--- /dev/null
+++ b/subversion/tests/libsvn_subr/checksum-test.c
@@ -0,0 +1,395 @@
+/*
+ * checksum-test.c: tests checksum functions.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <apr_pools.h>
+
+#include <zlib.h>
+
+#include "svn_error.h"
+#include "svn_io.h"
+
+#include "../svn_test.h"
+
+/* Verify that DIGEST of checksum type KIND can be parsed and
+ * converted back to a string matching DIGEST. NAME will be used
+ * to identify the type of checksum in error messages.
+ */
+static svn_error_t *
+checksum_parse_kind(const char *digest,
+ svn_checksum_kind_t kind,
+ const char *name,
+ apr_pool_t *pool)
+{
+ const char *checksum_display;
+ svn_checksum_t *checksum;
+
+ SVN_ERR(svn_checksum_parse_hex(&checksum, kind, digest, pool));
+ checksum_display = svn_checksum_to_cstring_display(checksum, pool);
+
+ if (strcmp(checksum_display, digest) != 0)
+ return svn_error_createf
+ (SVN_ERR_CHECKSUM_MISMATCH, NULL,
+ "verify-checksum: %s checksum mismatch:\n"
+ " expected: %s\n"
+ " actual: %s\n", name, digest, checksum_display);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_checksum_parse(apr_pool_t *pool)
+{
+ SVN_ERR(checksum_parse_kind("8518b76f7a45fe4de2d0955085b41f98",
+ svn_checksum_md5, "md5", pool));
+ SVN_ERR(checksum_parse_kind("74d82379bcc6771454377db03b912c2b62704139",
+ svn_checksum_sha1, "sha1", pool));
+ SVN_ERR(checksum_parse_kind("deadbeef",
+ svn_checksum_fnv1a_32, "fnv-1a", pool));
+ SVN_ERR(checksum_parse_kind("cafeaffe",
+ svn_checksum_fnv1a_32x4,
+ "modified fnv-1a", pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_checksum_empty(apr_pool_t *pool)
+{
+ svn_checksum_kind_t kind;
+ for (kind = svn_checksum_md5; kind <= svn_checksum_fnv1a_32x4; ++kind)
+ {
+ svn_checksum_t *checksum;
+ char data = '\0';
+
+ checksum = svn_checksum_empty_checksum(kind, pool);
+ SVN_TEST_ASSERT(svn_checksum_is_empty_checksum(checksum));
+
+ SVN_ERR(svn_checksum(&checksum, kind, &data, 0, pool));
+ SVN_TEST_ASSERT(svn_checksum_is_empty_checksum(checksum));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Verify that "zero" checksums work properly for the given checksum KIND.
+ */
+static svn_error_t *
+zero_match_kind(svn_checksum_kind_t kind, apr_pool_t *pool)
+{
+ svn_checksum_t *zero;
+ svn_checksum_t *A;
+ svn_checksum_t *B;
+
+ zero = svn_checksum_create(kind, pool);
+ SVN_ERR(svn_checksum_clear(zero));
+ SVN_ERR(svn_checksum(&A, kind, "A", 1, pool));
+ SVN_ERR(svn_checksum(&B, kind, "B", 1, pool));
+
+ /* Different non-zero don't match. */
+ SVN_TEST_ASSERT(!svn_checksum_match(A, B));
+
+ /* Zero matches anything of the same kind. */
+ SVN_TEST_ASSERT(svn_checksum_match(A, zero));
+ SVN_TEST_ASSERT(svn_checksum_match(zero, B));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+zero_match(apr_pool_t *pool)
+{
+ svn_checksum_kind_t kind;
+ for (kind = svn_checksum_md5; kind <= svn_checksum_fnv1a_32x4; ++kind)
+ SVN_ERR(zero_match_kind(kind, pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+zero_cross_match(apr_pool_t *pool)
+{
+ svn_checksum_kind_t i_kind;
+ svn_checksum_kind_t k_kind;
+
+ for (i_kind = svn_checksum_md5;
+ i_kind <= svn_checksum_fnv1a_32x4;
+ ++i_kind)
+ {
+ svn_checksum_t *i_zero;
+ svn_checksum_t *i_A;
+
+ i_zero = svn_checksum_create(i_kind, pool);
+ SVN_ERR(svn_checksum_clear(i_zero));
+ SVN_ERR(svn_checksum(&i_A, i_kind, "A", 1, pool));
+
+ for (k_kind = svn_checksum_md5;
+ k_kind <= svn_checksum_fnv1a_32x4;
+ ++k_kind)
+ {
+ svn_checksum_t *k_zero;
+ svn_checksum_t *k_A;
+ if (i_kind == k_kind)
+ continue;
+
+ k_zero = svn_checksum_create(k_kind, pool);
+ SVN_ERR(svn_checksum_clear(k_zero));
+ SVN_ERR(svn_checksum(&k_A, k_kind, "A", 1, pool));
+
+ /* Different non-zero don't match. */
+ SVN_TEST_ASSERT(!svn_checksum_match(i_A, k_A));
+
+ /* Zero doesn't match anything of a different kind... */
+ SVN_TEST_ASSERT(!svn_checksum_match(i_zero, k_A));
+ SVN_TEST_ASSERT(!svn_checksum_match(i_A, k_zero));
+
+ /* ...even another zero. */
+ SVN_TEST_ASSERT(!svn_checksum_match(i_zero, k_zero));
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+zlib_expansion_test(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ const char *data_path;
+ const char *srcdir;
+ svn_stringbuf_t *deflated;
+ Byte dst_buffer[256 * 1024];
+ Byte *src_buffer;
+ uInt sz;
+
+ SVN_ERR(svn_test_get_srcdir(&srcdir, opts, pool));
+ data_path = svn_dirent_join(srcdir, "zlib.deflated", pool);
+
+ SVN_ERR(svn_stringbuf_from_file2(&deflated, data_path, pool));
+ src_buffer = (Byte*)deflated->data;
+
+ /* Try to decompress the same data with different blocksizes */
+ for (sz = 1; sz < 256; sz++)
+ {
+ z_stream stream;
+ uLong crc = crc32(0, Z_NULL, 0);
+ memset(&stream, 0, sizeof(stream));
+ inflateInit2(&stream, -15 /* DEFLATE_WINDOW_SIZE */);
+
+ stream.avail_in = sz;
+ stream.next_in = src_buffer;
+ stream.avail_out = sizeof(dst_buffer);
+ stream.next_out = dst_buffer;
+
+ do
+ {
+ int zr = inflate(&stream, Z_NO_FLUSH);
+
+ if (zr != Z_OK && zr != Z_STREAM_END)
+ {
+ return svn_error_createf(
+ SVN_ERR_TEST_FAILED, NULL,
+ "Failure decompressing with blocksize %u", sz);
+ }
+ crc = crc32(crc, dst_buffer, sizeof(dst_buffer) - stream.avail_out);
+ stream.avail_out = sizeof(dst_buffer);
+ stream.next_out = dst_buffer;
+ stream.avail_in += sz;
+ } while (stream.next_in + stream.avail_in < src_buffer + deflated->len);
+
+ stream.avail_in = (uInt) (deflated->len - stream.total_in);
+
+ {
+ int zr = inflate(&stream, Z_NO_FLUSH);
+
+ if (zr != Z_STREAM_END)
+ {
+ return svn_error_createf(
+ SVN_ERR_TEST_FAILED, NULL,
+ "Final flush failed with blocksize %u", sz);
+ }
+ crc = crc32(crc, dst_buffer, sizeof(dst_buffer) - stream.avail_out);
+
+ zr = inflateEnd(&stream);
+
+ if (zr != Z_OK)
+ {
+ return svn_error_createf(
+ SVN_ERR_TEST_FAILED, NULL,
+ "End of stream handling failed with blocksize %u",
+ sz);
+ }
+ }
+
+ if (stream.total_out != 242014 || crc != 0x8f03d934)
+ {
+ return svn_error_createf(
+ SVN_ERR_TEST_FAILED, NULL,
+ "Decompressed data doesn't match expected size or crc with "
+ "blocksize %u: Found crc32=0x%08lx, size=%lu.\n"
+ "Verify your ZLib installation, as this should never happen",
+ sz, crc, stream.total_out);
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_serialization(apr_pool_t *pool)
+{
+ svn_checksum_kind_t kind;
+ for (kind = svn_checksum_md5; kind <= svn_checksum_fnv1a_32x4; ++kind)
+ {
+ const svn_checksum_t *parsed_checksum;
+ svn_checksum_t *checksum = svn_checksum_empty_checksum(kind, pool);
+ const char *serialized = svn_checksum_serialize(checksum, pool, pool);
+
+ SVN_ERR(svn_checksum_deserialize(&parsed_checksum, serialized, pool,
+ pool));
+
+ SVN_TEST_ASSERT(parsed_checksum->kind == kind);
+ SVN_TEST_ASSERT(svn_checksum_match(checksum, parsed_checksum));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_checksum_parse_all_zero(apr_pool_t *pool)
+{
+ svn_checksum_kind_t kind;
+ for (kind = svn_checksum_md5; kind <= svn_checksum_fnv1a_32x4; ++kind)
+ {
+ svn_checksum_t *checksum;
+ const char *hex;
+
+ checksum = svn_checksum_create(kind, pool);
+
+ hex = svn_checksum_to_cstring_display(checksum, pool);
+ SVN_ERR(svn_checksum_parse_hex(&checksum, kind, hex, pool));
+
+ /* All zeroes checksum is NULL by definition. See
+ svn_checksum_parse_hex().*/
+ SVN_TEST_ASSERT(checksum == NULL);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_checksummed_stream_read(apr_pool_t *pool)
+{
+ const svn_string_t *str = svn_string_create("abcde", pool);
+ svn_checksum_kind_t kind;
+
+ for (kind = svn_checksum_md5; kind <= svn_checksum_fnv1a_32x4; ++kind)
+ {
+ svn_stream_t *stream;
+ svn_checksum_t *expected_checksum;
+ svn_checksum_t *actual_checksum;
+ char buf[64];
+ apr_size_t len;
+
+ stream = svn_stream_from_string(str, pool);
+ stream = svn_stream_checksummed2(stream, &actual_checksum, NULL,
+ kind, TRUE, pool);
+ len = str->len;
+ SVN_ERR(svn_stream_read_full(stream, buf, &len));
+ SVN_TEST_INT_ASSERT((int) len, str->len);
+
+ SVN_ERR(svn_stream_close(stream));
+
+ SVN_ERR(svn_checksum(&expected_checksum, kind,
+ str->data, str->len, pool));
+ SVN_TEST_ASSERT(svn_checksum_match(expected_checksum, actual_checksum));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_checksummed_stream_reset(apr_pool_t *pool)
+{
+ const svn_string_t *str = svn_string_create("abcde", pool);
+ svn_checksum_kind_t kind;
+
+ for (kind = svn_checksum_md5; kind <= svn_checksum_fnv1a_32x4; ++kind)
+ {
+ svn_stream_t *stream;
+ svn_checksum_t *expected_checksum;
+ svn_checksum_t *actual_checksum;
+ char buf[64];
+ apr_size_t len;
+
+ stream = svn_stream_from_string(str, pool);
+ stream = svn_stream_checksummed2(stream, &actual_checksum, NULL,
+ kind, TRUE, pool);
+ len = str->len;
+ SVN_ERR(svn_stream_read_full(stream, buf, &len));
+ SVN_TEST_INT_ASSERT((int) len, str->len);
+
+ SVN_ERR(svn_stream_reset(stream));
+
+ len = str->len;
+ SVN_ERR(svn_stream_read_full(stream, buf, &len));
+ SVN_TEST_INT_ASSERT((int) len, str->len);
+
+ SVN_ERR(svn_stream_close(stream));
+
+ SVN_ERR(svn_checksum(&expected_checksum, kind,
+ str->data, str->len, pool));
+ SVN_TEST_ASSERT(svn_checksum_match(expected_checksum, actual_checksum));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* An array of all test functions */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_checksum_parse,
+ "checksum parse"),
+ SVN_TEST_PASS2(test_checksum_empty,
+ "checksum emptiness"),
+ SVN_TEST_PASS2(zero_match,
+ "zero checksum matching"),
+ SVN_TEST_OPTS_PASS(zlib_expansion_test,
+ "zlib expansion test (zlib regression)"),
+ SVN_TEST_PASS2(zero_cross_match,
+ "zero checksum cross-type matching"),
+ SVN_TEST_PASS2(test_serialization,
+ "checksum (de-)serialization"),
+ SVN_TEST_PASS2(test_checksum_parse_all_zero,
+ "checksum parse all zero"),
+ SVN_TEST_PASS2(test_checksummed_stream_read,
+ "read from checksummed stream"),
+ SVN_TEST_PASS2(test_checksummed_stream_reset,
+ "reset checksummed stream"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/compat-test.c b/subversion/tests/libsvn_subr/compat-test.c
new file mode 100644
index 0000000..8414847
--- /dev/null
+++ b/subversion/tests/libsvn_subr/compat-test.c
@@ -0,0 +1,227 @@
+/*
+ * compat-test.c: tests svn_ver_compatible
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <apr_pools.h>
+
+#include "svn_error.h"
+#include "svn_pools.h"
+#include "svn_version.h"
+
+#include "../svn_test.h"
+#include "svn_private_config.h"
+#include "private/svn_subr_private.h"
+
+#ifndef SVN_DISABLE_FULL_VERSION_MATCH
+#define FALSE_IF_FULL FALSE
+#else
+#define FALSE_IF_FULL TRUE
+#endif
+
+static svn_error_t *
+test_version_compatibility(apr_pool_t *pool)
+{
+ unsigned int i;
+
+ struct version_pair {
+ svn_version_t my_version;
+ svn_version_t lib_version;
+ svn_boolean_t result;
+ } versions[] = {
+ { {1, 0, 0, ""}, {1, 0, 0, ""}, TRUE },
+ { {1, 0, 0, ""}, {2, 0, 0, ""}, FALSE },
+ { {2, 0, 0, ""}, {1, 0, 0, ""}, FALSE },
+
+ { {1, 0, 0, ""}, {1, 0, 1, ""}, TRUE },
+ { {1, 0, 1, ""}, {1, 0, 0, ""}, TRUE },
+ { {1, 0, 1, ""}, {1, 0, 1, ""}, TRUE },
+
+ { {1, 0, 0, ""}, {1, 1, 0, ""}, TRUE },
+ { {1, 0, 1, ""}, {1, 1, 0, ""}, TRUE },
+ { {1, 0, 0, ""}, {1, 1, 1, ""}, TRUE },
+ { {1, 1, 0, ""}, {1, 0, 0, ""}, FALSE },
+
+ { {1, 0, 0, "dev"}, {1, 0, 0, "dev"}, TRUE },
+ { {1, 0, 1, "dev"}, {1, 0, 1, "dev"}, TRUE },
+ { {1, 1, 0, "dev"}, {1, 1, 0, "dev"}, TRUE },
+ { {1, 1, 1, "dev"}, {1, 1, 1, "dev"}, TRUE },
+ { {1, 0, 0, "dev"}, {1, 0, 1, "dev"}, FALSE_IF_FULL },
+ { {1, 0, 0, "dev"}, {1, 1, 0, "dev"}, FALSE_IF_FULL },
+ { {1, 0, 0, "cev"}, {1, 0, 0, "dev"}, FALSE_IF_FULL },
+ { {1, 0, 0, "eev"}, {1, 0, 0, "dev"}, FALSE_IF_FULL },
+ { {1, 0, 1, "dev"}, {1, 0, 0, "dev"}, FALSE_IF_FULL },
+ { {1, 1, 0, "dev"}, {1, 0, 0, "dev"}, FALSE },
+
+ { {1, 0, 0, ""}, {1, 0, 0, "dev"}, FALSE_IF_FULL },
+
+ { {1, 0, 0, "dev"}, {1, 0, 0, ""}, FALSE_IF_FULL },
+ { {1, 0, 1, "dev"}, {1, 0, 0, ""}, TRUE },
+ { {1, 1, 0, "dev"}, {1, 0, 0, ""}, FALSE },
+ { {1, 1, 1, "dev"}, {1, 1, 0, ""}, TRUE },
+ { {1, 1, 1, "dev"}, {1, 0, 0, ""}, FALSE },
+ { {2, 0, 0, "dev"}, {1, 0, 0, ""}, FALSE },
+ { {1, 0, 0, "dev"}, {2, 0, 0, ""}, FALSE },
+ };
+
+ for (i = 0; i < sizeof(versions)/sizeof(versions[0]); ++i)
+ {
+ if (svn_ver_compatible(&versions[i].my_version,
+ &versions[i].lib_version) != versions[i].result)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_ver_compatible (%d.%d.%d(%s), %d.%d.%d(%s)) failed",
+ versions[i].my_version.major,
+ versions[i].my_version.minor,
+ versions[i].my_version.patch,
+ versions[i].my_version.tag,
+ versions[i].lib_version.major,
+ versions[i].lib_version.minor,
+ versions[i].lib_version.patch,
+ versions[i].lib_version.tag);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_version_parsing(apr_pool_t *pool)
+{
+ unsigned int i;
+ apr_pool_t *iterpool;
+
+ struct version_pair {
+ const char *str;
+ svn_boolean_t malformed;
+ svn_version_t version;
+ } versions[] = {
+ /* str malformed version */
+ { "1.8", FALSE, { 1, 8, 0, ""} },
+ { "1.8-dev", TRUE, { 0, 0, 0, ""} },
+ { "1.1.0", FALSE, { 1, 1, 0, ""} },
+ { "1.1.3", FALSE, { 1, 1, 3, ""} },
+ { "2.10.0", FALSE, { 2, 10, 0, ""} },
+ { "1.8.0-dev", FALSE, { 1, 8, 0, "dev"} },
+ { "1.7.0-beta1", FALSE, { 1, 7, 0, "beta1"} },
+ { "1a.8.0", TRUE, { 0, 0, 0, ""} },
+ { "1a.8.0", TRUE, { 0, 0, 0, ""} },
+ { "1.a8.0", TRUE, { 0, 0, 0, ""} },
+ { "1.8.0a", TRUE, { 0, 0, 0, ""} },
+ { "1.8.0.1", TRUE, { 0, 0, 0, ""} },
+ };
+
+ iterpool = svn_pool_create(pool);
+ for (i = 0; i < sizeof(versions)/sizeof(versions[0]); ++i)
+ {
+ svn_version_t *version;
+ svn_error_t *err;
+
+ svn_pool_clear(iterpool);
+
+ err = svn_version__parse_version_string(&version, versions[i].str,
+ iterpool);
+ if (err && (err->apr_err != SVN_ERR_MALFORMED_VERSION_STRING))
+ return svn_error_create(SVN_ERR_TEST_FAILED, err,
+ "Unexpected error code");
+ if (err)
+ {
+ if (! versions[i].malformed)
+ return svn_error_create(SVN_ERR_TEST_FAILED, err,
+ "Unexpected parsing error returned");
+ else
+ svn_error_clear(err);
+ }
+ else
+ {
+ if (versions[i].malformed)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Parsing error expected; none returned");
+ if (! svn_ver_equal(version, &(versions[i].version)))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Parsed version of '%s' doesn't match "
+ "expected", versions[i].str);
+ }
+ }
+ svn_pool_destroy(iterpool);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_version_at_least(apr_pool_t *pool)
+{
+ unsigned int i;
+
+ struct version_pair {
+ svn_version_t version;
+ int major;
+ int minor;
+ int patch;
+ svn_boolean_t at_least;
+ } versions[] = {
+ /* maj min pat version at_least */
+ { { 1, 3, 3, ""}, 1, 3, 3, TRUE },
+ { { 1, 3, 3, ""}, 1, 3, 4, FALSE },
+ { { 1, 3, 3, ""}, 1, 4, 3, FALSE },
+ { { 1, 3, 3, ""}, 0, 4, 3, TRUE },
+ { { 1, 3, 3, ""}, 2, 0, 0, FALSE },
+ { { 1, 3, 3, ""}, 1, 3, 2, TRUE },
+ { { 1, 3, 3, ""}, 1, 2, 4, TRUE },
+ { { 1, 3, 3, "dev"}, 1, 3, 2, TRUE },
+ { { 1, 3, 3, "dev"}, 1, 3, 3, FALSE },
+ { { 1, 3, 3, ""}, 0, 4, 3, TRUE },
+ };
+
+ for (i = 0; i < sizeof(versions)/sizeof(versions[0]); ++i)
+ {
+ svn_boolean_t at_least = svn_version__at_least(&(versions[i].version),
+ versions[i].major,
+ versions[i].minor,
+ versions[i].patch);
+ if (at_least && (! versions[i].at_least))
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Expected at-least to be FALSE; got TRUE");
+ if ((! at_least) && versions[i].at_least)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Expected at-least to be TRUE; got FALSE");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* An array of all test functions */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_version_compatibility,
+ "svn_ver_compatible"),
+ SVN_TEST_PASS2(test_version_parsing,
+ "svn_version__parse_version_string"),
+ SVN_TEST_PASS2(test_version_at_least,
+ "svn_version__at_least"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/compress-test.c b/subversion/tests/libsvn_subr/compress-test.c
new file mode 100644
index 0000000..f20bcb1
--- /dev/null
+++ b/subversion/tests/libsvn_subr/compress-test.c
@@ -0,0 +1,93 @@
+/*
+ * compress-test.c: tests the compression functions.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "svn_pools.h"
+#include "private/svn_subr_private.h"
+#include "../svn_test.h"
+
+static svn_error_t *
+test_decompress_lz4(apr_pool_t *pool)
+{
+ const char input[] =
+ "\x61\xc0\x61\x61\x61\x61\x62\x62\x62\x62\x63\x63\x63\x63\x0c\x00\x00\x08"
+ "\x00\x00\x10\x00\x00\x0c\x00\x08\x08\x00\x00\x18\x00\x00\x14\x00\x00\x08"
+ "\x00\x08\x18\x00\x00\x14\x00\x00\x10\x00\x00\x18\x00\x00\x0c\x00\x00\x08"
+ "\x00\x00\x10\x00\x90\x61\x61\x61\x61\x62\x62\x62\x62";
+ svn_stringbuf_t *decompressed = svn_stringbuf_create_empty(pool);
+
+ SVN_ERR(svn__decompress_lz4(input, sizeof(input), decompressed, 100));
+ SVN_TEST_STRING_ASSERT(decompressed->data,
+ "aaaabbbbccccaaaaccccbbbbaaaabbbb"
+ "aaaabbbbccccaaaaccccbbbbaaaabbbb"
+ "aaaabbbbccccaaaaccccbbbbaaaabbbb");
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_compress_lz4(apr_pool_t *pool)
+{
+ const char input[] =
+ "aaaabbbbccccaaaaccccbbbbaaaabbbb"
+ "aaaabbbbccccaaaaccccbbbbaaaabbbb"
+ "aaaabbbbccccaaaaccccbbbbaaaabbbb";
+ svn_stringbuf_t *compressed = svn_stringbuf_create_empty(pool);
+ svn_stringbuf_t *decompressed = svn_stringbuf_create_empty(pool);
+
+ SVN_ERR(svn__compress_lz4(input, sizeof(input), compressed));
+ SVN_ERR(svn__decompress_lz4(compressed->data, compressed->len,
+ decompressed, 100));
+ SVN_TEST_STRING_ASSERT(decompressed->data, input);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_compress_lz4_empty(apr_pool_t *pool)
+{
+ svn_stringbuf_t *compressed = svn_stringbuf_create_empty(pool);
+ svn_stringbuf_t *decompressed = svn_stringbuf_create_empty(pool);
+
+ SVN_ERR(svn__compress_lz4("", 0, compressed));
+ SVN_ERR(svn__decompress_lz4(compressed->data, compressed->len,
+ decompressed, 100));
+ SVN_TEST_STRING_ASSERT(decompressed->data, "");
+
+ return SVN_NO_ERROR;
+}
+
+static int max_threads = -1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+{
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_decompress_lz4,
+ "test svn__decompress_lz4()"),
+ SVN_TEST_PASS2(test_compress_lz4,
+ "test svn__compress_lz4()"),
+ SVN_TEST_PASS2(test_compress_lz4_empty,
+ "test svn__compress_lz4() with empty input"),
+ SVN_TEST_NULL
+};
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/config-test.c b/subversion/tests/libsvn_subr/config-test.c
new file mode 100644
index 0000000..cb1ec63
--- /dev/null
+++ b/subversion/tests/libsvn_subr/config-test.c
@@ -0,0 +1,500 @@
+/*
+ * config-test.c: tests svn_config
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* ====================================================================
+ To add tests, look toward the bottom of this file.
+
+*/
+
+
+
+#include <string.h>
+
+#include <apr_getopt.h>
+#include <apr_pools.h>
+
+#include "svn_dirent_uri.h"
+#include "svn_error.h"
+#include "svn_config.h"
+#include "private/svn_subr_private.h"
+#include "private/svn_config_private.h"
+
+#include "../svn_test.h"
+
+
+/* A quick way to create error messages. */
+static svn_error_t *
+fail(apr_pool_t *pool, const char *fmt, ...)
+{
+ va_list ap;
+ char *msg;
+
+ va_start(ap, fmt);
+ msg = apr_pvsprintf(pool, fmt, ap);
+ va_end(ap);
+
+ return svn_error_create(SVN_ERR_TEST_FAILED, SVN_NO_ERROR, msg);
+}
+
+static svn_error_t *
+get_config_file_path(const char **cfg_file,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ const char *srcdir;
+
+ SVN_ERR(svn_test_get_srcdir(&srcdir, opts, pool));
+ *cfg_file = svn_dirent_join(srcdir, "config-test.cfg", pool);
+
+ return SVN_NO_ERROR;
+}
+
+static const char *config_keys[] = { "foo", "a", "b", "c", "d", "e", "f", "g",
+ "h", "i", "m", NULL };
+static const char *config_values[] = { "bar", "Aa", "100", "bar",
+ "a %(bogus)s oyster bar",
+ "%(bogus)s shmoo %(",
+ "%Aa", "lyrical bard", "%(unterminated",
+ "Aa 100", "foo bar baz", NULL };
+
+static svn_error_t *
+test_text_retrieval(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_config_t *cfg;
+ int i;
+ const char *cfg_file;
+
+ SVN_ERR(get_config_file_path(&cfg_file, opts, pool));
+ SVN_ERR(svn_config_read3(&cfg, cfg_file, TRUE, FALSE, FALSE, pool));
+
+ /* Test values retrieved from our ConfigParser instance against
+ values retrieved using svn_config. */
+ for (i = 0; config_keys[i] != NULL; i++)
+ {
+ const char *key, *py_val, *c_val;
+
+ key = config_keys[i];
+ py_val = config_values[i];
+ svn_config_get(cfg, &c_val, "section1", key, "default value");
+#if 0
+ printf("Testing expected value '%s' against '%s' for "
+ "option '%s'\n", py_val, c_val, key);
+#endif
+ /* Fail iff one value is null, or the strings don't match. */
+ if ((c_val == NULL) != (py_val == NULL)
+ || (c_val != NULL && py_val != NULL && strcmp(c_val, py_val) != 0))
+ return fail(pool, "Expected value '%s' not equal to '%s' for "
+ "option '%s'", py_val, c_val, key);
+ }
+
+ {
+ const char *value = svn_config_get_server_setting(cfg, "server group",
+ "setting", "default");
+ if (value == NULL || strcmp(value, "default") != 0)
+ return svn_error_create(SVN_ERR_TEST_FAILED, SVN_NO_ERROR,
+ "Expected a svn_config_get_server_setting()"
+ "to return 'default'");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+static const char *true_keys[] = {"true1", "true2", "true3", "true4",
+ NULL};
+static const char *false_keys[] = {"false1", "false2", "false3", "false4",
+ NULL};
+
+static svn_error_t *
+test_boolean_retrieval(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_config_t *cfg;
+ int i;
+ const char *cfg_file;
+
+ SVN_ERR(get_config_file_path(&cfg_file, opts, pool));
+ SVN_ERR(svn_config_read3(&cfg, cfg_file, TRUE, FALSE, FALSE, pool));
+
+ for (i = 0; true_keys[i] != NULL; i++)
+ {
+ svn_boolean_t value;
+ SVN_ERR(svn_config_get_bool(cfg, &value, "booleans", true_keys[i],
+ FALSE));
+ if (!value)
+ return fail(pool, "Value of option '%s' is not true", true_keys[i]);
+ }
+
+ for (i = 0; false_keys[i] != NULL; i++)
+ {
+ svn_boolean_t value;
+ SVN_ERR(svn_config_get_bool(cfg, &value, "booleans", false_keys[i],
+ TRUE));
+ if (value)
+ return fail(pool, "Value of option '%s' is not true", false_keys[i]);
+ }
+
+ {
+ svn_error_t *err;
+ svn_boolean_t value;
+
+ svn_error_clear((err = svn_config_get_bool(cfg, &value,
+ "booleans", "bad_true",
+ TRUE)));
+ if (!err)
+ return fail(pool, "No error on bad truth value");
+
+ svn_error_clear((err = svn_config_get_bool(cfg, &value,
+ "booleans", "bad_false",
+ FALSE)));
+ if (!err)
+ return fail(pool, "No error on bad truth value");
+ }
+
+ {
+ svn_boolean_t value;
+ SVN_ERR(svn_config_get_server_setting_bool(cfg, &value, "server group",
+ "setting", FALSE));
+ if (value)
+ return svn_error_create(SVN_ERR_TEST_FAILED, SVN_NO_ERROR,
+ "Expected a svn_config_get_server_setting_bool()"
+ "to return FALSE, but it returned TRUE");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_has_section_case_insensitive(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_config_t *cfg;
+ const char *cfg_file;
+
+ SVN_ERR(get_config_file_path(&cfg_file, opts, pool));
+ SVN_ERR(svn_config_read3(&cfg, cfg_file, TRUE, FALSE, FALSE, pool));
+
+ if (! svn_config_has_section(cfg, "section1"))
+ return fail(pool, "Failed to find section1");
+
+ if (! svn_config_has_section(cfg, "SECTION1"))
+ return fail(pool, "Failed to find SECTION1");
+
+ if (! svn_config_has_section(cfg, "UpperCaseSection"))
+ return fail(pool, "Failed to find UpperCaseSection");
+
+ if (! svn_config_has_section(cfg, "uppercasesection"))
+ return fail(pool, "Failed to find UpperCaseSection");
+
+ if (svn_config_has_section(cfg, "notthere"))
+ return fail(pool, "Returned true on missing section");
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_has_section_case_sensitive(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_config_t *cfg;
+ const char *cfg_file;
+
+ SVN_ERR(get_config_file_path(&cfg_file, opts, pool));
+ SVN_ERR(svn_config_read3(&cfg, cfg_file, TRUE, TRUE, FALSE, pool));
+
+ if (! svn_config_has_section(cfg, "section1"))
+ return fail(pool, "Failed to find section1");
+
+ if (svn_config_has_section(cfg, "SECTION1"))
+ return fail(pool, "Returned true on missing section");
+
+ if (! svn_config_has_section(cfg, "UpperCaseSection"))
+ return fail(pool, "Failed to find UpperCaseSection");
+
+ if (svn_config_has_section(cfg, "uppercasesection"))
+ return fail(pool, "Returned true on missing section");
+
+ if (svn_config_has_section(cfg, "notthere"))
+ return fail(pool, "Returned true on missing section");
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_has_option_case_sensitive(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_config_t *cfg;
+ const char *cfg_file;
+ apr_int64_t value;
+ int i;
+
+ static struct test_dataset {
+ const char *option;
+ apr_int64_t value;
+ } const test_data[] = {
+ { "a", 1 },
+ { "A", 2 },
+ { "B", 3 },
+ { "b", 4 }
+ };
+ static const int test_data_size = sizeof(test_data)/sizeof(*test_data);
+
+ SVN_ERR(get_config_file_path(&cfg_file, opts, pool));
+ SVN_ERR(svn_config_read3(&cfg, cfg_file, TRUE, TRUE, TRUE, pool));
+
+ for (i = 0; i < test_data_size; ++i)
+ {
+ SVN_ERR(svn_config_get_int64(cfg, &value, "case-sensitive-option",
+ test_data[i].option, -1));
+ if (test_data[i].value != value)
+ return fail(pool,
+ apr_psprintf(pool,
+ "case-sensitive-option.%s != %"
+ APR_INT64_T_FMT" but %"APR_INT64_T_FMT,
+ test_data[i].option,
+ test_data[i].value,
+ value));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_stream_interface(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_config_t *cfg;
+ const char *cfg_file;
+ svn_stream_t *stream;
+
+ SVN_ERR(get_config_file_path(&cfg_file, opts, pool));
+ SVN_ERR(svn_stream_open_readonly(&stream, cfg_file, pool, pool));
+
+ SVN_ERR(svn_config_parse(&cfg, stream, TRUE, TRUE, pool));
+
+ /* nominal test to make sure cfg is populated with something since
+ * svn_config_parse will happily return an empty cfg if the stream is
+ * empty. */
+ if (! svn_config_has_section(cfg, "section1"))
+ return fail(pool, "Failed to find section1");
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_ignore_bom(apr_pool_t *pool)
+{
+ svn_config_t *cfg;
+ svn_string_t *cfg_string = svn_string_create("\xEF\xBB\xBF[s1]\nfoo=bar\n",
+ pool);
+ svn_stream_t *stream = svn_stream_from_string(cfg_string, pool);
+
+ SVN_ERR(svn_config_parse(&cfg, stream, TRUE, TRUE, pool));
+
+ if (! svn_config_has_section(cfg, "s1"))
+ return fail(pool, "failed to find section s1");
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_read_only_mode(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_config_t *cfg;
+ svn_config_t *cfg2;
+ const char *cfg_file;
+
+ SVN_ERR(get_config_file_path(&cfg_file, opts, pool));
+ SVN_ERR(svn_config_read3(&cfg, cfg_file, TRUE, TRUE, FALSE, pool));
+
+ /* setting CFG to r/o mode shall toggle the r/o mode and expand values */
+
+ SVN_TEST_ASSERT(!svn_config__is_read_only(cfg));
+ SVN_TEST_ASSERT(!svn_config__is_expanded(cfg, "section1", "i"));
+
+ svn_config__set_read_only(cfg, pool);
+
+ SVN_TEST_ASSERT(svn_config__is_read_only(cfg));
+ SVN_TEST_ASSERT(svn_config__is_expanded(cfg, "section1", "i"));
+
+ /* copies should be r/w with values */
+
+ SVN_ERR(svn_config_dup(&cfg2, cfg, pool));
+ SVN_TEST_ASSERT(!svn_config__is_read_only(cfg2));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_expand(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_config_t *cfg;
+ const char *cfg_file, *val;
+
+ SVN_ERR(get_config_file_path(&cfg_file, opts, pool));
+ SVN_ERR(svn_config_read3(&cfg, cfg_file, TRUE, TRUE, FALSE, pool));
+
+ /* Get expanded "g" which requires expanding "c". */
+ svn_config_get(cfg, &val, "section1", "g", NULL);
+
+ /* Get expanded "c". */
+ svn_config_get(cfg, &val, "section1", "c", NULL);
+
+ /* With pool debugging enabled this ensures that the expanded value
+ of "c" was not created in a temporary pool when expanding "g". */
+ SVN_TEST_STRING_ASSERT(val, "bar");
+
+ /* Get expanded "j" and "k" which have cyclic definitions.
+ * They must return empty values. */
+ svn_config_get(cfg, &val, "section1", "j", NULL);
+ SVN_TEST_STRING_ASSERT(val, "");
+ svn_config_get(cfg, &val, "section1", "k", NULL);
+ SVN_TEST_STRING_ASSERT(val, "");
+
+ /* Get expanded "l" which depends on a cyclic definition.
+ * So, it also considered "undefined" and will be normalized to "". */
+ svn_config_get(cfg, &val, "section1", "l", NULL);
+ SVN_TEST_STRING_ASSERT(val, "");
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_invalid_bom(apr_pool_t *pool)
+{
+ svn_config_t *cfg;
+ svn_error_t *err;
+ svn_string_t *cfg_string;
+ svn_stream_t *stream;
+
+ cfg_string = svn_string_create("\xEF", pool);
+ stream = svn_stream_from_string(cfg_string, pool);
+ err = svn_config_parse(&cfg, stream, TRUE, TRUE, pool);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_MALFORMED_FILE);
+
+ cfg_string = svn_string_create("\xEF\xBB", pool);
+ stream = svn_stream_from_string(cfg_string, pool);
+ err = svn_config_parse(&cfg, stream, TRUE, TRUE, pool);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_MALFORMED_FILE);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_serialization(apr_pool_t *pool)
+{
+ svn_stringbuf_t *original_content;
+ svn_stringbuf_t *written_content;
+ svn_config_t *cfg;
+
+ const struct
+ {
+ const char *section;
+ const char *option;
+ const char *value;
+ } test_data[] =
+ {
+ { "my section", "value1", "some" },
+ { "my section", "value2", "something" },
+ { "another Section", "value1", "one" },
+ { "another Section", "value2", "two" },
+ { "another Section", "value 3", "more" },
+ };
+ int i;
+
+ /* Format the original with the same formatting that the writer will use. */
+ original_content = svn_stringbuf_create("\n[my section]\n"
+ "value1=some\n"
+ "value2=%(value1)sthing\n"
+ "\n[another Section]\n"
+ "value1=one\n"
+ "value2=two\n"
+ "value 3=more\n",
+ pool);
+ written_content = svn_stringbuf_create_empty(pool);
+
+ SVN_ERR(svn_config_parse(&cfg,
+ svn_stream_from_stringbuf(original_content, pool),
+ TRUE, TRUE, pool));
+ SVN_ERR(svn_config__write(svn_stream_from_stringbuf(written_content, pool),
+ cfg, pool));
+ SVN_ERR(svn_config_parse(&cfg,
+ svn_stream_from_stringbuf(written_content, pool),
+ TRUE, TRUE, pool));
+
+ /* The serialized and re-parsed config must have the expected contents. */
+ for (i = 0; i < sizeof(test_data) / sizeof(test_data[0]); ++i)
+ {
+ const char *val;
+ svn_config_get(cfg, &val, test_data[i].section, test_data[i].option,
+ NULL);
+ SVN_TEST_STRING_ASSERT(val, test_data[i].value);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/*
+ ====================================================================
+ If you add a new test to this file, update this array.
+
+ (These globals are required by our included main())
+*/
+
+/* An array of all test functions */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_OPTS_PASS(test_text_retrieval,
+ "test svn_config"),
+ SVN_TEST_OPTS_PASS(test_boolean_retrieval,
+ "test svn_config boolean conversion"),
+ SVN_TEST_OPTS_PASS(test_has_section_case_insensitive,
+ "test svn_config_has_section (case insensitive)"),
+ SVN_TEST_OPTS_PASS(test_has_section_case_sensitive,
+ "test svn_config_has_section (case sensitive)"),
+ SVN_TEST_OPTS_PASS(test_has_option_case_sensitive,
+ "test case-sensitive option name lookup"),
+ SVN_TEST_OPTS_PASS(test_stream_interface,
+ "test svn_config_parse"),
+ SVN_TEST_PASS2(test_ignore_bom,
+ "test parsing config file with BOM"),
+ SVN_TEST_OPTS_PASS(test_read_only_mode,
+ "test r/o mode"),
+ SVN_TEST_OPTS_PASS(test_expand,
+ "test variable expansion"),
+ SVN_TEST_PASS2(test_invalid_bom,
+ "test parsing config file with invalid BOM"),
+ SVN_TEST_PASS2(test_serialization,
+ "test writing a config"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/config-test.cfg b/subversion/tests/libsvn_subr/config-test.cfg
new file mode 100644
index 0000000..d9c6153
--- /dev/null
+++ b/subversion/tests/libsvn_subr/config-test.cfg
@@ -0,0 +1,72 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# default values across all sections
+[DEFAULT]
+foo=bar
+# Not implementing __name__ expansions
+#baz=%(__name__)s
+
+[section1]
+# Trailing whitespace
+a=Aa
+# leading whitespace / numeric
+b= 100
+# Variable expansion
+c=%(foo)s
+# Expansion for non-existent option (ConfigParser throws an
+# InterpolationError with the message "Bad value substitution")
+d=a %(bogus)s oyster %(foo)s
+e=%(bogus)s shmoo %(
+# Expansion format escaping doesn't seem possible
+f=%%(a)s
+# Two-level variable expansion with surrounding text
+g=lyrical %(c)sd
+# Unterminated format string
+h= %(unterminated
+# Multiple expansions
+i=%(a)s %(b)s
+# Recursive two-level variable expansion with surrounding text
+j=some %(k)scle
+k=c%(j)sy
+# Depends on a cyclic definition
+l=depends on a %(j)scycle!
+# line continuation
+m = foo
+ bar
+ baz
+
+[UpperCaseSection]
+a=Aa
+
+[booleans]
+true1 = true
+true2 = Yes
+true3 = oN
+true4 = 1
+bad_true = och, aye
+false1 = FALSE
+false2 = no
+false3 = oFf
+false4 = 0
+bad_false = nyet!
+
+[case-sensitive-option]
+a = 1
+A = 2
+B = 3
+b = 4
diff --git a/subversion/tests/libsvn_subr/crypto-test.c b/subversion/tests/libsvn_subr/crypto-test.c
new file mode 100644
index 0000000..91fd6c7
--- /dev/null
+++ b/subversion/tests/libsvn_subr/crypto-test.c
@@ -0,0 +1,192 @@
+/*
+ * crypto-test.c -- test cryptographic routines
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdio.h>
+#include <string.h>
+
+#include "svn_pools.h"
+
+#include "../svn_test.h"
+#include "../../libsvn_subr/crypto.h"
+
+
+/*** Helper functions ***/
+
+/* Encrypt PASSWORD within CTX using MASTER, then
+ decrypt those results and ensure the original PASSWORD comes out
+ the other end. */
+static svn_error_t *
+encrypt_decrypt(svn_crypto__ctx_t *ctx,
+ const svn_string_t *master,
+ const char *password,
+ apr_pool_t *pool)
+{
+ const svn_string_t *ciphertext, *iv, *salt;
+ const char *password_again;
+
+ SVN_ERR(svn_crypto__encrypt_password(&ciphertext, &iv, &salt, ctx,
+ password, master, pool, pool));
+ if (! ciphertext)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Encryption failed to return ciphertext");
+ if (! salt)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Encryption failed to return salt");
+ if (! iv)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Encryption failed to return initialization "
+ "vector");
+
+ SVN_ERR(svn_crypto__decrypt_password(&password_again, ctx, ciphertext, iv,
+ salt, master, pool, pool));
+
+ if (! password_again)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Decryption failed to generate results");
+
+ if (strcmp(password, password_again) != 0)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Encrypt/decrypt cycle failed to produce "
+ "original result\n"
+ " orig (%s)\n"
+ " new (%s)\n",
+ password, password_again);
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/*** Test functions ***/
+
+static svn_error_t *
+test_encrypt_decrypt_password(apr_pool_t *pool)
+{
+ svn_crypto__ctx_t *ctx;
+ const svn_string_t *master = svn_string_create("Pastor Massword", pool);
+ int i;
+ apr_pool_t *iterpool;
+ const char *passwords[] = {
+ "3ncryptm!3", /* fits in one block */
+ "this is a particularly long password", /* spans blocks */
+ "mypassphrase", /* with 4-byte padding, should align on block boundary */
+ };
+
+ /* Skip this test if the crypto subsystem is unavailable. */
+ if (! svn_crypto__is_available())
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, NULL);
+
+ SVN_ERR(svn_crypto__context_create(&ctx, pool));
+
+ iterpool = svn_pool_create(pool);
+ for (i = 0; i < (sizeof(passwords) / sizeof(const char *)); i++)
+ {
+ svn_pool_clear(iterpool);
+ SVN_ERR(encrypt_decrypt(ctx, master, passwords[i], iterpool));
+ }
+
+ svn_pool_destroy(iterpool);
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_passphrase_check(apr_pool_t *pool)
+{
+ svn_crypto__ctx_t *ctx;
+ int i;
+ apr_pool_t *iterpool;
+ const char *passwords[] = {
+ "3ncryptm!3", /* fits in one block */
+ "this is a particularly long password", /* spans blocks */
+ "mypassphrase", /* with 4-byte padding, should align on block boundary */
+ };
+ const svn_string_t *ciphertext, *iv, *salt, *secret;
+ const char *checktext;
+ svn_boolean_t is_valid;
+ int num_passwords = sizeof(passwords) / sizeof(const char *);
+
+ /* Skip this test if the crypto subsystem is unavailable. */
+ if (! svn_crypto__is_available())
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, NULL);
+
+ SVN_ERR(svn_crypto__context_create(&ctx, pool));
+
+ iterpool = svn_pool_create(pool);
+ for (i = 0; i < num_passwords; i++)
+ {
+ svn_pool_clear(iterpool);
+ secret = svn_string_create(passwords[i], iterpool);
+ SVN_ERR(svn_crypto__generate_secret_checktext(&ciphertext, &iv, &salt,
+ &checktext, ctx, secret,
+ iterpool, iterpool));
+ SVN_ERR(svn_crypto__verify_secret(&is_valid, ctx, secret, ciphertext,
+ iv, salt, checktext, iterpool));
+ if (! is_valid)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Error validating secret against checktext");
+ }
+
+ /* Now check that a bogus secret causes the validation to fail. We
+ try to verify each secret against the checktext generated by the
+ previous one. */
+ for (i = 0; i < num_passwords; i++)
+ {
+ int test_secret_index = (i + 1) % num_passwords;
+
+ svn_pool_clear(iterpool);
+ secret = svn_string_create(passwords[i], iterpool);
+ SVN_ERR(svn_crypto__generate_secret_checktext(&ciphertext, &iv, &salt,
+ &checktext, ctx, secret,
+ iterpool, iterpool));
+ secret = svn_string_create(passwords[test_secret_index], iterpool);
+ SVN_ERR(svn_crypto__verify_secret(&is_valid, ctx, secret, ciphertext,
+ iv, salt, checktext, iterpool));
+ if (is_valid)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Expected secret validation failure; "
+ "got success");
+ }
+
+ svn_pool_destroy(iterpool);
+ return SVN_NO_ERROR;
+}
+
+
+
+
+/* The test table. */
+
+static int max_threads = -1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_encrypt_decrypt_password,
+ "basic password encryption/decryption test"),
+ SVN_TEST_PASS2(test_passphrase_check,
+ "password checktext generation/validation"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/dirent_uri-test.c b/subversion/tests/libsvn_subr/dirent_uri-test.c
new file mode 100644
index 0000000..086cdbc
--- /dev/null
+++ b/subversion/tests/libsvn_subr/dirent_uri-test.c
@@ -0,0 +1,3025 @@
+/*
+ * dirent_uri-test.c -- test the directory entry and URI functions
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdio.h>
+#include <string.h>
+
+#if defined(WIN32) || defined(__OS2__)
+#include <direct.h>
+#define getcwd _getcwd
+#define getdcwd _getdcwd
+#else
+#include <unistd.h> /* for getcwd() */
+#endif
+
+#include <apr_general.h>
+
+#include "svn_pools.h"
+#include "svn_dirent_uri.h"
+#include "private/svn_fspath.h"
+#include "private/svn_cert.h"
+
+#include "../svn_test.h"
+
+#define SVN_EMPTY_PATH ""
+
+/* This check must match the check on top of dirent_uri.c and path-tests.c */
+#if defined(WIN32) || defined(__CYGWIN__) || defined(__OS2__)
+#define SVN_USE_DOS_PATHS
+#endif
+
+#define COUNT_OF(x) (sizeof(x) / sizeof(x[0]))
+
+static svn_error_t *
+test_dirent_is_root(apr_pool_t *pool)
+{
+ apr_size_t i;
+
+ /* Paths to test and their expected results. */
+ struct {
+ const char *path;
+ svn_boolean_t result;
+ } tests[] = {
+ { "/", TRUE },
+ { "/foo/bar", FALSE },
+ { "/foo", FALSE },
+ { "", FALSE },
+#ifdef SVN_USE_DOS_PATHS
+ { "X:/foo", FALSE },
+ { "X:/", TRUE },
+ { "X:foo", FALSE }, /* Based on non absolute root */
+ { "X:", TRUE },
+ { "//srv/shr", TRUE },
+ { "//srv/shr/fld", FALSE },
+ { "//srv/s r", TRUE },
+ { "//srv/s r/fld", FALSE },
+#else /* !SVN_USE_DOS_PATHS */
+ { "/", TRUE },
+ { "/X:foo", FALSE },
+ { "/X:", FALSE },
+#endif /* SVN_USE_DOS_PATHS */
+ };
+
+ for (i = 0; i < COUNT_OF(tests); i++)
+ {
+ svn_boolean_t retval;
+
+ retval = svn_dirent_is_root(tests[i].path, strlen(tests[i].path));
+ if (tests[i].result != retval)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_dirent_is_root (%s) returned %s instead of %s",
+ tests[i].path, retval ? "TRUE" : "FALSE",
+ tests[i].result ? "TRUE" : "FALSE");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_uri_is_root(apr_pool_t *pool)
+{
+ apr_size_t i;
+
+ /* Paths to test and their expected results. */
+ struct {
+ const char *path;
+ svn_boolean_t result;
+ } tests[] = {
+ { "file://", TRUE },
+ { "file://a", FALSE },
+ { "file:///a", FALSE },
+ { "file:///A:", FALSE },
+ { "http://server", TRUE },
+ { "http://server/file", FALSE },
+ { "http://", TRUE },
+ };
+
+ for (i = 0; i < COUNT_OF(tests); i++)
+ {
+ svn_boolean_t retval;
+
+ retval = svn_uri_is_root(tests[i].path, strlen(tests[i].path));
+ if (tests[i].result != retval)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_uri_is_root (%s) returned %s instead of %s",
+ tests[i].path, retval ? "TRUE" : "FALSE",
+ tests[i].result ? "TRUE" : "FALSE");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_dirent_is_absolute(apr_pool_t *pool)
+{
+ apr_size_t i;
+
+ /* Paths to test and their expected results. */
+ struct {
+ const char *path;
+ svn_boolean_t result;
+ } tests[] = {
+ { "foo/bar", FALSE },
+ { "foo", FALSE },
+ { "", FALSE },
+#ifdef SVN_USE_DOS_PATHS
+ { "/foo/bar", FALSE },
+ { "/foo", FALSE },
+ { "/", FALSE },
+ { "C:/foo", TRUE },
+ { "C:/", TRUE },
+ { "c:/", FALSE },
+ { "c:/foo", FALSE },
+ { "//srv/shr", TRUE },
+ { "//srv/shr/fld", TRUE },
+ { "//srv/s r", TRUE },
+ { "//srv/s r/fld", TRUE },
+#else /* !SVN_USE_DOS_PATHS */
+ { "/foo/bar", TRUE },
+ { "/foo", TRUE },
+ { "/", TRUE },
+ { "X:/foo", FALSE },
+ { "X:/", FALSE },
+#endif /* SVN_USE_DOS_PATHS */
+ { "X:foo", FALSE }, /* Not special on Posix, relative on Windows */
+ { "X:foo/bar", FALSE },
+ { "X:", FALSE },
+ };
+
+ for (i = 0; i < COUNT_OF(tests); i++)
+ {
+ svn_boolean_t retval;
+
+ retval = svn_dirent_is_absolute(tests[i].path);
+ if (tests[i].result != retval)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_dirent_is_absolute (%s) returned %s instead of %s",
+ tests[i].path, retval ? "TRUE" : "FALSE",
+ tests[i].result ? "TRUE" : "FALSE");
+
+ /* Don't get absolute paths for the UNC paths, because this will
+ always fail */
+ if (tests[i].result &&
+ strncmp(tests[i].path, "//", 2) != 0)
+ {
+ const char *abspath;
+
+ SVN_ERR(svn_dirent_get_absolute(&abspath, tests[i].path, pool));
+
+ if (tests[i].result != (strcmp(tests[i].path, abspath) == 0))
+ return svn_error_createf(
+ SVN_ERR_TEST_FAILED,
+ NULL,
+ "svn_dirent_is_absolute(%s) returned TRUE, but "
+ "svn_dirent_get_absolute() returned \"%s\"",
+ tests[i].path,
+ abspath);
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_dirent_join(apr_pool_t *pool)
+{
+ int i;
+ char *result;
+
+ static const char * const joins[][3] = {
+ { "abc", "def", "abc/def" },
+ { "a", "def", "a/def" },
+ { "a", "d", "a/d" },
+ { "/", "d", "/d" },
+ { "/abc", "d", "/abc/d" },
+ { "/abc", "def", "/abc/def" },
+ { "/abc", "/def", "/def" },
+ { "/abc", "/d", "/d" },
+ { "/abc", "/", "/" },
+ { "abc", "/def", "/def" },
+ { SVN_EMPTY_PATH, "/", "/" },
+ { "/", SVN_EMPTY_PATH, "/" },
+ { SVN_EMPTY_PATH, "abc", "abc" },
+ { "abc", SVN_EMPTY_PATH, "abc" },
+ { SVN_EMPTY_PATH, "/abc", "/abc" },
+ { SVN_EMPTY_PATH, SVN_EMPTY_PATH, SVN_EMPTY_PATH },
+ { "/", "/", "/" },
+#ifdef SVN_USE_DOS_PATHS
+ { "X:/", SVN_EMPTY_PATH, "X:/" },
+ { "X:/", "abc", "X:/abc" },
+ { "X:/", "/def", "X:/def" },
+ { "X:/abc", "/d", "X:/d" },
+ { "X:/abc", "/", "X:/" },
+ { "X:/abc", "X:/", "X:/" },
+ { "X:/abc", "X:/def", "X:/def" },
+ { "X:", SVN_EMPTY_PATH, "X:" },
+ { "X:", "abc", "X:abc" },
+ { "X:", "/def", "X:/def" },
+ { "X:abc", "/d", "X:/d" },
+ { "X:abc", "/", "X:/" },
+ { "X:abc", "X:/", "X:/" },
+ { "X:abc", "X:/def", "X:/def" },
+ { "//srv/shr", "fld", "//srv/shr/fld" },
+ { "//srv/shr/fld", "subfld", "//srv/shr/fld/subfld" },
+ { "//srv/shr/fld", "//srv/shr", "//srv/shr" },
+ { "//srv/s r", "fld", "//srv/s r/fld" },
+ { "aa", "/dir", "/dir"} ,
+ { "aa", "A:", "A:" },
+ { "aa", "A:file", "A:file"},
+ { "A:", "/", "A:/" },
+#else /* !SVN_USE_DOS_PATHS */
+ { "X:abc", "X:/def", "X:abc/X:/def" },
+ { "X:","abc", "X:/abc" },
+ { "X:/abc", "X:/def", "X:/abc/X:/def" },
+#endif /* SVN_USE_DOS_PATHS */
+ };
+
+ for (i = 0; i < COUNT_OF(joins); i++ )
+ {
+ const char *base = joins[i][0];
+ const char *comp = joins[i][1];
+ const char *expect = joins[i][2];
+
+ result = svn_dirent_join(base, comp, pool);
+ if (strcmp(result, expect))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_dirent_join(\"%s\", \"%s\") returned "
+ "\"%s\". expected \"%s\"",
+ base, comp, result, expect);
+
+ result = svn_dirent_join_many(pool, base, comp, SVN_VA_NULL);
+ if (strcmp(result, expect))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_dirent_join_many(\"%s\", \"%s\") returned "
+ "\"%s\". expected \"%s\"",
+ base, comp, result, expect);
+ }
+
+#define TEST_MANY(args, expect) \
+ result = svn_dirent_join_many args ; \
+ if (strcmp(result, expect) != 0) \
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, \
+ "svn_dirent_join_many" #args " returns \"%s\". " \
+ "expected \"%s\"", \
+ result, expect);
+
+ TEST_MANY((pool, "abc", SVN_VA_NULL), "abc");
+ TEST_MANY((pool, "/abc", SVN_VA_NULL), "/abc");
+ TEST_MANY((pool, "/", SVN_VA_NULL), "/");
+
+ TEST_MANY((pool, "abc", "def", "ghi", SVN_VA_NULL), "abc/def/ghi");
+ TEST_MANY((pool, "abc", "/def", "ghi", SVN_VA_NULL), "/def/ghi");
+ TEST_MANY((pool, "/abc", "def", "ghi", SVN_VA_NULL), "/abc/def/ghi");
+ TEST_MANY((pool, "abc", "def", "/ghi", SVN_VA_NULL), "/ghi");
+ TEST_MANY((pool, "/", "def", "/ghi", SVN_VA_NULL), "/ghi");
+ TEST_MANY((pool, "/", "/def", "/ghi", SVN_VA_NULL), "/ghi");
+
+ TEST_MANY((pool, SVN_EMPTY_PATH, "def", "ghi", SVN_VA_NULL), "def/ghi");
+ TEST_MANY((pool, "abc", SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "abc/ghi");
+ TEST_MANY((pool, "abc", "def", SVN_EMPTY_PATH, SVN_VA_NULL), "abc/def");
+ TEST_MANY((pool, SVN_EMPTY_PATH, "def", SVN_EMPTY_PATH, SVN_VA_NULL), "def");
+ TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "ghi");
+ TEST_MANY((pool, "abc", SVN_EMPTY_PATH, SVN_EMPTY_PATH, SVN_VA_NULL), "abc");
+ TEST_MANY((pool, SVN_EMPTY_PATH, "def", "/ghi", SVN_VA_NULL), "/ghi");
+ TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "/ghi", SVN_VA_NULL), "/ghi");
+
+ TEST_MANY((pool, "/", "def", "ghi", SVN_VA_NULL), "/def/ghi");
+ TEST_MANY((pool, "abc", "/", "ghi", SVN_VA_NULL), "/ghi");
+ TEST_MANY((pool, "abc", "def", "/", SVN_VA_NULL), "/");
+ TEST_MANY((pool, "/", "/", "ghi", SVN_VA_NULL), "/ghi");
+ TEST_MANY((pool, "/", "/", "/", SVN_VA_NULL), "/");
+ TEST_MANY((pool, "/", SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "/ghi");
+ TEST_MANY((pool, "/", "def", SVN_EMPTY_PATH, SVN_VA_NULL), "/def");
+ TEST_MANY((pool, SVN_EMPTY_PATH, "/", "ghi", SVN_VA_NULL), "/ghi");
+ TEST_MANY((pool, "/", SVN_EMPTY_PATH, SVN_EMPTY_PATH, SVN_VA_NULL), "/");
+ TEST_MANY((pool, SVN_EMPTY_PATH, "/", SVN_EMPTY_PATH, SVN_VA_NULL), "/");
+ TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "/", SVN_VA_NULL), "/");
+
+#ifdef SVN_USE_DOS_PATHS
+ TEST_MANY((pool, "X:/", "def", "ghi", SVN_VA_NULL), "X:/def/ghi");
+ TEST_MANY((pool, "abc", "X:/", "ghi", SVN_VA_NULL), "X:/ghi");
+ TEST_MANY((pool, "abc", "def", "X:/", SVN_VA_NULL), "X:/");
+ TEST_MANY((pool, "X:/", "X:/", "ghi", SVN_VA_NULL), "X:/ghi");
+ TEST_MANY((pool, "X:/", "X:/", "/", SVN_VA_NULL), "/");
+ TEST_MANY((pool, "X:/", SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "X:/ghi");
+ TEST_MANY((pool, "X:/", "def", SVN_EMPTY_PATH, SVN_VA_NULL), "X:/def");
+ TEST_MANY((pool, SVN_EMPTY_PATH, "X:/", "ghi", SVN_VA_NULL), "X:/ghi");
+ TEST_MANY((pool, "X:/", SVN_EMPTY_PATH, SVN_EMPTY_PATH, SVN_VA_NULL), "X:/");
+ TEST_MANY((pool, SVN_EMPTY_PATH, "X:/", SVN_EMPTY_PATH, SVN_VA_NULL), "X:/");
+ TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "X:/", SVN_VA_NULL), "X:/");
+
+ TEST_MANY((pool, "X:", "def", "ghi", SVN_VA_NULL), "X:def/ghi");
+ TEST_MANY((pool, "X:", "X:/", "ghi", SVN_VA_NULL), "X:/ghi");
+ TEST_MANY((pool, "X:", "X:/", "/", SVN_VA_NULL), "/");
+ TEST_MANY((pool, "X:", SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "X:ghi");
+ TEST_MANY((pool, "X:", "def", SVN_EMPTY_PATH, SVN_VA_NULL), "X:def");
+ TEST_MANY((pool, SVN_EMPTY_PATH, "X:", "ghi", SVN_VA_NULL), "X:ghi");
+ TEST_MANY((pool, "//srv/shr", "def", "ghi", SVN_VA_NULL), "//srv/shr/def/ghi");
+ TEST_MANY((pool, "//srv/shr/fld", "def", "ghi", SVN_VA_NULL), "//srv/shr/fld/def/ghi");
+ TEST_MANY((pool, "//srv/shr/fld", "def", "//srv/shr", SVN_VA_NULL), "//srv/shr");
+ TEST_MANY((pool, "//srv/s r/fld", "def", "//srv/s r", SVN_VA_NULL), "//srv/s r");
+ TEST_MANY((pool, SVN_EMPTY_PATH, "//srv/shr/fld", "def", "ghi", SVN_VA_NULL), "//srv/shr/fld/def/ghi");
+ TEST_MANY((pool, SVN_EMPTY_PATH, "//srv/shr/fld", "def", "//srv/shr", SVN_VA_NULL), "//srv/shr");
+
+ TEST_MANY((pool, "abcd", "/dir", "A:", "file", SVN_VA_NULL), "A:file");
+ TEST_MANY((pool, "abcd", "A:", "/dir", "file", SVN_VA_NULL), "A:/dir/file");
+
+#else /* !SVN_USE_DOS_PATHS */
+ TEST_MANY((pool, "X:", "def", "ghi", SVN_VA_NULL), "X:/def/ghi");
+ TEST_MANY((pool, "X:", SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "X:/ghi");
+ TEST_MANY((pool, "X:", "def", SVN_EMPTY_PATH, SVN_VA_NULL), "X:/def");
+ TEST_MANY((pool, SVN_EMPTY_PATH, "X:", "ghi", SVN_VA_NULL), "X:/ghi");
+#endif /* SVN_USE_DOS_PATHS */
+
+ /* ### probably need quite a few more tests... */
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_relpath_join(apr_pool_t *pool)
+{
+ int i;
+ char *result;
+
+ static const char * const joins[][3] = {
+ { "abc", "def", "abc/def" },
+ { "a", "def", "a/def" },
+ { "a", "d", "a/d" },
+ { SVN_EMPTY_PATH, "abc", "abc" },
+ { "abc", SVN_EMPTY_PATH, "abc" },
+ { "", "", "" },
+ };
+
+ for (i = 0; i < COUNT_OF(joins); i++)
+ {
+ const char *base = joins[i][0];
+ const char *comp = joins[i][1];
+ const char *expect = joins[i][2];
+
+ result = svn_relpath_join(base, comp, pool);
+ if (strcmp(result, expect))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_relpath_join(\"%s\", \"%s\") returned "
+ "\"%s\". expected \"%s\"",
+ base, comp, result, expect);
+
+ /*result = svn_relpath_join_many(pool, base, comp, NULL);
+ if (strcmp(result, expect))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_relpath_join_many(\"%s\", \"%s\") "
+ "returned \"%s\". expected \"%s\"",
+ base, comp, result, expect);*/
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_dirent_basename(apr_pool_t *pool)
+{
+ int i;
+ const char *result;
+
+ struct {
+ const char *path;
+ const char *result;
+ } tests[] = {
+ { "abc", "abc" },
+ { "/abc", "abc" },
+ { "/abc", "abc" },
+ { "/x/abc", "abc" },
+ { "/xx/abc", "abc" },
+ { "/xx/abc", "abc" },
+ { "/xx/abc", "abc" },
+ { "a", "a" },
+ { "/a", "a" },
+ { "/b/a", "a" },
+ { "/b/a", "a" },
+ { "/", "" },
+ { SVN_EMPTY_PATH, SVN_EMPTY_PATH },
+ { "X:/abc", "abc" },
+#ifdef SVN_USE_DOS_PATHS
+ { "X:", "" },
+ { "X:/", "" },
+ { "X:abc", "abc" },
+ { "//srv/shr", "" },
+ { "//srv/shr/fld", "fld" },
+ { "//srv/shr/fld/subfld", "subfld" },
+ { "//srv/s r/fld", "fld" },
+#else /* !SVN_USE_DOS_PATHS */
+ { "X:", "X:" },
+ { "X:abc", "X:abc" },
+#endif /* SVN_USE_DOS_PATHS */
+ };
+
+ for (i = 0; i < COUNT_OF(tests); i++)
+ {
+ const char *path = tests[i].path;
+ const char *expect = tests[i].result;
+
+ result = svn_dirent_basename(path, pool);
+ if (strcmp(result, expect))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_dirent_basename(\"%s\") returned "
+ "\"%s\". expected \"%s\"",
+ path, result, expect);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_relpath_basename(apr_pool_t *pool)
+{
+ int i;
+ const char *result;
+
+ struct {
+ const char *path;
+ const char *result;
+ } tests[] = {
+ { "", "" },
+ { " ", " " },
+ { "foo/bar", "bar" },
+ { "foo/bar/bad", "bad" },
+ };
+
+ for (i = 0; i < COUNT_OF(tests); i++)
+ {
+ const char *path = tests[i].path;
+ const char *expect = tests[i].result;
+
+ result = svn_relpath_basename(path, pool);
+ if (strcmp(result, expect))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_relpath_basename(\"%s\") returned "
+ "\"%s\". expected \"%s\"",
+ path, result, expect);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_uri_basename(apr_pool_t *pool)
+{
+ int i;
+ const char *result;
+
+ struct {
+ const char *path;
+ const char *result;
+ } tests[] = {
+ { "http://s/file", "file" },
+ { "http://s/dir/file", "file" },
+ { "http://s/some%20dir/other%20file", "other file" },
+ { "http://s", "" },
+ { "file://", "" },
+ { "file:///a", "a" },
+ { "file:///a/b", "b" },
+ };
+
+ for (i = 0; i < COUNT_OF(tests); i++)
+ {
+ const char *path = tests[i].path;
+ const char *expect = tests[i].result;
+
+ result = svn_uri_basename(path, pool);
+ if (strcmp(result, expect))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_uri_basename(\"%s\") returned "
+ "\"%s\". expected \"%s\"",
+ path, result, expect);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_dirent_dirname(apr_pool_t *pool)
+{
+ int i;
+ char *result;
+
+ struct {
+ const char *path;
+ const char *result;
+ } tests[] = {
+ { "abc", "" },
+ { "/abc", "/" },
+ { "/x/abc", "/x" },
+ { "/xx/abc", "/xx" },
+ { "a", "" },
+ { "/a", "/" },
+ { "/b/a", "/b" },
+ { "/", "/" },
+ { SVN_EMPTY_PATH, SVN_EMPTY_PATH },
+ { "X:abc/def", "X:abc" },
+#ifdef SVN_USE_DOS_PATHS
+ { "X:/", "X:/" },
+ { "X:/abc", "X:/" },
+ { "X:abc", "X:" },
+ { "X:", "X:" },
+ { "//srv/shr", "//srv/shr" },
+ { "//srv/shr/fld", "//srv/shr" },
+ { "//srv/shr/fld/subfld", "//srv/shr/fld" },
+ { "//srv/s r/fld", "//srv/s r" },
+#else /* !SVN_USE_DOS_PATHS */
+ /* on non-Windows platforms, ':' is allowed in pathnames */
+ { "X:", "" },
+ { "X:abc", "" },
+#endif /* SVN_USE_DOS_PATHS */
+ };
+
+ for (i = 0; i < COUNT_OF(tests); i++)
+ {
+ const char *path = tests[i].path;
+ const char *expect = tests[i].result;
+
+ result = svn_dirent_dirname(path, pool);
+ if (strcmp(result, expect))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_dirent_dirname(\"%s\") returned "
+ "\"%s\". expected \"%s\"",
+ path, result, expect);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_relpath_dirname(apr_pool_t *pool)
+{
+ int i;
+ char *result;
+
+ struct {
+ const char *path;
+ const char *result;
+ } tests[] = {
+ { "", "" },
+ { " ", "" },
+ { "foo", "" },
+ { "foo/bar", "foo" },
+ { "foo/bar/bad", "foo/bar" },
+ };
+
+ for (i = 0; i < COUNT_OF(tests); i++)
+ {
+ const char *path = tests[i].path;
+ const char *expect = tests[i].result;
+
+ result = svn_relpath_dirname(path, pool);
+ if (strcmp(result, expect))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_relpath_dirname(\"%s\") returned "
+ "\"%s\". expected \"%s\"",
+ path, result, expect);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_uri_dirname(apr_pool_t *pool)
+{
+ int i;
+ char *result;
+
+ struct {
+ const char *path;
+ const char *result;
+ } tests[] = {
+ { "http://server/dir", "http://server" },
+ { "http://server/dir/file", "http://server/dir" },
+ { "http://server", "http://server" },
+ { "file:///a/b", "file:///a" },
+ { "file:///a", "file://" },
+ { "file://", "file://" },
+#ifdef WIN32
+ { "file:///A:/dir", "file:///A:" },
+ { "file:///A:", "file://" },
+#endif
+ };
+
+ for (i = 0; i < COUNT_OF(tests); i++)
+ {
+ const char *path = tests[i].path;
+ const char *expect = tests[i].result;
+
+ result = svn_uri_dirname(path, pool);
+ if (strcmp(result, expect))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_uri_dirname(\"%s\") returned "
+ "\"%s\". expected \"%s\"",
+ path, result, expect);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Paths to test and the expected result, for canonicalize tests. */
+typedef struct testcase_canonicalize_t {
+ const char *path;
+ const char *result;
+} testcase_canonicalize_t;
+
+static svn_error_t *
+test_dirent_canonicalize(apr_pool_t *pool)
+{
+ const testcase_canonicalize_t *t;
+ static const testcase_canonicalize_t tests[] = {
+ { "", "" },
+ { ".", "" },
+ { "/", "/" },
+ { "/.", "/" },
+ { "./", "" },
+ { "./.", "" },
+ { "//", "/" },
+ { "/////", "/" },
+ { "./././.", "" },
+ { "////././.", "/" },
+ { "foo", "foo" },
+ { ".foo", ".foo" },
+ { "foo.", "foo." },
+ { "/foo", "/foo" },
+ { "foo/", "foo" },
+ { "foo./", "foo." },
+ { "foo./.", "foo." },
+ { "foo././/.", "foo." },
+ { "/foo/bar", "/foo/bar" },
+ { "foo/..", "foo/.." },
+ { "foo/../", "foo/.." },
+ { "foo/../.", "foo/.." },
+ { "foo//.//bar", "foo/bar" },
+ { "//foo", "/foo" },
+ { "///foo", "/foo" },
+ { "/.//./.foo", "/.foo" },
+ { ".///.foo", ".foo" },
+ { "../foo", "../foo" },
+ { "../../foo/", "../../foo" },
+ { "../../foo/..", "../../foo/.." },
+ { "/../../", "/../.." },
+ { "X:/foo", "X:/foo" },
+ { "X:", "X:" },
+ { "X:foo", "X:foo" },
+ { "C:/folder/subfolder/file", "C:/folder/subfolder/file" },
+#ifdef SVN_USE_DOS_PATHS
+ { "X:/", "X:/" },
+ { "X:/./", "X:/" },
+ { "x:/", "X:/" },
+ { "x:", "X:" },
+ { "x:AAAAA", "X:AAAAA" },
+ /* We permit UNC dirents on Windows. By definition UNC
+ * dirents must have two components so we should remove the
+ * double slash if there is only one component. */
+ { "//hst/foo", "//hst/foo" },
+ { "//hst", "/hst" },
+ { "//hst/./", "/hst" },
+ { "//server/share/", "//server/share" },
+ { "//server/SHare/", "//server/SHare" },
+ { "//SERVER/SHare/", "//server/SHare" },
+ { "//srv/s r", "//srv/s r" },
+ { "//srv/s r/qq", "//srv/s r/qq" },
+#endif /* SVN_USE_DOS_PATHS */
+ };
+
+ for (t = tests; t < tests + COUNT_OF(tests); t++)
+ {
+ const char *canonical = svn_dirent_canonicalize(t->path, pool);
+
+ if (strcmp(canonical, t->result))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_dirent_canonicalize(\"%s\") returned "
+ "\"%s\" expected \"%s\"",
+ t->path, canonical, t->result);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_relpath_canonicalize(apr_pool_t *pool)
+{
+ const testcase_canonicalize_t *t;
+ static const testcase_canonicalize_t tests[] = {
+ { "", "" },
+ { ".", "" },
+ { "/", "" },
+ { "/.", "" },
+ { "./", "" },
+ { "./.", "" },
+ { "//", "" },
+ { "/////", "" },
+ { "./././.", "" },
+ { "////././.", "" },
+ { "foo", "foo" },
+ { ".foo", ".foo" },
+ { "foo.", "foo." },
+ { "/foo", "foo" },
+ { "foo/", "foo" },
+ { "foo./", "foo." },
+ { "foo./.", "foo." },
+ { "foo././/.", "foo." },
+ { "/foo/bar", "foo/bar" },
+ { "foo/..", "foo/.." },
+ { "foo/../", "foo/.." },
+ { "foo/../.", "foo/.." },
+ { "foo//.//bar", "foo/bar" },
+ { "//foo", "foo" },
+ { "///foo", "foo" },
+ { "/.//./.foo", ".foo" },
+ { ".///.foo", ".foo" },
+ { "../foo", "../foo" },
+ { "../../foo/", "../../foo" },
+ { "../../foo/..", "../../foo/.." },
+ { "/../../", "../.." },
+ { "X:/foo", "X:/foo" },
+ { "X:", "X:" },
+ { "X:foo", "X:foo" },
+ { "C:/folder/subfolder/file", "C:/folder/subfolder/file" },
+ { "http://hst", "http:/hst" },
+ { "http://hst/foo/../bar","http:/hst/foo/../bar" },
+ { "http://hst/", "http:/hst" },
+ { "http:///", "http:" },
+ { "https://", "https:" },
+ { "file:///", "file:" },
+ { "file://", "file:" },
+ { "svn:///", "svn:" },
+ { "svn+ssh:///", "svn+ssh:" },
+ { "http://HST/", "http:/HST" },
+ { "http://HST/FOO/BaR", "http:/HST/FOO/BaR" },
+ { "svn+ssh://j.raNDom@HST/BaR", "svn+ssh:/j.raNDom@HST/BaR" },
+ { "svn+SSH://j.random:jRaY@HST/BaR", "svn+SSH:/j.random:jRaY@HST/BaR" },
+ { "SVN+ssh://j.raNDom:jray@HST/BaR", "SVN+ssh:/j.raNDom:jray@HST/BaR" },
+ { "fILe:///Users/jrandom/wc", "fILe:/Users/jrandom/wc" },
+ { "fiLE:///", "fiLE:" },
+ { "fiLE://", "fiLE:" },
+ { "file://SRV/shr/repos", "file:/SRV/shr/repos" },
+ { "file://SRV/SHR/REPOS", "file:/SRV/SHR/REPOS" },
+ { "http://server////", "http:/server" },
+ { "http://server/file//", "http:/server/file" },
+ { "http://server//.//f//", "http:/server/f" },
+ { "file:///c:/temp/repos", "file:/c:/temp/repos" },
+ { "file:///c:/temp/REPOS", "file:/c:/temp/REPOS" },
+ { "file:///C:/temp/REPOS", "file:/C:/temp/REPOS" },
+ };
+
+ for (t = tests; t < tests + COUNT_OF(tests); t++)
+ {
+ const char *canonical = svn_relpath_canonicalize(t->path, pool);
+
+ if (strcmp(canonical, t->result))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_relpath_canonicalize(\"%s\") returned "
+ "\"%s\" expected \"%s\"",
+ t->path, canonical, t->result);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static const testcase_canonicalize_t uri_canonical_tests[] =
+ {
+ { "http://hst", "http://hst" },
+ { "http://hst/foo/../bar","http://hst/foo/../bar" },
+ { "http://hst/", "http://hst" },
+ { "http:///", "http://" },
+ { "http:///example.com/", "http:///example.com" },
+ { "http:////example.com/", "http:///example.com" },
+ { "http://///////example.com/", "http:///example.com" },
+ { "https://", "https://" },
+ { "file:///", "file://" },
+ { "file://", "file://" },
+ { "svn:///", "svn://" },
+ { "svn+ssh:///", "svn+ssh://" },
+ { "http://HST/", "http://hst" },
+ { "http://HST/FOO/BaR", "http://hst/FOO/BaR" },
+ { "svn+ssh://jens@10.0.1.1", "svn+ssh://jens@10.0.1.1" },
+ { "svn+ssh://j.raNDom@HST/BaR", "svn+ssh://j.raNDom@hst/BaR" },
+ { "svn+SSH://j.random:jRaY@HST/BaR", "svn+ssh://j.random:jRaY@hst/BaR" },
+ { "SVN+ssh://j.raNDom:jray@HST/BaR", "svn+ssh://j.raNDom:jray@hst/BaR" },
+ { "svn+ssh://j.raNDom:jray@hst/BaR", "svn+ssh://j.raNDom:jray@hst/BaR" },
+ { "fILe:///Users/jrandom/wc", "file:///Users/jrandom/wc" },
+ { "fiLE:///", "file://" },
+ { "fiLE://", "file://" },
+ { "file://SRV/shr/repos", "file://srv/shr/repos" },
+ { "file://SRV/SHR/REPOS", "file://srv/SHR/REPOS" },
+ { "http://server////", "http://server" },
+ { "http://server/file//", "http://server/file" },
+ { "http://server//.//f//", "http://server/f" },
+ { "http://server/d/.", "http://server/d" },
+ { "http://server/d/%2E", "http://server/d" },
+ { "http://server/d/./q", "http://server/d/q" },
+ { "http://server/d/%2E/q", "http://server/d/q" },
+ { "http://server/%", "http://server/%25" },
+ { "http://server/%25", "http://server/%25" },
+ { "http://server/%/d", "http://server/%25/d" },
+ { "http://server/%25/d", "http://server/%25/d" },
+ { "http://server/+", "http://server/+" },
+ { "http://server/%2B", "http://server/+" },
+ { "http://server/ ", "http://server/%20" },
+ { "http://server/#", "http://server/%23" },
+ { "http://server/d/a%2Fb", "http://server/d/a/b" },
+ { "http://server/d/.%2F.", "http://server/d" },
+ { "http://server/d/%2E%2F%2E", "http://server/d" },
+ { "file:///C%3a/temp", "file:///C:/temp" },
+ { "http://server/cr%AB", "http://server/cr%AB" },
+ { "http://server/cr%ab", "http://server/cr%AB" },
+ { "http://hst/foo/bar/", "http://hst/foo/bar" },
+ { "http://hst/foo/.", "http://hst/foo" },
+ { "http://hst/foo/%2E", "http://hst/foo" },
+ { "http://hst/%", "http://hst/%25" },
+ { "http://hst/+", "http://hst/+" },
+ { "http://hst/#", "http://hst/%23" },
+ { "http://hst/ ", "http://hst/%20" },
+ { "http://hst/%2B", "http://hst/+" },
+ { "http://HST", "http://hst" },
+ { "http://hst/foo/./bar", "http://hst/foo/bar" },
+ { "hTTp://hst/foo/bar", "http://hst/foo/bar" },
+ { "http://hst/foo/bar/", "http://hst/foo/bar" },
+ { "file://SRV/share/repo", "file://srv/share/repo" },
+ { "file://srv/SHARE/repo", "file://srv/SHARE/repo" },
+ { "file://srv/share/repo", "file://srv/share/repo" },
+ { "file://srv/share/repo/","file://srv/share/repo" },
+ { "file:///folder/c#", "file:///folder/c%23" }, /* # needs escaping */
+ { "file:///fld/with space","file:///fld/with%20space" }, /* # needs escaping */
+ { "file:///fld/c%23", "file:///fld/c%23" }, /* Properly escaped C# */
+ { "file:///%DE%AD%BE%EF", "file:///%DE%AD%BE%EF" },
+ { "file:///%de%ad%be%ef", "file:///%DE%AD%BE%EF" },
+ { "file:///%DE%ad%BE%ef", "file:///%DE%AD%BE%EF" },
+ /* With default port number; these are non-canonical */
+ { "http://server:", "http://server" },
+ { "http://server:/", "http://server" },
+ { "http://server:80", "http://server" },
+ { "http://SERVER:80", "http://server" },
+ { "http://server:80/", "http://server" },
+ { "http://server:80/p", "http://server/p" },
+ { "https://server:", "https://server" },
+ { "https://Server:443/q", "https://server/q" },
+ { "svn://server:3690/", "svn://server" },
+ { "svn://sERVER:3690/r", "svn://server/r" },
+ { "svn://server:/r", "svn://server/r" },
+ /* With non-default port number; both canonical and non-c. examples */
+ { "http://server:1", "http://server:1" },
+ { "http://server:443", "http://server:443" },
+ { "http://server:81/", "http://server:81" },
+ { "http://SERVER:3690/", "http://server:3690" },
+ { "https://server:3690", "https://server:3690" },
+ { "https://SERVER:80/", "https://server:80" },
+ { "svn://server:80", "svn://server:80" },
+ { "svn://SERVER:443/", "svn://server:443" },
+ { "file:///C%7C/temp/REPOS", "file:///C%7C/temp/REPOS" },
+ { "file:///C|/temp/REPOS", "file:///C%7C/temp/REPOS" },
+ { "file:///C:/", "file:///C:" },
+ { "http://[::1]/", "http://[::1]" },
+ { "http://[::1]:80/", "http://[::1]" },
+ { "https://[::1]:443", "https://[::1]" },
+ { "http://[::1]/", "http://[::1]" },
+ { "http://[::1]:80/", "http://[::1]" },
+ { "https://[::1]:443", "https://[::1]" },
+ { "http://[FACE:B00C::]/s","http://[face:b00c::]/s" },
+ { "svn+ssh://b@[1:2::3]/s","svn+ssh://b@[1:2::3]/s" },
+ { "file:///A%2f%2Fb%2fc", "file:///A/b/c"},
+ { "file:///A%2fb%2f%2Fc", "file:///A/b/c"},
+#ifdef SVN_USE_DOS_PATHS
+ { "file:///c:/temp/repos", "file:///C:/temp/repos" },
+ { "file:///c:/temp/REPOS", "file:///C:/temp/REPOS" },
+ { "file:///C:/temp/REPOS", "file:///C:/temp/REPOS" },
+ { "file:///c:/", "file:///C:" },
+ { "file:///c:%2ftemp", "file:///C:/temp"},
+ { "file:///C:hi", "file:///C:hi" },
+ { "file:///c:hi", "file:///C:hi" },
+ { "file:///C:hi/Q", "file:///C:hi/Q" },
+ { "file:///c:hi/q", "file:///C:hi/q" },
+ { "file:///c:hi%2fD", "file:///C:hi/D" },
+ { "file:///c:hi%25/A", "file:///C:hi%25/A"},
+ { "file:///c:hi%2E/A", "file:///C:hi./A"},
+ { "file:///c:hi%/A", "file:///C:hi%25/A"},
+#else /* !SVN_USE_DOS_PATHS */
+ { "file:///c:/temp/repos", "file:///c:/temp/repos" },
+ { "file:///c:/temp/REPOS", "file:///c:/temp/REPOS" },
+ { "file:///C:/temp/REPOS", "file:///C:/temp/REPOS" },
+ { "file:///c:/", "file:///c:" },
+ { "file:///c:%2ftemp", "file:///c:/temp"},
+ { "file:///C:hi", "file:///C:hi" },
+ { "file:///c:hi", "file:///c:hi" },
+ { "file:///C:hi/Q", "file:///C:hi/Q" },
+ { "file:///c:hi/q", "file:///c:hi/q" },
+ { "file:///c:hi%2fD", "file:///c:hi/D" },
+ { "file:///c:hi%25/A", "file:///c:hi%25/A" },
+ { "file:///c:hi%2E/A", "file:///c:hi./A"},
+ { "file:///c:hi%/A", "file:///c:hi%25/A"},
+#endif /* SVN_USE_DOS_PATHS */
+ /* Hostnames that look like non-canonical paths */
+ { "file://./foo", "file://./foo" },
+ { "http://./foo", "http://./foo" },
+ /* Some invalid URLs, these still have a canonical form */
+ { "http://server:81:81/", "http://server:81:81" },
+ { "http://server:81foo/", "http://server:81foo" },
+ { "http://server::/", "http://server::" },
+ { "http://server:-/", "http://server:-" },
+ { "http://hst:1.2.3.4.5/", "http://hst:1.2.3.4.5"},
+ { "http://hst:1.2.999.4/", "http://hst:1.2.999.4"},
+ /* svn_uri_is_canonical() was a private function in the 1.6 API, and
+ has since taken a MAJOR change of direction, namely that only
+ absolute URLs are considered canonical uris now. */
+ { "", NULL },
+ { ".", NULL },
+ { "/", NULL },
+ { "/.", NULL },
+ { "./", NULL },
+ { "./.", NULL },
+ { "//", NULL },
+ { "/////", NULL },
+ { "./././.", NULL },
+ { "////././.", NULL },
+ { "foo", NULL },
+ { ".foo", NULL },
+ { "foo.", NULL },
+ { "/foo", NULL },
+ { "foo/", NULL },
+ { "foo./", NULL },
+ { "foo./.", NULL },
+ { "foo././/.", NULL },
+ { "/foo/bar", NULL },
+ { "foo/..", NULL },
+ { "foo/../", NULL },
+ { "foo/../.", NULL },
+ { "foo//.//bar", NULL },
+ { "//foo", NULL },
+ { "///foo", NULL },
+ { "/.//./.foo", NULL },
+ { ".///.foo", NULL },
+ { "../foo", NULL },
+ { "../../foo/", NULL },
+ { "../../foo/..", NULL },
+ { "/../../", NULL },
+ { "dirA", NULL },
+ { "foo/dirA", NULL },
+ { "foo/./bar", NULL },
+ { "C:/folder/subfolder/file", NULL },
+ { "X:/foo", NULL },
+ { "X:", NULL },
+ { "X:foo", NULL },
+ { "X:foo/", NULL },
+ /* Some people use colons in their filenames. */
+ { ":", NULL },
+ { ".:", NULL },
+ { "foo/.:", NULL },
+ { "//server/share", NULL }, /* Only valid as dirent */
+ { "//server", NULL },
+ { "//", NULL },
+ { "sch://@/", NULL },
+ { "sch:///", NULL },
+ { "svn://:", NULL },
+ };
+
+static svn_error_t *
+test_uri_canonicalize(apr_pool_t *pool)
+{
+ const testcase_canonicalize_t *t;
+
+ for (t = uri_canonical_tests;
+ t < uri_canonical_tests + COUNT_OF(uri_canonical_tests);
+ t++)
+ {
+ const char *canonical;
+
+ if (! t->result)
+ continue;
+
+ canonical = svn_uri_canonicalize(t->path, pool);
+
+ if (strcmp(canonical, t->result))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_uri_canonicalize(\"%s\") returned "
+ "\"%s\" expected \"%s\"",
+ t->path, canonical, t->result);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Paths to test and the expected result, for is_canonical tests. */
+typedef struct testcase_is_canonical_t {
+ const char *path;
+ svn_boolean_t canonical;
+} testcase_is_canonical_t;
+
+/* Verify svn_dirent_is_canonical() against a table of dirents, and
+ * cross-check it against svn_dirent_canonicalize(): a path reported
+ * canonical must come back from canonicalize unchanged, and a path
+ * reported non-canonical must come back changed. */
+static svn_error_t *
+test_dirent_is_canonical(apr_pool_t *pool)
+{
+  const testcase_is_canonical_t *t;
+  static const testcase_is_canonical_t tests[] = {
+    { "", TRUE },
+    { ".", FALSE },
+    { "/", TRUE },
+    { "/.", FALSE },
+    { "./", FALSE },
+    { "./.", FALSE },
+    { "//", FALSE },
+    { "/////", FALSE },
+    { "./././.", FALSE },
+    { "////././.", FALSE },
+    { "foo", TRUE },
+    { ".foo", TRUE },
+    { "foo.", TRUE },
+    { "/foo", TRUE },
+    { "foo/", FALSE },
+    { "foo./", FALSE },
+    { "foo./.", FALSE },
+    { "foo././/.", FALSE },
+    { "/foo/bar", TRUE },
+    { "foo/..", TRUE },
+    { "foo/../", FALSE },
+    { "foo/../.", FALSE },
+    { "foo//.//bar", FALSE },
+    { "//foo", FALSE },
+    { "///foo", FALSE },
+    { "/.//./.foo", FALSE },
+    { ".///.foo", FALSE },
+    { "../foo", TRUE },
+    { "../../foo/", FALSE },
+    { "../../foo/..", TRUE },
+    { "/../../", FALSE },
+    { "dirA", TRUE },
+    { "foo/dirA", TRUE },
+    { "foo/./bar", FALSE },
+    { "C:/folder/subfolder/file", TRUE },
+    { "X:/foo", TRUE },
+    { "X:", TRUE },
+    { "X:foo", TRUE },
+    { "X:foo/", FALSE },
+    { "file with spaces", TRUE },
+#ifdef SVN_USE_DOS_PATHS
+    { "X:/", TRUE },
+    { "X:/foo", TRUE },
+    { "X:", TRUE },
+    { "X:foo", TRUE },
+    { "x:/", FALSE },
+    { "x:/foo", FALSE },
+    { "x:", FALSE },
+    { "x:foo", FALSE },
+    /* We permit UNC dirents on Windows. By definition UNC
+     * dirents must have two components so we should remove the
+     * double slash if there is only one component. */
+    { "//hst", FALSE },
+    { "//hst/./", FALSE },
+    { "//server/share/", FALSE },
+    { "//server/share", TRUE },
+    { "//server/SHare", TRUE },
+    { "//SERVER/SHare", FALSE },
+    { "//srv/SH RE", TRUE },
+#else /* !SVN_USE_DOS_PATHS */
+    { "X:/", FALSE },
+    /* Some people use colons in their filenames. */
+    { ":", TRUE },
+    { ".:", TRUE },
+    { "foo/.:", TRUE },
+#endif /* SVN_USE_DOS_PATHS */
+  };
+
+  for (t = tests; t < tests + COUNT_OF(tests); t++)
+    {
+      svn_boolean_t canonical;
+      const char* canonicalized;
+
+      canonical = svn_dirent_is_canonical(t->path, pool);
+      if (t->canonical != canonical)
+        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                 "svn_dirent_is_canonical(\"%s\") returned "
+                                 "\"%s\" expected \"%s\"",
+                                 t->path,
+                                 canonical ? "TRUE" : "FALSE",
+                                 t->canonical ? "TRUE" : "FALSE");
+
+      canonicalized = svn_dirent_canonicalize(t->path, pool);
+
+      /* Canonical input must round-trip unchanged; non-canonical input
+         must be altered by canonicalization.  The message used to claim
+         is_canonical "returned TRUE" unconditionally; report the actual
+         value, like test_relpath_is_canonical does. */
+      if ((canonical && strcmp(t->path, canonicalized) != 0)
+          || (!canonical && strcmp(t->path, canonicalized) == 0))
+        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                 "svn_dirent_canonicalize(\"%s\") returned "
+                                 "\"%s\" while svn_dirent_is_canonical "
+                                 "returned %s",
+                                 t->path,
+                                 canonicalized,
+                                 canonical ? "TRUE" : "FALSE");
+    }
+
+  return SVN_NO_ERROR;
+}
+
+/* Verify svn_relpath_is_canonical() against a table of relpaths, and
+ * cross-check it against svn_relpath_canonicalize(): canonical input must
+ * round-trip unchanged, non-canonical input must be altered.  Note that
+ * URL-looking strings are valid relpaths here (a colon is not special),
+ * so e.g. "svn+ssh:/jens@10.0.1.1" is canonical as a relpath. */
+static svn_error_t *
+test_relpath_is_canonical(apr_pool_t *pool)
+{
+  const testcase_is_canonical_t *t;
+  static const testcase_is_canonical_t tests[] = {
+    { "", TRUE },
+    { ".", FALSE },
+    { "..", TRUE },
+    { "/", FALSE },
+    { "/.", FALSE },
+    { "./", FALSE },
+    { "./.", FALSE },
+    { "//", FALSE },
+    { "/////", FALSE },
+    { "./././.", FALSE },
+    { "////././.", FALSE },
+    { "foo", TRUE },
+    { ".foo", TRUE },
+    { "foo.", TRUE },
+    { "/foo", FALSE },
+    { "foo/", FALSE },
+    { "foo./", FALSE },
+    { "foo./.", FALSE },
+    { "foo././/.", FALSE },
+    { "/foo/bar", FALSE },
+    { "foo/..", TRUE },
+    { "foo/../", FALSE },
+    { "foo/../.", FALSE },
+    { "foo//.//bar", FALSE },
+    { "//foo", FALSE },
+    { "///foo", FALSE },
+    { "/.//./.foo", FALSE },
+    { ".///.foo", FALSE },
+    { "../foo", TRUE },
+    { "../../foo/", FALSE },
+    { "../../foo/..", TRUE },
+    { "/../../", FALSE },
+    { "dirA", TRUE },
+    { "foo/dirA", TRUE },
+    { "foo/./bar", FALSE },
+    { "http://hst", FALSE },
+    { "http://hst/foo/../bar", FALSE },
+    { "http://HST/", FALSE },
+    { "http://HST/FOO/BaR", FALSE },
+    { "svn+ssh://jens@10.0.1.1", FALSE },
+    { "svn+ssh:/jens@10.0.1.1", TRUE },
+    { "fILe:///Users/jrandom/wc", FALSE },
+    { "fILe:/Users/jrandom/wc", TRUE },
+    { "X:/foo", TRUE },
+    { "X:", TRUE },
+    { "X:foo", TRUE },
+    { "X:foo/", FALSE },
+    /* Some people use colons in their filenames. */
+    { ":", TRUE },
+    { ".:", TRUE },
+    { "foo/.:", TRUE },
+    { "//server/share", FALSE }, /* Only valid as dirent */
+    { "//server", FALSE },
+    { "//", FALSE },
+    { "file:///c:/temp/repos", FALSE },
+    { "file:///c:/temp/REPOS", FALSE },
+    { "file:///C:/temp/REPOS", FALSE },
+  };
+
+  for (t = tests; t < tests + COUNT_OF(tests); t++)
+    {
+      svn_boolean_t canonical;
+      const char* canonicalized;
+
+      canonical = svn_relpath_is_canonical(t->path);
+      if (t->canonical != canonical)
+        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                 "svn_relpath_is_canonical(\"%s\") returned "
+                                 "\"%s\" expected \"%s\"",
+                                 t->path,
+                                 canonical ? "TRUE" : "FALSE",
+                                 t->canonical ? "TRUE" : "FALSE");
+
+      canonicalized = svn_relpath_canonicalize(t->path, pool);
+
+      /* is_canonical and canonicalize must agree with each other. */
+      if ((canonical && strcmp(t->path, canonicalized) != 0)
+          || (!canonical && strcmp(t->path, canonicalized) == 0))
+        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                 "svn_relpath_canonicalize(\"%s\") returned "
+                                 "\"%s\" while svn_relpath_is_canonical "
+                                 "returned %s",
+                                 t->path,
+                                 canonicalized,
+                                 canonical ? "TRUE" : "FALSE");
+    }
+
+  return SVN_NO_ERROR;
+}
+
+/* Verify svn_uri_is_canonical() against the shared uri_canonical_tests
+ * table: a path is canonical iff its expected canonical form is non-NULL
+ * and equal to the path itself; additionally every expected canonical
+ * form must itself be reported canonical. */
+static svn_error_t *
+test_uri_is_canonical(apr_pool_t *pool)
+{
+  const testcase_canonicalize_t *t;
+
+  for (t = uri_canonical_tests;
+       t < uri_canonical_tests + COUNT_OF(uri_canonical_tests);
+       t++)
+    {
+      svn_boolean_t canonical;
+
+      canonical = svn_uri_is_canonical(t->path, pool);
+      /* Canonical means "has a canonical form and already is it". */
+      if (canonical != (t->result && strcmp(t->path, t->result) == 0))
+        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                 "svn_uri_is_canonical(\"%s\") returned "
+                                 "\"%s\"; canonical form is \"%s\"",
+                                 t->path,
+                                 canonical ? "TRUE" : "FALSE",
+                                 t->result);
+
+      /* The canonical form must be a fixed point of canonicalization. */
+      if (t->result && !svn_uri_is_canonical(t->result, pool))
+        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                 "svn_uri_is_canonical(\"%s\") returned "
+                                 "FALSE on canonical form",
+                                 t->result);
+    }
+
+  return SVN_NO_ERROR;
+}
+
+/* Verify svn_dirent_split(): each table row is { input, expected dirname,
+ * expected basename }.  DOS-path rows cover drive-relative ("X:foo") and
+ * UNC ("//srv/shr") forms. */
+static svn_error_t *
+test_dirent_split(apr_pool_t *pool)
+{
+  apr_size_t i;
+
+  static const char * const paths[][3] = {
+    { "/foo/bar",        "/foo",          "bar" },
+    { "/foo/bar/ ",       "/foo/bar",      " " },
+    { "/foo",            "/",             "foo" },
+    { "foo",             SVN_EMPTY_PATH,  "foo" },
+    { ".bar",            SVN_EMPTY_PATH,  ".bar" },
+    { "/.bar",           "/",             ".bar" },
+    { "foo/bar",         "foo",           "bar" },
+    { "/foo/bar",        "/foo",          "bar" },
+    { "foo/bar",         "foo",           "bar" },
+    { "foo./.bar",       "foo.",          ".bar" },
+    { "../foo",          "..",            "foo" },
+    { SVN_EMPTY_PATH, SVN_EMPTY_PATH, SVN_EMPTY_PATH },
+    { "/flu\\b/\\blarg", "/flu\\b",       "\\blarg" },
+    { "/",               "/",             "" },
+    { "X:/foo/bar",      "X:/foo",        "bar" },
+    { "X:foo/bar",       "X:foo",         "bar" },
+#ifdef SVN_USE_DOS_PATHS
+    { "X:/",             "X:/",           "" },
+    { "X:/foo",          "X:/",           "foo" },
+    { "X:foo",           "X:",            "foo" },
+    { "//srv/shr",       "//srv/shr",     "" },
+    { "//srv/shr/fld",   "//srv/shr",     "fld" },
+    { "//srv/s r",       "//srv/s r",     "" },
+#else /* !SVN_USE_DOS_PATHS */
+    { "X:foo",           SVN_EMPTY_PATH,  "X:foo" },
+#endif /* SVN_USE_DOS_PATHS */
+  };
+
+  for (i = 0; i < COUNT_OF(paths); i++)
+    {
+      const char *dir, *base_name;
+
+      svn_dirent_split(&dir, &base_name, paths[i][0], pool);
+      if (strcmp(dir, paths[i][1]))
+        {
+          return svn_error_createf
+            (SVN_ERR_TEST_FAILED, NULL,
+             "svn_dirent_split (%s) returned dirname '%s' instead of '%s'",
+             paths[i][0], dir, paths[i][1]);
+        }
+      if (strcmp(base_name, paths[i][2]))
+        {
+          return svn_error_createf
+            (SVN_ERR_TEST_FAILED, NULL,
+             "svn_dirent_split (%s) returned basename '%s' instead of '%s'",
+             paths[i][0], base_name, paths[i][2]);
+        }
+    }
+  return SVN_NO_ERROR;
+}
+
+/* Verify svn_relpath_split(): each table row is { input, expected dirname,
+ * expected basename }. */
+static svn_error_t *
+test_relpath_split(apr_pool_t *pool)
+{
+  apr_size_t i;
+
+  static const char * const paths[][3] = {
+    { "", "", "" },
+    { "bar", "", "bar" },
+    { "foo/bar", "foo", "bar" },
+    { "a/b/c", "a/b", "c" },
+  };
+
+  for (i = 0; i < COUNT_OF(paths); i++)
+    {
+      const char *dir, *base_name;
+
+      svn_relpath_split( &dir, &base_name, paths[i][0], pool);
+      if (strcmp(dir, paths[i][1]))
+        {
+          return svn_error_createf
+            (SVN_ERR_TEST_FAILED, NULL,
+             "svn_relpath_split (%s) returned dirname '%s' instead of '%s'",
+             paths[i][0], dir, paths[i][1]);
+        }
+      if (strcmp(base_name, paths[i][2]))
+        {
+          return svn_error_createf
+            (SVN_ERR_TEST_FAILED, NULL,
+             "svn_relpath_split (%s) returned basename '%s' instead of '%s'",
+             paths[i][0], base_name, paths[i][2]);
+        }
+    }
+  return SVN_NO_ERROR;
+}
+
+
+/* Verify svn_uri_split(): each table row is { input URI, expected dirname
+ * URI, expected basename }.  Note the basename is URI-decoded
+ * ("foo%20bar" splits to "foo bar") while the dirname stays encoded. */
+static svn_error_t *
+test_uri_split(apr_pool_t *pool)
+{
+  apr_size_t i;
+
+  static const char * const paths[][3] = {
+    { "http://server/foo/bar", "http://server/foo", "bar" },
+    { "http://server/dir/foo/bar", "http://server/dir/foo", "bar" },
+    { "http://server/some%20dir/foo%20bar", "http://server/some%20dir", "foo bar" },
+    { "http://server/foo", "http://server", "foo" },
+    { "http://server", "http://server", "" },
+    { "file://", "file://", "" },
+    { "file:///a", "file://", "a" }
+  };
+
+  for (i = 0; i < COUNT_OF(paths); i++)
+    {
+      const char *dir, *base_name;
+
+      svn_uri_split(&dir, &base_name, paths[i][0], pool);
+      if (strcmp(dir, paths[i][1]))
+        {
+          return svn_error_createf
+            (SVN_ERR_TEST_FAILED, NULL,
+             "svn_uri_split (%s) returned dirname '%s' instead of '%s'",
+             paths[i][0], dir, paths[i][1]);
+        }
+      if (strcmp(base_name, paths[i][2]))
+        {
+          return svn_error_createf
+            (SVN_ERR_TEST_FAILED, NULL,
+             "svn_uri_split (%s) returned basename '%s' instead of '%s'",
+             paths[i][0], base_name, paths[i][2]);
+        }
+    }
+  return SVN_NO_ERROR;
+}
+
+/* Paths to test and the expected result, for ancestor tests. */
+typedef struct testcase_ancestor_t {
+  const char *path1;  /* candidate ancestor */
+  const char *path2;  /* candidate descendant */
+  const char *result; /* remainder of path2 below path1, or NULL if
+                         path1 is not an ancestor of path2 */
+} testcase_ancestor_t;
+
+/* Ancestor/remainder expectations for dirents; shared by
+ * test_dirent_is_ancestor and test_dirent_skip_ancestor.  A NULL result
+ * means "not an ancestor". */
+static const testcase_ancestor_t dirent_ancestor_tests[] =
+  {
+    { "",               "",                 "" },
+    { "",               "foo",              "foo" },
+    { "",               ".bar",             ".bar" },
+    { "",               "/",                NULL },
+    { "",               "/foo",             NULL },
+    { "/",              "",                 NULL },
+    { "/",              "foo",              NULL },
+    { "/",              "/",                "" },
+    { "/",              "/foo",             "foo" },
+    { "/",              "bar/bla",          NULL },
+    { "/foo",           "/foo",             "" },
+    { "/foo",           "/foot",            NULL },
+    { "/foo",           "/foo/bar",         "bar" },
+    { "/foo/bar",       "/foot/bar",        NULL },
+    { "/foot",          "/foo",             NULL },
+    { "/foo/bar/bla",   "/foo/bar",         NULL },
+    { "/foo/bar",       "/foo/bar/bla",     "bla" },
+    { "foo/bar",        "foo",              NULL },
+    { "/foo/bar",       "foo",              NULL },
+    { "/.bar",          "/",                NULL },
+    { "/foo/bar",       "/foo",             NULL },
+    { "foo",            "foo/bar",          "bar" },
+    { "foo.",           "foo./.bar",        ".bar" },
+    { "X:foo",          "X:bar",            NULL },
+    { "../foo",         "..",               NULL },
+    { "/foo/bar/zig",   "/foo",             NULL },
+    { "/foo/bar/zig",   "/foo/ba",          NULL },
+    { "/foo/bar/zig",   "/foo/bar/zi",      NULL },
+#ifdef SVN_USE_DOS_PATHS
+    { "",               "C:",               NULL },
+    { "",               "C:foo",            NULL },
+    { "",               "C:/",              NULL },
+    { "",               "C:/foo",           NULL },
+    { "X",              "X:",               NULL },
+    { "X",              "X:foo",            NULL },
+    { "X",              "X:/",              NULL },
+    { "X",              "X:/foo",           NULL },
+    { "X:",             "X:",               "" },
+    { "X:",             "X:foo",            "foo" },
+    { "X:",             "X:/",              NULL },
+    { "X:",             "X:/foo",           NULL },
+    { "X:/",            "X:",               NULL },
+    { "X:/",            "X:foo",            NULL },
+    { "X:/",            "X:/",              "" },
+    { "X:/",            "X:/foo",           "foo" },
+    { "X:/foo",         "X:/",              NULL },
+    { "A:/foo",         "A:/foo/bar",       "bar" },
+    { "A:/foo",         "A:/foot",          NULL },
+    { "A:/foo/bar/zig", "A:/foo",           NULL },
+    { "A:/foo/bar/zig", "A:/foo/ba",        NULL },
+    { "A:/foo/bar/zig", "A:/foo/bar/zi",    NULL },
+    { "//srv",          "//srv/share",      NULL },
+    { "//srv",          "//srv/shr/fld",    NULL },
+    { "//srv/shr",      "//srv",            NULL },
+    { "//srv/share",    "//vrs/share",      NULL },
+    { "//srv/share",    "//srv/share/foo",  "foo" },
+    { "//srv/shr",      "//srv/shr/fld",    "fld" },
+    { "//srv/s r",      "//srv/s r/fld",    "fld" },
+    { "//srv/shr/fld",  "//srv/shr",        NULL },
+    { "//srv/shr/fld",  "//srv2/shr/fld",   NULL },
+    { "//srv/shr/fld",  "//srv/shr/f",      NULL },
+    { "/",              "//srv/share",      NULL },
+#else /* !SVN_USE_DOS_PATHS */
+    { "",               "C:",               "C:" },
+    { "",               "C:/foo",           "C:/foo" },
+    { "X:",             "X:foo",            NULL },
+#endif
+  };
+
+/* Verify svn_dirent_is_ancestor() against dirent_ancestor_tests: it must
+ * return TRUE exactly where the table has a non-NULL remainder. */
+static svn_error_t *
+test_dirent_is_ancestor(apr_pool_t *pool)
+{
+  const testcase_ancestor_t *t;
+
+  for (t = dirent_ancestor_tests;
+       t < dirent_ancestor_tests + COUNT_OF(dirent_ancestor_tests);
+       t++)
+    {
+      svn_boolean_t retval;
+
+      retval = svn_dirent_is_ancestor(t->path1, t->path2);
+      /* !!t->result normalizes "non-NULL result" to a boolean. */
+      if (!!t->result != retval)
+        return svn_error_createf
+          (SVN_ERR_TEST_FAILED, NULL,
+           "svn_dirent_is_ancestor (%s, %s) returned %s instead of %s",
+           t->path1, t->path2, retval ? "TRUE" : "FALSE",
+           t->result ? "TRUE" : "FALSE");
+    }
+  return SVN_NO_ERROR;
+}
+
+/* Ancestor/remainder expectations for relpaths, used by
+ * test_relpath_skip_ancestor.  A NULL result means "not an ancestor". */
+static const testcase_ancestor_t relpath_ancestor_tests[] =
+  {
+    { "",               "",                 "" },
+    { "",               "foo",              "foo" },
+    { "",               ".bar",             ".bar" },
+    { "",               "bar/bla",          "bar/bla" },
+    { "foo",            "foo",              "" },
+    { "foo",            "foo/bar",          "bar" },
+    { "foo",            "foot",             NULL },
+    { "foo.",           "foo./.bar",        ".bar" },
+    { "foot",           "foo",              NULL },
+    { "foo/bar",        "foo",              NULL },
+    { "foo/bar",        "foo/bar/bla",      "bla" },
+    { "foo/bar",        "foot/bar",         NULL },
+    { "foo/bar/bla",    "foo/bar",          NULL },
+    { "food/bar",       "foo/bar",          NULL },
+    { "http:/server",   "http:/server/q",   "q" },
+    { "svn:/server",    "http:/server/q",   NULL },
+    /* These are relpaths so a colon is not special. */
+    { "",               "C:",               "C:" },
+    { "X:",             "X:foo",            NULL },
+    { "X:",             "X:/foo",           "foo" },
+    { "X:foo",          "X:bar",            NULL },
+  };
+
+/* Ancestor/remainder expectations for URIs, shared by
+ * test_uri_is_ancestor and test_uri_skip_ancestor.  A NULL result means
+ * "not an ancestor". */
+static const testcase_ancestor_t uri_ancestor_tests[] =
+  {
+    { "http://test",    "http://test",      "" },
+    { "http://test",    "http://taste",     NULL },
+    { "http://test",    "http://test/foo",  "foo" },
+    { "http://test",    "file://test/foo",  NULL },
+    { "http://test",    "http://testf",     NULL },
+    { "http://",        "http://test",      NULL },
+    { "http://server",  "http://server/q",  "q" },
+    { "svn://server",   "http://server/q",  NULL },
+    { "http://foo/bar", "http://foo",       NULL },
+    { "http://foo/bar", "http://foo/ba",    NULL },
+  };
+
+/* Verify svn_uri__is_ancestor() against uri_ancestor_tests: it must
+ * return TRUE exactly where the table has a non-NULL remainder. */
+static svn_error_t *
+test_uri_is_ancestor(apr_pool_t *pool)
+{
+  const testcase_ancestor_t *t;
+
+  for (t = uri_ancestor_tests;
+       t < uri_ancestor_tests + COUNT_OF(uri_ancestor_tests);
+       t++)
+    {
+      svn_boolean_t retval;
+
+      retval = svn_uri__is_ancestor(t->path1, t->path2);
+      /* !!t->result normalizes "non-NULL result" to a boolean. */
+      if (!!t->result != retval)
+        return svn_error_createf
+          (SVN_ERR_TEST_FAILED, NULL,
+           "svn_uri_is_ancestor (%s, %s) returned %s instead of %s",
+           t->path1, t->path2, retval ? "TRUE" : "FALSE",
+           t->result ? "TRUE" : "FALSE");
+    }
+  return SVN_NO_ERROR;
+}
+
+/* Verify svn_dirent_skip_ancestor() against dirent_ancestor_tests: the
+ * returned remainder (or NULL) must match the table exactly. */
+static svn_error_t *
+test_dirent_skip_ancestor(apr_pool_t *pool)
+{
+  const testcase_ancestor_t *t;
+
+  for (t = dirent_ancestor_tests;
+       t < dirent_ancestor_tests + COUNT_OF(dirent_ancestor_tests);
+       t++)
+    {
+      const char* retval;
+
+      retval = svn_dirent_skip_ancestor(t->path1, t->path2);
+      /* NULL-ness must agree, and non-NULL strings must compare equal. */
+      if ((t->result == NULL)
+          ? (retval != NULL)
+          : (retval == NULL || strcmp(t->result, retval) != 0))
+        return svn_error_createf(
+                     SVN_ERR_TEST_FAILED, NULL,
+                     "svn_dirent_skip_ancestor (%s, %s) returned %s instead of %s",
+                     t->path1, t->path2, retval, t->result);
+    }
+  return SVN_NO_ERROR;
+}
+
+/* Verify svn_relpath_skip_ancestor() against relpath_ancestor_tests: the
+ * returned remainder (or NULL) must match the table exactly. */
+static svn_error_t *
+test_relpath_skip_ancestor(apr_pool_t *pool)
+{
+  const testcase_ancestor_t *t;
+
+  for (t = relpath_ancestor_tests;
+       t < relpath_ancestor_tests + COUNT_OF(relpath_ancestor_tests);
+       t++)
+    {
+      const char* retval;
+
+      retval = svn_relpath_skip_ancestor(t->path1, t->path2);
+      /* NULL-ness must agree, and non-NULL strings must compare equal. */
+      if ((t->result == NULL)
+          ? (retval != NULL)
+          : (retval == NULL || strcmp(t->result, retval) != 0))
+        return svn_error_createf(
+                     SVN_ERR_TEST_FAILED, NULL,
+                     "svn_relpath_skip_ancestor (%s, %s) returned %s instead of %s",
+                     t->path1, t->path2, retval, t->result);
+    }
+  return SVN_NO_ERROR;
+}
+
+/* Verify svn_uri_skip_ancestor() against uri_ancestor_tests: the returned
+ * remainder (or NULL) must match the table exactly. */
+static svn_error_t *
+test_uri_skip_ancestor(apr_pool_t *pool)
+{
+  const testcase_ancestor_t *t;
+
+  for (t = uri_ancestor_tests;
+       t < uri_ancestor_tests + COUNT_OF(uri_ancestor_tests);
+       t++)
+    {
+      const char* retval;
+
+      retval = svn_uri_skip_ancestor(t->path1, t->path2, pool);
+      /* NULL-ness must agree, and non-NULL strings must compare equal. */
+      if ((t->result == NULL)
+          ? (retval != NULL)
+          : (retval == NULL || strcmp(t->result, retval) != 0))
+        return svn_error_createf(
+                     SVN_ERR_TEST_FAILED, NULL,
+                     "svn_uri_skip_ancestor (%s, %s) returned %s instead of %s",
+                     t->path1, t->path2, retval, t->result);
+    }
+  return SVN_NO_ERROR;
+}
+
+/* Paths to test and the expected result, for get_longest_ancestor tests. */
+typedef struct testcase_get_longest_ancestor_t {
+  const char *path1;  /* first input path */
+  const char *path2;  /* second input path */
+  const char *result; /* expected longest common ancestor */
+} testcase_get_longest_ancestor_t;
+
+/* Verify svn_dirent_get_longest_ancestor().  Each pair is checked in both
+ * argument orders, since the longest common ancestor is symmetric. */
+static svn_error_t *
+test_dirent_get_longest_ancestor(apr_pool_t *pool)
+{
+  const testcase_get_longest_ancestor_t *t;
+  static const testcase_get_longest_ancestor_t tests[] = {
+    { "/foo",            "/foo/bar",        "/foo"},
+    { "/foo/bar",        "foo/bar",         SVN_EMPTY_PATH},
+    { "/",               "/foo",            "/"},
+    { SVN_EMPTY_PATH,    "foo",             SVN_EMPTY_PATH},
+    { SVN_EMPTY_PATH,    ".bar",            SVN_EMPTY_PATH},
+    { "/.bar",           "/",               "/"},
+    { "foo/bar",         "foo",             "foo"},
+    { "/foo/bar",        "/foo",            "/foo"},
+    { "/rif",            "/raf",            "/"},
+    { "foo",             "bar",             SVN_EMPTY_PATH},
+    { "foo",             "foo/bar",         "foo"},
+    { "foo.",            "foo./.bar",       "foo."},
+    { SVN_EMPTY_PATH,    SVN_EMPTY_PATH,    SVN_EMPTY_PATH},
+    { "/",               "/",               "/"},
+    { "X:foo",           "Y:foo",           SVN_EMPTY_PATH},
+    { "X:/folder1",      "Y:/folder2",      SVN_EMPTY_PATH},
+#ifdef SVN_USE_DOS_PATHS
+    { "X:/",             "X:/",             "X:/"},
+    { "X:/foo/bar/A/D/H/psi", "X:/foo/bar/A/B", "X:/foo/bar/A" },
+    { "X:/foo/bar/boo",  "X:/foo/bar/baz/boz", "X:/foo/bar"},
+    { "X:foo/bar",       "X:foo/bar/boo",   "X:foo/bar"},
+    { "//srv/shr",       "//srv/shr/fld",   "//srv/shr" },
+    { "//srv/shr/fld",   "//srv/shr",       "//srv/shr" },
+    { "//srv/shr/fld",   "//srv2/shr/fld",  SVN_EMPTY_PATH },
+    { "X:/foo",          "X:/",             "X:/"},
+    { "X:/folder1",      "X:/folder2",      "X:/"},
+    { "X:/",             "X:/foo",          "X:/"},
+    { "X:",              "X:foo",           "X:"},
+    { "X:",              "X:/",             SVN_EMPTY_PATH},
+    { "X:foo",           "X:bar",           "X:"},
+#else /* !SVN_USE_DOS_PATHS */
+    { "X:/foo",          "X:",              "X:"},
+    { "X:/folder1",      "X:/folder2",      "X:"},
+    { "X:",              "X:foo",           SVN_EMPTY_PATH},
+    { "X:foo",           "X:bar",           SVN_EMPTY_PATH},
+#endif /* SVN_USE_DOS_PATHS */
+  };
+
+  for (t = tests; t < tests + COUNT_OF(tests); t++)
+    {
+      const char *retval;
+
+      retval = svn_dirent_get_longest_ancestor(t->path1, t->path2, pool);
+
+      if (strcmp(t->result, retval))
+        return svn_error_createf
+          (SVN_ERR_TEST_FAILED, NULL,
+           "svn_dirent_get_longest_ancestor (%s, %s) returned %s instead of %s",
+           t->path1, t->path2, retval, t->result);
+
+      /* changing the order of the paths should return the same results */
+      retval = svn_dirent_get_longest_ancestor(t->path2, t->path1, pool);
+
+      if (strcmp(t->result, retval))
+        return svn_error_createf
+          (SVN_ERR_TEST_FAILED, NULL,
+           "svn_dirent_get_longest_ancestor (%s, %s) returned %s instead of %s",
+           t->path2, t->path1, retval, t->result);
+    }
+  return SVN_NO_ERROR;
+}
+
+/* Verify svn_relpath_get_longest_ancestor().  Each pair is checked in
+ * both argument orders, since the result is symmetric.  URL-looking
+ * strings are plain relpaths here, so e.g. "http:" can be a common
+ * ancestor component.  (A few table rows are duplicated; harmless.) */
+static svn_error_t *
+test_relpath_get_longest_ancestor(apr_pool_t *pool)
+{
+  const testcase_get_longest_ancestor_t *t;
+  static const testcase_get_longest_ancestor_t tests[] = {
+    { "foo",            "foo/bar",         "foo"},
+    { "foo/bar",        "foo/bar",         "foo/bar"},
+    { "",               "foo",             ""},
+    { "",               "foo",             ""},
+    { "",               ".bar",            ""},
+    { ".bar",           "",                ""},
+    { "foo/bar",        "foo",             "foo"},
+    { "foo/bar",        "foo",             "foo"},
+    { "rif",            "raf",             ""},
+    { "foo",            "bar",             ""},
+    { "foo",            "foo/bar",         "foo"},
+    { "foo.",           "foo./.bar",       "foo."},
+    { "",               "",                ""},
+    { "http:/test",     "http:/test",      "http:/test"},
+    { "http:/test",     "http:/taste",     "http:"},
+    { "http:/test",     "http:/test/foo",  "http:/test"},
+    { "http:/test",     "file:/test/foo",  ""},
+    { "http:/test",     "http:/testF",     "http:"},
+    { "file:/A/C",      "file:/B/D",       "file:"},
+    { "file:/A/C",      "file:/A/D",       "file:/A"},
+    { "X:/foo",         "X:",              "X:"},
+    { "X:/folder1",     "X:/folder2",      "X:"},
+    { "X:",             "X:foo",           ""},
+    { "X:foo",          "X:bar",           ""},
+  };
+
+  for (t = tests; t < tests + COUNT_OF(tests); t++)
+    {
+      const char *retval;
+
+      retval = svn_relpath_get_longest_ancestor(t->path1, t->path2, pool);
+
+      if (strcmp(t->result, retval))
+        return svn_error_createf
+          (SVN_ERR_TEST_FAILED, NULL,
+           "svn_relpath_get_longest_ancestor (%s, %s) returned "
+           "%s instead of %s",
+           t->path1, t->path2, retval, t->result);
+
+      /* changing the order of the paths should return the same results */
+      retval = svn_relpath_get_longest_ancestor(t->path2, t->path1, pool);
+
+      if (strcmp(t->result, retval))
+        return svn_error_createf
+          (SVN_ERR_TEST_FAILED, NULL,
+           "svn_relpath_get_longest_ancestor (%s, %s) returned "
+           "%s instead of %s",
+           t->path2, t->path1, retval, t->result);
+    }
+  return SVN_NO_ERROR;
+}
+
+/* Verify svn_uri_get_longest_ancestor().  Each pair is checked in both
+ * argument orders, since the result is symmetric.  URIs with different
+ * schemes or hosts have no common ancestor (SVN_EMPTY_PATH). */
+static svn_error_t *
+test_uri_get_longest_ancestor(apr_pool_t *pool)
+{
+  const testcase_get_longest_ancestor_t *t;
+  static const testcase_get_longest_ancestor_t tests[] = {
+    { "http://test",    "http://test",     "http://test"},
+    { "http://test",    "http://taste",    SVN_EMPTY_PATH},
+    { "http://test",    "http://test/foo", "http://test"},
+    { "http://test",    "file://test/foo", SVN_EMPTY_PATH},
+    { "http://test",    "http://testf",    SVN_EMPTY_PATH},
+    { "http://",        "http://test",     SVN_EMPTY_PATH},
+    { "file:///A/C",    "file:///B/D",     SVN_EMPTY_PATH},
+    { "file:///A/C",    "file:///A/D",     "file:///A"},
+  };
+
+  for (t = tests; t < tests + COUNT_OF(tests); t++)
+    {
+      const char *retval;
+
+      retval = svn_uri_get_longest_ancestor(t->path1, t->path2, pool);
+
+      if (strcmp(t->result, retval))
+        return svn_error_createf
+          (SVN_ERR_TEST_FAILED, NULL,
+           "svn_uri_get_longest_ancestor (%s, %s) returned %s instead of %s",
+           t->path1, t->path2, retval, t->result);
+
+      /* changing the order of the paths should return the same results */
+      retval = svn_uri_get_longest_ancestor(t->path2, t->path1, pool);
+
+      if (strcmp(t->result, retval))
+        return svn_error_createf
+          (SVN_ERR_TEST_FAILED, NULL,
+           "svn_uri_get_longest_ancestor (%s, %s) returned %s instead of %s",
+           t->path2, t->path1, retval, t->result);
+    }
+  return SVN_NO_ERROR;
+}
+
+/* Verify svn_dirent_is_child() over the full cross product of paths[]:
+ * remainders[i][j] holds the expected remainder of paths[j] below
+ * paths[i], or 0 where paths[j] is not a child of paths[i]. */
+static svn_error_t *
+test_dirent_is_child(apr_pool_t *pool)
+{
+  int i, j;
+
+  static const char * const paths[] = {
+    "/foo/bar",
+    "/foo/bars",
+    "/foo/baz",
+    "/foo/bar/baz",
+    "/flu/blar/blaz",
+    "/foo/bar/baz/bing/boom",
+    SVN_EMPTY_PATH,
+    "foo",
+    ".foo",
+    "/",
+    "foo2",
+#ifdef SVN_USE_DOS_PATHS
+    "//srv",
+    "//srv2",
+    "//srv/shr",
+    "//srv/shr/fld",
+    "H:/foo/bar",
+    "H:/foo/baz",
+    "H:/foo/bar/baz",
+    "H:/flu/blar/blaz",
+    "H:/foo/bar/baz/bing/boom",
+    "H:/",
+    "H:/iota",
+    "H:",
+    "H:foo",
+    "H:foo/baz",
+#endif /* SVN_USE_DOS_PATHS */
+    };
+
+  /* Maximum number of path[] items for all platforms */
+#define MAX_PATHS 32
+
+  static const char * const
+    remainders[COUNT_OF(paths)][MAX_PATHS] = {
+    { 0, 0, 0, "baz", 0, "baz/bing/boom", 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+    { 0, 0, 0, 0, 0, "bing/boom", 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+    { 0, 0, 0, 0, 0, 0, 0, "foo", ".foo", 0, "foo2",
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+    { "foo/bar", "foo/bars", "foo/baz", "foo/bar/baz", "flu/blar/blaz",
+      "foo/bar/baz/bing/boom", 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+#ifdef SVN_USE_DOS_PATHS
+    /* //srv paths */
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, "shr", "shr/fld", 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, 0, "fld", 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+    /* H:/ paths */
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, "baz", 0, "baz/bing/boom", 0, 0, 0, 0, 0 },
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, "bing/boom", 0, 0, 0, 0, 0 },
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, 0, 0, "foo/bar", "foo/baz", "foo/bar/baz", "flu/blar/blaz",
+      "foo/bar/baz/bing/boom", 0, "iota", 0, 0, 0 },
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+    /* H: paths */
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, "foo", "foo/baz" },
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, "baz" },
+    { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+      0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+#endif /* SVN_USE_DOS_PATHS */
+  };
+
+  for (i = 0; i < COUNT_OF(paths); i++)
+    {
+      for (j = 0; j < COUNT_OF(paths); j++)
+        {
+          const char *remainder;
+
+          remainder = svn_dirent_is_child(paths[i], paths[j], pool);
+
+          if (((remainder) && (! remainders[i][j]))
+              || ((! remainder) && (remainders[i][j]))
+              || (remainder && strcmp(remainder, remainders[i][j])))
+            return svn_error_createf
+              (SVN_ERR_TEST_FAILED, NULL,
+               "svn_dirent_is_child (%s, %s) returned '%s' instead of '%s'",
+               paths[i], paths[j],
+               remainder ? remainder : "(null)",
+               remainders[i][j] ? remainders[i][j] : "(null)" );
+        }
+    }
+
+/* Undefine the macro actually defined above; the previous code tried to
+   undef the non-existent NUM_TEST_PATHS and leaked MAX_PATHS. */
+#undef MAX_PATHS
+  return SVN_NO_ERROR;
+}
+
+/* Verify svn_dirent_get_absolute().  Expected results are templates:
+ * '%' expands to the current working directory, and on DOS paths '@'
+ * expands to the CWD on drive C: and '$' to the current drive letter. */
+static svn_error_t *
+test_dirent_get_absolute(apr_pool_t *pool)
+{
+  int i;
+  const char *curdir;
+  char buf[8192];
+#ifdef SVN_USE_DOS_PATHS
+  const char *curdironc;
+  char curdrive[3] = "C:";
+#endif /* SVN_USE_DOS_PATHS */
+
+  struct {
+    const char *path;
+    const char *result;
+  } tests[] = {
+    /* '%' will be replaced by the current working dir. */
+    { "abc", "%/abc" },
+    { SVN_EMPTY_PATH, "%" },
+#ifdef SVN_USE_DOS_PATHS
+    /* '@' will be replaced by the current working dir on C:\. */
+    /* '$' will be replaced by the current drive */
+    { "C:/", "C:/" },
+    { "C:/abc", "C:/abc" },
+    { "C:abc", "@/abc" },
+    { "C:", "@" },
+    { "/", "$/" },
+    { "/x/abc", "$/x/abc" },
+    { "c:/", "C:/" },
+    { "c:/AbC", "C:/AbC" },
+    { "c:abc", "@/abc" },
+    /* svn_dirent_get_absolute will check existence of this UNC shares on the
+       test machine, so we can't really test this.
+    { "//srv/shr",      "//srv/shr" },
+    { "//srv/shr/fld",  "//srv/shr" },
+    { "//srv/shr/fld/subfld", "//srv/shr/fld" }, */
+#else  /* !SVN_USE_DOS_PATHS */
+    { "/abc", "/abc" },
+    { "/x/abc", "/x/abc" },
+    { "X:", "%/X:" },
+    { "X:abc", "%/X:abc" },
+#endif /* SVN_USE_DOS_PATHS */
+  };
+
+  if (! getcwd(buf, sizeof(buf)))
+    return svn_error_create(SVN_ERR_BASE, NULL, "getcwd() failed");
+
+  curdir = svn_dirent_internal_style(buf, pool);
+
+#ifdef SVN_USE_DOS_PATHS
+  if (! getdcwd(3, buf, sizeof(buf))) /* 3 stands for drive C: */
+    return svn_error_create(SVN_ERR_BASE, NULL, "getdcwd() failed");
+
+  curdironc = svn_dirent_internal_style(buf, pool);
+  curdrive[0] = curdir[0];
+#endif /* SVN_USE_DOS_PATHS */
+
+  for (i = 0 ; i < COUNT_OF(tests) ; i++ )
+    {
+      const char *path = tests[i].path;
+      const char *expect = tests[i].result;
+      const char *expect_abs, *result;
+
+      /* Expand the '%'/'@'/'$' template markers before comparing. */
+      expect_abs = expect;
+      if (*expect == '%')
+        expect_abs = apr_pstrcat(pool, curdir, expect + 1, SVN_VA_NULL);
+#ifdef SVN_USE_DOS_PATHS
+      if (*expect == '@')
+        expect_abs = apr_pstrcat(pool, curdironc, expect + 1, SVN_VA_NULL);
+
+      if (*expect == '$')
+        expect_abs = apr_pstrcat(pool, curdrive, expect + 1, SVN_VA_NULL);
+
+      /* Remove double '/' when CWD was the root dir (E.g. C:/) */
+      expect_abs = svn_dirent_canonicalize(expect_abs, pool);
+#endif /* SVN_USE_DOS_PATHS */
+
+      SVN_ERR(svn_dirent_get_absolute(&result, path, pool));
+      if (strcmp(result, expect_abs))
+        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                 "svn_dirent_get_absolute(\"%s\") returned "
+                                 "\"%s\". expected \"%s\"",
+                                 path, result, expect_abs);
+    }
+
+  return SVN_NO_ERROR;
+}
+
+#ifdef WIN32
+/* Windows-only: re-run test_dirent_get_absolute() after chdir'ing to a
+ * directory on C: addressed with a lower-case drive letter, to exercise
+ * drive-letter normalization.  Restores the original CWD afterwards. */
+static svn_error_t *
+test_dirent_get_absolute_from_lc_drive(apr_pool_t *pool)
+{
+  char current_dir[1024];
+  char current_dir_on_C[1024];
+  char *dir_on_c;
+  svn_error_t *err;
+  apr_hash_t *dirents;
+  apr_hash_index_t *hi;
+  const char *some_dir_on_C = NULL;
+
+  if (! getcwd(current_dir, sizeof(current_dir)))
+    return svn_error_create(SVN_ERR_BASE, NULL, "getcwd() failed");
+
+  /* 3 stands for drive C: */
+  if (! getdcwd(3, current_dir_on_C, sizeof(current_dir_on_C)))
+    return svn_error_create(SVN_ERR_BASE, NULL, "getdcwd() failed");
+
+  SVN_ERR(svn_io_get_dirents3(&dirents, "C:\\", TRUE, pool, pool));
+
+  /* We need a directory on 'C:\' to switch to lower case and back.
+     We use the first directory we can find that is not the CWD and
+     where we can chdir to */
+
+  for (hi = apr_hash_first(pool, dirents); hi; hi = apr_hash_next(hi))
+    {
+      const char *dir = apr_hash_this_key(hi);
+      svn_io_dirent2_t *de = apr_hash_this_val(hi);
+
+      if (de->kind == svn_node_dir &&
+          strcmp(dir, current_dir_on_C))
+        {
+          dir = svn_dirent_join("C:/", dir, pool);
+          /* chdir() returns 0 on success. */
+          if (!chdir(dir))
+            {
+              chdir(current_dir_on_C); /* Switch back to old CWD */
+              some_dir_on_C = dir;
+              break;
+            }
+        }
+    }
+
+  if (!some_dir_on_C)
+    return svn_error_create(SVN_ERR_BASE, NULL,
+                            "No usable test directory found in C:\\");
+
+  /* Use the test path, but now with a lower case driveletter */
+  dir_on_c = apr_pstrdup(pool, some_dir_on_C);
+  dir_on_c[0] = (char)tolower(dir_on_c[0]);
+
+  chdir(dir_on_c);
+
+  err = test_dirent_get_absolute(pool);
+
+  /* Change back to original directory for next tests */
+  chdir("C:\\"); /* Switch to upper case */
+  chdir(current_dir_on_C); /* Switch cwd on C: */
+  chdir(current_dir); /* Switch back to original cwd */
+  return err;
+}
+#endif
+
+/* Verify svn_dirent_condense_targets(): for each NULL-terminated set of
+ * input dirents, check the computed common ancestor (where the table
+ * specifies one) and each condensed relative target.  On Windows, cases
+ * referencing drive D: are skipped if no such drive exists. */
+static svn_error_t *
+test_dirent_condense_targets(apr_pool_t *pool)
+{
+  int i;
+  struct {
+    const char *paths[8];
+    const char *common;
+    const char *results[8]; /* must be same size as paths */
+  } tests[] = {
+    { { "/dir", "/dir/file", NULL }, NULL, { "", "file" } },
+    { { "/dir1", "/dir2", NULL }, NULL, { "dir1", "dir2" } },
+    { { "dir1", "dir2", NULL }, NULL, { "dir1", "dir2" } },
+#ifdef SVN_USE_DOS_PATHS
+    { {"C:/", "C:/zeta", NULL}, "C:/", {"", "zeta"} },
+    { {"C:/dir", "C:/dir/zeta", NULL}, "C:/dir", {"", "zeta"} },
+    { {"C:/dir/omega", "C:/dir/zeta", NULL}, "C:/dir", {"omega", "zeta" } },
+    { {"C:/dir", "D:/dir", NULL}, "", {"C:/dir", "D:/dir"} },
+    { {"C:A", "C:dir/b", NULL}, NULL, {"A", "dir/b"} },
+#else
+    { { "/dir", "/dir/file", NULL }, "/dir", { "", "file" } },
+    { { "/dir1", "/dir2", NULL }, "/", { "dir1", "dir2" } },
+#endif
+  };
+
+  for (i = 0; i < COUNT_OF(tests); i++)
+    {
+      int j;
+      const char* common;
+      apr_array_header_t *hdr = apr_array_make(pool, 8, sizeof(const char*));
+      apr_array_header_t *condensed;
+      svn_boolean_t skip = FALSE;
+
+      for (j = 0; j < COUNT_OF(tests[i].paths); j++)
+        {
+          if (tests[i].paths[j] != NULL)
+            {
+              APR_ARRAY_PUSH(hdr, const char*) = tests[i].paths[j];
+#ifdef SVN_USE_DOS_PATHS
+              /* For tests that are referencing a D: drive, specifically test
+                 if such a drive exists on the system. If not, skip the test
+                 (svn_dirent_condense_targets will fail, because
+                 apr_filepath_merge will produce an APR_EBADPATH error). */
+              if (strncmp(tests[i].paths[j], "D:", 2) == 0
+                  && GetDriveType("D:\\") == DRIVE_NO_ROOT_DIR)
+                {
+                  /* There is no D: drive, skip this. */
+                  skip = TRUE;
+                  break;
+                }
+#endif
+            }
+          else
+            break;
+        }
+
+      if (skip)
+        continue;
+
+      SVN_ERR(svn_dirent_condense_targets(&common, &condensed, hdr,
+                                          FALSE, pool, pool));
+
+      if (tests[i].common != NULL && strcmp(common, tests[i].common))
+        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                 "svn_dirent_condense_targets returned common "
+                                 "\"%s\". expected \"%s\"",
+                                 common, tests[i].common);
+
+      for (j = 0; j < COUNT_OF(tests[i].paths); j++)
+        {
+          if (tests[i].paths[j] == NULL || tests[i].results[j] == NULL)
+            break;
+
+          /* The old message read 'returned first"%s"' regardless of which
+             element mismatched; report the element index instead. */
+          if (strcmp(APR_ARRAY_IDX(condensed, j, const char*),
+                     tests[i].results[j]))
+            return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                     "svn_dirent_condense_targets returned "
+                                     "\"%s\" for target %d. expected \"%s\"",
+                                     APR_ARRAY_IDX(condensed, j, const char*),
+                                     j,
+                                     tests[i].results[j]);
+        }
+    }
+
+
+  return SVN_NO_ERROR;
+}
+
+/* Verify svn_uri_condense_targets(): for each NULL-terminated set of
+ * input URIs, check the computed common URI (where the table specifies
+ * one) and each condensed, URI-decoded relative target. */
+static svn_error_t *
+test_uri_condense_targets(apr_pool_t *pool)
+{
+  int i;
+  struct {
+    const char *paths[8];
+    const char *common;
+    const char *results[8]; /* must be same size as paths */
+  } tests[] = {
+    /* { { url1, url2, url3 },
+         common_url,
+         { relpath1, relpath2, relpath3 } }
+    */
+    { { "sc://s/A", "sc://s/B", "sc://s" },
+      "sc://s",
+      { "A", "B", "" } },
+    { { "sc://S/A", "sc://S/B", "sc://S" },
+      "sc://s",
+      { "A", "B", "" } },
+    { { "sc://A/A", "sc://B/B", "sc://s" },
+      "",
+      { "sc://a/A", "sc://b/B", "sc://s" } },
+    { { "sc://A/A", "sc://A/a/B", "sc://a/Q" },
+      "sc://a",
+      { "A", "a/B", "Q"} },
+    { { "sc://server/foo%20bar", "sc://server/baz", "sc://server/blarg" },
+      "sc://server",
+      { "foo bar", "baz", "blarg"} },
+  };
+
+  for (i = 0; i < COUNT_OF(tests); i++)
+    {
+      int j;
+      const char* common;
+      apr_array_header_t *hdr = apr_array_make(pool, 8, sizeof(const char*));
+      apr_array_header_t *condensed;
+
+      for (j = 0; j < COUNT_OF(tests[i].paths); j++)
+        {
+          if (tests[i].paths[j] != NULL)
+            APR_ARRAY_PUSH(hdr, const char*) = tests[i].paths[j];
+          else
+            break;
+        }
+
+      SVN_ERR(svn_uri_condense_targets(&common, &condensed, hdr,
+                                       FALSE, pool, pool));
+
+      if (tests[i].common != NULL && strcmp(common, tests[i].common))
+        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                 "svn_uri_condense_targets returned common "
+                                 "\"%s\". expected \"%s\"",
+                                 common, tests[i].common);
+
+      for (j = 0; j < COUNT_OF(tests[i].paths); j++)
+        {
+          if (tests[i].paths[j] == NULL || tests[i].results[j] == NULL)
+            break;
+
+          /* The old message read 'returned first"%s"' regardless of which
+             element mismatched; report the element index instead. */
+          if (strcmp(APR_ARRAY_IDX(condensed, j, const char*),
+                     tests[i].results[j]))
+            return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                     "svn_uri_condense_targets returned "
+                                     "\"%s\" for target %d. expected \"%s\"",
+                                     APR_ARRAY_IDX(condensed, j, const char*),
+                                     j,
+                                     tests[i].results[j]);
+        }
+    }
+
+
+  return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_dirent_local_style(apr_pool_t *pool)
+{
+ struct {
+ const char *path;
+ const char *result;
+ } tests[] = {
+ { "", "." },
+ { ".", "." },
+#ifdef SVN_USE_DOS_PATHS
+ { "A:/", "A:\\" },
+ { "A:/file", "A:\\file" },
+ { "dir/file", "dir\\file" },
+ { "/", "\\" },
+ { "//server/share/dir", "\\\\server\\share\\dir" },
+ { "//server/sh re/dir", "\\\\server\\sh re\\dir" },
+#else
+ { "a:/file", "a:/file" },
+ { "dir/file", "dir/file" },
+ { "/", "/" },
+#endif
+ };
+ int i;
+
+ for (i = 0; i < COUNT_OF(tests); i++)
+ {
+ const char *local = svn_dirent_local_style(tests[i].path, pool);
+
+ if (strcmp(local, tests[i].result))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_dirent_local_style(\"%s\") returned "
+ "\"%s\" expected \"%s\"",
+ tests[i].path, local, tests[i].result);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_dirent_internal_style(apr_pool_t *pool)
+{
+ struct {
+ const char *path;
+ const char *result;
+ } tests[] = {
+ { "", "" },
+ { ".", "" },
+ { "/", "/" },
+ { "file", "file" },
+ { "dir/file", "dir/file" },
+ { "dir/file/./.", "dir/file" },
+#ifdef SVN_USE_DOS_PATHS
+ { "A:\\", "A:/" },
+ { "A:\\file", "A:/file" },
+ { "A:file", "A:file" },
+ { "a:\\", "A:/" },
+ { "a:/", "A:/" },
+ { "a:\\file", "A:/file" },
+ { "a:file", "A:file" },
+ { "dir\\file", "dir/file" },
+ { "\\\\srv\\shr\\dir", "//srv/shr/dir" },
+ { "\\\\srv\\shr\\", "//srv/shr" },
+ { "\\\\srv\\s r\\", "//srv/s r" },
+ { "//srv/shr", "//srv/shr" },
+ { "//srv/s r", "//srv/s r" },
+ { "//srv/s r", "//srv/s r" },
+#else
+ { "a:/", "a:" }, /* Wrong but expected for svn_path_*() */
+ { "a:/file", "a:/file" },
+ { "dir/file", "dir/file" },
+ { "/", "/" },
+ { "//server/share/dir", "/server/share/dir" },
+#endif
+ };
+ int i;
+
+ for (i = 0; i < COUNT_OF(tests); i++)
+ {
+ const char *internal = svn_dirent_internal_style(tests[i].path, pool);
+
+ if (strcmp(internal, tests[i].result))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_dirent_internal_style(\"%s\") returned "
+ "\"%s\" expected \"%s\"",
+ tests[i].path, internal, tests[i].result);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_relpath_internal_style(apr_pool_t *pool)
+{
+ struct {
+ const char *path;
+ const char *result;
+ } tests[] = {
+ { "", "" },
+ { ".", "" },
+ { "/", "" },
+ { "file", "file" },
+ { "dir/file", "dir/file" },
+ { "a:/", "a:" },
+ { "a:/file", "a:/file" },
+ { "dir/file", "dir/file" },
+ { "//server/share/dir", "server/share/dir" },
+ { "a/./.", "a" },
+ };
+ int i;
+
+ for (i = 0; i < COUNT_OF(tests); i++)
+ {
+ const char *internal = svn_relpath__internal_style(tests[i].path, pool);
+
+ if (strcmp(internal, tests[i].result))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_relpath__internal_style(\"%s\") returned "
+ "\"%s\" expected \"%s\"",
+ tests[i].path, internal, tests[i].result);
+ }
+
+ return SVN_NO_ERROR;
+}
+
/* Test svn_uri_get_dirent_from_file_url(): convert file:// URLs to local
   dirents and check both the value and its canonicality.  Expected
   results differ between DOS-path and POSIX-path builds. */
static svn_error_t *
test_dirent_from_file_url(apr_pool_t *pool)
{
  struct {
    const char *url;     /* input file:// URL */
    const char *result;  /* expected dirent */
  } tests[] = {
    { "file://", "/" },
    { "file:///dir", "/dir" },
    { "file:///dir/path", "/dir/path" },
    { "file://localhost", "/" },
    { "file://localhost/dir", "/dir" },
    { "file://localhost/dir/path", "/dir/path" },
#ifdef SVN_USE_DOS_PATHS
    { "file://server/share", "//server/share" },
    { "file://server/share/dir", "//server/share/dir" },
    { "file:///A:", "A:/" },
    { "file:///A:/dir", "A:/dir" },
    { "file:///A:dir", "A:dir" },
    /* %7C is '|', an old-style drive separator; %5C is '\'. */
    { "file:///A%7C", "A:/" },
    { "file:///A%7C/dir", "A:/dir" },
    { "file:///A%7Cdir", "A:dir" },
    { "file:///A%7C%5Cdir", "A:/dir" },
    { "file:///A%7C%5Cdir%5Cfile", "A:/dir\\file" },
    { "file:///A:%5Cdir", "A:/dir" },
    { "file:///A:%5Cdir%5Cfile", "A:/dir\\file" },
    { "file://localhost/A:%5Cfile","A:/file"},
    { "file://localhost/A:file", "A:file"}
#else
    /* On POSIX builds drive letters have no special meaning. */
    { "file:///A:", "/A:" },
    { "file:///A:/dir", "/A:/dir" },
    { "file:///A:dir", "/A:dir" },
    { "file:///A%7C", "/A|" },
    { "file:///A%7C/dir", "/A|/dir" },
    { "file:///A%7Cdir", "/A|dir" },
    { "file:///A%7C%5Cdir", "/A|\\dir" },
    { "file:///A%7C%5Cdir%5Cfile", "/A|\\dir\\file" },
    { "file:///A:%5Cdir", "/A:\\dir" },
    { "file:///A:%5Cdir%5Cfile", "/A:\\dir\\file" },
    { "file://localhost/A:%5Cfile","/A:\\file" },
    { "file://localhost/A:file", "/A:file" }
#endif
  };
  int i;

  for (i = 0; i < COUNT_OF(tests); i++)
    {
      const char *result;

      SVN_ERR(svn_uri_get_dirent_from_file_url(&result, tests[i].url, pool));

      /* The conversion must produce the expected dirent... */
      if (strcmp(result, tests[i].result))
        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
                                 "svn_uri_get_dirent_from_file_url(\"%s\") "
                                 "returned \"%s\" expected \"%s\"",
                                 tests[i].url, result, tests[i].result);
      /* ...and that dirent must itself be in canonical form. */
      if (!svn_dirent_is_canonical(result, pool))
        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
                                 "svn_uri_get_dirent_from_file_url(\"%s\") "
                                 "returned \"%s\", which is not canonical.",
                                 tests[i].url, result);
    }

  return SVN_NO_ERROR;
}
+
+static svn_error_t *
+test_dirent_from_file_url_errors(apr_pool_t *pool)
+{
+ const char *bad_file_urls[] = {
+ /* error if scheme is not "file" */
+ "http://localhost/dir",
+ "file+ssh://localhost/dir",
+#ifndef SVN_USE_DOS_PATHS
+ "file://localhostwrongname/dir", /* error if host name not "localhost" */
+#endif
+ };
+ int i;
+
+ for (i = 0; i < COUNT_OF(bad_file_urls); i++)
+ {
+ const char *result;
+ svn_error_t *err;
+
+ err = svn_uri_get_dirent_from_file_url(&result, bad_file_urls[i],
+ pool);
+
+ if (err == NULL)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_uri_get_dirent_from_file_url(\"%s\") "
+ "didn't return an error.",
+ bad_file_urls[i]);
+ svn_error_clear(err);
+ }
+
+ return SVN_NO_ERROR;
+}
+
/* Test svn_uri_get_file_url_from_dirent(): convert absolute dirents to
   file:// URLs, checking the value and that the URL is canonical. */
static svn_error_t *
test_file_url_from_dirent(apr_pool_t *pool)
{
  struct {
    const char *dirent;  /* absolute input dirent */
    const char *result;  /* expected file:// URL */
  } tests[] = {
#ifdef SVN_USE_DOS_PATHS
    { "C:/file", "file:///C:/file" },
    { "C:/", "file:///C:" },
    { "C:/File#$", "file:///C:/File%23$" },
    /* We can't check these as svn_dirent_get_absolute() won't work
       on shares that don't exist */
    /*{ "//server/share", "file://server/share" },
    { "//server/share/file", "file://server/share/file" },*/
#else
    { "/a/b", "file:///a/b" },
    { "/a", "file:///a" },
    { "/", "file://" },
    /* '#' must be percent-encoded in the URL. */
    { "/File#$", "file:///File%23$" },
#endif
  };
  int i;

  for (i = 0; i < COUNT_OF(tests); i++)
    {
      const char *result;

      SVN_ERR(svn_uri_get_file_url_from_dirent(&result, tests[i].dirent,
                                               pool));

      if (strcmp(result, tests[i].result))
        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
                                 "svn_uri_get_file_url_from_dirent(\"%s\") "
                                 "returned \"%s\" expected \"%s\"",
                                 tests[i].dirent, result, tests[i].result);

      /* The produced URL must itself be canonical. */
      SVN_TEST_ASSERT(svn_uri_is_canonical(result, pool));
    }

  return SVN_NO_ERROR;
}
+
/* Test svn_dirent_is_under_root(): whether PATH, interpreted relative to
   BASE_PATH, stays inside BASE_PATH (no escape via "..", absolute paths,
   etc.).  RESULT is the expected joined dirent and is only meaningful
   when UNDER_ROOT is TRUE. */
static svn_error_t *
test_dirent_is_under_root(apr_pool_t *pool)
{
  struct {
    const char *base_path;      /* the confining root */
    const char *path;           /* candidate relative path */
    svn_boolean_t under_root;   /* expected verdict */
    const char *result;         /* expected joined path (if under root) */
  } tests[] = {
    { "", "", TRUE, ""},
    { "", "r", TRUE, "r"},
    { "", "r/..", TRUE, ""},
    { "", "r/../..", FALSE},
    { "", "r/../../b", FALSE},
    { "", "..", FALSE},
    { "", "../r", FALSE},
    { "", "/", FALSE},
    { "", "/r", FALSE},
    { "", "/..", FALSE},
    { "b", "", TRUE, "b"},
    { "b", "r", TRUE, "b/r"},
    { "b", "r/..", TRUE, "b"},
    { "b", "r/../..", FALSE},
    { "b", "r/../../b", FALSE},
    { "b", "..", FALSE},
    { "b", "../r", FALSE},
    { "b", "../b", FALSE},
    { "b", "/", FALSE},
    { "b", "/r", FALSE},
    { "b", "/..", FALSE},
    { "/", "", TRUE, "/"},
    { "/", "r", TRUE, "/r"},
    { "/", "r/..", TRUE, "/"},
    { "/", "r/../..", FALSE},
    { "/", "r/../../b", FALSE},
    { "/", "..", FALSE},
    { "/", "../r", FALSE},
    { "/", "/", FALSE},
    { "/", "/r", FALSE},
    { "/", "/..", FALSE},
    { "/b", "", TRUE, "/b"},
    { "/b", "r", TRUE, "/b/r"},
    { "/b", "r/..", TRUE, "/b"},
    { "/b", "r/../..", FALSE},
    { "/b", "r/../../b", FALSE},
    { "/b", "..", FALSE},
    { "/b", "../r", FALSE},
    { "/b", "../b", FALSE},
    { "/b", "/", FALSE},
    { "/b", "/r", FALSE},
    { "/b", "/..", FALSE},
    { "/", "/base", FALSE},
    { "/aa", "/aa/bb", FALSE},
    { "/base", "/base2", FALSE},
    { "/b", "bb", TRUE, "/b/bb"},
    { "/b", "../bb", FALSE},
    { "/b", "r/./bb", TRUE, "/b/r/bb"},
    { "/b", "r/../bb", TRUE, "/b/bb"},
    { "/b", "r/../../bb", FALSE},
    { "/b", "./bb", TRUE, "/b/bb"},
    { "/b", ".", TRUE, "/b"},
    { "/b", "", TRUE, "/b"},
    { "b", "b", TRUE, "b/b"},
#ifdef SVN_USE_DOS_PATHS
    /* On DOS-path builds '\' is a separator, so ".." components written
       with backslashes are recognized and can escape the root. */
    { "C:/file", "a\\d", TRUE, "C:/file/a/d"},
    { "C:/file", "aa\\..\\d", TRUE, "C:/file/d"},
    { "C:/file", "aa\\..\\..\\d", FALSE},
#else
    /* On POSIX builds '\' is an ordinary filename character. */
    { "C:/file", "a\\d", TRUE, "C:/file/a\\d"},
    { "C:/file", "aa\\..\\d", TRUE, "C:/file/aa\\..\\d"},
    { "C:/file", "aa\\..\\..\\d", TRUE, "C:/file/aa\\..\\..\\d"},
#endif /* SVN_USE_DOS_PATHS */
  };
  int i;

  for (i = 0; i < COUNT_OF(tests); i++)
    {
      svn_boolean_t under_root;
      const char *result;

      SVN_ERR(svn_dirent_is_under_root(&under_root,
                                       &result,
                                       tests[i].base_path,
                                       tests[i].path,
                                       pool));

      if (under_root != tests[i].under_root)
        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
                                 "svn_dirent_is_under_root(..\"%s\", \"%s\"..)"
                                 " returned %s expected %s.",
                                 tests[i].base_path,
                                 tests[i].path,
                                 under_root ? "TRUE" : "FALSE",
                                 tests[i].under_root ? "TRUE" : "FALSE");

      /* Only compare the joined result when the path was accepted. */
      if (under_root
          && strcmp(result, tests[i].result) != 0)
        return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
                                 "svn_dirent_is_under_root(..\"%s\", \"%s\"..)"
                                 " found \"%s\" expected \"%s\".",
                                 tests[i].base_path,
                                 tests[i].path,
                                 result,
                                 tests[i].result);
    }

  return SVN_NO_ERROR;
}
+
+static svn_error_t *
+test_fspath_is_canonical(apr_pool_t *pool)
+{
+ const testcase_is_canonical_t *t;
+ static const testcase_is_canonical_t tests[] = {
+ { "", FALSE },
+ { ".", FALSE },
+ { "/", TRUE },
+ { "/a", TRUE },
+ { "/a/", FALSE },
+ { "//a", FALSE },
+ { "/a/b", TRUE },
+ { "/a//b", FALSE },
+ { "\\", FALSE },
+ { "\\a", FALSE },
+ { "/\\a", TRUE }, /* a single component */
+ { "/a\\", TRUE }, /* a single component */
+ { "/a\\b", TRUE }, /* a single component */
+ };
+
+ for (t = tests; t < tests + COUNT_OF(tests); t++)
+ {
+ svn_boolean_t canonical
+ = svn_fspath__is_canonical(t->path);
+
+ if (t->canonical != canonical)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_fspath__is_canonical(\"%s\") returned "
+ "\"%s\" expected \"%s\"",
+ t->path,
+ canonical ? "TRUE" : "FALSE",
+ t->canonical ? "TRUE" : "FALSE");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_fspath_join(apr_pool_t *pool)
+{
+ int i;
+
+ static const char * const joins[][3] = {
+ { "/", "", "/" },
+ { "/", "d", "/d" },
+ { "/", "d/e", "/d/e" },
+ { "/abc", "", "/abc" },
+ { "/abc", "d", "/abc/d" },
+ { "/abc", "d/e", "/abc/d/e" },
+ };
+
+ for (i = 0; i < COUNT_OF(joins); i++ )
+ {
+ char *result = svn_fspath__join(joins[i][0], joins[i][1], pool);
+
+ SVN_TEST_STRING_ASSERT(result, joins[i][2]);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_fspath_skip_ancestor(apr_pool_t *pool)
+{
+ int i, j;
+
+ static const char * const paths[] = {
+ "/",
+ "/f",
+ "/foo",
+ "/foo/bar",
+ "/foo/bars",
+ "/foo/bar/baz",
+ };
+
+ static const char * const
+ remainders[COUNT_OF(paths)][COUNT_OF(paths)] = {
+ { "", "f", "foo", "foo/bar", "foo/bars", "foo/bar/baz" },
+ { 0, "", 0, 0, 0, 0 },
+ { 0, 0, "", "bar", "bars", "bar/baz" },
+ { 0, 0, 0, "", 0, "baz" },
+ { 0, 0, 0, 0, "", 0 },
+ { 0, 0, 0, 0, 0, "" },
+ };
+
+ for (i = 0; i < COUNT_OF(paths); i++)
+ {
+ for (j = 0; j < COUNT_OF(paths); j++)
+ {
+ const char *remainder
+ = svn_fspath__skip_ancestor(paths[i], paths[j]);
+
+ SVN_TEST_STRING_ASSERT(remainder, remainders[i][j]);
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_fspath_dirname_basename_split(apr_pool_t *pool)
+{
+ int i;
+
+ static const struct {
+ const char *path;
+ const char *dirname;
+ const char *basename;
+ } tests[] = {
+ { "/", "/", "" },
+ { "/a", "/", "a" },
+ { "/abc", "/", "abc" },
+ { "/x/abc", "/x", "abc" },
+ { "/x/y/abc", "/x/y", "abc" },
+ };
+
+ for (i = 0; i < COUNT_OF(tests); i++)
+ {
+ const char *result_dirname, *result_basename;
+
+ result_dirname = svn_fspath__dirname(tests[i].path, pool);
+ SVN_TEST_STRING_ASSERT(result_dirname, tests[i].dirname);
+
+ result_basename = svn_fspath__basename(tests[i].path, pool);
+ SVN_TEST_STRING_ASSERT(result_basename, tests[i].basename);
+
+ svn_fspath__split(&result_dirname, &result_basename, tests[i].path,
+ pool);
+ SVN_TEST_STRING_ASSERT(result_dirname, tests[i].dirname);
+ SVN_TEST_STRING_ASSERT(result_basename, tests[i].basename);
+ }
+
+ return SVN_NO_ERROR;
+}
+
/* Test svn_fspath__get_longest_ancestor(); each pair is checked in both
   argument orders, which must yield the same ancestor. */
static svn_error_t *
test_fspath_get_longest_ancestor(apr_pool_t *pool)
{
  const testcase_get_longest_ancestor_t *t;

  /* Paths to test and their expected results. Same as in
   * test_relpath_get_longest_ancestor() but with '/' prefix.
   * NOTE(review): a few rows appear twice (e.g. "/", "/foo" and
   * "/foo/bar", "/foo"); presumably kept to mirror the relpath table --
   * confirm before deduplicating. */
  static const testcase_get_longest_ancestor_t tests[] = {
    { "/foo",           "/foo/bar",      "/foo" },
    { "/foo/bar",       "/foo/bar",      "/foo/bar" },
    { "/",              "/foo",          "/" },
    { "/",              "/foo",          "/" },
    { "/",              "/.bar",         "/" },
    { "/.bar",          "/",             "/" },
    { "/foo/bar",       "/foo",          "/foo" },
    { "/foo/bar",       "/foo",          "/foo" },
    { "/rif",           "/raf",          "/" },
    { "/foo",           "/bar",          "/" },
    { "/foo",           "/foo/bar",      "/foo" },
    { "/foo.",          "/foo./.bar",    "/foo." },
    { "/",              "/",             "/" },
    { "/http:/test",    "/http:/test",   "/http:/test" },
    { "/http:/test",    "/http:/taste",  "/http:" },
    { "/http:/test",    "/http:/test/foo", "/http:/test" },
    { "/http:/test",    "/file:/test/foo", "/" },
    { "/http:/test",    "/http:/testF",    "/http:" },
    { "/file:/A/C",     "/file:/B/D",    "/file:" },
    { "/file:/A/C",     "/file:/A/D",    "/file:/A" },
    { "/X:/foo",        "/X:",           "/X:" },
    { "/X:/folder1",    "/X:/folder2",   "/X:" },
    { "/X:",            "/X:foo",        "/" },
    { "/X:foo",         "/X:bar",        "/" },
  };

  for (t = tests; t < tests + COUNT_OF(tests); t++)
    {
      const char *result;

      result = svn_fspath__get_longest_ancestor(t->path1, t->path2, pool);
      SVN_TEST_STRING_ASSERT(t->result, result);

      /* changing the order of the paths should return the same result */
      result = svn_fspath__get_longest_ancestor(t->path2, t->path1, pool);
      SVN_TEST_STRING_ASSERT(t->result, result);
    }
  return SVN_NO_ERROR;
}
+
/* One DNS-identity matching case: does PATTERN (a certificate identity,
   possibly containing a wildcard) match HOSTNAME? */
struct cert_match_dns_test {
  const char *pattern;     /* certificate DNS identity; NULL terminates a table */
  const char *hostname;    /* peer hostname being verified */
  svn_boolean_t expected;  /* expected match verdict */
};
+
+static svn_error_t *
+run_cert_match_dns_tests(struct cert_match_dns_test *tests, apr_pool_t *pool)
+{
+ struct cert_match_dns_test *ct;
+ apr_pool_t *iterpool = svn_pool_create(pool);
+
+ for (ct = tests; ct->pattern; ct++)
+ {
+ svn_boolean_t result;
+ svn_string_t *pattern, *hostname;
+
+ svn_pool_clear(iterpool);
+
+ pattern = svn_string_create(ct->pattern, iterpool);
+ hostname = svn_string_create(ct->hostname, iterpool);
+
+ result = svn_cert__match_dns_identity(pattern, hostname);
+ if (result != ct->expected)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Expected %s but got %s for pattern '%s' on "
+ "hostname '%s'",
+ ct->expected ? "match" : "no match",
+ result ? "match" : "no match",
+ pattern->data, hostname->data);
+
+ }
+
+ svn_pool_destroy(iterpool);
+
+ return SVN_NO_ERROR;
+}
+
/* Main matching table: exact matches, case folding, RFC 6125 s. 6.4.3
   wildcard rules, and trailing-dot handling.  NULL pattern terminates. */
static struct cert_match_dns_test cert_match_dns_tests[] = {
  { "foo.example.com", "foo.example.com", TRUE }, /* exact match */
  { "foo.example.com", "FOO.EXAMPLE.COM", TRUE }, /* case differences */
  { "FOO.EXAMPLE.COM", "foo.example.com", TRUE },
  { "*.example.com", "FoO.ExAmPlE.CoM", TRUE },
  { "*.ExAmPlE.CoM", "foo.example.com", TRUE },
  { "ABCDEFGHIJKLMNOPQRSTUVWXYZ", "abcdefghijklmnopqrstuvwxyz", TRUE },
  { "abcdefghijklmnopqrstuvwxyz", "ABCDEFGHIJKLMNOPQRSTUVWXYZ", TRUE },
  { "foo.example.com", "bar.example.com", FALSE }, /* difference at start */
  { "foo.example.com", "foo.example.net", FALSE }, /* difference at end */
  { "foo.example.com", "foo.example.commercial", FALSE }, /* hostname longer */
  { "foo.example.commercial", "foo.example.com", FALSE }, /* pattern longer */
  { "foo.example.comcom", "foo.example.com", FALSE }, /* repeated suffix */
  { "foo.example.com", "foo.example.comcom", FALSE },
  { "foo.example.com.com", "foo.example.com", FALSE },
  { "foo.example.com", "foo.example.com.com", FALSE },
  { "foofoo.example.com", "foo.example.com", FALSE }, /* repeated prefix */
  { "foo.example.com", "foofoo.example.com", FALSE },
  { "foo.foo.example.com", "foo.example.com", FALSE },
  { "foo.example.com", "foo.foo.example.com", FALSE },
  { "foo.*.example.com", "foo.bar.example.com", FALSE }, /* RFC 6125 s. 6.4.3
                                                            Rule 1 */
  { "*.example.com", "foo.example.com", TRUE }, /* RFC 6125 s. 6.4.3 Rule 2 */
  { "*.example.com", "bar.foo.example.com", FALSE }, /* Rule 2 */
  { "*.example.com", "example.com", FALSE }, /* Rule 2 */
  { "*.example.com", ".example.com", FALSE }, /* RFC doesn't say what to do
                                                 here and a leading period on
                                                 a hostname doesn't make sense
                                                 so we'll just reject this. */
  { "*", "foo.example.com", FALSE }, /* wildcard must be left-most label,
                                        implies that there must be more than
                                        one label. */
  { "*", "example.com", FALSE },
  { "*", "com", FALSE },
  { "*.example.com", "foo.example.net", FALSE }, /* difference in literal text
                                                    with a wildcard. */
  { "*.com", "example.com", TRUE }, /* See Errata ID 3090 for RFC 6125,
                                       probably shouldn't allow this but
                                       we do for now. */
  { "*.", "example.com", FALSE }, /* test some dubious 2 character wildcard
                                     patterns */
  { "*.", "example.", TRUE }, /* This one feels questionable */
  { "*.", "example", FALSE },
  { "*.", ".", FALSE },
  { "a", "a", TRUE }, /* check that single letter exact matches work */
  { "a", "b", FALSE }, /* and single letter not matches shouldn't */
  { "*.*.com", "foo.example.com", FALSE }, /* unsupported wildcards */
  { "*.*.com", "example.com", FALSE },
  { "**.example.com", "foo.example.com", FALSE },
  { "**.example.com", "example.com", FALSE },
  { "f*.example.com", "foo.example.com", FALSE },
  { "f*.example.com", "bar.example.com", FALSE },
  { "*o.example.com", "foo.example.com", FALSE },
  { "*o.example.com", "bar.example.com", FALSE },
  { "f*o.example.com", "foo.example.com", FALSE },
  { "f*o.example.com", "bar.example.com", FALSE },
  { "foo.e*.com", "foo.example.com", FALSE },
  { "foo.*e.com", "foo.example.com", FALSE },
  { "foo.e*e.com", "foo.example.com", FALSE },
  { "foo.example.com", "foo.example.com.", TRUE }, /* trailing dot */
  { "*.example.com", "foo.example.com.", TRUE },
  { "foo", "foo.", TRUE },
  { "foo.example.com.", "foo.example.com", FALSE },
  { "*.example.com.", "foo.example.com", FALSE },
  { "foo.", "foo", FALSE },
  { "foo.example.com", "foo.example.com..", FALSE },
  { "*.example.com", "foo.example.com..", FALSE },
  { "foo", "foo..", FALSE },
  { "foo.example.com..", "foo.example.com", FALSE },
  { "*.example.com..", "foo.example.com", FALSE },
  { "foo..", "foo", FALSE },
  { NULL }
};
+
/* Driver: run the main DNS identity matching table above. */
static svn_error_t *
test_cert_match_dns_identity(apr_pool_t *pool)
{
  return run_cert_match_dns_tests(cert_match_dns_tests, pool);
}
+
/* This test table implements results that should happen if we supported
 * RFC 6125 s. 6.4.3 Rule 3. We don't so it's expected to fail for now.
 * (Registered below via SVN_TEST_XFAIL2.) */
static struct cert_match_dns_test rule3_tests[] = {
  { "baz*.example.net", "baz1.example.net", TRUE },
  { "*baz.example.net", "foobaz.example.net", TRUE },
  { "b*z.example.net", "buuz.example.net", TRUE },
  { "b*z.example.net", "bz.example.net", FALSE }, /* presume wildcard can't
                                                     match nothing */
  { "baz*.example.net", "baz.example.net", FALSE },
  { "*baz.example.net", "baz.example.net", FALSE },
  { "b*z.example.net", "buuzuuz.example.net", TRUE }, /* presume wildcard
                                                         should be greedy */
  { NULL }
};
+
/* Driver for the (expected-to-fail) Rule 3 partial-wildcard table. */
static svn_error_t *
test_rule3(apr_pool_t *pool)
{
  return run_cert_match_dns_tests(rule3_tests, pool);
}
+
+
+/* The test table. */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_dirent_is_root,
+ "test svn_dirent_is_root"),
+ SVN_TEST_PASS2(test_uri_is_root,
+ "test svn_uri_is_root"),
+ SVN_TEST_PASS2(test_dirent_is_absolute,
+ "test svn_dirent_is_absolute"),
+ SVN_TEST_PASS2(test_dirent_join,
+ "test svn_dirent_join(_many)"),
+ SVN_TEST_PASS2(test_relpath_join,
+ "test svn_relpath_join"),
+ SVN_TEST_PASS2(test_dirent_basename,
+ "test svn_dirent_basename"),
+ SVN_TEST_PASS2(test_relpath_basename,
+ "test svn_relpath_basename"),
+ SVN_TEST_PASS2(test_uri_basename,
+ "test svn_uri_basename"),
+ SVN_TEST_PASS2(test_relpath_dirname,
+ "test svn_relpath_dirname"),
+ SVN_TEST_PASS2(test_dirent_dirname,
+ "test svn_dirent_dirname"),
+ SVN_TEST_PASS2(test_uri_dirname,
+ "test svn_dirent_dirname"),
+ SVN_TEST_PASS2(test_dirent_canonicalize,
+ "test svn_dirent_canonicalize"),
+ SVN_TEST_PASS2(test_relpath_canonicalize,
+ "test svn_relpath_canonicalize"),
+ SVN_TEST_PASS2(test_uri_canonicalize,
+ "test svn_uri_canonicalize"),
+ SVN_TEST_PASS2(test_dirent_is_canonical,
+ "test svn_dirent_is_canonical"),
+ SVN_TEST_PASS2(test_relpath_is_canonical,
+ "test svn_relpath_is_canonical"),
+ SVN_TEST_PASS2(test_uri_is_canonical,
+ "test svn_uri_is_canonical"),
+ SVN_TEST_PASS2(test_dirent_split,
+ "test svn_dirent_split"),
+ SVN_TEST_PASS2(test_relpath_split,
+ "test svn_relpath_split"),
+ SVN_TEST_PASS2(test_uri_split,
+ "test svn_uri_split"),
+ SVN_TEST_PASS2(test_dirent_get_longest_ancestor,
+ "test svn_dirent_get_longest_ancestor"),
+ SVN_TEST_PASS2(test_relpath_get_longest_ancestor,
+ "test svn_relpath_get_longest_ancestor"),
+ SVN_TEST_PASS2(test_uri_get_longest_ancestor,
+ "test svn_uri_get_longest_ancestor"),
+ SVN_TEST_PASS2(test_dirent_is_child,
+ "test svn_dirent_is_child"),
+ SVN_TEST_PASS2(test_dirent_is_ancestor,
+ "test svn_dirent_is_ancestor"),
+ SVN_TEST_PASS2(test_uri_is_ancestor,
+ "test svn_uri_is_ancestor"),
+ SVN_TEST_PASS2(test_dirent_skip_ancestor,
+ "test svn_dirent_skip_ancestor"),
+ SVN_TEST_PASS2(test_relpath_skip_ancestor,
+ "test svn_relpath_skip_ancestor"),
+ SVN_TEST_PASS2(test_uri_skip_ancestor,
+ "test svn_uri_skip_ancestor"),
+ SVN_TEST_PASS2(test_dirent_get_absolute,
+ "test svn_dirent_get_absolute"),
+#ifdef WIN32
+ SVN_TEST_PASS2(test_dirent_get_absolute_from_lc_drive,
+ "test svn_dirent_get_absolute (needs recent apr)"),
+#endif
+ SVN_TEST_PASS2(test_dirent_condense_targets,
+ "test svn_dirent_condense_targets"),
+ SVN_TEST_PASS2(test_uri_condense_targets,
+ "test svn_uri_condense_targets"),
+ SVN_TEST_PASS2(test_dirent_local_style,
+ "test svn_dirent_local_style"),
+ SVN_TEST_PASS2(test_dirent_internal_style,
+ "test svn_dirent_internal_style"),
+ SVN_TEST_PASS2(test_relpath_internal_style,
+ "test svn_relpath_internal_style"),
+ SVN_TEST_PASS2(test_dirent_from_file_url,
+ "test svn_uri_get_dirent_from_file_url"),
+ SVN_TEST_PASS2(test_dirent_from_file_url_errors,
+ "test svn_uri_get_dirent_from_file_url errors"),
+ SVN_TEST_PASS2(test_file_url_from_dirent,
+ "test svn_uri_get_file_url_from_dirent"),
+ SVN_TEST_PASS2(test_dirent_is_under_root,
+ "test svn_dirent_is_under_root"),
+ SVN_TEST_PASS2(test_fspath_is_canonical,
+ "test svn_fspath__is_canonical"),
+ SVN_TEST_PASS2(test_fspath_join,
+ "test svn_fspath__join"),
+ SVN_TEST_PASS2(test_fspath_skip_ancestor,
+ "test svn_fspath__skip_ancestor"),
+ SVN_TEST_PASS2(test_fspath_dirname_basename_split,
+ "test svn_fspath__dirname/basename/split"),
+ SVN_TEST_PASS2(test_fspath_get_longest_ancestor,
+ "test svn_fspath__get_longest_ancestor"),
+ SVN_TEST_PASS2(test_cert_match_dns_identity,
+ "test svn_cert__match_dns_identity"),
+ SVN_TEST_XFAIL2(test_rule3,
+ "test match with RFC 6125 s. 6.4.3 Rule 3"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/error-code-test.c b/subversion/tests/libsvn_subr/error-code-test.c
new file mode 100644
index 0000000..8dada36
--- /dev/null
+++ b/subversion/tests/libsvn_subr/error-code-test.c
@@ -0,0 +1,87 @@
+/*
+ * error-code-test.c -- tests for error codes
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <apr_general.h>
+
+#include "svn_error.h"
+
/* Duplicate of the same typedef in libsvn_subr/error.c.
   Must stay in sync: SVN_ERROR_BUILD_ARRAY (below) expands
   svn_error_codes.h into an error_table of this element type. */
typedef struct err_defn {
  svn_errno_t errcode; /* 160004 */
  const char *errname; /* SVN_ERR_FS_CORRUPT */
  const char *errdesc; /* default message */
} err_defn;
+
+/* To understand what is going on here, read svn_error_codes.h. */
+#define SVN_ERROR_BUILD_ARRAY
+#include "svn_error_codes.h"
+
+#include "../svn_test.h"
+
+#define NUM_ERRORS (sizeof(error_table)/sizeof(error_table[0]))
+
+static svn_error_t *
+check_error_codes_unique(apr_pool_t *pool)
+{
+ int i;
+ struct err_defn e = error_table[0];
+
+ /* Ensure error codes are strictly monotonically increasing. */
+ for (i = 1; i < NUM_ERRORS; i++)
+ {
+ struct err_defn e2 = error_table[i];
+
+ /* Don't fail the test if there is an odd number of errors.
+ * The error array's sentinel has an error code of zero. */
+ if (i == NUM_ERRORS - 1 && e2.errcode == 0)
+ break;
+
+ /* SVN_ERR_WC_NOT_DIRECTORY is an alias for SVN_ERR_WC_NOT_WORKING_COPY
+ * and shares the same error code. */
+ if (e.errcode != SVN_ERR_WC_NOT_DIRECTORY &&
+ e.errcode >= e2.errcode)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Error 0x%x (%s) is not < 0x%x (%s)\n",
+ e.errcode, e.errdesc, e2.errcode, e2.errdesc);
+ e = e2;
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
/* The test table. */

/* NOTE(review): presumably the harness's cap on concurrent test
   threads -- confirm against svn_test_main. */
static int max_threads = 1;

static struct svn_test_descriptor_t test_funcs[] =
  {
    SVN_TEST_NULL,
    SVN_TEST_PASS2(check_error_codes_unique,
                   "check that error codes are unique"),
    SVN_TEST_NULL
  };

SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/error-test.c b/subversion/tests/libsvn_subr/error-test.c
new file mode 100644
index 0000000..ea3291b
--- /dev/null
+++ b/subversion/tests/libsvn_subr/error-test.c
@@ -0,0 +1,246 @@
+/*
+ * error-test.c -- test the error functions
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <apr_general.h>
+
+#include "svn_error_codes.h"
+#include "svn_error.h"
+#include "private/svn_error_private.h"
+
+#include "../svn_test.h"
+
+static svn_error_t *
+test_error_root_cause(apr_pool_t *pool)
+{
+ apr_status_t secondary_err_codes[] = { SVN_ERR_STREAM_UNRECOGNIZED_DATA,
+ SVN_ERR_STREAM_MALFORMED_DATA };
+ apr_status_t root_cause_err_code = SVN_ERR_STREAM_UNEXPECTED_EOF;
+ int i;
+ svn_error_t *err, *root_err;
+
+ /* Nest several errors. */
+ err = svn_error_create(root_cause_err_code, NULL, "root cause");
+ for (i = 0; i < 2; i++)
+ err = svn_error_create(secondary_err_codes[i], err, NULL);
+
+ /* Verify that the error is detected at the proper location in the
+ error chain. */
+ root_err = svn_error_root_cause(err);
+ if (root_err == NULL)
+ {
+ svn_error_clear(err);
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_error_root_cause failed to locate any "
+ "root error in the chain");
+ }
+
+ for (i = 0; i < 2; i++)
+ {
+ if (root_err->apr_err == secondary_err_codes[i])
+ {
+ svn_error_clear(err);
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_error_root_cause returned the "
+ "wrong error from the chain");
+ }
+ }
+
+ if (root_err->apr_err != root_cause_err_code)
+ {
+ svn_error_clear(err);
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_error_root_cause failed to locate the "
+ "correct error from the chain");
+ }
+
+ svn_error_clear(err);
+ return SVN_NO_ERROR;
+}
+
/* Test svn_error_purge_tracing(): SVN_NO_ERROR passes through, tracing
   links are stripped from mixed chains, and a chain consisting only of
   tracing links triggers a malfunction (assertion) error. */
static svn_error_t *
test_error_purge_tracing(apr_pool_t *pool)
{
  svn_error_t *err, *err2, *child;

  if (SVN_NO_ERROR != svn_error_purge_tracing(SVN_NO_ERROR))
    return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
                            "svn_error_purge_tracing() didn't return "
                            "SVN_NO_ERROR after being passed a "
                            "SVN_NO_ERROR.");

  /* Build a chain of two real errors, each wrapped in a tracing link. */
  err = svn_error_trace(svn_error_create(SVN_ERR_BASE, NULL, "root error"));
#ifdef SVN_ERR__TRACING
  if (! svn_error__is_tracing_link(err))
    {
      return svn_error_create(SVN_ERR_TEST_FAILED, err,
                              "The top error is not a tracing link:");
    }
#endif
  err = svn_error_trace(svn_error_create(SVN_ERR_BASE, err, "other error"));
#ifdef SVN_ERR__TRACING
  if (! svn_error__is_tracing_link(err))
    {
      return svn_error_create(SVN_ERR_TEST_FAILED, err,
                              "The top error is not a tracing link:");
    }
#endif

  /* After purging, no link in the chain may be a tracing link. */
  err2 = svn_error_purge_tracing(err);
  for (child = err2; child; child = child->child)
    if (svn_error__is_tracing_link(child))
      {
        return svn_error_create(SVN_ERR_TEST_FAILED, err,
                                "Tracing link found after purging the "
                                "following chain:");
      }
  svn_error_clear(err);

#ifdef SVN_ERR__TRACING
  /* Make an error chain containing only tracing errors and check that
     svn_error_purge_tracing() asserts on it. */
  {
    svn_error_t err_copy;
    svn_error_malfunction_handler_t orig_handler;

    /* For this test, use a random error status. */
    err = svn_error_create(SVN_ERR_BAD_UUID, NULL, "");
    err = svn_error_trace(err);
    /* Aliasing the message makes the inner error a tracing link too. */
    err->child->message = err->message;

    /* Register a malfunction handler that doesn't call abort() to
       check that a new error chain with an assertion error is
       returned. */
    orig_handler =
      svn_error_set_malfunction_handler(svn_error_raise_on_malfunction);
    err2 = svn_error_purge_tracing(err);
    svn_error_set_malfunction_handler(orig_handler);

    /* Copy by value before any clearing frees the underlying pool. */
    err_copy = *err;

    if (err2)
      {
        /* If err2 does share the same pool as err, then make a copy
           of err2 and err3 before err is cleared. */
        svn_error_t err2_copy = *err2;
        svn_error_t *err3 = err2;
        svn_error_t err3_copy;

        /* err3: first non-tracing link of the returned chain. */
        while (err3 && svn_error__is_tracing_link(err3))
          err3 = err3->child;
        if (err3)
          err3_copy = *err3;
        else
          err3_copy.apr_err = APR_SUCCESS;

        svn_error_clear(err);

        /* The returned error is only safe to clear if this assertion
           holds, otherwise it has the same pool as the original
           error. */
        SVN_TEST_ASSERT(err_copy.pool != err2_copy.pool);

        svn_error_clear(err2);

        SVN_TEST_ASSERT(err3);

        /* The returned chain must carry the dedicated assertion code. */
        SVN_TEST_ASSERT(SVN_ERROR_IN_CATEGORY(err2_copy.apr_err,
                                              SVN_ERR_MALFUNC_CATEGORY_START));
        SVN_TEST_ASSERT(err3_copy.apr_err == err2_copy.apr_err);
        SVN_TEST_ASSERT(
          SVN_ERR_ASSERTION_ONLY_TRACING_LINKS == err3_copy.apr_err);
      }
    else
      {
        svn_error_clear(err);
        SVN_TEST_ASSERT(err2);
      }
  }
#endif

  return SVN_NO_ERROR;
}
+
/* Test svn_error_symbolic_name(): known codes map to their symbolic
   names, unknown/unused codes map to NULL, and a few special cases
   (aliases, category starts, 0 and SVN_WARNING) behave as documented. */
static svn_error_t *
test_error_symbolic_name(apr_pool_t *pool)
{
  struct {
    svn_errno_t errcode;   /* input code */
    const char *errname;   /* expected name, or NULL for "unknown" */
  } errors[] = {
    { SVN_ERR_BAD_CONTAINING_POOL, "SVN_ERR_BAD_CONTAINING_POOL" },
    { SVN_ERR_BAD_FILENAME, "SVN_ERR_BAD_FILENAME" },
    { SVN_ERR_XML_ATTRIB_NOT_FOUND, "SVN_ERR_XML_ATTRIB_NOT_FOUND" },
    { SVN_ERR_ENTRY_NOT_FOUND, "SVN_ERR_ENTRY_NOT_FOUND" },
    { SVN_ERR_ENTRY_CATEGORY_START + 1, NULL }, /* unused slot */
    { SVN_ERR_ENTRY_EXISTS, "SVN_ERR_ENTRY_EXISTS" },
    { SVN_ERR_ASSERTION_ONLY_TRACING_LINKS, "SVN_ERR_ASSERTION_ONLY_TRACING_LINKS" },
    { SVN_ERR_FS_CORRUPT, "SVN_ERR_FS_CORRUPT" },
    /* The following two error codes can return either of their names
       as the string.  For simplicity, test what the current implementation
       returns; but if it starts returning "SVN_ERR_WC_NOT_DIRECTORY",
       that's fine (and permitted by the API contract). */
    { SVN_ERR_WC_NOT_DIRECTORY, "SVN_ERR_WC_NOT_WORKING_COPY" },
    { SVN_ERR_WC_NOT_WORKING_COPY, "SVN_ERR_WC_NOT_WORKING_COPY" },
    /* Test an implementation detail. */
    { SVN_ERR_BAD_CATEGORY_START, "SVN_ERR_BAD_CONTAINING_POOL" },
#ifdef SVN_DEBUG
    { ENOENT, "ENOENT" },
    { APR_ENOPOOL, "APR_ENOPOOL" },
#endif
    /* Test non-errors. */
    { -1, NULL },
    { SVN_ERR_WC_CATEGORY_START - 1, NULL },
    /* Whitebox-test exceptional cases. */
    { SVN_WARNING, "SVN_WARNING" },
    { 0, "SVN_NO_ERROR" }
    /* No sentinel. */
  };
  int i;

  for (i = 0; i < sizeof(errors) / sizeof(errors[0]); i++)
    SVN_TEST_STRING_ASSERT(svn_error_symbolic_name(errors[i].errcode),
                           errors[i].errname);

  return SVN_NO_ERROR;
}
+
+
/* The test table. */

/* NOTE(review): presumably the harness's cap on concurrent test
   threads -- confirm against svn_test_main. */
static int max_threads = 1;

static struct svn_test_descriptor_t test_funcs[] =
  {
    SVN_TEST_NULL,
    SVN_TEST_PASS2(test_error_root_cause,
                   "test svn_error_root_cause"),
    SVN_TEST_PASS2(test_error_purge_tracing,
                   "test svn_error_purge_tracing"),
    SVN_TEST_PASS2(test_error_symbolic_name,
                   "test svn_error_symbolic_name"),
    SVN_TEST_NULL
  };

SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/hashdump-test.c b/subversion/tests/libsvn_subr/hashdump-test.c
new file mode 100644
index 0000000..eec9a00
--- /dev/null
+++ b/subversion/tests/libsvn_subr/hashdump-test.c
@@ -0,0 +1,258 @@
+/*
+ * hashdump-test.c : testing the reading/writing of hashes
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+
+
+#include <stdio.h> /* for sprintf() */
+#include <stdlib.h>
+#include <apr_pools.h>
+#include <apr_hash.h>
+#include <apr_file_io.h>
+
+#include "../svn_test.h"
+
+#include "svn_types.h"
+#include "svn_string.h"
+#include "svn_error.h"
+#include "svn_hash.h"
+
+
+/* Our own global variables */
+static apr_hash_t *proplist, *new_proplist;
+
+static const char *review =
+"A forthright entrance, yet coquettish on the tongue, its deceptively\n"
+"fruity exterior hides the warm mahagony undercurrent that is the\n"
+"hallmark of Chateau Fraisant-Pitre. Connoisseurs of the region will\n"
+"be pleased to note the familiar, subtle hints of mulberries and\n"
+"carburator fluid. Its confident finish is marred only by a barely\n"
+"detectable suggestion of rancid squid ink.";
+
+
+
+
+static svn_error_t *
+test1(apr_pool_t *pool) /* Build global PROPLIST and dump it to "hashdump.out". */
+{
+  svn_error_t *result;
+  svn_stringbuf_t *key;
+  apr_file_t *f;
+
+  /* Build a hash in memory, and fill it with test data. */
+  proplist = apr_hash_make(pool);
+
+  key = svn_stringbuf_create("color", pool);
+  apr_hash_set(proplist, key->data, key->len,
+               svn_string_create("red", pool));
+
+  key = svn_stringbuf_create("wine review", pool);
+  apr_hash_set(proplist, key->data, key->len,
+               svn_string_create(review, pool));
+
+  key = svn_stringbuf_create("price", pool);
+  apr_hash_set(proplist, key->data, key->len,
+               svn_string_create("US $6.50", pool));
+
+  /* Test overwriting: same key both times, but different values. */
+  key = svn_stringbuf_create("twice-used property name", pool);
+  apr_hash_set(proplist, key->data, key->len,
+               svn_string_create("This is the FIRST value.", pool));
+  apr_hash_set(proplist, key->data, key->len,
+               svn_string_create("This is the SECOND value.", pool));
+
+  /* Dump the hash to a file; fail early if the file can't be opened. */
+  SVN_ERR(svn_io_file_open(&f, "hashdump.out",
+                           (APR_WRITE | APR_CREATE),
+                           APR_OS_DEFAULT, pool));
+
+  result = svn_hash_write2(proplist, svn_stream_from_aprfile(f, pool),
+                           SVN_HASH_TERMINATOR, pool);
+
+  /* Compose any close error with the write result so neither is lost. */
+  return svn_error_compose_create(result,
+                                  svn_io_file_close(f, pool));
+}
+
+
+
+
+static svn_error_t *
+test2(apr_pool_t *pool) /* Read "hashdump.out" back into global NEW_PROPLIST. */
+{
+  svn_error_t *result;
+  apr_file_t *f;
+
+  new_proplist = apr_hash_make(pool);
+
+  /* Fail early if the dump file written by test1() can't be opened. */
+  SVN_ERR(svn_io_file_open(&f, "hashdump.out", APR_READ, APR_OS_DEFAULT, pool));
+
+  result = svn_hash_read2(new_proplist, svn_stream_from_aprfile(f, pool),
+                          SVN_HASH_TERMINATOR, pool);
+
+  result = svn_error_compose_create(result, svn_io_file_close(f, pool));
+
+  apr_file_remove("hashdump.out", pool);
+
+  return result;
+}
+
+
+
+static svn_error_t *
+test3(apr_pool_t *pool)
+{
+ apr_hash_index_t *this;
+ int found_discrepancy = 0;
+
+ /* Build a hash in global variable "proplist", then write to a file. */
+ SVN_ERR(test1(pool));
+
+ /* Read this file back into global variable "new_proplist" */
+ SVN_ERR(test2(pool));
+
+ /* Now let's make sure that proplist and new_proplist contain the
+ same data. */
+
+ /* Loop over our original hash */
+ for (this = apr_hash_first(pool, proplist);
+ this;
+ this = apr_hash_next(this))
+ {
+ const void *key;
+ apr_ssize_t keylen;
+ void *val;
+ svn_string_t *orig_str, *new_str;
+
+ /* Get a key and val. */
+ apr_hash_this(this, &key, &keylen, &val);
+ orig_str = val;
+
+ /* Look up the key in the new hash */
+ new_str = apr_hash_get(new_proplist, key, keylen);
+
+ /* Does the new hash contain the key at all? */
+ if (new_str == NULL)
+ found_discrepancy = 1;
+
+ /* Do the two strings contain identical data? */
+ else if (! svn_string_compare(orig_str, new_str))
+ found_discrepancy = 1;
+ }
+
+
+ if (found_discrepancy)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, 0,
+ "found discrepancy reading back hash table");
+
+ return SVN_NO_ERROR;
+}
+
+/* Return the char data of the svn_string_t stored under KEY in HT,
+static const char *
+hash_gets_stringt(apr_hash_t *ht, const char *key) /* or NULL if KEY is absent. */
+{
+  svn_string_t *str = svn_hash_gets(ht, key);
+  if (str)
+    return str->data;
+  else
+    return NULL;
+}
+
+static svn_error_t *
+read_hash_buffered_test(apr_pool_t *pool)
+{
+ apr_file_t *file;
+ apr_hash_t *ht;
+
+ /* Write hash table to file. */
+ ht = apr_hash_make(pool);
+ svn_hash_sets(ht, "key1", svn_string_create("value1", pool));
+ svn_hash_sets(ht, "key2", svn_string_create("value2", pool));
+ svn_hash_sets(ht, "key3", svn_string_create("value3", pool));
+ svn_hash_sets(ht, "key4", svn_string_create("value4", pool));
+ svn_hash_sets(ht, "key5", svn_string_create("value5", pool));
+ svn_hash_sets(ht, "key6", svn_string_create("value6", pool));
+ svn_hash_sets(ht, "key7", svn_string_create("value7", pool));
+ svn_hash_sets(ht, "key8", svn_string_create("value8", pool));
+
+ SVN_ERR(svn_io_file_open(&file, "hashdump.out",
+ APR_FOPEN_CREATE | APR_FOPEN_WRITE | APR_FOPEN_BUFFERED,
+ APR_OS_DEFAULT, pool));
+
+ SVN_ERR(svn_hash_write2(ht, svn_stream_from_aprfile2(file, FALSE, pool),
+ SVN_HASH_TERMINATOR, pool));
+
+ SVN_ERR(svn_io_file_close(file, pool));
+
+ /* Read hash table using buffered APR file. */
+ ht = apr_hash_make(pool);
+ SVN_ERR(svn_io_file_open(&file, "hashdump.out",
+ APR_FOPEN_READ | APR_FOPEN_BUFFERED,
+ APR_OS_DEFAULT, pool));
+ SVN_ERR(svn_hash_read2(ht, svn_stream_from_aprfile(file, pool),
+ SVN_HASH_TERMINATOR, pool));
+ SVN_ERR(svn_io_file_close(file, pool));
+
+ /* Check result. */
+ SVN_TEST_STRING_ASSERT(hash_gets_stringt(ht, "key1"), "value1");
+ SVN_TEST_STRING_ASSERT(hash_gets_stringt(ht, "key2"), "value2");
+ SVN_TEST_STRING_ASSERT(hash_gets_stringt(ht, "key3"), "value3");
+ SVN_TEST_STRING_ASSERT(hash_gets_stringt(ht, "key4"), "value4");
+ SVN_TEST_STRING_ASSERT(hash_gets_stringt(ht, "key5"), "value5");
+ SVN_TEST_STRING_ASSERT(hash_gets_stringt(ht, "key6"), "value6");
+ SVN_TEST_STRING_ASSERT(hash_gets_stringt(ht, "key7"), "value7");
+ SVN_TEST_STRING_ASSERT(hash_gets_stringt(ht, "key8"), "value8");
+
+ SVN_TEST_ASSERT(apr_hash_count(ht) == 8);
+
+ SVN_ERR(svn_io_remove_file2("hashdump.out", TRUE, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+/*
+ ====================================================================
+ If you add a new test to this file, update this array.
+
+*/
+
+/* An array of all test functions */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test1,
+ "write a hash to a file"),
+ SVN_TEST_PASS2(test2,
+ "read a file into a hash"),
+ SVN_TEST_PASS2(test3,
+ "write hash out, read back in, compare"),
+ SVN_TEST_PASS2(read_hash_buffered_test,
+ "read hash from buffered file"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/io-test.c b/subversion/tests/libsvn_subr/io-test.c
new file mode 100644
index 0000000..f5be05c
--- /dev/null
+++ b/subversion/tests/libsvn_subr/io-test.c
@@ -0,0 +1,1190 @@
+/* io-test.c --- tests for some i/o functions
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdlib.h>
+#include <stdarg.h>
+#include <string.h>
+#include <stdio.h>
+
+#include <apr.h>
+#include <apr_version.h>
+
+#include "svn_pools.h"
+#include "svn_string.h"
+#include "svn_io.h"
+#include "private/svn_skel.h"
+#include "private/svn_dep_compat.h"
+#include "private/svn_io_private.h"
+
+#include "../svn_test.h"
+#include "../svn_test_fs.h"
+
+
+/* Helpers to create the test data directory. */
+
+#define TEST_DIR_PREFIX "io-test-temp"
+
+/* The definition for the test data files. */
+struct test_file_definition_t
+  {
+    /* The name of the test data file. */
+    const char* const name;
+
+    /* The string needs to contain up to 5 bytes, they
+     * are interpreted as:
+     * - first byte
+     * - filler between first and medium byte
+     * - medium byte (the byte in the middle of the file)
+     * - filler between medium and last byte
+     * - last byte.
+     * If the string is shorter than the file length,
+     * the test will fail. */
+    const char* const data;
+
+    /* The size of the file actually to create. */
+    const apr_off_t size;
+
+    /* The created path of the file. Will be filled in
+     * by create_test_file() */
+    char* created_path;
+  };
+
+static struct test_file_definition_t test_file_definitions_template[] =
+ {
+ {"empty", "", 0},
+ {"single_a", "a", 1},
+ {"single_b", "b", 1},
+ {"hundred_a", "aaaaa", 100},
+ {"hundred_b", "bbbbb", 100},
+ {"hundred_b1", "baaaa", 100},
+ {"hundred_b2", "abaaa", 100},
+ {"hundred_b3", "aabaa", 100},
+ {"hundred_b4", "aaaba", 100},
+ {"hundred_b5", "aaaab", 100},
+ {"chunk_minus_one_a", "aaaaa", SVN__STREAM_CHUNK_SIZE - 1},
+ {"chunk_minus_one_b1", "baaaa", SVN__STREAM_CHUNK_SIZE - 1},
+ {"chunk_minus_one_b2", "abaaa", SVN__STREAM_CHUNK_SIZE - 1},
+ {"chunk_minus_one_b3", "aabaa", SVN__STREAM_CHUNK_SIZE - 1},
+ {"chunk_minus_one_b4", "aaaba", SVN__STREAM_CHUNK_SIZE - 1},
+ {"chunk_minus_one_b5", "aaaab", SVN__STREAM_CHUNK_SIZE - 1},
+ {"chunk_a", "aaaaa", SVN__STREAM_CHUNK_SIZE},
+ {"chunk_b1", "baaaa", SVN__STREAM_CHUNK_SIZE},
+ {"chunk_b2", "abaaa", SVN__STREAM_CHUNK_SIZE},
+ {"chunk_b3", "aabaa", SVN__STREAM_CHUNK_SIZE},
+ {"chunk_b4", "aaaba", SVN__STREAM_CHUNK_SIZE},
+ {"chunk_b5", "aaaab", SVN__STREAM_CHUNK_SIZE},
+ {"chunk_plus_one_a", "aaaaa", SVN__STREAM_CHUNK_SIZE + 1},
+ {"chunk_plus_one_b1", "baaaa", SVN__STREAM_CHUNK_SIZE + 1},
+ {"chunk_plus_one_b2", "abaaa", SVN__STREAM_CHUNK_SIZE + 1},
+ {"chunk_plus_one_b3", "aabaa", SVN__STREAM_CHUNK_SIZE + 1},
+ {"chunk_plus_one_b4", "aaaba", SVN__STREAM_CHUNK_SIZE + 1},
+ {"chunk_plus_one_b5", "aaaab", SVN__STREAM_CHUNK_SIZE + 1},
+ {"twochunk_minus_one_a", "aaaaa", SVN__STREAM_CHUNK_SIZE*2 - 1},
+ {"twochunk_minus_one_b1", "baaaa", SVN__STREAM_CHUNK_SIZE*2 - 1},
+ {"twochunk_minus_one_b2", "abaaa", SVN__STREAM_CHUNK_SIZE*2 - 1},
+ {"twochunk_minus_one_b3", "aabaa", SVN__STREAM_CHUNK_SIZE*2 - 1},
+ {"twochunk_minus_one_b4", "aaaba", SVN__STREAM_CHUNK_SIZE*2 - 1},
+ {"twochunk_minus_one_b5", "aaaab", SVN__STREAM_CHUNK_SIZE*2 - 1},
+ {"twochunk_a", "aaaaa", SVN__STREAM_CHUNK_SIZE*2},
+ {"twochunk_b1", "baaaa", SVN__STREAM_CHUNK_SIZE*2},
+ {"twochunk_b2", "abaaa", SVN__STREAM_CHUNK_SIZE*2},
+ {"twochunk_b3", "aabaa", SVN__STREAM_CHUNK_SIZE*2},
+ {"twochunk_b4", "aaaba", SVN__STREAM_CHUNK_SIZE*2},
+ {"twochunk_b5", "aaaab", SVN__STREAM_CHUNK_SIZE*2},
+ {"twochunk_plus_one_a", "aaaaa", SVN__STREAM_CHUNK_SIZE*2 + 1},
+ {"twochunk_plus_one_b1", "baaaa", SVN__STREAM_CHUNK_SIZE*2 + 1},
+ {"twochunk_plus_one_b2", "abaaa", SVN__STREAM_CHUNK_SIZE*2 + 1},
+ {"twochunk_plus_one_b3", "aabaa", SVN__STREAM_CHUNK_SIZE*2 + 1},
+ {"twochunk_plus_one_b4", "aaaba", SVN__STREAM_CHUNK_SIZE*2 + 1},
+ {"twochunk_plus_one_b5", "aaaab", SVN__STREAM_CHUNK_SIZE*2 + 1},
+ {0},
+ };
+
+/* Function to prepare a single test file */
+
+static svn_error_t *
+create_test_file(struct test_file_definition_t* definition,
+ const char *test_dir,
+ apr_pool_t *pool,
+ apr_pool_t *scratch_pool)
+{
+ apr_status_t status = 0;
+ apr_file_t *file_h;
+ apr_off_t midpos = definition->size / 2;
+ svn_error_t *err = NULL;
+ int i;
+
+ if (definition->size < 5)
+ SVN_ERR_ASSERT(strlen(definition->data) >= (apr_size_t)definition->size);
+ else
+ SVN_ERR_ASSERT(strlen(definition->data) >= 5);
+
+
+ definition->created_path = svn_dirent_join(test_dir,
+ definition->name,
+ pool);
+
+ SVN_ERR(svn_io_file_open(&file_h,
+ definition->created_path,
+ (APR_WRITE | APR_CREATE | APR_EXCL | APR_BUFFERED),
+ APR_OS_DEFAULT,
+ scratch_pool));
+
+ for (i=1; i <= definition->size; i += 1)
+ {
+ char c;
+ if (i == 1)
+ c = definition->data[0];
+ else if (i < midpos)
+ c = definition->data[1];
+ else if (i == midpos)
+ c = definition->data[2];
+ else if (i < definition->size)
+ c = definition->data[3];
+ else
+ c = definition->data[4];
+
+ status = apr_file_putc(c, file_h);
+
+ if (status)
+ break;
+ }
+
+ if (status)
+ err = svn_error_wrap_apr(status, "Can't write to file '%s'",
+ definition->name);
+
+ return svn_error_compose_create(err,
+ svn_io_file_close(file_h, scratch_pool));
+}
+
+/* Function to prepare the whole set of on-disk files to be compared. */
+static svn_error_t *
+create_comparison_candidates(struct test_file_definition_t **definitions,
+ const char *testname,
+ apr_pool_t *pool)
+{
+ apr_pool_t *iterpool = svn_pool_create(pool);
+ struct test_file_definition_t *candidate;
+ svn_error_t *err = SVN_NO_ERROR;
+ apr_size_t count = 0;
+ const char *test_dir;
+
+ SVN_ERR(svn_test_make_sandbox_dir(&test_dir, testname, pool));
+
+ for (candidate = test_file_definitions_template;
+ candidate->name != NULL;
+ candidate += 1)
+ count++;
+
+ *definitions = apr_pmemdup(pool, test_file_definitions_template,
+ (count + 1) * sizeof(**definitions));
+ for (candidate = *definitions; candidate->name != NULL; candidate += 1)
+ {
+ svn_pool_clear(iterpool);
+ err = create_test_file(candidate, test_dir, pool, iterpool);
+ if (err)
+ break;
+ }
+
+ svn_pool_destroy(iterpool);
+
+ return err;
+}
+
+
+/* Functions to check the 2-way and 3-way file comparison functions. */
+
+/* Test 2-way file size checking */
+static svn_error_t *
+test_two_file_size_comparison(apr_pool_t *scratch_pool)
+{
+ struct test_file_definition_t *inner, *outer;
+ svn_boolean_t actual;
+ svn_boolean_t expected;
+ svn_error_t *err = SVN_NO_ERROR;
+ svn_error_t *cmp_err;
+ apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+ struct test_file_definition_t *test_file_definitions;
+
+ SVN_ERR(create_comparison_candidates(&test_file_definitions,
+ "test_two_file_size_comparison",
+ scratch_pool));
+
+ for (outer = test_file_definitions; outer->name != NULL; outer += 1)
+ {
+#ifdef SVN_IO_TEST_ALL_PERMUTATIONS
+ inner = test_file_definitions;
+#else
+ inner = outer;
+#endif
+ for (; inner->name != NULL; inner += 1)
+ {
+ svn_pool_clear(iterpool);
+
+ expected = inner->size != outer->size;
+
+ cmp_err = svn_io_filesizes_different_p(&actual,
+ inner->created_path,
+ outer->created_path,
+ iterpool);
+
+ if (cmp_err)
+ {
+ err = svn_error_compose_create(err, cmp_err);
+ }
+ else if (expected != actual)
+ {
+ err = svn_error_compose_create(err,
+ svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "size comparison problem: '%s' and '%s'",
+ inner->created_path,
+ outer->created_path));
+ }
+ }
+ }
+
+ svn_pool_destroy(iterpool);
+ return err;
+}
+
+
+/* Test 2-way file content checking */
+static svn_error_t *
+test_two_file_content_comparison(apr_pool_t *scratch_pool)
+{
+ struct test_file_definition_t *inner, *outer;
+ svn_boolean_t actual;
+ svn_boolean_t expected;
+ svn_error_t *err = SVN_NO_ERROR;
+ svn_error_t *cmp_err;
+ apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+ struct test_file_definition_t *test_file_definitions;
+
+ SVN_ERR(create_comparison_candidates(&test_file_definitions,
+ "test_two_file_content_comparison",
+ scratch_pool));
+
+ for (outer = test_file_definitions; outer->name != NULL; outer += 1)
+ {
+#ifdef SVN_IO_TEST_ALL_PERMUTATIONS
+ inner = test_file_definitions;
+#else
+ inner = outer;
+#endif
+ for (; inner->name != NULL; inner += 1)
+ {
+ svn_pool_clear(iterpool);
+
+ expected = inner->size == outer->size
+ && strcmp(inner->data, outer->data) == 0;
+
+ cmp_err = svn_io_files_contents_same_p(&actual,
+ inner->created_path,
+ outer->created_path,
+ iterpool);
+
+ if (cmp_err)
+ {
+ err = svn_error_compose_create(err, cmp_err);
+ }
+ else
+ {
+ if (expected != actual)
+ err = svn_error_compose_create(err,
+ svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "content comparison problem: '%s' and '%s'",
+ inner->created_path,
+ outer->created_path));
+ }
+ }
+ }
+
+ svn_pool_destroy(iterpool);
+ return err;
+}
+
+
+/* Test 3-way file size checking */
+static svn_error_t *
+test_three_file_size_comparison(apr_pool_t *scratch_pool)
+{
+ struct test_file_definition_t *inner, *middle, *outer;
+ svn_boolean_t actual12, actual23, actual13;
+ svn_boolean_t expected12, expected23, expected13;
+ svn_error_t *err = SVN_NO_ERROR;
+ svn_error_t *cmp_err;
+ apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+ struct test_file_definition_t *test_file_definitions;
+
+ SVN_ERR(create_comparison_candidates(&test_file_definitions,
+ "test_three_file_size_comparison",
+ scratch_pool));
+
+ for (outer = test_file_definitions; outer->name != NULL; outer += 1)
+ {
+#ifdef SVN_IO_TEST_ALL_PERMUTATIONS
+ middle = test_file_definitions;
+#else
+ middle = outer;
+#endif
+ for (; middle->name != NULL; middle += 1)
+ {
+#ifdef SVN_IO_TEST_ALL_PERMUTATIONS
+ inner = test_file_definitions;
+#else
+ inner = middle;
+#endif
+ for (; inner->name != NULL; inner += 1)
+ {
+ svn_pool_clear(iterpool);
+
+ expected12 = inner->size != middle->size;
+ expected23 = middle->size != outer->size;
+ expected13 = inner->size != outer->size;
+
+ cmp_err = svn_io_filesizes_three_different_p(&actual12,
+ &actual23,
+ &actual13,
+ inner->created_path,
+ middle->created_path,
+ outer->created_path,
+ iterpool);
+
+ if (cmp_err)
+ {
+ err = svn_error_compose_create(err, cmp_err);
+ }
+ else
+ {
+ if (expected12 != actual12)
+ err = svn_error_compose_create(err,
+ svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "size comparison problem: '%s' and '%s'",
+ inner->created_path,
+ middle->created_path));
+
+ if (expected23 != actual23)
+ err = svn_error_compose_create(err,
+ svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "size comparison problem: '%s' and '%s'",
+ middle->created_path,
+ outer->created_path));
+
+ if (expected13 != actual13)
+ err = svn_error_compose_create(err,
+ svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "size comparison problem: '%s' and '%s'",
+ inner->created_path,
+ outer->created_path));
+ }
+ }
+ }
+ }
+
+ svn_pool_destroy(iterpool);
+
+ return err;
+}
+
+
+/* Test 3-way file content checking */
+static svn_error_t *
+test_three_file_content_comparison(apr_pool_t *scratch_pool)
+{
+  struct test_file_definition_t *inner, *middle, *outer;
+  svn_boolean_t actual12, actual23, actual13;
+  svn_boolean_t expected12, expected23, expected13;
+  svn_error_t *err = SVN_NO_ERROR;
+  svn_error_t *cmp_err;
+  apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+  struct test_file_definition_t *test_file_definitions;
+
+  SVN_ERR(create_comparison_candidates(&test_file_definitions,
+                                       "test_three_file_content_comparison",
+                                       scratch_pool));
+
+  for (outer = test_file_definitions; outer->name != NULL; outer += 1)
+    {
+#ifdef SVN_IO_TEST_ALL_PERMUTATIONS
+      middle = test_file_definitions;
+#else
+      middle = outer;
+#endif
+      for (; middle->name != NULL; middle += 1)
+        {
+#ifdef SVN_IO_TEST_ALL_PERMUTATIONS
+          inner = test_file_definitions;
+#else
+          inner = middle;
+#endif
+          for (; inner->name != NULL; inner += 1)
+            {
+              svn_pool_clear(iterpool);
+
+              expected12 = outer->size == middle->size
+                             && strcmp(outer->data, middle->data) == 0;
+              expected23 = middle->size == inner->size
+                             && strcmp(middle->data, inner->data) == 0;
+              expected13 = outer->size == inner->size
+                             && strcmp(outer->data, inner->data) == 0;
+
+              /* Note the argument order: 12 = outer/middle, 23 = middle/inner,
+                 13 = outer/inner.  The error messages below must match. */
+              cmp_err = svn_io_files_contents_three_same_p(&actual12,
+                                                           &actual23,
+                                                           &actual13,
+                                                           outer->created_path,
+                                                           middle->created_path,
+                                                           inner->created_path,
+                                                           iterpool);
+
+              if (cmp_err)
+                {
+                  err = svn_error_compose_create(err, cmp_err);
+                }
+              else
+                {
+                  if (expected12 != actual12)
+                    err = svn_error_compose_create(err,
+                             svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                "content comparison problem: '%s' and '%s'",
+                                outer->created_path,
+                                middle->created_path));
+
+                  if (expected23 != actual23)
+                    err = svn_error_compose_create(err,
+                             svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                "content comparison problem: '%s' and '%s'",
+                                middle->created_path,
+                                inner->created_path));
+
+                  if (expected13 != actual13)
+                    err = svn_error_compose_create(err,
+                             svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                "content comparison problem: '%s' and '%s'",
+                                outer->created_path,
+                                inner->created_path));
+                }
+            }
+        }
+    }
+  /* Destroy the iterpool (test_three_file_size_comparison does; this didn't). */
+  svn_pool_destroy(iterpool);
+  return err;
+}
+/* svn_io_read_length_line() must return an error, not loop forever, when the buffer is too small for the line. */
+static svn_error_t *
+read_length_line_shouldnt_loop(apr_pool_t *pool)
+{
+  const char *tmp_dir;
+  const char *tmp_file;
+  char buffer[4];
+  apr_size_t buffer_limit = sizeof(buffer);
+  apr_file_t *f;
+
+  SVN_ERR(svn_test_make_sandbox_dir(&tmp_dir, "read_length_tmp", pool));
+
+  SVN_ERR(svn_io_write_unique(&tmp_file, tmp_dir, "1234\r\n", 6,
+                              svn_io_file_del_on_pool_cleanup, pool));
+
+  SVN_ERR(svn_io_file_open(&f, tmp_file, APR_READ, APR_OS_DEFAULT, pool)); /* NOTE(review): F is never closed here; presumably released by the pool cleanup APR registers -- confirm */
+
+  SVN_TEST_ASSERT_ERROR(svn_io_read_length_line(f, buffer, &buffer_limit,
+                                                pool), SVN_ERR_MALFORMED_FILE);
+  SVN_TEST_ASSERT(buffer_limit == 4); /* limit must be left untouched on error */
+
+  return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_read_length_line(apr_pool_t *pool)
+{
+ const char *tmp_dir;
+ const char *tmp_file;
+ char buffer[80];
+ apr_size_t buffer_limit;
+ apr_file_t *f;
+ svn_error_t *err;
+
+ SVN_ERR(svn_test_make_sandbox_dir(&tmp_dir, "test_read_length_line",
+ pool));
+
+ /* Test 1: Read empty file. */
+ tmp_file = svn_dirent_join(tmp_dir, "empty", pool);
+ SVN_ERR(svn_io_file_create(tmp_file, "", pool));
+
+ SVN_ERR(svn_io_file_open(&f, tmp_file, APR_READ | APR_BUFFERED,
+ APR_OS_DEFAULT, pool));
+ buffer_limit = sizeof(buffer);
+ err = svn_io_read_length_line(f, buffer, &buffer_limit, pool);
+ SVN_TEST_ASSERT_ERROR(err, APR_EOF);
+
+ SVN_ERR(svn_io_file_close(f, pool));
+
+ /* Test 2: Read empty line.*/
+ tmp_file = svn_dirent_join(tmp_dir, "empty-line", pool);
+ SVN_ERR(svn_io_file_create(tmp_file, "\n", pool));
+
+ SVN_ERR(svn_io_file_open(&f, tmp_file, APR_READ | APR_BUFFERED,
+ APR_OS_DEFAULT, pool));
+ buffer_limit = sizeof(buffer);
+ err = svn_io_read_length_line(f, buffer, &buffer_limit, pool);
+ SVN_ERR(err);
+ SVN_TEST_ASSERT(buffer_limit == 0);
+ SVN_TEST_STRING_ASSERT(buffer, "");
+ SVN_ERR(svn_io_file_close(f, pool));
+
+ /* Test 3: Read two lines.*/
+ tmp_file = svn_dirent_join(tmp_dir, "lines", pool);
+ SVN_ERR(svn_io_file_create(tmp_file, "first\nsecond\n", pool));
+
+ SVN_ERR(svn_io_file_open(&f, tmp_file, APR_READ | APR_BUFFERED,
+ APR_OS_DEFAULT, pool));
+
+ buffer_limit = sizeof(buffer);
+ err = svn_io_read_length_line(f, buffer, &buffer_limit, pool);
+ SVN_ERR(err);
+ SVN_TEST_ASSERT(buffer_limit == 5);
+ SVN_TEST_STRING_ASSERT(buffer, "first");
+
+ buffer_limit = sizeof(buffer);
+ err = svn_io_read_length_line(f, buffer, &buffer_limit, pool);
+ SVN_ERR(err);
+ SVN_TEST_ASSERT(buffer_limit == 6);
+ SVN_TEST_STRING_ASSERT(buffer, "second");
+
+ buffer_limit = sizeof(buffer);
+ err = svn_io_read_length_line(f, buffer, &buffer_limit, pool);
+ SVN_TEST_ASSERT_ERROR(err, APR_EOF);
+
+ SVN_ERR(svn_io_file_close(f, pool));
+
+ /* Test 4: Content without end-of-line.*/
+ tmp_file = svn_dirent_join(tmp_dir, "no-eol", pool);
+ SVN_ERR(svn_io_file_create(tmp_file, "text", pool));
+
+ SVN_ERR(svn_io_file_open(&f, tmp_file, APR_READ | APR_BUFFERED,
+ APR_OS_DEFAULT, pool));
+
+ buffer_limit = sizeof(buffer);
+ err = svn_io_read_length_line(f, buffer, &buffer_limit, pool);
+ SVN_TEST_ASSERT_ERROR(err, APR_EOF);
+
+ SVN_ERR(svn_io_file_close(f, pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_file_readline(apr_pool_t *pool)
+{
+ const char *tmp_dir;
+ const char *tmp_file;
+ svn_stringbuf_t *buf;
+ apr_file_t *f;
+ svn_error_t *err;
+ const char *eol;
+ svn_boolean_t eof;
+ apr_off_t pos;
+
+ SVN_ERR(svn_test_make_sandbox_dir(&tmp_dir, "test_file_readline",
+ pool));
+
+ tmp_file = svn_dirent_join(tmp_dir, "foo", pool);
+
+ SVN_ERR(svn_io_file_create(tmp_file, "CR\rLF\nCRLF\r\nno-eol", pool));
+
+ SVN_ERR(svn_io_file_open(&f, tmp_file, APR_READ | APR_BUFFERED,
+ APR_OS_DEFAULT, pool));
+ err = svn_io_file_readline(f, &buf, &eol, &eof, APR_SIZE_MAX, pool, pool);
+ SVN_ERR(err);
+ SVN_TEST_STRING_ASSERT(buf->data, "CR");
+ SVN_TEST_STRING_ASSERT(eol, "\r");
+ SVN_TEST_ASSERT(!eof);
+
+ /* Check that APR file reports correct offset. See r1719196 why it's
+ important. */
+ SVN_ERR(svn_io_file_get_offset(&pos, f, pool));
+ SVN_TEST_INT_ASSERT(pos, 3);
+
+ err = svn_io_file_readline(f, &buf, &eol, &eof, APR_SIZE_MAX, pool, pool);
+ SVN_ERR(err);
+ SVN_TEST_STRING_ASSERT(buf->data, "LF");
+ SVN_TEST_STRING_ASSERT(eol, "\n");
+ SVN_TEST_ASSERT(!eof);
+
+ /* Check that APR file reports correct offset. See r1719196 why it's
+ important. */
+ SVN_ERR(svn_io_file_get_offset(&pos, f, pool));
+ SVN_TEST_INT_ASSERT(pos, 6);
+
+ err = svn_io_file_readline(f, &buf, &eol, &eof, APR_SIZE_MAX, pool, pool);
+ SVN_ERR(err);
+ SVN_TEST_STRING_ASSERT(buf->data, "CRLF");
+ SVN_TEST_STRING_ASSERT(eol, "\r\n");
+ SVN_TEST_ASSERT(!eof);
+
+ /* Check that APR file reports correct offset. See r1719196 why it's
+ important. */
+ SVN_ERR(svn_io_file_get_offset(&pos, f, pool));
+ SVN_TEST_INT_ASSERT(pos, 12);
+
+ err = svn_io_file_readline(f, &buf, &eol, &eof, APR_SIZE_MAX, pool, pool);
+ SVN_ERR(err);
+ SVN_TEST_STRING_ASSERT(buf->data, "no-eol");
+ SVN_TEST_STRING_ASSERT(eol, NULL);
+ SVN_TEST_ASSERT(eof);
+
+ /* Check that APR file reports correct offset. See r1719196 why it's
+ important. */
+ SVN_ERR(svn_io_file_get_offset(&pos, f, pool));
+ SVN_TEST_INT_ASSERT(pos, 18);
+
+ /* Further reads still returns EOF. */
+ err = svn_io_file_readline(f, &buf, &eol, &eof, APR_SIZE_MAX, pool, pool);
+ SVN_ERR(err);
+ SVN_TEST_STRING_ASSERT(buf->data, "");
+ SVN_TEST_STRING_ASSERT(eol, NULL);
+ SVN_TEST_ASSERT(eof);
+
+ /* Check that APR file reports correct offset. See r1719196 why it's
+ important. */
+ SVN_ERR(svn_io_file_get_offset(&pos, f, pool));
+ SVN_TEST_INT_ASSERT(pos, 18);
+
+ SVN_ERR(svn_io_file_close(f, pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_open_uniquely_named(apr_pool_t *pool)
+{
+  const char *tmp_dir;
+  apr_file_t *file;
+  const char *path;
+  svn_error_t *err;
+
+  SVN_ERR(svn_test_make_sandbox_dir(&tmp_dir, "test_open_uniquely_named",
+                                    pool));
+
+  /* Test #1: File 'foo.tmp' does not exist yet. */
+  SVN_ERR(svn_io_open_uniquely_named(&file, &path, tmp_dir, "foo", ".tmp",
+                                     svn_io_file_del_none, pool, pool));
+  SVN_TEST_STRING_ASSERT(path, svn_dirent_join(tmp_dir, "foo.tmp", pool));
+  SVN_ERR(svn_io_file_close(file, pool));
+
+  /* Test #2: File 'foo.tmp' already exists. */
+  SVN_ERR(svn_io_open_uniquely_named(NULL, &path, tmp_dir, "foo", ".tmp",
+                                     svn_io_file_del_none, pool, pool));
+  SVN_TEST_STRING_ASSERT(path, svn_dirent_join(tmp_dir, "foo.2.tmp", pool));
+
+  /* Test #3: A directory named 'bar.tmp' already exists. */
+  SVN_ERR(svn_io_dir_make(svn_dirent_join(tmp_dir, "bar.tmp", pool),
+                          APR_OS_DEFAULT, pool));
+  SVN_ERR(svn_io_open_uniquely_named(NULL, &path, tmp_dir, "bar", ".tmp",
+                                     svn_io_file_del_none, pool, pool));
+  SVN_TEST_STRING_ASSERT(path, svn_dirent_join(tmp_dir, "bar.2.tmp", pool));
+
+
+  /* Test #4: Attempt to create a file in a non-existing directory. */
+  err = svn_io_open_uniquely_named(NULL, &path,
+                                   svn_dirent_join(tmp_dir, "non-existing", pool),
+                                   NULL, NULL, svn_io_file_del_none, pool, pool);
+  if (err && APR_STATUS_IS_ENOENT(err->apr_err))
+    {
+      svn_error_clear(err);
+    }
+  else if (err)
+    {
+      /* Message matches the condition actually tested above (ENOENT). */
+      return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                               "Expected error APR_STATUS_IS_ENOENT() but "
+                               "got %s",
+                               svn_error_symbolic_name(err->apr_err));
+    }
+  else
+    {
+      SVN_TEST_ASSERT_ANY_ERROR(err);
+    }
+
+  /* Test #5: File 'yota.tmp' already exists and is read-only. */
+  SVN_ERR(svn_io_file_create_empty(svn_dirent_join(tmp_dir, "yota.tmp", pool),
+                                   pool));
+  SVN_ERR(svn_io_set_file_read_only(svn_dirent_join(tmp_dir, "yota.tmp", pool),
+                                    FALSE, pool));
+  SVN_ERR(svn_io_open_uniquely_named(NULL, &path, tmp_dir, "yota", ".tmp",
+                                     svn_io_file_del_none, pool, pool));
+  SVN_TEST_STRING_ASSERT(path, svn_dirent_join(tmp_dir, "yota.2.tmp", pool));
+
+  return SVN_NO_ERROR;
+}
+
+/* Move the read pointer in FILE to absolute position OFFSET and align
+ * the read buffer to multiples of BLOCK_SIZE. BUFFERED is set only if
+ * FILE actually uses a read buffer. Use POOL for allocations.
+ */
+static svn_error_t *
+aligned_seek(apr_file_t *file,
+ apr_size_t block_size,
+ apr_size_t offset,
+ svn_boolean_t buffered,
+ apr_pool_t *pool)
+{
+ apr_off_t block_start;
+ apr_off_t current;
+
+ SVN_ERR(svn_io_file_aligned_seek(file, (apr_off_t)block_size,
+ &block_start, (apr_off_t)offset, pool));
+
+ /* block start shall be aligned to multiples of block_size.
+ If it isn't, it must be aligned to APR's default block size(pre-1.3 APR)
+ */
+ if (buffered)
+ {
+ SVN_TEST_ASSERT(block_start % block_size == 0);
+ SVN_TEST_ASSERT(offset - block_start < block_size);
+ }
+
+ /* we must be at the desired offset */
+ SVN_ERR(svn_io_file_get_offset(&current, file, pool));
+ SVN_TEST_ASSERT(current == (apr_off_t)offset);
+
+ return SVN_NO_ERROR;
+}
+
+/* Move the read pointer in FILE to absolute position OFFSET, align the
+ * read buffer to multiples of BLOCK_SIZE and read one byte from that
+ * position. Verify that it matches the CONTENTS for that offset.
+ * BUFFERED is set only if FILE actually uses a read buffer.
+ * Use POOL for allocations.
+ */
+static svn_error_t *
+aligned_read_at(apr_file_t *file,
+ svn_stringbuf_t *contents,
+ apr_size_t block_size,
+ apr_size_t offset,
+ svn_boolean_t buffered,
+ apr_pool_t *pool)
+{
+ char c;
+ SVN_ERR(aligned_seek(file, block_size, offset, buffered, pool));
+
+ /* the data we read must match whatever we wrote there */
+ SVN_ERR(svn_io_file_getc(&c, file, pool));
+ SVN_TEST_ASSERT(c == contents->data[offset]);
+
+ return SVN_NO_ERROR;
+}
+
+/* Verify that aligned seek with the given BLOCK_SIZE works for FILE.
+ * CONTENTS is the data expected from FILE. BUFFERED is set only if FILE
+ * actually uses a read buffer. Use POOL for allocations.
+ */
+static svn_error_t *
+aligned_read(apr_file_t *file,
+             svn_stringbuf_t *contents,
+             apr_size_t block_size,
+             svn_boolean_t buffered,
+             apr_pool_t *pool)
+{
+  apr_size_t i;
+  apr_size_t offset = 0;
+  const apr_size_t prime = 78427;
+
+  /* "random" access to different offsets */
+  for (i = 0, offset = prime; i < 10; ++i, offset += prime)
+    SVN_ERR(aligned_read_at(file, contents, block_size,
+                            offset % contents->len, buffered, pool));
+
+  /* we can seek to EOF (aligned_seek takes BLOCK_SIZE before OFFSET) */
+  SVN_ERR(aligned_seek(file, block_size, contents->len, buffered, pool));
+
+  /* reversed order access to all bytes */
+  for (i = contents->len; i > 0; --i)
+    SVN_ERR(aligned_read_at(file, contents, block_size, i - 1, buffered,
+                            pool));
+
+  /* forward order access to all bytes */
+  for (i = 0; i < contents->len; ++i)
+    SVN_ERR(aligned_read_at(file, contents, block_size, i, buffered, pool));
+
+  return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+aligned_seek_test(apr_pool_t *pool)
+{
+ apr_size_t i;
+ const char *tmp_dir;
+ const char *tmp_file;
+ apr_file_t *f;
+ svn_stringbuf_t *contents;
+ const apr_size_t file_size = 100000;
+
+ /* create a temp folder & schedule it for automatic cleanup */
+ SVN_ERR(svn_test_make_sandbox_dir(&tmp_dir, "aligned_seek_tmp", pool));
+
+ /* create a temp file with known contents */
+
+ contents = svn_stringbuf_create_ensure(file_size, pool);
+ for (i = 0; i < file_size; ++i)
+ svn_stringbuf_appendbyte(contents, (char)rand());
+
+ SVN_ERR(svn_io_write_unique(&tmp_file, tmp_dir, contents->data,
+ contents->len,
+ svn_io_file_del_on_pool_cleanup, pool));
+
+ /* now, access read data with varying alignment sizes */
+ SVN_ERR(svn_io_file_open(&f, tmp_file, APR_READ | APR_BUFFERED,
+ APR_OS_DEFAULT, pool));
+ SVN_ERR(aligned_read(f, contents, 0x1000, TRUE, pool)); /* APR default */
+ SVN_ERR(aligned_read(f, contents, 0x8000, TRUE, pool)); /* "unusual" 32K */
+ SVN_ERR(aligned_read(f, contents, 0x10000, TRUE, pool)); /* FSX default */
+ SVN_ERR(aligned_read(f, contents, 0x100000, TRUE, pool)); /* larger than file */
+ SVN_ERR(aligned_read(f, contents, 10001, TRUE, pool)); /* odd, larger than
+ APR default */
+ SVN_ERR(aligned_read(f, contents, 1003, TRUE, pool)); /* odd, smaller than
+ APR default */
+ SVN_ERR(svn_io_file_close(f, pool));
+
+ /* now, try read data with buffering disabled.
+ That is a special case because APR reports a buffer size of 0. */
+ SVN_ERR(svn_io_file_open(&f, tmp_file, APR_READ, APR_OS_DEFAULT, pool));
+ SVN_ERR(aligned_read(f, contents, 0x1000, FALSE, pool));
+ SVN_ERR(aligned_read(f, contents, 0x8000, FALSE, pool));
+ SVN_ERR(aligned_read(f, contents, 0x10000, FALSE, pool));
+ SVN_ERR(aligned_read(f, contents, 0x100000, FALSE, pool));
+ SVN_ERR(aligned_read(f, contents, 10001, FALSE, pool));
+ SVN_ERR(aligned_read(f, contents, 1003, FALSE, pool));
+ SVN_ERR(svn_io_file_close(f, pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+ignore_enoent(apr_pool_t *pool)
+{
+ const char *tmp_dir, *path;
+ const svn_io_dirent2_t *dirent_p;
+ apr_file_t *file;
+
+ /* Create an empty directory. */
+ SVN_ERR(svn_test_make_sandbox_dir(&tmp_dir, "ignore_enoent", pool));
+
+ /* Path does not exist. */
+ path = svn_dirent_join(tmp_dir, "not-present", pool);
+ SVN_ERR(svn_io_remove_dir2(path, TRUE, NULL, NULL, pool));
+ SVN_ERR(svn_io_remove_file2(path, TRUE, pool));
+ SVN_ERR(svn_io_set_file_read_only(path, TRUE, pool));
+ SVN_ERR(svn_io_set_file_read_write(path, TRUE, pool));
+ SVN_ERR(svn_io_set_file_executable(path, TRUE, TRUE, pool));
+ SVN_ERR(svn_io_set_file_executable(path, FALSE, TRUE, pool));
+ SVN_ERR(svn_io_stat_dirent2(&dirent_p, path, TRUE, TRUE, pool, pool));
+ SVN_ERR(svn_io_stat_dirent2(&dirent_p, path, FALSE, TRUE, pool, pool));
+
+ /* Neither path nor parent exists. */
+ path = svn_dirent_join(path, "not-present", pool);
+ SVN_ERR(svn_io_remove_dir2(path, TRUE, NULL, NULL, pool));
+ SVN_ERR(svn_io_remove_file2(path, TRUE, pool));
+ SVN_ERR(svn_io_set_file_read_only(path, TRUE, pool));
+ SVN_ERR(svn_io_set_file_read_write(path, TRUE, pool));
+ SVN_ERR(svn_io_set_file_executable(path, TRUE, TRUE, pool));
+ SVN_ERR(svn_io_set_file_executable(path, FALSE, TRUE, pool));
+ SVN_ERR(svn_io_stat_dirent2(&dirent_p, path, TRUE, TRUE, pool, pool));
+ SVN_ERR(svn_io_stat_dirent2(&dirent_p, path, FALSE, TRUE, pool, pool));
+
+ /* File does exist. */
+ path = svn_dirent_join(tmp_dir, "present", pool);
+ SVN_ERR(svn_io_file_open(&file, path,
+ APR_WRITE | APR_CREATE | APR_TRUNCATE,
+ APR_OS_DEFAULT,
+ pool));
+ SVN_ERR(svn_io_file_close(file, pool));
+
+ /* Path does not exist as child of file. */
+ path = svn_dirent_join(path, "not-present", pool);
+ SVN_ERR(svn_io_remove_dir2(path, TRUE, NULL, NULL, pool));
+ SVN_ERR(svn_io_remove_file2(path, TRUE, pool));
+ SVN_ERR(svn_io_set_file_read_only(path, TRUE, pool));
+ SVN_ERR(svn_io_set_file_read_write(path, TRUE, pool));
+ SVN_ERR(svn_io_set_file_executable(path, TRUE, TRUE, pool));
+ SVN_ERR(svn_io_set_file_executable(path, FALSE, TRUE, pool));
+ SVN_ERR(svn_io_stat_dirent2(&dirent_p, path, TRUE, TRUE, pool, pool));
+ SVN_ERR(svn_io_stat_dirent2(&dirent_p, path, FALSE, TRUE, pool, pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_install_stream_to_longpath(apr_pool_t *pool)
+{
+ const char *tmp_dir;
+ const char *final_abspath;
+ const char *deep_dir;
+ svn_stream_t *stream;
+ svn_stringbuf_t *actual_content;
+ int i;
+
+ /* Create an empty directory. */
+ SVN_ERR(svn_test_make_sandbox_dir(&tmp_dir,
+ "test_install_stream_to_longpath",
+ pool));
+
+ deep_dir = tmp_dir;
+
+ /* Generate very long path (> 260 symbols) */
+ for (i = 0; i < 26; i++)
+ {
+ deep_dir = svn_dirent_join(deep_dir, "1234567890", pool);
+ SVN_ERR(svn_io_make_dir_recursively(deep_dir, pool));
+ }
+
+ final_abspath = svn_dirent_join(deep_dir, "stream1", pool);
+ SVN_ERR(svn_stream__create_for_install(&stream, deep_dir, pool, pool));
+ SVN_ERR(svn_stream_puts(stream, "stream1 content"));
+ SVN_ERR(svn_stream_close(stream));
+ SVN_ERR(svn_stream__install_stream(stream,
+ final_abspath,
+ TRUE,
+ pool));
+
+ SVN_ERR(svn_stringbuf_from_file2(&actual_content,
+ final_abspath,
+ pool));
+
+ SVN_TEST_STRING_ASSERT(actual_content->data, "stream1 content");
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_install_stream_over_readonly_file(apr_pool_t *pool)
+{
+ const char *tmp_dir;
+ const char *final_abspath;
+ svn_stream_t *stream;
+ svn_stringbuf_t *actual_content;
+
+ /* Create an empty directory. */
+ SVN_ERR(svn_test_make_sandbox_dir(&tmp_dir,
+ "test_install_stream_over_readonly_file",
+ pool));
+
+ final_abspath = svn_dirent_join(tmp_dir, "stream1", pool);
+
+ /* Create empty read-only file. */
+ SVN_ERR(svn_io_file_create_empty(final_abspath, pool));
+ SVN_ERR(svn_io_set_file_read_only(final_abspath, FALSE, pool));
+
+ SVN_ERR(svn_stream__create_for_install(&stream, tmp_dir, pool, pool));
+ SVN_ERR(svn_stream_puts(stream, "stream1 content"));
+ SVN_ERR(svn_stream_close(stream));
+ SVN_ERR(svn_stream__install_stream(stream,
+ final_abspath,
+ TRUE,
+ pool));
+
+ SVN_ERR(svn_stringbuf_from_file2(&actual_content,
+ final_abspath,
+ pool));
+
+ SVN_TEST_STRING_ASSERT(actual_content->data, "stream1 content");
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_file_size_get(apr_pool_t *pool)
+{
+ const char *tmp_dir, *path;
+ apr_file_t *file;
+ svn_filesize_t filesize;
+
+ /* Create an empty directory. */
+ SVN_ERR(svn_test_make_sandbox_dir(&tmp_dir, "test_file_size_get", pool));
+
+ /* Path does not exist. */
+ path = svn_dirent_join(tmp_dir, "file", pool);
+
+ /* Create a file.*/
+ SVN_ERR(svn_io_file_open(&file, path,
+ APR_WRITE | APR_CREATE | APR_BUFFERED,
+ APR_OS_DEFAULT, pool));
+ SVN_ERR(svn_io_file_size_get(&filesize, file, pool));
+ SVN_TEST_ASSERT(filesize == 0);
+
+ /* Write 8 bytes and check new size. */
+ SVN_ERR(svn_io_file_write_full(file, "12345678", 8, NULL, pool));
+
+ SVN_ERR(svn_io_file_size_get(&filesize, file, pool));
+ SVN_TEST_ASSERT(filesize == 8);
+
+ /* Truncate to 2 bytes. */
+ SVN_ERR(svn_io_file_trunc(file, 2, pool));
+
+ SVN_ERR(svn_io_file_size_get(&filesize, file, pool));
+ SVN_TEST_ASSERT(filesize == 2);
+
+ /* Close the file. */
+ SVN_ERR(svn_io_file_close(file, pool));
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_file_rename2(apr_pool_t *pool)
+{
+ const char *tmp_dir;
+ const char *foo_path;
+ const char *bar_path;
+ svn_stringbuf_t *actual_content;
+ svn_node_kind_t actual_kind;
+
+ /* Create an empty directory. */
+ SVN_ERR(svn_test_make_sandbox_dir(&tmp_dir, "test_file_rename2", pool));
+
+ foo_path = svn_dirent_join(tmp_dir, "foo", pool);
+ bar_path = svn_dirent_join(tmp_dir, "bar", pool);
+
+ /* Test 1: Simple file rename. */
+ SVN_ERR(svn_io_file_create(foo_path, "file content", pool));
+
+ SVN_ERR(svn_io_file_rename2(foo_path, bar_path, FALSE, pool));
+
+ SVN_ERR(svn_stringbuf_from_file2(&actual_content, bar_path, pool));
+ SVN_TEST_STRING_ASSERT(actual_content->data, "file content");
+
+ SVN_ERR(svn_io_check_path(foo_path, &actual_kind, pool));
+ SVN_TEST_ASSERT(actual_kind == svn_node_none);
+ SVN_ERR(svn_io_remove_file2(bar_path, FALSE, pool));
+
+ /* Test 2: Rename file with flush_to_disk flag. */
+ SVN_ERR(svn_io_file_create(foo_path, "file content", pool));
+
+ SVN_ERR(svn_io_file_rename2(foo_path, bar_path, TRUE, pool));
+
+ SVN_ERR(svn_stringbuf_from_file2(&actual_content, bar_path, pool));
+ SVN_TEST_STRING_ASSERT(actual_content->data, "file content");
+ SVN_ERR(svn_io_check_path(foo_path, &actual_kind, pool));
+ SVN_TEST_ASSERT(actual_kind == svn_node_none);
+
+ SVN_ERR(svn_io_remove_file2(bar_path, FALSE, pool));
+
+ /* Test 3: Rename file over existing read-only file. */
+ SVN_ERR(svn_io_file_create(foo_path, "file content", pool));
+ SVN_ERR(svn_io_file_create(bar_path, "bar content", pool));
+ SVN_ERR(svn_io_set_file_read_only(bar_path, FALSE, pool));
+
+ SVN_ERR(svn_io_file_rename2(foo_path, bar_path, FALSE, pool));
+
+ SVN_ERR(svn_stringbuf_from_file2(&actual_content, bar_path, pool));
+ SVN_TEST_STRING_ASSERT(actual_content->data, "file content");
+ SVN_ERR(svn_io_check_path(foo_path, &actual_kind, pool));
+ SVN_TEST_ASSERT(actual_kind == svn_node_none);
+ SVN_ERR(svn_io_remove_file2(bar_path, FALSE, pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_apr_trunc_workaround(apr_pool_t *pool)
+{
+ const char *tmp_dir;
+ const char *tmp_file;
+ apr_file_t *f;
+ apr_size_t len;
+ apr_off_t offset;
+ char dummy;
+
+ /* create a temp folder & schedule it for automatic cleanup */
+ SVN_ERR(svn_dirent_get_absolute(&tmp_dir, "test_apr_trunc_workaround",
+ pool));
+ SVN_ERR(svn_io_remove_dir2(tmp_dir, TRUE, NULL, NULL, pool));
+ SVN_ERR(svn_io_make_dir_recursively(tmp_dir, pool));
+ svn_test_add_dir_cleanup(tmp_dir);
+
+ /* create an r/w file */
+ tmp_file = svn_dirent_join(tmp_dir, "file", pool);
+ SVN_ERR(svn_io_file_open(&f, tmp_file,
+ APR_READ | APR_WRITE | APR_BUFFERED | APR_CREATE |
+ APR_TRUNCATE,
+ APR_OS_DEFAULT, pool));
+
+ /* write some content and put it internally into read mode */
+ len = 10;
+ SVN_ERR(svn_io_file_write(f, "0123456789", &len, pool));
+
+ offset = 0;
+ SVN_ERR(svn_io_file_seek(f, APR_SET, &offset, pool));
+ SVN_ERR(svn_io_file_getc(&dummy, f, pool));
+
+ /* clear the file and write some new content */
+ SVN_ERR(svn_io_file_trunc(f, 0, pool));
+ len = 3;
+ SVN_ERR(svn_io_file_write(f, "abc", &len, pool));
+
+ /* we should now be positioned at the end of the new content */
+ offset = 0;
+ SVN_ERR(svn_io_file_seek(f, APR_CUR, &offset, pool));
+ SVN_TEST_ASSERT(offset == (int)len);
+
+ return SVN_NO_ERROR;
+}
+
+/* The test table. */
+
+static int max_threads = 3;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_two_file_size_comparison,
+ "two file size comparison"),
+ SVN_TEST_PASS2(test_two_file_content_comparison,
+ "two file content comparison"),
+ SVN_TEST_PASS2(test_three_file_size_comparison,
+ "three file size comparison"),
+ SVN_TEST_PASS2(test_three_file_content_comparison,
+ "three file content comparison"),
+ SVN_TEST_PASS2(read_length_line_shouldnt_loop,
+ "svn_io_read_length_line() shouldn't loop"),
+ SVN_TEST_PASS2(aligned_seek_test,
+ "test aligned seek"),
+ SVN_TEST_PASS2(ignore_enoent,
+ "test ignore-enoent"),
+ SVN_TEST_PASS2(test_install_stream_to_longpath,
+ "test svn_stream__install_stream to long path"),
+ SVN_TEST_PASS2(test_install_stream_over_readonly_file,
+ "test svn_stream__install_stream over RO file"),
+ SVN_TEST_PASS2(test_file_size_get,
+ "test svn_io_file_size_get"),
+ SVN_TEST_PASS2(test_file_rename2,
+ "test svn_io_file_rename2"),
+ SVN_TEST_PASS2(test_read_length_line,
+ "test svn_io_read_length_line()"),
+ SVN_TEST_PASS2(test_file_readline,
+ "test svn_io_file_readline()"),
+ SVN_TEST_PASS2(test_open_uniquely_named,
+ "test svn_io_open_uniquely_named()"),
+ SVN_TEST_PASS2(test_apr_trunc_workaround,
+ "test workaround for APR in svn_io_file_trunc"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/mergeinfo-test.c b/subversion/tests/libsvn_subr/mergeinfo-test.c
new file mode 100644
index 0000000..e9a57a2
--- /dev/null
+++ b/subversion/tests/libsvn_subr/mergeinfo-test.c
@@ -0,0 +1,1837 @@
+/*
+ * mergeinfo-test.c -- test the mergeinfo functions
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <apr_hash.h>
+#include <apr_tables.h>
+
+#define SVN_DEPRECATED
+
+#include "svn_hash.h"
+#include "svn_pools.h"
+#include "svn_types.h"
+#include "svn_mergeinfo.h"
+#include "private/svn_mergeinfo_private.h"
+#include "../svn_test.h"
+
+/* A quick way to create error messages. */
+static svn_error_t *
+fail(apr_pool_t *pool, const char *fmt, ...)
+{
+ va_list ap;
+ char *msg;
+
+ va_start(ap, fmt);
+ msg = apr_pvsprintf(pool, fmt, ap);
+ va_end(ap);
+
+ return svn_error_create(SVN_ERR_TEST_FAILED, 0, msg);
+}
+
+#define MAX_NBR_RANGES 5
+
+/* Verify that INPUT is parsed properly, and returns an error if
+ parsing fails, or incorrect parsing is detected. Assumes that INPUT
+ contains only one path -> ranges mapping, and that EXPECTED_RANGES points
+ to the first range in an array whose size is greater than or equal to
+ the number of ranges in INPUT's path -> ranges mapping but less than
+ MAX_NBR_RANGES. If fewer than MAX_NBR_RANGES ranges are present, then the
+ trailing expected_ranges should have their end revision set to 0. */
+static svn_error_t *
+verify_mergeinfo_parse(const char *input,
+ const char *expected_path,
+ const svn_merge_range_t *expected_ranges,
+ apr_pool_t *pool)
+{
+ svn_error_t *err;
+ apr_hash_t *path_to_merge_ranges;
+ apr_hash_index_t *hi;
+
+ /* Test valid input. */
+ err = svn_mergeinfo_parse(&path_to_merge_ranges, input, pool);
+ if (err || apr_hash_count(path_to_merge_ranges) != 1)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, err,
+ "svn_mergeinfo_parse (%s) failed unexpectedly",
+ input);
+ for (hi = apr_hash_first(pool, path_to_merge_ranges); hi;
+ hi = apr_hash_next(hi))
+ {
+ const void *path;
+ void *val;
+ svn_rangelist_t *ranges;
+ svn_merge_range_t *range;
+ int j;
+
+ apr_hash_this(hi, &path, NULL, &val);
+ ranges = val;
+ if (strcmp((const char *) path, expected_path) != 0)
+ return fail(pool, "svn_mergeinfo_parse (%s) failed to parse the "
+ "correct path (%s)", input, expected_path);
+
+ /* Test each parsed range. */
+ for (j = 0; j < ranges->nelts; j++)
+ {
+ range = APR_ARRAY_IDX(ranges, j, svn_merge_range_t *);
+ if (range->start != expected_ranges[j].start
+ || range->end != expected_ranges[j].end
+ || range->inheritable != expected_ranges[j].inheritable)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_mergeinfo_parse (%s) failed to "
+ "parse the correct range",
+ input);
+ }
+
+ /* Were we expecting any more ranges? */
+ if (j < MAX_NBR_RANGES - 1
+ && expected_ranges[j].end != 0)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_mergeinfo_parse (%s) failed to "
+ "produce the expected number of ranges",
+ input);
+ }
+ return SVN_NO_ERROR;
+}
+
+
+#define NBR_MERGEINFO_VALS 25
+
+/* Valid mergeinfo values. */
+static const char * const mergeinfo_vals[NBR_MERGEINFO_VALS] =
+ {
+ "/trunk:1",
+ "/trunk/foo:1-6",
+ "/trunk: 5,7-9,10,11,13,14",
+ "/trunk: 3-10,11*,13,14",
+ "/branch: 1,2-18*,33*",
+ /* Path names containing ':'s */
+ "patch-common::netasq-bpf.c:25381",
+ "patch-common_netasq-bpf.c::25381",
+ ":patch:common:netasq:bpf.c:25381",
+ /* Unordered rangelists */
+ "/trunk:3-6,15,18,9,22",
+ "/trunk:5,3",
+ "/trunk:3-6*,15*,18*,9,22*",
+ "/trunk:5,3*",
+ "/trunk:100,3-7,50,99,1-2",
+ /* Overlapping rangelists */
+ "/gunther_branch:5-10,7-12",
+ "/gunther_branch:5-10*,7-12*",
+ "/branches/branch1:43832-45742,49990-53669,43832-49987",
+ /* Unordered and overlapping rangelists */
+ "/gunther_branch:7-12,1,5-10",
+ "/gunther_branch:7-12*,1,5-10*",
+ /* Adjacent rangelists of differing inheritability. */
+ "/b5:5-53,1-4,54-90*",
+ "/c0:1-77,12-44",
+ /* Non-canonical paths. */
+ "/A/:7-8",
+ "/A///:7-8",
+ "/A/.:7-8",
+ "/A/./B:7-8",
+ ":7-8",
+ };
+/* Paths corresponding to mergeinfo_vals. */
+static const char * const mergeinfo_paths[NBR_MERGEINFO_VALS] =
+ {
+ "/trunk",
+ "/trunk/foo",
+ "/trunk",
+ "/trunk",
+ "/branch",
+
+ /* svn_mergeinfo_parse converts relative merge source paths to absolute. */
+ "/patch-common::netasq-bpf.c",
+ "/patch-common_netasq-bpf.c:",
+ "/:patch:common:netasq:bpf.c",
+
+ "/trunk",
+ "/trunk",
+ "/trunk",
+ "/trunk",
+ "/trunk",
+ "/gunther_branch",
+ "/gunther_branch",
+ "/branches/branch1",
+ "/gunther_branch",
+ "/gunther_branch",
+ "/b5",
+ "/c0",
+
+ /* non-canonical paths converted to canonical */
+ "/A",
+ "/A",
+ "/A",
+ "/A/B",
+ "/",
+ };
+/* First ranges from the paths identified by mergeinfo_paths. */
+static svn_merge_range_t mergeinfo_ranges[NBR_MERGEINFO_VALS][MAX_NBR_RANGES] =
+ {
+ { {0, 1, TRUE} },
+ { {0, 6, TRUE} },
+ { {4, 5, TRUE}, { 6, 11, TRUE }, {12, 14, TRUE } },
+ { {2, 10, TRUE}, {10, 11, FALSE}, {12, 14, TRUE } },
+ { {0, 1, TRUE}, { 1, 18, FALSE}, {32, 33, FALSE} },
+ { {25380, 25381, TRUE } },
+ { {25380, 25381, TRUE } },
+ { {25380, 25381, TRUE } },
+ { {2, 6, TRUE}, {8, 9, TRUE}, {14, 15, TRUE}, {17, 18, TRUE},
+ {21, 22, TRUE} },
+ { {2, 3, TRUE}, {4, 5, TRUE} },
+ { {2, 6, FALSE}, {8, 9, TRUE}, {14, 15, FALSE}, {17, 18, FALSE},
+ {21, 22, FALSE} },
+ { {2, 3, FALSE}, {4, 5, TRUE} },
+ { {0, 7, TRUE}, {49, 50, TRUE}, {98, 100, TRUE} },
+ { {4, 12, TRUE} },
+ { {4, 12, FALSE} },
+ { {43831, 49987, TRUE}, {49989, 53669, TRUE} },
+ { {0, 1, TRUE}, {4, 12, TRUE} },
+ { {0, 1, TRUE}, {4, 12, FALSE} },
+ { {0, 53, TRUE}, {53, 90, FALSE} },
+ { {0, 77, TRUE} },
+ { {6, 8, TRUE} },
+ { {6, 8, TRUE} },
+ { {6, 8, TRUE} },
+ { {6, 8, TRUE} },
+ { {6, 8, TRUE} },
+ };
+
+static svn_error_t *
+test_parse_single_line_mergeinfo(apr_pool_t *pool)
+{
+ int i;
+
+ for (i = 0; i < NBR_MERGEINFO_VALS; i++)
+ SVN_ERR(verify_mergeinfo_parse(mergeinfo_vals[i], mergeinfo_paths[i],
+ mergeinfo_ranges[i], pool));
+
+ return SVN_NO_ERROR;
+}
+
+static const char *single_mergeinfo = "/trunk: 5,7-9,10,11,13,14";
+
+static svn_error_t *
+test_mergeinfo_dup(apr_pool_t *pool)
+{
+ apr_hash_t *orig_mergeinfo, *copied_mergeinfo;
+ apr_pool_t *subpool;
+ svn_rangelist_t *rangelist;
+
+ /* Assure that copies which should be empty turn out that way. */
+ subpool = svn_pool_create(pool);
+ orig_mergeinfo = apr_hash_make(subpool);
+ copied_mergeinfo = svn_mergeinfo_dup(orig_mergeinfo, subpool);
+ if (apr_hash_count(copied_mergeinfo) != 0)
+ return fail(pool, "Copied mergeinfo should be empty");
+
+ /* Create some mergeinfo, copy it using another pool, then destroy
+ the pool with which the original mergeinfo was created. */
+ SVN_ERR(svn_mergeinfo_parse(&orig_mergeinfo, single_mergeinfo, subpool));
+ copied_mergeinfo = svn_mergeinfo_dup(orig_mergeinfo, pool);
+ svn_pool_destroy(subpool);
+ if (apr_hash_count(copied_mergeinfo) != 1)
+ return fail(pool, "Copied mergeinfo should contain one merge source");
+ rangelist = apr_hash_get(copied_mergeinfo, "/trunk", APR_HASH_KEY_STRING);
+ if (! rangelist)
+ return fail(pool, "Expected copied mergeinfo; got nothing");
+ if (rangelist->nelts != 3)
+ return fail(pool, "Copied mergeinfo should contain 3 revision ranges, "
+ "rather than the %d it contains", rangelist->nelts);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_parse_combine_rangeinfo(apr_pool_t *pool)
+{
+ apr_array_header_t *result;
+ svn_merge_range_t *resultrange;
+ apr_hash_t *info1;
+
+ SVN_ERR(svn_mergeinfo_parse(&info1, single_mergeinfo, pool));
+
+ if (apr_hash_count(info1) != 1)
+ return fail(pool, "Wrong number of paths in parsed mergeinfo");
+
+ result = apr_hash_get(info1, "/trunk", APR_HASH_KEY_STRING);
+ if (!result)
+ return fail(pool, "Missing path in parsed mergeinfo");
+
+ /* /trunk should have three ranges, 5-5, 7-11, 13-14 */
+ if (result->nelts != 3)
+ return fail(pool, "Parsing failed to combine ranges");
+
+ resultrange = APR_ARRAY_IDX(result, 0, svn_merge_range_t *);
+
+ if (resultrange->start != 4 || resultrange->end != 5)
+ return fail(pool, "Range combining produced wrong result");
+
+ resultrange = APR_ARRAY_IDX(result, 1, svn_merge_range_t *);
+
+ if (resultrange->start != 6 || resultrange->end != 11)
+ return fail(pool, "Range combining produced wrong result");
+
+ resultrange = APR_ARRAY_IDX(result, 2, svn_merge_range_t *);
+
+ if (resultrange->start != 12 || resultrange->end != 14)
+ return fail(pool, "Range combining produced wrong result");
+
+ return SVN_NO_ERROR;
+}
+
+
+#define NBR_BROKEN_MERGEINFO_VALS 26
+/* Invalid mergeinfo values. */
+static const char * const broken_mergeinfo_vals[NBR_BROKEN_MERGEINFO_VALS] =
+ {
+ /* Invalid grammar */
+ "/missing-revs",
+ "/trunk: 5,7-9,10,11,13,14,",
+ "/trunk 5,7-9,10,11,13,14",
+ "/trunk:5 7--9 10 11 13 14",
+ /* Overlapping revs differing inheritability */
+ "/trunk:5-9*,9",
+ "/trunk:5,5-9*",
+ "/trunk:5-9,9*",
+ "/trunk:5*,5-9",
+ "/trunk:4,4*",
+ "/trunk:4*,4",
+ "/trunk:3-7*,4-23",
+ "/trunk:3-7,4-23*",
+ /* Reversed revision ranges */
+ "/trunk:22-20",
+ "/trunk:22-20*",
+ "/trunk:3,7-12,22-20,25",
+ "/trunk:3,7,22-20*,25-30",
+ /* Range with same start and end revision */
+ "/trunk:22-22",
+ "/trunk:22-22*",
+ "/trunk:3,7-12,20-20,25",
+ "/trunk:3,7,20-20*,25-30",
+ /* path mapped to range with no revisions */
+ "/trunk:",
+ "/trunk:2-9\n/branch:",
+ "::",
+ /* Invalid revisions */
+ "trunk:a-3",
+ "branch:3-four",
+ "trunk:yadayadayada"
+ };
+
+static svn_error_t *
+test_parse_broken_mergeinfo(apr_pool_t *pool)
+{
+ int i;
+ svn_error_t *err;
+ apr_hash_t *info1;
+
+ /* Trigger some error(s) with mal-formed input. */
+ for (i = 0; i < NBR_BROKEN_MERGEINFO_VALS; i++)
+ {
+ err = svn_mergeinfo_parse(&info1, broken_mergeinfo_vals[i], pool);
+ if (err == SVN_NO_ERROR)
+ {
+ return fail(pool, "svn_mergeinfo_parse (%s) failed to detect an error",
+ broken_mergeinfo_vals[i]);
+ }
+ else if (err->apr_err != SVN_ERR_MERGEINFO_PARSE_ERROR)
+ {
+ svn_error_clear(err);
+ return fail(pool, "svn_mergeinfo_parse (%s) returned some error other"
+ " than SVN_ERR_MERGEINFO_PARSE_ERROR",
+ broken_mergeinfo_vals[i]);
+ }
+ else
+ {
+ svn_error_clear(err);
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+static const char *mergeinfo1 = "/trunk: 3,5,7-9,10,11,13,14\n/fred:8-10";
+
+#define NBR_RANGELIST_DELTAS 4
+
+
+/* Convert a single svn_merge_range_t * back into an svn_stringbuf_t *. */
+static char *
+range_to_string(svn_merge_range_t *range,
+ apr_pool_t *pool)
+{
+ if (range->start == range->end - 1)
+ return apr_psprintf(pool, "%ld%s", range->end,
+ range->inheritable
+ ? "" : SVN_MERGEINFO_NONINHERITABLE_STR);
+ else
+ return apr_psprintf(pool, "%ld-%ld%s", range->start + 1,
+ range->end, range->inheritable
+ ? "" : SVN_MERGEINFO_NONINHERITABLE_STR);
+}
+
+
+/* Verify that ACTUAL_RANGELIST matches EXPECTED_RANGES (an array of
+ NBR_EXPECTED length). Return an error based on careful examination if
+ they do not match. FUNC_VERIFIED is the name of the API being
+ verified (e.g. "svn_rangelist_intersect"), while TYPE is a word
+ describing what the ranges being examined represent. */
+static svn_error_t *
+verify_ranges_match(const svn_rangelist_t *actual_rangelist,
+ svn_merge_range_t *expected_ranges, int nbr_expected,
+ const char *func_verified, const char *type,
+ apr_pool_t *pool)
+{
+ int i;
+
+ if (actual_rangelist->nelts != nbr_expected)
+ return fail(pool, "%s should report %d range %ss, but found %d",
+ func_verified, nbr_expected, type, actual_rangelist->nelts);
+
+ for (i = 0; i < actual_rangelist->nelts; i++)
+ {
+ svn_merge_range_t *range = APR_ARRAY_IDX(actual_rangelist, i,
+ svn_merge_range_t *);
+ if (range->start != expected_ranges[i].start
+ || range->end != expected_ranges[i].end
+ || range->inheritable != expected_ranges[i].inheritable)
+ return fail(pool, "%s should report range %s, but found %s",
+ func_verified,
+ range_to_string(&expected_ranges[i], pool),
+ range_to_string(range, pool));
+ }
+ return SVN_NO_ERROR;
+}
+
+/* Verify that DELTAS matches EXPECTED_DELTAS (both expected to
+ contain only a rangelist for "/trunk"). Return an error based on
+ careful examination if they do not match. FUNC_VERIFIED is the
+ name of the API being verified (e.g. "svn_mergeinfo_diff"), while
+ TYPE is a word describing what the deltas being examined
+ represent. */
+static svn_error_t *
+verify_mergeinfo_deltas(apr_hash_t *deltas, svn_merge_range_t *expected_deltas,
+ const char *func_verified, const char *type,
+ apr_pool_t *pool)
+{
+ svn_rangelist_t *rangelist;
+
+ if (apr_hash_count(deltas) != 1)
+ /* Deltas on "/trunk" expected. */
+ return fail(pool, "%s should report 1 path %s, but found %d",
+ func_verified, type, apr_hash_count(deltas));
+
+ rangelist = apr_hash_get(deltas, "/trunk", APR_HASH_KEY_STRING);
+ if (rangelist == NULL)
+ return fail(pool, "%s failed to produce a rangelist for /trunk",
+ func_verified);
+
+ return verify_ranges_match(rangelist, expected_deltas, NBR_RANGELIST_DELTAS,
+ func_verified, type, pool);
+}
+
+static svn_error_t *
+test_diff_mergeinfo(apr_pool_t *pool)
+{
+ apr_hash_t *deleted, *added, *from, *to;
+ svn_merge_range_t expected_rangelist_deletions[NBR_RANGELIST_DELTAS] =
+ { {6, 7, TRUE}, {8, 9, TRUE}, {10, 11, TRUE}, {32, 34, TRUE} };
+ svn_merge_range_t expected_rangelist_additions[NBR_RANGELIST_DELTAS] =
+ { {1, 2, TRUE}, {4, 6, TRUE}, {12, 16, TRUE}, {29, 30, TRUE} };
+
+ SVN_ERR(svn_mergeinfo_parse(&from, "/trunk: 1,3-4,7,9,11-12,31-34", pool));
+ SVN_ERR(svn_mergeinfo_parse(&to, "/trunk: 1-6,12-16,30-32", pool));
+ /* On /trunk: deleted (7, 9, 11, 33-34) and added (2, 5-6, 13-16, 30) */
+ SVN_ERR(svn_mergeinfo_diff(&deleted, &added, from, to,
+ FALSE, pool));
+
+ /* Verify calculation of range list deltas. */
+ SVN_ERR(verify_mergeinfo_deltas(deleted, expected_rangelist_deletions,
+ "svn_mergeinfo_diff", "deletion", pool));
+ SVN_ERR(verify_mergeinfo_deltas(added, expected_rangelist_additions,
+ "svn_mergeinfo_diff", "addition", pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_rangelist_reverse(apr_pool_t *pool)
+{
+ svn_rangelist_t *rangelist;
+ svn_merge_range_t expected_rangelist[3] =
+ { {10, 9, TRUE}, {7, 4, TRUE}, {3, 2, TRUE} };
+
+ SVN_ERR(svn_rangelist__parse(&rangelist, "3,5-7,10", pool));
+
+ SVN_ERR(svn_rangelist_reverse(rangelist, pool));
+
+ return verify_ranges_match(rangelist, expected_rangelist, 3,
+ "svn_rangelist_reverse", "reversal", pool);
+}
+
+static svn_error_t *
+test_rangelist_intersect(apr_pool_t *pool)
+{
+ svn_rangelist_t *rangelist1, *rangelist2, *intersection;
+
+ /* Expected intersection when considering inheritance. */
+ svn_merge_range_t intersection_consider_inheritance[] =
+ { {0, 1, TRUE}, {11, 12, TRUE}, {30, 32, FALSE}, {39, 42, TRUE} };
+
+ /* Expected intersection when ignoring inheritance. */
+ svn_merge_range_t intersection_ignore_inheritance[] =
+ { {0, 1, TRUE}, {2, 4, TRUE}, {11, 12, TRUE}, {30, 32, FALSE},
+ {39, 42, TRUE} };
+
+ SVN_ERR(svn_rangelist__parse(&rangelist1, "1-6,12-16,30-32*,40-42", pool));
+ SVN_ERR(svn_rangelist__parse(&rangelist2, "1,3-4*,7,9,11-12,31-34*,38-44",
+ pool));
+
+ /* Check the intersection while considering inheritance twice, reversing
+ the order of the rangelist arguments on the second call to
+ svn_rangelist_intersection. The order *should* have no effect on
+ the result -- see http://svn.haxx.se/dev/archive-2010-03/0351.shtml.
+
+ '3-4*' has different inheritance than '1-6', so no intersection is
+ expected. '30-32*' and '31-34*' have the same inheritance, so intersect
+ at '31-32*'. Per the svn_rangelist_intersect API, since both ranges
+ are non-inheritable, so is the result. */
+ SVN_ERR(svn_rangelist_intersect(&intersection, rangelist1, rangelist2,
+ TRUE, pool));
+
+ SVN_ERR(verify_ranges_match(intersection,
+ intersection_consider_inheritance,
+ 4, "svn_rangelist_intersect", "intersect",
+ pool));
+
+ SVN_ERR(svn_rangelist_intersect(&intersection, rangelist2, rangelist1,
+ TRUE, pool));
+
+ SVN_ERR(verify_ranges_match(intersection,
+ intersection_consider_inheritance,
+ 4, "svn_rangelist_intersect", "intersect",
+ pool));
+
+ /* Check the intersection while ignoring inheritance. The one difference
+ from when we consider inheritance is that '3-4*' and '1-6' now intersect,
+ since we don't care about inheritability, just the start and end ranges.
+ Per the svn_rangelist_intersect API, since only one range is
+ non-inheritable the result is inheritable. */
+ SVN_ERR(svn_rangelist_intersect(&intersection, rangelist1, rangelist2,
+ FALSE, pool));
+
+ SVN_ERR(verify_ranges_match(intersection,
+ intersection_ignore_inheritance,
+ 5, "svn_rangelist_intersect", "intersect",
+ pool));
+
+ SVN_ERR(svn_rangelist_intersect(&intersection, rangelist2, rangelist1,
+ FALSE, pool));
+
+ SVN_ERR(verify_ranges_match(intersection,
+ intersection_ignore_inheritance,
+ 5, "svn_rangelist_intersect", "intersect",
+ pool));
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_mergeinfo_intersect(apr_pool_t *pool)
+{
+ svn_merge_range_t expected_intersection[3] =
+ { {0, 1, TRUE}, {2, 4, TRUE}, {11, 12, TRUE} };
+ svn_rangelist_t *rangelist;
+ apr_hash_t *intersection;
+ apr_hash_t *info1, *info2;
+
+ SVN_ERR(svn_mergeinfo_parse(&info1, "/trunk: 1-6,12-16\n/foo: 31", pool));
+ SVN_ERR(svn_mergeinfo_parse(&info2, "/trunk: 1,3-4,7,9,11-12", pool));
+
+ SVN_ERR(svn_mergeinfo_intersect(&intersection, info1, info2, pool));
+ if (apr_hash_count(intersection) != 1)
+ return fail(pool, "Unexpected number of rangelists in mergeinfo "
+ "intersection: Expected %d, found %d", 1,
+ apr_hash_count(intersection));
+
+ rangelist = apr_hash_get(intersection, "/trunk", APR_HASH_KEY_STRING);
+ return verify_ranges_match(rangelist, expected_intersection, 3,
+ "svn_rangelist_intersect", "intersect", pool);
+}
+
+static svn_error_t *
+test_merge_mergeinfo(apr_pool_t *pool)
+{
+ int i;
+
+ /* Structures and constants for test_merge_mergeinfo() */
+ /* Number of svn_mergeinfo_merge test sets */
+ #define NBR_MERGEINFO_MERGES 12
+
+ /* Maximum number of expected paths in the results
+ of the svn_mergeinfo_merge tests */
+ #define MAX_NBR_MERGEINFO_PATHS 4
+
+ /* Maximum number of expected ranges in the results
+ of the svn_mergeinfo_merge tests */
+ #define MAX_NBR_MERGEINFO_RANGES 10
+
+ /* Struct to store a path and its expected ranges,
+ i.e. the expected result of an svn_mergeinfo_merge
+ test. */
+ struct mergeinfo_merge_path_range
+ {
+ const char *path;
+ int expected_n;
+ svn_merge_range_t expected_rngs[MAX_NBR_MERGEINFO_RANGES];
+ };
+
+ /* Struct for svn_mergeinfo_merge test data.
+ If MERGEINFO1 and MERGEINFO2 are parsed to a hash with
+ svn_mergeinfo_parse() and then merged with svn_mergeinfo_merge(),
+ the resulting hash should have EXPECTED_PATHS number of paths
+ mapped to rangelists and each mapping is described by PATH_RNGS
+ where PATH_RNGS->PATH is not NULL. */
+ struct mergeinfo_merge_test_data
+ {
+ const char *mergeinfo1;
+ const char *mergeinfo2;
+ int expected_paths;
+ struct mergeinfo_merge_path_range path_rngs[MAX_NBR_MERGEINFO_PATHS];
+ };
+
+ static struct mergeinfo_merge_test_data mergeinfo[NBR_MERGEINFO_MERGES] =
+ {
+ /* One path, intersecting inheritable ranges */
+ { "/trunk: 5-10",
+ "/trunk: 6", 1,
+ { {"/trunk", 1, { {4, 10, TRUE} } } } },
+
+ /* One path, intersecting non-inheritable ranges */
+ { "/trunk: 5-10*",
+ "/trunk: 6*", 1,
+ { {"/trunk", 1, { {4, 10, FALSE} } } } },
+
+ /* One path, intersecting ranges with different inheritability */
+ { "/trunk: 5-10",
+ "/trunk: 6*", 1,
+ { {"/trunk", 1, { {4, 10, TRUE} } } } },
+
+ /* One path, intersecting ranges with different inheritability */
+ { "/trunk: 5-10*",
+ "/trunk: 6", 1,
+ { {"/trunk", 3, { {4, 5, FALSE}, {5, 6, TRUE}, {6, 10, FALSE} } } } },
+
+ /* Adjacent ranges all inheritable ranges */
+ { "/trunk: 1,3,5-11,13",
+ "/trunk: 2,4,12,14-22", 1,
+ { {"/trunk", 1, { {0, 22, TRUE} } } } },
+
+ /* Adjacent ranges all non-inheritable ranges */
+ { "/trunk: 1*,3*,5-11*,13*",
+ "/trunk: 2*,4*,12*,14-22*", 1,
+ { {"/trunk", 1, { {0, 22, FALSE} } } } },
+
+ /* Adjacent ranges differing inheritability */
+ { "/trunk: 1*,3*,5-11*,13*",
+ "/trunk: 2,4,12,14-22", 1,
+ { {"/trunk", 8, { { 0, 1, FALSE}, { 1, 2, TRUE},
+ { 2, 3, FALSE}, { 3, 4, TRUE},
+ { 4, 11, FALSE}, {11, 12, TRUE},
+ {12, 13, FALSE}, {13, 22, TRUE} } } } },
+
+ /* Adjacent ranges differing inheritability */
+ { "/trunk: 1,3,5-11,13",
+ "/trunk: 2*,4*,12*,14-22*", 1,
+ { {"/trunk", 8, { { 0, 1, TRUE}, { 1, 2, FALSE},
+ { 2, 3, TRUE}, { 3, 4, FALSE},
+ { 4, 11, TRUE}, {11, 12, FALSE},
+ {12, 13, TRUE}, {13, 22, FALSE} } } } },
+
+ /* Two paths all inheritable ranges */
+ { "/trunk::1: 3,5,7-9,10,11,13,14\n/fred:8-10",
+ "/trunk::1: 1-4,6\n/fred:9-12", 2,
+ { {"/trunk::1", 2, { {0, 11, TRUE}, {12, 14, TRUE} } },
+ {"/fred", 1, { {7, 12, TRUE} } } } },
+
+ /* Two paths all non-inheritable ranges */
+ { "/trunk: 3*,5*,7-9*,10*,11*,13*,14*\n/fred:8-10*",
+ "/trunk: 1-4*,6*\n/fred:9-12*", 2,
+ { {"/trunk", 2, { {0, 11, FALSE}, {12, 14, FALSE} } },
+ {"/fred", 1, { {7, 12, FALSE} } } } },
+
+ /* Two paths mixed inheritability */
+ { "/trunk: 3,5*,7-9,10,11*,13,14\n/fred:8-10",
+ "/trunk: 1-4,6\n/fred:9-12*", 2,
+ { {"/trunk", 5, { { 0, 4, TRUE }, { 4, 5, FALSE}, {5, 10, TRUE},
+ {10, 11, FALSE}, {12, 14, TRUE } } },
+ {"/fred", 2, { { 7, 10, TRUE }, {10, 12, FALSE} } } } },
+
+ /* A slew of different paths but no ranges to be merged */
+ { "/trunk: 3,5-9*\n/betty: 2-4",
+ "/fred: 1-18\n/:barney: 1,3-43", 4,
+ { {"/trunk", 2, { {2, 3, TRUE}, {4, 9, FALSE} } },
+ {"/betty", 1, { {1, 4, TRUE} } },
+ {"/:barney", 2, { {0, 1, TRUE}, {2, 43, TRUE} } },
+ {"/fred", 1, { {0, 18, TRUE} } } } }
+ };
+
+ for (i = 0; i < NBR_MERGEINFO_MERGES; i++)
+ {
+ int j;
+ svn_string_t *info2_starting, *info2_ending;
+ apr_hash_t *info1, *info2;
+
+ SVN_ERR(svn_mergeinfo_parse(&info1, mergeinfo[i].mergeinfo1, pool));
+ SVN_ERR(svn_mergeinfo_parse(&info2, mergeinfo[i].mergeinfo2, pool));
+
+ /* Make a copy of info2. We will merge it into info1, but info2
+ should remain unchanged. Store the mergeinfo as a svn_string_t
+ rather than making a copy and using svn_mergeinfo_diff(). Since
+ that API uses some of the underlying code as svn_mergeinfo_merge
+ we might mask potential errors. */
+ SVN_ERR(svn_mergeinfo_to_string(&info2_starting, info2, pool));
+
+ SVN_ERR(svn_mergeinfo_merge(info1, info2, pool));
+ if (mergeinfo[i].expected_paths != (int)apr_hash_count(info1))
+ return fail(pool, "Wrong number of paths in merged mergeinfo");
+
+ /* Check that info2 remained unchanged. */
+ SVN_ERR(svn_mergeinfo_to_string(&info2_ending, info2, pool));
+
+ if (strcmp(info2_ending->data, info2_starting->data))
+ return fail(pool,
+ apr_psprintf(pool,
+ "svn_mergeinfo_merge case %i "
+ "modified its CHANGES arg from "
+ "%s to %s", i, info2_starting->data,
+ info2_ending->data));
+
+ for (j = 0; j < mergeinfo[i].expected_paths; j++)
+ {
+ svn_rangelist_t *rangelist =
+ apr_hash_get(info1, mergeinfo[i].path_rngs[j].path,
+ APR_HASH_KEY_STRING);
+ if (!rangelist)
+ return fail(pool, "Missing path '%s' in merged mergeinfo",
+ mergeinfo[i].path_rngs[j].path);
+ SVN_ERR(verify_ranges_match(
+ rangelist,
+ mergeinfo[i].path_rngs[j].expected_rngs,
+ mergeinfo[i].path_rngs[j].expected_n,
+ apr_psprintf(pool, "svn_rangelist_merge case %i:%i", i, j),
+ "merge", pool));
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Exercise svn_rangelist_remove() over a table of whiteboard/eraser pairs,
+   once considering inheritance (j == 0) and once ignoring it (j == 1).
+   Also verifies that neither input rangelist is modified by the call.
+   All failures are composed into a single error chain rather than
+   returning on the first mismatch. */
+static svn_error_t *
+test_remove_rangelist(apr_pool_t *pool)
+{
+  int i, j;
+  svn_error_t *err, *child_err;
+  svn_rangelist_t *output, *eraser, *whiteboard;
+
+  /* Struct for svn_rangelist_remove test data.
+     Parse WHITEBOARD and ERASER to hashes and then get the rangelist for
+     path 'A' from both.
+
+     Remove ERASER's rangelist from WHITEBOARD's twice, once while
+     considering inheritance and once while not.  In the first case the
+     resulting rangelist should have EXPECTED_RANGES_CONSIDER_INHERITANCE
+     number of ranges and these ranges should match the ranges in
+     EXPECTED_REMOVED_CONSIDER_INHERITANCE.  In the second case there
+     should be EXPECTED_RANGES_IGNORE_INHERITANCE number of ranges and
+     these should match EXPECTED_REMOVED_IGNORE_INHERITANCE */
+  struct rangelist_remove_test_data
+  {
+    const char *whiteboard;
+    const char *eraser;
+    int expected_ranges_consider_inheritance;
+    svn_merge_range_t expected_removed_consider_inheritance[10];
+    int expected_ranges_ignore_inheritance;
+    svn_merge_range_t expected_removed_ignore_inheritance[10];
+  };
+
+  #define SIZE_OF_RANGE_REMOVE_TEST_ARRAY 15
+
+  /* The actual test data */
+  struct rangelist_remove_test_data test_data[SIZE_OF_RANGE_REMOVE_TEST_ARRAY] =
+    {
+      /* Eraser is a proper subset of whiteboard */
+      {"1-44",  "5",  2, { {0,  4, TRUE }, {5, 44, TRUE }},
+                      2, { {0,  4, TRUE }, {5, 44, TRUE }}},
+      {"1-44*", "5",  1, { {0, 44, FALSE} },
+                      2, { {0,  4, FALSE}, {5, 44, FALSE}}},
+      {"1-44",  "5*", 1, { {0, 44, TRUE } },
+                      2, { {0,  4, TRUE }, {5, 44, TRUE }}},
+      {"1-44*", "5*", 2, { {0,  4, FALSE}, {5, 44, FALSE}},
+                      2, { {0,  4, FALSE}, {5, 44, FALSE}}},
+      /* Non-intersecting ranges...nothing is removed */
+      {"2-9,14-19",   "12",  2, { {1, 9, TRUE }, {13, 19, TRUE }},
+                             2, { {1, 9, TRUE }, {13, 19, TRUE }}},
+      {"2-9*,14-19*", "12",  2, { {1, 9, FALSE}, {13, 19, FALSE}},
+                             2, { {1, 9, FALSE}, {13, 19, FALSE}}},
+      {"2-9,14-19",   "12*", 2, { {1, 9, TRUE }, {13, 19, TRUE }},
+                             2, { {1, 9, TRUE }, {13, 19, TRUE }}},
+      {"2-9*,14-19*", "12*", 2, { {1, 9, FALSE}, {13, 19, FALSE}},
+                             2, { {1, 9, FALSE}, {13, 19, FALSE}}},
+      /* Eraser overlaps whiteboard */
+      {"1,9-17",  "12-20",  2, { {0, 1, TRUE }, {8, 11, TRUE }},
+                            2, { {0, 1, TRUE }, {8, 11, TRUE }}},
+      {"1,9-17*", "12-20",  2, { {0, 1, TRUE }, {8, 17, FALSE}},
+                            2, { {0, 1, TRUE }, {8, 11, FALSE}}},
+      {"1,9-17",  "12-20*", 2, { {0, 1, TRUE }, {8, 17, TRUE }},
+                            2, { {0, 1, TRUE }, {8, 11, TRUE }}},
+      {"1,9-17*", "12-20*", 2, { {0, 1, TRUE }, {8, 11, FALSE}},
+                            2, { {0, 1, TRUE }, {8, 11, FALSE}}},
+      /* Empty rangelist */
+      {"",           "",           0, { {0, 0, FALSE}},
+                                   0, { {0, 0, FALSE}}},
+      {"",           "5-8,10-100", 0, { {0, 0, FALSE}},
+                                   0, { {0, 0, FALSE}}},
+      {"5-8,10-100", "",           2, { {4, 8, TRUE }, {9, 100, TRUE }},
+                                   2, { {4, 8, TRUE }, {9, 100, TRUE }}}
+    };
+
+  err = child_err = SVN_NO_ERROR;
+  for (j = 0; j < 2; j++)
+    {
+      for (i = 0; i < SIZE_OF_RANGE_REMOVE_TEST_ARRAY; i++)
+        {
+          int expected_nbr_ranges;
+          svn_merge_range_t *expected_ranges;
+          svn_string_t *eraser_starting;
+          svn_string_t *eraser_ending;
+          svn_string_t *whiteboard_starting;
+          svn_string_t *whiteboard_ending;
+
+          SVN_ERR(svn_rangelist__parse(&eraser, test_data[i].eraser, pool));
+          SVN_ERR(svn_rangelist__parse(&whiteboard, test_data[i].whiteboard, pool));
+
+          /* Represent empty mergeinfo with an empty rangelist. */
+          if (eraser == NULL)
+            eraser = apr_array_make(pool, 0, sizeof(*eraser));
+          if (whiteboard == NULL)
+            whiteboard = apr_array_make(pool, 0, sizeof(*whiteboard));
+
+          /* First pass try removal considering inheritance, on the
+             second pass ignore it. */
+          if (j == 0)
+            {
+              expected_nbr_ranges = (test_data[i]).expected_ranges_consider_inheritance;
+              expected_ranges = (test_data[i]).expected_removed_consider_inheritance;
+
+            }
+          else
+            {
+              expected_nbr_ranges = (test_data[i]).expected_ranges_ignore_inheritance;
+              expected_ranges = (test_data[i]).expected_removed_ignore_inheritance;
+
+            }
+
+          /* Make a copies of whiteboard and eraser.  They should not be
+             modified by svn_rangelist_remove(). */
+          SVN_ERR(svn_rangelist_to_string(&eraser_starting, eraser, pool));
+          SVN_ERR(svn_rangelist_to_string(&whiteboard_starting, whiteboard,
+                                          pool));
+
+          SVN_ERR(svn_rangelist_remove(&output, eraser, whiteboard,
+                                       j == 0,
+                                       pool));
+          child_err = verify_ranges_match(output, expected_ranges,
+                                          expected_nbr_ranges,
+                                          apr_psprintf(pool,
+                                                       "svn_rangelist_remove "
+                                                       "case %i", i),
+                                          "remove", pool);
+
+          /* Collect all the errors rather than returning on the first. */
+          if (child_err)
+            {
+              if (err)
+                svn_error_compose(err, child_err);
+              else
+                err = child_err;
+            }
+
+          /* Check that eraser and whiteboard were not modified. */
+          SVN_ERR(svn_rangelist_to_string(&eraser_ending, eraser, pool));
+          SVN_ERR(svn_rangelist_to_string(&whiteboard_ending, whiteboard,
+                                          pool));
+          if (strcmp(eraser_starting->data, eraser_ending->data))
+            {
+              child_err = fail(pool,
+                               apr_psprintf(pool,
+                                            "svn_rangelist_remove case %i "
+                                            "modified its ERASER arg from "
+                                            "%s to %s when %sconsidering "
+                                            "inheritance", i,
+                                            eraser_starting->data,
+                                            eraser_ending->data,
+                                            j ? "" : "not "));
+              if (err)
+                svn_error_compose(err, child_err);
+              else
+                err = child_err;
+            }
+          if (strcmp(whiteboard_starting->data, whiteboard_ending->data))
+            {
+              child_err = fail(pool,
+                               apr_psprintf(pool,
+                                            "svn_rangelist_remove case %i "
+                                            "modified its WHITEBOARD arg "
+                                            "from %s to %s when "
+                                            "%sconsidering inheritance", i,
+                                            whiteboard_starting->data,
+                                            whiteboard_ending->data,
+                                            j ? "" : "not "));
+              if (err)
+                svn_error_compose(err, child_err);
+              else
+                err = child_err;
+            }
+        }
+    }
+  return err;
+}
+
+#define RANDOM_REV_ARRAY_LENGTH 100
+
+/* Random number seed.  Re-initialised from apr_time_now() at the start of
+   each randomised test below, so failures are not reproducible across runs
+   unless the failing seed is recorded. */
+static apr_uint32_t random_rev_array_seed;
+
+/* Set a random 3/4-ish of the elements of array REVS[RANDOM_REV_ARRAY_LENGTH]
+ * to TRUE and the rest to FALSE.  Each element is TRUE with probability 3/4:
+ * svn_test_rand() yields a uniform 32-bit value and 0x40000000 is one quarter
+ * of that range. */
+static void
+randomly_fill_rev_array(svn_boolean_t *revs)
+{
+  int i;
+  for (i = 0; i < RANDOM_REV_ARRAY_LENGTH; i++)
+    {
+      apr_uint32_t next = svn_test_rand(&random_rev_array_seed);
+      revs[i] = (next < 0x40000000) ? 0 : 1;
+    }
+}
+
+/* Set *RANGELIST to a rangelist representing the revisions that are marked
+ * with TRUE in the array REVS[RANDOM_REV_ARRAY_LENGTH].
+ *
+ * Implemented by building an svn:mergeinfo string of the form
+ * "/trunk: r1,r2,..." and parsing it, so the result is guaranteed to be in
+ * the same canonical form svn_mergeinfo_parse() produces.  The returned
+ * rangelist is allocated in POOL. */
+static svn_error_t *
+rev_array_to_rangelist(svn_rangelist_t **rangelist,
+                       svn_boolean_t *revs,
+                       apr_pool_t *pool)
+{
+  svn_stringbuf_t *buf = svn_stringbuf_create("/trunk: ", pool);
+  svn_boolean_t first = TRUE;
+  apr_hash_t *mergeinfo;
+  int i;
+
+  for (i = 0; i < RANDOM_REV_ARRAY_LENGTH; i++)
+    {
+      if (revs[i])
+        {
+          if (first)
+            first = FALSE;
+          else
+            svn_stringbuf_appendcstr(buf, ",");
+          svn_stringbuf_appendcstr(buf, apr_psprintf(pool, "%d", i));
+        }
+    }
+
+  SVN_ERR(svn_mergeinfo_parse(&mergeinfo, buf->data, pool));
+  *rangelist = apr_hash_get(mergeinfo, "/trunk", APR_HASH_KEY_STRING);
+
+  return SVN_NO_ERROR;
+}
+
+/* Randomised check of svn_rangelist_remove(): generate two random revision
+   sets, compute the expected difference (second minus first) directly on the
+   boolean arrays, then verify svn_rangelist_remove() produces the same
+   ranges.  Runs 20 iterations, each in a cleared iteration pool. */
+static svn_error_t *
+test_rangelist_remove_randomly(apr_pool_t *pool)
+{
+  int i;
+  apr_pool_t *iterpool;
+
+  random_rev_array_seed = (apr_uint32_t) apr_time_now();
+
+  iterpool = svn_pool_create(pool);
+
+  for (i = 0; i < 20; i++)
+    {
+      svn_boolean_t first_revs[RANDOM_REV_ARRAY_LENGTH],
+        second_revs[RANDOM_REV_ARRAY_LENGTH],
+        expected_revs[RANDOM_REV_ARRAY_LENGTH];
+      svn_rangelist_t *first_rangelist, *second_rangelist,
+        *expected_rangelist, *actual_rangelist;
+      /* There will be at most RANDOM_REV_ARRAY_LENGTH ranges in
+         expected_rangelist. */
+      svn_merge_range_t expected_range_array[RANDOM_REV_ARRAY_LENGTH];
+      int j;
+
+      svn_pool_clear(iterpool);
+
+      randomly_fill_rev_array(first_revs);
+      randomly_fill_rev_array(second_revs);
+      /* There is no change numbered "r0" */
+      first_revs[0] = FALSE;
+      second_revs[0] = FALSE;
+      for (j = 0; j < RANDOM_REV_ARRAY_LENGTH; j++)
+        expected_revs[j] = second_revs[j] && !first_revs[j];
+
+      SVN_ERR(rev_array_to_rangelist(&first_rangelist, first_revs, iterpool));
+      SVN_ERR(rev_array_to_rangelist(&second_rangelist, second_revs, iterpool));
+      SVN_ERR(rev_array_to_rangelist(&expected_rangelist, expected_revs,
+                                     iterpool));
+
+      for (j = 0; j < expected_rangelist->nelts; j++)
+        {
+          expected_range_array[j] = *(APR_ARRAY_IDX(expected_rangelist, j,
+                                                    svn_merge_range_t *));
+        }
+
+      SVN_ERR(svn_rangelist_remove(&actual_rangelist, first_rangelist,
+                                   second_rangelist, TRUE, iterpool));
+
+      SVN_ERR(verify_ranges_match(actual_rangelist,
+                                  expected_range_array,
+                                  expected_rangelist->nelts,
+                                  "svn_rangelist_remove random call",
+                                  "remove", iterpool));
+    }
+
+  svn_pool_destroy(iterpool);
+
+  return SVN_NO_ERROR;
+}
+
+/* Randomised check of svn_rangelist_intersect(): generate two random
+   revision sets, compute the expected intersection directly on the boolean
+   arrays, then verify svn_rangelist_intersect() produces the same ranges.
+   Structurally identical to test_rangelist_remove_randomly() except for the
+   expected-set predicate and the API under test. */
+static svn_error_t *
+test_rangelist_intersect_randomly(apr_pool_t *pool)
+{
+  int i;
+  apr_pool_t *iterpool;
+
+  random_rev_array_seed = (apr_uint32_t) apr_time_now();
+
+  iterpool = svn_pool_create(pool);
+
+  for (i = 0; i < 20; i++)
+    {
+      svn_boolean_t first_revs[RANDOM_REV_ARRAY_LENGTH],
+        second_revs[RANDOM_REV_ARRAY_LENGTH],
+        expected_revs[RANDOM_REV_ARRAY_LENGTH];
+      svn_rangelist_t *first_rangelist, *second_rangelist,
+        *expected_rangelist, *actual_rangelist;
+      /* There will be at most RANDOM_REV_ARRAY_LENGTH ranges in
+         expected_rangelist. */
+      svn_merge_range_t expected_range_array[RANDOM_REV_ARRAY_LENGTH];
+      int j;
+
+      svn_pool_clear(iterpool);
+
+      randomly_fill_rev_array(first_revs);
+      randomly_fill_rev_array(second_revs);
+      /* There is no change numbered "r0" */
+      first_revs[0] = FALSE;
+      second_revs[0] = FALSE;
+      for (j = 0; j < RANDOM_REV_ARRAY_LENGTH; j++)
+        expected_revs[j] = second_revs[j] && first_revs[j];
+
+      SVN_ERR(rev_array_to_rangelist(&first_rangelist, first_revs, iterpool));
+      SVN_ERR(rev_array_to_rangelist(&second_rangelist, second_revs, iterpool));
+      SVN_ERR(rev_array_to_rangelist(&expected_rangelist, expected_revs,
+                                     iterpool));
+
+      for (j = 0; j < expected_rangelist->nelts; j++)
+        {
+          expected_range_array[j] = *(APR_ARRAY_IDX(expected_rangelist, j,
+                                                    svn_merge_range_t *));
+        }
+
+      SVN_ERR(svn_rangelist_intersect(&actual_rangelist, first_rangelist,
+                                      second_rangelist, TRUE, iterpool));
+
+      SVN_ERR(verify_ranges_match(actual_rangelist,
+                                  expected_range_array,
+                                  expected_rangelist->nelts,
+                                  "svn_rangelist_intersect random call",
+                                  "intersect", iterpool));
+    }
+
+  svn_pool_destroy(iterpool);
+
+  return SVN_NO_ERROR;
+}
+
+/* ### Share code with test_diff_mergeinfo() and test_remove_rangelist(). */
+/* Check svn_mergeinfo_remove(): erase one parsed mergeinfo hash from
+   another and verify the leftover ranges on /trunk.
+   NBR_RANGELIST_DELTAS is defined earlier in this file (it is #undef'd
+   below, after its last use here). */
+static svn_error_t *
+test_remove_mergeinfo(apr_pool_t *pool)
+{
+  apr_hash_t *output, *whiteboard, *eraser;
+  svn_merge_range_t expected_rangelist_remainder[NBR_RANGELIST_DELTAS] =
+    { {6, 7, TRUE}, {8, 9, TRUE}, {10, 11, TRUE}, {32, 34, TRUE} };
+
+  SVN_ERR(svn_mergeinfo_parse(&whiteboard,
+                              "/trunk: 1,3-4,7,9,11-12,31-34", pool));
+  SVN_ERR(svn_mergeinfo_parse(&eraser, "/trunk: 1-6,12-16,30-32", pool));
+
+  /* Leftover on /trunk should be the set (7, 9, 11, 33-34) */
+  SVN_ERR(svn_mergeinfo_remove(&output, eraser, whiteboard, pool));
+
+  /* Verify calculation of range list remainder. */
+  return verify_mergeinfo_deltas(output, expected_rangelist_remainder,
+                                 "svn_mergeinfo_remove", "leftover", pool);
+}
+#undef NBR_RANGELIST_DELTAS
+
+/* Check svn_rangelist_to_string(): parse the file-level constant
+   `mergeinfo1` (defined earlier in this file) and verify that the /trunk
+   rangelist round-trips to the expected compact string form. */
+static svn_error_t *
+test_rangelist_to_string(apr_pool_t *pool)
+{
+  svn_rangelist_t *result;
+  svn_string_t *output;
+  svn_string_t *expected = svn_string_create("3,5,7-11,13-14", pool);
+  apr_hash_t *info1;
+
+  SVN_ERR(svn_mergeinfo_parse(&info1, mergeinfo1, pool));
+
+  result = apr_hash_get(info1, "/trunk", APR_HASH_KEY_STRING);
+  if (!result)
+    return fail(pool, "Missing path in parsed mergeinfo");
+
+  SVN_ERR(svn_rangelist_to_string(&output, result, pool));
+
+  if (!svn_string_compare(expected, output))
+    return fail(pool, "Rangelist string not what we expected");
+
+  return SVN_NO_ERROR;
+}
+
+/* Check svn_mergeinfo_to_string(): verify the canonical string form of the
+   parsed `mergeinfo1` constant, and verify that relative (un-slashed) merge
+   source keys are tolerated on input but come back absolute in the output. */
+static svn_error_t *
+test_mergeinfo_to_string(apr_pool_t *pool)
+{
+  svn_string_t *output;
+  svn_string_t *expected;
+  apr_hash_t *info1, *info2;
+  expected = svn_string_create("/fred:8-10\n/trunk:3,5,7-11,13-14", pool);
+
+  SVN_ERR(svn_mergeinfo_parse(&info1, mergeinfo1, pool));
+
+  SVN_ERR(svn_mergeinfo_to_string(&output, info1, pool));
+
+  if (!svn_string_compare(expected, output))
+    return fail(pool, "Mergeinfo string not what we expected");
+
+  /* Manually construct some mergeinfo with relative path
+     merge source keys.  These should be tolerated as input
+     to svn_mergeinfo_to_string(), but the resulting svn_string_t
+     should have absolute keys. */
+  info2 = apr_hash_make(pool);
+  apr_hash_set(info2, "fred",
+               APR_HASH_KEY_STRING,
+               apr_hash_get(info1, "/fred", APR_HASH_KEY_STRING));
+  apr_hash_set(info2, "trunk",
+               APR_HASH_KEY_STRING,
+               apr_hash_get(info1, "/trunk", APR_HASH_KEY_STRING));
+  SVN_ERR(svn_mergeinfo_to_string(&output, info2, pool));
+
+  if (!svn_string_compare(expected, output))
+    return fail(pool, "Mergeinfo string not what we expected");
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Exercise svn_rangelist_merge() over a table of rangelist-string pairs
+   covering non-intersecting, intersecting, adjacent and multi-range inputs
+   with every combination of inheritable/non-inheritable ranges.  Verifies
+   both the merged result and that the CHANGES argument (rangelist2) is left
+   unmodified.  All failures are composed into a single error chain. */
+static svn_error_t *
+test_rangelist_merge(apr_pool_t *pool)
+{
+  int i;
+  svn_error_t *err, *child_err;
+  svn_rangelist_t *rangelist1, *rangelist2;
+
+  /* Struct for svn_rangelist_merge test data.  Similar to
+     mergeinfo_merge_test_data struct in svn_mergeinfo_merge() test. */
+  struct rangelist_merge_test_data
+  {
+    const char *mergeinfo1;
+    const char *mergeinfo2;
+    int expected_ranges;
+    svn_merge_range_t expected_merge[6];
+  };
+
+  #define SIZE_OF_RANGE_MERGE_TEST_ARRAY 68
+  /* The actual test data. */
+  struct rangelist_merge_test_data test_data[SIZE_OF_RANGE_MERGE_TEST_ARRAY] =
+    {
+      /* Non-intersecting ranges */
+      {"1-44",    "70-101",  2, {{ 0, 44, TRUE }, {69, 101, TRUE }}},
+      {"1-44*",   "70-101",  2, {{ 0, 44, FALSE}, {69, 101, TRUE }}},
+      {"1-44",    "70-101*", 2, {{ 0, 44, TRUE }, {69, 101, FALSE}}},
+      {"1-44*",   "70-101*", 2, {{ 0, 44, FALSE}, {69, 101, FALSE}}},
+      {"70-101",  "1-44",    2, {{ 0, 44, TRUE }, {69, 101, TRUE }}},
+      {"70-101*", "1-44",    2, {{ 0, 44, TRUE }, {69, 101, FALSE}}},
+      {"70-101",  "1-44*",   2, {{ 0, 44, FALSE}, {69, 101, TRUE }}},
+      {"70-101*", "1-44*",   2, {{ 0, 44, FALSE}, {69, 101, FALSE}}},
+
+      /* Intersecting ranges with same starting and ending revisions */
+      {"4-20",  "4-20",  1, {{3, 20, TRUE }}},
+      {"4-20*", "4-20",  1, {{3, 20, TRUE }}},
+      {"4-20",  "4-20*", 1, {{3, 20, TRUE }}},
+      {"4-20*", "4-20*", 1, {{3, 20, FALSE}}},
+
+      /* Intersecting ranges with same starting revision */
+      {"6-17",  "6-12",  1, {{5, 17, TRUE}}},
+      {"6-17*", "6-12",  2, {{5, 12, TRUE }, {12, 17, FALSE}}},
+      {"6-17",  "6-12*", 1, {{5, 17, TRUE }}},
+      {"6-17*", "6-12*", 1, {{5, 17, FALSE}}},
+      {"6-12",  "6-17",  1, {{5, 17, TRUE }}},
+      {"6-12*", "6-17",  1, {{5, 17, TRUE }}},
+      {"6-12",  "6-17*", 2, {{5, 12, TRUE }, {12, 17, FALSE}}},
+      {"6-12*", "6-17*", 1, {{5, 17, FALSE}}},
+
+      /* Intersecting ranges with same ending revision */
+      {"5-77",   "44-77",  1, {{4, 77, TRUE }}},
+      {"5-77*",  "44-77",  2, {{4, 43, FALSE}, {43, 77, TRUE}}},
+      {"5-77",   "44-77*", 1, {{4, 77, TRUE }}},
+      {"5-77*",  "44-77*", 1, {{4, 77, FALSE}}},
+      {"44-77",  "5-77",   1, {{4, 77, TRUE }}},
+      {"44-77*", "5-77",   1, {{4, 77, TRUE }}},
+      {"44-77",  "5-77*",  2, {{4, 43, FALSE}, {43, 77, TRUE}}},
+      {"44-77*", "5-77*",  1, {{4, 77, FALSE}}},
+
+      /* Intersecting ranges with different starting and ending revision
+         where one range is a proper subset of the other. */
+      {"12-24",  "20-23",  1, {{11, 24, TRUE }}},
+      {"12-24*", "20-23",  3, {{11, 19, FALSE}, {19, 23, TRUE },
+                               {23, 24, FALSE}}},
+      {"12-24",  "20-23*", 1, {{11, 24, TRUE }}},
+      {"12-24*", "20-23*", 1, {{11, 24, FALSE}}},
+      {"20-23",  "12-24",  1, {{11, 24, TRUE }}},
+      {"20-23*", "12-24",  1, {{11, 24, TRUE }}},
+      {"20-23",  "12-24*", 3, {{11, 19, FALSE}, {19, 23, TRUE },
+                               {23, 24, FALSE}}},
+      {"20-23*", "12-24*", 1, {{11, 24, FALSE}}},
+
+      /* Intersecting ranges with different starting and ending revision
+         where neither range is a proper subset of the other. */
+      {"50-73",  "60-99",  1, {{49, 99, TRUE }}},
+      {"50-73*", "60-99",  2, {{49, 59, FALSE}, {59, 99, TRUE }}},
+      {"50-73",  "60-99*", 2, {{49, 73, TRUE }, {73, 99, FALSE}}},
+      {"50-73*", "60-99*", 1, {{49, 99, FALSE}}},
+      {"60-99",  "50-73",  1, {{49, 99, TRUE }}},
+      {"60-99*", "50-73",  2, {{49, 73, TRUE }, {73, 99, FALSE}}},
+      {"60-99",  "50-73*", 2, {{49, 59, FALSE}, {59, 99, TRUE }}},
+      {"60-99*", "50-73*", 1, {{49, 99, FALSE}}},
+
+      /* Multiple ranges. */
+      {"1-5,7,12-13",    "2-17",  1, {{0, 17, TRUE }}},
+      {"1-5*,7*,12-13*", "2-17*", 1, {{0, 17, FALSE}}},
+
+      {"1-5,7,12-13",    "2-17*", 6,
+       {{0,  5, TRUE }, { 5,  6, FALSE}, { 6,  7, TRUE },
+        {7, 11, FALSE}, {11, 13, TRUE }, {13, 17, FALSE}}},
+
+      {"1-5*,7*,12-13*", "2-17", 2,
+       {{0, 1, FALSE}, {1, 17, TRUE }}},
+
+      {"2-17",  "1-5,7,12-13",    1, {{0, 17, TRUE }}},
+      {"2-17*", "1-5*,7*,12-13*", 1, {{0, 17, FALSE}}},
+
+      {"2-17*", "1-5,7,12-13", 6,
+       {{0,  5, TRUE }, { 5,  6, FALSE}, { 6,  7, TRUE },
+        {7, 11, FALSE}, {11, 13, TRUE }, {13, 17, FALSE}}},
+
+      {"2-17", "1-5*,7*,12-13*", 2,
+       {{0, 1, FALSE}, {1, 17, TRUE}}},
+
+      {"3-4*,10-15,20", "5-60*", 5,
+       {{2, 9, FALSE}, {9, 15, TRUE}, {15, 19, FALSE},{19, 20, TRUE},
+        {20, 60, FALSE}}},
+
+      {"5-60*", "3-4*,10-15,20", 5,
+       {{2, 9, FALSE}, {9, 15, TRUE}, {15, 19, FALSE},{19, 20, TRUE},
+        {20, 60, FALSE}}},
+
+      {"3-4*,50-100*", "5-60*", 1, {{2, 100, FALSE}}},
+
+      {"5-60*", "3-4*,50-100*", 1, {{2, 100, FALSE}}},
+
+      {"3-4*,50-100", "5-60*", 2, {{2, 49, FALSE}, {49, 100, TRUE}}},
+
+      {"5-60*", "3-4*,50-100", 2, {{2, 49, FALSE}, {49, 100, TRUE}}},
+
+      {"3-4,50-100*", "5-60", 2, {{2, 60, TRUE}, {60, 100, FALSE}}},
+
+      {"5-60", "3-4,50-100*", 2, {{2, 60, TRUE}, {60, 100, FALSE}}},
+
+      {"5,9,11-15,17,200-300,999", "7-50", 4,
+       {{4, 5, TRUE}, {6, 50, TRUE}, {199, 300, TRUE}, {998, 999, TRUE}}},
+
+      /* A rangelist merged with an empty rangelist should equal the
+         non-empty rangelist but in compacted form. */
+      {"1-44,45,46,47-50", "", 1, {{ 0, 50, TRUE }}},
+      {"1,2,3,4,5,6,7,8", "", 1, {{ 0, 8, TRUE }}},
+      {"6-10,12-13,14,15,16-22", "", 2,
+       {{ 5, 10, TRUE }, { 11, 22, TRUE }}},
+      {"", "1-44,45,46,47-50", 1, {{ 0, 50, TRUE }}},
+      {"", "1,2,3,4,5,6,7,8", 1, {{ 0, 8, TRUE }}},
+      {"", "6-10,12-13,14,15,16-22", 2,
+       {{ 5, 10, TRUE }, { 11, 22, TRUE }}},
+
+      /* An empty rangelist merged with an empty rangelist is, drum-roll
+         please, an empty rangelist. */
+      {"", "", 0, {{0, 0, FALSE}}}
+    };
+
+  err = child_err = SVN_NO_ERROR;
+  for (i = 0; i < SIZE_OF_RANGE_MERGE_TEST_ARRAY; i++)
+    {
+      svn_string_t *rangelist2_starting, *rangelist2_ending;
+
+      SVN_ERR(svn_rangelist__parse(&rangelist1, test_data[i].mergeinfo1, pool));
+      SVN_ERR(svn_rangelist__parse(&rangelist2, test_data[i].mergeinfo2, pool));
+
+      /* Create empty rangelists if necessary. */
+      if (rangelist1 == NULL)
+        rangelist1 = apr_array_make(pool, 0, sizeof(svn_merge_range_t *));
+      if (rangelist2 == NULL)
+        rangelist2 = apr_array_make(pool, 0, sizeof(svn_merge_range_t *));
+
+      /* Make a copy of rangelist2.  We will merge it into rangelist1, but
+         rangelist2 should remain unchanged. */
+      SVN_ERR(svn_rangelist_to_string(&rangelist2_starting, rangelist2,
+                                      pool));
+      SVN_ERR(svn_rangelist_merge(&rangelist1, rangelist2, pool));
+      child_err = verify_ranges_match(rangelist1,
+                                      (test_data[i]).expected_merge,
+                                      (test_data[i]).expected_ranges,
+                                      apr_psprintf(pool,
+                                                   "svn_rangelist_merge "
+                                                   "case %i", i),
+                                      "merge", pool);
+
+      /* Collect all the errors rather than returning on the first. */
+      if (child_err)
+        {
+          if (err)
+            svn_error_compose(err, child_err);
+          else
+            err = child_err;
+        }
+
+      /* Check that rangelist2 remains unchanged. */
+      SVN_ERR(svn_rangelist_to_string(&rangelist2_ending, rangelist2, pool));
+      if (strcmp(rangelist2_ending->data, rangelist2_starting->data))
+        {
+          child_err = fail(pool,
+                           apr_psprintf(pool,
+                                        "svn_rangelist_merge case %i "
+                                        "modified its CHANGES arg from "
+                                        "%s to %s", i,
+                                        rangelist2_starting->data,
+                                        rangelist2_ending->data));
+          if (err)
+            svn_error_compose(err, child_err);
+          else
+            err = child_err;
+        }
+    }
+  return err;
+}
+
+/* Exercise svn_rangelist_diff() over a table of from/to rangelist pairs,
+   checking the added and deleted ranges both when considering and when
+   ignoring inheritance differences.  All failures are composed into a
+   single error chain.
+   NOTE(review): the {"6*", "6", ...} case appears twice in the table
+   (indices 6 and 8) — presumably an accidental duplicate; harmless but
+   worth confirming upstream. */
+static svn_error_t *
+test_rangelist_diff(apr_pool_t *pool)
+{
+  int i;
+  svn_error_t *err, *child_err;
+  svn_rangelist_t *from, *to, *added, *deleted;
+
+  /* Structure containing two ranges to diff and the expected output of the
+     diff both when considering and ignoring range inheritance. */
+  struct rangelist_diff_test_data
+  {
+    /* svn:mergeinfo string representations */
+    const char *from;
+    const char *to;
+
+    /* Expected results for performing svn_rangelist_diff
+       while considering differences in inheritability to be real
+       differences. */
+    int expected_add_ranges;
+    svn_merge_range_t expected_adds[10];
+    int expected_del_ranges;
+    svn_merge_range_t expected_dels[10];
+
+    /* Expected results for performing svn_rangelist_diff
+       while ignoring differences in inheritability. */
+    int expected_add_ranges_ignore_inheritance;
+    svn_merge_range_t expected_adds_ignore_inheritance[10];
+    int expected_del_ranges_ignore_inheritance;
+    svn_merge_range_t expected_dels_ignore_inheritance[10];
+  };
+
+  #define SIZE_OF_RANGE_DIFF_TEST_ARRAY 16
+  /* The actual test data array.
+
+                    'from' --> {"1,5-8",  "1,6,10-12", <-- 'to'
+      Number of adds when  -->  1, { { 9, 12, TRUE } },
+      considering inheritance
+
+      Number of dels when  -->  2, { { 4, 5, TRUE }, { 6, 8, TRUE } },
+      considering inheritance
+
+      Number of adds when  -->  1, { { 9, 12, TRUE } },
+      ignoring inheritance
+
+      Number of dels when  -->  2, { { 4, 5, TRUE }, { 6, 8, TRUE } } },
+      ignoring inheritance
+                                    ^               ^
+                         The expected svn_merge_range_t's
+  */
+  struct rangelist_diff_test_data test_data[SIZE_OF_RANGE_DIFF_TEST_ARRAY] =
+    {
+      /* Add and Delete */
+      {"1", "3",
+       1, { { 2, 3, TRUE } },
+       1, { { 0, 1, TRUE } },
+       1, { { 2, 3, TRUE } },
+       1, { { 0, 1, TRUE } } },
+
+      /* Add only */
+      {"1", "1,3",
+       1, { { 2, 3, TRUE } },
+       0, { { 0, 0, FALSE } },
+       1, { { 2, 3, TRUE } },
+       0, { { 0, 0, FALSE } } },
+
+      /* Delete only */
+      {"1,3", "1",
+       0, { { 0, 0, FALSE } },
+       1, { { 2, 3, TRUE } },
+       0, { { 0, 0, FALSE } },
+       1, { { 2, 3, TRUE } } },
+
+      /* No diff */
+      {"1,3", "1,3",
+       0, { { 0, 0, FALSE } },
+       0, { { 0, 0, FALSE } },
+       0, { { 0, 0, FALSE } },
+       0, { { 0, 0, FALSE } } },
+
+      {"1,3*", "1,3*",
+       0, { { 0, 0, FALSE } },
+       0, { { 0, 0, FALSE } },
+       0, { { 0, 0, FALSE } },
+       0, { { 0, 0, FALSE } } },
+
+      /* Adds and Deletes */
+      {"1,5-8", "1,6,10-12",
+       1, { { 9, 12, TRUE } },
+       2, { { 4, 5, TRUE }, { 6, 8, TRUE } },
+       1, { { 9, 12, TRUE } },
+       2, { { 4, 5, TRUE }, { 6, 8, TRUE } } },
+
+      {"6*", "6",
+       1, { { 5, 6, TRUE  } },
+       1, { { 5, 6, FALSE } },
+       0, { { 0, 0, FALSE } },
+       0, { { 0, 0, FALSE } } },
+
+      /* Intersecting range with different inheritability */
+      {"6", "6*",
+       1, { { 5, 6, FALSE } },
+       1, { { 5, 6, TRUE  } },
+       0, { { 0, 0, FALSE } },
+       0, { { 0, 0, FALSE } } },
+
+      {"6*", "6",
+       1, { { 5, 6, TRUE  } },
+       1, { { 5, 6, FALSE } },
+       0, { { 0, 0, FALSE } },
+       0, { { 0, 0, FALSE } } },
+
+      {"1,5-8", "1,6*,10-12",
+       2, { { 5, 6, FALSE }, { 9, 12, TRUE } },
+       1, { { 4, 8, TRUE  } },
+       1, { { 9, 12, TRUE } },
+       2, { { 4, 5, TRUE  }, { 6, 8, TRUE } } },
+
+      {"1,5-8*", "1,6,10-12",
+       2, { { 5, 6, TRUE  }, { 9, 12, TRUE } },
+       1, { { 4, 8, FALSE } },
+       1, { { 9, 12, TRUE } },
+       2, { { 4, 5, FALSE }, { 6, 8, FALSE } } },
+
+      /* Empty range diffs */
+      {"3-9", "",
+       0, { { 0, 0, FALSE } },
+       1, { { 2, 9, TRUE  } },
+       0, { { 0, 0, FALSE } },
+       1, { { 2, 9, TRUE  } } },
+
+      {"3-9*", "",
+       0, { { 0, 0, FALSE } },
+       1, { { 2, 9, FALSE } },
+       0, { { 0, 0, FALSE } },
+       1, { { 2, 9, FALSE } } },
+
+      {"", "3-9",
+       1, { { 2, 9, TRUE  } },
+       0, { { 0, 0, FALSE } },
+       1, { { 2, 9, TRUE  } },
+       0, { { 0, 0, FALSE } } },
+
+      {"", "3-9*",
+       1, { { 2, 9, FALSE } },
+       0, { { 0, 0, FALSE } },
+       1, { { 2, 9, FALSE } },
+       0, { { 0, 0, FALSE } } },
+
+      /* Empty range no diff */
+      {"", "",
+       0, { { 0, 0, FALSE } },
+       0, { { 0, 0, FALSE } },
+       0, { { 0, 0, FALSE } },
+       0, { { 0, 0, FALSE } } },
+    };
+
+  err = child_err = SVN_NO_ERROR;
+  for (i = 0; i < SIZE_OF_RANGE_DIFF_TEST_ARRAY; i++)
+    {
+      SVN_ERR(svn_rangelist__parse(&to, test_data[i].to, pool));
+      SVN_ERR(svn_rangelist__parse(&from, test_data[i].from, pool));
+
+      /* Represent empty mergeinfo with an empty rangelist. */
+      if (to == NULL)
+        to = apr_array_make(pool, 0, sizeof(*to));
+      if (from == NULL)
+        from = apr_array_make(pool, 0, sizeof(*from));
+
+      /* First diff the ranges while considering
+         differences in inheritance. */
+      SVN_ERR(svn_rangelist_diff(&deleted, &added, from, to, TRUE, pool));
+
+      child_err = verify_ranges_match(added,
+                                     (test_data[i]).expected_adds,
+                                     (test_data[i]).expected_add_ranges,
+                                     apr_psprintf(pool,
+                                                  "svn_rangelist_diff"
+                                                  "case %i", i),
+                                     "diff", pool);
+      if (!child_err)
+        child_err = verify_ranges_match(deleted,
+                                        (test_data[i]).expected_dels,
+                                        (test_data[i]).expected_del_ranges,
+                                        apr_psprintf(pool,
+                                                     "svn_rangelist_diff"
+                                                     "case %i", i),
+                                        "diff", pool);
+      if (!child_err)
+        {
+          /* Now do the diff while ignoring differences in inheritance. */
+          SVN_ERR(svn_rangelist_diff(&deleted, &added, from, to, FALSE,
+                                     pool));
+          child_err = verify_ranges_match(
+            added,
+            (test_data[i]).expected_adds_ignore_inheritance,
+            (test_data[i]).expected_add_ranges_ignore_inheritance,
+            apr_psprintf(pool, "svn_rangelist_diff case %i", i),
+            "diff", pool);
+
+          if (!child_err)
+            child_err = verify_ranges_match(
+              deleted,
+              (test_data[i]).expected_dels_ignore_inheritance,
+              (test_data[i]).expected_del_ranges_ignore_inheritance,
+              apr_psprintf(pool, "svn_rangelist_diff case %i", i),
+              "diff", pool);
+        }
+
+      /* Collect all the errors rather than returning on the first. */
+      if (child_err)
+        {
+          if (err)
+            svn_error_compose(err, child_err);
+          else
+            err = child_err;
+        }
+    }
+  return err;
+}
+
+
+/* Test data structure for test_remove_prefix_from_catalog().
+   ORIG_PATH is the catalog key before prefix removal, NEW_PATH the key
+   expected afterwards, and MERGEINFO the svn:mergeinfo string stored under
+   both keys.  A row of all-NULL fields terminates an array of these. */
+struct catalog_bits
+{
+  const char *orig_path;
+  const char *new_path;
+  const char *mergeinfo;
+};
+
+
+/* Helper for test_remove_prefix_from_catalog().
+   Build an input catalog and an expected output catalog from the
+   NULL-terminated TEST_DATA array, run
+   svn_mergeinfo__remove_prefix_from_catalog() with PREFIX_PATH, and verify
+   that the result has exactly the expected keys and that each key still
+   maps to the *same* mergeinfo object (pointer identity, not just equal
+   content). */
+static svn_error_t *
+remove_prefix_helper(struct catalog_bits *test_data,
+                     const char *prefix_path,
+                     apr_pool_t *pool)
+{
+  svn_mergeinfo_catalog_t in_catalog, out_catalog, exp_out_catalog;
+  apr_hash_index_t *hi;
+  int i = 0;
+
+  in_catalog = apr_hash_make(pool);
+  exp_out_catalog = apr_hash_make(pool);
+  while (test_data[i].orig_path)
+    {
+      struct catalog_bits data = test_data[i];
+      const char *orig_path = apr_pstrdup(pool, data.orig_path);
+      const char *new_path = apr_pstrdup(pool, data.new_path);
+      svn_mergeinfo_t mergeinfo;
+      SVN_ERR(svn_mergeinfo_parse(&mergeinfo, data.mergeinfo, pool));
+      apr_hash_set(in_catalog, orig_path, APR_HASH_KEY_STRING, mergeinfo);
+      apr_hash_set(exp_out_catalog, new_path, APR_HASH_KEY_STRING, mergeinfo);
+      i++;
+    }
+  SVN_ERR(svn_mergeinfo__remove_prefix_from_catalog(&out_catalog, in_catalog,
+                                                    prefix_path, pool));
+  if (apr_hash_count(exp_out_catalog) != apr_hash_count(out_catalog))
+    return svn_error_create(SVN_ERR_TEST_FAILED, 0,
+                            "Got unexpected number of catalog entries");
+  for (hi = apr_hash_first(pool, out_catalog); hi; hi = apr_hash_next(hi))
+    {
+      const void *path;
+      apr_ssize_t path_len;
+      void *out_mergeinfo, *exp_out_mergeinfo;
+      apr_hash_this(hi, &path, &path_len, &out_mergeinfo);
+      exp_out_mergeinfo = apr_hash_get(exp_out_catalog, path, path_len);
+      if (! exp_out_mergeinfo)
+        return svn_error_createf(SVN_ERR_TEST_FAILED, 0,
+                                 "Found unexpected key '%s' in catalog",
+                                 (const char *)path);
+      if (exp_out_mergeinfo != out_mergeinfo)
+        return svn_error_create(SVN_ERR_TEST_FAILED, 0,
+                                "Detected value tampering in catalog");
+    }
+  return SVN_NO_ERROR;
+}
+
+/* Check svn_mergeinfo__remove_prefix_from_catalog() via
+   remove_prefix_helper(), once stripping the prefix "/trunk" and once the
+   root prefix "/".  Each run uses a cleared subpool. */
+static svn_error_t *
+test_remove_prefix_from_catalog(apr_pool_t *pool)
+{
+  apr_pool_t *subpool = svn_pool_create(pool);
+
+  /* For testing the remove of the prefix "/trunk" */
+  struct catalog_bits test_data_1[] =
+    {
+      { "/trunk",           "",             "/A:1" },
+      { "/trunk/foo",       "foo",          "/A/foo:1,3*" },
+      { "/trunk/foo/bar",   "foo/bar",      "/A/foo:1-4" },
+      { "/trunk/baz",       "baz",          "/A/baz:2" },
+      { NULL, NULL, NULL }
+    };
+
+  /* For testing the remove of the prefix "/" */
+  struct catalog_bits test_data_2[] =
+    {
+      { "/",                "",              "/:2" },
+      { "/trunk",           "trunk",         "/A:1" },
+      { "/trunk/foo",       "trunk/foo",     "/A/foo:1,3*" },
+      { "/trunk/foo/bar",   "trunk/foo/bar", "/A/foo:1-4" },
+      { "/trunk/baz",       "trunk/baz",     "/A/baz:2" },
+      { NULL, NULL, NULL }
+    };
+
+  svn_pool_clear(subpool);
+  SVN_ERR(remove_prefix_helper(test_data_1, "/trunk", subpool));
+
+  svn_pool_clear(subpool);
+  SVN_ERR(remove_prefix_helper(test_data_2, "/", subpool));
+
+  svn_pool_destroy(subpool);
+  return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_rangelist_merge_overlap(apr_pool_t *pool)
+{
+ const char *rangelist_str = "19473-19612*,19615-19630*,19631-19634";
+ const char *changes_str = "15014-20515*";
+ const char *expected_str = "15014-19630*,19631-19634,19635-20515*";
+ /* wrong result: "15014-19630*,19634-19631*,19631-19634,19635-20515*" */
+ svn_rangelist_t *rangelist, *changes;
+ svn_string_t *result_string;
+
+ /* prepare the inputs */
+ SVN_ERR(svn_rangelist__parse(&rangelist, rangelist_str, pool));
+ SVN_ERR(svn_rangelist__parse(&changes, changes_str, pool));
+ SVN_TEST_ASSERT(svn_rangelist__is_canonical(rangelist));
+ SVN_TEST_ASSERT(svn_rangelist__is_canonical(changes));
+
+ /* perform the merge */
+ SVN_ERR(svn_rangelist_merge2(rangelist, changes, pool, pool));
+
+ /* check the output */
+ SVN_TEST_ASSERT(svn_rangelist__is_canonical(rangelist));
+ SVN_ERR(svn_rangelist_to_string(&result_string, rangelist, pool));
+ SVN_TEST_STRING_ASSERT(result_string->data, expected_str);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_rangelist_loop(apr_pool_t *pool)
+{
+ apr_pool_t *iterpool = svn_pool_create(pool);
+ int x, y;
+
+ for (x = 0; x < 62; x++)
+ for (y = x + 1; y < 63; y++)
+ {
+ svn_rangelist_t *base_list;
+ svn_rangelist_t *change_list;
+ svn_merge_range_t *mrange;
+ svn_pool_clear(iterpool);
+
+ SVN_ERR(svn_rangelist__parse(&base_list,
+ "2,4,7-9,12-15,18-20,"
+ "22*,25*,28-30*,33-35*,"
+ "38-40,43-45*,48-50,52-54,56-59*",
+ iterpool));
+
+ change_list = apr_array_make(iterpool, 1, sizeof(mrange));
+
+ mrange = apr_pcalloc(pool, sizeof(*mrange));
+ mrange->start = x;
+ mrange->end = y;
+ APR_ARRAY_PUSH(change_list, svn_merge_range_t *) = mrange;
+
+ {
+ svn_rangelist_t *bl = svn_rangelist_dup(base_list, iterpool);
+ svn_rangelist_t *cl = svn_rangelist_dup(change_list, iterpool);
+
+ SVN_TEST_ASSERT(svn_rangelist__is_canonical(bl));
+ SVN_TEST_ASSERT(svn_rangelist__is_canonical(cl));
+
+ SVN_ERR(svn_rangelist_merge2(bl, cl, iterpool, iterpool));
+
+ SVN_TEST_ASSERT(svn_rangelist__is_canonical(bl));
+ SVN_TEST_ASSERT(svn_rangelist__is_canonical(cl));
+
+ /* TODO: Verify result */
+ }
+
+ {
+ svn_rangelist_t *bl = svn_rangelist_dup(base_list, iterpool);
+ svn_rangelist_t *cl = svn_rangelist_dup(change_list, iterpool);
+
+ SVN_ERR(svn_rangelist_merge2(cl, bl, iterpool, iterpool));
+
+ SVN_TEST_ASSERT(svn_rangelist__is_canonical(bl));
+ SVN_TEST_ASSERT(svn_rangelist__is_canonical(cl));
+
+ /* TODO: Verify result */
+ }
+
+ mrange->inheritable = TRUE;
+
+ {
+ svn_rangelist_t *bl = svn_rangelist_dup(base_list, iterpool);
+ svn_rangelist_t *cl = svn_rangelist_dup(change_list, iterpool);
+
+ SVN_TEST_ASSERT(svn_rangelist__is_canonical(bl));
+ SVN_TEST_ASSERT(svn_rangelist__is_canonical(cl));
+
+ SVN_ERR(svn_rangelist_merge2(bl, cl, iterpool, iterpool));
+
+ SVN_TEST_ASSERT(svn_rangelist__is_canonical(bl));
+ SVN_TEST_ASSERT(svn_rangelist__is_canonical(cl));
+
+ /* TODO: Verify result */
+ }
+
+ {
+ svn_rangelist_t *bl = svn_rangelist_dup(base_list, iterpool);
+ svn_rangelist_t *cl = svn_rangelist_dup(change_list, iterpool);
+
+ SVN_ERR(svn_rangelist_merge2(cl, bl, iterpool, iterpool));
+
+ SVN_TEST_ASSERT(svn_rangelist__is_canonical(bl));
+ SVN_TEST_ASSERT(svn_rangelist__is_canonical(cl));
+
+ /* TODO: Verify result */
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* The test table. */
+
+static int max_threads = 4;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_parse_single_line_mergeinfo,
+ "parse single line mergeinfo"),
+ SVN_TEST_PASS2(test_mergeinfo_dup,
+ "copy a mergeinfo data structure"),
+ SVN_TEST_PASS2(test_parse_combine_rangeinfo,
+ "parse single line mergeinfo and combine ranges"),
+ SVN_TEST_PASS2(test_parse_broken_mergeinfo,
+ "parse broken single line mergeinfo"),
+ SVN_TEST_PASS2(test_remove_rangelist,
+ "remove rangelists"),
+ SVN_TEST_PASS2(test_rangelist_remove_randomly,
+ "test rangelist remove with random data"),
+ SVN_TEST_PASS2(test_remove_mergeinfo,
+ "remove of mergeinfo"),
+ SVN_TEST_PASS2(test_rangelist_reverse,
+ "reversal of rangelist"),
+ SVN_TEST_PASS2(test_rangelist_intersect,
+ "intersection of rangelists"),
+ SVN_TEST_PASS2(test_rangelist_intersect_randomly,
+ "test rangelist intersect with random data"),
+ SVN_TEST_PASS2(test_diff_mergeinfo,
+ "diff of mergeinfo"),
+ SVN_TEST_PASS2(test_merge_mergeinfo,
+ "merging of mergeinfo hashes"),
+ SVN_TEST_PASS2(test_mergeinfo_intersect,
+ "intersection of mergeinfo"),
+ SVN_TEST_PASS2(test_rangelist_to_string,
+ "turning rangelist back into a string"),
+ SVN_TEST_PASS2(test_mergeinfo_to_string,
+ "turning mergeinfo back into a string"),
+ SVN_TEST_PASS2(test_rangelist_merge,
+ "merge of rangelists"),
+ SVN_TEST_PASS2(test_rangelist_diff,
+ "diff of rangelists"),
+ SVN_TEST_PASS2(test_remove_prefix_from_catalog,
+ "removal of prefix paths from catalog keys"),
+ SVN_TEST_PASS2(test_rangelist_merge_overlap,
+ "merge of rangelists with overlaps (issue 4686)"),
+ SVN_TEST_PASS2(test_rangelist_loop,
+ "test rangelist edgecases via loop"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/opt-test.c b/subversion/tests/libsvn_subr/opt-test.c
new file mode 100644
index 0000000..c336d81
--- /dev/null
+++ b/subversion/tests/libsvn_subr/opt-test.c
@@ -0,0 +1,208 @@
+/*
+ * opt-test.c -- test the option functions
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <string.h>
+#include <apr_general.h>
+
+#include "../svn_test.h"
+
+#include "svn_opt.h"
+
+
+static svn_error_t *
+test_parse_peg_rev(apr_pool_t *pool)
+{
+ apr_size_t i;
+ static struct {
+ const char *input;
+ const char *path; /* NULL means an error is expected. */
+ svn_opt_revision_t peg;
+ } const tests[] = {
+ { "foo/bar", "foo/bar", {svn_opt_revision_unspecified} },
+ { "foo/bar@13", "foo/bar", {svn_opt_revision_number, {13}} },
+ { "foo/bar@HEAD", "foo/bar", {svn_opt_revision_head} },
+ { "foo/bar@{1999-12-31}", "foo/bar", {svn_opt_revision_date, {0}} },
+ { "http://a/b@27", "http://a/b", {svn_opt_revision_number, {27}} },
+ { "http://a/b@COMMITTED", "http://a/b", {svn_opt_revision_committed} },
+ { "http://a/b@{1999-12-31}", "http://a/b",{svn_opt_revision_date, {0}} },
+ { "http://a/b@%7B1999-12-31%7D","http://a/b",{svn_opt_revision_date, {0}} },
+ { "foo/bar@1:2", NULL, {-1} },
+ { "foo/bar@baz", NULL, {-1} },
+ { "foo/bar@", "foo/bar", {svn_opt_revision_unspecified} },
+ { "foo/@bar@", "foo/@bar", {svn_opt_revision_unspecified} },
+ { "foo/bar/@13", "foo/bar/", {svn_opt_revision_number, {13}} },
+ { "foo/bar@@13", "foo/bar@", {svn_opt_revision_number, {13}} },
+ { "foo/@bar@HEAD", "foo/@bar", {svn_opt_revision_head} },
+ { "foo@/bar", "foo@/bar", {svn_opt_revision_unspecified} },
+ { "foo@HEAD/bar", "foo@HEAD/bar", {svn_opt_revision_unspecified} },
+ { "@foo/bar", "@foo/bar", {svn_opt_revision_unspecified} },
+ { "@foo/bar@", "@foo/bar", {svn_opt_revision_unspecified} },
+ };
+
+ for (i = 0; i < sizeof(tests) / sizeof(tests[0]); i++)
+ {
+ const char *path;
+ svn_opt_revision_t peg;
+ svn_error_t *err;
+
+ err = svn_opt_parse_path(&peg, &path, tests[i].input, pool);
+ if (err)
+ {
+ svn_error_clear(err);
+ if (tests[i].path)
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_opt_parse_path ('%s') returned an error instead of '%s'",
+ tests[i].input, tests[i].path);
+ }
+ }
+ else
+ {
+ if ((path == NULL)
+ || (tests[i].path == NULL)
+ || (strcmp(path, tests[i].path) != 0)
+ || (peg.kind != tests[i].peg.kind)
+ || (peg.kind == svn_opt_revision_number && peg.value.number != tests[i].peg.value.number))
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_opt_parse_path ('%s') returned '%s' instead of '%s'", tests[i].input,
+ path ? path : "NULL", tests[i].path ? tests[i].path : "NULL");
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_svn_opt_args_to_target_array2(apr_pool_t *pool)
+{
+ apr_size_t i;
+ static struct {
+ const char *input;
+ const char *output; /* NULL means an error is expected. */
+ } const tests[] = {
+ { ".", "" },
+ { ".@BASE", "@BASE" },
+ { "foo///bar", "foo/bar" },
+ { "foo///bar@13", "foo/bar@13" },
+ { "foo///bar@HEAD", "foo/bar@HEAD" },
+ { "foo///bar@{1999-12-31}", "foo/bar@{1999-12-31}" },
+ { "http://a//b////", "http://a/b" },
+ { "http://a///b@27", "http://a/b@27" },
+ { "http://a/b//@COMMITTED", "http://a/b@COMMITTED" },
+ { "foo///bar@1:2", "foo/bar@1:2" },
+ { "foo///bar@baz", "foo/bar@baz" },
+ { "foo///bar@", "foo/bar@" },
+ { "foo///bar///@13", "foo/bar@13" },
+ { "foo///bar@@13", "foo/bar@@13" },
+ { "foo///@bar@HEAD", "foo/@bar@HEAD" },
+ { "foo@///bar", "foo@/bar" },
+ { "foo@HEAD///bar", "foo@HEAD/bar" },
+ };
+
+ for (i = 0; i < sizeof(tests) / sizeof(tests[0]); i++)
+ {
+ const char *input = tests[i].input;
+ const char *expected_output = tests[i].output;
+ apr_array_header_t *targets;
+ apr_getopt_t *os;
+ const int argc = 2;
+ const char *argv[3] = { 0 };
+ apr_status_t apr_err;
+ svn_error_t *err;
+
+ argv[0] = "opt-test";
+ argv[1] = input;
+ argv[2] = NULL;
+
+ apr_err = apr_getopt_init(&os, pool, argc, argv);
+ if (apr_err)
+ return svn_error_wrap_apr(apr_err,
+ "Error initializing command line arguments");
+
+ err = svn_opt_args_to_target_array2(&targets, os, NULL, pool);
+
+ if (expected_output)
+ {
+ const char *actual_output;
+
+ if (err)
+ return err;
+ if (argc - 1 != targets->nelts)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Passed %d target(s) to "
+ "svn_opt_args_to_target_array2() but "
+ "got %d back.",
+ argc - 1,
+ targets->nelts);
+
+ actual_output = APR_ARRAY_IDX(targets, 0, const char *);
+
+ if (! svn_path_is_canonical(actual_output, pool))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Input '%s' to "
+ "svn_opt_args_to_target_array2() should "
+ "have returned a canonical path but "
+ "'%s' is not.",
+ input,
+ actual_output);
+
+ if (strcmp(expected_output, actual_output) != 0)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Input '%s' to "
+ "svn_opt_args_to_target_array2() should "
+ "have returned '%s' but returned '%s'.",
+ input,
+ expected_output,
+ actual_output);
+ }
+ else
+ {
+ if (! err)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Unexpected success in passing '%s' "
+ "to svn_opt_args_to_target_array2().",
+ input);
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* The test table. */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_parse_peg_rev,
+ "test svn_opt_parse_path"),
+ SVN_TEST_PASS2(test_svn_opt_args_to_target_array2,
+ "test svn_opt_args_to_target_array2"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/packed-data-test.c b/subversion/tests/libsvn_subr/packed-data-test.c
new file mode 100644
index 0000000..b0e3097
--- /dev/null
+++ b/subversion/tests/libsvn_subr/packed-data-test.c
@@ -0,0 +1,578 @@
+/*
+ * packed-data-test.c: a collection of svn_packed__* tests
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* ====================================================================
+ To add tests, look toward the bottom of this file.
+
+*/
+
+
+
+#include <stdio.h>
+#include <string.h>
+#include <apr_pools.h>
+
+#include "../svn_test.h"
+
+#include "svn_error.h"
+#include "svn_string.h" /* This includes <apr_*.h> */
+#include "private/svn_packed_data.h"
+
+/* Take the WRITE_ROOT, serialize its contents, parse it again into a new
+ * data root and return it in *READ_ROOT. Allocate it in POOL.
+ */
+static svn_error_t*
+get_read_root(svn_packed__data_root_t **read_root,
+ svn_packed__data_root_t *write_root,
+ apr_pool_t *pool)
+{
+ svn_stringbuf_t *stream_buffer = svn_stringbuf_create_empty(pool);
+ svn_stream_t *stream;
+
+ stream = svn_stream_from_stringbuf(stream_buffer, pool);
+ SVN_ERR(svn_packed__data_write(stream, write_root, pool));
+ SVN_ERR(svn_stream_close(stream));
+
+ stream = svn_stream_from_stringbuf(stream_buffer, pool);
+ SVN_ERR(svn_packed__data_read(read_root, stream, pool, pool));
+ SVN_ERR(svn_stream_close(stream));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_empty_container(apr_pool_t *pool)
+{
+ /* create an empty, readable container */
+ svn_packed__data_root_t *root = svn_packed__data_create_root(pool);
+ SVN_ERR(get_read_root(&root, root, pool));
+
+ /* there should be no sub-streams */
+ SVN_TEST_ASSERT(svn_packed__first_int_stream(root) == NULL);
+ SVN_TEST_ASSERT(svn_packed__first_byte_stream(root) == NULL);
+
+ return SVN_NO_ERROR;
+}
+
+/* Check that COUNT numbers from VALUES can be written as uints to a
+ * packed data stream and can be read from that stream again. Deltify
+ * data in the stream if DIFF is set. Use POOL for allocations.
+ */
+static svn_error_t *
+verify_uint_stream(const apr_uint64_t *values,
+ apr_size_t count,
+ svn_boolean_t diff,
+ apr_pool_t *pool)
+{
+ svn_packed__data_root_t *root = svn_packed__data_create_root(pool);
+ svn_packed__int_stream_t *stream
+ = svn_packed__create_int_stream(root, diff, FALSE);
+
+ apr_size_t i;
+ for (i = 0; i < count; ++i)
+ svn_packed__add_uint(stream, values[i]);
+
+ SVN_ERR(get_read_root(&root, root, pool));
+
+ /* the container should contain exactly one int stream */
+ stream = svn_packed__first_int_stream(root);
+ SVN_TEST_ASSERT(stream);
+ SVN_TEST_ASSERT(!svn_packed__next_int_stream(stream));
+ SVN_TEST_ASSERT(!svn_packed__first_byte_stream(root));
+
+ /* the stream shall contain exactly the items we put into it */
+ SVN_TEST_ASSERT(svn_packed__int_count(stream) == count);
+ for (i = 0; i < count; ++i)
+ SVN_TEST_ASSERT(svn_packed__get_uint(stream) == values[i]);
+
+ /* reading beyond eos should return 0 values */
+ SVN_TEST_ASSERT(svn_packed__get_uint(stream) == 0);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_uint_stream(apr_pool_t *pool)
+{
+ enum { COUNT = 8 };
+ const apr_uint64_t values[COUNT] =
+ {
+ APR_UINT64_MAX,
+ 0,
+ APR_UINT64_MAX,
+ APR_UINT64_C(0x8000000000000000),
+ 0,
+ APR_UINT64_C(0x7fffffffffffffff),
+ APR_UINT64_C(0x1234567890abcdef),
+ APR_UINT64_C(0x0fedcba987654321),
+ };
+
+ SVN_ERR(verify_uint_stream(values, COUNT, FALSE, pool));
+ SVN_ERR(verify_uint_stream(values, COUNT, TRUE, pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* Check that COUNT numbers from VALUES can be written as signed ints to a
+ * packed data stream and can be read from that stream again. Deltify
+ * data in the stream if DIFF is set. Use POOL for allocations.
+ */
+static svn_error_t *
+verify_int_stream(const apr_int64_t *values,
+ apr_size_t count,
+ svn_boolean_t diff,
+ apr_pool_t *pool)
+{
+ svn_packed__data_root_t *root = svn_packed__data_create_root(pool);
+ svn_packed__int_stream_t *stream
+ = svn_packed__create_int_stream(root, diff, TRUE);
+
+ apr_size_t i;
+ for (i = 0; i < count; ++i)
+ svn_packed__add_int(stream, values[i]);
+
+ SVN_ERR(get_read_root(&root, root, pool));
+
+ /* the container should contain exactly one int stream */
+ stream = svn_packed__first_int_stream(root);
+ SVN_TEST_ASSERT(stream);
+ SVN_TEST_ASSERT(!svn_packed__next_int_stream(stream));
+ SVN_TEST_ASSERT(!svn_packed__first_byte_stream(root));
+
+ /* the stream shall contain exactly the items we put into it */
+ SVN_TEST_ASSERT(svn_packed__int_count(stream) == count);
+ for (i = 0; i < count; ++i)
+ SVN_TEST_ASSERT(svn_packed__get_int(stream) == values[i]);
+
+ /* reading beyond eos should return 0 values */
+ SVN_TEST_ASSERT(svn_packed__get_int(stream) == 0);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_int_stream(apr_pool_t *pool)
+{
+ enum { COUNT = 7 };
+ const apr_int64_t values[COUNT] =
+ {
+ APR_INT64_MAX, /* extreme value */
+ APR_INT64_MIN, /* other extreme, creating maximum delta to predecessor */
+ 0, /* delta to predecessor > APR_INT64_MAX */
+ APR_INT64_MAX, /* max value, again */
+ -APR_INT64_MAX, /* _almost_ min value, almost max delta */
+ APR_INT64_C(0x1234567890abcdef), /* some arbitrary value */
+ -APR_INT64_C(0x0fedcba987654321), /* arbitrary value, different sign */
+ };
+
+ SVN_ERR(verify_int_stream(values, COUNT, FALSE, pool));
+ SVN_ERR(verify_int_stream(values, COUNT, TRUE, pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_byte_stream(apr_pool_t *pool)
+{
+ enum { COUNT = 6 };
+ const svn_string_t values[COUNT] =
+ {
+ { "", 0 },
+ { "\0", 1 },
+ { "\0", 1 },
+ { "some text", 9 },
+ { "", 0 },
+ { "some more", 9 }
+ };
+
+ svn_packed__data_root_t *root = svn_packed__data_create_root(pool);
+ svn_packed__byte_stream_t *stream
+ = svn_packed__create_bytes_stream(root);
+
+ apr_size_t i;
+ for (i = 0; i < COUNT; ++i)
+ svn_packed__add_bytes(stream, values[i].data, values[i].len);
+
+ SVN_ERR(get_read_root(&root, root, pool));
+
+ /* the container should contain exactly one byte stream */
+ stream = svn_packed__first_byte_stream(root);
+ SVN_TEST_ASSERT(stream);
+ SVN_TEST_ASSERT(!svn_packed__next_byte_stream(stream));
+
+ /* the stream shall contain exactly the items we put into it */
+ SVN_TEST_ASSERT(svn_packed__byte_count(stream) == 20);
+ SVN_TEST_ASSERT(svn_packed__byte_block_count(stream) == COUNT);
+ for (i = 0; i < COUNT; ++i)
+ {
+ svn_string_t string;
+ string.data = svn_packed__get_bytes(stream, &string.len);
+
+ SVN_TEST_ASSERT(string.len == values[i].len);
+ SVN_TEST_ASSERT(!memcmp(string.data, values[i].data, string.len));
+ }
+
+ /* reading beyond eos should return 0 values */
+ SVN_TEST_ASSERT(svn_packed__byte_count(stream) == 0);
+
+ return SVN_NO_ERROR;
+}
+
+/* Some simple structure that we use as sub-structure to BASE_RECORD_T.
+ * Have it contain numbers and strings.
+ */
+typedef struct sub_record_t
+{
+ int sub_counter;
+ svn_string_t text;
+} sub_record_t;
+
+/* signed / unsigned, 64 bits and shorter, diff-able and not, multiple
+ * strings, multiple sub-records. */
+typedef struct base_record_t
+{
+ int counter;
+ svn_string_t description;
+ apr_uint64_t large_unsigned1;
+ apr_uint64_t large_unsigned2;
+ const sub_record_t *left_subs;
+ apr_int64_t large_signed1;
+ apr_int64_t large_signed2;
+ unsigned prime;
+ const sub_record_t *right_subs;
+ svn_string_t binary;
+} base_record_t;
+
+/* our test data */
+enum {SUB_RECORD_COUNT = 7};
+enum {BASE_RECORD_COUNT = 4};
+
+static const sub_record_t sub_records[SUB_RECORD_COUNT] =
+{
+ { 6, { "this is quite a longish piece of text", 37} },
+ { 5, { "x", 1} },
+ { 4, { "not empty", 9} },
+ { 3, { "another bit of text", 19} },
+ { 2, { "", 0} },
+ { 1, { "first sub-record", 16} },
+ { 0 }
+};
+
+static const base_record_t test_data[BASE_RECORD_COUNT] =
+{
+ { 1, { "maximum", 7},
+ APR_UINT64_MAX, APR_UINT64_MAX, sub_records,
+ APR_INT64_MAX, APR_INT64_MAX, 9967, sub_records + 1,
+ { "\0\1\2\3\4\5\6\7\x8\x9\xa", 11} },
+
+ { 2, { "minimum", 7},
+ 0, 0, sub_records + 6,
+ APR_INT64_MIN, APR_INT64_MIN, 6029, sub_records + 5,
+ { "X\0\0Y", 4} },
+
+ { 3, { "mean", 4},
+ APR_UINT64_C(0x8000000000000000), APR_UINT64_C(0x8000000000000000),
+ sub_records + 2,
+ 0, 0, 653, sub_records + 3,
+ { "\xff\0\1\2\3\4\5\6\7\x8\x9\xa", 12} },
+
+ { 4, { "random", 6},
+ APR_UINT64_C(0x1234567890abcdef), APR_UINT64_C(0xfedcba987654321),
+ sub_records + 4,
+ APR_INT64_C(0x1234567890abcd), APR_INT64_C(-0xedcba987654321), 7309,
+ sub_records + 1,
+ { "\x80\x7f\0\1\6", 5} }
+};
+
+/* Serialize RECORDS into INT_STREAM and TEXT_STREAM. Stop when the
+ * current record's SUB_COUNTER is 0.
+ */
+static unsigned
+pack_subs(svn_packed__int_stream_t *int_stream,
+ svn_packed__byte_stream_t *text_stream,
+ const sub_record_t *records)
+{
+ unsigned count;
+ for (count = 0; records[count].sub_counter; ++count)
+ {
+ svn_packed__add_int(int_stream, records[count].sub_counter);
+ svn_packed__add_bytes(text_stream,
+ records[count].text.data,
+ records[count].text.len);
+ }
+
+ return count;
+}
+
+/* Serialize COUNT records starting from DATA into a packed data container
+ * allocated in POOL and return the container root.
+ */
+static svn_packed__data_root_t *
+pack(const base_record_t *data,
+ apr_size_t count,
+ apr_pool_t *pool)
+{
+ apr_size_t i;
+ svn_packed__data_root_t *root = svn_packed__data_create_root(pool);
+ svn_packed__int_stream_t *base_stream
+ = svn_packed__create_int_stream(root, FALSE, FALSE);
+ svn_packed__int_stream_t *sub_count_stream
+ = svn_packed__create_int_stream(root, TRUE, FALSE);
+
+ svn_packed__int_stream_t *left_sub_stream
+ = svn_packed__create_int_stream(root, FALSE, TRUE);
+ svn_packed__int_stream_t *right_sub_stream
+ = svn_packed__create_int_stream(root, FALSE, TRUE);
+
+ svn_packed__byte_stream_t *base_description_stream
+ = svn_packed__create_bytes_stream(root);
+ svn_packed__byte_stream_t *base_binary_stream
+ = svn_packed__create_bytes_stream(root);
+ svn_packed__byte_stream_t *sub_text_stream
+ = svn_packed__create_bytes_stream(root);
+
+ svn_packed__create_int_substream(base_stream, TRUE, TRUE); /* counter */
+ svn_packed__create_int_substream(base_stream, TRUE, FALSE); /* large_unsigned1 */
+ svn_packed__create_int_substream(base_stream, FALSE, FALSE); /* large_unsigned2 */
+ svn_packed__create_int_substream(base_stream, TRUE, TRUE); /* large_signed1 */
+ svn_packed__create_int_substream(base_stream, FALSE, TRUE); /* large_signed2 */
+ svn_packed__create_int_substream(base_stream, TRUE, FALSE); /* prime */
+
+ for (i = 0; i < count; ++i)
+ {
+ svn_packed__add_int(base_stream, data[i].counter);
+ svn_packed__add_bytes(base_description_stream,
+ data[i].description.data,
+ data[i].description.len);
+ svn_packed__add_uint(base_stream, data[i].large_unsigned1);
+ svn_packed__add_uint(base_stream, data[i].large_unsigned2);
+ svn_packed__add_uint(sub_count_stream,
+ pack_subs(left_sub_stream, sub_text_stream,
+ data[i].left_subs));
+
+ svn_packed__add_int(base_stream, data[i].large_signed1);
+ svn_packed__add_int(base_stream, data[i].large_signed2);
+ svn_packed__add_uint(base_stream, data[i].prime);
+ svn_packed__add_uint(sub_count_stream,
+ pack_subs(right_sub_stream, sub_text_stream,
+ data[i].right_subs));
+
+ svn_packed__add_bytes(base_binary_stream,
+ data[i].binary.data,
+ data[i].binary.len);
+ }
+
+ return root;
+}
+
+/* Deserialize COUNT records from INT_STREAM and TEXT_STREAM and return
+ * the result allocated in POOL.
+ */
+static sub_record_t *
+unpack_subs(svn_packed__int_stream_t *int_stream,
+ svn_packed__byte_stream_t *text_stream,
+ apr_size_t count,
+ apr_pool_t *pool)
+{
+ sub_record_t *records = apr_pcalloc(pool, (count + 1) * sizeof(*records));
+
+ apr_size_t i;
+ for (i = 0; i < count; ++i)
+ {
+ records[i].sub_counter = (int) svn_packed__get_int(int_stream);
+ records[i].text.data = svn_packed__get_bytes(text_stream,
+ &records[i].text.len);
+ }
+
+ return records;
+}
+
+/* Deserialize all records from the packed data container ROOT, allocate
+ * them in POOL and return them. Set *COUNT to the number of records read.
+ */
+static base_record_t *
+unpack(apr_size_t *count,
+ svn_packed__data_root_t *root,
+ apr_pool_t *pool)
+{
+ svn_packed__int_stream_t *base_stream
+ = svn_packed__first_int_stream(root);
+ svn_packed__int_stream_t *sub_count_stream
+ = svn_packed__next_int_stream(base_stream);
+ svn_packed__byte_stream_t *base_description_stream
+ = svn_packed__first_byte_stream(root);
+ svn_packed__byte_stream_t *base_binary_stream
+ = svn_packed__next_byte_stream(base_description_stream);
+ svn_packed__byte_stream_t *sub_text_stream
+ = svn_packed__next_byte_stream(base_binary_stream);
+
+ svn_packed__int_stream_t *left_sub_stream
+ = svn_packed__next_int_stream(sub_count_stream);
+ svn_packed__int_stream_t *right_sub_stream
+ = svn_packed__next_int_stream(left_sub_stream);
+
+ apr_size_t i;
+ base_record_t *data;
+ *count = svn_packed__int_count(sub_count_stream) / 2;
+ data = apr_pcalloc(pool, *count * sizeof(*data));
+
+ for (i = 0; i < *count; ++i)
+ {
+ data[i].counter = (int) svn_packed__get_int(base_stream);
+ data[i].description.data
+ = svn_packed__get_bytes(base_description_stream,
+ &data[i].description.len);
+ data[i].large_unsigned1 = svn_packed__get_uint(base_stream);
+ data[i].large_unsigned2 = svn_packed__get_uint(base_stream);
+ data[i].left_subs = unpack_subs(left_sub_stream, sub_text_stream,
+ (apr_size_t)svn_packed__get_uint(sub_count_stream),
+ pool);
+
+ data[i].large_signed1 = svn_packed__get_int(base_stream);
+ data[i].large_signed2 = svn_packed__get_int(base_stream);
+ data[i].prime = (unsigned) svn_packed__get_uint(base_stream);
+ data[i].right_subs = unpack_subs(right_sub_stream, sub_text_stream,
+ (apr_size_t)svn_packed__get_uint(sub_count_stream),
+ pool);
+
+ data[i].binary.data
+ = svn_packed__get_bytes(base_binary_stream,
+ &data[i].binary.len);
+ }
+
+ return data;
+}
+
+/* Assert that LHS and RHS contain the same binary data (i.e. don't test
+ * for a terminating NUL).
+ */
+static svn_error_t *
+compare_binary(const svn_string_t *lhs,
+ const svn_string_t *rhs)
+{
+ SVN_TEST_ASSERT(lhs->len == rhs->len);
+ SVN_TEST_ASSERT(!memcmp(lhs->data, rhs->data, rhs->len));
+
+ return SVN_NO_ERROR;
+}
+
+/* Assert that LHS and RHS contain the same number of records with the
+ * same contents.
+ */
+static svn_error_t *
+compare_subs(const sub_record_t *lhs,
+ const sub_record_t *rhs)
+{
+ for (; lhs->sub_counter; ++lhs, ++rhs)
+ {
+ SVN_TEST_ASSERT(lhs->sub_counter == rhs->sub_counter);
+ SVN_ERR(compare_binary(&lhs->text, &rhs->text));
+ }
+
+ SVN_TEST_ASSERT(lhs->sub_counter == rhs->sub_counter);
+ return SVN_NO_ERROR;
+}
+
+/* Assert that the first COUNT records in LHS and RHS have the same contents.
+ */
+static svn_error_t *
+compare(const base_record_t *lhs,
+ const base_record_t *rhs,
+ apr_size_t count)
+{
+ apr_size_t i;
+ for (i = 0; i < count; ++i)
+ {
+ SVN_TEST_ASSERT(lhs[i].counter == rhs[i].counter);
+ SVN_ERR(compare_binary(&lhs[i].description, &rhs[i].description));
+ SVN_TEST_ASSERT(lhs[i].large_unsigned1 == rhs[i].large_unsigned1);
+ SVN_TEST_ASSERT(lhs[i].large_unsigned2 == rhs[i].large_unsigned2);
+ SVN_ERR(compare_subs(lhs[i].left_subs, rhs[i].left_subs));
+ SVN_TEST_ASSERT(lhs[i].counter == rhs[i].counter);
+ SVN_TEST_ASSERT(lhs[i].large_signed1 == rhs[i].large_signed1);
+ SVN_TEST_ASSERT(lhs[i].large_signed2 == rhs[i].large_signed2);
+ SVN_TEST_ASSERT(lhs[i].prime == rhs[i].prime);
+ SVN_ERR(compare_subs(lhs[i].right_subs, rhs[i].right_subs));
+ SVN_ERR(compare_binary(&lhs[i].binary, &rhs[i].binary));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_empty_structure(apr_pool_t *pool)
+{
+ base_record_t *unpacked;
+ apr_size_t count;
+
+ /* create an empty, readable container */
+ svn_packed__data_root_t *root = pack(test_data, 0, pool);
+
+ SVN_ERR(get_read_root(&root, root, pool));
+ unpacked = unpack(&count, root, pool);
+ SVN_TEST_ASSERT(count == 0);
+ SVN_ERR(compare(unpacked, test_data, count));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_full_structure(apr_pool_t *pool)
+{
+ base_record_t *unpacked;
+ apr_size_t count;
+
+ /* create an empty, readable container */
+ svn_packed__data_root_t *root = pack(test_data, BASE_RECORD_COUNT, pool);
+
+ SVN_ERR(get_read_root(&root, root, pool));
+ unpacked = unpack(&count, root, pool);
+ SVN_TEST_ASSERT(count == BASE_RECORD_COUNT);
+ SVN_ERR(compare(unpacked, test_data, count));
+
+ return SVN_NO_ERROR;
+}
+
+/* An array of all test functions */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_empty_container,
+ "test empty container"),
+ SVN_TEST_PASS2(test_uint_stream,
+ "test a single uint stream"),
+ SVN_TEST_PASS2(test_int_stream,
+ "test a single int stream"),
+ SVN_TEST_PASS2(test_byte_stream,
+ "test a single bytes stream"),
+ SVN_TEST_PASS2(test_empty_structure,
+ "test empty, nested structure"),
+ SVN_TEST_PASS2(test_full_structure,
+ "test nested structure"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/path-test.c b/subversion/tests/libsvn_subr/path-test.c
new file mode 100644
index 0000000..6f0a996
--- /dev/null
+++ b/subversion/tests/libsvn_subr/path-test.c
@@ -0,0 +1,1766 @@
+/*
+ * path-test.c -- test the path functions
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#ifdef _MSC_VER
+#include <direct.h>
+#define getcwd _getcwd
+#else
+#include <unistd.h> /* for getcwd() */
+#endif
+
+#include <stdio.h>
+#include <string.h>
+#include <apr_general.h>
+
+#include "svn_pools.h"
+
+#include "../svn_test.h"
+
+/* Make sure SVN_DEPRECATED is defined as empty before including svn_path.h.
+ We don't want to trigger deprecation warnings by the tests of those
+ functions. */
+#ifdef SVN_DEPRECATED
+#undef SVN_DEPRECATED
+#endif
+#define SVN_DEPRECATED
+
+#include "svn_path.h"
+
+
+/* Using a symbol, because I tried experimenting with different
+ representations */
+#define SVN_EMPTY_PATH ""
+
+/* This check must match the check on top of dirent_uri.c and
+ dirent_uri-tests.c */
+#if defined(WIN32) || defined(__CYGWIN__) || defined(__OS2__)
+#define SVN_USE_DOS_PATHS
+#endif
+
+static svn_error_t *
+test_path_is_child(apr_pool_t *pool)
+{
+ int i, j;
+
+/* The path checking code is platform specific, so we shouldn't run
+ the Windows path handling testcases on non-Windows platforms.
+ */
+#define NUM_TEST_PATHS 11
+
+ static const char * const paths[NUM_TEST_PATHS] = {
+ "/foo/bar",
+ "/foo/bars",
+ "/foo/baz",
+ "/foo/bar/baz",
+ "/flu/blar/blaz",
+ "/foo/bar/baz/bing/boom",
+ SVN_EMPTY_PATH,
+ "foo",
+ ".foo",
+ "/",
+ "foo2",
+ };
+
+ static const char * const remainders[NUM_TEST_PATHS][NUM_TEST_PATHS] = {
+ { 0, 0, 0, "baz", 0, "baz/bing/boom", 0, 0, 0, 0, 0 },
+ { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+ { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+ { 0, 0, 0, 0, 0, "bing/boom", 0, 0, 0, 0, 0 },
+ { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+ { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+ { 0, 0, 0, 0, 0, 0, 0, "foo", ".foo", 0, "foo2" },
+ { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+ { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+ { "foo/bar", "foo/bars", "foo/baz", "foo/bar/baz", "flu/blar/blaz",
+ "foo/bar/baz/bing/boom", 0, 0, 0, 0, 0 },
+ { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 },
+ };
+
+ for (i = 0; i < NUM_TEST_PATHS; i++)
+ {
+ for (j = 0; j < NUM_TEST_PATHS; j++)
+ {
+ const char *remainder;
+
+ remainder = svn_path_is_child(paths[i], paths[j], pool);
+
+ if (((remainder) && (! remainders[i][j]))
+ || ((! remainder) && (remainders[i][j]))
+ || (remainder && strcmp(remainder, remainders[i][j])))
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_is_child (%s, %s) returned '%s' instead of '%s'",
+ paths[i], paths[j],
+ remainder ? remainder : "(null)",
+ remainders[i][j] ? remainders[i][j] : "(null)" );
+ }
+ }
+#undef NUM_TEST_PATHS
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_path_split(apr_pool_t *pool)
+{
+ apr_size_t i;
+
+ static const char * const paths[][3] = {
+ { "/foo/bar", "/foo", "bar" },
+ { "/foo/bar/ ", "/foo/bar", " " },
+ { "/foo", "/", "foo" },
+ { "foo", SVN_EMPTY_PATH, "foo" },
+ { ".bar", SVN_EMPTY_PATH, ".bar" },
+ { "/.bar", "/", ".bar" },
+ { "foo/bar", "foo", "bar" },
+ { "/foo/bar", "/foo", "bar" },
+ { "foo/bar", "foo", "bar" },
+ { "foo./.bar", "foo.", ".bar" },
+ { "../foo", "..", "foo" },
+ { SVN_EMPTY_PATH, SVN_EMPTY_PATH, SVN_EMPTY_PATH },
+ { "/flu\\b/\\blarg", "/flu\\b", "\\blarg" },
+ { "/", "/", "/" },
+ };
+
+ for (i = 0; i < sizeof(paths) / sizeof(paths[0]); i++)
+ {
+ const char *dir, *base_name;
+
+ svn_path_split(paths[i][0], &dir, &base_name, pool);
+ if (strcmp(dir, paths[i][1]))
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_split (%s) returned dirname '%s' instead of '%s'",
+ paths[i][0], dir, paths[i][1]);
+ }
+ if (strcmp(base_name, paths[i][2]))
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_split (%s) returned basename '%s' instead of '%s'",
+ paths[i][0], base_name, paths[i][2]);
+ }
+ }
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_path_is_url(apr_pool_t *pool)
+{
+ apr_size_t i;
+
+ /* Paths to test and their expected results. */
+ struct {
+ const char *path;
+ svn_boolean_t result;
+ } tests[] = {
+ { "", FALSE },
+ { "/blah/blah", FALSE },
+ { "//blah/blah", FALSE },
+ { "://blah/blah", FALSE },
+ { "a:abb://boo/", FALSE },
+ { "http://svn.apache.org/repos/asf/subversion", TRUE },
+ { "scheme/with", FALSE },
+ { "scheme/with:", FALSE },
+ { "scheme/with:/", FALSE },
+ { "scheme/with://", FALSE },
+ { "scheme/with://slash/", FALSE },
+ { "file:///path/to/repository", TRUE },
+ { "file://", TRUE },
+ { "file:/", FALSE },
+ { "file:", FALSE },
+ { "file", FALSE },
+#ifdef SVN_USE_DOS_PATHS
+ { "X:/foo", FALSE },
+ { "X:foo", FALSE },
+ { "X:", FALSE },
+#endif /* non-WIN32 */
+ { "X:/", FALSE },
+ { "//srv/shr", FALSE },
+ { "//srv/shr/fld", FALSE },
+ };
+
+ for (i = 0; i < sizeof(tests) / sizeof(tests[0]); i++)
+ {
+ svn_boolean_t retval;
+
+ retval = svn_path_is_url(tests[i].path);
+ if (tests[i].result != retval)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_is_url (%s) returned %s instead of %s",
+ tests[i].path, retval ? "TRUE" : "FALSE",
+ tests[i].result ? "TRUE" : "FALSE");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_path_is_uri_safe(apr_pool_t *pool)
+{
+ apr_size_t i;
+
+ /* Paths to test and their expected results. */
+ struct {
+ const char *path;
+ svn_boolean_t result;
+ } tests[] = {
+ { "http://svn.collab.net/repos", TRUE },
+ { "http://svn.collab.net/repos%", FALSE },
+ { "http://svn.collab.net/repos%/svn", FALSE },
+ { "http://svn.collab.net/repos%2g", FALSE },
+ { "http://svn.collab.net/repos%2g/svn", FALSE },
+ { "http://svn.collab.net/repos%%", FALSE },
+ { "http://svn.collab.net/repos%%/svn", FALSE },
+ { "http://svn.collab.net/repos%2a", TRUE },
+ { "http://svn.collab.net/repos%2a/svn", TRUE },
+ };
+
+ for (i = 0; i < (sizeof(tests) / sizeof(tests[0])); i++)
+ {
+ svn_boolean_t retval;
+
+ retval = svn_path_is_uri_safe(tests[i].path);
+ if (tests[i].result != retval)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_is_uri_safe (%s) returned %s instead of %s",
+ tests[i].path, retval ? "TRUE" : "FALSE",
+ tests[i].result ? "TRUE" : "FALSE");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_uri_encode(apr_pool_t *pool)
+{
+ int i;
+
+ struct {
+ const char *path;
+ const char *result;
+ } tests[] = {
+ { "http://subversion.tigris.org",
+ "http://subversion.tigris.org"},
+ { " special_at_beginning",
+ "%20special_at_beginning" },
+ { "special_at_end ",
+ "special_at_end%20" },
+ { "special in middle",
+ "special%20in%20middle" },
+ { "\"Ouch!\" \"Did that hurt?\"",
+ "%22Ouch!%22%20%20%22Did%20that%20hurt%3F%22" }
+ };
+
+ for (i = 0; i < 5; i++)
+ {
+ const char *en_path, *de_path;
+
+ /* URI-encode the path, and verify the results. */
+ en_path = svn_path_uri_encode(tests[i].path, pool);
+ if (strcmp(en_path, tests[i].result))
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_uri_encode ('%s') returned '%s' instead of '%s'",
+ tests[i].path, en_path, tests[i].result);
+ }
+
+ /* URI-decode the path, and make sure we're back where we started. */
+ de_path = svn_path_uri_decode(en_path, pool);
+ if (strcmp(de_path, tests[i].path))
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_uri_decode ('%s') returned '%s' instead of '%s'",
+ tests[i].result, de_path, tests[i].path);
+ }
+ }
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_uri_decode(apr_pool_t *pool)
+{
+ int i;
+
+ struct {
+ const char *path;
+ const char *result;
+ } tests[] = {
+ { "http://c.r.a/s%\0" "8me",
+ "http://c.r.a/s%"},
+ { "http://c.r.a/s%6\0" "me",
+ "http://c.r.a/s%6" },
+ { "http://c.r.a/s%68me",
+ "http://c.r.a/shme" },
+ };
+
+ for (i = 0; i < 3; i++)
+ {
+ const char *de_path;
+
+ /* URI-decode the path, and verify the results. */
+ de_path = svn_path_uri_decode(tests[i].path, pool);
+ if (strcmp(de_path, tests[i].result))
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_uri_decode ('%s') returned '%s' instead of '%s'",
+ tests[i].path, de_path, tests[i].result);
+ }
+ }
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_uri_autoescape(apr_pool_t *pool)
+{
+ struct {
+ const char *path;
+ const char *result;
+ } tests[] = {
+ { "http://svn.collab.net/", "http://svn.collab.net/" },
+ { "file:///<>\" {}|\\^`", "file:///%3C%3E%22%20%7B%7D%7C%5C%5E%60" },
+ { "http://[::1]", "http://[::1]" }
+ };
+ int i;
+
+ for (i = 0; i < 3; ++i)
+ {
+ const char* uri = svn_path_uri_autoescape(tests[i].path, pool);
+ if (strcmp(uri, tests[i].result) != 0)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_uri_autoescape on '%s' returned '%s' instead of '%s'",
+ tests[i].path, uri, tests[i].result);
+ if (strcmp(tests[i].path, tests[i].result) == 0
+ && tests[i].path != uri)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_uri_autoescape on '%s' returned identical but not same"
+ " string", tests[i].path);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_uri_from_iri(apr_pool_t *pool)
+{
+ /* We have to code the IRIs like this because the compiler might translate
+ character and string literals outside of ASCII to some character set,
+ but here we are hard-coding UTF-8. But we all read UTF-8 codes like
+ poetry, don't we. */
+ static const char p1[] = {
+ '\x66', '\x69', '\x6C', '\x65', '\x3A', '\x2F', '\x2F', '\x2F',
+ '\x72', '\xC3', '\xA4', '\x6B', '\x73', '\x6D', '\xC3', '\xB6', '\x72',
+ '\x67', '\xC3', '\xA5', '\x73', '\0' };
+ static const char p2[] = {
+ '\x66', '\x69', '\x6C', '\x65', '\x3A', '\x2F', '\x2F', '\x2F',
+ '\x61', '\x62', '\x25', '\x32', '\x30', '\x63', '\x64', '\0' };
+ static const char *paths[2][2] = {
+ { p1,
+ "file:///r%C3%A4ksm%C3%B6rg%C3%A5s" },
+ { p2,
+ "file:///ab%20cd" }
+ };
+ int i;
+
+ for (i = 0; i < 2; ++i)
+ {
+ const char *uri = svn_path_uri_from_iri(paths[i][0], pool);
+ if (strcmp(paths[i][1], uri) != 0)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_uri_from_iri on '%s' returned '%s' instead of '%s'",
+ paths[i][0], uri, paths[i][1]);
+ if (strcmp(paths[i][0], uri) == 0
+ && paths[i][0] != uri)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_uri_from_iri on '%s' returned identical but not same"
+ " string", paths[i][0]);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_path_join(apr_pool_t *pool)
+{
+ int i;
+ char *result;
+
+ static const char * const joins[][3] = {
+ { "abc", "def", "abc/def" },
+ { "a", "def", "a/def" },
+ { "a", "d", "a/d" },
+ { "/", "d", "/d" },
+ { "/abc", "d", "/abc/d" },
+ { "/abc", "def", "/abc/def" },
+ { "/abc", "/def", "/def" },
+ { "/abc", "/d", "/d" },
+ { "/abc", "/", "/" },
+ { SVN_EMPTY_PATH, "/", "/" },
+ { "/", SVN_EMPTY_PATH, "/" },
+ { SVN_EMPTY_PATH, "abc", "abc" },
+ { "abc", SVN_EMPTY_PATH, "abc" },
+ { SVN_EMPTY_PATH, "/abc", "/abc" },
+ { SVN_EMPTY_PATH, SVN_EMPTY_PATH, SVN_EMPTY_PATH },
+ { "X:/abc", "/d", "/d" },
+ { "X:/abc", "/", "/" },
+ { "X:",SVN_EMPTY_PATH, "X:" },
+ { "X:", "/def", "/def" },
+ { "X:abc", "/d", "/d" },
+ { "X:abc", "/", "/" },
+ { "file://", "foo", "file:///foo" },
+ { "file:///foo", "bar", "file:///foo/bar" },
+ { "file:///foo", SVN_EMPTY_PATH, "file:///foo" },
+ { SVN_EMPTY_PATH, "file:///foo", "file:///foo" },
+ { "file:///X:", "bar", "file:///X:/bar" },
+ { "file:///X:foo", "bar", "file:///X:foo/bar" },
+ { "http://svn.dm.net", "repos", "http://svn.dm.net/repos" },
+#ifdef SVN_USE_DOS_PATHS
+/* These will fail, see issue #2028
+ { "//srv/shr", "fld", "//srv/shr/fld" },
+ { "//srv", "shr/fld", "//srv/shr/fld" },
+ { "//srv/shr/fld", "subfld", "//srv/shr/fld/subfld" },
+ { "//srv/shr/fld", "//srv/shr", "//srv/shr" },
+ { "//srv", "//srv/fld", "//srv/fld" },
+ { "X:abc", "X:/def", "X:/def" }, { "X:/",SVN_EMPTY_PATH, "X:/" },
+ { "X:/","abc", "X:/abc" },
+ { "X:/", "/def", "/def" },
+ { "X:/abc", "X:/", "X:/" },
+ { "X:abc", "X:/", "X:/" },
+ { "X:abc", "X:/def", "X:/def" },
+ { "X:","abc", "X:abc" },
+ { "X:/abc", "X:/def", "X:/def" },
+*/
+#else /* WIN32 or Cygwin */
+ { "X:abc", "X:/def", "X:abc/X:/def" },
+ { "X:","abc", "X:/abc" },
+ { "X:/abc", "X:/def", "X:/abc/X:/def" },
+#endif /* non-WIN32 */
+ };
+
+ for (i = sizeof(joins) / sizeof(joins[0]); i--; )
+ {
+ const char *base = joins[i][0];
+ const char *comp = joins[i][1];
+ const char *expect = joins[i][2];
+
+ result = svn_path_join(base, comp, pool);
+ if (strcmp(result, expect))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_join(\"%s\", \"%s\") returned "
+ "\"%s\". expected \"%s\"",
+ base, comp, result, expect);
+
+ /* svn_path_join_many does not support URLs, so skip the URL tests. */
+ if (svn_path_is_url(base))
+ continue;
+
+ result = svn_path_join_many(pool, base, comp, SVN_VA_NULL);
+ if (strcmp(result, expect))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_join_many(\"%s\", \"%s\") returned "
+ "\"%s\". expected \"%s\"",
+ base, comp, result, expect);
+ }
+
+#define TEST_MANY(args, expect) \
+ result = svn_path_join_many args ; \
+ if (strcmp(result, expect) != 0) \
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, \
+ "svn_path_join_many" #args " returns \"%s\". " \
+ "expected \"%s\"", \
+ result, expect);
+
+ TEST_MANY((pool, "abc", SVN_VA_NULL), "abc");
+ TEST_MANY((pool, "/abc", SVN_VA_NULL), "/abc");
+ TEST_MANY((pool, "/", SVN_VA_NULL), "/");
+
+ TEST_MANY((pool, "abc", "def", "ghi", SVN_VA_NULL), "abc/def/ghi");
+ TEST_MANY((pool, "abc", "/def", "ghi", SVN_VA_NULL), "/def/ghi");
+ TEST_MANY((pool, "/abc", "def", "ghi", SVN_VA_NULL), "/abc/def/ghi");
+ TEST_MANY((pool, "abc", "def", "/ghi", SVN_VA_NULL), "/ghi");
+ TEST_MANY((pool, "/", "def", "/ghi", SVN_VA_NULL), "/ghi");
+ TEST_MANY((pool, "/", "/def", "/ghi", SVN_VA_NULL), "/ghi");
+
+ TEST_MANY((pool, SVN_EMPTY_PATH, "def", "ghi", SVN_VA_NULL), "def/ghi");
+ TEST_MANY((pool, "abc", SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "abc/ghi");
+ TEST_MANY((pool, "abc", "def", SVN_EMPTY_PATH, SVN_VA_NULL), "abc/def");
+ TEST_MANY((pool, SVN_EMPTY_PATH, "def", SVN_EMPTY_PATH, SVN_VA_NULL), "def");
+ TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "ghi");
+ TEST_MANY((pool, "abc", SVN_EMPTY_PATH, SVN_EMPTY_PATH, SVN_VA_NULL), "abc");
+ TEST_MANY((pool, SVN_EMPTY_PATH, "def", "/ghi", SVN_VA_NULL), "/ghi");
+ TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "/ghi", SVN_VA_NULL), "/ghi");
+
+ TEST_MANY((pool, "/", "def", "ghi", SVN_VA_NULL), "/def/ghi");
+ TEST_MANY((pool, "abc", "/", "ghi", SVN_VA_NULL), "/ghi");
+ TEST_MANY((pool, "abc", "def", "/", SVN_VA_NULL), "/");
+ TEST_MANY((pool, "/", "/", "ghi", SVN_VA_NULL), "/ghi");
+ TEST_MANY((pool, "/", "/", "/", SVN_VA_NULL), "/");
+ TEST_MANY((pool, "/", SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "/ghi");
+ TEST_MANY((pool, "/", "def", SVN_EMPTY_PATH, SVN_VA_NULL), "/def");
+ TEST_MANY((pool, SVN_EMPTY_PATH, "/", "ghi", SVN_VA_NULL), "/ghi");
+ TEST_MANY((pool, "/", SVN_EMPTY_PATH, SVN_EMPTY_PATH, SVN_VA_NULL), "/");
+ TEST_MANY((pool, SVN_EMPTY_PATH, "/", SVN_EMPTY_PATH, SVN_VA_NULL), "/");
+ TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "/", SVN_VA_NULL), "/");
+
+#ifdef SVN_USE_DOS_PATHS
+/* These will fail, see issue #2028
+ TEST_MANY((pool, "X:", "def", "ghi", SVN_VA_NULL), "X:def/ghi");
+ TEST_MANY((pool, "X:", SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "X:ghi");
+ TEST_MANY((pool, "X:", "def", SVN_EMPTY_PATH, SVN_VA_NULL), "X:def");
+ TEST_MANY((pool, SVN_EMPTY_PATH, "X:", "ghi", SVN_VA_NULL), "X:ghi");
+ TEST_MANY((pool, "X:/", "def", "ghi", SVN_VA_NULL), "X:/def/ghi");
+ TEST_MANY((pool, "abc", "X:/", "ghi", SVN_VA_NULL), "X:/ghi");
+ TEST_MANY((pool, "abc", "def", "X:/", SVN_VA_NULL), "X:/");
+ TEST_MANY((pool, "X:/", "X:/", "ghi", SVN_VA_NULL), "X:/ghi");
+ TEST_MANY((pool, "X:/", "X:/", "/", SVN_VA_NULL), "/");
+ TEST_MANY((pool, "X:/", SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "X:/ghi");
+ TEST_MANY((pool, "X:/", "def", SVN_EMPTY_PATH, SVN_VA_NULL), "X:/def");
+ TEST_MANY((pool, SVN_EMPTY_PATH, "X:/", "ghi", SVN_VA_NULL), "X:/ghi");
+ TEST_MANY((pool, "X:/", SVN_EMPTY_PATH, SVN_EMPTY_PATH, SVN_VA_NULL), "X:/");
+ TEST_MANY((pool, SVN_EMPTY_PATH, "X:/", SVN_EMPTY_PATH, SVN_VA_NULL), "X:/");
+ TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "X:/", SVN_VA_NULL), "X:/");
+ TEST_MANY((pool, "X:", "X:/", "ghi", SVN_VA_NULL), "X:/ghi");
+ TEST_MANY((pool, "X:", "X:/", "/", SVN_VA_NULL), "/");
+
+ TEST_MANY((pool, "//srv/shr", "def", "ghi", SVN_VA_NULL), "//srv/shr/def/ghi");
+ TEST_MANY((pool, "//srv", "shr", "def", "ghi", SVN_VA_NULL), "//srv/shr/def/ghi");
+ TEST_MANY((pool, "//srv/shr/fld", "def", "ghi", SVN_VA_NULL),
+ "//srv/shr/fld/def/ghi");
+ TEST_MANY((pool, "//srv/shr/fld", "def", "//srv/shr", SVN_VA_NULL), "//srv/shr");
+ TEST_MANY((pool, "//srv", "shr", "//srv/shr", SVN_VA_NULL), "//srv/shr");
+ TEST_MANY((pool, SVN_EMPTY_PATH, "//srv/shr/fld", "def", "ghi", SVN_VA_NULL),
+ "//srv/shr/fld/def/ghi");
+ TEST_MANY((pool, SVN_EMPTY_PATH, "//srv/shr/fld", "def", "//srv/shr", SVN_VA_NULL),
+ "//srv/shr");
+*/
+#else /* WIN32 or Cygwin */
+ TEST_MANY((pool, "X:", "def", "ghi", SVN_VA_NULL), "X:/def/ghi");
+ TEST_MANY((pool, "X:", SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "X:/ghi");
+ TEST_MANY((pool, "X:", "def", SVN_EMPTY_PATH, SVN_VA_NULL), "X:/def");
+ TEST_MANY((pool, SVN_EMPTY_PATH, "X:", "ghi", SVN_VA_NULL), "X:/ghi");
+#endif /* non-WIN32 */
+
+ /* ### probably need quite a few more tests... */
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_path_basename(apr_pool_t *pool)
+{
+ int i;
+ char *result;
+
+ struct {
+ const char *path;
+ const char *result;
+ } tests[] = {
+ { "abc", "abc" },
+ { "/abc", "abc" },
+ { "/abc", "abc" },
+ { "/x/abc", "abc" },
+ { "/xx/abc", "abc" },
+ { "/xx/abc", "abc" },
+ { "/xx/abc", "abc" },
+ { "a", "a" },
+ { "/a", "a" },
+ { "/b/a", "a" },
+ { "/b/a", "a" },
+ { "/", "/" },
+ { SVN_EMPTY_PATH, SVN_EMPTY_PATH },
+ { "X:/abc", "abc" },
+ { "X:", "X:" },
+
+#ifdef SVN_USE_DOS_PATHS
+/* These will fail, see issue #2028
+ { "X:/", "X:/" },
+ { "X:abc", "abc" },
+ { "//srv/shr", "//srv/shr" },
+ { "//srv", "//srv" },
+ { "//srv/shr/fld", "fld" },
+ { "//srv/shr/fld/subfld", "subfld" },
+*/
+#else /* WIN32 or Cygwin */
+ { "X:abc", "X:abc" },
+#endif /* non-WIN32 */
+ };
+
+ for (i = sizeof(tests) / sizeof(tests[0]); i--; )
+ {
+ const char *path = tests[i].path;
+ const char *expect = tests[i].result;
+
+ result = svn_path_basename(path, pool);
+ if (strcmp(result, expect))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_basename(\"%s\") returned "
+ "\"%s\". expected \"%s\"",
+ path, result, expect);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_path_dirname(apr_pool_t *pool)
+{
+ int i;
+ char *result;
+
+ struct {
+ const char *path;
+ const char *result;
+ } tests[] = {
+ { "abc", "" },
+ { "/abc", "/" },
+ { "/x/abc", "/x" },
+ { "/xx/abc", "/xx" },
+ { "a", "" },
+ { "/a", "/" },
+ { "/b/a", "/b" },
+ { "/", "/" },
+ { SVN_EMPTY_PATH, SVN_EMPTY_PATH },
+ { "X:abc/def", "X:abc" },
+#ifdef SVN_USE_DOS_PATHS
+ { "//srv/shr/fld", "//srv/shr" },
+ { "//srv/shr/fld/subfld", "//srv/shr/fld" },
+
+/* These will fail, see issue #2028
+ { "X:/", "X:/" },
+ { "X:/abc", "X:/" },
+ { "X:", "X:" },
+ { "X:abc", "X:" },
+ { "//srv/shr", "//srv/shr" },
+*/
+#else /* WIN32 or Cygwin */
+ /* on non-Windows platforms, ':' is allowed in pathnames */
+ { "X:", "" },
+ { "X:abc", "" },
+#endif /* non-WIN32 */
+ };
+
+ for (i = sizeof(tests) / sizeof(tests[0]); i--; )
+ {
+ const char *path = tests[i].path;
+ const char *expect = tests[i].result;
+
+ result = svn_path_dirname(path, pool);
+ if (strcmp(result, expect))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_dirname(\"%s\") returned "
+ "\"%s\". expected \"%s\"",
+ path, result, expect);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_path_decompose(apr_pool_t *pool)
+{
+ static const char * const paths[] = {
+ "/", "/", NULL,
+ "foo", "foo", NULL,
+ "/foo", "/", "foo", NULL,
+ "/foo/bar", "/", "foo", "bar", NULL,
+ "foo/bar", "foo", "bar", NULL,
+
+ /* Are these canonical? Should the middle bits produce SVN_EMPTY_PATH? */
+ "foo/bar", "foo", "bar", NULL,
+ NULL,
+ };
+ int i = 0;
+
+ for (;;)
+ {
+ if (! paths[i])
+ break;
+ else
+ {
+ apr_array_header_t *components = svn_path_decompose(paths[i], pool);
+ int j;
+ for (j = 0; j < components->nelts; ++j)
+ {
+ const char *component = APR_ARRAY_IDX(components,
+ j,
+ const char*);
+ if (! paths[i+j+1])
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_decompose(\"%s\") returned "
+ "unexpected component \"%s\"",
+ paths[i], component);
+ if (strcmp(component, paths[i+j+1]))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_decompose(\"%s\") returned "
+ "\"%s\" expected \"%s\"",
+ paths[i], component, paths[i+j+1]);
+ }
+ if (paths[i+j+1])
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_decompose(\"%s\") failed "
+ "to return \"%s\"",
+ paths[i], paths[i+j+1]);
+ i += components->nelts + 2;
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_path_canonicalize(apr_pool_t *pool)
+{
+ struct {
+ const char *path;
+ const char *result;
+ } tests[] = {
+ { "", "" },
+ { ".", "" },
+ { "/", "/" },
+ { "/.", "/" },
+ { "./", "" },
+ { "./.", "" },
+ { "//", "/" },
+ { "/////", "/" },
+ { "./././.", "" },
+ { "////././.", "/" },
+ { "foo", "foo" },
+ { ".foo", ".foo" },
+ { "foo.", "foo." },
+ { "/foo", "/foo" },
+ { "foo/", "foo" },
+ { "foo//", "foo" },
+ { "foo///", "foo" },
+ { "foo./", "foo." },
+ { "foo./.", "foo." },
+ { "foo././/.", "foo." },
+ { "/foo/bar", "/foo/bar" },
+ { "foo/..", "foo/.." },
+ { "foo/../", "foo/.." },
+ { "foo/../.", "foo/.." },
+ { "foo//.//bar", "foo/bar" },
+ { "///foo", "/foo" },
+ { "/.//./.foo", "/.foo" },
+ { ".///.foo", ".foo" },
+ { "../foo", "../foo" },
+ { "../../foo/", "../../foo" },
+ { "../../foo/..", "../../foo/.." },
+ { "/../../", "/../.." },
+ { "dirA", "dirA" },
+ { "foo/dirA", "foo/dirA" },
+ { "http://hst", "http://hst" },
+ { "http://hst/foo/../bar","http://hst/foo/../bar" },
+ { "http://hst/", "http://hst" },
+ { "http:///", "http://" },
+ { "https://", "https://" },
+ { "file:///", "file://" },
+ { "file://", "file://" },
+ { "svn:///", "svn://" },
+ { "svn+ssh:///", "svn+ssh://" },
+ { "http://HST/", "http://hst" },
+ { "http://HST/FOO/BaR", "http://hst/FOO/BaR" },
+ { "svn+ssh://j.raNDom@HST/BaR", "svn+ssh://j.raNDom@hst/BaR" },
+ { "svn+SSH://j.random:jRaY@HST/BaR", "svn+ssh://j.random:jRaY@hst/BaR" },
+ { "SVN+ssh://j.raNDom:jray@HST/BaR", "svn+ssh://j.raNDom:jray@hst/BaR" },
+ { "fILe:///Users/jrandom/wc", "file:///Users/jrandom/wc" },
+ { "fiLE:///", "file://" },
+ { "fiLE://", "file://" },
+ { "X:/foo", "X:/foo" },
+ { "X:", "X:" },
+ { "X:foo", "X:foo" },
+#ifdef SVN_USE_DOS_PATHS
+ { "file:///c:/temp/repos", "file:///C:/temp/repos" },
+ { "file:///c:/temp/REPOS", "file:///C:/temp/REPOS" },
+ { "file:///C:/temp/REPOS", "file:///C:/temp/REPOS" },
+ { "C:/folder/subfolder/file", "C:/folder/subfolder/file" },
+ /* We permit UNC paths on Windows. By definition UNC
+ * paths must have two components so we should remove the
+ * double slash if there is only one component. */
+ { "//hst", "/hst" },
+ { "//hst/./", "/hst" },
+ { "//server/share/", "//server/share" },
+ { "//server/SHare/", "//server/SHare" },
+ { "//SERVER/SHare/", "//server/SHare" },
+ { "X:/", "X:/" },
+#else /* WIN32 or Cygwin */
+ { "file:///c:/temp/repos", "file:///c:/temp/repos" },
+ { "file:///c:/temp/REPOS", "file:///c:/temp/REPOS" },
+ { "file:///C:/temp/REPOS", "file:///C:/temp/REPOS" },
+#endif /* non-WIN32 */
+ { NULL, NULL }
+ };
+ int i;
+
+ i = 0;
+ while (tests[i].path)
+ {
+ const char *canonical = svn_path_canonicalize(tests[i].path, pool);
+
+ if (strcmp(canonical, tests[i].result))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_canonicalize(\"%s\") returned "
+ "\"%s\" expected \"%s\"",
+ tests[i].path, canonical, tests[i].result);
+ ++i;
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_path_remove_component(apr_pool_t *pool)
+{
+ struct {
+ const char *path;
+ const char *result;
+ } tests[] = {
+ { "", "" },
+ { "/", "/" },
+ { "foo", "" },
+ { "foo/bar", "foo" },
+ { "/foo/bar", "/foo" },
+ { "/foo", "/" },
+#ifdef SVN_USE_DOS_PATHS
+ { "X:/foo/bar", "X:/foo" },
+ { "//srv/shr/fld", "//srv/shr" },
+ { "//srv/shr/fld/subfld", "//srv/shr/fld" },
+/* These will fail, see issue #2028
+ { "X:/foo", "X:/" },
+ { "X:/", "X:/" },
+ { "X:foo", "X:" },
+ { "X:", "X:" },
+ { "//srv/shr", "//srv/shr" },
+*/
+#else /* WIN32 or Cygwin */
+ { "X:foo", "" },
+ { "X:", "" },
+#endif /* non-WIN32 */
+ { NULL, NULL }
+ };
+ int i;
+ svn_stringbuf_t *buf;
+
+ buf = svn_stringbuf_create_empty(pool);
+
+ i = 0;
+ while (tests[i].path)
+ {
+ svn_stringbuf_set(buf, tests[i].path);
+
+ svn_path_remove_component(buf);
+
+ if (strcmp(buf->data, tests[i].result))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_remove_component(\"%s\") returned "
+ "\"%s\" expected \"%s\"",
+ tests[i].path, buf->data, tests[i].result);
+ ++i;
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_path_check_valid(apr_pool_t *pool)
+{
+ apr_size_t i;
+
+ /* Paths to test and their expected results. */
+ struct {
+ const char *path;
+ svn_boolean_t result;
+ } tests[] = {
+ { "/foo/bar", TRUE },
+ { "/foo", TRUE },
+ { "/", TRUE },
+ { "foo/bar", TRUE },
+ { "foo bar", TRUE },
+ { "foo\7bar", FALSE },
+ { "foo\31bar", FALSE },
+ { "\7foo\31bar", FALSE },
+ { "\7", FALSE },
+ { "", TRUE },
+ };
+
+ for (i = 0; i < sizeof(tests) / sizeof(tests[0]); i++)
+ {
+ svn_error_t *err = svn_path_check_valid(tests[i].path, pool);
+ svn_boolean_t retval = (err == SVN_NO_ERROR);
+
+ svn_error_clear(err);
+ if (tests[i].result != retval)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_check_valid (%s) returned %s instead of %s",
+ tests[i].path, retval ? "TRUE" : "FALSE",
+ tests[i].result ? "TRUE" : "FALSE");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_path_is_ancestor(apr_pool_t *pool)
+{
+ apr_size_t i;
+
+ /* Paths to test and their expected results. */
+ struct {
+ const char *path1;
+ const char *path2;
+ svn_boolean_t result;
+ } tests[] = {
+ { "/foo", "/foo/bar", TRUE},
+ { "/foo/bar", "/foo/bar/", TRUE},
+ { "/", "/foo", TRUE},
+ { SVN_EMPTY_PATH, "foo", TRUE},
+ { SVN_EMPTY_PATH, ".bar", TRUE},
+
+ { "/.bar", "/", FALSE},
+ { "foo/bar", "foo", FALSE},
+ { "/foo/bar", "/foo", FALSE},
+ { "foo", "foo/bar", TRUE},
+ { "foo.", "foo./.bar", TRUE},
+
+ { "../foo", "..", FALSE},
+ { SVN_EMPTY_PATH, SVN_EMPTY_PATH, TRUE},
+ { "/", "/", TRUE},
+
+ { "http://test", "http://test", TRUE},
+ { "http://test", "http://taste", FALSE},
+ { "http://test", "http://test/foo", TRUE},
+ { "http://test", "file://test/foo", FALSE},
+ { "http://test", "http://testF", FALSE},
+/*
+ TODO: this testcase fails, showing that svn_path_is_ancestor
+ shouldn't be used on urls. This is related to issue #1711.
+
+ { "http://", "http://test", FALSE},
+*/
+ { "X:foo", "X:bar", FALSE},
+#ifdef SVN_USE_DOS_PATHS
+ { "//srv/shr", "//srv", FALSE},
+ { "//srv/shr", "//srv/shr/fld", TRUE },
+ { "//srv", "//srv/shr/fld", TRUE },
+ { "//srv/shr/fld", "//srv/shr", FALSE },
+ { "//srv/shr/fld", "//srv2/shr/fld", FALSE },
+/* These will fail, see issue #2028
+ { "X:/", "X:/", TRUE},
+ { "X:/foo", "X:/", FALSE},
+ { "X:/", "X:/foo", TRUE},
+ { "X:", "X:foo", TRUE},
+*/
+#else /* WIN32 or Cygwin */
+ { "X:", "X:foo", FALSE},
+
+#endif /* non-WIN32 */
+ };
+
+ for (i = 0; i < sizeof(tests) / sizeof(tests[0]); i++)
+ {
+ svn_boolean_t retval;
+
+ retval = svn_path_is_ancestor(tests[i].path1, tests[i].path2);
+ if (tests[i].result != retval)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_is_ancestor (%s, %s) returned %s instead of %s",
+ tests[i].path1, tests[i].path2, retval ? "TRUE" : "FALSE",
+ tests[i].result ? "TRUE" : "FALSE");
+ }
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_is_single_path_component(apr_pool_t *pool)
+{
+ apr_size_t i;
+
+ /* Paths to test and their expected results.
+ * Note that these paths need to be canonical,
+ * else we might trigger an abort(). */
+ struct {
+ const char *path;
+ svn_boolean_t result;
+ } tests[] = {
+ { "/foo/bar", FALSE },
+ { "/foo", FALSE },
+ { "/", FALSE },
+ { "foo/bar", FALSE },
+ { "foo", TRUE },
+ { "..", FALSE },
+ { "", FALSE },
+ };
+
+ for (i = 0; i < sizeof(tests) / sizeof(tests[0]); i++)
+ {
+ svn_boolean_t retval;
+
+ retval = svn_path_is_single_path_component(tests[i].path);
+ if (tests[i].result != retval)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_is_single_path_component (%s) returned %s instead of %s",
+ tests[i].path, retval ? "TRUE" : "FALSE",
+ tests[i].result ? "TRUE" : "FALSE");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_compare_paths(apr_pool_t *pool)
+{
+ apr_size_t i;
+
+ /* Paths to test and their expected results. */
+ struct {
+ const char *path1;
+ const char *path2;
+ int result;
+ } tests[] = {
+ { "/foo", "/foo", 0},
+ { "/foo/bar", "/foo/bar", 0},
+ { "/", "/", 0},
+ { SVN_EMPTY_PATH, SVN_EMPTY_PATH, 0},
+ { "foo", "foo", 0},
+ { "foo", "foo/bar", -1},
+ { "foo/bar", "foo/boo", -1},
+ { "boo", "foo", -1},
+ { "foo", "boo", 1},
+ { "foo/bar", "foo", 1},
+ { "/", "/foo", -1},
+ { "/foo", "/foo/bar", -1},
+ { "/foo", "/foo/bar/boo", -1},
+ { "foo", "/foo", 1},
+ { "foo\xe0""bar", "foo", 1},
+ { "X:/foo", "X:/foo", 0},
+ { "X:foo", "X:foo", 0},
+ { "X:", "X:foo", -1},
+ { "X:foo", "X:", 1},
+#ifdef SVN_USE_DOS_PATHS
+ { "//srv/shr", "//srv", 1},
+ { "//srv/shr", "//srv/shr/fld", -1 },
+ { "//srv/shr/fld", "//srv/shr", 1 },
+ { "//srv/shr/fld", "//abc/def/ghi", 1 },
+/* These will fail, see issue #2028
+ { "X:/", "X:/", 0},
+ { "X:/", "X:/foo", -1},
+ { "X:/foo", "X:/", 1},
+*/
+#endif /* WIN32 or Cygwin */
+ };
+
+ for (i = 0; i < sizeof(tests) / sizeof(tests[0]); i++)
+ {
+ int retval;
+
+ retval = svn_path_compare_paths(tests[i].path1, tests[i].path2);
+ /* tests if expected and actual result are both < 0,
+ equal to 0 or greater than 0. */
+ if (! (tests[i].result * retval > 0 ||
+ (tests[i].result == 0 && retval == 0)) )
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_compare_paths (%s, %s) returned %d instead of %d",
+ tests[i].path1, tests[i].path2, retval, tests[i].result);
+ }
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_path_get_longest_ancestor(apr_pool_t *pool)
+{
+ apr_size_t i;
+
+ /* Paths to test and their expected results. */
+ struct {
+ const char *path1;
+ const char *path2;
+ const char *result;
+ } tests[] = {
+ { "/foo", "/foo/bar", "/foo"},
+ { "/foo/bar", "foo/bar", ""},
+ { "/", "/foo", "/"},
+ { SVN_EMPTY_PATH, "foo", SVN_EMPTY_PATH},
+ { SVN_EMPTY_PATH, ".bar", SVN_EMPTY_PATH},
+ { "/.bar", "/", "/"},
+ { "foo/bar", "foo", "foo"},
+ { "/foo/bar", "/foo", "/foo"},
+ { "/rif", "/raf", "/"},
+ { "foo", "foo/bar", "foo"},
+ { "foo.", "foo./.bar", "foo."},
+ { SVN_EMPTY_PATH, SVN_EMPTY_PATH, SVN_EMPTY_PATH},
+ { "/", "/", "/"},
+ { "http://test", "http://test", "http://test"},
+ { "http://test", "http://taste", ""},
+ { "http://test", "http://test/foo", "http://test"},
+ { "http://test", "file://test/foo", ""},
+ { "http://test", "http://tests", ""},
+ { "http://", "http://test", ""},
+ { "file:///A/C", "file:///B/D", ""},
+ { "file:///A/C", "file:///A/D", "file:///A"},
+
+#ifdef SVN_USE_DOS_PATHS
+ { "X:/", "X:/", "X:/"},
+ { "X:/foo/bar/A/D/H/psi", "X:/foo/bar/A/B", "X:/foo/bar/A" },
+ { "X:/foo/bar/boo", "X:/foo/bar/baz/boz", "X:/foo/bar"},
+ { "X:foo/bar", "X:foo/bar/boo", "X:foo/bar"},
+ { "//srv/shr", "//srv/shr/fld", "//srv/shr" },
+ { "//srv/shr/fld", "//srv/shr", "//srv/shr" },
+
+/* These will fail, see issue #2028
+ { "//srv/shr/fld", "//srv2/shr/fld", "" },
+ { "X:/foo", "X:/", "X:/"},
+ { "X:/folder1", "X:/folder2", "X:/"},
+ { "X:/", "X:/foo", "X:/"},
+ { "X:", "X:foo", "X:"},
+ { "X:", "X:/", ""},
+ { "X:foo", "X:bar", "X:"},
+*/
+#else /* WIN32 or Cygwin */
+ { "X:/foo", "X:", "X:"},
+ { "X:/folder1", "X:/folder2", "X:"},
+ { "X:", "X:foo", ""},
+ { "X:foo", "X:bar", ""},
+#endif /* non-WIN32 */
+ };
+
+ for (i = 0; i < sizeof(tests) / sizeof(tests[0]); i++)
+ {
+ const char *retval;
+
+ retval = svn_path_get_longest_ancestor(tests[i].path1, tests[i].path2,
+ pool);
+
+ if (strcmp(tests[i].result, retval))
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_get_longest_ancestor (%s, %s) returned %s instead of %s",
+ tests[i].path1, tests[i].path2, retval, tests[i].result);
+
+ /* changing the order of the paths should return the same results */
+ retval = svn_path_get_longest_ancestor(tests[i].path2, tests[i].path1,
+ pool);
+
+ if (strcmp(tests[i].result, retval))
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_get_longest_ancestor (%s, %s) returned %s instead of %s",
+ tests[i].path2, tests[i].path1, retval, tests[i].result);
+ }
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_path_splitext(apr_pool_t *pool)
+{
+ apr_size_t i;
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ /* Paths to test and their expected results. */
+ struct {
+ const char *path;
+ const char *path_root;
+ const char *path_ext;
+ } tests[] = {
+ { "no-ext", "no-ext", "" },
+ { "test-file.py", "test-file.", "py" },
+ { "period.file.ext", "period.file.", "ext" },
+ { "multi-component/file.txt", "multi-component/file.", "txt" },
+ { "yep.still/no-ext", "yep.still/no-ext", "" },
+ { "folder.with/period.log", "folder.with/period.", "log" },
+ { "period.", "period.", "" },
+ { "dir/period.", "dir/period.", "" },
+ { "file.ends-with/period.", "file.ends-with/period.", "" },
+ { "two-periods..txt", "two-periods..", "txt" },
+ { ".dot-file", ".dot-file", "" },
+ { "sub/.dot-file", "sub/.dot-file", "" },
+ { ".dot-file.withext", ".dot-file.", "withext" },
+ { "sub/.dot-file.withext", "sub/.dot-file.", "withext" },
+ { "sub/a.out", "sub/a.", "out" },
+ { "a.out", "a.", "out" },
+ { "", "", "" },
+ };
+
+ for (i = 0; i < sizeof(tests) / sizeof(tests[0]); i++)
+ {
+ const char *path = tests[i].path;
+ const char *path_root;
+ const char *path_ext;
+
+ svn_pool_clear(subpool);
+
+ /* First, we'll try splitting and fetching both root and
+ extension to see if they match our expected results. */
+ svn_path_splitext(&path_root, &path_ext, path, subpool);
+ if ((strcmp(tests[i].path_root, path_root))
+ || (strcmp(tests[i].path_ext, path_ext)))
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_splitext (%s) returned ('%s', '%s') "
+ "instead of ('%s', '%s')",
+ tests[i].path, path_root, path_ext,
+ tests[i].path_root, tests[i].path_ext);
+
+ /* Now, let's only fetch the root. */
+ svn_path_splitext(&path_root, NULL, path, subpool);
+ if (strcmp(tests[i].path_root, path_root))
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_splitext (%s) with a NULL path_ext returned '%s' "
+ "for the path_root instead of '%s'",
+ tests[i].path, path_root, tests[i].path_root);
+
+ /* Next, let's only fetch the extension. */
+ svn_path_splitext(NULL, &path_ext, path, subpool);
+ if ((strcmp(tests[i].path_root, path_root))
+ || (strcmp(tests[i].path_ext, path_ext)))
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_splitext (%s) with a NULL path_root returned '%s' "
+ "for the path_ext instead of '%s'",
+ tests[i].path, path_ext, tests[i].path_ext);
+ }
+ svn_pool_destroy(subpool);
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_path_compose(apr_pool_t *pool)
+{
+ static const char * const paths[] = {
+ "",
+ "/",
+ "/foo",
+ "/foo/bar",
+ "/foo/bar/baz",
+ "foo",
+ "foo/bar",
+ "foo/bar/baz",
+ NULL,
+ };
+ const char * const *path_ptr = paths;
+ const char *input_path;
+
+ for (input_path = *path_ptr; *path_ptr; input_path = *++path_ptr)
+ {
+ apr_array_header_t *components = svn_path_decompose(input_path, pool);
+ const char *output_path = svn_path_compose(components, pool);
+
+ if (strcmp(input_path, output_path))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_compose("
+ "svn_path_decompose(\"%s\")) "
+ "returned \"%s\" expected \"%s\"",
+ input_path, output_path, input_path);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_path_is_canonical(apr_pool_t *pool)
+{
+ struct {
+ const char *path;
+ svn_boolean_t canonical;
+ } tests[] = {
+ { "", TRUE },
+ { ".", FALSE },
+ { "/", TRUE },
+ { "/.", FALSE },
+ { "./", FALSE },
+ { "./.", FALSE },
+ { "//", FALSE },
+ { "/////", FALSE },
+ { "./././.", FALSE },
+ { "////././.", FALSE },
+ { "foo", TRUE },
+ { ".foo", TRUE },
+ { "foo.", TRUE },
+ { "/foo", TRUE },
+ { "foo/", FALSE },
+ { "foo./", FALSE },
+ { "foo./.", FALSE },
+ { "foo././/.", FALSE },
+ { "/foo/bar", TRUE },
+ { "foo/..", TRUE },
+ { "foo/../", FALSE },
+ { "foo/../.", FALSE },
+ { "foo//.//bar", FALSE },
+ { "///foo", FALSE },
+ { "/.//./.foo", FALSE },
+ { ".///.foo", FALSE },
+ { "../foo", TRUE },
+ { "../../foo/", FALSE },
+ { "../../foo/..", TRUE },
+ { "/../../", FALSE },
+ { "dirA", TRUE },
+ { "foo/dirA", TRUE },
+ { "http://hst", TRUE },
+ { "http://hst/foo/../bar", TRUE },
+ { "http://hst/", FALSE },
+ { "foo/./bar", FALSE },
+ { "http://HST/", FALSE },
+ { "http://HST/FOO/BaR", FALSE },
+ { "svn+ssh://j.raNDom@HST/BaR", FALSE },
+ { "svn+SSH://j.random:jRaY@HST/BaR", FALSE },
+ { "SVN+ssh://j.raNDom:jray@HST/BaR", FALSE },
+ { "svn+ssh://j.raNDom:jray@hst/BaR", TRUE },
+ { "fILe:///Users/jrandom/wc", FALSE },
+ { "fiLE:///", FALSE },
+ { "fiLE://", FALSE },
+#ifdef SVN_USE_DOS_PATHS
+ { "file:///c:/temp/repos", FALSE },
+ { "file:///c:/temp/REPOS", FALSE },
+ { "file:///C:/temp/REPOS", TRUE },
+ { "//server/share/", FALSE },
+ { "//server/share", TRUE },
+ { "//server/SHare", TRUE },
+ { "//SERVER/SHare", FALSE },
+ { "C:/folder/subfolder/file", TRUE },
+#else /* WIN32 or Cygwin */
+ { "file:///c:/temp/repos", TRUE },
+ { "file:///c:/temp/REPOS", TRUE },
+ { "file:///C:/temp/REPOS", TRUE },
+#endif /* non-WIN32 */
+ { NULL, FALSE },
+ };
+ int i;
+
+ for (i = 0; tests[i].path; i++)
+ {
+ svn_boolean_t canonical;
+
+ canonical = svn_path_is_canonical(tests[i].path, pool);
+ if (tests[i].canonical != canonical)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_is_canonical(\"%s\") returned "
+ "\"%s\" expected \"%s\"",
+ tests[i].path,
+ canonical ? "TRUE" : "FALSE",
+ tests[i].canonical ? "TRUE" : "FALSE");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_path_local_style(apr_pool_t *pool)
+{
+ struct {
+ const char *path;
+ const char *result;
+ } tests[] = {
+ { "", "." },
+ { ".", "." },
+ { "http://host/dir", "http://host/dir" }, /* Not with local separator */
+#ifdef SVN_USE_DOS_PATHS
+ { "A:/", "A:\\" },
+ { "a:/", "a:\\" },
+ { "A:/file", "A:\\file" },
+ { "dir/file", "dir\\file" },
+ { "/", "\\" },
+ { "//server/share/dir", "\\\\server\\share\\dir" },
+#else
+ { "a:/file", "a:/file" },
+ { "dir/file", "dir/file" },
+ { "/", "/" },
+#endif
+ { NULL, NULL }
+ };
+ int i = 0;
+
+ while (tests[i].path)
+ {
+ const char *local = svn_path_local_style(tests[i].path, pool);
+
+ if (strcmp(local, tests[i].result))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_local_style(\"%s\") returned "
+ "\"%s\" expected \"%s\"",
+ tests[i].path, local, tests[i].result);
+ ++i;
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_path_internal_style(apr_pool_t *pool)
+{
+ struct {
+ const char *path;
+ const char *result;
+ } tests[] = {
+ { "", "" },
+ { ".", "" },
+ { "http://host/dir", "http://host/dir" },
+ { "/", "/" },
+#ifdef SVN_USE_DOS_PATHS
+ { "a:\\", "A:/" },
+ { "a:\\file", "A:/file" },
+ { "dir\\file", "dir/file" },
+ { "\\", "/" },
+ { "\\\\server/share/dir", "//server/share/dir" },
+#else
+ { "a:/", "a:" },
+ { "a:/file", "a:/file" },
+ { "dir/file", "dir/file" },
+ { "/", "/" },
+ { "//server/share/dir", "/server/share/dir" },
+#endif
+ { NULL, NULL }
+ };
+ int i;
+
+ i = 0;
+ while (tests[i].path)
+ {
+ const char *local = svn_path_internal_style(tests[i].path, pool);
+
+ if (strcmp(local, tests[i].result))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_internal_style(\"%s\") returned "
+ "\"%s\" expected \"%s\"",
+ tests[i].path, local, tests[i].result);
+ ++i;
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* The type of a function to be tested by condense_targets_tests_helper().
+ * Matches svn_path_condense_targets().
+ */
+typedef svn_error_t *(*condense_targets_func_t)
+ (const char **pcommon,
+ apr_array_header_t **pcondensed_targets,
+ const apr_array_header_t *targets,
+ svn_boolean_t remove_redundancies,
+ apr_pool_t *pool);
+
+/** Executes function CONDENSE_TARGETS twice - with and without requesting the
+ * condensed targets list - on TEST_TARGETS (comma sep. string) and compares
+ * the results with EXP_COMMON and EXP_TARGETS (comma sep. string).
+ *
+ * @note: a '%' character at the beginning of EXP_COMMON or EXP_TARGETS will
+ * be replaced by the current working directory.
+ *
+ * Returns an error if any of the comparisons fail.
+ */
+static svn_error_t *
+condense_targets_tests_helper(const char* title,
+ const char* test_targets,
+ const char* exp_common,
+ const char* exp_targets,
+ const char* func_name,
+ condense_targets_func_t condense_targets,
+ apr_pool_t *pool)
+{
+ apr_array_header_t *targets;
+ apr_array_header_t *condensed_targets;
+ const char *common_path, *common_path2, *curdir;
+ char *token, *iter;
+ const char *exp_common_abs = exp_common;
+ int i;
+ char buf[8192];
+
+ if (! getcwd(buf, sizeof(buf)))
+ return svn_error_create(SVN_ERR_BASE, NULL, "getcwd() failed");
+ curdir = svn_path_internal_style(buf, pool);
+
+ /* Create the target array */
+ targets = apr_array_make(pool, sizeof(test_targets), sizeof(const char *));
+ token = apr_strtok(apr_pstrdup(pool, test_targets), ",", &iter);
+ while (token)
+ {
+ APR_ARRAY_PUSH(targets, const char *) =
+ svn_path_internal_style(token, pool);
+ token = apr_strtok(NULL, ",", &iter);
+ };
+
+ /* Call the function */
+ SVN_ERR(condense_targets(&common_path, &condensed_targets, targets,
+ TRUE, pool));
+
+ /* Verify the common part with the expected (prefix with cwd). */
+ if (*exp_common == '%')
+ exp_common_abs = apr_pstrcat(pool, curdir, exp_common + 1, SVN_VA_NULL);
+
+ if (strcmp(common_path, exp_common_abs) != 0)
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "%s (test %s) returned %s instead of %s",
+ func_name, title,
+ common_path, exp_common_abs);
+ }
+
+ /* Verify the condensed targets */
+ token = apr_strtok(apr_pstrdup(pool, exp_targets), ",", &iter);
+ for (i = 0; i < condensed_targets->nelts; i++)
+ {
+ const char * target = APR_ARRAY_IDX(condensed_targets, i, const char*);
+ if (token && (*token == '%'))
+ token = apr_pstrcat(pool, curdir, token + 1, SVN_VA_NULL);
+ if (! token ||
+ (target && (strcmp(target, token) != 0)))
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "%s (test %s) couldn't find %s in expected targets list",
+ func_name, title,
+ target);
+ }
+ token = apr_strtok(NULL, ",", &iter);
+ }
+
+  /* Now ensure it works when pcondensed_targets is NULL (common path only) */
+ SVN_ERR(condense_targets(&common_path2, NULL, targets, TRUE, pool));
+
+ /* Verify the common part again */
+ if (strcmp(common_path, common_path2) != 0)
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "%s (test %s): Common path without getting targets %s does not match" \
+ "common path with targets %s",
+ func_name, title,
+ common_path2, common_path);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_path_condense_targets(apr_pool_t *pool)
+{
+ int i;
+ struct {
+ const char* title;
+ const char* targets;
+ const char* exp_common;
+ const char* exp_targets;
+ } tests[] = {
+ { "normal use", "z/A/B,z/A,z/A/C,z/D/E,z/D/F,z/D,z/G,z/G/H,z/G/I",
+ "%/z", "A,D,G" },
+ {"identical dirs", "z/A,z/A,z/A,z/A",
+ "%/z/A", "" },
+ {"identical files", "z/A/file,z/A/file,z/A/file,z/A/file",
+ "%/z/A/file", "" },
+ {"single dir", "z/A",
+ "%/z/A", "" },
+ {"single file", "z/A/file",
+ "%/z/A/file", "" },
+ {"URLs", "http://host/A/C,http://host/A/C/D,http://host/A/B/D",
+ "http://host/A", "C,B/D" },
+ {"URLs with no common prefix",
+ "http://host1/A/C,http://host2/A/C/D,http://host3/A/B/D",
+ "", "http://host1/A/C,http://host2/A/C/D,http://host3/A/B/D" },
+ {"file URLs with no common prefix", "file:///A/C,file:///B/D",
+ "", "file:///A/C,file:///B/D" },
+ {"URLs with mixed protocols",
+ "http://host/A/C,file:///B/D,gopher://host/A",
+ "", "http://host/A/C,file:///B/D,gopher://host/A" },
+ {"mixed paths and URLs",
+ "z/A/B,z/A,http://host/A/C/D,http://host/A/C",
+ "", "%/z/A,http://host/A/C" },
+ };
+
+ for (i = 0; i < sizeof(tests) / sizeof(tests[0]); i++)
+ {
+ SVN_ERR(condense_targets_tests_helper(tests[i].title,
+ tests[i].targets,
+ tests[i].exp_common,
+ tests[i].exp_targets,
+ "svn_path_condense_targets",
+ svn_path_condense_targets,
+ pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_path_is_repos_relative_url(apr_pool_t *pool)
+{
+ int i;
+ struct {
+ const char* path;
+ svn_boolean_t result;
+ } tests[] = {
+ { "^/A", TRUE },
+ { "http://host/A", FALSE },
+ { "/A/B", FALSE },
+ };
+
+ for (i = 0; i < sizeof(tests) / sizeof(tests[0]); i++)
+ {
+ svn_boolean_t result = svn_path_is_repos_relative_url(tests[i].path);
+
+ if (tests[i].result != result)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_is_repos_relative_url(\"%s\")"
+ " returned \"%s\" expected \"%s\"",
+ tests[i].path,
+ result ? "TRUE" : "FALSE",
+ tests[i].result ? "TRUE" : "FALSE");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_path_resolve_repos_relative_url(apr_pool_t *pool)
+{
+ int i;
+ struct {
+ const char *relative_url;
+ const char *repos_root_url;
+ const char *absolute_url;
+ } tests[] = {
+ { "^/A", "file:///Z/X", "file:///Z/X/A" },
+ { "^/A", "file:///Z/X/", "file:///Z/X//A" }, /* doesn't canonicalize */
+ { "^/A@2", "file:///Z/X", "file:///Z/X/A@2" }, /* peg rev */
+ { "^/A", "/Z/X", "/Z/X/A" }, /* doesn't verify repos_root is URL */
+ };
+
+ for (i = 0; i < sizeof(tests) / sizeof(tests[0]); i++)
+ {
+ const char *result;
+
+ SVN_ERR(svn_path_resolve_repos_relative_url(&result,
+ tests[i].relative_url,
+ tests[i].repos_root_url,
+ pool));
+
+ if (strcmp(tests[i].absolute_url,result))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "svn_path_resolve_repos_relative_url(\"%s\","
+ "\"%s\") returned \"%s\" expected \"%s\"",
+ tests[i].relative_url,
+ tests[i].repos_root_url,
+ result, tests[i].absolute_url);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* local define to support XFail-ing tests on Windows/Cygwin only */
+#ifdef SVN_USE_DOS_PATHS
+#define WINDOWS_OR_CYGWIN TRUE
+#else
+#define WINDOWS_OR_CYGWIN FALSE
+#endif /* WIN32 or Cygwin */
+
+
+/* The test table. */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_path_is_child,
+ "test svn_path_is_child"),
+ SVN_TEST_PASS2(test_path_split,
+ "test svn_path_split"),
+ SVN_TEST_PASS2(test_path_is_url,
+ "test svn_path_is_url"),
+ SVN_TEST_PASS2(test_path_is_uri_safe,
+ "test svn_path_is_uri_safe"),
+ SVN_TEST_PASS2(test_uri_encode,
+ "test svn_path_uri_[en/de]code"),
+ SVN_TEST_PASS2(test_uri_decode,
+ "test svn_path_uri_decode with invalid escape"),
+ SVN_TEST_PASS2(test_uri_autoescape,
+ "test svn_path_uri_autoescape"),
+ SVN_TEST_PASS2(test_uri_from_iri,
+ "test svn_path_uri_from_iri"),
+ SVN_TEST_PASS2(test_path_join,
+ "test svn_path_join(_many)"),
+ SVN_TEST_PASS2(test_path_basename,
+ "test svn_path_basename"),
+ SVN_TEST_PASS2(test_path_dirname,
+ "test svn_path_dirname"),
+ SVN_TEST_PASS2(test_path_decompose,
+ "test svn_path_decompose"),
+ SVN_TEST_PASS2(test_path_canonicalize,
+ "test svn_path_canonicalize"),
+ SVN_TEST_PASS2(test_path_remove_component,
+ "test svn_path_remove_component"),
+ SVN_TEST_PASS2(test_path_is_ancestor,
+ "test svn_path_is_ancestor"),
+ SVN_TEST_PASS2(test_path_check_valid,
+ "test svn_path_check_valid"),
+ SVN_TEST_PASS2(test_is_single_path_component,
+ "test svn_path_is_single_path_component"),
+ SVN_TEST_PASS2(test_compare_paths,
+ "test svn_path_compare_paths"),
+ SVN_TEST_PASS2(test_path_get_longest_ancestor,
+ "test svn_path_get_longest_ancestor"),
+ SVN_TEST_PASS2(test_path_splitext,
+ "test svn_path_splitext"),
+ SVN_TEST_PASS2(test_path_compose,
+ "test svn_path_decompose"),
+ SVN_TEST_PASS2(test_path_is_canonical,
+ "test svn_path_is_canonical"),
+ SVN_TEST_PASS2(test_path_local_style,
+ "test svn_path_local_style"),
+ SVN_TEST_PASS2(test_path_internal_style,
+ "test svn_path_internal_style"),
+ SVN_TEST_PASS2(test_path_condense_targets,
+ "test svn_path_condense_targets"),
+ SVN_TEST_PASS2(test_path_is_repos_relative_url,
+ "test svn_path_is_repos_relative_url"),
+ SVN_TEST_PASS2(test_path_resolve_repos_relative_url,
+ "test svn_path_resolve_repos_relative_url"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/prefix-string-test.c b/subversion/tests/libsvn_subr/prefix-string-test.c
new file mode 100644
index 0000000..e420cff
--- /dev/null
+++ b/subversion/tests/libsvn_subr/prefix-string-test.c
@@ -0,0 +1,154 @@
+/*
+ * prefix-string-test.c: a collection of svn_prefix_string__* tests
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* ====================================================================
+ To add tests, look toward the bottom of this file.
+
+*/
+
+
+
+#include <stdio.h>
+#include <string.h>
+#include <apr_pools.h>
+
+#include "../svn_test.h"
+
+#include "svn_error.h"
+#include "svn_string.h" /* This includes <apr_*.h> */
+#include "private/svn_string_private.h"
+
+static svn_error_t *
+test_empty_string(apr_pool_t *pool)
+{
+ svn_prefix_tree__t *tree = svn_prefix_tree__create(pool);
+ svn_prefix_string__t *empty = svn_prefix_string__create(tree, "");
+
+ /* same instance for all strings of the same value */
+ SVN_TEST_ASSERT(empty == svn_prefix_string__create(tree, ""));
+
+ /* does it actually have the right contents? */
+ SVN_TEST_ASSERT(svn_prefix_string__expand(empty, pool)->len == 0);
+ SVN_TEST_STRING_ASSERT(svn_prefix_string__expand(empty, pool)->data, "");
+
+ /* strings shall be equal to themselves */
+ SVN_TEST_ASSERT(0 == svn_prefix_string__compare(empty, empty));
+
+ return SVN_NO_ERROR;
+}
+
+enum {TEST_CASE_COUNT = 9};
+
+static const char *test_cases[TEST_CASE_COUNT] =
+{
+ "a longish string of sorts, longer than 7 anyway",
+ "some other string",
+ "more stuff on root",
+ "some shorter string",
+ "some short string",
+ "some short str",
+ "some short str2",
+ "a longish string of sorts, longer than ?! anyway",
+ "a"
+};
+
+static svn_error_t *
+test_string_creation(apr_pool_t *pool)
+{
+ svn_prefix_tree__t *tree = svn_prefix_tree__create(pool);
+ svn_prefix_string__t *strings[TEST_CASE_COUNT];
+ int i;
+
+ /* create strings and remember their initial references */
+ for (i = 0; i < TEST_CASE_COUNT; ++i)
+ strings[i] = svn_prefix_string__create(tree, test_cases[i]);
+
+ /* doing this again must yield the same pointers */
+ for (i = 0; i < TEST_CASE_COUNT; ++i)
+ SVN_TEST_ASSERT(strings[i]
+ == svn_prefix_string__create(tree, test_cases[i]));
+
+ /* converting them back to strings must be the initial values */
+ for (i = 0; i < TEST_CASE_COUNT; ++i)
+ {
+ svn_string_t *expanded = svn_prefix_string__expand(strings[i], pool);
+
+ SVN_TEST_ASSERT(expanded->len == strlen(test_cases[i]));
+ SVN_TEST_STRING_ASSERT(expanded->data, test_cases[i]);
+
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_string_comparison(apr_pool_t *pool)
+{
+ svn_prefix_tree__t *tree = svn_prefix_tree__create(pool);
+ svn_prefix_string__t *strings[TEST_CASE_COUNT];
+ int i, k;
+
+ /* create strings */
+ for (i = 0; i < TEST_CASE_COUNT; ++i)
+ strings[i] = svn_prefix_string__create(tree, test_cases[i]);
+
+ /* comparing them with themselves */
+ for (i = 0; i < TEST_CASE_COUNT; ++i)
+ SVN_TEST_ASSERT(! svn_prefix_string__compare(strings[i], strings[i]));
+
+ /* compare with all other strings */
+ for (i = 0; i < TEST_CASE_COUNT; ++i)
+ {
+ svn_string_t *lhs = svn_prefix_string__expand(strings[i], pool);
+ for (k = 0; k < TEST_CASE_COUNT; ++k)
+ {
+ svn_string_t *rhs = svn_prefix_string__expand(strings[k], pool);
+ int expected_diff = strcmp(lhs->data, rhs->data);
+ int actual_diff = svn_prefix_string__compare(strings[i], strings[k]);
+
+ SVN_TEST_ASSERT((actual_diff < 0) == (expected_diff < 0));
+ SVN_TEST_ASSERT((actual_diff > 0) == (expected_diff > 0));
+ SVN_TEST_ASSERT(!actual_diff == !expected_diff);
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* An array of all test functions */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_empty_string,
+ "check empty strings"),
+ SVN_TEST_PASS2(test_string_creation,
+ "create many strings"),
+ SVN_TEST_PASS2(test_string_comparison,
+ "compare strings"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/priority-queue-test.c b/subversion/tests/libsvn_subr/priority-queue-test.c
new file mode 100644
index 0000000..bd2d991
--- /dev/null
+++ b/subversion/tests/libsvn_subr/priority-queue-test.c
@@ -0,0 +1,240 @@
+/*
+ * priority-queue-test.c: a collection of svn_priority_queue__* tests
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* ====================================================================
+ To add tests, look toward the bottom of this file.
+
+*/
+
+
+
+#include <stdio.h>
+#include <string.h>
+#include <apr_pools.h>
+
+#include "../svn_test.h"
+
+#include "svn_error.h"
+#include "private/svn_sorts_private.h"
+
+/* priority queue test:
+ * items in the queue are simple integers, in ascending order */
+
+/* number of items to put into the queue */
+enum {NUMBER_COUNT = 11};
+
+/* the actual values in the order we add them to the queue */
+static const int numbers[NUMBER_COUNT]
+ = { 8395, 0, -1, 3885, 1, -435, 99993, 10, 0, 1, 8395 };
+
+/* test_update will modify in-queue data and expects the queue to return
+ the values in the following order: */
+static const int expected_modified[NUMBER_COUNT]
+ = { -431, 0, 1, 3, 5, 10, 16, 3889, 8395, 8403, 99997 };
+
+/* standard compare function for integers */
+static int
+compare_func(const void *lhs, const void *rhs)
+{
+ return *(const int *)lhs - *(const int *)rhs;
+}
+
+/* Check that QUEUE is empty and the usual operations still work */
+static svn_error_t *
+verify_empty_queue(svn_priority_queue__t *queue)
+{
+ /* it's an empty queue */
+ SVN_TEST_ASSERT(svn_priority_queue__size(queue) == 0);
+ SVN_TEST_ASSERT(svn_priority_queue__peek(queue) == NULL);
+
+ /* these should be no-ops */
+ svn_priority_queue__update(queue);
+ svn_priority_queue__pop(queue);
+
+ return SVN_NO_ERROR;
+}
+
+/* check that the tip of QUEUE equals EXPECTED and remove the first element */
+static svn_error_t *
+extract_expected(svn_priority_queue__t *queue, int expected)
+{
+ int value = *(int *)svn_priority_queue__peek(queue);
+ SVN_TEST_ASSERT(value == expected);
+ svn_priority_queue__pop(queue);
+
+ return SVN_NO_ERROR;
+}
+
+/* Verify that QUEUE returns all elements in the proper order.
+ Also check that data can be added & removed without disturbing the order.
+ */
+static svn_error_t *
+verify_queue_order(svn_priority_queue__t *queue)
+{
+ int sorted[NUMBER_COUNT];
+ int i;
+
+ /* reference order */
+ memcpy(sorted, numbers, sizeof(numbers));
+ qsort(sorted, NUMBER_COUNT, sizeof(sorted[0]), compare_func);
+
+ /* verify that the queue returns the data in the same order */
+ for (i = 0; i < NUMBER_COUNT; ++i)
+ {
+ int item = *(int *)svn_priority_queue__peek(queue);
+ int to_insert;
+
+ /* is this the value we expected? */
+ SVN_TEST_ASSERT(item == sorted[i]);
+
+ /* add two items at the tip of the queue */
+ to_insert = item - 1;
+ svn_priority_queue__push(queue, &to_insert);
+ svn_priority_queue__push(queue, &item);
+
+ /* check queue length */
+ SVN_TEST_ASSERT(svn_priority_queue__size(queue) == NUMBER_COUNT-i+2);
+
+      /* now, let's extract all three of them */
+ SVN_ERR(extract_expected(queue, item-1));
+ SVN_ERR(extract_expected(queue, item));
+ SVN_ERR(extract_expected(queue, item));
+
+ /* check queue length */
+ SVN_TEST_ASSERT(svn_priority_queue__size(queue) == NUMBER_COUNT-i-1);
+ }
+
+ /* the queue should now be empty */
+ verify_empty_queue(queue);
+
+ return SVN_NO_ERROR;
+}
+
+/* return a queue allocated in POOL containing all items of NUMBERS */
+static svn_priority_queue__t *
+create_standard_queue(apr_pool_t *pool)
+{
+ apr_array_header_t *elements
+ = apr_array_make(pool, 11, sizeof(numbers[0]));
+
+ /* build queue */
+ int i;
+ for (i = 0; i < NUMBER_COUNT; ++i)
+ APR_ARRAY_PUSH(elements, int) = numbers[i];
+
+ return svn_priority_queue__create(elements, compare_func);
+}
+
+
+static svn_error_t *
+test_empty_queue(apr_pool_t *pool)
+{
+ apr_array_header_t *elements
+ = apr_array_make(pool, 0, sizeof(int));
+ svn_priority_queue__t *queue
+ = svn_priority_queue__create(elements, compare_func);
+
+ verify_empty_queue(queue);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_sort_queue(apr_pool_t *pool)
+{
+ svn_priority_queue__t *queue = create_standard_queue(pool);
+
+ /* data should come out of the queue in sorted order */
+ SVN_ERR(verify_queue_order(queue));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_push(apr_pool_t *pool)
+{
+ apr_array_header_t *elements
+ = apr_array_make(pool, 3, sizeof(int));
+ svn_priority_queue__t *queue
+ = svn_priority_queue__create(elements, compare_func);
+
+ /* build queue */
+ int i;
+ for (i = 0; i < NUMBER_COUNT; ++i)
+ svn_priority_queue__push(queue, &numbers[i]);
+
+ /* data should come out of the queue in sorted order */
+ SVN_ERR(verify_queue_order(queue));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_update(apr_pool_t *pool)
+{
+ svn_priority_queue__t *queue = create_standard_queue(pool);
+
+ /* modify all items in the queue */
+ int i;
+ for (i = 0; i < NUMBER_COUNT; ++i)
+ {
+ int *tip = svn_priority_queue__peek(queue);
+ *tip += 4;
+ svn_priority_queue__update(queue);
+
+ /* extract and verify tip */
+ SVN_TEST_ASSERT(*(int *)svn_priority_queue__peek(queue)
+ == expected_modified[i]);
+ svn_priority_queue__pop(queue);
+
+ /* this should be a no-op now */
+ svn_priority_queue__update(queue);
+
+ SVN_TEST_ASSERT(svn_priority_queue__size(queue) == NUMBER_COUNT-i-1);
+ }
+
+ /* the queue should now be empty */
+ verify_empty_queue(queue);
+
+ return SVN_NO_ERROR;
+}
+
+/* An array of all test functions */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_empty_queue,
+ "test empty queue"),
+ SVN_TEST_PASS2(test_sort_queue,
+ "data returned by a priority queue shall be ordered"),
+ SVN_TEST_PASS2(test_push,
+ "priority queues can be built up incrementally"),
+ SVN_TEST_PASS2(test_update,
+ "updating the head of the queue"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/revision-test.c b/subversion/tests/libsvn_subr/revision-test.c
new file mode 100644
index 0000000..53ca8da
--- /dev/null
+++ b/subversion/tests/libsvn_subr/revision-test.c
@@ -0,0 +1,136 @@
+/*
+ * revision-test.c -- test the revision functions
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "svn_types.h"
+
+#include "../svn_test.h"
+
+static svn_error_t *
+test_revnum_parse(apr_pool_t *pool)
+{
+ const char **t;
+
+ const char *failure_tests[] = {
+ "",
+ "abc",
+ "-456",
+ "2147483648",
+ "4294967295",
+ "4300000000",
+ "00000000001",
+ "21474836470",
+ "999999999999999999999999",
+ NULL
+ };
+
+ const char *success_tests[] = {
+ "0",
+ "12345",
+ "12345ABC",
+ "0000000001",
+ "2147483647x",
+ NULL
+ };
+
+ /* These tests should succeed. */
+ for (t=success_tests; *t; ++t)
+ {
+ svn_revnum_t rev = -123;
+ const char *endptr;
+
+ /* Do one test with a NULL end pointer and then with non-NULL
+ pointer. */
+ SVN_ERR(svn_revnum_parse(&rev, *t, NULL));
+ SVN_ERR(svn_revnum_parse(&rev, *t, &endptr));
+
+ if (-123 == rev)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED,
+ NULL,
+ "svn_revnum_parse('%s') should change the revision for "
+ "a good string",
+ *t);
+
+ if (endptr == *t)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED,
+ NULL,
+ "End pointer for svn_revnum_parse('%s') should not "
+ "point to the start of the string",
+ *t);
+ }
+
+ /* These tests should fail. */
+ for (t=failure_tests; *t; ++t)
+ {
+ svn_revnum_t rev = -123;
+ const char *endptr;
+
+ /* Do one test with a NULL end pointer and then with non-NULL
+ pointer. */
+ svn_error_t *err = svn_revnum_parse(&rev, *t, NULL);
+ svn_error_clear(err);
+
+ err = svn_revnum_parse(&rev, *t, &endptr);
+ if (! err)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_revnum_parse('%s') succeeded when it should "
+ "have failed",
+ *t);
+ svn_error_clear(err);
+
+ if (-123 != rev)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED,
+ NULL,
+ "svn_revnum_parse('%s') should not change the revision "
+ "for a bad string",
+ *t);
+
+ if (endptr != *t)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED,
+ NULL,
+ "End pointer for svn_revnum_parse('%s') does not "
+ "point to the start of the string",
+ *t);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* The test table. */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_revnum_parse,
+ "test svn_revnum_parse"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/root-pools-test.c b/subversion/tests/libsvn_subr/root-pools-test.c
new file mode 100644
index 0000000..8116418
--- /dev/null
+++ b/subversion/tests/libsvn_subr/root-pools-test.c
@@ -0,0 +1,137 @@
+/*
+ * root-pools-test.c -- test the svn_root_pools__* API
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <apr_pools.h>
+#include <apr_thread_proc.h>
+#include <apr_thread_cond.h>
+
+#include "private/svn_atomic.h"
+#include "private/svn_subr_private.h"
+
+#include "../svn_test.h"
+
+/* Do a few allocations of various sizes from POOL.
+ The request sizes follow the Fibonacci sequence (0, 1, 1, 2, 3, 5, ...)
+ so the pool sees a mix of tiny and moderately large allocations. */
+static void
+do_some_allocations(apr_pool_t *pool)
+{
+ int i;
+ apr_size_t fib = 1, fib1 = 0, fib2 = 0;
+ for (i = 0; i < 25; ++i) /* fib(25) = 75025, i.e. the largest request */
+ {
+ /* Result deliberately discarded: we only exercise the allocator.
+ NOTE(review): the first iteration requests 0 bytes -- presumably
+ fine for apr_pcalloc(); confirm against APR docs. */
+ apr_pcalloc(pool, fib1);
+ fib2 = fib1;
+ fib1 = fib;
+ fib += fib2;
+ }
+}
+
+/* allocate, use and recycle a pool from POOLs a few times */
+static void
+use_root_pool(svn_root_pools__t *pools)
+{
+ int i;
+ for (i = 0; i < 1000; ++i)
+ {
+ apr_pool_t *pool = svn_root_pools__acquire_pool(pools);
+ do_some_allocations(pool);
+ svn_root_pools__release_pool(pool, pools);
+ }
+}
+
+#if APR_HAS_THREADS
+/* Thread entry point for test_root_pool_concurrency().
+ DATA is the svn_root_pools__t instance shared by all threads. */
+static void *
+APR_THREAD_FUNC thread_func(apr_thread_t *tid, void *data)
+{
+ /* give all threads a good chance to get started by the scheduler */
+ apr_thread_yield();
+
+ use_root_pool(data);
+ apr_thread_exit(tid, APR_SUCCESS);
+
+ /* apr_thread_exit() normally does not return; this satisfies the
+ compiler's need for a return value. */
+ return NULL;
+}
+#endif
+
+/* Single-threaded smoke test: create a root-pool container and
+ repeatedly acquire/release pools from it via use_root_pool().
+ POOL is unused; the signature is dictated by the test harness. */
+static svn_error_t *
+test_root_pool(apr_pool_t *pool)
+{
+ svn_root_pools__t *pools;
+ SVN_ERR(svn_root_pools__create(&pools));
+ use_root_pool(pools);
+
+ return SVN_NO_ERROR;
+}
+
+#define APR_ERR(expr) \
+ do { \
+ apr_status_t status = (expr); \
+ if (status) \
+ return svn_error_wrap_apr(status, NULL); \
+ } while (0)
+
+static svn_error_t *
+test_root_pool_concurrency(apr_pool_t *pool)
+{
+#if APR_HAS_THREADS
+ /* The svn_root_pools__t container is supposed to be thread-safe.
+ Do some multi-threaded access and hope that there are no segfaults.
+ */
+ enum { THREAD_COUNT = 10 };
+ svn_root_pools__t *pools;
+ apr_thread_t *threads[THREAD_COUNT];
+ int i;
+
+ SVN_ERR(svn_root_pools__create(&pools));
+
+ for (i = 0; i < THREAD_COUNT; ++i)
+ APR_ERR(apr_thread_create(&threads[i], NULL, thread_func, pools, pool));
+
+ /* wait for the threads to finish */
+ for (i = 0; i < THREAD_COUNT; ++i)
+ {
+ apr_status_t retval;
+ APR_ERR(apr_thread_join(&retval, threads[i]));
+ APR_ERR(retval);
+ }
+#endif
+
+ return SVN_NO_ERROR;
+}
+
+
+/* The test table. */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_root_pool,
+ "test root pool recycling"),
+ SVN_TEST_SKIP2(test_root_pool_concurrency,
+ ! APR_HAS_THREADS,
+ "test concurrent root pool recycling"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/skel-test.c b/subversion/tests/libsvn_subr/skel-test.c
new file mode 100644
index 0000000..9839e6a
--- /dev/null
+++ b/subversion/tests/libsvn_subr/skel-test.c
@@ -0,0 +1,909 @@
+/* skel-test.c --- tests for the skeleton functions
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdlib.h>
+#include <stdarg.h>
+#include <string.h>
+#include <stdio.h>
+
+#include <apr.h>
+
+#include "svn_pools.h"
+#include "svn_string.h"
+#include "private/svn_skel.h"
+
+#include "../svn_test.h"
+#include "../svn_test_fs.h"
+
+
+/* Some utility functions. */
+
+
+/* A quick way to create error messages: format FMT with the variable
+ arguments printf-style, allocating the message in POOL, and return
+ it wrapped in a new SVN_ERR_TEST_FAILED error. */
+static svn_error_t *
+fail(apr_pool_t *pool, const char *fmt, ...)
+{
+ va_list ap;
+ char *msg;
+
+ va_start(ap, fmt);
+ msg = apr_pvsprintf(pool, fmt, ap);
+ va_end(ap);
+
+ return svn_error_create(SVN_ERR_TEST_FAILED, 0, msg);
+}
+
+
+/* Free everything from POOL, and return an empty Subversion string
+ allocated in it.
+
+ NOTE: this clears POOL, so every earlier allocation made from POOL
+ (including stringbufs returned by previous calls to this function)
+ is invalidated. Callers in this file rely on that to recycle the
+ per-test pool between iterations. */
+static svn_stringbuf_t *
+get_empty_string(apr_pool_t *pool)
+{
+ svn_pool_clear(pool);
+
+ return svn_stringbuf_create_empty(pool);
+}
+
+/* Parse a skeleton from a Subversion string. */
+static svn_skel_t *
+parse_str(svn_stringbuf_t *str, apr_pool_t *pool)
+{
+ return svn_skel__parse(str->data, str->len, pool);
+}
+
+
+/* Parse a skeleton from a C string. */
+static svn_skel_t *
+parse_cstr(const char *str, apr_pool_t *pool)
+{
+ return svn_skel__parse(str, strlen(str), pool);
+}
+
+
+enum char_type {
+ type_nothing = 0,
+ type_space = 1,
+ type_digit = 2,
+ type_paren = 3,
+ type_name = 4
+};
+
+static int skel_char_map_initialized;
+static enum char_type skel_char_map[256];
+
+static void
+init_char_types(void)
+{
+ int i;
+ const char *c;
+
+ if (skel_char_map_initialized)
+ return;
+
+ for (i = 0; i < 256; i++)
+ skel_char_map[i] = type_nothing;
+
+ for (i = '0'; i <= '9'; i++)
+ skel_char_map[i] = type_digit;
+
+ for (c = "\t\n\f\r "; *c; c++)
+ skel_char_map[(unsigned char) *c] = type_space;
+
+ for (c = "()[]"; *c; c++)
+ skel_char_map[(unsigned char) *c] = type_paren;
+
+ for (i = 'A'; i <= 'Z'; i++)
+ skel_char_map[i] = type_name;
+ for (i = 'a'; i <= 'z'; i++)
+ skel_char_map[i] = type_name;
+
+ skel_char_map_initialized = 1;
+}
+
+/* Return true iff BYTE is a whitespace byte. */
+static int
+skel_is_space(char byte)
+{
+ init_char_types();
+
+ return skel_char_map[(unsigned char) byte] == type_space;
+}
+
+#if 0
+/* Return true iff BYTE is a digit byte. */
+static int
+skel_is_digit(char byte)
+{
+ init_char_types();
+
+ return skel_char_map[(unsigned char) byte] == type_digit;
+}
+#endif
+
+/* Return true iff BYTE is a paren byte. */
+static int
+skel_is_paren(char byte)
+{
+ init_char_types();
+
+ return skel_char_map[(unsigned char) byte] == type_paren;
+}
+
+/* Return true iff BYTE is a name byte. */
+static int
+skel_is_name(char byte)
+{
+ init_char_types();
+
+ return skel_char_map[(unsigned char) byte] == type_name;
+}
+
+
+/* Check that SKEL is an atom, and its contents match LEN bytes of
+ DATA. */
+static int
+check_atom(svn_skel_t *skel, const char *data, apr_size_t len)
+{
+ return (skel
+ && skel->is_atom
+ && skel->len == len
+ && ! memcmp(skel->data, data, len));
+}
+
+
+/* Functions that generate/check interesting implicit-length atoms. */
+
+
+/* Append to STR an implicit-length atom consisting of the byte BYTE,
+ terminated by the character TERM. BYTE must be a name byte,
+ and TERM must be a valid skel separator, or NULL. */
+static void
+put_implicit_length_byte(svn_stringbuf_t *str, char byte, char term)
+{
+ if (! skel_is_name(byte))
+ abort();
+ if (term != '\0'
+ && ! skel_is_space(term)
+ && ! skel_is_paren(term))
+ abort();
+ svn_stringbuf_appendbyte(str, byte);
+ if (term != '\0')
+ svn_stringbuf_appendbyte(str, term);
+}
+
+
+/* Return true iff SKEL is the parsed form of the atom produced by
+ calling put_implicit_length with BYTE. */
+static int
+check_implicit_length_byte(svn_skel_t *skel, char byte)
+{
+ if (! skel_is_name(byte))
+ abort();
+
+ return check_atom(skel, &byte, 1);
+}
+
+
+/* Subroutine for the *_implicit_length_all_chars functions. */
+static char *
+gen_implicit_length_all_chars(apr_size_t *len_p)
+{
+ apr_size_t pos;
+ int i;
+ static char name[256];
+
+ /* Gotta start with a valid name character. */
+ pos = 0;
+ name[pos++] = 'x';
+ for (i = 0; i < 256; i++)
+ if (! skel_is_space( (apr_byte_t)i)
+ && ! skel_is_paren( (apr_byte_t)i))
+ name[pos++] = (char)i;
+
+ *len_p = pos;
+ return name;
+}
+
+
+/* Append to STR an implicit-length atom containing every character
+ that's legal in such atoms, terminated by the valid atom terminator
+ TERM. */
+static void
+put_implicit_length_all_chars(svn_stringbuf_t *str, char term)
+{
+ apr_size_t len;
+ char *name = gen_implicit_length_all_chars(&len);
+
+ if (term != '\0'
+ && ! skel_is_space(term)
+ && ! skel_is_paren(term))
+ abort();
+
+ svn_stringbuf_appendbytes(str, name, len);
+ if (term != '\0')
+ svn_stringbuf_appendbyte(str, term);
+}
+
+
+/* Return true iff SKEL is the parsed form of the atom produced by
+ calling put_implicit_length_all_chars. */
+static int
+check_implicit_length_all_chars(svn_skel_t *skel)
+{
+ apr_size_t len;
+ char *name = gen_implicit_length_all_chars(&len);
+
+ return check_atom(skel, name, len);
+}
+
+
+
+/* Test parsing of implicit-length atoms. */
+
+static svn_error_t *
+parse_implicit_length(apr_pool_t *pool)
+{
+ svn_stringbuf_t *str = get_empty_string(pool);
+ svn_skel_t *skel;
+
+ /* Try all valid single-byte atoms. */
+ {
+ const char *c;
+ int i;
+
+ for (c = "\t\n\f\r ()[]"; *c; c++)
+ for (i = 0; i < 256; i++)
+ if (skel_is_name((apr_byte_t)i))
+ {
+ svn_stringbuf_setempty(str);
+ put_implicit_length_byte(str, (apr_byte_t)i, *c);
+ skel = parse_str(str, pool);
+ if (! check_implicit_length_byte(skel, (apr_byte_t)i))
+ return fail(pool, "single-byte implicit-length skel 0x%02x"
+ " with terminator 0x%02x",
+ i, c);
+ }
+ }
+
+ /* Try an atom that contains every character that's legal in an
+ implicit-length atom. */
+ svn_stringbuf_setempty(str);
+ put_implicit_length_all_chars(str, '\0');
+ skel = parse_str(str, pool);
+ if (! check_implicit_length_all_chars(skel))
+ return fail(pool, "implicit-length skel containing all legal chars");
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Functions that generate/check interesting explicit-length atoms. */
+
+
+/* Append to STR the representation of the atom containing the LEN
+ bytes at DATA, in explicit-length form ("<len><sep><data>"), using
+ SEP as the separator between the length and the data. SEP must be
+ a whitespace byte, else we abort.
+
+ NOTE(review): the malloc() result is not checked for NULL; in this
+ test program an OOM would crash immediately anyway. */
+static void
+put_explicit_length(svn_stringbuf_t *str,
+ const char *data,
+ apr_size_t len,
+ char sep)
+{
+ char *buf = malloc(len + 100);
+ apr_size_t length_len;
+
+ if (! skel_is_space(sep))
+ abort();
+
+ /* Generate the length and separator character. */
+ sprintf(buf, "%"APR_SIZE_T_FMT"%c", len, sep);
+ length_len = strlen(buf);
+
+ /* Copy in the real data (which may contain nulls), so we cannot use
+ any str*-style function here. */
+ memcpy(buf + length_len, data, len);
+
+ svn_stringbuf_appendbytes(str, buf, length_len + len);
+ free(buf);
+}
+
+
+/* Return true iff SKEL is the parsed form of an atom generated by
+ put_explicit_length. */
+static int
+check_explicit_length(svn_skel_t *skel, const char *data, apr_size_t len)
+{
+ return check_atom(skel, data, len);
+}
+
+
+/* Test parsing of explicit-length atoms. */
+
+static svn_error_t *
+try_explicit_length(const char *data,
+ apr_size_t len,
+ apr_size_t check_len,
+ apr_pool_t *pool)
+{
+ int i;
+ svn_stringbuf_t *str = get_empty_string(pool);
+ svn_skel_t *skel;
+
+ /* Try it with every possible separator character. */
+ for (i = 0; i < 256; i++)
+ if (skel_is_space( (apr_byte_t)i))
+ {
+ svn_stringbuf_setempty(str);
+ put_explicit_length(str, data, len, (apr_byte_t)i);
+ skel = parse_str(str, pool);
+ if (! check_explicit_length(skel, data, check_len))
+ return fail(pool, "failed to reparse explicit-length atom");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+parse_explicit_length(apr_pool_t *pool)
+{
+ /* Try to parse the empty atom. */
+ SVN_ERR(try_explicit_length("", 0, 0, pool));
+
+ /* Try to parse every one-character atom. */
+ {
+ int i;
+
+ for (i = 0; i < 256; i++)
+ {
+ char buf[1];
+
+ buf[0] = (char)i;
+ SVN_ERR(try_explicit_length(buf, 1, 1, pool));
+ }
+ }
+
+ /* Try to parse an atom containing every character. */
+ {
+ int i;
+ char data[256];
+
+ for (i = 0; i < 256; i++)
+ data[i] = (char)i;
+
+ SVN_ERR(try_explicit_length(data, 256, 256, pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* Test parsing of invalid atoms. */
+
+static struct invalid_atoms
+{
+ int type;
+ apr_size_t len;
+ const char *data;
+} invalid_atoms[] = { { 1, 1, "(" },
+ { 1, 1, ")" },
+ { 1, 1, "[" },
+ { 1, 1, "]" },
+ { 1, 1, " " },
+ { 1, 13, "Hello, World!" },
+ { 1, 8, "1mplicit" },
+
+ { 2, 2, "1" },
+ { 2, 1, "12" },
+
+ { 7, 0, NULL } };
+
+static svn_error_t *
+parse_invalid_atoms(apr_pool_t *pool)
+{
+ struct invalid_atoms *ia = invalid_atoms;
+
+ while (ia->type != 7)
+ {
+ if (ia->type == 1)
+ {
+ svn_skel_t *skel = parse_cstr(ia->data, pool);
+ if (check_atom(skel, ia->data, ia->len))
+ return fail(pool,
+ "failed to detect parsing error in '%s'", ia->data);
+ }
+ else
+ {
+ svn_error_t *err = try_explicit_length(ia->data, ia->len,
+ strlen(ia->data), pool);
+ if (err == SVN_NO_ERROR)
+ return fail(pool, "got wrong length in explicit-length atom");
+ svn_error_clear(err);
+ }
+
+ ia++;
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* Functions that generate/check interesting lists. */
+
+/* Append the start of a list to STR, using LEN bytes of the
+ whitespace character SPACE. */
+static void
+put_list_start(svn_stringbuf_t *str, char space, int len)
+{
+ int i;
+
+ if (len > 0 && ! skel_is_space(space))
+ abort();
+
+ svn_stringbuf_appendcstr(str, "(");
+ for (i = 0; i < len; i++)
+ svn_stringbuf_appendbyte(str, space);
+}
+
+
+/* Append the end of a list to STR, using LEN bytes of the
+ whitespace character SPACE. */
+static void
+put_list_end(svn_stringbuf_t *str, char space, int len)
+{
+ int i;
+
+ if (len > 0 && ! skel_is_space(space))
+ abort();
+
+ for (i = 0; i < len; i++)
+ svn_stringbuf_appendbyte(str, space);
+ svn_stringbuf_appendcstr(str, ")");
+}
+
+
+/* Return true iff SKEL is a list of length DESIRED_LEN. */
+static int
+check_list(svn_skel_t *skel, int desired_len)
+{
+ int len;
+ svn_skel_t *child;
+
+ if (! (skel
+ && ! skel->is_atom))
+ return 0;
+
+ len = 0;
+ for (child = skel->children; child; child = child->next)
+ len++;
+
+ return len == desired_len;
+}
+
+
+
+/* Parse lists. */
+
+static svn_error_t *
+parse_list(apr_pool_t *pool)
+{
+ {
+ /* Try lists of varying length. */
+ int list_len;
+
+ for (list_len = 0;
+ list_len < 30;
+ list_len < 4 ? list_len++ : (list_len *= 3))
+ {
+ /* Try lists with different separators. */
+ int sep;
+
+ for (sep = 0; sep < 256; sep++)
+ if (skel_is_space( (apr_byte_t)sep))
+ {
+ /* Try lists with different numbers of separator
+ characters between the elements. */
+ int sep_count;
+
+ for (sep_count = 0;
+ sep_count < 30;
+ sep_count < 4 ? sep_count++ : (sep_count *= 3))
+ {
+ /* Try various single-byte implicit-length atoms
+ for elements. */
+ int atom_byte;
+
+ for (atom_byte = 0; atom_byte < 256; atom_byte++)
+ if (skel_is_name( (apr_byte_t)atom_byte))
+ {
+ int i;
+ svn_stringbuf_t *str = get_empty_string(pool);
+ svn_skel_t *skel;
+ svn_skel_t *child;
+
+ put_list_start(str, (apr_byte_t)sep, sep_count);
+ for (i = 0; i < list_len; i++)
+ put_implicit_length_byte(str,
+ (apr_byte_t)atom_byte,
+ (apr_byte_t)sep);
+ put_list_end(str, (apr_byte_t)sep, sep_count);
+
+ skel = parse_str(str, pool);
+ if (! check_list(skel, list_len))
+ return fail(pool, "couldn't parse list");
+ for (child = skel->children;
+ child;
+ child = child->next)
+ if (! check_implicit_length_byte
+ (child, (apr_byte_t)atom_byte))
+ return fail(pool,
+ "list was reparsed incorrectly");
+ }
+
+ /* Try the atom containing every character that's
+ legal in an implicit-length atom as the element. */
+ {
+ int i;
+ svn_stringbuf_t *str = get_empty_string(pool);
+ svn_skel_t *skel;
+ svn_skel_t *child;
+
+ put_list_start(str, (apr_byte_t)sep, sep_count);
+ for (i = 0; i < list_len; i++)
+ put_implicit_length_all_chars(str, (apr_byte_t)sep);
+ put_list_end(str, (apr_byte_t)sep, sep_count);
+
+ skel = parse_str(str, pool);
+ if (! check_list(skel, list_len))
+ return fail(pool, "couldn't parse list");
+ for (child = skel->children;
+ child;
+ child = child->next)
+ if (! check_implicit_length_all_chars(child))
+ return fail(pool, "couldn't parse list");
+ }
+
+ /* Try using every one-byte explicit-length atom as
+ an element. */
+ for (atom_byte = 0; atom_byte < 256; atom_byte++)
+ {
+ int i;
+ svn_stringbuf_t *str = get_empty_string(pool);
+ svn_skel_t *skel;
+ svn_skel_t *child;
+ char buf[1];
+
+ buf[0] = (char)atom_byte;
+
+ put_list_start(str, (apr_byte_t)sep, sep_count);
+ for (i = 0; i < list_len; i++)
+ put_explicit_length(str, buf, 1, (apr_byte_t)sep);
+ put_list_end(str, (apr_byte_t)sep, sep_count);
+
+ skel = parse_str(str, pool);
+ if (! check_list(skel, list_len))
+ return fail(pool, "couldn't parse list");
+ for (child = skel->children;
+ child;
+ child = child->next)
+ if (! check_explicit_length(child, buf, 1))
+ return fail(pool, "list was reparsed incorrectly");
+ }
+
+ /* Try using an atom containing every character as
+ an element. */
+ {
+ int i;
+ svn_stringbuf_t *str = get_empty_string(pool);
+ svn_skel_t *skel;
+ svn_skel_t *child;
+ char data[256];
+
+ for (i = 0; i < 256; i++)
+ data[i] = (char)i;
+
+ put_list_start(str, (apr_byte_t)sep, sep_count);
+ for (i = 0; i < list_len; i++)
+ put_explicit_length(str, data, 256, (apr_byte_t)sep);
+ put_list_end(str, (apr_byte_t)sep, sep_count);
+
+ skel = parse_str(str, pool);
+ if (! check_list(skel, list_len))
+ return fail(pool, "couldn't parse list");
+ for (child = skel->children;
+ child;
+ child = child->next)
+ if (! check_explicit_length(child, data, 256))
+ return fail(pool, "list was re-parsed incorrectly");
+ }
+ }
+ }
+ }
+ }
+
+ /* Try to parse some invalid lists. */
+ {
+ int sep;
+
+ /* Try different separators. */
+ for (sep = 0; sep < 256; sep++)
+ if (skel_is_space( (apr_byte_t)sep))
+ {
+ /* Try lists with different numbers of separator
+ characters between the elements. */
+ int sep_count;
+
+ for (sep_count = 0;
+ sep_count < 100;
+ sep_count < 10 ? sep_count++ : (sep_count *= 3))
+ {
+ svn_stringbuf_t *str;
+
+ /* A list with only a separator. */
+ str = get_empty_string(pool);
+ put_list_start(str, (apr_byte_t)sep, sep_count);
+ if (parse_str(str, pool))
+ return fail(pool, "failed to detect syntax error");
+
+ /* A list with only a terminator. */
+ str = get_empty_string(pool);
+ put_list_end(str, (apr_byte_t)sep, sep_count);
+ if (parse_str(str, pool))
+ return fail(pool, "failed to detect syntax error");
+
+ /* A list containing an invalid element. */
+ str = get_empty_string(pool);
+ put_list_start(str, (apr_byte_t)sep, sep_count);
+ svn_stringbuf_appendcstr(str, "100 ");
+ put_list_end(str, (apr_byte_t)sep, sep_count);
+ if (parse_str(str, pool))
+ return fail(pool, "failed to detect invalid element");
+ }
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* Building interesting skels. */
+
+/* Build an atom skel containing a private copy of the LEN bytes at
+ DATA, allocated in POOL.
+
+ NOTE(review): the skel's 'next' and 'children' fields are left
+ uninitialized here; callers in this file always link the result
+ into a list via add(), which sets 'next' -- confirm no other use. */
+static svn_skel_t *
+build_atom(apr_size_t len, char *data, apr_pool_t *pool)
+{
+ char *copy = apr_palloc(pool, len);
+ svn_skel_t *skel = apr_palloc(pool, sizeof(*skel));
+
+ memcpy(copy, data, len);
+ skel->is_atom = 1;
+ skel->len = len;
+ skel->data = copy;
+
+ return skel;
+}
+
+/* Build an empty list skel in POOL.
+
+ NOTE(review): 'next' is left uninitialized; the callers here use the
+ result either as a list head or pass it through add(), which sets
+ 'next' -- confirm before reusing elsewhere. */
+static svn_skel_t *
+empty(apr_pool_t *pool)
+{
+ svn_skel_t *skel = apr_palloc(pool, sizeof(*skel));
+
+ skel->is_atom = 0;
+ skel->children = 0;
+
+ return skel;
+}
+
+/* Stick ELEMENT at the beginning of the list skeleton LIST. */
+static void
+add(svn_skel_t *element, svn_skel_t *list)
+{
+ element->next = list->children;
+ list->children = element;
+}
+
+
+/* Return true if the contents of skel A are identical to those of
+ skel B. */
+static int
+skel_equal(svn_skel_t *a, svn_skel_t *b)
+{
+ if (a->is_atom != b->is_atom)
+ return 0;
+
+ if (a->is_atom)
+ return (a->len == b->len
+ && ! memcmp(a->data, b->data, a->len));
+ else
+ {
+ svn_skel_t *a_child, *b_child;
+
+ for (a_child = a->children, b_child = b->children;
+ a_child && b_child;
+ a_child = a_child->next, b_child = b_child->next)
+ if (! skel_equal(a_child, b_child))
+ return 0;
+
+ if (a_child || b_child)
+ return 0;
+ }
+
+ return 1;
+}
+
+
+/* Unparsing implicit-length atoms. */
+
+static svn_error_t *
+unparse_implicit_length(apr_pool_t *pool)
+{
+ /* Unparse and check every single-byte implicit-length atom. */
+ {
+ int byte;
+
+ for (byte = 0; byte < 256; byte++)
+ if (skel_is_name( (apr_byte_t)byte))
+ {
+ char buf = (char)byte;
+ svn_skel_t *skel = build_atom(1, &buf, pool);
+ svn_stringbuf_t *str = svn_skel__unparse(skel, pool);
+
+ if (! (str
+ && str->len == 1
+ && str->data[0] == (char)byte))
+ return fail(pool, "incorrectly unparsed single-byte "
+ "implicit-length atom");
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* Unparse some lists. */
+
+static svn_error_t *
+unparse_list(apr_pool_t *pool)
+{
+ /* Make a list of all the single-byte implicit-length atoms. */
+ {
+ svn_stringbuf_t *str;
+ int byte;
+ svn_skel_t *list = empty(pool);
+ svn_skel_t *reparsed, *elt;
+
+ for (byte = 0; byte < 256; byte++)
+ if (skel_is_name( (apr_byte_t)byte))
+ {
+ char buf = (char)byte;
+ add(build_atom(1, &buf, pool), list);
+ }
+
+ /* Unparse that, parse it again, and see if we got the same thing
+ back. */
+ str = svn_skel__unparse(list, pool);
+ reparsed = svn_skel__parse(str->data, str->len, pool);
+
+ if (! reparsed || reparsed->is_atom)
+ return fail(pool, "result is syntactically misformed, or not a list");
+
+ if (! skel_equal(list, reparsed))
+ return fail(pool, "unparsing and parsing didn't preserve contents");
+
+ elt = reparsed->children;
+ for (byte = 255; byte >= 0; byte--)
+ if (skel_is_name( (apr_byte_t)byte))
+ {
+ if (! (elt
+ && elt->is_atom
+ && elt->len == 1
+ && elt->data[0] == byte))
+ return fail(pool, "bad element");
+
+ /* Verify that each element's data falls within the string. */
+ if (elt->data < str->data
+ || elt->data + elt->len > str->data + str->len)
+ return fail(pool, "bad element");
+
+ elt = elt->next;
+ }
+
+ /* We should have reached the end of the list at this point. */
+ if (elt)
+ return fail(pool, "list too long");
+ }
+
+ /* Make a list of lists. */
+ {
+ svn_stringbuf_t *str;
+ svn_skel_t *top = empty(pool);
+ svn_skel_t *reparsed;
+ int i;
+
+ for (i = 0; i < 10; i++)
+ {
+ svn_skel_t *middle = empty(pool);
+ int j;
+
+ for (j = 0; j < 10; j++)
+ {
+ char buf[10];
+ apr_size_t k;
+ int val;
+
+ /* Make some interesting atom, containing lots of binary
+ characters. */
+ val = i * 10 + j;
+ for (k = 0; k < sizeof(buf); k++)
+ {
+ buf[k] = (char)val;
+ val += j;
+ }
+
+ add(build_atom(sizeof(buf), buf, pool), middle);
+ }
+
+ add(middle, top);
+ }
+
+ str = svn_skel__unparse(top, pool);
+ reparsed = svn_skel__parse(str->data, str->len, pool);
+
+ if (! skel_equal(top, reparsed))
+ return fail(pool, "failed to reparse list of lists");
+
+ reparsed = svn_skel__dup(reparsed, TRUE, pool);
+
+ if (! skel_equal(top, reparsed))
+ return fail(pool, "failed to dup list of lists");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* The test table. */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(parse_implicit_length,
+ "parse implicit-length atoms"),
+ SVN_TEST_PASS2(parse_explicit_length,
+ "parse explicit-length atoms"),
+ SVN_TEST_PASS2(parse_invalid_atoms,
+ "parse invalid atoms"),
+ SVN_TEST_PASS2(parse_list,
+ "parse lists"),
+ SVN_TEST_PASS2(unparse_implicit_length,
+ "unparse implicit-length atoms"),
+ SVN_TEST_PASS2(unparse_list,
+ "unparse lists"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/spillbuf-test.c b/subversion/tests/libsvn_subr/spillbuf-test.c
new file mode 100644
index 0000000..8d4e8b2
--- /dev/null
+++ b/subversion/tests/libsvn_subr/spillbuf-test.c
@@ -0,0 +1,595 @@
+/*
+ * spillbuf-test.c : test the spill buffer code
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "svn_types.h"
+
+#include "private/svn_subr_private.h"
+
+#include "../svn_test.h"
+
+
+static const char basic_data[] = "abcdefghijklmnopqrstuvwxyz"
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+ "0123456789";
+
+
+/* Validate that BUF is STARTING_SIZE in length. Then read some data from
+ the buffer, which should match EXPECTED. The EXPECTED value must be
+ NUL-terminated, but the NUL is not part of the expected/verified value. */
+#define CHECK_READ(b, s, e, p) SVN_ERR(check_read(b, s, e, p))
+static svn_error_t *
+check_read(svn_spillbuf_t *buf,
+ svn_filesize_t starting_size,
+ const char *expected,
+ apr_pool_t *scratch_pool)
+{
+ apr_size_t expected_len = strlen(expected);
+ const char *readptr;
+ apr_size_t readlen;
+
+ SVN_TEST_ASSERT(svn_spillbuf__get_size(buf) == starting_size);
+ SVN_ERR(svn_spillbuf__read(&readptr, &readlen, buf, scratch_pool));
+ SVN_TEST_ASSERT(readptr != NULL
+ && readlen == expected_len
+ && memcmp(readptr, expected, expected_len) == 0);
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_spillbuf__basic(apr_pool_t *pool, apr_size_t len, svn_spillbuf_t *buf)
+{
+ int i;
+ const char *readptr;
+ apr_size_t readlen;
+
+ /* It starts empty. */
+ SVN_TEST_ASSERT(svn_spillbuf__get_size(buf) == 0);
+
+ /* Place enough data into the buffer to cause a spill to disk. */
+ for (i = 20; i--; )
+ SVN_ERR(svn_spillbuf__write(buf, basic_data, len, pool));
+
+ /* And now has content. */
+ SVN_TEST_ASSERT(svn_spillbuf__get_size(buf) > 0);
+
+ /* Verify that we can read 20 copies of basic_data from the buffer. */
+ for (i = 20; i--; )
+ CHECK_READ(buf, (i + 1) * len, basic_data, pool);
+
+ /* And after precisely 20 reads, it should be empty. */
+ SVN_ERR(svn_spillbuf__read(&readptr, &readlen, buf, pool));
+ SVN_TEST_ASSERT(readptr == NULL);
+ SVN_TEST_ASSERT(svn_spillbuf__get_size(buf) == 0);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_spillbuf_basic(apr_pool_t *pool)
+{
+ apr_size_t len = strlen(basic_data); /* Don't include basic_data's NUL */
+ svn_spillbuf_t *buf = svn_spillbuf__create(len, 10 * len, pool);
+ return test_spillbuf__basic(pool, len, buf);
+}
+
+static svn_error_t *
+test_spillbuf_basic_spill_all(apr_pool_t *pool)
+{
+ apr_size_t len = strlen(basic_data); /* Don't include basic_data's NUL */
+ svn_spillbuf_t *buf =
+ svn_spillbuf__create_extended(len, 10 * len, TRUE, TRUE, NULL, pool);
+ return test_spillbuf__basic(pool, len, buf);
+}
+
+/* Implements the read-callback contract of svn_spillbuf__process().
+ BATON is an int invocation counter. Every chunk handed to us must
+ be exactly one copy of basic_data -- sizeof(basic_data), i.e.
+ including the trailing NUL, matching the writes done by
+ test_spillbuf__callback(). Sets *STOP after the 10th chunk so the
+ first process() call returns non-exhausted. */
+static svn_error_t *
+read_callback(svn_boolean_t *stop,
+ void *baton,
+ const char *data,
+ apr_size_t len,
+ apr_pool_t *scratch_pool)
+{
+ int *counter = baton;
+
+ SVN_TEST_ASSERT(len == sizeof(basic_data));
+ SVN_TEST_ASSERT(memcmp(data, basic_data, len) == 0);
+
+ *stop = (++*counter == 10);
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_spillbuf__callback(apr_pool_t *pool, svn_spillbuf_t *buf)
+{
+ int i;
+ int counter;
+ svn_boolean_t exhausted;
+
+ /* Place enough data into the buffer to cause a spill to disk. */
+ for (i = 20; i--; )
+ SVN_ERR(svn_spillbuf__write(buf, basic_data, sizeof(basic_data), pool));
+
+ counter = 0;
+ SVN_ERR(svn_spillbuf__process(&exhausted, buf, read_callback, &counter,
+ pool));
+ SVN_TEST_ASSERT(!exhausted);
+
+ SVN_ERR(svn_spillbuf__process(&exhausted, buf, read_callback, &counter,
+ pool));
+ SVN_TEST_ASSERT(exhausted);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_spillbuf_callback(apr_pool_t *pool)
+{
+ svn_spillbuf_t *buf = svn_spillbuf__create(
+ sizeof(basic_data) /* blocksize */,
+ 10 * sizeof(basic_data) /* maxsize */,
+ pool);
+ return test_spillbuf__callback(pool, buf);
+}
+
+static svn_error_t *
+test_spillbuf_callback_spill_all(apr_pool_t *pool)
+{
+ svn_spillbuf_t *buf = svn_spillbuf__create_extended(
+ sizeof(basic_data) /* blocksize */,
+ 10 * sizeof(basic_data) /* maxsize */,
+ TRUE /* delete on close */,
+ TRUE /* spill all data */,
+ NULL, pool);
+ return test_spillbuf__callback(pool, buf);
+}
+
+static svn_error_t *
+test_spillbuf__file(apr_pool_t *pool, apr_size_t altsize, svn_spillbuf_t *buf)
+{
+ int i;
+ const char *readptr;
+ apr_size_t readlen;
+ apr_size_t cur_index;
+
+ /* Place enough data into the buffer to cause a spill to disk. Note that
+ we are writing data that is *smaller* than the blocksize. */
+ for (i = 7; i--; )
+ SVN_ERR(svn_spillbuf__write(buf, basic_data, sizeof(basic_data), pool));
+
+ /* The first two reads will be in-memory blocks (the third write causes
+ the spill to disk). The spillbuf will pack the content into BLOCKSIZE
+ blocks. The second/last memory block will (thus) be a bit smaller. */
+ SVN_ERR(svn_spillbuf__read(&readptr, &readlen, buf, pool));
+ SVN_TEST_ASSERT(readptr != NULL);
+ SVN_TEST_ASSERT(readlen == altsize);
+ SVN_ERR(svn_spillbuf__read(&readptr, &readlen, buf, pool));
+ SVN_TEST_ASSERT(readptr != NULL);
+ /* The second write put sizeof(basic_data) into the buffer. A small
+ portion was stored at the end of the memblock holding the first write.
+ Thus, the size of this read will be the written data, minus that
+ slice written to the first block. */
+ SVN_TEST_ASSERT(readlen
+ == sizeof(basic_data) - (altsize - sizeof(basic_data)));
+
+ /* Current index into basic_data[] that we compare against. */
+ cur_index = 0;
+
+ while (TRUE)
+ {
+ /* This will read more bytes (from the spill file into a temporary
+ in-memory block) than the blocks of data that we wrote. This makes
+ it trickier to verify that the right data is being returned. */
+ SVN_ERR(svn_spillbuf__read(&readptr, &readlen, buf, pool));
+ if (readptr == NULL)
+ break;
+
+ while (TRUE)
+ {
+ apr_size_t amt;
+
+ /* Compute the slice of basic_data that we will compare against,
+ given the readlen and cur_index. */
+ if (cur_index + readlen >= sizeof(basic_data))
+ amt = sizeof(basic_data) - cur_index;
+ else
+ amt = readlen;
+ SVN_TEST_ASSERT(memcmp(readptr, &basic_data[cur_index], amt) == 0);
+ if ((cur_index += amt) == sizeof(basic_data))
+ cur_index = 0;
+ if ((readlen -= amt) == 0)
+ break;
+ readptr += amt;
+ }
+ }
+
+ SVN_TEST_ASSERT(svn_spillbuf__get_size(buf) == 0);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_spillbuf_file(apr_pool_t *pool)
+{
+ apr_size_t altsize = sizeof(basic_data) + 2;
+ svn_spillbuf_t *buf = svn_spillbuf__create(
+ altsize /* blocksize */,
+ 2 * sizeof(basic_data) /* maxsize */,
+ pool);
+ return test_spillbuf__file(pool, altsize, buf);
+}
+
+static svn_error_t *
+test_spillbuf_file_spill_all(apr_pool_t *pool)
+{
+ apr_size_t altsize = sizeof(basic_data) + 2;
+ svn_spillbuf_t *buf = svn_spillbuf__create_extended(
+ altsize /* blocksize */,
+ 2 * sizeof(basic_data) /* maxsize */,
+ TRUE /* delete on close */,
+ TRUE /* spill all data */,
+ NULL, pool);
+ return test_spillbuf__file(pool, altsize, buf);
+}
+
+static svn_error_t *
+test_spillbuf__interleaving(apr_pool_t *pool, svn_spillbuf_t* buf)
+{
+ SVN_ERR(svn_spillbuf__write(buf, "abcdef", 6, pool));
+ SVN_ERR(svn_spillbuf__write(buf, "ghijkl", 6, pool));
+ /* now: two blocks: 8 and 4 bytes */
+
+ CHECK_READ(buf, 12, "abcdefgh", pool);
+ /* now: one block: 4 bytes */
+
+ SVN_ERR(svn_spillbuf__write(buf, "mnopqr", 6, pool));
+ /* now: two blocks: 8 and 2 bytes */
+
+ CHECK_READ(buf, 10, "ijklmnop", pool);
+ /* now: one block: 2 bytes */
+
+ SVN_ERR(svn_spillbuf__write(buf, "stuvwx", 6, pool));
+ SVN_ERR(svn_spillbuf__write(buf, "ABCDEF", 6, pool));
+ SVN_ERR(svn_spillbuf__write(buf, "GHIJKL", 6, pool));
+ /* now: two blocks: 8 and 6 bytes, and 6 bytes spilled to a file */
+
+ CHECK_READ(buf, 20, "qrstuvwx", pool);
+ CHECK_READ(buf, 12, "ABCDEF", pool);
+ CHECK_READ(buf, 6, "GHIJKL", pool);
+
+ SVN_TEST_ASSERT(svn_spillbuf__get_size(buf) == 0);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_spillbuf_interleaving(apr_pool_t *pool)
+{
+ svn_spillbuf_t *buf = svn_spillbuf__create(8 /* blocksize */,
+ 15 /* maxsize */,
+ pool);
+ return test_spillbuf__interleaving(pool, buf);
+}
+
+static svn_error_t *
+test_spillbuf_interleaving_spill_all(apr_pool_t *pool)
+{
+ svn_spillbuf_t *buf = svn_spillbuf__create_extended(
+ 8 /* blocksize */,
+ 15 /* maxsize */,
+ TRUE /* delete on close */,
+ TRUE /* spill all data */,
+ NULL, pool);
+ return test_spillbuf__interleaving(pool, buf);
+}
+
+static svn_error_t *
+test_spillbuf_reader(apr_pool_t *pool)
+{
+ svn_spillbuf_reader_t *sbr = svn_spillbuf__reader_create(4 /* blocksize */,
+ 100 /* maxsize */,
+ pool);
+ apr_size_t amt;
+ char buf[10];
+
+ SVN_ERR(svn_spillbuf__reader_write(sbr, "abcdef", 6, pool));
+
+ /* Get a buffer from the underlying reader, and grab a couple bytes. */
+ SVN_ERR(svn_spillbuf__reader_read(&amt, sbr, buf, 2, pool));
+ SVN_TEST_ASSERT(amt == 2 && memcmp(buf, "ab", 2) == 0);
+
+ /* Trigger the internal "save" feature of the SBR. */
+ SVN_ERR(svn_spillbuf__reader_write(sbr, "ghijkl", 6, pool));
+
+ /* Read from the save buffer, and from the internal blocks. */
+ SVN_ERR(svn_spillbuf__reader_read(&amt, sbr, buf, 10, pool));
+ SVN_TEST_ASSERT(amt == 10 && memcmp(buf, "cdefghijkl", 10) == 0);
+
+ /* Should be done. */
+ SVN_ERR(svn_spillbuf__reader_read(&amt, sbr, buf, 10, pool));
+ SVN_TEST_ASSERT(amt == 0);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_spillbuf_stream(apr_pool_t *pool)
+{
+ svn_spillbuf_t *buf = svn_spillbuf__create(4 /* blocksize */,
+ 100 /* maxsize */,
+ pool);
+ svn_stream_t *stream = svn_stream__from_spillbuf(buf, pool);
+ char readbuf[256];
+ apr_size_t readlen;
+ apr_size_t writelen;
+
+ writelen = 6;
+ SVN_ERR(svn_stream_write(stream, "abcdef", &writelen));
+ SVN_ERR(svn_stream_write(stream, "ghijkl", &writelen));
+ /* now: two blocks: 8 and 4 bytes */
+
+ readlen = 8;
+ SVN_ERR(svn_stream_read_full(stream, readbuf, &readlen));
+ SVN_TEST_ASSERT(readlen == 8
+ && memcmp(readbuf, "abcdefgh", 8) == 0);
+ /* now: one block: 4 bytes */
+
+ SVN_ERR(svn_stream_write(stream, "mnopqr", &writelen));
+ /* now: two blocks: 8 and 2 bytes */
+
+ SVN_ERR(svn_stream_read_full(stream, readbuf, &readlen));
+ SVN_TEST_ASSERT(readlen == 8
+ && memcmp(readbuf, "ijklmnop", 8) == 0);
+ /* now: one block: 2 bytes */
+
+ SVN_ERR(svn_stream_write(stream, "stuvwx", &writelen));
+ SVN_ERR(svn_stream_write(stream, "ABCDEF", &writelen));
+ SVN_ERR(svn_stream_write(stream, "GHIJKL", &writelen));
+ /* now: two blocks: 8 and 6 bytes, and 6 bytes spilled to a file */
+
+ SVN_ERR(svn_stream_read_full(stream, readbuf, &readlen));
+ SVN_TEST_ASSERT(readlen == 8
+ && memcmp(readbuf, "qrstuvwx", 8) == 0);
+ readlen = 6;
+ SVN_ERR(svn_stream_read_full(stream, readbuf, &readlen));
+ SVN_TEST_ASSERT(readlen == 6
+ && memcmp(readbuf, "ABCDEF", 6) == 0);
+ SVN_ERR(svn_stream_read_full(stream, readbuf, &readlen));
+ SVN_TEST_ASSERT(readlen == 6
+ && memcmp(readbuf, "GHIJKL", 6) == 0);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_spillbuf__rwfile(apr_pool_t *pool, svn_spillbuf_t *buf)
+{
+ SVN_ERR(svn_spillbuf__write(buf, "abcdef", 6, pool));
+ SVN_ERR(svn_spillbuf__write(buf, "ghijkl", 6, pool));
+ SVN_ERR(svn_spillbuf__write(buf, "mnopqr", 6, pool));
+ /* now: two blocks: 4 and 2 bytes, and 12 bytes in spill file. */
+
+ CHECK_READ(buf, 18, "abcd", pool);
+ /* now: one block: 2 bytes, and 12 bytes in spill file. */
+
+ CHECK_READ(buf, 14, "ef", pool);
+ /* now: no blocks, and 12 bytes in spill file. */
+
+ CHECK_READ(buf, 12, "ghij", pool);
+ /* now: no blocks, and 8 bytes in spill file. */
+
+ /* Write more data. It should be appended to the spill file. */
+ SVN_ERR(svn_spillbuf__write(buf, "stuvwx", 6, pool));
+ /* now: no blocks, and 14 bytes in spill file. */
+
+ CHECK_READ(buf, 14, "klmn", pool);
+ /* now: no blocks, and 10 bytes in spill file. */
+
+ CHECK_READ(buf, 10, "opqr", pool);
+ /* now: no blocks, and 6 bytes in spill file. */
+
+ CHECK_READ(buf, 6, "stuv", pool);
+ /* now: no blocks, and 2 bytes in spill file. */
+
+ CHECK_READ(buf, 2, "wx", pool);
+ /* now: no blocks, and no spill file. */
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_spillbuf_rwfile(apr_pool_t *pool)
+{
+ svn_spillbuf_t *buf = svn_spillbuf__create(4 /* blocksize */,
+ 10 /* maxsize */,
+ pool);
+ return test_spillbuf__rwfile(pool, buf);
+}
+
+static svn_error_t *
+test_spillbuf_rwfile_spill_all(apr_pool_t *pool)
+{
+ svn_spillbuf_t *buf = svn_spillbuf__create_extended(
+ 4 /* blocksize */,
+ 10 /* maxsize */,
+ TRUE /* delete on close */,
+ TRUE /* spill all data */,
+ NULL, pool);
+ return test_spillbuf__rwfile(pool, buf);
+}
+
+static svn_error_t *
+test_spillbuf__eof(apr_pool_t *pool, svn_spillbuf_t *buf)
+{
+ SVN_ERR(svn_spillbuf__write(buf, "abcdef", 6, pool));
+ SVN_ERR(svn_spillbuf__write(buf, "ghijkl", 6, pool));
+ /* now: two blocks: 4 and 2 bytes, and 6 bytes in spill file. */
+
+ CHECK_READ(buf, 12, "abcd", pool);
+ CHECK_READ(buf, 8, "ef", pool);
+ CHECK_READ(buf, 6, "ghij", pool);
+ CHECK_READ(buf, 2, "kl", pool);
+ /* The spill file should have been emptied and forgotten. */
+
+ /* Assuming the spill file has been forgotten, this should result in
+ precisely the same behavior. Specifically: the initial write should
+ create two blocks, and the second write should be spilled. If there
+ *was* a spill file, then this written data would go into the file. */
+ SVN_ERR(svn_spillbuf__write(buf, "abcdef", 6, pool));
+ SVN_ERR(svn_spillbuf__write(buf, "ghijkl", 6, pool));
+ CHECK_READ(buf, 12, "abcd", pool);
+ CHECK_READ(buf, 8, "ef", pool);
+ CHECK_READ(buf, 6, "ghij", pool);
+ CHECK_READ(buf, 2, "kl", pool);
+ /* The spill file should have been emptied and forgotten. */
+
+ /* Now, let's do a sequence where we arrange to hit EOF precisely on
+ a block-sized read. Note: the second write must be more than 4 bytes,
+ or it will not cause a spill. We use 8 to get the right boundary. */
+ SVN_ERR(svn_spillbuf__write(buf, "abcdef", 6, pool));
+ SVN_ERR(svn_spillbuf__write(buf, "ghijklmn", 8, pool));
+ CHECK_READ(buf, 14, "abcd", pool);
+ CHECK_READ(buf, 10, "ef", pool);
+ CHECK_READ(buf, 8, "ghij", pool);
+ CHECK_READ(buf, 4, "klmn", pool);
+ /* We discard the spill file when we know it has no data, rather than
+ upon hitting EOF (upon a read attempt). Thus, the spill file should
+ be gone. */
+
+ /* Verify the forgotten spill file. */
+ SVN_ERR(svn_spillbuf__write(buf, "abcdef", 6, pool));
+ SVN_ERR(svn_spillbuf__write(buf, "ghijkl", 6, pool));
+ CHECK_READ(buf, 12, "abcd", pool);
+ CHECK_READ(buf, 8, "ef", pool);
+ CHECK_READ(buf, 6, "ghij", pool);
+ /* Two unread bytes remaining in the spill file. */
+ SVN_TEST_ASSERT(svn_spillbuf__get_size(buf) == 2);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_spillbuf_eof(apr_pool_t *pool)
+{
+ svn_spillbuf_t *buf = svn_spillbuf__create(4 /* blocksize */,
+ 10 /* maxsize */,
+ pool);
+ return test_spillbuf__eof(pool, buf);
+}
+
+static svn_error_t *
+test_spillbuf_eof_spill_all(apr_pool_t *pool)
+{
+ svn_spillbuf_t *buf = svn_spillbuf__create_extended(
+ 4 /* blocksize */,
+ 10 /* maxsize */,
+ TRUE /* delete on close */,
+ TRUE /* spill all data */,
+ NULL, pool);
+ return test_spillbuf__eof(pool, buf);
+}
+
+static svn_error_t *
+test_spillbuf__file_attrs(apr_pool_t *pool, svn_boolean_t spill_all,
+ svn_spillbuf_t *buf)
+{
+ svn_filesize_t filesize;
+
+ SVN_ERR(svn_spillbuf__write(buf, "abcdef", 6, pool));
+ SVN_ERR(svn_spillbuf__write(buf, "ghijkl", 6, pool));
+ SVN_ERR(svn_spillbuf__write(buf, "mnopqr", 6, pool));
+
+ /* Check that the spillbuf size is what we expect it to be */
+ SVN_TEST_ASSERT(svn_spillbuf__get_size(buf) == 18);
+
+ /* Check file existence */
+ SVN_TEST_ASSERT(svn_spillbuf__get_filename(buf) != NULL);
+ SVN_TEST_ASSERT(svn_spillbuf__get_file(buf) != NULL);
+
+ /* The size of the file must match expectations */
+ SVN_ERR(svn_io_file_size_get(&filesize, svn_spillbuf__get_file(buf), pool));
+ if (spill_all)
+ SVN_TEST_ASSERT(filesize == svn_spillbuf__get_size(buf));
+ else
+ SVN_TEST_ASSERT(filesize == (svn_spillbuf__get_size(buf)
+ - svn_spillbuf__get_memory_size(buf)));
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_spillbuf_file_attrs(apr_pool_t *pool)
+{
+ svn_spillbuf_t *buf = svn_spillbuf__create(4 /* blocksize */,
+ 10 /* maxsize */,
+ pool);
+ return test_spillbuf__file_attrs(pool, FALSE, buf);
+}
+
+static svn_error_t *
+test_spillbuf_file_attrs_spill_all(apr_pool_t *pool)
+{
+ svn_spillbuf_t *buf = svn_spillbuf__create_extended(
+ 4 /* blocksize */,
+ 10 /* maxsize */,
+ TRUE /* delete on close */,
+ TRUE /* spill all data */,
+ NULL, pool);
+ return test_spillbuf__file_attrs(pool, TRUE, buf);
+}
+
+/* The test table. */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_spillbuf_basic, "basic spill buffer test"),
+ SVN_TEST_PASS2(test_spillbuf_basic_spill_all,
+ "basic spill buffer test (spill-all-data)"),
+ SVN_TEST_PASS2(test_spillbuf_callback, "spill buffer read callback"),
+ SVN_TEST_PASS2(test_spillbuf_callback_spill_all,
+ "spill buffer read callback (spill-all-data)"),
+ SVN_TEST_PASS2(test_spillbuf_file, "spill buffer file test"),
+ SVN_TEST_PASS2(test_spillbuf_file_spill_all,
+ "spill buffer file test (spill-all-data)"),
+ SVN_TEST_PASS2(test_spillbuf_interleaving,
+ "interleaving reads and writes"),
+ SVN_TEST_PASS2(test_spillbuf_interleaving_spill_all,
+ "interleaving reads and writes (spill-all-data)"),
+ SVN_TEST_PASS2(test_spillbuf_reader, "spill buffer reader test"),
+ SVN_TEST_PASS2(test_spillbuf_stream, "spill buffer stream test"),
+ SVN_TEST_PASS2(test_spillbuf_rwfile, "read/write spill file"),
+ SVN_TEST_PASS2(test_spillbuf_rwfile_spill_all,
+ "read/write spill file (spill-all-data)"),
+ SVN_TEST_PASS2(test_spillbuf_eof, "validate reaching EOF of spill file"),
+ SVN_TEST_PASS2(test_spillbuf_eof_spill_all,
+ "validate reaching EOF (spill-all-data)"),
+ SVN_TEST_PASS2(test_spillbuf_file_attrs, "check spill file properties"),
+ SVN_TEST_PASS2(test_spillbuf_file_attrs_spill_all,
+ "check spill file properties (spill-all-data)"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/sqlite-test.c b/subversion/tests/libsvn_subr/sqlite-test.c
new file mode 100644
index 0000000..f44aa8d
--- /dev/null
+++ b/subversion/tests/libsvn_subr/sqlite-test.c
@@ -0,0 +1,186 @@
+/*
+ * sqlite-test.c -- test the SQLite wrapper functions
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "private/svn_sqlite.h"
+#include "../svn_test.h"
+
+static svn_error_t *
+open_db(svn_sqlite__db_t **sdb,
+ const char **db_abspath_p,
+ const char *db_name,
+ const char *const *statements,
+ apr_int32_t timeout,
+ apr_pool_t *pool)
+{
+ const char *db_dir, *db_abspath;
+
+ SVN_ERR(svn_dirent_get_absolute(&db_dir, "sqlite-test-tmp", pool));
+ SVN_ERR(svn_io_remove_dir2(db_dir, TRUE, NULL, NULL, pool));
+ SVN_ERR(svn_io_make_dir_recursively(db_dir, pool));
+ svn_test_add_dir_cleanup(db_dir);
+
+ db_abspath = svn_dirent_join(db_dir, db_name, pool);
+
+ SVN_ERR(svn_sqlite__open(sdb, db_abspath, svn_sqlite__mode_rwcreate,
+ statements, 0, NULL, timeout, pool, pool));
+
+ if (db_abspath_p)
+ *db_abspath_p = db_abspath;
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+error_second(svn_sqlite__context_t *sctx,
+ int argc,
+ svn_sqlite__value_t *values[],
+ void *baton)
+{
+ static int i = 0;
+
+ if (++i == 2)
+ svn_sqlite__result_error(sctx, "fake error", 0);
+ else
+ svn_sqlite__result_int64(sctx, 1);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_sqlite_reset(apr_pool_t *pool)
+{
+ svn_sqlite__db_t *sdb;
+ svn_sqlite__stmt_t *stmt;
+ svn_boolean_t have_row;
+ const char *value;
+
+ static const char *const statements[] = {
+ "CREATE TABLE reset ("
+ " one TEXT NOT NULL PRIMARY KEY,"
+ " two TEXT"
+ ");"
+ "INSERT INTO reset(one, two) VALUES ('foo', 'bar');"
+ "INSERT INTO reset(one, two) VALUES ('zig', 'zag')",
+
+ "SELECT one FROM reset WHERE two IS NOT NULL AND error_second(one) "
+ "ORDER BY one",
+
+ NULL
+ };
+
+ SVN_ERR(open_db(&sdb, NULL, "reset", statements, 0, pool));
+ SVN_ERR(svn_sqlite__create_scalar_function(sdb, "error_second",
+ 1, FALSE /* deterministic */,
+ error_second, NULL));
+ SVN_ERR(svn_sqlite__exec_statements(sdb, 0));
+ SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, 1));
+
+ /* First step is OK. */
+ SVN_ERR(svn_sqlite__step(&have_row, stmt));
+ SVN_TEST_ASSERT(have_row);
+ value = svn_sqlite__column_text(stmt, 0, NULL);
+ SVN_TEST_ASSERT(value && !strcmp(value, "foo"));
+
+ /* Second step fails. */
+ SVN_TEST_ASSERT_ERROR(svn_sqlite__step(&have_row, stmt),
+ SVN_ERR_SQLITE_ERROR);
+
+ /* The svn_sqlite__step wrapper calls svn_sqlite__reset when step
+ fails so the reset call here is a no-op. The first step can be
+ repeated. */
+ SVN_ERR(svn_sqlite__reset(stmt));
+ SVN_ERR(svn_sqlite__step(&have_row, stmt));
+ SVN_TEST_ASSERT(have_row);
+ value = svn_sqlite__column_text(stmt, 0, NULL);
+ SVN_TEST_ASSERT(value && !strcmp(value, "foo"));
+ SVN_ERR(svn_sqlite__reset(stmt));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_sqlite_txn_commit_busy(apr_pool_t *pool)
+{
+ svn_sqlite__db_t *sdb1;
+ svn_sqlite__db_t *sdb2;
+ const char *db_abspath;
+ svn_error_t *err;
+
+ static const char *const statements[] = {
+ "CREATE TABLE test (one TEXT NOT NULL PRIMARY KEY)",
+
+ "INSERT INTO test(one) VALUES ('foo')",
+
+ "SELECT one from test",
+
+ NULL
+ };
+
+ /* Open two db connections.
+
+ Use a small busy_timeout of 250ms, since we're about to receive an
+ SVN_ERR_SQLITE_BUSY error, and retrying for the default 10 seconds
+ would be a waste of time. */
+ SVN_ERR(open_db(&sdb1, &db_abspath, "txn_commit_busy",
+ statements, 250, pool));
+ SVN_ERR(svn_sqlite__open(&sdb2, db_abspath, svn_sqlite__mode_readwrite,
+ statements, 0, NULL, 250, pool, pool));
+ SVN_ERR(svn_sqlite__exec_statements(sdb1, 0));
+
+ /* Begin two deferred transactions. */
+ SVN_ERR(svn_sqlite__begin_transaction(sdb1));
+ SVN_ERR(svn_sqlite__exec_statements(sdb1, 1 /* INSERT */));
+ SVN_ERR(svn_sqlite__begin_transaction(sdb2));
+ SVN_ERR(svn_sqlite__exec_statements(sdb2, 2 /* SELECT */));
+
+ /* Try to COMMIT the first write transaction; this should fail due to
+ the concurrent read transaction that holds a shared lock on the db. */
+ err = svn_sqlite__finish_transaction(sdb1, SVN_NO_ERROR);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_SQLITE_BUSY);
+
+ /* We failed to COMMIT the first transaction, but COMMIT-ting the
+ second transaction through a different db connection should succeed.
+ Upgrade it to a write transaction by executing the INSERT statement,
+ and then commit. */
+ SVN_ERR(svn_sqlite__exec_statements(sdb2, 1 /* INSERT */));
+ SVN_ERR(svn_sqlite__finish_transaction(sdb2, SVN_NO_ERROR));
+
+ SVN_ERR(svn_sqlite__close(sdb2));
+ SVN_ERR(svn_sqlite__close(sdb1));
+
+ return SVN_NO_ERROR;
+}
+
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_sqlite_reset,
+ "sqlite reset"),
+ SVN_TEST_PASS2(test_sqlite_txn_commit_busy,
+ "sqlite busy on transaction commit"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/stream-test.c b/subversion/tests/libsvn_subr/stream-test.c
new file mode 100644
index 0000000..0083d41
--- /dev/null
+++ b/subversion/tests/libsvn_subr/stream-test.c
@@ -0,0 +1,1043 @@
+/*
+ * stream-test.c -- test the stream functions
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdio.h>
+#include "svn_pools.h"
+#include "svn_io.h"
+#include "svn_subst.h"
+#include "svn_base64.h"
+#include <apr_general.h>
+
+#include "private/svn_io_private.h"
+
+#include "../svn_test.h"
+
+/*------------------------ Tests --------------------------- */
+
+static svn_error_t *
+test_stream_from_string(apr_pool_t *pool)
+{
+ int i;
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+#define NUM_TEST_STRINGS 4
+#define TEST_BUF_SIZE 10
+
+ static const char * const strings[NUM_TEST_STRINGS] = {
+ /* 0 */
+ "",
+ /* 1 */
+ "This is a string.",
+ /* 2 */
+ "This is, by comparison to the previous string, a much longer string.",
+ /* 3 */
+ "And if you thought that last string was long, you just wait until "
+ "I'm finished here. I mean, how can a string really claim to be long "
+ "when it fits on a single line of 80-columns? Give me a break. "
+ "Now, I'm not saying that I'm the longest string out there--far from "
+ "it--but I feel that it is safe to assume that I'm far longer than my "
+ "peers. And that demands some amount of respect, wouldn't you say?"
+ };
+
+ /* Test svn_stream_from_stringbuf() as a readable stream. */
+ for (i = 0; i < NUM_TEST_STRINGS; i++)
+ {
+ svn_stream_t *stream;
+ char buffer[TEST_BUF_SIZE];
+ svn_stringbuf_t *inbuf, *outbuf;
+ apr_size_t len;
+
+ inbuf = svn_stringbuf_create(strings[i], subpool);
+ outbuf = svn_stringbuf_create_empty(subpool);
+ stream = svn_stream_from_stringbuf(inbuf, subpool);
+ len = TEST_BUF_SIZE;
+ while (len == TEST_BUF_SIZE)
+ {
+ /* Read a chunk ... */
+ SVN_ERR(svn_stream_read_full(stream, buffer, &len));
+
+ /* ... and append the chunk to the stringbuf. */
+ svn_stringbuf_appendbytes(outbuf, buffer, len);
+ }
+
+ if (! svn_stringbuf_compare(inbuf, outbuf))
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Got unexpected result.");
+
+ svn_pool_clear(subpool);
+ }
+
+ /* Test svn_stream_from_stringbuf() as a writable stream. */
+ for (i = 0; i < NUM_TEST_STRINGS; i++)
+ {
+ svn_stream_t *stream;
+ svn_stringbuf_t *inbuf, *outbuf;
+ apr_size_t amt_read, len;
+
+ inbuf = svn_stringbuf_create(strings[i], subpool);
+ outbuf = svn_stringbuf_create_empty(subpool);
+ stream = svn_stream_from_stringbuf(outbuf, subpool);
+ amt_read = 0;
+ while (amt_read < inbuf->len)
+ {
+ /* Write a chunk ... */
+ len = TEST_BUF_SIZE < (inbuf->len - amt_read)
+ ? TEST_BUF_SIZE
+ : inbuf->len - amt_read;
+ SVN_ERR(svn_stream_write(stream, inbuf->data + amt_read, &len));
+ amt_read += len;
+ }
+
+ if (! svn_stringbuf_compare(inbuf, outbuf))
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Got unexpected result.");
+
+ svn_pool_clear(subpool);
+ }
+
+#undef NUM_TEST_STRINGS
+#undef TEST_BUF_SIZE
+
+ svn_pool_destroy(subpool);
+ return SVN_NO_ERROR;
+}
+
+/* generate some poorly compressable data */
+static svn_stringbuf_t *
+generate_test_bytes(int num_bytes, apr_pool_t *pool)
+{
+ svn_stringbuf_t *buffer = svn_stringbuf_create_empty(pool);
+ int total, repeat, repeat_iter;
+ char c;
+
+ for (total = 0, repeat = repeat_iter = 1, c = 0; total < num_bytes; total++)
+ {
+ svn_stringbuf_appendbyte(buffer, c);
+
+ repeat_iter--;
+ if (repeat_iter == 0)
+ {
+ if (c == 127)
+ repeat++;
+ c = (char)((c + 1) % 127);
+ repeat_iter = repeat;
+ }
+ }
+
+ return buffer;
+}
+
+
+static svn_error_t *
+test_stream_compressed(apr_pool_t *pool)
+{
+#define NUM_TEST_STRINGS 5
+#define TEST_BUF_SIZE 10
+#define GENERATED_SIZE 20000
+
+ int i;
+ svn_stringbuf_t *bufs[NUM_TEST_STRINGS];
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ static const char * const strings[NUM_TEST_STRINGS - 1] = {
+ /* 0 */
+ "",
+ /* 1 */
+ "This is a string.",
+ /* 2 */
+ "This is, by comparison to the previous string, a much longer string.",
+ /* 3 */
+ "And if you thought that last string was long, you just wait until "
+ "I'm finished here. I mean, how can a string really claim to be long "
+ "when it fits on a single line of 80-columns? Give me a break. "
+ "Now, I'm not saying that I'm the longest string out there--far from "
+ "it--but I feel that it is safe to assume that I'm far longer than my "
+ "peers. And that demands some amount of respect, wouldn't you say?"
+ };
+
+
+ for (i = 0; i < (NUM_TEST_STRINGS - 1); i++)
+ bufs[i] = svn_stringbuf_create(strings[i], pool);
+
+ /* the last buffer is for the generated data */
+ bufs[NUM_TEST_STRINGS - 1] = generate_test_bytes(GENERATED_SIZE, pool);
+
+ for (i = 0; i < NUM_TEST_STRINGS; i++)
+ {
+ svn_stream_t *stream;
+ svn_stringbuf_t *origbuf, *inbuf, *outbuf;
+ char buf[TEST_BUF_SIZE];
+ apr_size_t len;
+
+ origbuf = bufs[i];
+ inbuf = svn_stringbuf_create_empty(subpool);
+ outbuf = svn_stringbuf_create_empty(subpool);
+
+ stream = svn_stream_compressed(svn_stream_from_stringbuf(outbuf,
+ subpool),
+ subpool);
+ len = origbuf->len;
+ SVN_ERR(svn_stream_write(stream, origbuf->data, &len));
+ SVN_ERR(svn_stream_close(stream));
+
+ stream = svn_stream_compressed(svn_stream_from_stringbuf(outbuf,
+ subpool),
+ subpool);
+ len = TEST_BUF_SIZE;
+ while (len >= TEST_BUF_SIZE)
+ {
+ len = TEST_BUF_SIZE;
+ SVN_ERR(svn_stream_read_full(stream, buf, &len));
+ if (len > 0)
+ svn_stringbuf_appendbytes(inbuf, buf, len);
+ }
+
+ if (! svn_stringbuf_compare(inbuf, origbuf))
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Got unexpected result.");
+
+ SVN_ERR(svn_stream_close(stream));
+
+ svn_pool_clear(subpool);
+ }
+
+#undef NUM_TEST_STRINGS
+#undef TEST_BUF_SIZE
+#undef GENERATED_SIZE
+
+ svn_pool_destroy(subpool);
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_stream_tee(apr_pool_t *pool)
+{
+ svn_stringbuf_t *test_bytes = generate_test_bytes(100, pool);
+ svn_stringbuf_t *output_buf1 = svn_stringbuf_create_empty(pool);
+ svn_stringbuf_t *output_buf2 = svn_stringbuf_create_empty(pool);
+ svn_stream_t *source_stream = svn_stream_from_stringbuf(test_bytes, pool);
+ svn_stream_t *output_stream1 = svn_stream_from_stringbuf(output_buf1, pool);
+ svn_stream_t *output_stream2 = svn_stream_from_stringbuf(output_buf2, pool);
+ svn_stream_t *tee_stream;
+
+ tee_stream = svn_stream_tee(output_stream1, output_stream2, pool);
+ SVN_ERR(svn_stream_copy3(source_stream, tee_stream, NULL, NULL, pool));
+
+ if (!svn_stringbuf_compare(output_buf1, output_buf2))
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Duplicated streams did not match.");
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_stream_seek_file(apr_pool_t *pool)
+{
+ static const char *file_data[2] = {"One", "Two"};
+ svn_stream_t *stream;
+ svn_stringbuf_t *line;
+ svn_boolean_t eof;
+ apr_file_t *f;
+ static const char *fname = "test_stream_seek.txt";
+ int j;
+ apr_status_t status;
+ static const char *NL = APR_EOL_STR;
+ svn_stream_mark_t *mark;
+
+ status = apr_file_open(&f, fname, (APR_READ | APR_WRITE | APR_CREATE |
+ APR_TRUNCATE | APR_DELONCLOSE), APR_OS_DEFAULT, pool);
+ if (status != APR_SUCCESS)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, "Cannot open '%s'",
+ fname);
+
+ /* Create the file. */
+ for (j = 0; j < 2; j++)
+ {
+ apr_size_t len;
+
+ len = strlen(file_data[j]);
+ status = apr_file_write(f, file_data[j], &len);
+ if (status || len != strlen(file_data[j]))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Cannot write to '%s'", fname);
+ len = strlen(NL);
+ status = apr_file_write(f, NL, &len);
+ if (status || len != strlen(NL))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Cannot write to '%s'", fname);
+ }
+
+ /* Create a stream to read from the file. */
+ stream = svn_stream_from_aprfile2(f, FALSE, pool);
+ SVN_ERR(svn_stream_reset(stream));
+ SVN_ERR(svn_stream_readline(stream, &line, NL, &eof, pool));
+ SVN_TEST_ASSERT(! eof && strcmp(line->data, file_data[0]) == 0);
+ /* Set a mark at the beginning of the second line of the file. */
+ SVN_ERR(svn_stream_mark(stream, &mark, pool));
+ /* Read the second line and then seek back to the mark. */
+ SVN_ERR(svn_stream_readline(stream, &line, NL, &eof, pool));
+ SVN_TEST_ASSERT(! eof && strcmp(line->data, file_data[1]) == 0);
+ SVN_ERR(svn_stream_seek(stream, mark));
+ /* The next read should return the second line again. */
+ SVN_ERR(svn_stream_readline(stream, &line, NL, &eof, pool));
+ SVN_TEST_ASSERT(! eof && strcmp(line->data, file_data[1]) == 0);
+ /* The next read should return EOF. */
+ SVN_ERR(svn_stream_readline(stream, &line, NL, &eof, pool));
+ SVN_TEST_ASSERT(eof);
+
+ /* Go back to the beginning of the last line and try to skip it
+ * NOT including the EOL. */
+ SVN_ERR(svn_stream_seek(stream, mark));
+ SVN_ERR(svn_stream_skip(stream, strlen(file_data[1])));
+ /* The remaining line should be empty */
+ SVN_ERR(svn_stream_readline(stream, &line, NL, &eof, pool));
+ SVN_TEST_ASSERT(! eof && strcmp(line->data, "") == 0);
+ /* The next read should return EOF. */
+ SVN_ERR(svn_stream_readline(stream, &line, NL, &eof, pool));
+ SVN_TEST_ASSERT(eof);
+
+ SVN_ERR(svn_stream_close(stream));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_stream_seek_stringbuf(apr_pool_t *pool)
+{
+ svn_stream_t *stream;
+ svn_stringbuf_t *stringbuf;
+ char buf[4];
+ apr_size_t len;
+ svn_stream_mark_t *mark;
+
+ stringbuf = svn_stringbuf_create("OneTwo", pool);
+ stream = svn_stream_from_stringbuf(stringbuf, pool);
+ len = 3;
+ SVN_ERR(svn_stream_read_full(stream, buf, &len));
+ buf[3] = '\0';
+ SVN_TEST_STRING_ASSERT(buf, "One");
+ SVN_ERR(svn_stream_mark(stream, &mark, pool));
+ len = 3;
+ SVN_ERR(svn_stream_read_full(stream, buf, &len));
+ buf[3] = '\0';
+ SVN_TEST_STRING_ASSERT(buf, "Two");
+ SVN_ERR(svn_stream_seek(stream, mark));
+ len = 3;
+ SVN_ERR(svn_stream_read_full(stream, buf, &len));
+ buf[3] = '\0';
+ SVN_TEST_STRING_ASSERT(buf, "Two");
+
+ /* Go back to the begin of last word and try to skip some of it */
+ SVN_ERR(svn_stream_seek(stream, mark));
+ SVN_ERR(svn_stream_skip(stream, 2));
+ /* The remaining line should be empty */
+ len = 3;
+ SVN_ERR(svn_stream_read_full(stream, buf, &len));
+ buf[len] = '\0';
+ SVN_TEST_ASSERT(len == 1);
+ SVN_TEST_STRING_ASSERT(buf, "o");
+
+ SVN_ERR(svn_stream_close(stream));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_stream_seek_translated(apr_pool_t *pool)
+{
+ svn_stream_t *stream, *translated_stream;
+ svn_stringbuf_t *stringbuf;
+ char buf[44]; /* strlen("One$MyKeyword: my keyword was expanded $Two") + \0 */
+ apr_size_t len;
+ svn_stream_mark_t *mark;
+ apr_hash_t *keywords;
+ svn_string_t *keyword_val;
+
+ keywords = apr_hash_make(pool);
+ keyword_val = svn_string_create("my keyword was expanded", pool);
+ apr_hash_set(keywords, "MyKeyword", APR_HASH_KEY_STRING, keyword_val);
+ stringbuf = svn_stringbuf_create("One$MyKeyword$Two", pool);
+ stream = svn_stream_from_stringbuf(stringbuf, pool);
+ translated_stream = svn_subst_stream_translated(stream, APR_EOL_STR,
+ FALSE, keywords, TRUE, pool);
+ /* Seek from outside of keyword to inside of keyword. */
+ len = 25;
+ SVN_ERR(svn_stream_read_full(translated_stream, buf, &len));
+ SVN_TEST_ASSERT(len == 25);
+ buf[25] = '\0';
+ SVN_TEST_STRING_ASSERT(buf, "One$MyKeyword: my keyword");
+ SVN_ERR(svn_stream_mark(translated_stream, &mark, pool));
+ SVN_ERR(svn_stream_reset(translated_stream));
+ SVN_ERR(svn_stream_seek(translated_stream, mark));
+ len = 4;
+ SVN_ERR(svn_stream_read_full(translated_stream, buf, &len));
+ SVN_TEST_ASSERT(len == 4);
+ buf[4] = '\0';
+ SVN_TEST_STRING_ASSERT(buf, " was");
+
+ SVN_ERR(svn_stream_seek(translated_stream, mark));
+ SVN_ERR(svn_stream_skip(translated_stream, 2));
+ len = 2;
+ SVN_ERR(svn_stream_read_full(translated_stream, buf, &len));
+ SVN_TEST_ASSERT(len == 2);
+ buf[len] = '\0';
+ SVN_TEST_STRING_ASSERT(buf, "as");
+
+ /* Seek from inside of keyword to inside of keyword. */
+ SVN_ERR(svn_stream_mark(translated_stream, &mark, pool));
+ len = 9;
+ SVN_ERR(svn_stream_read_full(translated_stream, buf, &len));
+ SVN_TEST_ASSERT(len == 9);
+ buf[9] = '\0';
+ SVN_TEST_STRING_ASSERT(buf, " expanded");
+ SVN_ERR(svn_stream_seek(translated_stream, mark));
+ len = 9;
+ SVN_ERR(svn_stream_read_full(translated_stream, buf, &len));
+ SVN_TEST_ASSERT(len == 9);
+ buf[9] = '\0';
+ SVN_TEST_STRING_ASSERT(buf, " expanded");
+
+ SVN_ERR(svn_stream_seek(translated_stream, mark));
+ SVN_ERR(svn_stream_skip(translated_stream, 6));
+ len = 3;
+ SVN_ERR(svn_stream_read_full(translated_stream, buf, &len));
+ SVN_TEST_ASSERT(len == 3);
+ buf[len] = '\0';
+ SVN_TEST_STRING_ASSERT(buf, "ded");
+
+ /* Seek from inside of keyword to outside of keyword. */
+ SVN_ERR(svn_stream_mark(translated_stream, &mark, pool));
+ len = 4;
+ SVN_ERR(svn_stream_read_full(translated_stream, buf, &len));
+ SVN_TEST_ASSERT(len == 4);
+ buf[4] = '\0';
+ SVN_TEST_STRING_ASSERT(buf, " $Tw");
+ SVN_ERR(svn_stream_seek(translated_stream, mark));
+ len = 4;
+ SVN_ERR(svn_stream_read_full(translated_stream, buf, &len));
+ SVN_TEST_ASSERT(len == 4);
+ buf[4] = '\0';
+ SVN_TEST_STRING_ASSERT(buf, " $Tw");
+
+ SVN_ERR(svn_stream_seek(translated_stream, mark));
+ SVN_ERR(svn_stream_skip(translated_stream, 2));
+ len = 2;
+ SVN_ERR(svn_stream_read_full(translated_stream, buf, &len));
+ SVN_TEST_ASSERT(len == 2);
+ buf[len] = '\0';
+ SVN_TEST_STRING_ASSERT(buf, "Tw");
+
+ /* Seek from outside of keyword to outside of keyword. */
+ SVN_ERR(svn_stream_mark(translated_stream, &mark, pool));
+ len = 1;
+ SVN_ERR(svn_stream_read_full(translated_stream, buf, &len));
+ SVN_TEST_ASSERT(len == 1);
+ buf[1] = '\0';
+ SVN_TEST_STRING_ASSERT(buf, "o");
+ SVN_ERR(svn_stream_seek(translated_stream, mark));
+ len = 1;
+ SVN_ERR(svn_stream_read_full(translated_stream, buf, &len));
+ SVN_TEST_ASSERT(len == 1);
+ buf[1] = '\0';
+ SVN_TEST_STRING_ASSERT(buf, "o");
+
+ SVN_ERR(svn_stream_seek(translated_stream, mark));
+ SVN_ERR(svn_stream_skip(translated_stream, 2));
+ len = 1;
+ SVN_ERR(svn_stream_read_full(translated_stream, buf, &len));
+ SVN_TEST_ASSERT(len == 0);
+ buf[len] = '\0';
+ SVN_TEST_STRING_ASSERT(buf, "");
+
+ SVN_ERR(svn_stream_close(stream));
+
+ return SVN_NO_ERROR;
+}
+
+/* Check that svn_io__is_finfo_read_only() tracks the read-only state as
+   it is toggled by svn_io_set_file_read_only() and
+   svn_io_set_file_read_write().  (The FALSE passed to both setters is
+   their ignore-enoent flag, not the read-only state.) */
+static svn_error_t *
+test_readonly(apr_pool_t *pool)
+{
+  const char *path;
+  apr_finfo_t finfo;
+  svn_boolean_t read_only;
+  /* Stat fields required by svn_io__is_finfo_read_only(). */
+  apr_int32_t wanted = APR_FINFO_SIZE | APR_FINFO_MTIME | APR_FINFO_TYPE
+                        | APR_FINFO_LINK | APR_FINFO_PROT;
+
+
+  /* Create a scratch file that is removed on pool cleanup. */
+  SVN_ERR(svn_io_open_unique_file3(NULL, &path, NULL,
+                                   svn_io_file_del_on_pool_cleanup,
+                                   pool, pool));
+
+  /* File should be writable */
+  SVN_ERR(svn_io_stat(&finfo, path, wanted, pool));
+  SVN_ERR(svn_io__is_finfo_read_only(&read_only, &finfo, pool));
+  SVN_TEST_ASSERT(!read_only);
+
+  /* Set read only */
+  SVN_ERR(svn_io_set_file_read_only(path, FALSE, pool));
+
+  /* File should be read only */
+  SVN_ERR(svn_io_stat(&finfo, path, wanted, pool));
+  SVN_ERR(svn_io__is_finfo_read_only(&read_only, &finfo, pool));
+  SVN_TEST_ASSERT(read_only);
+
+  /* Set writable */
+  SVN_ERR(svn_io_set_file_read_write(path, FALSE, pool));
+
+  /* File should be writable */
+  SVN_ERR(svn_io_stat(&finfo, path, wanted, pool));
+  SVN_ERR(svn_io__is_finfo_read_only(&read_only, &finfo, pool));
+  SVN_TEST_ASSERT(!read_only);
+
+  return SVN_NO_ERROR;
+}
+
+/* Reading an empty file through svn_stream_compressed() must report EOF
+   (len == 0) instead of failing. */
+static svn_error_t *
+test_stream_compressed_empty_file(apr_pool_t *pool)
+{
+  svn_stream_t *stream, *empty_file_stream;
+  char buf[1];
+  apr_size_t len;
+
+  /* Reading an empty file with a compressed stream should not error. */
+  SVN_ERR(svn_stream_open_unique(&empty_file_stream, NULL, NULL,
+                                 svn_io_file_del_on_pool_cleanup,
+                                 pool, pool));
+  stream = svn_stream_compressed(empty_file_stream, pool);
+  len = sizeof(buf);
+  SVN_ERR(svn_stream_read_full(stream, buf, &len));
+  if (len > 0)
+    return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+                            "Got unexpected result.");
+
+  SVN_ERR(svn_stream_close(stream));
+
+  return SVN_NO_ERROR;
+}
+
+/* Write plaintext into an encode->decode base64 stream chain over a
+   stringbuf sink and verify the sink ends up with the original bytes,
+   for both line-breaking modes of svn_base64_encode2().  Note that
+   EXPECTED keeps accumulating across both phases, and so does ACTUAL,
+   because stringbuf-stream writes append. */
+static svn_error_t *
+test_stream_base64(apr_pool_t *pool)
+{
+  svn_stream_t *stream;
+  svn_stringbuf_t *actual = svn_stringbuf_create_empty(pool);
+  svn_stringbuf_t *expected = svn_stringbuf_create_empty(pool);
+  int i;
+  static const char *strings[] = {
+    "fairly boring test data... blah blah",
+    "A",
+    "abc",
+    "012345679",
+    NULL
+  };
+
+  /* Test svn_base64_encode2() with BREAK_LINES=TRUE.  (The original
+     comment here said FALSE, contradicting the TRUE argument below.) */
+  stream = svn_stream_from_stringbuf(actual, pool);
+  stream = svn_base64_decode(stream, pool);
+  stream = svn_base64_encode2(stream, TRUE, pool);
+
+  for (i = 0; strings[i]; i++)
+    {
+      apr_size_t len = strlen(strings[i]);
+
+      svn_stringbuf_appendbytes(expected, strings[i], len);
+      SVN_ERR(svn_stream_write(stream, strings[i], &len));
+    }
+
+  SVN_ERR(svn_stream_close(stream));
+
+  SVN_TEST_STRING_ASSERT(actual->data, expected->data);
+
+  /* Test svn_base64_encode2() with BREAK_LINES=FALSE. */
+  stream = svn_stream_from_stringbuf(actual, pool);
+  stream = svn_base64_decode(stream, pool);
+  stream = svn_base64_encode2(stream, FALSE, pool);
+
+  for (i = 0; strings[i]; i++)
+    {
+      apr_size_t len = strlen(strings[i]);
+
+      svn_stringbuf_appendbytes(expected, strings[i], len);
+      SVN_ERR(svn_stream_write(stream, strings[i], &len));
+    }
+
+  SVN_ERR(svn_stream_close(stream));
+
+  SVN_TEST_STRING_ASSERT(actual->data, expected->data);
+
+  return SVN_NO_ERROR;
+}
+
+/* This test doesn't test much unless run under valgrind when it
+   triggers the problem reported here:
+
+   http://mail-archives.apache.org/mod_mbox/subversion-dev/201202.mbox/%3C87sjik3m8q.fsf@stat.home.lan%3E
+
+   The two data writes caused the base 64 code to allocate a buffer
+   that was a byte short but exactly matched a stringbuf blocksize.
+   That meant the stringbuf didn't overallocate and a write beyond
+   the end of the buffer occurred.
+ */
+static svn_error_t *
+test_stream_base64_2(apr_pool_t *pool)
+{
+  /* encoded1/encoded2 are written in two separate svn_stream_write()
+     calls; the sizes are chosen to reproduce the allocation bug above. */
+  const struct data_t {
+    const char *encoded1;
+    const char *encoded2;
+  } data[] = {
+    {
+      "MTI",
+      "123456789A123456789B123456789C123456789D123456789E"
+      "223456789A123456789B123456789C123456789D123456789E"
+      "323456789A123456789B123456789C123456789D123456789E"
+      "423456789A123456789B123456789C123456789D123456789E"
+      "523456789A123456789B123456789C123456789D123456789E"
+      "623456789A123456789B123456789C123456789D123456789E"
+      "723456789A123456789B123456789C123456789D123456789E"
+      "823456789A123456789B123456789C123456789D123456789E"
+      "923456789A123456789B123456789C123456789D123456789E"
+      "A23456789A123456789B123456789C123456789D123456789E"
+      "123456789A123456789B123456789C123456789D123456789E"
+      "223456789A123456789B123456789C123456789D123456789E"
+      "323456789A123456789B123456789C123456789D123456789E"
+      "423456789A123456789B123456789C123456789D123456789E"
+      "523456789A123456789B123456789C123456789D123456789E"
+      "623456789A123456789B123456789C123456789D123456789E"
+      "723456789A123456789B123456789C123456789D123456789E"
+      "823456789A123456789B123456789C123456789D123456789E"
+      "923456789A123456789B123456789C123456789D123456789E"
+      "B23456789A123456789B123456789C123456789D123456789E"
+      "123456789A123456789B123456789C123456789D123456789E"
+      "223456789A123456789B123456789C123456789D123456789E"
+      "323456789A123456789B123456789C123456789D123456789E"
+      "423456789A123456789B123456789C123456789D123456789E"
+      "523456789A123456789B123456789C123456789D123456789E"
+      "623456789A123456789B123456789C123456789D123456789E"
+      "723456789A123456789B123456789C123456789D123456789E"
+      "823456789A123456789B123456789C123456789D123456789E"
+      "923456789A123456789B123456789C123456789D123456789E"
+      "C23456789A123456789B123456789C123456789D123456789E"
+      "123456789A123456789B123456789C123456789D123456789E"
+      "223456789A123456789B123456789C123456789D123456789E"
+      "323456789A123456789B123456789C123456789D123456789E"
+      "423456789A123456789B123456789C123456789D123456789E"
+      "523456789A123456789B123456789C123456789D123456789E"
+      "623456789A123456789B123456789C123456789D123456789E"
+      "723456789A123456789B123456789C123456789D123456789E"
+      "823456789A123456789B123456789C123456789D123456789E"
+      "923456789A123456789B123456789C123456789D123456789E"
+      "D23456789A123456789B123456789C123456789D123456789E"
+      "123456789A123456789B123456789C123456789D123456789E"
+      "223456789A123456789B123456789C123456789D123456789E"
+      "323456789A123456789B123456789C123456789D123456789E"
+      "423456789A123456789B123456789C123456789D123456789E"
+      "523456789A123456789B123456789C123456789D123456789E"
+      "623456789A123456789B123456789C123456789D123456789E"
+      "723456789A123456789B123456789C123456789D123456789E"
+      "823456789A123456789B123456789C123456789D123456789E"
+      "923456789A123456789B123456789C123456789D123456789E"
+      "E23456789A123456789B123456789C123456789D123456789E"
+      "123456789A123456789B123456789C123456789D123456789E"
+      "223456789A123456789B123456789C123456789D123456789E"
+      "323456789A123456789B123456789C123456789D123456789E"
+      "423456789A123456789B123456789C123456789D123456789E"
+      "523456789A123456789B123456789C123456789D123456789E"
+      "623456789A123456789B123456789C123456789D123456789E"
+      "723456789A123456789B123456789C123456789D123456789E"
+      "823456789A123456789B123456789C123456789D123456789E"
+      "923456789A123456789B123456789C123456789D123456789E"
+      "F23456789A123456789B123456789C123456789D123456789E"
+      "123456789A123456789B123456789C123456789D123456789E"
+      "223456789A123456789B123456789C123456789D123456789E"
+      "323456789A123456789B123456789C123456789D123456789E"
+      "423456789A123456789B123456789C123456789D123456789E"
+      "523456789A123456789B123456789C123456789D123456789E"
+      "623456789A123456789B123456789C123456789D123456789E"
+      "723456789A123456789B123456789C123456789D123456789E"
+      "823456789A123456789B123456789C123456789D123456789E"
+      "923456789A123456789B123456789C123456789D123456789E"
+      "G23456789A123456789B123456789C123456789D123456789E"
+      "123456789A123456789B123456789C123456789D123456789E"
+      "223456789A123456789B123456789C123456789D123456789E"
+      "323456789A123456789B123456789C123456789D123456789E"
+      "423456789A123456789B123456789C123456789D123456789E"
+      "523456789A123456789B123456789C123456789D123456789E"
+      "623456789A123456789B123456789C123456789D123456789E"
+      "723456789A123456789B123456789C123456789D123456789E"
+      "823456789A123456789B123456789C123456789D123456789E"
+      "923456789A123456789B123456789C123456789D123456789E"
+      "H23456789A123456789B123456789C123456789D123456789E"
+      "123456789A123456789B123456789C123456789D123456789E"
+      "223456789A123456789B123456789C123456789D123456789E"
+      "323456789A123456789B123456789C123456789D123456789E"
+      "423456789A123456789B123456789C123456789D123456789E"
+      "523456789A123456789B123456789C123456789D123456789E"
+      "623456789A123456789B123456789C123456789D123456789E"
+      "723456789A123456789B123456789C123456789D123456789E"
+      "823456789A123456789B123456789C123456789D123456789E"
+      "923456789A123456789B123456789C123456789D123456789E"
+      "I23456789A123456789B123456789C123456789D123456789E"
+      "123456789A123456789B123456789C123456789D123456789E"
+      "223456789A123456789B123456789C123456789D123456789E"
+      "323456789A123456789B123456789C123456789D123456789E"
+      "423456789A123456789B123456789C123456789D123456789E"
+      "523456789A123456789B123456789C123456789D123456789E"
+      "623456789A123456789B123456789C123456789D123456789E"
+      "723456789A123456789B123456789C123456789D123456789E"
+      "823456789A123456789B123456789C123456789D123456789E"
+      "923456789A123456789B123456789C123456789D123456789E"
+      "J23456789A123456789B123456789C123456789D123456789E"
+      "123456789A123456789B123456789C123456789D123456789E"
+      "223456789A123456789B123456789C123456789D123456789E"
+      "323456789A123456789B123456789C123456789D123456789E"
+      "423456789A123456789B123456789C123456789D123456789E"
+      "523456789A123456789B123456789C123456789D12345"
+    },
+    {
+      NULL,
+      NULL,
+    },
+  };
+  int i;
+
+  /* NOTE(review): neither loop asserts ACTUAL against EXPECTED; the
+     value of this test is the write pattern itself, whose buffer
+     overrun is detected by valgrind (see the comment above). */
+  /* Test svn_base64_encode2() with BREAK_LINES=TRUE. */
+  for (i = 0; data[i].encoded1; i++)
+    {
+      apr_size_t len1 = strlen(data[i].encoded1);
+
+      svn_stringbuf_t *actual = svn_stringbuf_create_empty(pool);
+      svn_stringbuf_t *expected = svn_stringbuf_create_empty(pool);
+      svn_stream_t *stream = svn_stream_from_stringbuf(actual, pool);
+
+      stream = svn_base64_encode2(stream, TRUE, pool);
+      stream = svn_base64_decode(stream, pool);
+
+      SVN_ERR(svn_stream_write(stream, data[i].encoded1, &len1));
+      svn_stringbuf_appendbytes(expected, data[i].encoded1, len1);
+
+      if (data[i].encoded2)
+        {
+          apr_size_t len2 = strlen(data[i].encoded2);
+          SVN_ERR(svn_stream_write(stream, data[i].encoded2, &len2));
+          svn_stringbuf_appendbytes(expected, data[i].encoded2, len2);
+        }
+
+      SVN_ERR(svn_stream_close(stream));
+    }
+
+  /* Test svn_base64_encode2() with BREAK_LINES=FALSE. */
+  for (i = 0; data[i].encoded1; i++)
+    {
+      apr_size_t len1 = strlen(data[i].encoded1);
+
+      svn_stringbuf_t *actual = svn_stringbuf_create_empty(pool);
+      svn_stringbuf_t *expected = svn_stringbuf_create_empty(pool);
+      svn_stream_t *stream = svn_stream_from_stringbuf(actual, pool);
+
+      stream = svn_base64_encode2(stream, FALSE, pool);
+      stream = svn_base64_decode(stream, pool);
+
+      SVN_ERR(svn_stream_write(stream, data[i].encoded1, &len1));
+      svn_stringbuf_appendbytes(expected, data[i].encoded1, len1);
+
+      if (data[i].encoded2)
+        {
+          apr_size_t len2 = strlen(data[i].encoded2);
+          SVN_ERR(svn_stream_write(stream, data[i].encoded2, &len2));
+          svn_stringbuf_appendbytes(expected, data[i].encoded2, len2);
+        }
+
+      SVN_ERR(svn_stream_close(stream));
+    }
+
+  return SVN_NO_ERROR;
+}
+
+/* svn_stringbuf_from_stream() must drain the whole stream on the first
+   call (with a zero length hint as well as with an exact one) and yield
+   the empty string on a subsequent call at EOF. */
+static svn_error_t *
+test_stringbuf_from_stream(apr_pool_t *pool)
+{
+  const char *test_cases[] =
+    {
+      "",
+      "x",
+      "this string is longer than the default 64 minimum block size used"
+      "by the function under test",
+      NULL
+    };
+
+  const char **test_case;
+  for (test_case = test_cases; *test_case; ++test_case)
+    {
+      svn_stringbuf_t *result1, *result2, *result3, *result4;
+      svn_stringbuf_t *original = svn_stringbuf_create(*test_case, pool);
+
+      svn_stream_t *stream1 = svn_stream_from_stringbuf(original, pool);
+      svn_stream_t *stream2 = svn_stream_from_stringbuf(original, pool);
+
+      SVN_ERR(svn_stringbuf_from_stream(&result1, stream1, 0, pool));
+      SVN_ERR(svn_stringbuf_from_stream(&result2, stream1, 0, pool));
+      SVN_ERR(svn_stringbuf_from_stream(&result3, stream2, original->len,
+                                        pool));
+      SVN_ERR(svn_stringbuf_from_stream(&result4, stream2, original->len,
+                                        pool));
+
+      /* C-string contents must match */
+      SVN_TEST_STRING_ASSERT(result1->data, original->data);
+      SVN_TEST_STRING_ASSERT(result2->data, "");
+      SVN_TEST_STRING_ASSERT(result3->data, original->data);
+      SVN_TEST_STRING_ASSERT(result4->data, "");
+
+      /* assumed length must match */
+      SVN_TEST_ASSERT(result1->len == original->len);
+      SVN_TEST_ASSERT(result2->len == 0);
+      SVN_TEST_ASSERT(result3->len == original->len);
+      SVN_TEST_ASSERT(result4->len == 0);
+    }
+
+  return SVN_NO_ERROR;
+}
+
+/* Minimal full-read callback for svn_stream_set_read2(): always reports
+   EOF by setting *LEN to 0.  BATON and BUFFER are unused. */
+static svn_error_t *
+empty_read_full_fn(void *baton, char *buffer, apr_size_t *len)
+{
+  *len = 0;
+  return SVN_NO_ERROR;
+}
+
+/* svn_stream_compressed() over a stream that only implements the
+   full-read callback (no partial read) must still read cleanly to EOF. */
+static svn_error_t *
+test_stream_compressed_read_full(apr_pool_t *pool)
+{
+  svn_stream_t *stream, *empty_stream;
+  char buf[1];
+  apr_size_t len;
+
+  /* Reading an empty stream with read_full only support should not error. */
+  empty_stream = svn_stream_create(NULL, pool);
+
+  /* Create stream with only full read support. */
+  svn_stream_set_read2(empty_stream, NULL, empty_read_full_fn);
+
+  stream = svn_stream_compressed(empty_stream, pool);
+  len = sizeof(buf);
+  SVN_ERR(svn_stream_read_full(stream, buf, &len));
+  if (len > 0)
+    return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+                            "Got unexpected result.");
+
+  SVN_ERR(svn_stream_close(stream));
+
+  return SVN_NO_ERROR;
+}
+
+/* svn_stream_contents_checksum() must produce the well-known MD5 and
+   SHA-1 digests of the "quick brown fox" pangram. */
+static svn_error_t *
+test_stream_checksum(apr_pool_t *pool)
+{
+  svn_string_t *str =
+    svn_string_create("The quick brown fox jumps over the lazy dog", pool);
+  svn_checksum_t *actual;
+
+  SVN_ERR(svn_stream_contents_checksum(&actual,
+                                       svn_stream_from_string(str, pool),
+                                       svn_checksum_md5, pool, pool));
+  SVN_TEST_STRING_ASSERT("9e107d9d372bb6826bd81d3542a419d6",
+                         svn_checksum_to_cstring(actual, pool));
+
+  SVN_ERR(svn_stream_contents_checksum(&actual,
+                                       svn_stream_from_string(str, pool),
+                                       svn_checksum_sha1, pool, pool));
+  SVN_TEST_STRING_ASSERT("2fd4e1c67a2d28fced849ee1bb76e7391b93eb12",
+                         svn_checksum_to_cstring(actual, pool));
+
+  return SVN_NO_ERROR;
+}
+
+/* Shared body of the readline-from-file tests: exercise
+   svn_stream_readline() with line terminator EOL against several files
+   created in a scratch directory named TESTNAME (registered for
+   cleanup).  Covers: empty file, lone EOL, two lines, missing trailing
+   EOL, and a line longer than typical internal buffers. */
+static svn_error_t *
+test_stream_readline_file(const char *testname,
+                          const char *eol,
+                          apr_pool_t *pool)
+{
+  const char *tmp_dir;
+  const char *tmp_file;
+  svn_stream_t *stream;
+  svn_stringbuf_t *line;
+  svn_boolean_t eof;
+  static const char long_line[] =
+    "The quick brown fox jumps over the lazy dog, and "
+    "jackdaws love my big sphinx of quartz, and "
+    "pack my box with five dozen liquor jugs.";
+
+  SVN_ERR(svn_dirent_get_absolute(&tmp_dir, testname, pool));
+  SVN_ERR(svn_io_remove_dir2(tmp_dir, TRUE, NULL, NULL, pool));
+  SVN_ERR(svn_io_make_dir_recursively(tmp_dir, pool));
+  svn_test_add_dir_cleanup(tmp_dir);
+
+  /* Test 1: Read empty file. */
+  tmp_file = svn_dirent_join(tmp_dir, "empty", pool);
+  SVN_ERR(svn_io_file_create(tmp_file, "", pool));
+  SVN_ERR(svn_stream_open_readonly(&stream, tmp_file, pool, pool));
+
+  SVN_ERR(svn_stream_readline(stream, &line, eol, &eof, pool));
+  SVN_TEST_ASSERT(line->len == 0);
+  SVN_TEST_STRING_ASSERT(line->data, "");
+  SVN_TEST_ASSERT(eof);
+
+  /* A second read at EOF must behave the same. */
+  SVN_ERR(svn_stream_readline(stream, &line, eol, &eof, pool));
+  SVN_TEST_ASSERT(line->len == 0);
+  SVN_TEST_STRING_ASSERT(line->data, "");
+  SVN_TEST_ASSERT(eof);
+
+  SVN_ERR(svn_stream_close(stream));
+
+  /* Test 2: Read empty line. */
+  tmp_file = svn_dirent_join(tmp_dir, "empty-line", pool);
+  SVN_ERR(svn_io_file_create(tmp_file, eol, pool));
+  SVN_ERR(svn_stream_open_readonly(&stream, tmp_file, pool, pool));
+
+  /* The EOL itself yields one empty line with EOF not yet reached. */
+  SVN_ERR(svn_stream_readline(stream, &line, eol, &eof, pool));
+  SVN_TEST_ASSERT(line->len == 0);
+  SVN_TEST_STRING_ASSERT(line->data, "");
+  SVN_TEST_ASSERT(!eof);
+
+  SVN_ERR(svn_stream_readline(stream, &line, eol, &eof, pool));
+  SVN_TEST_ASSERT(line->len == 0);
+  SVN_TEST_STRING_ASSERT(line->data, "");
+  SVN_TEST_ASSERT(eof);
+
+  SVN_ERR(svn_stream_close(stream));
+
+  /* Test 3: Read two lines. */
+  tmp_file = svn_dirent_join(tmp_dir, "lines", pool);
+  SVN_ERR(svn_io_file_create(tmp_file,
+                             apr_pstrcat(pool,
+                                         "first", eol, "second", eol,
+                                         SVN_VA_NULL),
+                             pool));
+  SVN_ERR(svn_stream_open_readonly(&stream, tmp_file, pool, pool));
+
+  SVN_ERR(svn_stream_readline(stream, &line, eol, &eof, pool));
+  SVN_TEST_ASSERT(line->len == 5);
+  SVN_TEST_STRING_ASSERT(line->data, "first");
+  SVN_TEST_ASSERT(!eof);
+
+  SVN_ERR(svn_stream_readline(stream, &line, eol, &eof, pool));
+  SVN_TEST_ASSERT(line->len == 6);
+  SVN_TEST_STRING_ASSERT(line->data, "second");
+  SVN_TEST_ASSERT(!eof);
+
+  SVN_ERR(svn_stream_readline(stream, &line, eol, &eof, pool));
+  SVN_TEST_ASSERT(line->len == 0);
+  SVN_TEST_STRING_ASSERT(line->data, "");
+  SVN_TEST_ASSERT(eof);
+
+  SVN_ERR(svn_stream_close(stream));
+
+  /* Test 4: Content without end-of-line. */
+  tmp_file = svn_dirent_join(tmp_dir, "no-eol", pool);
+  SVN_ERR(svn_io_file_create(tmp_file, "text", pool));
+  SVN_ERR(svn_stream_open_readonly(&stream, tmp_file, pool, pool));
+
+  /* The unterminated text is returned together with EOF. */
+  SVN_ERR(svn_stream_readline(stream, &line, eol, &eof, pool));
+  SVN_TEST_ASSERT(line->len == 4);
+  SVN_TEST_STRING_ASSERT(line->data, "text");
+  SVN_TEST_ASSERT(eof);
+
+  SVN_ERR(svn_stream_close(stream));
+
+  /* Test 5: Read long line. */
+  tmp_file = svn_dirent_join(tmp_dir, "long-line", pool);
+  SVN_ERR(svn_io_file_create(tmp_file,
+                             apr_pstrcat(pool, long_line, eol, SVN_VA_NULL),
+                             pool));
+  SVN_ERR(svn_stream_open_readonly(&stream, tmp_file, pool, pool));
+
+  SVN_ERR(svn_stream_readline(stream, &line, eol, &eof, pool));
+  SVN_TEST_ASSERT(line->len == strlen(long_line));
+  SVN_TEST_STRING_ASSERT(line->data, long_line);
+  SVN_TEST_ASSERT(!eof);
+
+  SVN_ERR(svn_stream_readline(stream, &line, eol, &eof, pool));
+  SVN_TEST_ASSERT(line->len == 0);
+  SVN_TEST_STRING_ASSERT(line->data, "");
+  SVN_TEST_ASSERT(eof);
+
+  SVN_ERR(svn_stream_close(stream));
+
+  return SVN_NO_ERROR;
+}
+
+/* Run the readline file tests with LF ("\n") as the line terminator. */
+static svn_error_t *
+test_stream_readline_file_lf(apr_pool_t *pool)
+{
+  SVN_ERR(test_stream_readline_file("test_stream_readline_file_lf",
+                                    "\n", pool));
+  return SVN_NO_ERROR;
+}
+
+/* Run the readline file tests with CRLF ("\r\n") as the line terminator. */
+static svn_error_t *
+test_stream_readline_file_crlf(apr_pool_t *pool)
+{
+  SVN_ERR(test_stream_readline_file("test_stream_readline_file_crlf",
+                                    "\r\n", pool));
+  return SVN_NO_ERROR;
+}
+
+/* The test table. */
+
+/* Presumably the framework's cap on concurrently running tests; 1 keeps
+   them serial -- TODO confirm against the svn_test harness. */
+static int max_threads = 1;
+
+/* Registry consumed by SVN_TEST_MAIN; entries are bracketed by
+   SVN_TEST_NULL sentinels. */
+static struct svn_test_descriptor_t test_funcs[] =
+  {
+    SVN_TEST_NULL,
+    SVN_TEST_PASS2(test_stream_from_string,
+                   "test svn_stream_from_string"),
+    SVN_TEST_PASS2(test_stream_compressed,
+                   "test compressed streams"),
+    SVN_TEST_PASS2(test_stream_tee,
+                   "test 'tee' streams"),
+    SVN_TEST_PASS2(test_stream_seek_file,
+                   "test stream seeking for files"),
+    SVN_TEST_PASS2(test_stream_seek_stringbuf,
+                   "test stream seeking for stringbufs"),
+    SVN_TEST_PASS2(test_stream_seek_translated,
+                   "test stream seeking for translated streams"),
+    SVN_TEST_PASS2(test_readonly,
+                   "test setting a file readonly"),
+    SVN_TEST_PASS2(test_stream_compressed_empty_file,
+                   "test compressed streams with empty files"),
+    SVN_TEST_PASS2(test_stream_base64,
+                   "test base64 encoding/decoding streams"),
+    SVN_TEST_PASS2(test_stream_base64_2,
+                   "base64 decoding allocation problem"),
+    SVN_TEST_PASS2(test_stringbuf_from_stream,
+                   "test svn_stringbuf_from_stream"),
+    SVN_TEST_PASS2(test_stream_compressed_read_full,
+                   "test compression for streams without partial read"),
+    SVN_TEST_PASS2(test_stream_checksum,
+                   "test svn_stream_contents_checksum()"),
+    SVN_TEST_PASS2(test_stream_readline_file_lf,
+                   "test reading LF-terminated lines from file"),
+    SVN_TEST_PASS2(test_stream_readline_file_crlf,
+                   "test reading CRLF-terminated lines from file"),
+    SVN_TEST_NULL
+  };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/string-test.c b/subversion/tests/libsvn_subr/string-test.c
new file mode 100644
index 0000000..59b2910
--- /dev/null
+++ b/subversion/tests/libsvn_subr/string-test.c
@@ -0,0 +1,1157 @@
+/*
+ * string-test.c: a collection of libsvn_string tests
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* ====================================================================
+ To add tests, look toward the bottom of this file.
+
+*/
+
+
+
+#include <stdio.h>
+#include <string.h>
+
+#include <apr_pools.h>
+#include <apr_file_io.h>
+
+#include "../svn_test.h"
+
+#include "svn_io.h"
+#include "svn_error.h"
+#include "svn_sorts.h" /* MIN / MAX */
+#include "svn_string.h" /* This includes <apr_*.h> */
+#include "private/svn_string_private.h"
+
+/* A quick way to create error messages.  Formats FMT and the varargs
+   into POOL and wraps the result in an SVN_ERR_TEST_FAILED error. */
+static svn_error_t *
+fail(apr_pool_t *pool, const char *fmt, ...)
+{
+  va_list ap;
+  char *msg;
+
+  va_start(ap, fmt);
+  msg = apr_pvsprintf(pool, fmt, ap);
+  va_end(ap);
+
+  return svn_error_create(SVN_ERR_TEST_FAILED, 0, msg);
+}
+
+
+/* Some of our own global variables, for simplicity.  Yes,
+   simplicity. */
+static const char *phrase_1 = "hello, ";
+/* Deliberately longer than 16 bytes so svn_stringbuf_ncreate(..., 16, ...)
+   truncates it (see test2). */
+static const char *phrase_2 = "a longish phrase of sorts, longer than 16 anyway";
+
+
+
+
+/* svn_stringbuf_create() must copy the C string and NUL-terminate it. */
+static svn_error_t *
+test1(apr_pool_t *pool)
+{
+  svn_stringbuf_t *a = svn_stringbuf_create(phrase_1, pool);
+
+  /* Test that length, data, and null-termination are correct. */
+  if ((a->len == strlen(phrase_1)) && ((strcmp(a->data, phrase_1)) == 0))
+    return SVN_NO_ERROR;
+  else
+    return fail(pool, "test failed");
+}
+
+
+/* svn_stringbuf_ncreate() must copy exactly the first 16 bytes. */
+static svn_error_t *
+test2(apr_pool_t *pool)
+{
+  svn_stringbuf_t *b = svn_stringbuf_ncreate(phrase_2, 16, pool);
+
+  /* Test that length, data, and null-termination are correct. */
+  if ((b->len == 16) && ((strncmp(b->data, phrase_2, 16)) == 0))
+    return SVN_NO_ERROR;
+  else
+    return fail(pool, "test failed");
+}
+
+
+static svn_error_t *
+test3(apr_pool_t *pool)
+{
+ char *tmp;
+ size_t old_len;
+
+ svn_stringbuf_t *a = svn_stringbuf_create(phrase_1, pool);
+ svn_stringbuf_t *b = svn_stringbuf_ncreate(phrase_2, 16, pool);
+
+ tmp = apr_palloc(pool, (a->len + b->len + 1));
+ strcpy(tmp, a->data);
+ strcat(tmp, b->data);
+ old_len = a->len;
+ svn_stringbuf_appendstr(a, b);
+
+ /* Test that length, data, and null-termination are correct. */
+ if ((a->len == (old_len + b->len)) && ((strcmp(a->data, tmp)) == 0))
+ return SVN_NO_ERROR;
+ else
+ return fail(pool, "test failed");
+}
+
+
+/* svn_stringbuf_appendcstr() must append the whole C string. */
+static svn_error_t *
+test4(apr_pool_t *pool)
+{
+  svn_stringbuf_t *a = svn_stringbuf_create(phrase_1, pool);
+  svn_stringbuf_appendcstr(a, "new bytes to append");
+
+  /* Test that length, data, and null-termination are correct. */
+  if (svn_stringbuf_compare
+      (a, svn_stringbuf_create("hello, new bytes to append", pool)))
+    return SVN_NO_ERROR;
+  else
+    return fail(pool, "test failed");
+}
+
+
+/* svn_stringbuf_appendbytes() must append only the first 9 bytes. */
+static svn_error_t *
+test5(apr_pool_t *pool)
+{
+  svn_stringbuf_t *a = svn_stringbuf_create(phrase_1, pool);
+  svn_stringbuf_appendbytes(a, "new bytes to append", 9);
+
+  /* Test that length, data, and null-termination are correct. */
+  if (svn_stringbuf_compare
+      (a, svn_stringbuf_create("hello, new bytes", pool)))
+    return SVN_NO_ERROR;
+  else
+    return fail(pool, "test failed");
+}
+
+
+/* svn_stringbuf_dup() must produce an equal copy (and not equal to an
+   unrelated string). */
+static svn_error_t *
+test6(apr_pool_t *pool)
+{
+  svn_stringbuf_t *a = svn_stringbuf_create(phrase_1, pool);
+  svn_stringbuf_t *b = svn_stringbuf_create(phrase_2, pool);
+  svn_stringbuf_t *c = svn_stringbuf_dup(a, pool);
+
+  /* Test that length, data, and null-termination are correct. */
+  if ((svn_stringbuf_compare(a, c)) && (! svn_stringbuf_compare(b, c)))
+    return SVN_NO_ERROR;
+  else
+    return fail(pool, "test failed");
+}
+
+
+static svn_error_t *
+test7(apr_pool_t *pool)
+{
+ char *tmp;
+ size_t tmp_len;
+
+ svn_stringbuf_t *c = svn_stringbuf_create(phrase_2, pool);
+
+ tmp_len = c->len;
+ tmp = apr_palloc(pool, c->len + 1);
+ strcpy(tmp, c->data);
+
+ svn_stringbuf_chop(c, 11);
+
+ if ((c->len == (tmp_len - 11))
+ && (strncmp(tmp, c->data, c->len) == 0)
+ && (c->data[c->len] == '\0'))
+ return SVN_NO_ERROR;
+ else
+ return fail(pool, "test failed");
+}
+
+
+/* svn_stringbuf_setempty() must leave length 0 and an empty C string. */
+static svn_error_t *
+test8(apr_pool_t *pool)
+{
+  svn_stringbuf_t *c = svn_stringbuf_create(phrase_2, pool);
+
+  svn_stringbuf_setempty(c);
+
+  if ((c->len == 0) && (c->data[0] == '\0'))
+    return SVN_NO_ERROR;
+  else
+    return fail(pool, "test failed");
+}
+
+
+/* svn_stringbuf_fillchar() must overwrite every byte with '#' while
+   preserving length and NUL-termination.  (phrase_1 is 7 bytes, hence
+   the "#######" comparison.) */
+static svn_error_t *
+test9(apr_pool_t *pool)
+{
+  svn_stringbuf_t *a = svn_stringbuf_create(phrase_1, pool);
+
+  svn_stringbuf_fillchar(a, '#');
+
+  if ((strcmp(a->data, "#######") == 0)
+      && ((strncmp(a->data, "############", a->len - 1)) == 0)
+      && (a->data[(a->len - 1)] == '#')
+      && (a->data[(a->len)] == '\0'))
+    return SVN_NO_ERROR;
+  else
+    return fail(pool, "test failed");
+}
+
+
+
+/* Check the stringbuf allocation strategy: a snug but not oversized
+   initial block, and more-than-doubling growth on a long append. */
+static svn_error_t *
+test10(apr_pool_t *pool)
+{
+  svn_stringbuf_t *s, *t;
+  size_t len_1 = 0;
+  size_t block_len_1 = 0;
+  size_t block_len_2 = 0;
+
+  s = svn_stringbuf_create("a small string", pool);
+  len_1 = (s->len);
+  block_len_1 = (s->blocksize);
+
+  t = svn_stringbuf_create(", plus a string more than twice as long", pool);
+  svn_stringbuf_appendstr(s, t);
+  block_len_2 = (s->blocksize);
+
+  /* Test that:
+   *   - The initial block was at least the right fit.
+   *   - The initial block was not excessively large.
+   *   - The block more than doubled (because second string so long).
+   */
+  if ((len_1 <= (block_len_1 - 1))
+      && ((block_len_1 - len_1) <= APR_ALIGN_DEFAULT(1))
+      && ((block_len_2 / block_len_1) > 2))
+    return SVN_NO_ERROR;
+  else
+    return fail(pool, "test failed");
+}
+
+
+/* svn_stringbuf_createf() must format like printf. */
+static svn_error_t *
+test11(apr_pool_t *pool)
+{
+  svn_stringbuf_t *s;
+
+  s = svn_stringbuf_createf(pool,
+                            "This %s is used in test %d.",
+                            "string",
+                            12);
+
+  if (strcmp(s->data, "This string is used in test 12.") == 0)
+    return SVN_NO_ERROR;
+  else
+    return fail(pool, "test failed");
+}
+
+/* Verify that STRING consists of exactly REPEAT copies of the
+   FTEXT_LEN-byte text FTEXT, a single embedded NUL byte, then REPEAT
+   more copies of FTEXT -- the layout written by test12() below. */
+static svn_error_t *
+check_string_contents(svn_stringbuf_t *string,
+                      const char *ftext,
+                      apr_size_t ftext_len,
+                      int repeat,
+                      apr_pool_t *pool)
+{
+  const char *data;
+  apr_size_t len;
+  int i;
+
+  data = string->data;
+  len = string->len;
+  /* First run of FTEXT copies. */
+  for (i = 0; i < repeat; ++i)
+    {
+      if (len < ftext_len || memcmp(ftext, data, ftext_len))
+        return fail(pool, "comparing failed");
+      data += ftext_len;
+      len -= ftext_len;
+    }
+  /* The embedded NUL byte. */
+  if (len < 1 || memcmp(data, "\0", 1))
+    return fail(pool, "comparing failed");
+  data += 1;
+  len -= 1;
+  /* Second run of FTEXT copies. */
+  for (i = 0; i < repeat; ++i)
+    {
+      if (len < ftext_len || memcmp(ftext, data, ftext_len))
+        return fail(pool, "comparing failed");
+      data += ftext_len;
+      len -= ftext_len;
+    }
+
+  /* No trailing bytes allowed. */
+  if (len)
+    return fail(pool, "comparing failed");
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Write a large file containing an embedded NUL byte, then read it back
+   via both svn_stringbuf_from_file() and svn_stringbuf_from_aprfile()
+   and verify the contents each time. */
+static svn_error_t *
+test12(apr_pool_t *pool)
+{
+  svn_stringbuf_t *s;
+  const char fname[] = "string-test.tmp";
+  apr_file_t *file;
+  apr_status_t status;
+  apr_size_t len;
+  int i, repeat;
+  const char ftext[] =
+    "Just some boring text. Avoiding newlines 'cos I don't know"
+    "if any of the Subversion platfoms will mangle them! There's no"
+    "need to test newline handling here anyway, it's not relevant.";
+
+  status = apr_file_open(&file, fname, APR_WRITE | APR_TRUNCATE | APR_CREATE,
+                         APR_OS_DEFAULT, pool);
+  if (status)
+    return fail(pool, "opening file");
+
+  repeat = 100;
+
+  /* Some text */
+  for (i = 0; i < repeat; ++i)
+    {
+      status = apr_file_write_full(file, ftext, sizeof(ftext) - 1, &len);
+      if (status)
+        return fail(pool, "writing file");
+    }
+
+  /* A null byte, I don't *think* any of our platforms mangle these */
+  status = apr_file_write_full(file, "\0", 1, &len);
+  if (status)
+    return fail(pool, "writing file");
+
+  /* Some more text */
+  for (i = 0; i < repeat; ++i)
+    {
+      status = apr_file_write_full(file, ftext, sizeof(ftext) - 1, &len);
+      if (status)
+        return fail(pool, "writing file");
+    }
+
+  status = apr_file_close(file);
+  if (status)
+    return fail(pool, "closing file");
+
+  /* Read back by file name. */
+  SVN_ERR(svn_stringbuf_from_file(&s, fname, pool));
+  SVN_ERR(check_string_contents(s, ftext, sizeof(ftext) - 1, repeat, pool));
+
+  /* Reset to avoid false positives */
+  s = NULL;
+
+  /* Read back through an already-open APR file handle. */
+  status = apr_file_open(&file, fname, APR_READ, APR_OS_DEFAULT, pool);
+  if (status)
+    return fail(pool, "opening file");
+
+  SVN_ERR(svn_stringbuf_from_aprfile(&s, file, pool));
+  SVN_ERR(check_string_contents(s, ftext, sizeof(ftext) - 1, repeat, pool));
+
+  status = apr_file_close(file);
+  if (status)
+    return fail(pool, "closing file");
+
+  status = apr_file_remove(fname, pool);
+  if (status)
+    return fail(pool, "removing file");
+
+  return SVN_NO_ERROR;
+}
+
+/* Helper function for checking correctness of find_char_backward:
+   svn_stringbuf_find_char_backward() over DATA must return POS for
+   character CH.  (LEN is unused; kept for the existing call sites.) */
+static svn_error_t *
+test_find_char_backward(const char* data,
+                        apr_size_t len,
+                        char ch,
+                        apr_size_t pos,
+                        apr_pool_t *pool)
+{
+  apr_size_t i;
+
+  svn_stringbuf_t *a = svn_stringbuf_create(data, pool);
+  i = svn_stringbuf_find_char_backward(a, ch);
+
+  if (i == pos)
+    return SVN_NO_ERROR;
+  else
+    return fail(pool, "test failed");
+}
+
+/* find_char_backward: match in the middle of the string. */
+static svn_error_t *
+test13(apr_pool_t *pool)
+{
+  svn_stringbuf_t *a = svn_stringbuf_create("test, test", pool);
+
+  return test_find_char_backward(a->data, a->len, ',', 4, pool);
+}
+
+/* find_char_backward: match at the very first byte. */
+static svn_error_t *
+test14(apr_pool_t *pool)
+{
+  svn_stringbuf_t *a = svn_stringbuf_create(",test test", pool);
+
+  return test_find_char_backward(a->data, a->len, ',', 0, pool);
+}
+
+/* find_char_backward: match at the last byte. */
+static svn_error_t *
+test15(apr_pool_t *pool)
+{
+  svn_stringbuf_t *a = svn_stringbuf_create("testing,", pool);
+
+  return test_find_char_backward(a->data,
+                                 a->len,
+                                 ',',
+                                 a->len - 1,
+                                 pool);
+}
+
+/* find_char_backward: empty string -> expected position 0. */
+static svn_error_t *
+test16(apr_pool_t *pool)
+{
+  svn_stringbuf_t *a = svn_stringbuf_create_empty(pool);
+
+  return test_find_char_backward(a->data, a->len, ',', 0, pool);
+}
+
+/* find_char_backward: no match -> expected position is the length. */
+static svn_error_t *
+test17(apr_pool_t *pool)
+{
+  svn_stringbuf_t *a = svn_stringbuf_create("test test test", pool);
+
+  return test_find_char_backward(a->data,
+                                 a->len,
+                                 ',',
+                                 a->len,
+                                 pool);
+}
+
+/* Helper: svn_stringbuf_first_non_whitespace() over STR must return
+   index POS. */
+static svn_error_t *
+test_first_non_whitespace(const char *str,
+                          const apr_size_t pos,
+                          apr_pool_t *pool)
+{
+  apr_size_t i;
+
+  svn_stringbuf_t *a = svn_stringbuf_create(str, pool);
+
+  i = svn_stringbuf_first_non_whitespace(a);
+
+  if (i == pos)
+    return SVN_NO_ERROR;
+  else
+    return fail(pool, "test failed");
+}
+
+/* first_non_whitespace: leading spaces and a tab. */
+static svn_error_t *
+test18(apr_pool_t *pool)
+{
+  return test_first_non_whitespace("   \ttest", 4, pool);
+}
+
+/* first_non_whitespace: no leading whitespace. */
+static svn_error_t *
+test19(apr_pool_t *pool)
+{
+  return test_first_non_whitespace("test", 0, pool);
+}
+
+/* first_non_whitespace: all-whitespace input returns its length. */
+static svn_error_t *
+test20(apr_pool_t *pool)
+{
+  return test_first_non_whitespace("   ", 3, pool);
+}
+
+/* svn_stringbuf_strip_whitespace() must remove whitespace from both
+   ends, leaving just "test". */
+static svn_error_t *
+test21(apr_pool_t *pool)
+{
+  svn_stringbuf_t *a = svn_stringbuf_create(" \ttest\t\t \t ", pool);
+  svn_stringbuf_t *b = svn_stringbuf_create("test", pool);
+
+  svn_stringbuf_strip_whitespace(a);
+
+  if (svn_stringbuf_compare(a, b))
+    return SVN_NO_ERROR;
+  else
+    return fail(pool, "test failed");
+}
+
+static svn_error_t *
+test_stringbuf_unequal(const char* str1,
+ const char* str2,
+ apr_pool_t *pool)
+{
+ svn_stringbuf_t *a = svn_stringbuf_create(str1, pool);
+ svn_stringbuf_t *b = svn_stringbuf_create(str2, pool);
+
+ if (svn_stringbuf_compare(a, b))
+ return fail(pool, "test failed");
+ else
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test22(apr_pool_t *pool)
+{
+ return test_stringbuf_unequal("abc", "abcd", pool);
+}
+
+static svn_error_t *
+test23(apr_pool_t *pool)
+{
+ return test_stringbuf_unequal("abc", "abb", pool);
+}
+
+static svn_error_t *
+test24(apr_pool_t *pool)
+{
+ char buffer[SVN_INT64_BUFFER_SIZE];
+ apr_size_t length;
+
+ length = svn__i64toa(buffer, 0);
+ SVN_TEST_ASSERT(length == 1);
+ SVN_TEST_STRING_ASSERT(buffer, "0");
+
+ length = svn__i64toa(buffer, APR_INT64_MIN);
+ SVN_TEST_ASSERT(length == 20);
+ SVN_TEST_STRING_ASSERT(buffer, "-9223372036854775808");
+
+ length = svn__i64toa(buffer, APR_INT64_MAX);
+ SVN_TEST_ASSERT(length == 19);
+ SVN_TEST_STRING_ASSERT(buffer, "9223372036854775807");
+
+ length = svn__ui64toa(buffer, 0u);
+ SVN_TEST_ASSERT(length == 1);
+ SVN_TEST_STRING_ASSERT(buffer, "0");
+
+ length = svn__ui64toa(buffer, APR_UINT64_MAX);
+ SVN_TEST_ASSERT(length == 20);
+ SVN_TEST_STRING_ASSERT(buffer, "18446744073709551615");
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+sub_test_base36(apr_uint64_t value, const char *base36)
+{
+ char buffer[SVN_INT64_BUFFER_SIZE];
+ apr_size_t length;
+ apr_size_t expected_length = strlen(base36);
+ const char *end = buffer;
+ apr_uint64_t result;
+
+ length = svn__ui64tobase36(buffer, value);
+ SVN_TEST_ASSERT(length == expected_length);
+ SVN_TEST_STRING_ASSERT(buffer, base36);
+
+ result = svn__base36toui64(&end, buffer);
+ SVN_TEST_ASSERT(end - buffer == length);
+ SVN_TEST_ASSERT(result == value);
+
+ result = svn__base36toui64(NULL, buffer);
+ SVN_TEST_ASSERT(result == value);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_base36(apr_pool_t *pool)
+{
+ SVN_ERR(sub_test_base36(0, "0"));
+ SVN_ERR(sub_test_base36(APR_UINT64_C(1234567890), "kf12oi"));
+ SVN_ERR(sub_test_base36(APR_UINT64_C(0x7fffffffffffffff), "1y2p0ij32e8e7"));
+ SVN_ERR(sub_test_base36(APR_UINT64_C(0x8000000000000000), "1y2p0ij32e8e8"));
+ SVN_ERR(sub_test_base36(APR_UINT64_MAX, "3w5e11264sgsf"));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+expect_stringbuf_equal(const svn_stringbuf_t* str1,
+ const char* str2,
+ apr_pool_t *pool)
+{
+ if (svn_stringbuf_compare(str1, svn_stringbuf_create(str2, pool)))
+ return SVN_NO_ERROR;
+ else
+ return fail(pool, "test failed");
+}
+
+static svn_error_t *
+test_stringbuf_insert(apr_pool_t *pool)
+{
+ svn_stringbuf_t *a = svn_stringbuf_create("st , ", pool);
+
+ svn_stringbuf_insert(a, 0, "teflon", 2);
+ SVN_TEST_STRING_ASSERT(a->data, "test , ");
+
+ svn_stringbuf_insert(a, 5, "hllo", 4);
+ SVN_TEST_STRING_ASSERT(a->data, "test hllo, ");
+
+ svn_stringbuf_insert(a, 6, a->data + 1, 1);
+ SVN_TEST_STRING_ASSERT(a->data, "test hello, ");
+
+ svn_stringbuf_insert(a, 12, "world class", 5);
+ SVN_TEST_STRING_ASSERT(a->data, "test hello, world");
+
+ svn_stringbuf_insert(a, 1200, "!", 1);
+ SVN_TEST_STRING_ASSERT(a->data, "test hello, world!");
+
+ svn_stringbuf_insert(a, 4, "\0-\0", 3);
+ SVN_TEST_ASSERT(svn_stringbuf_compare(a,
+ svn_stringbuf_ncreate("test\0-\0 hello, world!",
+ 21, pool)));
+
+ svn_stringbuf_insert(a, 14, a->data + 4, 3);
+ SVN_TEST_ASSERT(svn_stringbuf_compare(a,
+ svn_stringbuf_ncreate("test\0-\0 hello,\0-\0 world!",
+ 24, pool)));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_stringbuf_remove(apr_pool_t *pool)
+{
+ svn_stringbuf_t *a = svn_stringbuf_create("test hello, world!", pool);
+
+ svn_stringbuf_remove(a, 0, 2);
+ SVN_TEST_STRING_ASSERT(a->data, "st hello, world!");
+
+ svn_stringbuf_remove(a, 2, 2);
+ SVN_TEST_STRING_ASSERT(a->data, "stello, world!");
+
+ svn_stringbuf_remove(a, 5, 200);
+ SVN_TEST_STRING_ASSERT(a->data, "stell");
+
+ svn_stringbuf_remove(a, 1200, 393);
+ SVN_ERR(expect_stringbuf_equal(a, "stell", pool));
+
+ svn_stringbuf_remove(a, APR_SIZE_MAX, 2);
+ SVN_ERR(expect_stringbuf_equal(a, "stell", pool));
+
+ svn_stringbuf_remove(a, 1, APR_SIZE_MAX);
+ SVN_ERR(expect_stringbuf_equal(a, "s", pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_stringbuf_replace(apr_pool_t *pool)
+{
+ svn_stringbuf_t *a = svn_stringbuf_create("odd with some world?", pool);
+
+ svn_stringbuf_replace(a, 0, 3, "tester", 4);
+ SVN_TEST_STRING_ASSERT(a->data, "test with some world?");
+
+ svn_stringbuf_replace(a, 5, 10, "hllo, coder", 6);
+ SVN_TEST_STRING_ASSERT(a->data, "test hllo, world?");
+
+ svn_stringbuf_replace(a, 6, 0, a->data + 1, 1);
+ SVN_TEST_STRING_ASSERT(a->data, "test hello, world?");
+
+ svn_stringbuf_replace(a, 17, 10, "!", 1);
+ SVN_TEST_STRING_ASSERT(a->data, "test hello, world!");
+
+ svn_stringbuf_replace(a, 1200, 199, "!!", 2);
+ SVN_TEST_STRING_ASSERT(a->data, "test hello, world!!!");
+
+ svn_stringbuf_replace(a, 10, 2, "\0-\0", 3);
+ SVN_TEST_ASSERT(svn_stringbuf_compare(a,
+ svn_stringbuf_ncreate("test hello\0-\0world!!!",
+ 21, pool)));
+
+ svn_stringbuf_replace(a, 10, 3, a->data + 10, 3);
+ SVN_TEST_ASSERT(svn_stringbuf_compare(a,
+ svn_stringbuf_ncreate("test hello\0-\0world!!!",
+ 21, pool)));
+
+ svn_stringbuf_replace(a, 19, 1, a->data + 10, 3);
+ SVN_TEST_ASSERT(svn_stringbuf_compare(a,
+ svn_stringbuf_ncreate("test hello\0-\0world!\0-\0!",
+ 23, pool)));
+
+ svn_stringbuf_replace(a, 1, APR_SIZE_MAX, "x", 1);
+ SVN_ERR(expect_stringbuf_equal(a, "tx", pool));
+
+ svn_stringbuf_replace(a, APR_SIZE_MAX, APR_SIZE_MAX, "y", 1);
+ SVN_ERR(expect_stringbuf_equal(a, "txy", pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_string_similarity(apr_pool_t *pool)
+{
+ const struct sim_score_test_t
+ {
+ const char *stra;
+ const char *strb;
+ apr_size_t lcs;
+ unsigned int score;
+ } tests[] =
+ {
+#define SCORE(lcs, len) \
+ ((2 * SVN_STRING__SIM_RANGE_MAX * (lcs) + (len)/2) / (len))
+
+ /* Equality */
+ {"", "", 0, SVN_STRING__SIM_RANGE_MAX},
+ {"quoth", "quoth", 5, SCORE(5, 5+5)},
+
+ /* Deletion at start */
+ {"quoth", "uoth", 4, SCORE(4, 5+4)},
+ {"uoth", "quoth", 4, SCORE(4, 4+5)},
+
+ /* Deletion at end */
+ {"quoth", "quot", 4, SCORE(4, 5+4)},
+ {"quot", "quoth", 4, SCORE(4, 4+5)},
+
+ /* Insertion at start */
+ {"quoth", "Xquoth", 5, SCORE(5, 5+6)},
+ {"Xquoth", "quoth", 5, SCORE(5, 6+5)},
+
+ /* Insertion at end */
+ {"quoth", "quothX", 5, SCORE(5, 5+6)},
+ {"quothX", "quoth", 5, SCORE(5, 6+5)},
+
+ /* Insertion in middle */
+ {"quoth", "quoXth", 5, SCORE(5, 5+6)},
+ {"quoXth", "quoth", 5, SCORE(5, 6+5)},
+
+ /* Transposition at start */
+ {"quoth", "uqoth", 4, SCORE(4, 5+5)},
+ {"uqoth", "quoth", 4, SCORE(4, 5+5)},
+
+ /* Transposition at end */
+ {"quoth", "quoht", 4, SCORE(4, 5+5)},
+ {"quoht", "quoth", 4, SCORE(4, 5+5)},
+
+ /* Transposition in middle */
+ {"quoth", "qutoh", 4, SCORE(4, 5+5)},
+ {"qutoh", "quoth", 4, SCORE(4, 5+5)},
+
+ /* Difference */
+ {"quoth", "raven", 0, SCORE(0, 5+5)},
+ {"raven", "quoth", 0, SCORE(0, 5+5)},
+ {"x", "", 0, SCORE(0, 1+0)},
+ {"", "x", 0, SCORE(0, 0+1)},
+ {"", "quoth", 0, SCORE(0, 0+5)},
+ {"quoth", "", 0, SCORE(0, 5+0)},
+ {"quoth", "the raven", 2, SCORE(2, 5+9)},
+ {"the raven", "quoth", 2, SCORE(2, 5+9)},
+ {NULL, NULL}
+ };
+
+ const struct sim_score_test_t *t;
+ svn_membuf_t buffer;
+
+ svn_membuf__create(&buffer, 0, pool);
+ for (t = tests; t->stra; ++t)
+ {
+ apr_size_t lcs;
+ const apr_size_t score =
+ svn_cstring__similarity(t->stra, t->strb, &buffer, &lcs);
+ /*
+ fprintf(stderr,
+ "lcs %s ~ %s score %.6f (%"APR_SIZE_T_FMT
+ ") expected %.6f (%"APR_SIZE_T_FMT"))\n",
+ t->stra, t->strb, score/1.0/SVN_STRING__SIM_RANGE_MAX,
+ lcs, t->score/1.0/SVN_STRING__SIM_RANGE_MAX, t->lcs);
+ */
+ if (score != t->score)
+ return fail(pool, "%s ~ %s score %.6f <> expected %.6f",
+ t->stra, t->strb,
+ score/1.0/SVN_STRING__SIM_RANGE_MAX,
+ t->score/1.0/SVN_STRING__SIM_RANGE_MAX);
+
+ if (lcs != t->lcs)
+ return fail(pool,
+ "%s ~ %s lcs %"APR_SIZE_T_FMT
+ " <> expected %"APR_SIZE_T_FMT,
+ t->stra, t->strb, lcs, t->lcs);
+ }
+
+ /* Test partial similarity */
+ {
+ const svn_string_t foo = {"svn:foo", 4};
+ const svn_string_t bar = {"svn:bar", 4};
+ if (SVN_STRING__SIM_RANGE_MAX
+ != svn_string__similarity(&foo, &bar, &buffer, NULL))
+ return fail(pool, "'%s'[:4] ~ '%s'[:4] found different",
+ foo.data, bar.data);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_string_matching(apr_pool_t *pool)
+{
+ const struct test_data_t
+ {
+ const char *a;
+ const char *b;
+ apr_size_t match_len;
+ apr_size_t rmatch_len;
+ }
+ tests[] =
+ {
+ /* edge cases */
+ {"", "", 0, 0},
+ {"", "x", 0, 0},
+ {"x", "", 0, 0},
+ {"x", "x", 1, 1},
+ {"", "1234567890abcdef", 0, 0},
+ {"1234567890abcdef", "", 0, 0},
+ {"1234567890abcdef", "1234567890abcdef", 16, 16},
+
+ /* left-side matches */
+ {"x", "y", 0, 0},
+ {"ax", "ay", 1, 0},
+ {"ax", "a", 1, 0},
+ {"a", "ay", 1, 0},
+ {"1234567890abcdef", "1234567890abcdeg", 15, 0},
+ {"1234567890abcdef_", "1234567890abcdefg", 16, 0},
+ {"12345678_0abcdef", "1234567890abcdeg", 8, 0},
+ {"1234567890abcdef", "12345678", 8, 0},
+ {"12345678", "1234567890abcdef", 8, 0},
+ {"12345678_0ab", "1234567890abcdef", 8, 0},
+
+ /* right-side matches */
+ {"xa", "ya", 0, 1},
+ {"xa", "a", 0, 1},
+ {"a", "ya", 0, 1},
+ {"_234567890abcdef", "1234567890abcdef", 0, 15},
+ {"_1234567890abcdef", "x1234567890abcdef", 0, 16},
+ {"1234567_90abcdef", "_1234567890abcdef", 0, 8},
+ {"1234567890abcdef", "90abcdef", 0, 8},
+ {"90abcdef", "1234567890abcdef", 0, 8},
+ {"8_0abcdef", "7890abcdef", 0, 7},
+
+ /* two-side matches */
+ {"bxa", "bya", 1, 1},
+ {"bxa", "ba", 1, 1},
+ {"ba", "bya", 1, 1},
+ {"1234567_90abcdef", "1234567890abcdef", 7, 8},
+ {"12345678_90abcdef", "1234567890abcdef", 8, 8},
+ {"12345678_0abcdef", "1234567890abcdef", 8, 7},
+ {"123456_abcdef", "1234sdffdssdf567890abcdef", 4, 6},
+ {"1234567890abcdef", "12345678ef", 8, 2},
+ {"x_234567890abcdef", "x1234567890abcdef", 1, 15},
+ {"1234567890abcdefx", "1234567890abcdex", 15, 1},
+
+ /* list terminator */
+ {NULL}
+ };
+
+ const struct test_data_t *test;
+ for (test = tests; test->a != NULL; ++test)
+ {
+ apr_size_t a_len = strlen(test->a);
+ apr_size_t b_len = strlen(test->b);
+ apr_size_t max_match = MIN(a_len, b_len);
+ apr_size_t match_len
+ = svn_cstring__match_length(test->a, test->b, max_match);
+ apr_size_t rmatch_len
+ = svn_cstring__reverse_match_length(test->a + a_len, test->b + b_len,
+ max_match);
+
+ SVN_TEST_ASSERT(match_len == test->match_len);
+ SVN_TEST_ASSERT(rmatch_len == test->rmatch_len);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_cstring_skip_prefix(apr_pool_t *pool)
+{
+ SVN_TEST_STRING_ASSERT(svn_cstring_skip_prefix("12345", "12345"),
+ "");
+ SVN_TEST_STRING_ASSERT(svn_cstring_skip_prefix("12345", "123"),
+ "45");
+ SVN_TEST_STRING_ASSERT(svn_cstring_skip_prefix("12345", ""),
+ "12345");
+ SVN_TEST_STRING_ASSERT(svn_cstring_skip_prefix("12345", "23"),
+ NULL);
+ SVN_TEST_STRING_ASSERT(svn_cstring_skip_prefix("1", "12"),
+ NULL);
+ SVN_TEST_STRING_ASSERT(svn_cstring_skip_prefix("", ""),
+ "");
+ SVN_TEST_STRING_ASSERT(svn_cstring_skip_prefix("", "12"),
+ NULL);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_stringbuf_replace_all(apr_pool_t *pool)
+{
+ svn_stringbuf_t *s = svn_stringbuf_create("abccabcdabc", pool);
+
+ /* no replacement */
+ SVN_TEST_ASSERT(0 == svn_stringbuf_replace_all(s, "xyz", "k"));
+ SVN_TEST_STRING_ASSERT(s->data, "abccabcdabc");
+ SVN_TEST_ASSERT(s->len == 11);
+
+ /* replace at string head: grow */
+ SVN_TEST_ASSERT(1 == svn_stringbuf_replace_all(s, "abcc", "xyabcz"));
+ SVN_TEST_STRING_ASSERT(s->data, "xyabczabcdabc");
+ SVN_TEST_ASSERT(s->len == 13);
+
+ /* replace at string head: shrink */
+ SVN_TEST_ASSERT(1 == svn_stringbuf_replace_all(s, "xyabcz", "abcc"));
+ SVN_TEST_STRING_ASSERT(s->data, "abccabcdabc");
+ SVN_TEST_ASSERT(s->len == 11);
+
+ /* replace at string tail: grow */
+ SVN_TEST_ASSERT(1 == svn_stringbuf_replace_all(s, "dabc", "xyabcz"));
+ SVN_TEST_STRING_ASSERT(s->data, "abccabcxyabcz");
+ SVN_TEST_ASSERT(s->len == 13);
+
+ /* replace at string tail: shrink */
+ SVN_TEST_ASSERT(1 == svn_stringbuf_replace_all(s, "xyabcz", "dabc"));
+ SVN_TEST_STRING_ASSERT(s->data, "abccabcdabc");
+ SVN_TEST_ASSERT(s->len == 11);
+
+ /* replace at multiple locations: grow */
+ SVN_TEST_ASSERT(3 == svn_stringbuf_replace_all(s, "ab", "xyabz"));
+ SVN_TEST_STRING_ASSERT(s->data, "xyabzccxyabzcdxyabzc");
+ SVN_TEST_ASSERT(s->len == 20);
+
+ /* replace at multiple locations: shrink */
+ SVN_TEST_ASSERT(3 == svn_stringbuf_replace_all(s, "xyabz", "ab"));
+ SVN_TEST_STRING_ASSERT(s->data, "abccabcdabc");
+ SVN_TEST_ASSERT(s->len == 11);
+
+ /* replace at multiple locations: same length */
+ SVN_TEST_ASSERT(3 == svn_stringbuf_replace_all(s, "abc", "xyz"));
+ SVN_TEST_STRING_ASSERT(s->data, "xyzcxyzdxyz");
+ SVN_TEST_ASSERT(s->len == 11);
+
+ /* replace at multiple locations: overlapping */
+ s = svn_stringbuf_create("aaaaaaaaaaa", pool);
+ SVN_TEST_ASSERT(5 == svn_stringbuf_replace_all(s, "aa", "aaa"));
+ SVN_TEST_STRING_ASSERT(s->data, "aaaaaaaaaaaaaaaa");
+ SVN_TEST_ASSERT(s->len == 16);
+
+ SVN_TEST_ASSERT(5 == svn_stringbuf_replace_all(s, "aaa", "aa"));
+ SVN_TEST_STRING_ASSERT(s->data, "aaaaaaaaaaa");
+ SVN_TEST_ASSERT(s->len == 11);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_stringbuf_leftchop(apr_pool_t *pool)
+{
+ svn_stringbuf_t *s;
+
+ s = svn_stringbuf_create("abcd", pool);
+ svn_stringbuf_leftchop(s, 0);
+ SVN_TEST_ASSERT(s->len == 4);
+ SVN_TEST_STRING_ASSERT(s->data, "abcd");
+
+ svn_stringbuf_leftchop(s, 2);
+ SVN_TEST_ASSERT(s->len == 2);
+ SVN_TEST_STRING_ASSERT(s->data, "cd");
+
+ svn_stringbuf_leftchop(s, 4);
+ SVN_TEST_ASSERT(s->len == 0);
+ SVN_TEST_STRING_ASSERT(s->data, "");
+
+ s = svn_stringbuf_create("abcd", pool);
+ svn_stringbuf_leftchop(s, 4);
+ SVN_TEST_ASSERT(s->len == 0);
+ SVN_TEST_STRING_ASSERT(s->data, "");
+
+ s = svn_stringbuf_create_empty(pool);
+ svn_stringbuf_leftchop(s, 0);
+ SVN_TEST_ASSERT(s->len == 0);
+ SVN_TEST_STRING_ASSERT(s->data, "");
+
+ svn_stringbuf_leftchop(s, 2);
+ SVN_TEST_ASSERT(s->len == 0);
+ SVN_TEST_STRING_ASSERT(s->data, "");
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_stringbuf_set(apr_pool_t *pool)
+{
+ svn_stringbuf_t *str = svn_stringbuf_create_empty(pool);
+
+ SVN_TEST_STRING_ASSERT(str->data, "");
+ SVN_TEST_INT_ASSERT(str->len, 0);
+
+ svn_stringbuf_set(str, "0123456789");
+ SVN_TEST_STRING_ASSERT(str->data, "0123456789");
+ SVN_TEST_INT_ASSERT(str->len, 10);
+
+ svn_stringbuf_set(str, "");
+ SVN_TEST_STRING_ASSERT(str->data, "");
+ SVN_TEST_INT_ASSERT(str->len, 0);
+
+ svn_stringbuf_set(str, "0123456789abcdef");
+ SVN_TEST_STRING_ASSERT(str->data, "0123456789abcdef");
+ SVN_TEST_INT_ASSERT(str->len, 16);
+
+ svn_stringbuf_set(str, "t");
+ SVN_TEST_STRING_ASSERT(str->data, "t");
+ SVN_TEST_INT_ASSERT(str->len, 1);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_cstring_join(apr_pool_t *pool)
+{
+ apr_array_header_t *arr;
+
+ {
+ arr = apr_array_make(pool, 0, sizeof(const char *));
+
+ SVN_TEST_STRING_ASSERT(svn_cstring_join2(arr, "", FALSE, pool), "");
+ SVN_TEST_STRING_ASSERT(svn_cstring_join2(arr, "", TRUE, pool), "");
+ SVN_TEST_STRING_ASSERT(svn_cstring_join2(arr, ";", FALSE, pool), "");
+ SVN_TEST_STRING_ASSERT(svn_cstring_join2(arr, ";", TRUE, pool), "");
+ }
+
+ {
+ arr = apr_array_make(pool, 0, sizeof(const char *));
+ APR_ARRAY_PUSH(arr, const char *) = "";
+
+ SVN_TEST_STRING_ASSERT(svn_cstring_join2(arr, "", FALSE, pool), "");
+ SVN_TEST_STRING_ASSERT(svn_cstring_join2(arr, "", TRUE, pool), "");
+ SVN_TEST_STRING_ASSERT(svn_cstring_join2(arr, ";", FALSE, pool), "");
+ SVN_TEST_STRING_ASSERT(svn_cstring_join2(arr, ";", TRUE, pool), ";");
+ }
+
+ {
+ arr = apr_array_make(pool, 0, sizeof(const char *));
+ APR_ARRAY_PUSH(arr, const char *) = "ab";
+ APR_ARRAY_PUSH(arr, const char *) = "cd";
+
+ SVN_TEST_STRING_ASSERT(svn_cstring_join2(arr, "", FALSE, pool), "abcd");
+ SVN_TEST_STRING_ASSERT(svn_cstring_join2(arr, "", TRUE, pool), "abcd");
+ SVN_TEST_STRING_ASSERT(svn_cstring_join2(arr, ";", FALSE, pool), "ab;cd");
+ SVN_TEST_STRING_ASSERT(svn_cstring_join2(arr, ";", TRUE, pool), "ab;cd;");
+ SVN_TEST_STRING_ASSERT(svn_cstring_join2(arr, "//", FALSE, pool), "ab//cd");
+ SVN_TEST_STRING_ASSERT(svn_cstring_join2(arr, "//", TRUE, pool), "ab//cd//");
+ }
+
+ {
+ arr = apr_array_make(pool, 0, sizeof(const char *));
+ APR_ARRAY_PUSH(arr, const char *) = "";
+ APR_ARRAY_PUSH(arr, const char *) = "ab";
+ APR_ARRAY_PUSH(arr, const char *) = "";
+
+ SVN_TEST_STRING_ASSERT(svn_cstring_join2(arr, "", FALSE, pool), "ab");
+ SVN_TEST_STRING_ASSERT(svn_cstring_join2(arr, "", TRUE, pool), "ab");
+ SVN_TEST_STRING_ASSERT(svn_cstring_join2(arr, ";", FALSE, pool), ";ab;");
+ SVN_TEST_STRING_ASSERT(svn_cstring_join2(arr, ";", TRUE, pool), ";ab;;");
+ SVN_TEST_STRING_ASSERT(svn_cstring_join2(arr, "//", FALSE, pool), "//ab//");
+ SVN_TEST_STRING_ASSERT(svn_cstring_join2(arr, "//", TRUE, pool), "//ab////");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/*
+ ====================================================================
+ If you add a new test to this file, update this array.
+
+ (These globals are required by our included main())
+*/
+
+/* An array of all test functions */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test1,
+ "make svn_stringbuf_t from cstring"),
+ SVN_TEST_PASS2(test2,
+ "make svn_stringbuf_t from substring of cstring"),
+ SVN_TEST_PASS2(test3,
+ "append svn_stringbuf_t to svn_stringbuf_t"),
+ SVN_TEST_PASS2(test4,
+ "append C string to svn_stringbuf_t"),
+ SVN_TEST_PASS2(test5,
+ "append bytes, then compare two strings"),
+ SVN_TEST_PASS2(test6,
+ "dup two strings, then compare"),
+ SVN_TEST_PASS2(test7,
+ "chopping a string"),
+ SVN_TEST_PASS2(test8,
+ "emptying a string"),
+ SVN_TEST_PASS2(test9,
+ "fill string with hashmarks"),
+ SVN_TEST_PASS2(test10,
+ "block initialization and growth"),
+ SVN_TEST_PASS2(test11,
+ "formatting strings from varargs"),
+ SVN_TEST_PASS2(test12,
+ "create string from file"),
+ SVN_TEST_PASS2(test13,
+ "find_char_backward; middle case"),
+ SVN_TEST_PASS2(test14,
+ "find_char_backward; 0 case"),
+ SVN_TEST_PASS2(test15,
+ "find_char_backward; strlen - 1 case"),
+ SVN_TEST_PASS2(test16,
+ "find_char_backward; len = 0 case"),
+ SVN_TEST_PASS2(test17,
+ "find_char_backward; no occurrence case"),
+ SVN_TEST_PASS2(test18,
+ "check whitespace removal; common case"),
+ SVN_TEST_PASS2(test19,
+ "check whitespace removal; no whitespace case"),
+ SVN_TEST_PASS2(test20,
+ "check whitespace removal; all whitespace case"),
+ SVN_TEST_PASS2(test21,
+ "check that whitespace will be stripped correctly"),
+ SVN_TEST_PASS2(test22,
+ "compare stringbufs; different lengths"),
+ SVN_TEST_PASS2(test23,
+ "compare stringbufs; same length, different content"),
+ SVN_TEST_PASS2(test24,
+ "verify i64toa"),
+ SVN_TEST_PASS2(test_base36,
+ "verify base36 conversion"),
+ SVN_TEST_PASS2(test_stringbuf_insert,
+ "check inserting into svn_stringbuf_t"),
+ SVN_TEST_PASS2(test_stringbuf_remove,
+ "check deletion from svn_stringbuf_t"),
+ SVN_TEST_PASS2(test_stringbuf_replace,
+ "check replacement in svn_stringbuf_t"),
+ SVN_TEST_PASS2(test_string_similarity,
+ "test string similarity scores"),
+ SVN_TEST_PASS2(test_string_matching,
+ "test string matching"),
+ SVN_TEST_PASS2(test_cstring_skip_prefix,
+ "test svn_cstring_skip_prefix()"),
+ SVN_TEST_PASS2(test_stringbuf_replace_all,
+ "test svn_stringbuf_replace_all"),
+ SVN_TEST_PASS2(test_stringbuf_leftchop,
+ "test svn_stringbuf_leftchop"),
+ SVN_TEST_PASS2(test_stringbuf_set,
+ "test svn_stringbuf_set()"),
+ SVN_TEST_PASS2(test_cstring_join,
+ "test svn_cstring_join2()"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/subst_translate-test.c b/subversion/tests/libsvn_subr/subst_translate-test.c
new file mode 100644
index 0000000..67e14ee
--- /dev/null
+++ b/subversion/tests/libsvn_subr/subst_translate-test.c
@@ -0,0 +1,526 @@
+/*
+ * subst_translate-test.c -- test the svn_subst_translate* functions
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <locale.h>
+#include <string.h>
+#include <apr_time.h>
+
+#include "../svn_test.h"
+
+#include "svn_types.h"
+#include "svn_string.h"
+#include "svn_subst.h"
+#include "svn_hash.h"
+
+#define ARRAY_LEN(ary) ((sizeof (ary)) / (sizeof ((ary)[0])))
+
+/* Test inputs and expected output for svn_subst_translate_string2(). */
+struct translate_string2_data_t
+{
+ const char *source;
+ const char *expected_str;
+ svn_boolean_t translated_to_utf8;
+ svn_boolean_t translated_line_endings;
+};
+
+static svn_error_t *
+test_svn_subst_translate_string2(apr_pool_t *pool)
+{
+ static const struct translate_string2_data_t tests[] =
+ {
+ /* No reencoding, no translation of line endings */
+ { "abcdefz",
+ "abcdefz", FALSE, FALSE },
+ /* No reencoding, translation of line endings */
+ { " \r\n\r\n \r\n \r\n",
+ " \n\n \n \n", FALSE, TRUE },
+ /* Reencoding, no translation of line endings */
+ { "\xc7\xa9\xf4\xdf",
+ "\xc3\x87\xc2\xa9\xc3\xb4\xc3\x9f", TRUE, FALSE },
+ /* Reencoding, translation of line endings */
+ { "\xc7\xa9\xf4\xdf\r\n",
+ "\xc3\x87\xc2\xa9\xc3\xb4\xc3\x9f\n", TRUE, TRUE },
+ { NULL, NULL, FALSE, FALSE }
+ };
+ const struct translate_string2_data_t *t;
+
+ for (t = tests; t->source != NULL; t++)
+ {
+ svn_string_t *source_string = svn_string_create(t->source, pool);
+ svn_string_t *new_value = NULL;
+ svn_boolean_t translated_line_endings = ! t->translated_line_endings;
+ svn_boolean_t translated_to_utf8;
+
+ SVN_ERR(svn_subst_translate_string2(&new_value,
+ NULL, &translated_line_endings,
+ source_string, "ISO-8859-1", FALSE,
+ pool, pool));
+ SVN_TEST_STRING_ASSERT(new_value->data, t->expected_str);
+ SVN_TEST_ASSERT(translated_line_endings == t->translated_line_endings);
+
+ new_value = NULL;
+ translated_to_utf8 = ! t->translated_to_utf8;
+ translated_line_endings = ! t->translated_line_endings;
+ SVN_ERR(svn_subst_translate_string2(&new_value, &translated_to_utf8,
+ &translated_line_endings,
+ source_string, "ISO-8859-1", FALSE,
+ pool, pool));
+ SVN_TEST_STRING_ASSERT(new_value->data, t->expected_str);
+ SVN_TEST_ASSERT(translated_to_utf8 == t->translated_to_utf8);
+ SVN_TEST_ASSERT(translated_line_endings == t->translated_line_endings);
+ }
+
+ /* Test that when REPAIR is FALSE, SVN_ERR_IO_INCONSISTENT_EOL is returned. */
+ {
+ svn_string_t *source_string = svn_string_create(" \r \r\n \n ", pool);
+ svn_string_t *new_value = NULL;
+ svn_error_t *err = svn_subst_translate_string2(&new_value, NULL, NULL,
+ source_string,
+ "ISO-8859-1", FALSE, pool,
+ pool);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_IO_INCONSISTENT_EOL);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* The body of the svn_subst_translate_string2_null_encoding test. It should
+ only be called by test_svn_subst_translate_string2_null_encoding(), as this
+ code assumes that the process locale has been changed to a locale that uses
+ either CP-1252 or ISO-8859-1 for the default narrow string encoding. */
+static svn_error_t *
+test_svn_subst_translate_string2_null_encoding_helper(apr_pool_t *pool)
+{
+ {
+ svn_string_t *new_value = NULL;
+ svn_boolean_t translated_to_utf8 = FALSE;
+ svn_boolean_t translated_line_endings = TRUE;
+ /* The 'AE' ligature, which is 0xc6 in both ISO-8859-1 and Windows-1252 */
+ svn_string_t *source_string = svn_string_create("\xc6", pool);
+
+ SVN_ERR(svn_subst_translate_string2(&new_value, &translated_to_utf8,
+ &translated_line_endings,
+ source_string, NULL, FALSE,
+ pool, pool));
+ SVN_TEST_STRING_ASSERT(new_value->data, "\xc3\x86");
+ SVN_TEST_ASSERT(translated_to_utf8);
+ SVN_TEST_ASSERT(!translated_line_endings);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Test that when ENCODING is NULL, the system-default language encoding is used.
+
+ This is a wrapper of test_svn_subst_translate_string2_null_encoding_helper()
+ that ensures that the system-default character encoding is set to either
+ CP-1252 or ISO-8859-1 before test_svn_subst_translate_string2_null_encoding_helper()
+ is called, later restoring the original system-default character encoding. */
+static svn_error_t *
+test_svn_subst_translate_string2_null_encoding(apr_pool_t *pool)
+{
+ char orig_lc_all[256] = { '\0' };
+ svn_error_t *test_result;
+
+ const char *other_locales[] =
+ {
+ /* For Windows' msvcrt */
+ "English.1252", "German.1252", "French.1252",
+
+ /* For glibc */
+ "en_US.ISO-8859-1", "en_GB.ISO-8859-1", "de_DE.ISO-8859-1",
+
+ /* For OpenBSD's libc */
+ "en_US.ISO8859-1", "en_GB.ISO8859-1", "de_DE.ISO8859-1",
+
+ /* Must be last */
+ NULL
+ };
+ const char **other_locale;
+
+ strncpy(orig_lc_all, setlocale(LC_ALL, NULL), sizeof (orig_lc_all) - 1);
+
+ for (other_locale = other_locales; *other_locale != NULL; ++other_locale)
+ {
+ if (setlocale(LC_ALL, *other_locale))
+ break;
+ }
+
+ /* If the end of the list of other locales to try has been reached, then none
+ of the tested locales are installed, so the test must be skipped. */
+ if (*other_locale == NULL)
+ return svn_error_createf(SVN_ERR_TEST_SKIPPED, NULL,
+ "Tried %d locales, but none are installed.",
+ (int) (ARRAY_LEN(other_locales) - 1));
+
+ test_result = test_svn_subst_translate_string2_null_encoding_helper(pool);
+
+ /* Restore the original locale for category LC_ALL. */
+ SVN_TEST_ASSERT(setlocale(LC_ALL, orig_lc_all) != NULL);
+
+ return test_result;
+}
+
+static svn_error_t *
+test_repairing_svn_subst_translate_string2(apr_pool_t *pool)
+{
+ {
+ svn_string_t *source_string = svn_string_create(" \r \r\n \n ", pool);
+ svn_string_t *new_value = NULL;
+ SVN_ERR(svn_subst_translate_string2(&new_value, NULL, NULL, source_string,
+ "ISO-8859-1", TRUE, pool, pool));
+ SVN_TEST_ASSERT(new_value != NULL);
+ SVN_TEST_ASSERT(new_value->data != NULL);
+ SVN_TEST_STRING_ASSERT(new_value->data, " \n \n \n ");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/*static svn_error_t *
+test_svn_subst_copy_and_translate4(apr_pool_t *pool)
+{
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_svn_subst_stream_translated(apr_pool_t *pool)
+{
+ return SVN_NO_ERROR;
+}*/
+
+/* Test inputs and expected output for svn_subst_translate_cstring2(). */
+struct translate_cstring2_data_t
+{
+ const char *source;
+ const char *eol_str;
+ svn_boolean_t repair;
+ const char *expected_str;
+};
+
+static svn_error_t *
+test_svn_subst_translate_cstring2(apr_pool_t *pool)
+{
+ static const struct translate_cstring2_data_t tests[] =
+ {
+ /* Test the unusual case where EOL_STR is an empty string. */
+ { " \r \n\r\n \n\n\n", "", TRUE,
+ " " },
+ /* Test the unusual case where EOL_STR is not a standard EOL string. */
+ { " \r \n\r\n \n\n\n", "z", TRUE,
+ " z zz zzz" },
+ { " \n \n ", "buzz", FALSE,
+ " buzz buzz " },
+ { " \r\n \n", "buzz", TRUE ,
+ " buzz buzz"},
+ { NULL, NULL, FALSE, NULL }
+ };
+ const struct translate_cstring2_data_t *t;
+
+ for (t = tests; t->source != NULL; t++)
+ {
+ const char *result = NULL;
+ SVN_ERR(svn_subst_translate_cstring2(t->source, &result, t->eol_str,
+ t->repair, NULL, FALSE, pool));
+ SVN_TEST_STRING_ASSERT(result, t->expected_str);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_svn_subst_build_keywords3(apr_pool_t *pool)
+{
+ /* Test expansion of custom keywords. */
+ struct keywords_tests_data
+ {
+ const char *keyword_name;
+ const char *keywords_string;
+ const char *expanded_keyword;
+ const char *rev;
+ const char *url;
+ const char *repos_root_url;
+ /* Can't test date since expanded value depends on local clock. */
+ const char *author;
+ }
+ tests[] =
+ {
+ {"FOO", "FOO=%P%_%a%_%b%_%%",
+ "trunk/foo.txt stsp foo.txt %",
+ "1234", "http://svn.example.com/repos/trunk/foo.txt",
+ "http://svn.example.com/repos", "stsp"},
+ {"FOO", "FOO=author%_=%_%a",
+ "author = stsp",
+ "1234", "http://svn.example.com/repos/trunk/foo.txt",
+ "http://svn.example.com/repos", "stsp"},
+ {"MyKeyword", "MyKeyword=%r%_%u%_%_%a",
+ "4567 http://svn.example.com/svn/branches/myfile jrandom",
+ "4567", "http://svn.example.com/svn/branches/myfile",
+ "http://svn.example.com/svn", "jrandom"},
+ {"FreeBSD", "FreeBSD=%H",
+ "head/README 222812 joel", /* date is not expanded in this test */
+ "222812", "http://svn.freebsd.org/base/head/README",
+ "http://svn.freebsd.org/base", "joel"},
+ {"FreeBSD", "FreeBSD=%I",
+ "README 222812 joel", /* date is not expanded in this test */
+ "222812", "http://svn.freebsd.org/base/head/README",
+ "http://svn.freebsd.org/base", "joel"},
+ { NULL, NULL, NULL, NULL, NULL, NULL, NULL}
+ };
+
+ const struct keywords_tests_data *t;
+
+ for (t = tests; t->keyword_name != NULL; t++)
+ {
+ apr_hash_t *kw;
+ svn_string_t *expanded_keyword;
+
+ SVN_ERR(svn_subst_build_keywords3(&kw, t->keywords_string,
+ t->rev, t->url, t->repos_root_url,
+ 0 /* date */, t->author, pool));
+ expanded_keyword = svn_hash_gets(kw, t->keyword_name);
+ SVN_TEST_ASSERT(expanded_keyword != NULL);
+ SVN_TEST_STRING_ASSERT(expanded_keyword->data, t->expanded_keyword);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_svn_subst_truncated_keywords(apr_pool_t *pool)
+{
+ svn_string_t *src_string
+ = svn_string_create("$Qq: "
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "012345678901234567890123456789012345678901234567"
+ " $", pool);
+ svn_stream_t *src_stream = svn_stream_from_string(src_string, pool);
+ svn_stringbuf_t *dst_stringbuf = svn_stringbuf_create_empty(pool);
+ svn_stream_t *dst_stream = svn_stream_from_stringbuf(dst_stringbuf, pool);
+ apr_hash_t *keywords = apr_hash_make(pool);
+ svn_string_t *expanded
+ = svn_string_create("01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "012345678901234567890123456789012345678901234567"
+ "xxxxxxxxxx",
+ pool);
+
+ /* The source is already at the maximum length. */
+ SVN_TEST_ASSERT(src_string->len == SVN_KEYWORD_MAX_LEN);
+
+ svn_hash_sets(keywords, "Qq", expanded);
+ dst_stream = svn_subst_stream_translated(dst_stream, NULL, FALSE, keywords,
+ TRUE, pool);
+ SVN_ERR(svn_stream_copy3(src_stream, dst_stream, NULL, NULL, pool));
+
+ /* The expanded value would make the keyword longer than the maximum
+ allowed so it must be truncated; the remaining part of the
+ expanded value is the same as the source. */
+ SVN_TEST_STRING_ASSERT(dst_stringbuf->data, src_string->data);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_one_long_keyword(const char *keyword,
+ const char *expected,
+ apr_pool_t *pool)
+{
+ svn_string_t *src_string;
+ svn_stream_t *src_stream, *dst_stream;
+ svn_stringbuf_t *dst_stringbuf, *src_stringbuf;
+ apr_hash_t *keywords = apr_hash_make(pool);
+ svn_string_t *expanded = svn_string_create("abcdefg", pool);
+
+ svn_hash_sets(keywords, keyword, expanded);
+
+ /* Expand */
+ src_string = svn_string_createf(pool, "$%s$", keyword);
+ src_stream = svn_stream_from_string(src_string, pool);
+ dst_stringbuf = svn_stringbuf_create_empty(pool);
+ dst_stream = svn_stream_from_stringbuf(dst_stringbuf, pool);
+ dst_stream = svn_subst_stream_translated(dst_stream, NULL, FALSE, keywords,
+ TRUE, pool);
+ SVN_ERR(svn_stream_copy3(src_stream, dst_stream, NULL, NULL, pool));
+
+ SVN_TEST_STRING_ASSERT(dst_stringbuf->data, expected);
+
+ /* Unexpand */
+ src_stringbuf = dst_stringbuf;
+ src_stream = svn_stream_from_stringbuf(src_stringbuf, pool);
+ dst_stringbuf = svn_stringbuf_create_empty(pool);
+ dst_stream = svn_stream_from_stringbuf(dst_stringbuf, pool);
+ dst_stream = svn_subst_stream_translated(dst_stream, NULL, FALSE, keywords,
+ FALSE, pool);
+ SVN_ERR(svn_stream_copy3(src_stream, dst_stream, NULL, NULL, pool));
+
+ SVN_TEST_STRING_ASSERT(dst_stringbuf->data, src_string->data);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_svn_subst_long_keywords(apr_pool_t *pool)
+{
+ /* The longest keyword that can be expanded to a value: there is
+ space for one character in the expanded value. */
+ const char keyword_p1[]
+ = "Q"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "012345678901234567890123456789012345678901234567";
+
+ /* The longest keyword that can be expanded: the value is empty. */
+ const char keyword_z[]
+ = "Q"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "0123456789012345678901234567890123456789012345678";
+
+ /* One more than the longest keyword that can be expanded. */
+ const char keyword_m1[]
+ = "Q"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789";
+
+ /* Two more than the longest keyword that can be expanded. */
+ const char keyword_m2[]
+ = "Q"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "0";
+
+ /* Three more than the longest keyword that can be expanded. */
+ const char keyword_m3[]
+ = "Q"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01";
+
+ /* Four more than the longest keyword that can be expanded. */
+ const char keyword_m4[]
+ = "Q"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "012";
+
+ /* Five more than the longest keyword that can be expanded. */
+ const char keyword_m5[]
+ = "Q"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "0123";
+
+ /* Six more than the longest keyword that can be expanded. */
+ const char keyword_m6[]
+ = "Q"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234567890123456789012345678901234567890123456789"
+ "01234";
+
+ SVN_ERR(test_one_long_keyword(keyword_p1,
+ apr_psprintf(pool, "$%s: a $", keyword_p1),
+ pool));
+
+ SVN_ERR(test_one_long_keyword(keyword_z,
+ apr_psprintf(pool, "$%s: $", keyword_z),
+ pool));
+
+ SVN_ERR(test_one_long_keyword(keyword_m1,
+ apr_psprintf(pool, "$%s$", keyword_m1),
+ pool));
+
+ SVN_ERR(test_one_long_keyword(keyword_m2,
+ apr_psprintf(pool, "$%s$", keyword_m2),
+ pool));
+
+ SVN_ERR(test_one_long_keyword(keyword_m3,
+ apr_psprintf(pool, "$%s$", keyword_m3),
+ pool));
+
+ SVN_ERR(test_one_long_keyword(keyword_m4,
+ apr_psprintf(pool, "$%s$", keyword_m4),
+ pool));
+
+ SVN_ERR(test_one_long_keyword(keyword_m5,
+ apr_psprintf(pool, "$%s$", keyword_m5),
+ pool));
+
+ SVN_ERR(test_one_long_keyword(keyword_m6,
+ apr_psprintf(pool, "$%s$", keyword_m6),
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_svn_subst_translate_string2,
+ "test svn_subst_translate_string2()"),
+ SVN_TEST_PASS2(test_svn_subst_translate_string2_null_encoding,
+ "test svn_subst_translate_string2(encoding = NULL)"),
+ SVN_TEST_PASS2(test_repairing_svn_subst_translate_string2,
+ "test repairing svn_subst_translate_string2()"),
+ SVN_TEST_PASS2(test_svn_subst_translate_cstring2,
+ "test svn_subst_translate_cstring2()"),
+ SVN_TEST_PASS2(test_svn_subst_build_keywords3,
+ "test svn_subst_build_keywords3()"),
+ SVN_TEST_PASS2(test_svn_subst_truncated_keywords,
+ "test truncated keywords (issue 4349)"),
+ SVN_TEST_PASS2(test_svn_subst_long_keywords,
+ "test long keywords (issue 4350)"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
+
diff --git a/subversion/tests/libsvn_subr/time-test.c b/subversion/tests/libsvn_subr/time-test.c
new file mode 100644
index 0000000..51fbe67
--- /dev/null
+++ b/subversion/tests/libsvn_subr/time-test.c
@@ -0,0 +1,360 @@
+/*
+ * time-test.c -- test the time functions
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <apr_general.h>
+#include "svn_time.h"
+
+#include "../svn_test.h"
+
+/* All these variables should refer to the same point in time. */
+static apr_time_t test_timestamp = APR_TIME_C(1021316450966679);
+static const char *test_timestring =
+"2002-05-13T19:00:50.966679Z";
+static const char *test_old_timestring =
+"Mon 13 May 2002 22:00:50.966679 (day 133, dst 1, gmt_off 010800)";
+
+
+static svn_error_t *
+test_time_to_cstring(apr_pool_t *pool)
+{
+ const char *timestring;
+
+ timestring = svn_time_to_cstring(test_timestamp,pool);
+
+ if (strcmp(timestring,test_timestring) != 0)
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_time_to_cstring (%" APR_TIME_T_FMT
+ ") returned date string '%s' instead of '%s'",
+ test_timestamp,timestring,test_timestring);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_time_from_cstring(apr_pool_t *pool)
+{
+ apr_time_t timestamp;
+
+ SVN_ERR(svn_time_from_cstring(&timestamp, test_timestring, pool));
+
+ if (timestamp != test_timestamp)
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_time_from_cstring (%s) returned time '%" APR_TIME_T_FMT
+ "' instead of '%" APR_TIME_T_FMT "'",
+ test_timestring,timestamp,test_timestamp);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Before editing these test cases please see the comment in
+ * test_time_from_cstring_old regarding the requirements to exercise the bug
+ * that they exist to test. */
+static const char *failure_old_tests[] = {
+ /* Overflow Day */
+ "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
+ "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
+ "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
+ "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
+ " 3 Oct 2000 HH:MM:SS.UUU (day 277, dst 1, gmt_off -18000)",
+
+ /* Overflow Month */
+ "Tue 3 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
+ "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
+ "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
+ "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
+ " 2000 HH:MM:SS.UUU (day 277, dst 1, gmt_off -18000)",
+
+ NULL
+};
+
+static svn_error_t *
+test_time_from_cstring_old(apr_pool_t *pool)
+{
+ apr_time_t timestamp;
+ const char **ft;
+
+ SVN_ERR(svn_time_from_cstring(&timestamp, test_old_timestring, pool));
+
+ if (timestamp != test_timestamp)
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_time_from_cstring (%s) returned time '%" APR_TIME_T_FMT
+ "' instead of '%" APR_TIME_T_FMT "'",
+ test_old_timestring,timestamp,test_timestamp);
+ }
+
+ /* These tests should fail. They've been added to cover a string overflow
+ * found in our code. However, even if they fail that may not indicate
+ * that there is no problem. The strings being tested need to be
+ * sufficently long to cause a segmentation fault in order to exercise
+ * this bug. Unfortunately due to differences in compilers, architectures,
+ * etc. there is no way to be sure that the bug is being exercised on
+ * all platforms. */
+ for (ft = failure_old_tests; *ft; ft++)
+ {
+ svn_error_t *err = svn_time_from_cstring(&timestamp, *ft, pool);
+ if (! err)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_time_from_cstring (%s) succeeded when it should have failed",
+ *ft);
+ svn_error_clear(err);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_time_invariant(apr_pool_t *pool)
+{
+ apr_time_t current_timestamp = apr_time_now();
+ const char *timestring;
+ apr_time_t timestamp;
+
+ timestring = svn_time_to_cstring(current_timestamp, pool);
+ SVN_ERR(svn_time_from_cstring(&timestamp, timestring, pool));
+
+ if (timestamp != current_timestamp)
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_time_from_cstring ( svn_time_to_cstring (n) ) returned time '%"
+ APR_TIME_T_FMT
+ "' instead of '%" APR_TIME_T_FMT "'",
+ timestamp,current_timestamp);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+struct date_test {
+ const char *str;
+ apr_int32_t year;
+ apr_int32_t mon;
+ apr_int32_t mday;
+ apr_int32_t hour;
+ apr_int32_t min;
+ apr_int32_t sec;
+ apr_int32_t usec;
+};
+
+/* These date strings do not specify a time zone, so we expand the
+ svn_parse_date result in the local time zone and verify that
+ against the desired values. */
+static struct date_test localtz_tests[] = {
+ /* YYYY-M[M]-D[D] */
+ { "2013-01-25", 2013, 1, 25, 0, 0, 0, 0 },
+ { "2013-1-25", 2013, 1, 25, 0, 0, 0, 0 },
+ { "2013-01-2", 2013, 1, 2, 0, 0, 0, 0 },
+ /* YYYY-M[M]-D[D][Th[h]:mm[:ss[.u[u[u[u[u[u] */
+ { "2015-04-26T00:01:59.652655", 2015, 4, 26, 0, 1, 59, 652655 },
+ { "2034-07-20T17:03:36.11379", 2034, 7, 20, 17, 3, 36, 113790 },
+ { "1975-10-29T17:23:56.3859", 1975, 10, 29, 17, 23, 56, 385900 },
+ { "2024-06-08T13:06:14.897", 2024, 6, 8, 13, 6, 14, 897000 },
+ { "2000-01-10T05:11:11.65", 2000, 1, 10, 5, 11, 11, 650000 },
+ { "2017-01-28T07:21:13.2", 2017, 1, 28, 7, 21, 13, 200000 },
+ { "2013-05-18T13:52:49", 2013, 5, 18, 13, 52, 49, 0 },
+ { "2020-05-14T15:28", 2020, 5, 14, 15, 28, 0, 0 },
+ { "2032-05-14T7:28", 2032, 5, 14, 7, 28, 0, 0 },
+ { "2020-5-14T15:28", 2020, 5, 14, 15, 28, 0, 0 },
+ { "2020-05-1T15:28", 2020, 5, 1, 15, 28, 0, 0 },
+ /* YYYYMMDD */
+ { "20100226", 2010, 2, 26, 0, 0, 0, 0 },
+ /* YYYYMMDD[Thhmm[ss[.u[u[u[u[u[u] */
+ { "19860214T050745.6", 1986, 2, 14, 5, 7, 45, 600000 },
+ { "20230219T0045", 2023, 2, 19, 0, 45, 0, 0 },
+ /* YYYY-M[M]-D[D] [h]h:mm[:ss[.u[u[u[u[u[u] */
+ { "1975-07-11 06:31:49.749504", 1975, 7, 11, 6, 31, 49, 749504 },
+ { "2037-05-06 00:08", 2037, 5, 6, 0, 8, 0, 0 },
+ { "2037-5-6 7:01", 2037, 5, 6, 7, 1, 0, 0 },
+ /* Make sure we can do leap days. */
+ { "1976-02-29", 1976, 02, 29, 0, 0, 0, 0 },
+ { "2000-02-29", 2000, 02, 29, 0, 0, 0, 0 },
+ { NULL }
+};
+
+/* These date strings specify an explicit time zone, so we expand the
+ svn_parse_date result in UTC and verify that against the desired
+ values (which have been adjusted for the specified time zone). */
+static struct date_test gmt_tests[] = {
+ /* YYYY-MM-DDThh:mm[:ss[.u[u[u[u[u[u]Z */
+ { "1979-05-05T15:16:04.39Z", 1979, 5, 5, 15, 16, 4, 390000 },
+ { "2012-03-25T12:14Z", 2012, 3, 25, 12, 14, 0, 0 },
+ /* YYYY-MM-DDThh:mm[:ss[.u[u[u[u[u[u]+OO[:oo] */
+ { "1991-09-13T20:13:01.12779+02:26", 1991, 9, 13, 17, 47, 1, 127790 },
+ { "1971-07-20T06:11-10", 1971, 7, 20, 16, 11, 0, 0 },
+ /* YYYYMMDDThhmm[ss[.u[u[u[u[u[u]Z */
+ { "20010808T105155.527Z", 2001, 8, 8, 10, 51, 55, 527000 },
+ { "19781204T1322Z", 1978, 12, 4, 13, 22, 0, 0 },
+ /* YYYYMMDDThhmm[ss[.u[u[u[u[u[u]+OO[oo] */
+ { "20030930T001647.8008-0230", 2003, 9, 30, 2, 46, 47, 800800 },
+ { "19810526T1705+22", 1981, 5, 25, 19, 5, 0, 0 },
+ /* YYYY-MM-DD hh:mm[:ss[.u[u[u[u[u[u][ +OO[oo] */
+ { "2024-06-02 11:30:36 +08", 2024, 6, 2, 3, 30, 36, 0 },
+ { "1994-10-07 08:08 -1457", 1994, 10, 07, 23, 5, 0, 0 },
+ { NULL }
+};
+
+/* These date strings only specify a time of day, so we fill the
+ current date into the desired values before comparing. (Currently
+ we do not allow a time zone with just a time of day.) */
+static struct date_test daytime_tests[] = {
+ /* hh:mm[:ss[.u[u[u[u[u[u] */
+ { "00:54:15.46", 0, 0, 0, 0, 54, 15, 460000 },
+ { "18:21", 0, 0, 0, 18, 21, 0, 0 },
+ { NULL }
+};
+
+/* These date strings should not parse correctly. */
+static const char *failure_tests[] = {
+ "2000-00-02", /* Invalid month */
+ "2000-13-02", /* Invalid month */
+ "2000-01-32", /* Invalid day */
+ "2000-01-00", /* Invalid day */
+ "1999-02-29", /* Invalid leap day */
+ "2000-01-01 24:00:00", /* Invalid hour */
+ "2000-01-01 00:60:00", /* Invalid minute */
+ "2000-01-01 00:00:61", /* Invalid second (60 is okay for leap seconds) */
+ "2000-01-01+24:00", /* Invalid timezone hours */
+ "2000-01-01-02:60", /* Invalid timezone minutes */
+ "2000-01-01Z", /* Date with timezone, but no time */
+ "2000-01-01+01:00",
+ "20000101Z",
+ "20000101-0100",
+ "2000-01-01T10", /* Time with hours but no minutes */
+ "20000101T10",
+ "2000-01-01 10",
+ NULL
+};
+
+static svn_error_t *
+compare_results(struct date_test *dt,
+ apr_time_exp_t *expt)
+{
+ if (expt->tm_year + 1900 != dt->year || expt->tm_mon + 1 != dt->mon
+ || expt->tm_mday != dt->mday || expt->tm_hour != dt->hour
+ || expt->tm_min != dt->min || expt->tm_sec != dt->sec
+ || expt->tm_usec != dt->usec)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL, "Comparison failed for '%s'", dt->str);
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_parse_date(apr_pool_t *pool)
+{
+ apr_time_t now, result;
+ apr_time_exp_t nowexp, expt;
+ svn_boolean_t matched;
+ struct date_test *dt;
+ const char **ft;
+
+ now = apr_time_now();
+ if (apr_time_exp_lt(&nowexp, now) != APR_SUCCESS)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL, "Can't expand time");
+
+ for (dt = localtz_tests; dt->str; dt++)
+ {
+ SVN_ERR(svn_parse_date(&matched, &result, dt->str, now, pool));
+ if (!matched)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL, "Match failed for '%s'", dt->str);
+ if (apr_time_exp_lt(&expt, result) != APR_SUCCESS)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL, "Expand failed for '%s'", dt->str);
+ SVN_ERR(compare_results(dt, &expt));
+ }
+
+ for (dt = gmt_tests; dt->str; dt++)
+ {
+ SVN_ERR(svn_parse_date(&matched, &result, dt->str, now, pool));
+ if (!matched)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL, "Match failed for '%s'", dt->str);
+ if (apr_time_exp_gmt(&expt, result) != APR_SUCCESS)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL, "Expand failed for '%s'", dt->str);
+ SVN_ERR(compare_results(dt, &expt));
+ }
+
+ for (dt = daytime_tests; dt->str; dt++)
+ {
+ SVN_ERR(svn_parse_date(&matched, &result, dt->str, now, pool));
+ if (!matched)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL, "Match failed for '%s'", dt->str);
+ if (apr_time_exp_lt(&expt, result) != APR_SUCCESS)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL, "Expand failed for '%s'", dt->str);
+ dt->year = nowexp.tm_year + 1900;
+ dt->mon = nowexp.tm_mon + 1;
+ dt->mday = nowexp.tm_mday;
+ SVN_ERR(compare_results(dt, &expt));
+ }
+
+ for (ft = failure_tests; *ft; ft++)
+ {
+ SVN_ERR(svn_parse_date(&matched, &result, *ft, now, pool));
+ if (matched)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL, "Match succeeded for '%s'", *ft);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* The test table. */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_time_to_cstring,
+ "test svn_time_to_cstring"),
+ SVN_TEST_PASS2(test_time_from_cstring,
+ "test svn_time_from_cstring"),
+ SVN_TEST_PASS2(test_time_from_cstring_old,
+ "test svn_time_from_cstring (old format)"),
+ SVN_TEST_PASS2(test_time_invariant,
+ "test svn_time_[to/from]_cstring() invariant"),
+ SVN_TEST_PASS2(test_parse_date,
+ "test svn_parse_date"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/translate-test.c b/subversion/tests/libsvn_subr/translate-test.c
new file mode 100644
index 0000000..2436bc7
--- /dev/null
+++ b/subversion/tests/libsvn_subr/translate-test.c
@@ -0,0 +1,1333 @@
+/*
+ * translate-test.c -- test the eol and keyword translation subroutine
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+
+
+/* Advice to those adding new tests to this file:
+ * ==============================================
+ *
+ * Read the doc string for substitute_and_verify(), then read the
+ * test functions themselves -- they're small, and they'll be very
+ * easy to understand once you know substitute_and_verify().
+ */
+
+
+
+#include <stdio.h>
+#include <string.h>
+#include <apr_general.h>
+#include <apr_file_io.h>
+
+#include "../svn_test.h"
+
+#include "svn_pools.h"
+#include "svn_subst.h"
+
+
+
+/*** Helpers ***/
+
+/* (Almost) all the tests share the same test data. */
+static const char *lines[] =
+ {
+ "Line 1: fairly boring subst test data... blah blah",
+ "Line 2: fairly boring subst test data... blah blah.",
+ "Line 3: Valid $LastChangedRevision$, started unexpanded.",
+ "Line 4: fairly boring subst test data... blah blah.",
+ "Line 5: Valid $Rev$, started unexpanded.",
+ "Line 6: fairly boring subst test data... blah blah.",
+ "Line 7: fairly boring subst test data... blah blah.",
+ "Line 8: Valid $LastChangedBy$, started unexpanded.",
+ "Line 9: Valid $Author$, started unexpanded.",
+ "Line 10: fairly boring subst test data... blah blah.",
+ "Line 11: fairly boring subst test data... blah blah.",
+ "Line 12: Valid $LastChangedDate$, started unexpanded.",
+ "Line 13: Valid $Date$, started unexpanded.",
+ "Line 14: fairly boring subst test data... blah blah.",
+ "Line 15: fairly boring subst test data... blah blah.",
+ "Line 16: Valid $HeadURL$, started unexpanded.",
+ "Line 17: Valid $URL$, started unexpanded.",
+ "Line 18: fairly boring subst test data... blah blah.",
+ "Line 19: Invalid expanded keyword spanning two lines: $Author: ",
+ /* The idea here is that, were it not broken across two lines,
+ "$Author: Line 20: jrandom$" would be a valid if odd, keyword. */
+ "Line 20: jrandom$ remainder of invalid keyword spanning two lines.",
+ "Line 21: fairly boring subst test data... blah blah.",
+ "Line 22: an unknown keyword $LastChangedSocks$.",
+ "Line 23: fairly boring subst test data... blah blah.",
+ /* In line 24, the third dollar sign terminates the first, and the
+ fourth should therefore remain a literal dollar sign. */
+ "Line 24: keyword in a keyword: $Author: $Date$ $",
+ "Line 25: fairly boring subst test data... blah blah.",
+ "Line 26: Emptily expanded keyword $Rev: $.",
+ "Line 27: fairly boring subst test data... blah blah.",
+ "Line 28: fairly boring subst test data... blah blah.",
+ "Line 29: Valid $LastChangedRevision: 1729 $, started expanded.",
+ "Line 30: Valid $Rev: 1729 $, started expanded.",
+ "Line 31: fairly boring subst test data... blah blah.",
+ "Line 32: fairly boring subst test data... blah blah.",
+ "Line 33: Valid $LastChangedDate: 2002-01-01 $, started expanded.",
+ "Line 34: Valid $Date: 2002-01-01 $, started expanded.",
+ "Line 35: fairly boring subst test data... blah blah.",
+ "Line 36: fairly boring subst test data... blah blah.",
+ "Line 37: Valid $LastChangedBy: jrandom $, started expanded.",
+ "Line 38: Valid $Author: jrandom $, started expanded.",
+ "Line 39: fairly boring subst test data... blah blah.",
+ "Line 40: fairly boring subst test data... blah blah.",
+ "Line 41: Valid $HeadURL: http://tomato/mauve $, started expanded.",
+ "Line 42: Valid $URL: http://tomato/mauve $, started expanded.",
+ "Line 43: fairly boring subst test data... blah blah.",
+ "Line 44: fairly boring subst test data... blah blah.",
+ "Line 45: Invalid $LastChangedRevisionWithSuffix$, started unexpanded.",
+ "Line 46: Empty $Author:$, started expanded.",
+ "Line 47: fairly boring subst test data... blah blah.",
+ "Line 48: Two keywords back to back: $Author$$Rev$.",
+ "Line 49: One keyword, one not, back to back: $Author$Rev$.",
+ "Line 50: a series of dollar signs $$$$$$$$$$$$$$$$$$$$$$$$$$$$.",
+ "Line 51: same, but with embedded keyword $$$$$$$$Date$$$$$$$$$$.",
+ "Line 52: same, with expanded, empty keyword $$$$$$Date: $$$$$$.",
+ "Line 53: $This is a lengthy line designed to test a bug that was "
+ "reported about keyword expansion. The problem was that a line "
+ "had more than SVN_KEYWORD_MAX_LEN (255 at the time) characters "
+ "after an initial dollar sign, which triggered a buglet in our "
+ "svn_subst_copy_and_translate() function and resulted in, in some cases "
+ "a SEGFAULT, and in others a filthy corrupt commit. ",
+ "", /* Lines 54-69 are blank to test consecutive newlines */
+ "",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "$Author$Rev$.", /* Line 70-73 test places where '$' abuts a newline. */
+ ".$veR$Author$",
+ "$",
+ "$$",
+ /* Line 74-75 test for keywords containing '$', issue #1780 */
+ "Line 74: Valid $Author: jran$dom $, started expanded.",
+ "Line 75: Valid $URL: http://tomato/mau$ve $, started expanded.",
+ /* Line 76-78 tests for a string with an unknown keyword of 252-254 bytes
+ long */
+ "$ "
+ " "
+ " "
+ " $$",
+ "$ "
+ " "
+ " "
+ " $$",
+ "$ "
+ " "
+ " "
+ " $$",
+ /* Testing border cases, line 79-82 test for valid keywords, keywords on
+ line 83-84 are too long */
+ "Line 79: Valid $Author: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ "aaaaaaaaaaaaa$aaaaaaaaaaaaaaaaaaaaaa$aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa$aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa $, started expanded.",
+ "Line 80: Valid $Author: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa$aaaaaaaaaaaaaaa"
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa$aaaaaaaaaaaaaaaaaa"
+ "aaaaaaa$aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa$aaaaaaa $, started "
+ "expanded.",
+ /* keyword from first dollar sign to last = 254 chars */
+ "Line 81: Valid $Author: aaaaaaaaaaaaaaaaaaaa$aaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ "aaaaaaaaaaaaa$aaaaaaaaaaaaaaaaaaaa$$aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa$$aaaaaaaaaaaaaaaaaaaaaaaaa"
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa$$aaaaaa$$$ $, started "
+ "expanded.",
+ /* keyword from first dollar sign to last = 255 chars */
+ "Line 82: Valid $Author: aaaaaaaaaaa$$aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ "aaaaaaaaaaaaaaaaaaaaaaaaaa$$$$aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa$$aaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ "aaaaaaaaaaaaaa$$aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa$$$ $, started "
+ "expanded.",
+ /* keyword from first dollar sign to last = 256 chars */
+ "Line 83: Invalid $Author: aaaaaaaaaaa$$aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ "aaaaaaaaaaaaaaaaaaaaaaaaaa$$$$aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa$$aaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ "aaaaaaaaaaaaaa$$aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa$$$ $, started "
+ "expanded.",
+ "Line 84: Invalid $Author: aaaaaaaaaaa$$aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ "aaaaaaaaaaaaaaaaaaaaaaaaaa$$$$aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa$$aaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ "aaaaaaaaaaaaaa$$aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa$$$ $, started "
+ "expanded.",
+ "Line 85: end of subst test data."
+ };
+
+
+/* Return a randomly selected eol sequence. */
+static const char *
+random_eol_marker(void)
+{
+ /* Select a random eol marker from this set. */
+ static int seeded = 0;
+
+ /* ### todo: allowing '\r' to be in this list of possible random
+ eol_markers causes problems for the current testing framework
+ which expects a 1:1 ratio of input-line-count to output-line-count.
+ Problems occur when there are two consecutive line ending markers
+ where the first is '\r' and the second is '\n' -- our
+ translation routine reads that as a single '\r\n' which throws
+ off the linecount on the output side, and fouls up substitute_and_verify.
+ */
+ const char *eol_markers[] = { "\n", "\r\n" };
+
+ if (! seeded)
+ {
+ srand(1729); /* we want errors to be reproducible */
+ seeded = 1;
+ }
+
+ return eol_markers[rand()
+ % ((sizeof(eol_markers)) / (sizeof(*eol_markers)))];
+}
+
+
+/* Create FNAME with global `lines' as initial data. Use EOL_STR as
+ * the end-of-line marker between lines, or if EOL_STR is NULL, choose
+ * a random marker at each opportunity. Use POOL for any temporary
+ * allocation.
+ */
+static svn_error_t *
+create_file(const char *fname, const char *eol_str, apr_pool_t *pool)
+{
+ apr_file_t *f;
+ apr_size_t i, j;
+
+ SVN_ERR(svn_io_file_open(&f, fname,
+ (APR_WRITE | APR_CREATE | APR_EXCL | APR_BINARY),
+ APR_OS_DEFAULT, pool));
+
+ for (i = 0; i < (sizeof(lines) / sizeof(*lines)); i++)
+ {
+ const char *this_eol_str = eol_str ? eol_str : random_eol_marker();
+
+ apr_file_printf(f, "%s", lines[i]);
+
+ /* Is it overly paranoid to use putc(), because of worry about
+ fprintf() doing a newline conversion? */
+ for (j = 0; this_eol_str[j]; j++)
+ {
+ SVN_ERR(svn_io_file_putc(this_eol_str[j], f, pool));
+ }
+ }
+
+ return svn_error_trace(svn_io_file_close(f, pool));
+}
+
+/* Set up, run, and verify the results of a substitution.
+ *
+ * Create a file TEST_NAME.src using global `lines' as the initial
+ * data, with SRC_EOL as the line separator, then convert it to file
+ * TEST_NAME.dst (using DST_EOL, REPAIR, EXPAND, REV, AUTHOR, DATE,
+ * and URL as svn_subst_copy_and_translate() does), and verify that the
+ * conversion worked. Null SRC_EOL means create a mixed eol src
+ * file.
+ *
+ * If the verification succeeds, remove both files and return
+ * SVN_NO_ERROR.
+ *
+ * If the verification fails, leave the files for post-mortem. If the
+ * failure is due to non-eol data being wrong, return
+ * SVN_ERR_MALFORMED_FILE. If the problem is an incorrect eol marker,
+ * return SVN_ERR_IO_UNKNOWN_EOL. If the problem is that a mixed eol
+ * style was repaired even though no repair flag was passed, return
+ * SVN_ERR_TEST_FAILED.
+ *
+ * Use POOL for temporary allocation.
+ *
+ * Note: as with svn_subst_copy_and_translate(), if any of DST_EOL, REV,
+ * AUTHOR, DATE, and/or URL is null, then that substitution is not
+ * performed.
+ */
+static svn_error_t *
+substitute_and_verify(const char *test_name,
+ const char *src_eol,
+ const char *dst_eol,
+ svn_boolean_t repair,
+ const char *rev,
+ const char *date,
+ const char *author,
+ const char *url,
+ svn_boolean_t expand,
+ apr_pool_t *pool)
+{
+ svn_error_t *err;
+ svn_stringbuf_t *contents;
+ apr_hash_t *keywords = apr_hash_make(pool);
+ apr_size_t idx = 0;
+ apr_size_t i;
+ const char *expect[(sizeof(lines) / sizeof(*lines))];
+ const char *src_fname = apr_pstrcat(pool, test_name, ".src", SVN_VA_NULL);
+ const char *dst_fname = apr_pstrcat(pool, test_name, ".dst", SVN_VA_NULL);
+ svn_string_t *val;
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ /** Clean up from previous tests, set up src data, and convert. **/
+ SVN_ERR(svn_io_remove_file2(src_fname, TRUE, pool));
+ SVN_ERR(svn_io_remove_file2(dst_fname, TRUE, pool));
+ SVN_ERR(create_file(src_fname, src_eol, pool));
+
+ if (rev)
+ {
+ val = svn_string_create(rev, pool);
+ apr_hash_set(keywords, SVN_KEYWORD_REVISION_LONG,
+ APR_HASH_KEY_STRING, val);
+ apr_hash_set(keywords, SVN_KEYWORD_REVISION_MEDIUM,
+ APR_HASH_KEY_STRING, val);
+ apr_hash_set(keywords, SVN_KEYWORD_REVISION_SHORT,
+ APR_HASH_KEY_STRING, val);
+ }
+ if (date)
+ {
+ val = svn_string_create(date, pool);
+ apr_hash_set(keywords, SVN_KEYWORD_DATE_LONG,
+ APR_HASH_KEY_STRING, val);
+ apr_hash_set(keywords, SVN_KEYWORD_DATE_SHORT,
+ APR_HASH_KEY_STRING, val);
+ }
+ if (author)
+ {
+ val = svn_string_create(author, pool);
+ apr_hash_set(keywords, SVN_KEYWORD_AUTHOR_LONG,
+ APR_HASH_KEY_STRING, val);
+ apr_hash_set(keywords, SVN_KEYWORD_AUTHOR_SHORT,
+ APR_HASH_KEY_STRING, val);
+ }
+ if (url)
+ {
+ val = svn_string_create(url, pool);
+ apr_hash_set(keywords, SVN_KEYWORD_URL_LONG,
+ APR_HASH_KEY_STRING, val);
+ apr_hash_set(keywords, SVN_KEYWORD_URL_SHORT,
+ APR_HASH_KEY_STRING, val);
+ }
+
+ err = svn_subst_copy_and_translate4(src_fname, dst_fname, dst_eol, repair,
+ keywords, expand, FALSE,
+ NULL, NULL, subpool);
+ svn_pool_destroy(subpool);
+
+ /* Conversion should have failed, if src has mixed eol, and the
+ repair flag was not set, and we requested eol translation. */
+ if ((! src_eol) && dst_eol && (! repair))
+ {
+ if (! err)
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "translation of '%s' should have failed, but didn't", src_fname);
+ }
+ else if (err->apr_err != SVN_ERR_IO_INCONSISTENT_EOL)
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, err,
+ "translation of '%s' should fail, but not with this error",
+ src_fname);
+ }
+ else
+ {
+ svn_error_clear(err);
+ SVN_ERR(svn_io_remove_file2(src_fname, FALSE, pool));
+ return SVN_NO_ERROR;
+ }
+
+ }
+ else if (err)
+ return err;
+
+
+ /** Verify that the conversion worked. **/
+
+ for (i = 0; i < (sizeof(expect) / sizeof(*expect)); i++)
+ expect[i] = lines[i];
+
+ /* Certain lines contain keywords; expect their expansions. */
+ if (rev)
+ {
+ if (expand)
+ {
+ expect[3 - 1] =
+ apr_pstrcat(pool, "Line 3: ",
+ "Valid $LastChangedRevision: ",
+ rev,
+ " $, started unexpanded.",
+ SVN_VA_NULL);
+ expect[5 - 1] =
+ apr_pstrcat(pool, "Line 5: ",
+ "Valid $Rev: ", rev, " $, started unexpanded.",
+ SVN_VA_NULL);
+ expect[26 - 1] =
+ apr_pstrcat(pool, "Line 26: ",
+ "Emptily expanded keyword $Rev: ", rev," $.",
+ SVN_VA_NULL);
+ expect[29 - 1] =
+ apr_pstrcat(pool, "Line 29: ",
+ "Valid $LastChangedRevision: ",
+ rev,
+ " $, started expanded.",
+ SVN_VA_NULL);
+ expect[30 - 1] =
+ apr_pstrcat(pool, "Line 30: ",
+ "Valid $Rev: ",
+ rev,
+ " $, started expanded.",
+ SVN_VA_NULL);
+ }
+ else /* unexpand */
+ {
+ /* Lines 3 and 5 remain unchanged. */
+ expect[26 - 1] =
+ "Line 26: Emptily expanded keyword $Rev$.";
+ expect[29 - 1] =
+ "Line 29: Valid $LastChangedRevision$, started expanded.";
+ expect[30 - 1] =
+ "Line 30: Valid $Rev$, started expanded.";
+ }
+ }
+
+ if (date)
+ {
+ if (expand)
+ {
+ expect[12 - 1] =
+ apr_pstrcat(pool, "Line 12: ",
+ "Valid $LastChangedDate: ",
+ date,
+ " $, started unexpanded.",
+ SVN_VA_NULL);
+ expect[13 - 1] =
+ apr_pstrcat(pool, "Line 13: ",
+ "Valid $Date: ", date, " $, started unexpanded.",
+ SVN_VA_NULL);
+ expect[33 - 1] =
+ apr_pstrcat(pool, "Line 33: ",
+ "Valid $LastChangedDate: ",
+ date,
+ " $, started expanded.",
+ SVN_VA_NULL);
+ expect[34 - 1] =
+ apr_pstrcat(pool, "Line 34: ",
+ "Valid $Date: ", date, " $, started expanded.",
+ SVN_VA_NULL);
+ expect[51 - 1] =
+ apr_pstrcat(pool, "Line 51: ",
+ "same, but with embedded keyword ",
+ "$$$$$$$$Date: ", date, " $$$$$$$$$$.",
+ SVN_VA_NULL);
+ expect[52 - 1] =
+ apr_pstrcat(pool, "Line 52: ",
+ "same, with expanded, empty keyword ",
+ "$$$$$$Date: ", date, " $$$$$$.",
+ SVN_VA_NULL);
+ }
+ else /* unexpand */
+ {
+ /* Lines 12 and 13 remain unchanged. */
+ expect[33 - 1] =
+ "Line 33: Valid $LastChangedDate$, started expanded.";
+ expect[34 - 1] =
+ "Line 34: Valid $Date$, started expanded.";
+ expect[51 - 1] =
+ "Line 51: same, but with embedded keyword $$$$$$$$Date$$$$$$$$$$.";
+ expect[52 - 1] =
+ "Line 52: same, with expanded, empty keyword $$$$$$Date$$$$$$.";
+ }
+ }
+
+ if (author)
+ {
+ if (expand)
+ {
+ expect[8 - 1] =
+ apr_pstrcat(pool, "Line 8: ",
+ "Valid $LastChangedBy: ",
+ author,
+ " $, started unexpanded.",
+ SVN_VA_NULL);
+ expect[9 - 1] =
+ apr_pstrcat(pool, "Line 9: ",
+ "Valid $Author: ", author, " $, started unexpanded.",
+ SVN_VA_NULL);
+ expect[37 - 1] =
+ apr_pstrcat(pool, "Line 37: ",
+ "Valid $LastChangedBy: ", author,
+ " $, started expanded.", SVN_VA_NULL);
+ expect[38 - 1] =
+ apr_pstrcat(pool, "Line 38: ",
+ "Valid $Author: ", author, " $, started expanded.",
+ SVN_VA_NULL);
+ expect[46 - 1] =
+ apr_pstrcat(pool, "Line 46: ",
+ "Empty $Author: ", author, " $, started expanded.",
+ SVN_VA_NULL);
+ expect[71 - 1] =
+ apr_pstrcat(pool, ".$veR$Author: ", author, " $", SVN_VA_NULL);
+
+ expect[74 - 1] =
+ apr_pstrcat(pool, "Line 74: ",
+ "Valid $Author: ", author, " $, started expanded.",
+ SVN_VA_NULL);
+ expect[79 - 1] =
+ apr_pstrcat(pool, "Line 79: ",
+ "Valid $Author: ", author, " $, started expanded.",
+ SVN_VA_NULL);
+ expect[80 - 1] =
+ apr_pstrcat(pool, "Line 80: ",
+ "Valid $Author: ", author, " $, started expanded.",
+ SVN_VA_NULL);
+ expect[81 - 1] =
+ apr_pstrcat(pool, "Line 81: ",
+ "Valid $Author: ", author, " $, started expanded.",
+ SVN_VA_NULL);
+ expect[82 - 1] =
+ apr_pstrcat(pool, "Line 82: ",
+ "Valid $Author: ", author, " $, started expanded.",
+ SVN_VA_NULL);
+ }
+ else /* unexpand */
+ {
+ /* Lines 8, 9, and 71 remain unchanged. */
+ expect[37 - 1] =
+ "Line 37: Valid $LastChangedBy$, started expanded.";
+ expect[38 - 1] =
+ "Line 38: Valid $Author$, started expanded.";
+ expect[46 - 1] =
+ "Line 46: Empty $Author$, started expanded.";
+ expect[74 - 1] =
+ "Line 74: Valid $Author$, started expanded.";
+ expect[79 - 1] =
+ "Line 79: Valid $Author$, started expanded.";
+ expect[80 - 1] =
+ "Line 80: Valid $Author$, started expanded.";
+ expect[81 - 1] =
+ "Line 81: Valid $Author$, started expanded.";
+ expect[82 - 1] =
+ "Line 82: Valid $Author$, started expanded.";
+ }
+ }
+
+ if (url)
+ {
+ if (expand)
+ {
+ expect[16 - 1] =
+ apr_pstrcat(pool, "Line 16: ",
+ "Valid $HeadURL: ", url, " $, started unexpanded.",
+ SVN_VA_NULL);
+ expect[17 - 1] =
+ apr_pstrcat(pool, "Line 17: ",
+ "Valid $URL: ", url, " $, started unexpanded.",
+ SVN_VA_NULL);
+ expect[41 - 1] =
+ apr_pstrcat(pool, "Line 41: ",
+ "Valid $HeadURL: ", url, " $, started expanded.",
+ SVN_VA_NULL);
+ expect[42 - 1] =
+ apr_pstrcat(pool, "Line 42: ",
+ "Valid $URL: ", url, " $, started expanded.",
+ SVN_VA_NULL);
+ expect[75 - 1] =
+ apr_pstrcat(pool, "Line 75: ",
+ "Valid $URL: ", url, " $, started expanded.",
+ SVN_VA_NULL);
+ }
+ else /* unexpand */
+ {
+ /* Lines 16 and 17 and remain unchanged. */
+ expect[41 - 1] =
+ "Line 41: Valid $HeadURL$, started expanded.";
+ expect[42 - 1] =
+ "Line 42: Valid $URL$, started expanded.";
+ expect[75 - 1] =
+ "Line 75: Valid $URL$, started expanded.";
+ }
+ }
+
+ /* Handle lines 48, 49, and 70 specially, as they contains two valid
+ keywords. */
+ if (rev && author)
+ {
+ if (expand)
+ {
+ expect[48 - 1] =
+ apr_pstrcat(pool, "Line 48: ",
+ "Two keywords back to back: "
+ "$Author: ", author, " $"
+ "$Rev: ", rev, " $.",
+ SVN_VA_NULL);
+ expect[49 - 1] =
+ apr_pstrcat(pool, "Line 49: ",
+ "One keyword, one not, back to back: "
+ "$Author: ", author, " $Rev$.",
+ SVN_VA_NULL);
+ expect[70 - 1] =
+ apr_pstrcat(pool, "$Author: ", author, " $Rev$.", SVN_VA_NULL);
+ }
+ /* Else Lines 48, 49, and 70 remain unchanged. */
+ }
+ else if (rev && (! author))
+ {
+ if (expand)
+ {
+ expect[48 - 1] =
+ apr_pstrcat(pool, "Line 48: ",
+ "Two keywords back to back: "
+ "$Author$$Rev: ", rev, " $.",
+ SVN_VA_NULL);
+ expect[49 - 1] =
+ apr_pstrcat(pool, "Line 49: ",
+ "One keyword, one not, back to back: "
+ "$Author$Rev: ", rev, " $.",
+ SVN_VA_NULL);
+ expect[70 - 1] =
+ apr_pstrcat(pool, "$Author$Rev: ", rev, " $.", SVN_VA_NULL);
+ }
+ /* Else Lines 48, 49, and 70 remain unchanged. */
+ }
+ else if ((! rev) && author)
+ {
+ if (expand)
+ {
+ expect[48 - 1] =
+ apr_pstrcat(pool, "Line 48: ",
+ "Two keywords back to back: "
+ "$Author: ", author, " $$Rev$.",
+ SVN_VA_NULL);
+ expect[49 - 1] =
+ apr_pstrcat(pool, "Line 49: ",
+ "One keyword, one not, back to back: "
+ "$Author: ", author, " $Rev$.",
+ SVN_VA_NULL);
+ expect[70 - 1] =
+ apr_pstrcat(pool, "$Author: ", author, " $Rev$.", SVN_VA_NULL);
+ }
+ /* Else Lines 48, 49, and 70 remain unchanged. */
+ }
+ /* Else neither rev nor author, so Lines 48, 49, and 70 remain
+ unchanged. */
+
+ /* Handle line 24 specially, as it contains two valid keywords. */
+ if (date && author)
+ {
+ if (expand)
+ {
+ expect[24 - 1] =
+ apr_pstrcat(pool, "Line 24: ",
+ "keyword in a keyword: $Author: ",
+ author,
+ " $Date$ $",
+ SVN_VA_NULL);
+ }
+ else /* unexpand */
+ {
+ expect[24 - 1] =
+ apr_pstrcat(pool, "Line 24: ",
+ "keyword in a keyword: $Author$Date$ $",
+ SVN_VA_NULL);
+ }
+ }
+ else if (date && (! author))
+ {
+ if (expand)
+ {
+ expect[24 - 1] =
+ apr_pstrcat(pool, "Line 24: ",
+ "keyword in a keyword: $Author: $Date: ",
+ date,
+ " $ $",
+ SVN_VA_NULL);
+ }
+ /* Else Line 24 remains unchanged. */
+ }
+ else if ((! date) && author)
+ {
+ if (expand)
+ {
+ expect[24 - 1] =
+ apr_pstrcat(pool, "Line 24: ",
+ "keyword in a keyword: $Author: ",
+ author,
+ " $Date$ $",
+ SVN_VA_NULL);
+ }
+ else /* unexpand */
+ {
+ expect[24 - 1] =
+ apr_pstrcat(pool, "Line 24: ",
+ "keyword in a keyword: $Author$Date$ $",
+ SVN_VA_NULL);
+ }
+ }
+ /* Else neither author nor date, so Line 24 remains unchanged. */
+
+ /** Ready to verify. **/
+
+ SVN_ERR(svn_stringbuf_from_file(&contents, dst_fname, pool));
+
+ for (i = 0; i < (sizeof(expect) / sizeof(*expect)); i++)
+ {
+ if (contents->len < idx)
+ return svn_error_createf
+ (SVN_ERR_MALFORMED_FILE, NULL,
+ "'%s' has short contents at line %" APR_SIZE_T_FMT,
+ dst_fname, i + 1);
+
+ if (strncmp(contents->data + idx, expect[i], strlen(expect[i])) != 0)
+ return svn_error_createf
+ (SVN_ERR_MALFORMED_FILE, NULL,
+ "'%s' has wrong contents at line %" APR_SIZE_T_FMT,
+ dst_fname, i + 1);
+
+ /* Else, the data is correct, at least up to the next eol. */
+
+ idx += strlen(expect[i]);
+
+ if (dst_eol) /* verify the promised consistent eol style */
+ {
+ if (strncmp(contents->data + idx, dst_eol, strlen(dst_eol)) != 0)
+ return svn_error_createf
+ (SVN_ERR_IO_UNKNOWN_EOL, NULL,
+ "'%s' has wrong eol style at line %" APR_SIZE_T_FMT, dst_fname,
+ i + 1);
+ else
+ idx += strlen(dst_eol);
+ }
+ else /* allow any eol style, even inconsistent ones, loosely */
+ {
+ while ((*(contents->data + idx) == '\r')
+ || (*(contents->data + idx) == '\n'))
+ idx++;
+ }
+ }
+
+ /* Clean up this test, since successful. */
+ SVN_ERR(svn_io_remove_file2(src_fname, FALSE, pool));
+ SVN_ERR(svn_io_remove_file2(dst_fname, FALSE, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+
+static svn_error_t *
+noop(apr_pool_t *pool)
+{
+ SVN_ERR(substitute_and_verify
+ ("noop", NULL, NULL, 0, NULL, NULL, NULL, NULL, 1, pool));
+
+ SVN_ERR(substitute_and_verify
+ ("noop", "\r", NULL, 0, NULL, NULL, NULL, NULL, 1, pool));
+
+ SVN_ERR(substitute_and_verify
+ ("noop", "\n", NULL, 0, NULL, NULL, NULL, NULL, 1, pool));
+
+ SVN_ERR(substitute_and_verify
+ ("noop", "\r\n", NULL, 0, NULL, NULL, NULL, NULL, 1, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+
+
+/** EOL conversion alone. **/
+
+static svn_error_t *
+crlf_to_crlf(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("crlf_to_crlf", "\r\n", "\r\n", 0,
+ NULL, NULL, NULL, NULL, 1, pool);
+}
+
+
+static svn_error_t *
+lf_to_crlf(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("lf_to_crlf", "\n", "\r\n", 0, NULL, NULL, NULL, NULL, 1, pool);
+}
+
+
+static svn_error_t *
+cr_to_crlf(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("cr_to_crlf", "\r", "\r\n", 0, NULL, NULL, NULL, NULL, 1, pool);
+}
+
+
+static svn_error_t *
+mixed_to_crlf(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("mixed_to_crlf", NULL, "\r\n", 1,
+ NULL, NULL, NULL, NULL, 1, pool);
+}
+
+
+static svn_error_t *
+lf_to_lf(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("lf_to_lf", "\n", "\n", 0, NULL, NULL, NULL, NULL, 1, pool);
+}
+
+
+static svn_error_t *
+crlf_to_lf(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("crlf_to_lf", "\r\n", "\n", 0, NULL, NULL, NULL, NULL, 1, pool);
+}
+
+
+static svn_error_t *
+cr_to_lf(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("cr_to_lf", "\r", "\n", 0, NULL, NULL, NULL, NULL, 1, pool);
+}
+
+
+static svn_error_t *
+mixed_to_lf(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("mixed_to_lf", NULL, "\n", 1, NULL, NULL, NULL, NULL, 1, pool);
+}
+
+
+static svn_error_t *
+crlf_to_cr(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("crlf_to_cr", "\r\n", "\r", 0, NULL, NULL, NULL, NULL, 1, pool);
+}
+
+
+static svn_error_t *
+lf_to_cr(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("lf_to_cr", "\n", "\r", 0, NULL, NULL, NULL, NULL, 1, pool);
+}
+
+
+static svn_error_t *
+cr_to_cr(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("cr_to_cr", "\r", "\r", 0, NULL, NULL, NULL, NULL, 1, pool);
+}
+
+
+static svn_error_t *
+mixed_to_cr(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("mixed_to_cr", NULL, "\r", 1, NULL, NULL, NULL, NULL, 1, pool);
+}
+
+
+static svn_error_t *
+mixed_no_repair(apr_pool_t *pool)
+{
+ SVN_ERR(substitute_and_verify
+ ("mixed_no_repair", NULL, "\n", 0,
+ NULL, NULL, NULL, NULL, 1, pool));
+
+ SVN_ERR(substitute_and_verify
+ ("mixed_no_repair", NULL, "\r\n", 0,
+ NULL, NULL, NULL, NULL, 1, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/** Keyword expansion alone. **/
+
+static svn_error_t *
+expand_author(apr_pool_t *pool)
+{
+ SVN_ERR(substitute_and_verify
+ ("author", "\n", NULL, 0, NULL, NULL, "jrandom", NULL, 1, pool));
+
+ SVN_ERR(substitute_and_verify
+ ("author", "\r\n", NULL, 0, NULL, NULL, "jrandom", NULL, 1, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+expand_date(apr_pool_t *pool)
+{
+ SVN_ERR(substitute_and_verify
+ ("date", "\n", NULL, 0,
+ NULL, "Wed Jan 9 07:49:05 2002", NULL, NULL, 1, pool));
+
+ SVN_ERR(substitute_and_verify
+ ("date", "\r\n", NULL, 0,
+ NULL, "Wed Jan 9 07:49:05 2002", NULL, NULL, 1, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+expand_author_date(apr_pool_t *pool)
+{
+ SVN_ERR(substitute_and_verify
+ ("author_date", "\n", NULL, 0,
+ NULL, "Wed Jan 9 07:49:05 2002", "jrandom", NULL, 1, pool));
+
+ SVN_ERR(substitute_and_verify
+ ("author_date", "\r\n", NULL, 0,
+ NULL, "Wed Jan 9 07:49:05 2002", "jrandom", NULL, 1, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+expand_author_rev(apr_pool_t *pool)
+{
+ SVN_ERR(substitute_and_verify
+ ("author_rev", "\n", NULL, 0,
+ "1729", NULL, "jrandom", NULL, 1, pool));
+
+ SVN_ERR(substitute_and_verify
+ ("author_rev", "\r\n", NULL, 0,
+ "1729", NULL, "jrandom", NULL, 1, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+expand_rev(apr_pool_t *pool)
+{
+ SVN_ERR(substitute_and_verify
+ ("rev", "\n", NULL, 0,
+ "1729", NULL, NULL, NULL, 1, pool));
+
+ SVN_ERR(substitute_and_verify
+ ("rev", "\r\n", NULL, 0,
+ "1729", NULL, NULL, NULL, 1, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+expand_rev_url(apr_pool_t *pool)
+{
+ SVN_ERR(substitute_and_verify
+ ("rev_url", "\n", NULL, 0,
+ "1729", NULL, NULL, "http://subversion.tigris.org", 1, pool));
+
+ SVN_ERR(substitute_and_verify
+ ("rev_url", "\r\n", NULL, 0,
+ "1729", NULL, NULL, "http://subversion.tigris.org", 1, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+expand_author_date_rev_url(apr_pool_t *pool)
+{
+ SVN_ERR(substitute_and_verify
+ ("author_date_rev_url", "\n", NULL, 0,
+ "1729",
+ "Wed Jan 9 07:49:05 2002",
+ "jrandom",
+ "http://subversion.tigris.org",
+ 1, pool));
+
+ SVN_ERR(substitute_and_verify
+ ("author_date_rev_url", "\r\n", NULL, 0,
+ "1729",
+ "Wed Jan 9 07:49:05 2002",
+ "jrandom",
+ "http://subversion.tigris.org",
+ 1, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/** Keyword expansion and EOL conversion together. **/
+
+static svn_error_t *
+lf_to_crlf_expand_author(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("lf_to_crlf_author", "\n", "\r\n", 0,
+ NULL, NULL, "jrandom", NULL, 1, pool);
+}
+
+
+static svn_error_t *
+mixed_to_lf_expand_author_date(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("mixed_to_lf_author_date", NULL, "\n", 1,
+ NULL, "Wed Jan 9 07:49:05 2002", "jrandom", NULL, 1, pool);
+}
+
+
+static svn_error_t *
+crlf_to_cr_expand_author_rev(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("crlf_to_cr_author_rev", "\r\n", "\r", 0,
+ "1729", NULL, "jrandom", NULL, 1, pool);
+}
+
+
+static svn_error_t *
+cr_to_crlf_expand_rev(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("cr_to_crlf_rev", "\r", "\r\n", 0,
+ "1729", NULL, NULL, NULL, 1, pool);
+}
+
+
+static svn_error_t *
+cr_to_crlf_expand_rev_url(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("cr_to_crlf_rev_url", "\r", "\r\n", 0,
+ "1729", NULL, NULL, "http://subversion.tigris.org", 1, pool);
+}
+
+
+static svn_error_t *
+mixed_to_crlf_expand_author_date_rev_url(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("mixed_to_crlf_author_date_rev_url", NULL, "\r\n", 1,
+ "1729",
+ "Wed Jan 9 07:49:05 2002",
+ "jrandom",
+ "http://subversion.tigris.org",
+ 1,
+ pool);
+}
+
+
+
+/** Keyword unexpansion alone. **/
+
+static svn_error_t *
+unexpand_author(apr_pool_t *pool)
+{
+ SVN_ERR(substitute_and_verify
+ ("unexpand_author", "\n", NULL, 0, NULL, NULL, "jrandom", NULL, 0, pool));
+
+ SVN_ERR(substitute_and_verify
+ ("unexpand_author", "\r\n", NULL, 0, NULL, NULL, "jrandom", NULL, 0, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+unexpand_date(apr_pool_t *pool)
+{
+ SVN_ERR(substitute_and_verify
+ ("unexpand_date", "\n", NULL, 0,
+ NULL, "Wed Jan 9 07:49:05 2002", NULL, NULL, 0, pool));
+
+ SVN_ERR(substitute_and_verify
+ ("unexpand_date", "\r\n", NULL, 0,
+ NULL, "Wed Jan 9 07:49:05 2002", NULL, NULL, 0, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+unexpand_author_date(apr_pool_t *pool)
+{
+ SVN_ERR(substitute_and_verify
+ ("unexpand_author_date", "\n", NULL, 0,
+ NULL, "Wed Jan 9 07:49:05 2002", "jrandom", NULL, 0, pool));
+
+ SVN_ERR(substitute_and_verify
+ ("unexpand_author_date", "\r\n", NULL, 0,
+ NULL, "Wed Jan 9 07:49:05 2002", "jrandom", NULL, 0, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+unexpand_author_rev(apr_pool_t *pool)
+{
+ SVN_ERR(substitute_and_verify
+ ("unexpand_author_rev", "\n", NULL, 0,
+ "1729", NULL, "jrandom", NULL, 0, pool));
+
+ SVN_ERR(substitute_and_verify
+ ("unexpand_author_rev", "\r\n", NULL, 0,
+ "1729", NULL, "jrandom", NULL, 0, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+unexpand_rev(apr_pool_t *pool)
+{
+ SVN_ERR(substitute_and_verify
+ ("unexpand_rev", "\n", NULL, 0,
+ "1729", NULL, NULL, NULL, 0, pool));
+
+ SVN_ERR(substitute_and_verify
+ ("unexpand_rev", "\r\n", NULL, 0,
+ "1729", NULL, NULL, NULL, 0, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+unexpand_rev_url(apr_pool_t *pool)
+{
+ SVN_ERR(substitute_and_verify
+ ("unexpand_rev_url", "\n", NULL, 0,
+ "1729", NULL, NULL, "http://subversion.tigris.org", 0, pool));
+
+ SVN_ERR(substitute_and_verify
+ ("unexpand_rev_url", "\r\n", NULL, 0,
+ "1729", NULL, NULL, "http://subversion.tigris.org", 0, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+unexpand_author_date_rev_url(apr_pool_t *pool)
+{
+ SVN_ERR(substitute_and_verify
+ ("unexpand_author_date_rev_url", "\n", NULL, 0,
+ "1729",
+ "Wed Jan 9 07:49:05 2002",
+ "jrandom",
+ "http://subversion.tigris.org",
+ 1, pool));
+
+ SVN_ERR(substitute_and_verify
+ ("unexpand_author_date_rev_url", "\r\n", NULL, 0,
+ "1729",
+ "Wed Jan 9 07:49:05 2002",
+ "jrandom",
+ "http://subversion.tigris.org",
+ 1, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/** Keyword unexpansion and EOL conversion together. **/
+
+static svn_error_t *
+lf_to_crlf_unexpand_author(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("lf_to_crlf_unexpand_author", "\n", "\r\n", 0,
+ NULL, NULL, "jrandom", NULL, 0, pool);
+}
+
+
+static svn_error_t *
+mixed_to_lf_unexpand_author_date(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("mixed_to_lf_unexpand_author_date", NULL, "\n", 1,
+ NULL, "Wed Jan 9 07:49:05 2002", "jrandom", NULL, 0, pool);
+}
+
+
+static svn_error_t *
+crlf_to_cr_unexpand_author_rev(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("crlf_to_cr_unexpand_author_rev", "\r\n", "\r", 0,
+ "1729", NULL, "jrandom", NULL, 0, pool);
+}
+
+
+static svn_error_t *
+cr_to_crlf_unexpand_rev(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("cr_to_crlf_unexpand_rev", "\r", "\r\n", 0,
+ "1729", NULL, NULL, NULL, 0, pool);
+}
+
+
+static svn_error_t *
+cr_to_crlf_unexpand_rev_url(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("cr_to_crlf_unexpand_rev_url", "\r", "\r\n", 0,
+ "1729", NULL, NULL, "http://subversion.tigris.org", 0, pool);
+}
+
+
+static svn_error_t *
+mixed_to_crlf_unexpand_author_date_rev_url(apr_pool_t *pool)
+{
+ return substitute_and_verify
+ ("mixed_to_crlf_unexpand_author_date_rev_url", NULL, "\r\n", 1,
+ "1729",
+ "Wed Jan 9 07:49:05 2002",
+ "jrandom",
+ "http://subversion.tigris.org",
+ 0,
+ pool);
+}
+
+
+
+/* The test table. */
+
+static int max_threads = 7;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ /* The no-op conversion. */
+ SVN_TEST_PASS2(noop,
+ "no conversions"),
+ /* Conversions resulting in crlf, no keywords involved. */
+ SVN_TEST_PASS2(crlf_to_crlf,
+ "convert CRLF to CRLF"),
+ SVN_TEST_PASS2(lf_to_crlf,
+ "convert LF to CRLF"),
+ SVN_TEST_PASS2(cr_to_crlf,
+ "convert CR to CRLF"),
+ SVN_TEST_PASS2(mixed_to_crlf,
+ "convert mixed line endings to CRLF"),
+ /* Conversions resulting in lf, no keywords involved. */
+ SVN_TEST_PASS2(lf_to_lf,
+ "convert LF to LF"),
+ SVN_TEST_PASS2(crlf_to_lf,
+ "convert CRLF to LF"),
+ SVN_TEST_PASS2(cr_to_lf,
+ "convert CR to LF"),
+ SVN_TEST_PASS2(mixed_to_lf,
+ "convert mixed line endings to LF"),
+ /* Conversions resulting in cr, no keywords involved. */
+ SVN_TEST_PASS2(crlf_to_cr,
+ "convert CRLF to CR"),
+ SVN_TEST_PASS2(lf_to_cr,
+ "convert LF to CR"),
+ SVN_TEST_PASS2(cr_to_cr,
+ "convert CR to CR"),
+ SVN_TEST_PASS2(mixed_to_cr,
+ "convert mixed line endings to CR"),
+ /* Random eol stuff. */
+ SVN_TEST_PASS2(mixed_no_repair,
+ "keep mixed line endings without repair flag"),
+ /* Keyword expansion alone, no eol conversion involved. */
+ SVN_TEST_PASS2(expand_author,
+ "expand author"),
+ SVN_TEST_PASS2(expand_date,
+ "expand date"),
+ SVN_TEST_PASS2(expand_author_date,
+ "expand author and date"),
+ SVN_TEST_PASS2(expand_author_rev,
+ "expand author and rev"),
+ SVN_TEST_PASS2(expand_rev,
+ "expand rev"),
+ SVN_TEST_PASS2(expand_rev_url,
+ "expand rev and url"),
+ SVN_TEST_PASS2(expand_author_date_rev_url,
+ "expand author, date, rev, and url"),
+ /* Keyword expansion and eol conversion together. */
+ SVN_TEST_PASS2(lf_to_crlf_expand_author,
+ "lf_to_crlf; expand author"),
+ SVN_TEST_PASS2(mixed_to_lf_expand_author_date,
+ "mixed_to_lf; expand author and date"),
+ SVN_TEST_PASS2(crlf_to_cr_expand_author_rev,
+ "crlf_to_cr; expand author and rev"),
+ SVN_TEST_PASS2(cr_to_crlf_expand_rev,
+ "cr_to_crlf; expand rev"),
+ SVN_TEST_PASS2(cr_to_crlf_expand_rev_url,
+ "cr_to_crlf; expand rev and url"),
+ SVN_TEST_PASS2(mixed_to_crlf_expand_author_date_rev_url,
+ "mixed_to_crlf; expand author, date, rev, and url"),
+ /* Keyword unexpansion alone, no eol conversion involved. */
+ SVN_TEST_PASS2(unexpand_author,
+ "unexpand author"),
+ SVN_TEST_PASS2(unexpand_date,
+ "unexpand date"),
+ SVN_TEST_PASS2(unexpand_author_date,
+ "unexpand author and date"),
+ SVN_TEST_PASS2(unexpand_author_rev,
+ "unexpand author and rev"),
+ SVN_TEST_PASS2(unexpand_rev,
+ "unexpand rev"),
+ SVN_TEST_PASS2(unexpand_rev_url,
+ "unexpand rev and url"),
+ SVN_TEST_PASS2(unexpand_author_date_rev_url,
+ "unexpand author, date, rev, and url"),
+ /* Keyword unexpansion and eol conversion together. */
+ SVN_TEST_PASS2(lf_to_crlf_unexpand_author,
+ "lf_to_crlf; unexpand author"),
+ SVN_TEST_PASS2(mixed_to_lf_unexpand_author_date,
+ "mixed_to_lf; unexpand author and date"),
+ SVN_TEST_PASS2(crlf_to_cr_unexpand_author_rev,
+ "crlf_to_cr; unexpand author and rev"),
+ SVN_TEST_PASS2(cr_to_crlf_unexpand_rev,
+ "cr_to_crlf; unexpand rev"),
+ SVN_TEST_PASS2(cr_to_crlf_unexpand_rev_url,
+ "cr_to_crlf; unexpand rev and url"),
+ SVN_TEST_PASS2(mixed_to_crlf_unexpand_author_date_rev_url,
+ "mixed_to_crlf; unexpand author, date, rev, url"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/utf-test.c b/subversion/tests/libsvn_subr/utf-test.c
new file mode 100644
index 0000000..8e5fb15
--- /dev/null
+++ b/subversion/tests/libsvn_subr/utf-test.c
@@ -0,0 +1,1034 @@
+/*
+ * utf-test.c -- test the utf functions
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "../svn_test.h"
+#include "svn_utf.h"
+#include "svn_pools.h"
+
+#include "private/svn_string_private.h"
+#include "private/svn_utf_private.h"
+
+/* Random number seed. Yes, it's global, just pretend you can't see it. */
+static apr_uint32_t diff_diff3_seed;
+
+/* Return the value of the current random number seed, initializing it if
+ necessary */
+static apr_uint32_t
+seed_val(void)
+{
+ static svn_boolean_t first = TRUE;
+
+ if (first)
+ {
+ diff_diff3_seed = (apr_uint32_t) apr_time_now();
+ first = FALSE;
+ }
+
+ return diff_diff3_seed;
+}
+
+/* Return a random number N such that MIN_VAL <= N <= MAX_VAL */
+static apr_uint32_t
+range_rand(apr_uint32_t min_val,
+ apr_uint32_t max_val)
+{
+ apr_uint64_t diff = max_val - min_val;
+ apr_uint64_t val = diff * svn_test_rand(&diff_diff3_seed);
+ val /= 0xffffffff;
+ return min_val + (apr_uint32_t) val;
+}
+
+/* Explicit tests of various valid/invalid sequences */
+static svn_error_t *
+utf_validate(apr_pool_t *pool)
+{
+ struct data {
+ svn_boolean_t valid;
+ char string[20];
+ } tests[] = {
+ {TRUE, {'a', 'b', '\0'}},
+ {FALSE, {'a', 'b', '\x80', '\0'}},
+
+ {FALSE, {'a', 'b', '\xC0', '\0'}},
+ {FALSE, {'a', 'b', '\xC0', '\x81', 'x', 'y', '\0'}},
+
+ {TRUE, {'a', 'b', '\xC5', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xC5', '\xC0', 'x', 'y', '\0'}},
+
+ {FALSE, {'a', 'b', '\xE0', '\0'}},
+ {FALSE, {'a', 'b', '\xE0', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xE0', '\xA0', '\0'}},
+ {FALSE, {'a', 'b', '\xE0', '\xA0', 'x', 'y', '\0'}},
+ {TRUE, {'a', 'b', '\xE0', '\xA0', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xE0', '\x9F', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xE0', '\xCF', '\x81', 'x', 'y', '\0'}},
+
+ {FALSE, {'a', 'b', '\xE5', '\0'}},
+ {FALSE, {'a', 'b', '\xE5', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xE5', '\x81', '\0'}},
+ {FALSE, {'a', 'b', '\xE5', '\x81', 'x', 'y', '\0'}},
+ {TRUE, {'a', 'b', '\xE5', '\x81', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xE5', '\xE1', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xE5', '\x81', '\xE1', 'x', 'y', '\0'}},
+
+ {FALSE, {'a', 'b', '\xED', '\0'}},
+ {FALSE, {'a', 'b', '\xED', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xED', '\x81', '\0'}},
+ {FALSE, {'a', 'b', '\xED', '\x81', 'x', 'y', '\0'}},
+ {TRUE, {'a', 'b', '\xED', '\x81', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xED', '\xA0', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xED', '\x81', '\xC1', 'x', 'y', '\0'}},
+
+ {FALSE, {'a', 'b', '\xEE', '\0'}},
+ {FALSE, {'a', 'b', '\xEE', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xEE', '\x81', '\0'}},
+ {FALSE, {'a', 'b', '\xEE', '\x81', 'x', 'y', '\0'}},
+ {TRUE, {'a', 'b', '\xEE', '\x81', '\x81', 'x', 'y', '\0'}},
+ {TRUE, {'a', 'b', '\xEE', '\xA0', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xEE', '\xC0', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xEE', '\x81', '\xC1', 'x', 'y', '\0'}},
+
+ {FALSE, {'a', 'b', '\xF0', '\0'}},
+ {FALSE, {'a', 'b', '\xF0', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xF0', '\x91', '\0'}},
+ {FALSE, {'a', 'b', '\xF0', '\x91', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xF0', '\x91', '\x81', '\0'}},
+ {FALSE, {'a', 'b', '\xF0', '\x91', '\x81', 'x', 'y', '\0'}},
+ {TRUE, {'a', 'b', '\xF0', '\x91', '\x81', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xF0', '\x81', '\x81', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xF0', '\xC1', '\x81', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xF0', '\x91', '\xC1', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xF0', '\x91', '\x81', '\xC1', 'x', 'y', '\0'}},
+
+ {FALSE, {'a', 'b', '\xF2', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xF2', '\x91', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xF2', '\x91', '\x81', 'x', 'y', '\0'}},
+ {TRUE, {'a', 'b', '\xF2', '\x91', '\x81', '\x81', 'x', 'y', '\0'}},
+ {TRUE, {'a', 'b', '\xF2', '\x81', '\x81', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xF2', '\xC1', '\x81', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xF2', '\x91', '\xC1', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xF2', '\x91', '\x81', '\xC1', 'x', 'y', '\0'}},
+
+ {FALSE, {'a', 'b', '\xF4', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xF4', '\x91', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xF4', '\x91', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xF4', '\x91', '\x81', '\x81', 'x', 'y', '\0'}},
+ {TRUE, {'a', 'b', '\xF4', '\x81', '\x81', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xF4', '\xC1', '\x81', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xF4', '\x91', '\xC1', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xF4', '\x91', '\x81', '\xC1', 'x', 'y', '\0'}},
+
+ {FALSE, {'a', 'b', '\xF5', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xF5', '\x81', 'x', 'y', '\0'}},
+
+ {TRUE, {'a', 'b', '\xF4', '\x81', '\x81', '\x81', 'x', 'y',
+ 'a', 'b', '\xF2', '\x91', '\x81', '\x81', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xF4', '\x81', '\x81', '\x81', 'x', 'y',
+ 'a', 'b', '\xF2', '\x91', '\x81', '\xC1', 'x', 'y', '\0'}},
+ {FALSE, {'a', 'b', '\xF4', '\x81', '\x81', '\x81', 'x', 'y',
+ 'a', 'b', '\xF2', '\x91', '\x81', 'x', 'y', '\0'}},
+
+ {-1},
+ };
+ int i = 0;
+
+ while (tests[i].valid != -1)
+ {
+ const char *last = svn_utf__last_valid(tests[i].string,
+ strlen(tests[i].string));
+ apr_size_t len = strlen(tests[i].string);
+
+ if ((svn_utf__cstring_is_valid(tests[i].string) != tests[i].valid)
+ ||
+ (svn_utf__is_valid(tests[i].string, len) != tests[i].valid))
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL, "is_valid test %d failed", i);
+
+ if (!svn_utf__is_valid(tests[i].string, last - tests[i].string)
+ ||
+ (tests[i].valid && *last))
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL, "last_valid test %d failed", i);
+
+ ++i;
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Compare the two different implementations using random data. */
+static svn_error_t *
+utf_validate2(apr_pool_t *pool)
+{
+ int i;
+
+ seed_val();
+
+ /* We want enough iterations so that most runs get both valid and invalid
+ strings. We also want enough iterations such that a deliberate error
+ in one of the implementations will trigger a failure. By experiment
+ the second requirement requires a much larger number of iterations
+ that the first. */
+ for (i = 0; i < 100000; ++i)
+ {
+ unsigned int j;
+ char str[64];
+ apr_size_t len;
+
+ /* A random string; experiment shows that it's occasionally (less
+ than 1%) valid but usually invalid. */
+ for (j = 0; j < sizeof(str) - 1; ++j)
+ str[j] = (char)range_rand(0, 255);
+ str[sizeof(str) - 1] = 0;
+ len = strlen(str);
+
+ if (svn_utf__last_valid(str, len) != svn_utf__last_valid2(str, len))
+ {
+ /* Duplicate calls for easy debugging */
+ svn_utf__last_valid(str, len);
+ svn_utf__last_valid2(str, len);
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL, "is_valid2 test %d failed", i);
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Test conversion from different codepages to utf8. */
+static svn_error_t *
+test_utf_cstring_to_utf8_ex2(apr_pool_t *pool)
+{
+ apr_size_t i;
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ struct data {
+ const char *string;
+ const char *expected_result;
+ const char *from_page;
+ } tests[] = {
+ {"ascii text\n", "ascii text\n", "unexistent-page"},
+ {"Edelwei\xdf", "Edelwei\xc3\x9f", "ISO-8859-1"}
+ };
+
+ for (i = 0; i < sizeof(tests) / sizeof(tests[0]); i++)
+ {
+ const char *dest;
+
+ svn_pool_clear(subpool);
+
+ SVN_ERR(svn_utf_cstring_to_utf8_ex2(&dest, tests[i].string,
+ tests[i].from_page, pool));
+
+ if (strcmp(dest, tests[i].expected_result))
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_utf_cstring_to_utf8_ex2 ('%s', '%s') returned ('%s') "
+ "instead of ('%s')",
+ tests[i].string, tests[i].from_page,
+ dest,
+ tests[i].expected_result);
+ }
+ }
+ svn_pool_destroy(subpool);
+ return SVN_NO_ERROR;
+}
+
+/* Test conversion to different codepages from utf8. */
+static svn_error_t *
+test_utf_cstring_from_utf8_ex2(apr_pool_t *pool)
+{
+ apr_size_t i;
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ struct data {
+ const char *string;
+ const char *expected_result;
+ const char *to_page;
+ } tests[] = {
+ {"ascii text\n", "ascii text\n", "unexistent-page"},
+ {"Edelwei\xc3\x9f", "Edelwei\xdf", "ISO-8859-1"}
+ };
+
+ for (i = 0; i < sizeof(tests) / sizeof(tests[0]); i++)
+ {
+ const char *dest;
+
+ svn_pool_clear(subpool);
+
+ SVN_ERR(svn_utf_cstring_from_utf8_ex2(&dest, tests[i].string,
+ tests[i].to_page, pool));
+
+ if (strcmp(dest, tests[i].expected_result))
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "svn_utf_cstring_from_utf8_ex2 ('%s', '%s') returned ('%s') "
+ "instead of ('%s')",
+ tests[i].string, tests[i].to_page,
+ dest,
+ tests[i].expected_result);
+ }
+ }
+ svn_pool_destroy(subpool);
+ return SVN_NO_ERROR;
+}
+
+/* Test normalization-independent UTF-8 string comparison */
+static svn_error_t *
+test_utf_collated_compare(apr_pool_t *pool)
+{
+ /* Normalized: NFC */
+ static const char nfc[] =
+ "\xe1\xb9\xa8" /* S with dot above and below */
+ "\xc5\xaf" /* u with ring */
+ "\xe1\xb8\x87" /* b with macron below */
+ "\xe1\xb9\xbd" /* v with tilde */
+ "\xe1\xb8\x9d" /* e with breve and cedilla */
+ "\xc8\x91" /* r with double grave */
+ "\xc5\xa1" /* s with caron */
+ "\xe1\xb8\xaf" /* i with diaeresis and acute */
+ "\xe1\xbb\x9d" /* o with grave and hook */
+ "\xe1\xb9\x8b"; /* n with circumflex below */
+
+ /* Normalized: NFD */
+ static const char nfd[] =
+ "S\xcc\xa3\xcc\x87" /* S with dot above and below */
+ "u\xcc\x8a" /* u with ring */
+ "b\xcc\xb1" /* b with macron below */
+ "v\xcc\x83" /* v with tilde */
+ "e\xcc\xa7\xcc\x86" /* e with breve and cedilla */
+ "r\xcc\x8f" /* r with double grave */
+ "s\xcc\x8c" /* s with caron */
+ "i\xcc\x88\xcc\x81" /* i with diaeresis and acute */
+ "o\xcc\x9b\xcc\x80" /* o with grave and hook */
+ "n\xcc\xad"; /* n with circumflex below */
+
+ /* Mixed, denormalized */
+ static const char mixup[] =
+ "S\xcc\x87\xcc\xa3" /* S with dot above and below */
+ "\xc5\xaf" /* u with ring */
+ "b\xcc\xb1" /* b with macron below */
+ "\xe1\xb9\xbd" /* v with tilde */
+ "e\xcc\xa7\xcc\x86" /* e with breve and cedilla */
+ "\xc8\x91" /* r with double grave */
+ "s\xcc\x8c" /* s with caron */
+ "\xe1\xb8\xaf" /* i with diaeresis and acute */
+ "o\xcc\x80\xcc\x9b" /* o with grave and hook */
+ "\xe1\xb9\x8b"; /* n with circumflex below */
+
+ static const char longer[] =
+ "\xe1\xb9\xa8" /* S with dot above and below */
+ "\xc5\xaf" /* u with ring */
+ "\xe1\xb8\x87" /* b with macron below */
+ "\xe1\xb9\xbd" /* v with tilde */
+ "\xe1\xb8\x9d" /* e with breve and cedilla */
+ "\xc8\x91" /* r with double grave */
+ "\xc5\xa1" /* s with caron */
+ "\xe1\xb8\xaf" /* i with diaeresis and acute */
+ "\xe1\xbb\x9d" /* o with grave and hook */
+ "\xe1\xb9\x8b" /* n with circumflex below */
+ "X";
+
+ static const char shorter[] =
+ "\xe1\xb9\xa8" /* S with dot above and below */
+ "\xc5\xaf" /* u with ring */
+ "\xe1\xb8\x87" /* b with macron below */
+ "\xe1\xb9\xbd" /* v with tilde */
+ "\xe1\xb8\x9d" /* e with breve and cedilla */
+ "\xc8\x91" /* r with double grave */
+ "\xc5\xa1" /* s with caron */
+ "\xe1\xb8\xaf" /* i with diaeresis and acute */
+ "\xe1\xbb\x9d"; /* o with grave and hook */
+
+ static const char lowcase[] =
+ "s\xcc\x87\xcc\xa3" /* s with dot above and below */
+ "\xc5\xaf" /* u with ring */
+ "b\xcc\xb1" /* b with macron below */
+ "\xe1\xb9\xbd" /* v with tilde */
+ "e\xcc\xa7\xcc\x86" /* e with breve and cedilla */
+ "\xc8\x91" /* r with double grave */
+ "s\xcc\x8c" /* s with caron */
+ "\xe1\xb8\xaf" /* i with diaeresis and acute */
+ "o\xcc\x80\xcc\x9b" /* o with grave and hook */
+ "\xe1\xb9\x8b"; /* n with circumflex below */
+
+ static const struct utfcmp_test_t {
+ const char *stra;
+ char op;
+ const char *strb;
+ const char *taga;
+ const char *tagb;
+ } utfcmp_tests[] = {
+ /* Empty key */
+ {"", '=', "", "empty", "empty"},
+ {"", '<', "a", "empty", "nonempty"},
+ {"a", '>', "", "nonempty", "empty"},
+
+ /* Deterministic ordering */
+ {"a", '<', "b", "a", "b"},
+ {"b", '<', "c", "b", "c"},
+ {"a", '<', "c", "a", "c"},
+
+ /* Normalized equality */
+ {nfc, '=', nfd, "nfc", "nfd"},
+ {nfd, '=', nfc, "nfd", "nfc"},
+ {nfc, '=', mixup, "nfc", "mixup"},
+ {nfd, '=', mixup, "nfd", "mixup"},
+ {mixup, '=', nfd, "mixup", "nfd"},
+ {mixup, '=', nfc, "mixup", "nfc"},
+
+ /* Key length */
+ {nfc, '<', longer, "nfc", "longer"},
+ {longer, '>', nfc, "longer", "nfc"},
+ {nfd, '>', shorter, "nfd", "shorter"},
+ {shorter, '<', nfd, "shorter", "nfd"},
+ {mixup, '<', lowcase, "mixup", "lowcase"},
+ {lowcase, '>', mixup, "lowcase", "mixup"},
+
+ {NULL, 0, NULL, NULL, NULL}
+ };
+
+
+ const struct utfcmp_test_t *ut;
+ svn_membuf_t bufa, bufb;
+ svn_membuf__create(&bufa, 0, pool);
+ svn_membuf__create(&bufb, 0, pool);
+
+ srand(111);
+ for (ut = utfcmp_tests; ut->stra; ++ut)
+ {
+ const svn_boolean_t implicit_size = (rand() % 17) & 1;
+ const apr_size_t lena = (implicit_size
+ ? SVN_UTF__UNKNOWN_LENGTH : strlen(ut->stra));
+ const apr_size_t lenb = (implicit_size
+ ? SVN_UTF__UNKNOWN_LENGTH : strlen(ut->strb));
+ int result;
+
+ SVN_ERR(svn_utf__normcmp(&result,
+ ut->stra, lena, ut->strb, lenb,
+ &bufa, &bufb));
+
+ /* UCS-4 debugging dump of the decomposed strings
+ {
+ const apr_int32_t *const ucsbufa = bufa.data;
+ const apr_int32_t *const ucsbufb = bufb.data;
+ apr_size_t i;
+
+ printf("(%c)%7s %c %s\n", ut->op,
+ ut->taga, (!result ? '=' : (result < 0 ? '<' : '>')), ut->tagb);
+
+ for (i = 0; i < bufa.size || i < bufb.size; ++i)
+ {
+ if (i < bufa.size && i < bufb.size)
+ printf(" U+%04X U+%04X\n", ucsbufa[i], ucsbufb[i]);
+ else if (i < bufa.size)
+ printf(" U+%04X\n", ucsbufa[i]);
+ else
+ printf(" U+%04X\n", ucsbufb[i]);
+ }
+ }
+ */
+
+ if (('=' == ut->op && 0 != result)
+ || ('<' == ut->op && 0 <= result)
+ || ('>' == ut->op && 0 >= result))
+ {
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, NULL,
+ "Ut->Op '%s' %c '%s' but '%s' %c '%s'",
+ ut->taga, ut->op, ut->tagb,
+ ut->taga, (!result ? '=' : (result < 0 ? '<' : '>')), ut->tagb);
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+
+static svn_error_t *
+test_utf_pattern_match(apr_pool_t *pool)
+{
+ static const struct glob_test_t {
+ svn_boolean_t sql_like;
+ svn_boolean_t matches;
+ const char *pattern;
+ const char *string;
+ const char *escape;
+ } glob_tests[] = {
+#define LIKE_MATCH TRUE, TRUE
+#define LIKE_FAIL TRUE, FALSE
+#define GLOB_MATCH FALSE, TRUE
+#define GLOB_FAIL FALSE, FALSE
+
+ {LIKE_FAIL, "", "test", NULL},
+ {GLOB_FAIL, "", "test", NULL},
+ {LIKE_FAIL, "", "%", NULL},
+ {GLOB_FAIL, "", "*", NULL},
+ {LIKE_FAIL, "test", "%", NULL},
+ {GLOB_FAIL, "test", "*", NULL},
+ {LIKE_MATCH, "test", "test", NULL},
+ {GLOB_MATCH, "test", "test", NULL},
+ {LIKE_MATCH, "t\xe1\xb8\x9dst", "te\xcc\xa7\xcc\x86st", NULL},
+ {GLOB_MATCH, "te\xcc\xa7\xcc\x86st", "t\xe1\xb8\x9dst", NULL},
+
+ {LIKE_FAIL, "test", "test", "\xe1\xb8\x9d"}, /* escape char not ascii */
+ {LIKE_FAIL, "test", "test", ""}, /* empty escape string */
+
+ {LIKE_MATCH, "te#st", "test", "#"},
+ {LIKE_FAIL, "te#st", "test", NULL},
+ {GLOB_MATCH, "te\\st", "test", NULL},
+ {LIKE_MATCH, "te##st", "te#st", "#"},
+ {LIKE_FAIL, "te##st", "te#st", NULL},
+ {GLOB_MATCH, "te\\\\st", "te\\st", NULL},
+ {GLOB_FAIL, "te\\\\st", "te\\st", "\\"}, /* escape char with glob */
+ {LIKE_FAIL, "te#%t", "te%t", NULL},
+ {LIKE_MATCH, "te#%t", "te%t", "#"},
+ {GLOB_MATCH, "te\\*t", "te*t", NULL},
+ {LIKE_FAIL, "te#%t", "test", NULL},
+ {GLOB_FAIL, "te\\*t", "test", NULL},
+ {LIKE_FAIL, "te#_t", "te_t", NULL},
+ {LIKE_MATCH, "te#_t", "te_t", "#"},
+ {GLOB_MATCH, "te\\?t", "te?t", NULL},
+ {LIKE_FAIL, "te#_t", "test", NULL},
+ {LIKE_FAIL, "te#_t", "test", "#"},
+ {GLOB_FAIL, "te\\?t", "test", NULL},
+
+ {LIKE_MATCH, "_est", "test", NULL},
+ {GLOB_MATCH, "?est", "test", NULL},
+ {LIKE_MATCH, "te_t", "test", NULL},
+ {GLOB_MATCH, "te?t", "test", NULL},
+ {LIKE_MATCH, "tes_", "test", NULL},
+ {GLOB_MATCH, "tes?", "test", NULL},
+ {LIKE_FAIL, "test_", "test", NULL},
+ {GLOB_FAIL, "test?", "test", NULL},
+
+ {LIKE_MATCH, "[s%n]", "[subversion]", NULL},
+ {GLOB_FAIL, "[s*n]", "[subversion]", NULL},
+ {LIKE_MATCH, "#[s%n]", "[subversion]", "#"},
+ {GLOB_MATCH, "\\[s*n]", "[subversion]", NULL},
+
+ {GLOB_MATCH, ".[\\-\\t]", ".t", NULL},
+ {GLOB_MATCH, "test*?*[a-z]*", "testgoop", NULL},
+ {GLOB_MATCH, "te[^x]t", "test", NULL},
+ {GLOB_MATCH, "te[^abc]t", "test", NULL},
+ {GLOB_MATCH, "te[^x]t", "test", NULL},
+ {GLOB_MATCH, "te[!x]t", "test", NULL},
+ {GLOB_FAIL, "te[^x]t", "text", NULL},
+ {GLOB_FAIL, "te[^\\x]t", "text", NULL},
+ {GLOB_FAIL, "te[^x\\", "text", NULL},
+ {GLOB_FAIL, "te[/]t", "text", NULL},
+ {GLOB_MATCH, "te[r-t]t", "test", NULL},
+ {GLOB_MATCH, "te[r-Tz]t", "tezt", NULL},
+ {GLOB_FAIL, "te[R-T]t", "tent", NULL},
+/* {GLOB_MATCH, "tes[]t]", "test", NULL}, */
+ {GLOB_MATCH, "tes[t-]", "test", NULL},
+ {GLOB_MATCH, "tes[t-]]", "test]", NULL},
+ {GLOB_FAIL, "tes[t-]]", "test", NULL},
+ {GLOB_FAIL, "tes[u-]", "test", NULL},
+ {GLOB_FAIL, "tes[t-]", "tes[t-]", NULL},
+ {GLOB_MATCH, "test[/-/]", "test/", NULL},
+ {GLOB_MATCH, "test[\\/-/]", "test/", NULL},
+ {GLOB_MATCH, "test[/-\\/]", "test/", NULL},
+
+#undef LIKE_MATCH
+#undef LIKE_FAIL
+#undef GLOB_MATCH
+#undef GLOB_FAIL
+
+ {FALSE, FALSE, NULL, NULL, NULL}
+ };
+
+ const struct glob_test_t *gt;
+ svn_membuf_t bufa, bufb, bufc;
+ svn_membuf__create(&bufa, 0, pool);
+ svn_membuf__create(&bufb, 0, pool);
+ svn_membuf__create(&bufc, 0, pool);
+
+ srand(79);
+ for (gt = glob_tests; gt->pattern; ++gt)
+ {
+ const svn_boolean_t implicit_size = (rand() % 13) & 1;
+ const apr_size_t lenptn = (implicit_size
+ ? SVN_UTF__UNKNOWN_LENGTH
+ : strlen(gt->pattern));
+ const apr_size_t lenstr = (implicit_size
+ ? SVN_UTF__UNKNOWN_LENGTH
+ : strlen(gt->string));
+ const apr_size_t lenesc = (implicit_size
+ ? SVN_UTF__UNKNOWN_LENGTH
+ : (gt->escape ? strlen(gt->escape) : 0));
+ svn_boolean_t match;
+ svn_error_t *err;
+
+
+ err = svn_utf__glob(&match,
+ gt->pattern, lenptn,
+ gt->string, lenstr,
+ gt->escape, lenesc,
+ gt->sql_like, &bufa, &bufb, &bufc);
+
+ if (!gt->sql_like && gt->escape && !err)
+ return svn_error_create
+ (SVN_ERR_TEST_FAILED, err, "Failed to detect GLOB ESCAPE");
+
+ if ((err && gt->matches)
+ || (!err && !match != !gt->matches))
+ {
+ if (gt->sql_like)
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, err,
+ "Wrong result: %s'%s' LIKE '%s'%s%s%s%s",
+ (gt->matches ? "NOT " : ""), gt->string, gt->pattern,
+ (gt->escape ? " ESCAPE " : ""), (gt->escape ? "'" : ""),
+ (gt->escape ? gt->escape : ""), (gt->escape ? "'" : ""));
+ else
+ return svn_error_createf
+ (SVN_ERR_TEST_FAILED, err, "Wrong result: %s%s GLOB %s",
+ (gt->matches ? "NOT " : ""), gt->string, gt->pattern);
+ }
+
+ if (err)
+ svn_error_clear(err);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_utf_fuzzy_escape(apr_pool_t *pool)
+{
+
+ /* Accented latin, mixed normalization */
+ static const char mixup[] =
+ "S\xcc\x87\xcc\xa3" /* S with dot above and below */
+ "\xc5\xaf" /* u with ring */
+ "b\xcc\xb1" /* b with macron below */
+ "\xe1\xb9\xbd" /* v with tilde */
+ "e\xcc\xa7\xcc\x86" /* e with breve and cedilla */
+ "\xc8\x91" /* r with double grave */
+ "s\xcc\x8c" /* s with caron */
+ "\xe1\xb8\xaf" /* i with diaeresis and acute */
+ "o\xcc\x80\xcc\x9b" /* o with grave and hook */
+ "\xe1\xb9\x8b"; /* n with circumflex below */
+
+ /* As above, but latin lowercase 'o' replaced with Greek 'omicron' */
+ static const char greekish[] =
+ "S\xcc\x87\xcc\xa3" /* S with dot above and below */
+ "\xc5\xaf" /* u with ring */
+ "b\xcc\xb1" /* b with macron below */
+ "\xe1\xb9\xbd" /* v with tilde */
+ "e\xcc\xa7\xcc\x86" /* e with breve and cedilla */
+ "\xc8\x91" /* r with double grave */
+ "s\xcc\x8c" /* s with caron */
+ "\xe1\xb8\xaf" /* i with diaeresis and acute */
+ "\xce\xbf\xcc\x80\xcc\x9b" /* omicron with grave and hook */
+ "\xe1\xb9\x8b"; /* n with circumflex below */
+
+ /* More interesting invalid characters. */
+ static const char invalid[] =
+ "Not Unicode: \xef\xb7\x91;" /* U+FDD1 */
+ "Out of range: \xf4\x90\x80\x81;" /* U+110001 */
+ "Not UTF-8: \xe6;"
+ "Null byte: \0;";
+
+ const char *fuzzy;
+
+ fuzzy = svn_utf__fuzzy_escape(mixup, strlen(mixup), pool);
+ SVN_TEST_ASSERT(0 == strcmp(fuzzy, "Subversion"));
+
+ fuzzy = svn_utf__fuzzy_escape(greekish, strlen(greekish), pool);
+ SVN_TEST_ASSERT(0 == strcmp(fuzzy, "Subversi{U+03BF}n"));
+
+ fuzzy = svn_utf__fuzzy_escape(invalid, sizeof(invalid) - 1, pool);
+
+ /* utf8proc 1.1.15 produces {U?FDD1} while 2.x produces {U+FDD1} */
+ SVN_TEST_ASSERT(0 == strcmp(fuzzy,
+ "Not Unicode: {U?FDD1};"
+ "Out of range: ?\\F4?\\90?\\80?\\81;"
+ "Not UTF-8: ?\\E6;"
+ "Null byte: \\0;")
+ ||
+ 0 == strcmp(fuzzy,
+ "Not Unicode: {U+FDD1};"
+ "Out of range: ?\\F4?\\90?\\80?\\81;"
+ "Not UTF-8: ?\\E6;"
+ "Null byte: \\0;"));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_utf_is_normalized(apr_pool_t *pool)
+{
+ /* Normalized: NFC */
+ static const char nfc[] =
+ "\xe1\xb9\xa8" /* S with dot above and below */
+ "\xc5\xaf" /* u with ring */
+ "\xe1\xb8\x87" /* b with macron below */
+ "\xe1\xb9\xbd" /* v with tilde */
+ "\xe1\xb8\x9d" /* e with breve and cedilla */
+ "\xc8\x91" /* r with double grave */
+ "\xc5\xa1" /* s with caron */
+ "\xe1\xb8\xaf" /* i with diaeresis and acute */
+ "\xe1\xbb\x9d" /* o with grave and hook */
+ "\xe1\xb9\x8b"; /* n with circumflex below */
+
+ /* Normalized: NFD */
+ static const char nfd[] =
+ "S\xcc\xa3\xcc\x87" /* S with dot above and below */
+ "u\xcc\x8a" /* u with ring */
+ "b\xcc\xb1" /* b with macron below */
+ "v\xcc\x83" /* v with tilde */
+ "e\xcc\xa7\xcc\x86" /* e with breve and cedilla */
+ "r\xcc\x8f" /* r with double grave */
+ "s\xcc\x8c" /* s with caron */
+ "i\xcc\x88\xcc\x81" /* i with diaeresis and acute */
+ "o\xcc\x9b\xcc\x80" /* o with grave and hook */
+ "n\xcc\xad"; /* n with circumflex below */
+
+ /* Mixed, denormalized */
+ static const char mixup[] =
+ "S\xcc\x87\xcc\xa3" /* S with dot above and below */
+ "\xc5\xaf" /* u with ring */
+ "b\xcc\xb1" /* b with macron below */
+ "\xe1\xb9\xbd" /* v with tilde */
+ "e\xcc\xa7\xcc\x86" /* e with breve and cedilla */
+ "\xc8\x91" /* r with double grave */
+ "s\xcc\x8c" /* s with caron */
+ "\xe1\xb8\xaf" /* i with diaeresis and acute */
+ "o\xcc\x80\xcc\x9b" /* o with grave and hook */
+ "\xe1\xb9\x8b"; /* n with circumflex below */
+
+ /* Invalid UTF-8 */
+ static const char invalid[] =
+ "\xe1\xb9\xa8" /* S with dot above and below */
+ "\xc5\xaf" /* u with ring */
+ "\xe1\xb8\x87" /* b with macron below */
+ "\xe1\xb9\xbd" /* v with tilde */
+ "\xe1\xb8\x9d" /* e with breve and cedilla */
+ "\xc8\x91" /* r with double grave */
+ "\xc5\xa1" /* s with caron */
+ "\xe1\xb8\xaf" /* i with diaeresis and acute */
+ "\xe6" /* Invalid byte */
+ "\xe1\xb9\x8b"; /* n with circumflex below */
+
+ SVN_ERR_ASSERT(svn_utf__is_normalized(nfc, pool));
+ SVN_ERR_ASSERT(!svn_utf__is_normalized(nfd, pool));
+ SVN_ERR_ASSERT(!svn_utf__is_normalized(mixup, pool));
+ SVN_ERR_ASSERT(!svn_utf__is_normalized(invalid, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_utf_conversions(apr_pool_t *pool)
+{
+ static const struct cvt_test_t
+ {
+ svn_boolean_t sixteenbit;
+ svn_boolean_t bigendian;
+ apr_size_t sourcelen;
+ const char *source;
+ const char *result;
+ svn_boolean_t counted;
+ } tests[] = {
+
+#define UTF_32_LE FALSE, FALSE
+#define UTF_32_BE FALSE, TRUE
+#define UTF_16_LE TRUE, FALSE
+#define UTF_16_BE TRUE, TRUE
+
+ /* Normal character conversion */
+ { UTF_32_LE, 4, "t\0\0\0" "e\0\0\0" "s\0\0\0" "t\0\0\0" "\0\0\0\0", "test", FALSE },
+ { UTF_32_BE, 4, "\0\0\0t" "\0\0\0e" "\0\0\0s" "\0\0\0t" "\0\0\0\0", "test", FALSE },
+ { UTF_16_LE, 4, "t\0" "e\0" "s\0" "t\0" "\0\0", "test", FALSE },
+ { UTF_16_BE, 4, "\0t" "\0e" "\0s" "\0t" "\0\0", "test", FALSE },
+
+ /* Valid surrogate pairs */
+ { UTF_16_LE, 2, "\x00\xD8" "\x00\xDC" "\0\0", "\xf0\x90\x80\x80", FALSE }, /* U+010000 */
+ { UTF_16_LE, 2, "\x34\xD8" "\x1E\xDD" "\0\0", "\xf0\x9d\x84\x9e", FALSE }, /* U+01D11E */
+ { UTF_16_LE, 2, "\xFF\xDB" "\xFD\xDF" "\0\0", "\xf4\x8f\xbf\xbd", FALSE }, /* U+10FFFD */
+
+ { UTF_16_BE, 2, "\xD8\x00" "\xDC\x00" "\0\0", "\xf0\x90\x80\x80", FALSE }, /* U+010000 */
+ { UTF_16_BE, 2, "\xD8\x34" "\xDD\x1E" "\0\0", "\xf0\x9d\x84\x9e", FALSE }, /* U+01D11E */
+ { UTF_16_BE, 2, "\xDB\xFF" "\xDF\xFD" "\0\0", "\xf4\x8f\xbf\xbd", FALSE }, /* U+10FFFD */
+
+ /* Swapped, single and trailing surrogate pairs */
+ { UTF_16_LE, 4, "*\0" "\x00\xDC" "\x00\xD8" "*\0\0\0", "*\xed\xb0\x80" "\xed\xa0\x80*", FALSE },
+ { UTF_16_LE, 3, "*\0" "\x1E\xDD" "*\0\0\0", "*\xed\xb4\x9e*", FALSE },
+ { UTF_16_LE, 3, "*\0" "\xFF\xDB" "*\0\0\0", "*\xed\xaf\xbf*", FALSE },
+ { UTF_16_LE, 1, "\x1E\xDD" "\0\0", "\xed\xb4\x9e", FALSE },
+ { UTF_16_LE, 1, "\xFF\xDB" "\0\0", "\xed\xaf\xbf", FALSE },
+
+ { UTF_16_BE, 4, "\0*" "\xDC\x00" "\xD8\x00" "\0*\0\0", "*\xed\xb0\x80" "\xed\xa0\x80*", FALSE },
+ { UTF_16_BE, 3, "\0*" "\xDD\x1E" "\0*\0\0", "*\xed\xb4\x9e*", FALSE },
+ { UTF_16_BE, 3, "\0*" "\xDB\xFF" "\0*\0\0", "*\xed\xaf\xbf*", FALSE },
+ { UTF_16_BE, 1, "\xDD\x1E" "\0\0", "\xed\xb4\x9e", FALSE },
+ { UTF_16_BE, 1, "\xDB\xFF" "\0\0", "\xed\xaf\xbf", FALSE },
+
+ /* Counted strings with NUL characters */
+ { UTF_16_LE, 3, "x\0" "\0\0" "y\0" "*\0", "x\0y", TRUE },
+ { UTF_32_BE, 3, "\0\0\0x" "\0\0\0\0" "\0\0\0y" "\0\0\0*", "x\0y", TRUE },
+
+#undef UTF_32_LE
+#undef UTF_32_BE
+#undef UTF_16_LE
+#undef UTF_16_BE
+
+ { 0 }
+ };
+
+ const struct cvt_test_t *tc;
+ const svn_string_t *result;
+ apr_size_t maxlen = 0;
+
+ /* To assure proper alignment of the source string, it needs to be copied
+ into an array of the appropriate type before calling
+ svn_utf__utf{16,32}_to_utf8. */
+ apr_uint16_t *source16;
+ apr_int32_t *source32;
+
+ for (tc = tests; tc->source; ++tc)
+ if (tc->sourcelen > maxlen)
+ maxlen = tc->sourcelen;
+ maxlen++;
+
+ source16 = apr_pcalloc(pool, maxlen * sizeof(*source16));
+ source32 = apr_pcalloc(pool, maxlen * sizeof(*source32));
+
+ for (tc = tests; tc->source; ++tc)
+ {
+ if (tc->sixteenbit)
+ {
+ memset(source16, 0, maxlen * sizeof(*source16));
+ memcpy(source16, tc->source, (tc->sourcelen + 1) * sizeof(*source16));
+ SVN_ERR(svn_utf__utf16_to_utf8(&result, source16,
+ tc->counted ? tc->sourcelen : SVN_UTF__UNKNOWN_LENGTH,
+ tc->bigendian, pool, pool));
+ }
+ else
+ {
+ memset(source32, 0, maxlen * sizeof(*source32));
+ memcpy(source32, tc->source, (tc->sourcelen + 1) * sizeof(*source32));
+ SVN_ERR(svn_utf__utf32_to_utf8(&result, source32,
+ tc->counted ? tc->sourcelen : SVN_UTF__UNKNOWN_LENGTH,
+ tc->bigendian, pool, pool));
+ }
+ if (tc->counted)
+ SVN_ERR_ASSERT(0 == memcmp(result->data, tc->result, tc->sourcelen));
+ else
+ SVN_ERR_ASSERT(0 == strcmp(result->data, tc->result));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_utf_normalize(apr_pool_t *pool)
+{
+ /* Normalized: NFC */
+ static const char nfc[] =
+ "\xe1\xb9\xa8" /* S with dot above and below */
+ "\xc5\xaf" /* u with ring */
+ "\xe1\xb8\x87" /* b with macron below */
+ "\xe1\xb9\xbd" /* v with tilde */
+ "\xe1\xb8\x9d" /* e with breve and cedilla */
+ "\xc8\x91" /* r with double grave */
+ "\xc5\xa1" /* s with caron */
+ "\xe1\xb8\xaf" /* i with diaeresis and acute */
+ "\xe1\xbb\x9d" /* o with grave and hook */
+ "\xe1\xb9\x8b"; /* n with circumflex below */
+
+ /* Normalized: NFD */
+ static const char nfd[] =
+ "S\xcc\xa3\xcc\x87" /* S with dot above and below */
+ "u\xcc\x8a" /* u with ring */
+ "b\xcc\xb1" /* b with macron below */
+ "v\xcc\x83" /* v with tilde */
+ "e\xcc\xa7\xcc\x86" /* e with breve and cedilla */
+ "r\xcc\x8f" /* r with double grave */
+ "s\xcc\x8c" /* s with caron */
+ "i\xcc\x88\xcc\x81" /* i with diaeresis and acute */
+ "o\xcc\x9b\xcc\x80" /* o with grave and hook */
+ "n\xcc\xad"; /* n with circumflex below */
+
+ /* Mixed, denormalized */
+ static const char mixup[] =
+ "S\xcc\x87\xcc\xa3" /* S with dot above and below */
+ "\xc5\xaf" /* u with ring */
+ "b\xcc\xb1" /* b with macron below */
+ "\xe1\xb9\xbd" /* v with tilde */
+ "e\xcc\xa7\xcc\x86" /* e with breve and cedilla */
+ "\xc8\x91" /* r with double grave */
+ "s\xcc\x8c" /* s with caron */
+ "\xe1\xb8\xaf" /* i with diaeresis and acute */
+ "o\xcc\x80\xcc\x9b" /* o with grave and hook */
+ "\xe1\xb9\x8b"; /* n with circumflex below */
+
+ /* Invalid UTF-8 */
+ static const char invalid[] =
+ "\xe1\xb9\xa8" /* S with dot above and below */
+ "\xc5\xaf" /* u with ring */
+ "\xe1\xb8\x87" /* b with macron below */
+ "\xe1\xb9\xbd" /* v with tilde */
+ "\xe1\xb8\x9d" /* e with breve and cedilla */
+ "\xc8\x91" /* r with double grave */
+ "\xc5\xa1" /* s with caron */
+ "\xe1\xb8\xaf" /* i with diaeresis and acute */
+ "\xe6" /* Invalid byte */
+ "\xe1\xb9\x8b"; /* n with circumflex below */
+
+ const char *result;
+ svn_membuf_t buf;
+
+ svn_membuf__create(&buf, 0, pool);
+ SVN_ERR(svn_utf__normalize(&result, nfc, strlen(nfc), &buf));
+ SVN_TEST_STRING_ASSERT(result, nfc);
+ SVN_ERR(svn_utf__normalize(&result, nfd, strlen(nfd), &buf));
+ SVN_TEST_STRING_ASSERT(result, nfc);
+ SVN_ERR(svn_utf__normalize(&result, mixup, strlen(mixup), &buf));
+ SVN_TEST_STRING_ASSERT(result, nfc);
+
+ SVN_TEST_ASSERT_ERROR(svn_utf__normalize(&result, invalid, strlen(invalid),
+ &buf),
+ SVN_ERR_UTF8PROC_ERROR);
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_utf_xfrm(apr_pool_t *pool)
+{
+ const char *str;
+ const char *result;
+ svn_membuf_t buf;
+
+ svn_membuf__create(&buf, 0, pool);
+
+ /* ASCII string */
+ str = "Subversion";
+ SVN_ERR(svn_utf__xfrm(&result, str, strlen(str), FALSE, FALSE, &buf));
+ SVN_TEST_STRING_ASSERT(result, "Subversion");
+ SVN_ERR(svn_utf__xfrm(&result, str, strlen(str), TRUE, FALSE, &buf));
+ SVN_TEST_STRING_ASSERT(result, "subversion");
+ SVN_ERR(svn_utf__xfrm(&result, str, strlen(str), FALSE, TRUE, &buf));
+ SVN_TEST_STRING_ASSERT(result, "Subversion");
+ SVN_ERR(svn_utf__xfrm(&result, str, strlen(str), TRUE, TRUE, &buf));
+ SVN_TEST_STRING_ASSERT(result, "subversion");
+
+ /* M (u with diaeresis) (sharp s) en */
+ str = "M" "\xc3\xbc" "\xc3\x9f" "en";
+ SVN_ERR(svn_utf__xfrm(&result, str, strlen(str), FALSE, FALSE, &buf));
+ SVN_TEST_STRING_ASSERT(result, "M" "\xc3\xbc" "\xc3\x9f" "en");
+ SVN_ERR(svn_utf__xfrm(&result, str, strlen(str), TRUE, FALSE, &buf));
+ SVN_TEST_STRING_ASSERT(result, "m" "\xc3\xbc" "ssen");
+ SVN_ERR(svn_utf__xfrm(&result, str, strlen(str), FALSE, TRUE, &buf));
+ SVN_TEST_STRING_ASSERT(result, "Mu" "\xc3\x9f" "en");
+ SVN_ERR(svn_utf__xfrm(&result, str, strlen(str), TRUE, TRUE, &buf));
+ SVN_TEST_STRING_ASSERT(result, "mussen");
+
+ /* Na (i with diaeresis) vet (e with acute), decomposed */
+ str = "Nai" "\xcc\x88" "vete" "\xcc\x81";
+ SVN_ERR(svn_utf__xfrm(&result, str, strlen(str), FALSE, FALSE, &buf));
+ SVN_TEST_STRING_ASSERT(result, "Na" "\xc3\xaf" "vet" "\xc3\xa9");
+ SVN_ERR(svn_utf__xfrm(&result, str, strlen(str), TRUE, FALSE, &buf));
+ SVN_TEST_STRING_ASSERT(result, "na" "\xc3\xaf" "vet" "\xc3\xa9");
+ SVN_ERR(svn_utf__xfrm(&result, str, strlen(str), FALSE, TRUE, &buf));
+ SVN_TEST_STRING_ASSERT(result, "Naivete");
+ SVN_ERR(svn_utf__xfrm(&result, str, strlen(str), TRUE, TRUE, &buf));
+ SVN_TEST_STRING_ASSERT(result, "naivete");
+
+ /* (I with dot above) stanbul */
+ str = "\xc4\xb0" "stanbul";
+ SVN_ERR(svn_utf__xfrm(&result, str, strlen(str), FALSE, FALSE, &buf));
+ SVN_TEST_STRING_ASSERT(result, "\xc4\xb0" "stanbul");
+
+ /* The Latin Capital Letter I with Dot Above (0130) should fold into
+ Latin Small Letter I (0069) with Combining Dot Above (0307) per full
+ mapping in http://www.unicode.org/Public/UNIDATA/CaseFolding.txt */
+ SVN_ERR(svn_utf__xfrm(&result, str, strlen(str), TRUE, FALSE, &buf));
+ SVN_TEST_STRING_ASSERT(result, "i" "\xcc\x87" "stanbul");
+ SVN_ERR(svn_utf__xfrm(&result, str, strlen(str), FALSE, TRUE, &buf));
+ SVN_TEST_STRING_ASSERT(result, "Istanbul");
+ SVN_ERR(svn_utf__xfrm(&result, str, strlen(str), TRUE, TRUE, &buf));
+ SVN_TEST_STRING_ASSERT(result, "istanbul");
+
+ /* Invalid UTF-8 */
+ str = "a" "\xe6" "bc";
+ SVN_TEST_ASSERT_ERROR(svn_utf__xfrm(&result, str, strlen(str),
+ FALSE, FALSE, &buf),
+ SVN_ERR_UTF8PROC_ERROR);
+ SVN_TEST_ASSERT_ERROR(svn_utf__xfrm(&result, str, strlen(str),
+ TRUE, FALSE, &buf),
+ SVN_ERR_UTF8PROC_ERROR);
+ SVN_TEST_ASSERT_ERROR(svn_utf__xfrm(&result, str, strlen(str),
+ FALSE, TRUE, &buf),
+ SVN_ERR_UTF8PROC_ERROR);
+ SVN_TEST_ASSERT_ERROR(svn_utf__xfrm(&result, str, strlen(str),
+ TRUE, TRUE, &buf),
+ SVN_ERR_UTF8PROC_ERROR);
+
+ return SVN_NO_ERROR;
+}
+
+
+/* The test table. */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(utf_validate,
+ "test is_valid/last_valid"),
+ SVN_TEST_PASS2(utf_validate2,
+ "test last_valid/last_valid2"),
+ SVN_TEST_PASS2(test_utf_cstring_to_utf8_ex2,
+ "test svn_utf_cstring_to_utf8_ex2"),
+ SVN_TEST_PASS2(test_utf_cstring_from_utf8_ex2,
+ "test svn_utf_cstring_from_utf8_ex2"),
+ SVN_TEST_PASS2(test_utf_collated_compare,
+ "test svn_utf__normcmp"),
+ SVN_TEST_PASS2(test_utf_pattern_match,
+ "test svn_utf__glob"),
+ SVN_TEST_PASS2(test_utf_fuzzy_escape,
+ "test svn_utf__fuzzy_escape"),
+ SVN_TEST_PASS2(test_utf_is_normalized,
+ "test svn_utf__is_normalized"),
+ SVN_TEST_PASS2(test_utf_conversions,
+ "test svn_utf__utf{16,32}_to_utf8"),
+ SVN_TEST_PASS2(test_utf_normalize,
+ "test svn_utf__normalize"),
+ SVN_TEST_PASS2(test_utf_xfrm,
+ "test svn_utf__xfrm"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/x509-test.c b/subversion/tests/libsvn_subr/x509-test.c
new file mode 100644
index 0000000..a5cf874
--- /dev/null
+++ b/subversion/tests/libsvn_subr/x509-test.c
@@ -0,0 +1,905 @@
+/*
+ * x509-test.c -- test the x509 parser functions
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <string.h>
+#include "svn_x509.h"
+#include "svn_base64.h"
+#include "svn_time.h"
+#include "svn_pools.h"
+#include "svn_string.h"
+
+#include "../svn_test.h"
+
+struct x509_test {
+ const char *base64_cert; /* Base64 encoded DER X.509 cert */
+ const char *subject; /* Subject Distinguished Name */
+ const char *subject_oids; /* Space separated list of oids in Subject */
+ const char *issuer; /* Issuer Distinguished Name */
+ const char *issuer_oids; /* Space separated list of oids in Issuer */
+
+ /* These timesamps are in the format that svn_time_to_cstring() produces.
+ * This is not the same string as the parser returns since it returns
+ * the ressult of svn_time_to_human_cstring(), which is in the local
+ * timezone. So we can't store exactly what the parser will output. */
+ const char *valid_from;
+ const char *valid_to;
+ const char *hostnames;
+ const char *sha1_digest;
+};
+
+static struct x509_test cert_tests[] = {
+ /* contains extensions and uses a sha256 algorithm */
+ { "MIIEtzCCA5+gAwIBAgIQWGBOrapkezd+BWVsAtmtmTANBgkqhkiG9w0BAQsFADA8"
+ "MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMVGhhd3RlLCBJbmMuMRYwFAYDVQQDEw1U"
+ "aGF3dGUgU1NMIENBMB4XDTE0MDQxMTAwMDAwMFoXDTE2MDQwNzIzNTk1OVowgYsx"
+ "CzAJBgNVBAYTAlVTMREwDwYDVQQIEwhNYXJ5bGFuZDEUMBIGA1UEBxQLRm9yZXN0"
+ "IEhpbGwxIzAhBgNVBAoUGkFwYWNoZSBTb2Z0d2FyZSBGb3VuZGF0aW9uMRcwFQYD"
+ "VQQLFA5JbmZyYXN0cnVjdHVyZTEVMBMGA1UEAxQMKi5hcGFjaGUub3JnMIIBIjAN"
+ "BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Tq4mH+stRoxe4xth8tUCgLt+P4L"
+ "D/JWZz4a2IecaaAk57vIlTxEyP16fUShUfxVJnD0KV11zv2qaEUXNaA6hKd4H/oB"
+ "u2OyGev+quRM+aFCjWqASkXt7fLGsIkHAwP3XwBVBpARbcXJeCjCBxqaYrQqS8LT"
+ "wfPUD9eYncGlQ+ixb3Bosy7TmkWKeLsRdS90cAO/rdgQ8OI7kLT/1tr5GpF9RmXo"
+ "RnVqMP+U0zGd/BNNSneg7emb7TxLzxeMKZ7QbF4MZi8RRN11spvx8/f92CiYrGGu"
+ "y67VdOGPaomYc+VZ2syLwduHGK40ADrEK3+MQpsRFB0dM08j9bhpr5A44wIDAQAB"
+ "o4IBYzCCAV8wFwYDVR0RBBAwDoIMKi5hcGFjaGUub3JnMAkGA1UdEwQCMAAwQgYD"
+ "VR0gBDswOTA3BgpghkgBhvhFAQc2MCkwJwYIKwYBBQUHAgEWG2h0dHBzOi8vd3d3"
+ "LnRoYXd0ZS5jb20vY3BzLzAOBgNVHQ8BAf8EBAMCBaAwHwYDVR0jBBgwFoAUp6KD"
+ "uzRFQD381TBPErk+oQGf9tswOgYDVR0fBDMwMTAvoC2gK4YpaHR0cDovL3N2ci1v"
+ "di1jcmwudGhhd3RlLmNvbS9UaGF3dGVPVi5jcmwwHQYDVR0lBBYwFAYIKwYBBQUH"
+ "AwEGCCsGAQUFBwMCMGkGCCsGAQUFBwEBBF0wWzAiBggrBgEFBQcwAYYWaHR0cDov"
+ "L29jc3AudGhhd3RlLmNvbTA1BggrBgEFBQcwAoYpaHR0cDovL3N2ci1vdi1haWEu"
+ "dGhhd3RlLmNvbS9UaGF3dGVPVi5jZXIwDQYJKoZIhvcNAQELBQADggEBAF52BLvl"
+ "x5or9/aO7+cPhxuPxwiNRgbvHdCakD7n8vzjNyct9fKp6/XxB6GQiTZ0nZPJOyIu"
+ "Pi1QDLKOXvaPeLKDBilL/+mrn/ev3s/aRQSrUsieKDoQnqtmlxEHc/T3+Ni/RZob"
+ "PD4GzPuNKpK3BIc0fk/95T8R1DjBSQ5/clvkzOKtcl3VffAwnHiE9TZx9js7kZwO"
+ "b9nOKX8DFao3EpQcS7qn63Ibzbq5A6ry8ZNRQSIJK/xlCAWoyUd1uxnqGFnus8wb"
+ "9RVZJQe8YvyytBjgbE3QjnfPOxoEJA3twupnPmH+OCTM6V3TZqpRZj/sZ5rtIQ++"
+ "hI5FdJWUWVSgnSw=",
+ "C=US, ST=Maryland, L=Forest Hill, O=Apache Software Foundation, "
+ "OU=Infrastructure, CN=*.apache.org",
+ "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.10 2.5.4.11 2.5.4.3",
+ "C=US, O=Thawte, Inc., CN=Thawte SSL CA",
+ "2.5.4.6 2.5.4.10 2.5.4.3",
+ "2014-04-11T00:00:00.000000Z",
+ "2016-04-07T23:59:59.000000Z",
+ "*.apache.org",
+ "151d8ad1e1bac21466bc2836ba80b5fcf872f37c" },
+ /* the expiration is after 2049 so the expiration is in the
+ * generalized format, while the start date is still in the UTC
+ * format. Note this is actually a CA cert but that really doesn't
+ * matter here. */
+ { "MIIDtzCCAp+gAwIBAgIJAJKX85dqh3RvMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV"
+ "BAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBX"
+ "aWRnaXRzIFB0eSBMdGQwIBcNMTQwNjI3MTczMTUxWhgPMjExNDA2MDMxNzMxNTFa"
+ "MEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJ"
+ "bnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw"
+ "ggEKAoIBAQDaa4gwNBB6vgWrlOIEMdzvD06zmmiocEt6UnTHtmAcfrBuDnKrBwEh"
+ "f5JxneL16XIuKwK6n/4omBtem/PPjjpOLM9PMQuoO0cpQ0UGFnfpmko6PSQoqRHl"
+ "qTbDGv4usn7qdZV+FKz/B9CMonRSzWHMz5YPmqfob6BqaaJY/qJEzHJA24bm4jPH"
+ "IsaVCInEGpqAUpejwBzNujfbLibBNrVX7K846zk+tnsNR90kP5h3IRP3SdWVywKC"
+ "AMN2izzhmaDhuPzaTBobovr+ySJShmX6gdB5PpWkm6rcBl6RJ+tM0ZBSJjQvkYp4"
+ "seV+rcXFgpJP/aQL3vhDON32tjWh3A2JAgMBAAGjgacwgaQwHQYDVR0OBBYEFF+N"
+ "7TyDI8THpAbx1pfzFFtl5z4iMHUGA1UdIwRuMGyAFF+N7TyDI8THpAbx1pfzFFtl"
+ "5z4ioUmkRzBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8G"
+ "A1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkggkAkpfzl2qHdG8wDAYDVR0T"
+ "BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAo4t9fYe2I+XIQn8i/KI9UFEE9fue"
+ "w6rQMnf9yyd8nwL+IcV84hvyNrq0+7SptUBMq3rsEf5UIBIBI4Oa614mJ/Kt976O"
+ "S7Sa1IPH7j+zb/jqH/xGskEVi25dZz7psFCmi7Hm9dnVz9YKa2yLW6R2KZcTVxCx"
+ "SSdDRlD7SonsYeq2fGrAo7Y9xfZsiJ2ZbJ18kHs2coMWuhgSrN9jrML6mb5B+k22"
+ "/rgsCJgFsBDPBYR3ju0Ahqg7v6kwg9O2PJzyb4ljsw8oI0sCwHTZW5I5FMq2D9g6"
+ "hj80N2fhS9QWoLyeKoMTNB2Do6VaNrLrCJiscZWrsnM1f+XBqV8hMuHX8A==",
+ "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd",
+ "2.5.4.6 2.5.4.8 2.5.4.10",
+ "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd",
+ "2.5.4.6 2.5.4.8 2.5.4.10",
+ "2014-06-27T17:31:51.000000Z",
+ "2114-06-03T17:31:51.000000Z",
+ NULL,
+ "db3a959e145acc2741f9eeecbeabce53cc5b7362" },
+ /* The subject (except for country code) is UTF-8 encoded.
+ * created with openssl using utf8-yes and string_mask=utf8only */
+ { "MIIDrTCCApWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJBVTET"
+ "MBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQ"
+ "dHkgTHRkMB4XDTE0MDcwMjE4MzYxMFoXDTE1MDcwMjE4MzYxMFowcjELMAkGA1UE"
+ "BhMCR1IxFTATBgNVBAgMDM6Rz4TPhM65zrrOrjETMBEGA1UEBwwKzpHOuM6uzr3O"
+ "sTEdMBsGA1UECgwUz4DOsc+BzqzOtM61zrnOs868zrExGDAWBgNVBAMMD3d3dy5l"
+ "eGFtcGxlLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMVPuQPz"
+ "INjsiXl+GeiXMzXV1Bfm8vzbQnMLAFY/ZKKK4gpy58xcNrmur//Fd38naTM/DetO"
+ "PEoDa+vQ48CnUWCDT3CKUA3BnrjtR3/EITC7XRcfk5lyk0IZr9RZB1WedQxK1n5E"
+ "Ecz8EBrm9+1442Nmg/y1F8d/2F2CjKB+PgfOP1WWaIQcsjLsftXec+kGjc34kwbS"
+ "9D9H+bRrPVcOzBZOqC+K0K7MMOxKA5mMi4b/Nlep76gTaUyonclRIADanAyaK5WG"
+ "0IkEI/nxufaP3AcPksCbroWLTkPKIe97Yj6mnzNhK9TA9w5RgdBrjNyfrwUaYiYR"
+ "FxVJN0VrHWSsRnECAwEAAaN7MHkwCQYDVR0TBAIwADAsBglghkgBhvhCAQ0EHxYd"
+ "T3BlblNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFNOobRTPfoWP"
+ "EGgXVkHfwrqz7PVzMB8GA1UdIwQYMBaAFIV8JZkZ88X7MTQSsJ6/qF3KboHKMA0G"
+ "CSqGSIb3DQEBBQUAA4IBAQAam6vJUv6kcWWrEAfdnwwRmmJ4X1Jey3Sp48G35MOE"
+ "KkHtwqbtL+QU1VA2X98bEYobqZinM3e3zrlbpgbe1xoJ00MnT9CgQObXr+cum/Ql"
+ "PwWXB5fK3BrNwqRMRGc9w27FevyFeybdKhc47jEKMOANrB/aziNHaq9gBtU/HZdy"
+ "rm9TEaOHMy6vNrdpOZKpwXPxYqsQxMLpen9D64t/3P6hsV5FMQTaxSFhszidG44t"
+ "xaU4O0BOq4x//THCWguMxzO5RxW/V8wI/rkpvhAH1wljHTusnsAZea4PpstZ7+W7"
+ "43GME1DwjYdUK9HhqRNrDkiJLox4Tmegw9A7m4XLt4zu",
+ "C=GR, ST=\xce\x91\xcf\x84\xcf\x84\xce\xb9\xce\xba\xce\xae, "
+ "L=\xce\x91\xce\xb8\xce\xae\xce\xbd\xce\xb1, "
+ "O=\xcf\x80\xce\xb1\xcf\x81\xce\xac\xce\xb4\xce\xb5\xce\xb9\xce\xb3"
+ "\xce\xbc\xce\xb1, CN=www.example.com",
+ "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.10 2.5.4.3",
+ "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd",
+ "2.5.4.6 2.5.4.8 2.5.4.10",
+ "2014-07-02T18:36:10.000000Z",
+ "2015-07-02T18:36:10.000000Z",
+ "www.example.com",
+ "b3b9789d8a53868f418619565f6b56af0033bdd3" },
+ /* The issuer and subject (except for the country code) is
+ * UnversalString encoded. Created with a hacked version of openssl
+ * using utf8=yes and string_mask=MASK:256. In order for that to
+ * output UniversalString encoded data you need to change the
+ * DIRSTRING_TYPE in crypto/asn1/asn1.h to be defined as
+ * B_ASN1_DIRECTORYSTRING so that UnviersalString is available to be
+ * used in the DirectoryStrings. OpenSSL by default avoids
+ * this type (for the reasonable reason that it's wasteful and
+ * UTF-8 can encoded everything it can in the most efficient way).
+ * OU uses the mathematical monospace digits 0-9 to test characters
+ * outside of the range of the Basic Multilingual Plane */
+ { "MIIEnzCCA4egAwIBAgIBATANBgkqhkiG9w0BAQUFADCBqzELMAkGA1UEBhMCQVUx"
+ "MTAvBgNVBAgcKAAAAFMAAABvAAAAbQAAAGUAAAAtAAAAUwAAAHQAAABhAAAAdAAA"
+ "AGUxaTBnBgNVBAocYAAAAEkAAABuAAAAdAAAAGUAAAByAAAAbgAAAGUAAAB0AAAA"
+ "IAAAAFcAAABpAAAAZAAAAGcAAABpAAAAdAAAAHMAAAAgAAAAUAAAAHQAAAB5AAAA"
+ "IAAAAEwAAAB0AAAAZDAeFw0xNDA3MjIyMjM3MzBaFw0xNTA3MjIyMjM3MzBaMIH8"
+ "MQswCQYDVQQGEwJHUjEhMB8GA1UECBwYAAADkQAAA8QAAAPEAAADuQAAA7oAAAOu"
+ "MR0wGwYDVQQHHBQAAAORAAADuAAAA64AAAO9AAADsTExMC8GA1UEChwoAAADwAAA"
+ "A7EAAAPBAAADrAAAA7QAAAO1AAADuQAAA7MAAAO8AAADsTExMC8GA1UECxwoAAHX"
+ "9gAB1/cAAdf4AAHX+QAB1/oAAdf7AAHX/AAB1/0AAdf+AAHX/zFFMEMGA1UEAxw8"
+ "AAAAdwAAAHcAAAB3AAAALgAAAGUAAAB4AAAAYQAAAG0AAABwAAAAbAAAAGUAAAAu"
+ "AAAAYwAAAG8AAABtMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuYUb"
+ "aNt22rsR5Qc/2zsenSvrlbvv1CwwRPNxcWTKdLl4lJEUy5YCnQXIq3qTi+eAFetQ"
+ "MwUOZem6kgNdwmGvCz3lrLwOobd1D5mG9agzKLVUVj72csbNNFzHr8z/7oaHvYYs"
+ "eYxW3oRm6vDYtHw5spXrxTzRIAnG6foxXFYAtDDHQpdjsofxqXO67aUmmGvE5ffX"
+ "gD3dvTvjejzcjjVsLQP/HG4MQOqeIyvyyHg1E3dyOrG+3qR6RN1ZveROdvU38Udm"
+ "s0KSGVX2lDLsUTQSKg5L8CLWDHqgGQWjLZQRgRiKZId/f9ubaJdLN6KfAQ3UvYAP"
+ "bKL5/k2GpsPDE21X0QIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQf"
+ "Fh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUccHhM6C7"
+ "nGMpclkG7YLIRuFueYQwHwYDVR0jBBgwFoAUz0X1b2Ok9MVVzxqxX6MgtTwSKmYw"
+ "DQYJKoZIhvcNAQEFBQADggEBAEpqEa08JkPG+XBlLemnoJsnoaRuQnLZvSCoAwIt"
+ "fugTE8686EigTZyYVFQ+GaI+EqVeiMjpAEhS3IMbhx5VIr61S3Nta2BG9OPjr4Xf"
+ "01oUeh4egL93CpIGNwu6M1SrQv2UVAKTwahxNmNuvx6Ojx5P2tne+KJtRUiwM3dE"
+ "of78/0NJD27OwjW0ruZAifF5CAR7mhy3NOMARpE2kqZk5695OF+QCahe00Y/9ulz"
+ "sCjgjpCUYv87OTbBGC5XGRd/ZopTRqtBVxpEHX/fux5/wqxBawrCuQsVw1Kfw0Ur"
+ "30aYWLsOsRwhiQkukjQfcMra1AHLujWaAHuLIDls1ozc8xo=",
+ "C=GR, ST=\xce\x91\xcf\x84\xcf\x84\xce\xb9\xce\xba\xce\xae, "
+ "L=\xce\x91\xce\xb8\xce\xae\xce\xbd\xce\xb1, "
+ "O=\xcf\x80\xce\xb1\xcf\x81\xce\xac\xce\xb4\xce\xb5\xce\xb9\xce\xb3"
+ "\xce\xbc\xce\xb1, "
+ "OU=\xf0\x9d\x9f\xb6\xf0\x9d\x9f\xb7\xf0\x9d\x9f\xb8\xf0\x9d\x9f\xb9"
+ "\xf0\x9d\x9f\xba\xf0\x9d\x9f\xbb\xf0\x9d\x9f\xbc\xf0\x9d\x9f\xbd"
+ "\xf0\x9d\x9f\xbe\xf0\x9d\x9f\xbf, "
+ "CN=www.example.com",
+ "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.10 2.5.4.11 2.5.4.3",
+ "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd",
+ "2.5.4.6 2.5.4.8 2.5.4.10",
+ "2014-07-22T22:37:30.000000Z",
+ "2015-07-22T22:37:30.000000Z",
+ "www.example.com",
+ "cfa15310189cf89f1dadc9c989db46f287fff7a7"
+ },
+ /* The issuer and subject (except for the country code) is BMPString
+ * encoded. Created with openssl using utf8-yes and string_mask=MASK:2048.
+ */
+ { "MIID3zCCAsegAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJBVTEd"
+ "MBsGA1UECB4UAFMAbwBtAGUALQBTAHQAYQB0AGUxOTA3BgNVBAoeMABJAG4AdABl"
+ "AHIAbgBlAHQAIABXAGkAZABnAGkAdABzACAAUAB0AHkAIABMAHQAZDAeFw0xNDA3"
+ "MjIyMzAyMDlaFw0xNTA3MjIyMzAyMDlaMIGBMQswCQYDVQQGEwJHUjEVMBMGA1UE"
+ "CB4MA5EDxAPEA7kDugOuMRMwEQYDVQQHHgoDkQO4A64DvQOxMR0wGwYDVQQKHhQD"
+ "wAOxA8EDrAO0A7UDuQOzA7wDsTEnMCUGA1UEAx4eAHcAdwB3AC4AZQB4AGEAbQBw"
+ "AGwAZQAuAGMAbwBtMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqzof"
+ "mf9YANAl2I5AcUjfAAJhqc2BL6z6k0J9bWyDL7DZf6AJtD5stRjs8cgiSGfJt9Cg"
+ "YQ0Cvnwz9ztNVXLliMmiJ4V0HzG80GI6SBK0PoCVbddUV/PN7REgPNjTwMYlys5w"
+ "Yt/GR8OJJV+eb02rpAfVigDlh7CFjY/uKMs2ThPi+yQb2V6qxLk3ZKIHh5IbKQjt"
+ "zIX/W1t+hiBjojnuOmhAoEefZ583k7amR5GBZO4GS5Qfj+4kjL5xiwB3bjTC8pnV"
+ "Iv4+mN2F6xKW/9IOWZtdySDADaU2ioyuMDzzjp5N5Nt0ZGhrEG2cDC3CatZaV4U7"
+ "9yBbi6kzlo3fCbCOlQIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQf"
+ "Fh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUNvwKR1v/"
+ "R0FQU1WnzqT3brNxaQQwHwYDVR0jBBgwFoAUSM/JbJVWuYFp+awSOEXZcKn1ddQw"
+ "DQYJKoZIhvcNAQEFBQADggEBABna/SiYMBJvbnI+lj7j8ddSFihaFheqtouxOB2d"
+ "tiVz5mcc5KsAFlkrxt7YcYB7SEc+K28nqGb3bfbZ18JayRBY3JS/h4WGu4eL5XkX"
+ "rceWUy60zF7DHs6p8E8HZVF1CdCC/LXr2BAdYTc/y1f37bLKVFF4mMJMP4b8/nSL"
+ "z8+oOO9CxaEjzRoCawf2+jaajXTSTDXBgIx1t6bJMAS6S6RKPaCketyAmpsOZVBS"
+ "VtBVfVIOB2zFqs6iqkXtdiOXWlZ0DBQRX0G1VD5G80RlZXs0yEfufCwLUl/TyOhM"
+ "WisUSEOzd4RlbsBj30JQkVG9+jXb2KChPkiMpg0tFi8HU3s=",
+ "C=GR, ST=\xce\x91\xcf\x84\xcf\x84\xce\xb9\xce\xba\xce\xae, "
+ "L=\xce\x91\xce\xb8\xce\xae\xce\xbd\xce\xb1, "
+ "O=\xcf\x80\xce\xb1\xcf\x81\xce\xac\xce\xb4\xce\xb5\xce\xb9\xce\xb3"
+ "\xce\xbc\xce\xb1, CN=www.example.com",
+ "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.10 2.5.4.3",
+ "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd",
+ "2.5.4.6 2.5.4.8 2.5.4.10",
+ "2014-07-22T23:02:09.000000Z",
+ "2015-07-22T23:02:09.000000Z",
+ "www.example.com",
+ "6e2cd969350979d3741b9abb66c71159a94ff971"
+ },
+ /* The issuer and subject (except for the country code) is T61String
+ * (aka TeletexString) encoded. Created with openssl using utf8=yes
+ * and string_mask=MASK:4. Note that the example chosen specifically
+ * includes the Norwegian OE (slashed O) to highlight that this is
+ * being treated as ISO-8859-1 despite what the X.509 says.
+ * See the following for the horrible details on
+ * this encoding: https://www.cs.auckland.ac.nz/~pgut001/pubs/x509guide.txt
+ */
+ { "MIIDnTCCAoWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJBVTET"
+ "MBEGA1UECBQKU29tZS1TdGF0ZTEhMB8GA1UEChQYSW50ZXJuZXQgV2lkZ2l0cyBQ"
+ "dHkgTHRkMB4XDTE0MDcyMjIzNDQxOFoXDTE1MDcyMjIzNDQxOFowYjELMAkGA1UE"
+ "BhMCTk8xGDAWBgNVBAgUD034cmUgb2cgUm9tc2RhbDEQMA4GA1UEBxQHxWxlc3Vu"
+ "ZDENMAsGA1UEChQEZPhtZTEYMBYGA1UEAxQPd3d3LmV4YW1wbGUuY29tMIIBIjAN"
+ "BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz8uD5f2KRXvB//mKOpCXM3h/MOjK"
+ "xUgC4TIHi3BmnYR0IDElMPJrC263/eU0hKycyegyMjXkwIN5eEx4/Nl///RrzJBQ"
+ "+uXKfEJ4hTJ5x1uUYxhmtq4djZFxfjFH5yobT/LRDkEw9b/+NiRb30P+WrxhrAKW"
+ "7GRsE2pIdPdbM2IB5v/wORB4TK0kLYkmeEPWNJd63SmX4BEC6dRAaMxLIXKn75r5"
+ "GhMHKbUdt2Yy+5s0JlN9hMWqhnavCmGquzl7y/1E1OOUIm0jhL0sJn6wVTc+UO+Q"
+ "7u/w0xf38J8SU7lW6zbcQyYaSIQCMikgpprUSXdQZZUZGmHS7Gis39SiLwIDAQAB"
+ "o3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRl"
+ "ZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUQa2QLy+4QUH8hKNdR2LcvDKYImcwHwYD"
+ "VR0jBBgwFoAUpX6YP04yWqNiziUM7h0KgrRHMF4wDQYJKoZIhvcNAQEFBQADggEB"
+ "AElYUTQp5MOQk+ykIV0MHTw9OsEvLc1ZDmChls5WKYAu6KWgBbcjcTlkTpDlydrO"
+ "6JFxvCCg0K13dYOI3K/O9icGRauIrxrJOTtaIMryj7F51C52TOVPzkjL05eZTh+q"
+ "MmP3KI3uYSpXI6D6RI6hOKIRnFiUOQuXW3I8Z7s03KScBc9PSsVrMBLBz/Vpklaf"
+ "Tv/3jVBVIZwCW67SnFQ+vqEzaM4Ns2TBodlVqB1w0enPpow8bNnUwElLQJx3GXnl"
+ "z0JTpA6AwIRCF8n+VJgNN218fo2t2vvDDW/cZ+XMXzGNVhAqQ1F8B36esxy3P8+o"
+ "Bcwx241dxeGSYFHerqrTJIU=",
+ "C=NO, ST=M\xc3\xb8re og Romsdal, L=\xc3\x85lesund, O=d\xc3\xb8me, "
+ "CN=www.example.com",
+ "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.10 2.5.4.3",
+ "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd",
+ "2.5.4.6 2.5.4.8 2.5.4.10",
+ "2014-07-22T23:44:18.000000Z",
+ "2015-07-22T23:44:18.000000Z",
+ "www.example.com",
+ "787d1577ae77b79649d8f99cf4ed58a332dc48da"
+ },
+ /* Certificate with several Subject Alt Name dNSNames. Note that
+ * the CommonName is not duplicated in the Subject Alt Name to
+ * test that the Common Name is excluded when Subject Alt Name
+ * exists. */
+ { "MIIEMTCCAxmgAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJBVTET"
+ "MBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQ"
+ "dHkgTHRkMRwwGgYDVQQDExNJbnRlcm5ldCBXaWRnaXRzIENBMB4XDTE0MDcyNTE3"
+ "NDEwNFoXDTE1MDcyNTE3NDEwNFowdDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCldh"
+ "c2hpbmd0b24xEzARBgNVBAcTCk5vcnRoIEJlbmQxITAfBgNVBAoTGEludGVybmV0"
+ "IFdpZGdpdHMgUHR5IEx0ZDEYMBYGA1UEAxMPd3d3LmV4YW1wbGUuY29tMIIBIjAN"
+ "BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxlryoK6hMhGI/UlHi7v1m+Z3tCvg"
+ "ZG1twDFNvBACpFVbJtC/v+fiy1eG7ooZ1PsdCINQ1iXLh1igevlw/4w6iTDpeSZg"
+ "OCPYqK6ejnS0bKtSB4TuP8yiQtqwaVz4yPP88lXuQJDRJzgaAR0VAhooLgEpl1z1"
+ "n9wQO15AW5swzpKcEOi4n6Zmf1t7oxOt9awAOhkL1FfFwkpbiK9yQv3TPVo+xzbx"
+ "BJxwx55RY8Dpiu0kuiTYWsd02pocb0uIqd7a5B4y05PhJseqwyX0Mw57HBBnbru1"
+ "lCetP4PkoM2gf7Uoj9e61nmM1mustKTIPvh7tZHWW3UW9JxAFG+6FkKDewIDAQAB"
+ "o4HeMIHbMAkGA1UdEwQCMAAwLAYJYIZIAYb4QgENBB8WHU9wZW5TU0wgR2VuZXJh"
+ "dGVkIENlcnRpZmljYXRlMB0GA1UdDgQWBBQ4A9k8VwI0wv7u5rB4+1D9cuHiqTAf"
+ "BgNVHSMEGDAWgBS6O+MdRDDrD715AXdrnuNZ7wDSyjALBgNVHQ8EBAMCBeAwUwYD"
+ "VR0RBEwwSoINKi5leGFtcGxlLmNvbYIRKi5mb28uZXhhbXBsZS5jb22CESouYmFy"
+ "LmV4YW1wbGUuY29tghN6aWctemFnLmV4YW1wbGUuY29tMA0GCSqGSIb3DQEBBQUA"
+ "A4IBAQAf4IrSOL741IUkyFQrDdof39Cp87VdNEo4Bl8fUSuCjqZONxJfiAFx7GcB"
+ "Cd7h7Toe6CYCeQLHSEXQ1S1eWYLIq0ZoP3Q/huJdoH7yskDyC5Faexph0obKM5hj"
+ "+EYGW2W/UYBzEZai+eePBovARDlupiMaTJGvtdU/AcgMhXCoGNK6egesXoiNgfFh"
+ "h+lXUNWUWm2gZlKwRJff8tkR7bIG7MGzyL6Rqav2/tQdbFVXN5AFPdYPFLf0Vo5m"
+ "eGYM87TILfSo7n7Kh0aZovwcuF/vPUWRJl3B1HaPt9k6DhcFyAji0SJyZWyM4v88"
+ "GSq5Dk8dnTdL2otToll+r4IqFLlp",
+ "C=US, ST=Washington, L=North Bend, O=Internet Widgits Pty Ltd, "
+ "CN=www.example.com",
+ "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.10 2.5.4.3",
+ "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd, CN=Internet Widgits CA",
+ "2.5.4.6 2.5.4.8 2.5.4.10 2.5.4.3",
+ "2014-07-25T17:41:04.000000Z",
+ "2015-07-25T17:41:04.000000Z",
+ "*.example.com, *.foo.example.com, *.bar.example.com, zig-zag.example.com",
+ "9c365d27b7b6cc438576a8e465685ea7a4f61129"
+ },
+ /* This is a CA cert that has a Common Name that doesn't look like
+ * a hostname. Make sure that the hostnames field remains blank for it. */
+ { "MIIEEjCCAvqgAwIBAgIJAKJarRWbvbCjMA0GCSqGSIb3DQEBBQUAMGMxCzAJBgNV"
+ "BAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBX"
+ "aWRnaXRzIFB0eSBMdGQxHDAaBgNVBAMTE0ludGVybmV0IFdpZGdpdHMgQ0EwHhcN"
+ "MTQwNzI1MTc0MTAzWhcNMjQwNzIyMTc0MTAzWjBjMQswCQYDVQQGEwJBVTETMBEG"
+ "A1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkg"
+ "THRkMRwwGgYDVQQDExNJbnRlcm5ldCBXaWRnaXRzIENBMIIBIjANBgkqhkiG9w0B"
+ "AQEFAAOCAQ8AMIIBCgKCAQEAv0f0TAiE13WHaFv8j6M9uuniO40+Aj8cuhZtJ1GC"
+ "GI/mW56wq2BJrP6N4+jyxYbZ/13S3ypPu+N087Nc/4xaPtUD/eKqMlU+o8gHM/Lf"
+ "BEs2dUuBsvkNM0KoC04NPNTOYDnfHOrzx8iHhqlDedwmP8FeQn3rNS8k4qDyJpG3"
+ "Ay8ICz5mB07Cy6NISohTxMtatfW5yKmhnhiS92X42QAEgI1pGB7jJl1g3u+KY1Bf"
+ "/10kcramYSYIM1uB7XHQjZI4bhEhQwuIWePMOSCOykdmbemM3ijF9f531Olq+0Nz"
+ "t7lA1b/aW4PGGJsZ6uIIjKMaX4npP+HHUaNGVssgTnTehQIDAQABo4HIMIHFMB0G"
+ "A1UdDgQWBBS6O+MdRDDrD715AXdrnuNZ7wDSyjCBlQYDVR0jBIGNMIGKgBS6O+Md"
+ "RDDrD715AXdrnuNZ7wDSyqFnpGUwYzELMAkGA1UEBhMCQVUxEzARBgNVBAgTClNv"
+ "bWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDEcMBoG"
+ "A1UEAxMTSW50ZXJuZXQgV2lkZ2l0cyBDQYIJAKJarRWbvbCjMAwGA1UdEwQFMAMB"
+ "Af8wDQYJKoZIhvcNAQEFBQADggEBAI442H8CpePFvOtdvcosu2N8juJrzACuayDI"
+ "Ze32EtHFN611azduqkWBgMJ3Fv74o0A7u5Gl8A7RZnfBTMX7cvpfHvWefau0xqgm"
+ "Mn8CcTUGel0qudCCMe+kPppmkgNaZFvawSqcAA/u2yni2yx8BakYYDZzyfmEf9dm"
+ "hZi5SmxFFba5UhNKOye0GKctT13s/7EgfFNyVhZA7hWU26Xm88QnGnN/qxJdpq+e"
+ "+Glctn9tyke4b1VZ2Yr+R4OktrId44ZQcRD44+88v5ThP8DQsvkXcjREMFAIPkvG"
+ "CEDOIem4l9KFfnsHn8/4KvoBRkmCkGaSwOwUdUG+jIjBpY/82kM=",
+ "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd, CN=Internet Widgits CA",
+ "2.5.4.6 2.5.4.8 2.5.4.10 2.5.4.3",
+ "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd, CN=Internet Widgits CA",
+ "2.5.4.6 2.5.4.8 2.5.4.10 2.5.4.3",
+ "2014-07-25T17:41:03.000000Z",
+ "2024-07-22T17:41:03.000000Z",
+ NULL,
+ "b9decce236aa1da07b2bf088160bffe1469b9a4a"
+ },
+ /* Cert with a IP SAN entry. Make sure we properly skip them. */
+ { "MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJBVTET"
+ "MBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQ"
+ "dHkgTHRkMRwwGgYDVQQDExNJbnRlcm5ldCBXaWRnaXRzIENBMB4XDTE0MDcyNTE4"
+ "NDMyOFoXDTE1MDcyNTE4NDMyOFowczELMAkGA1UEBhMCVVMxEzARBgNVBAgTCldh"
+ "c2hpbmd0b24xEzARBgNVBAcTCk5vcnRoIEJlbmQxITAfBgNVBAoTGEludGVybmV0"
+ "IFdpZGdpdHMgUHR5IEx0ZDEXMBUGA1UEAxMOaXAuZXhhbXBsZS5jb20wggEiMA0G"
+ "CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDXKkSxg89tu5/n+lIC8ajj1T9vsO5B"
+ "nRH5Sne7UPc6pGMTNFi1MOVjdDWkmuCUzoI+HKLDc69/4V5RU12N1QNgsgcOzCSo"
+ "qgxa+dQk2s1shz1zhyaHkpdeMZU3/p9D4v+nRGAdYifwl/VOTwjWWucNzHDBwvb6"
+ "+Wm4pXE94Y5p8fY/lZi7VgtxdoPdSHGkIAps8psZGPjqKpLEjnLMp1n0v9cZhBF6"
+ "OoMUZpQuwcjT8vMQppgIWhZFLiH2jn7FTYWZyB0Dh9nMd097NQA87VtVfNc+g0oY"
+ "qLe3YldJgvVfyeSLhnyv68fBfGcTj310pNrGeE/m4tyxupiUT8BitfxPAgMBAAGj"
+ "geQwgeEwCQYDVR0TBAIwADAsBglghkgBhvhCAQ0EHxYdT3BlblNTTCBHZW5lcmF0"
+ "ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFI09JZlhKV44Z+I5d58V/ZDqQ7yZMB8G"
+ "A1UdIwQYMBaAFDjQVnIU9pQI1nM8jjmxYiicMTdGMAsGA1UdDwQEAwIF4DBZBgNV"
+ "HREEUjBQgg0qLmV4YW1wbGUuY29tghEqLmZvby5leGFtcGxlLmNvbYcEfwAAAYIR"
+ "Ki5iYXIuZXhhbXBsZS5jb22CE3ppZy16YWcuZXhhbXBsZS5jb20wDQYJKoZIhvcN"
+ "AQEFBQADggEBAEK+XIGwavf+5Ht44ifHrGog0CDr4ESg7wFjzk+BJwYDtIPp9b8A"
+ "EG8qbfmOS+2trG3zc74baf2rmrfn0YGZ/GV826NMTaf7YU1/tJQTo+RX9g3aHg6f"
+ "pUBfIyAV8ELq84sgwd1PIgleVgIiDrz+a0UZ05Z5S+GbR2pwNH6+fO0O5E9clt2a"
+ "Cute1UMBqAMGKiFaP8HD6SUFTdTKZNxHtQzYmmuvoC1nzVatMFdkTuQgSQ/uNlzg"
+ "+yUFoufMZhs3gPx9PfXGOQ7f3nKE+WCK4KNGv+OILYsk4zUjMznfAwBRs9PyITN2"
+ "BKe64WsF6ZxTq3zLVGy5I8LpbtlvSmAaBp4=",
+ "C=US, ST=Washington, L=North Bend, O=Internet Widgits Pty Ltd, "
+ "CN=ip.example.com",
+ "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.10 2.5.4.3",
+ "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd, CN=Internet Widgits CA",
+ "2.5.4.6 2.5.4.8 2.5.4.10 2.5.4.3",
+ "2014-07-25T18:43:28.000000Z",
+ "2015-07-25T18:43:28.000000Z",
+ "*.example.com, *.foo.example.com, *.bar.example.com, zig-zag.example.com",
+ "3525fb617c232fdc738d736c1cbd5d97b19b51e4"
+ },
+ /* Cert with the signature algorithm OID set to sha1WithRSA instead of
+ * sha1WithRSAEncryption. Both have the same meaning but the sha1WithRSA
+ * doesn't seem to be used anymore and is shorter */
+ { "MIIDgDCCAmygAwIBAgIBATAJBgUrDgMCHQUAMEUxCzAJBgNVBAYTAkFVMRMwEQYD"
+ "VQQIFApTb21lLVN0YXRlMSEwHwYDVQQKFBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBM"
+ "dGQwHhcNMTQwODE4MDk1OTQ1WhcNMTUwODE4MDk1OTQ1WjBNMQswCQYDVQQGEwJV"
+ "SzEQMA4GA1UECBQHRW5nbGFuZDESMBAGA1UEBxQJU2hlZmZpZWxkMRgwFgYDVQQD"
+ "FA93d3cuZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB"
+ "AQCkvtieKg33RSzhn5JMDPPRlDS8Q16CN96A4lLI9YrJCy33z46PrbR2mq2hOz5l"
+ "MdgbAaRF0MUGhcKv4msJ0bsWhkybaSBAVgnoC7ObQWPNF7ppMzUjeDAlUBXNfheR"
+ "ZcgcgGWqUkoB1uUMhvmVuPrzvxn+WCwyoP6zQCviYLsR8AygGQgdhV6c9wJ/x9HS"
+ "MRUvUOeo7SCmx9GK5Hc11QV2K3rwKXABeAxXNzbyQe7hFfQYCI2SB5s3bEnhIvg7"
+ "BG0BQmoprHjXWBftc0+msKQTFw7+jZ21NsfwGoPonuVsCOJjJ51jp2oKqk3b1GGc"
+ "DEmmMQ0JtqfHO5a7JACBaHbTAgMBAAGjezB5MAkGA1UdEwQCMAAwLAYJYIZIAYb4"
+ "QgENBB8WHU9wZW5TU0wgR2VuZXJhdGVkIENlcnRpZmljYXRlMB0GA1UdDgQWBBSo"
+ "jICtcIgZL6OCCB5BJ5PGf1UIyTAfBgNVHSMEGDAWgBT5KQMLMylrXSQvhMtONHZc"
+ "22Jm9TAJBgUrDgMCHQUAA4IBAQCvCJ4i2kRzSRhnlDxd0UbQtytVIJFFJlfREPTM"
+ "j8+VqqtCVyPSX8T5NU+HCiEmhVrTlm/W0i8ygJXr8izyIMGRqbyhn2M9b8hAY6Jl"
+ "0edztu/FV/YHsJbPznWkXWpMMaXDEX4wI329f5odccIbB5VSaaoAdKZ6Ne4nf6oV"
+ "95KRFWkXoYjm24TnpALsNnK1Kjjed6h5ApB+IANOpXYFbGcsfbuKhWbFd2nd6t5U"
+ "NpUcv4H9Tgdl6KgrfsbQtAeouWCgoiNzrul8FOaQTdJLZfCsjuE+IkGpM+DX8PiF"
+ "5M41EqkSKia8sChFIln+lkRY41OWP9uQ1VXCfdRIzOnXWh9U",
+ "C=UK, ST=England, L=Sheffield, CN=www.example.com",
+ "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.3",
+ "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd",
+ "2.5.4.6 2.5.4.8 2.5.4.10",
+ "2014-08-18T09:59:45.000000Z",
+ "2015-08-18T09:59:45.000000Z",
+ "www.example.com",
+ "0e0869961d508b13bb22aa8da675b2e9951c0e70"
+ },
+ /* X.509 v1 certificate, we used to crash on these prior to r1619861. */
+ { "MIIDDTCCAfUCAQEwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQVUxEzARBgNV"
+ "BAgTClNvbWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0"
+ "ZDAeFw0xNTAxMTkyMjEyNDhaFw0xNjAxMTkyMjEyNDhaMFQxCzAJBgNVBAYTAlVT"
+ "MRMwEQYDVQQIEwpXYXNoaW5ndG9uMRMwEQYDVQQHEwpOb3J0aCBCZW5kMRswGQYD"
+ "VQQDExJ4NTA5djEuZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw"
+ "ggEKAoIBAQDniW3DmGGtA0MoYqE9H55/RmjtTJD2WVmM/STEsw+RW74UGsZ62qfi"
+ "ADedl4ukZYKlk3TwJrGEwDBKOMWHuzCYVxhclyHkHwX7QqamvZRgaOonEu82KHuE"
+ "dZo4FhOWDC9D0yS4RFbfqvSu/JG19FYsnRQn1RPFYji6jG9TRwavplVBiMhR68kc"
+ "8HTW1Wu7uJ5SV0UtTicFes8MGek3+zWceGt+Egwd2UlIYXwTPzB5m7UPuufEdvFL"
+ "ED3pusVatohFzjCbYsuJIR5ppYd49uTxPWGvRidJ2C8GbDf9PCgDduS0Gz91Txnw"
+ "h+WiVYCQ6SxAJWp/xeZWE71k88N0vJEzAgMBAAEwDQYJKoZIhvcNAQEFBQADggEB"
+ "ABoBaObsHnIrkd3RvvGb5q7fnEfiT1DXsufS3ypf4Z8IST/z+NeaUaiRN1oLcvDz"
+ "qC7ygTYZ2BZoEw3ReCGqQWT4iYET+lH8DM+U5val3gVlSWqx1jj/wiV1OAxQsakM"
+ "BnmNs/MDshiv54irvSlqnxEp2o/BU/vMrN656C5DJkZpYoMpIWxdFnd+bzNzuN1k"
+ "pJfTjzWlGckKfdblNPOfdtccTqtQ5d4mWtYNJ8DfL5rRRwCuzXvZtbVHKxqkXaXr"
+ "CYUfFUobapgPfvvMc1QcDY+2nvhC2ij+HAPIHgZPuzJsjZRC1zwg074cfgjZbgbm"
+ "R0HVF486p3vS8HFv4lndRZA=",
+ "C=US, ST=Washington, L=North Bend, CN=x509v1.example.com",
+ "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.3",
+ "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd",
+ "2.5.4.6 2.5.4.8 2.5.4.10",
+ "2015-01-19T22:12:48.000000Z",
+ "2016-01-19T22:12:48.000000Z",
+ "x509v1.example.com",
+ "5730dd65a7f77fdf0dfd90e5a53119f38854af29"
+ },
+ /* X.509 v1 certificate with an X.509 v3 Subject Alternative Name
+ * extension. Although these are ill-formed per RFC 5280 s. 4.1, we
+ * suspect that they could exist in the real world. Make sure we do
+ * not error out, and that we pick up SAN (b.example.com) from the
+ * extension. */
+ { "MIIDLzCCAhcCAQ8wDQYJKoZIhvcNAQEFBQAwKzEpMCcGA1UEAwwgSW50ZXJuZXQg"
+ "V2lkZ2l0cyBJbnRlcm1lZGlhdGUgQ0EwHhcNMTUwMTI5MDAzMzU1WhcNMTYwMTI5"
+ "MDAzMzU1WjByMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3RvbjETMBEG"
+ "A1UEBwwKTm9ydGggQmVuZDEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0cyBQdHkg"
+ "THRkMRYwFAYDVQQDDA1hLmV4YW1wbGUuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOC"
+ "AQ8AMIIBCgKCAQEAs0hj2xPRQZpecqk0Ih1l4juAuQZeSgv3yD/VtSq/9sTBH6iA"
+ "4XjJQcHROYxYaK0QS/qlCjpl+Q3mOaVIu+59TLy3T2YVgqMYmgB453ntuJPkdF1C"
+ "fJ2j19YAQZHHdOFaP1G+auBwjmHns3+MkG4s7EPuJP7TBCcSFlOmz5D4GUui3NVG"
+ "LBYUog1ZhF4oe/7d4jc2Cn8uypNT/Hc1ViIlCT4rFoAirv9Uob+4zjQ3Z18I1Ql1"
+ "t8oszVCj3kKDboEty2RduwPLx/2ztWYBCvFhd49JGdi/nzMi+j2d5HCI3V8W06pN"
+ "mvrVU4G0ImVRa8wpmQCSm2Tp0s42FAVHWw8yMwIDAQABoxwwGjAYBgNVHREEETAP"
+ "gg1iLmV4YW1wbGUuY29tMA0GCSqGSIb3DQEBBQUAA4IBAQDI/n0NYakuRP/485/A"
+ "dan71qBy3sljjOreq71IfBdtq+GEjCL1B0TD0V338LXki9NicCLeD/MWfceDjV0u"
+ "AjPTxaZEn/NWqXo0mpNC535Y6G46mIHYDGC8JyvCJjaXF+GVstNt6lXzZp2Yn3Si"
+ "K57uVb+zz5zAGSO982I2HACZPnF/oAtp7bwxzwvBsLqSLw3hh0ATVPp6ktE+WMoI"
+ "X75CVcDmU0zjXqzKiFPKeTVjQG6YxgvplMaag/iNngkgEhX4PIrxdIEsHf8l9ogC"
+ "dz51MFxetsC4D2KRq8IblF9i+9r3hlv+Dbf9ovYe9Hu0usloSinImoWOw42iWWmP"
+ "vT4l",
+ "C=US, ST=Washington, L=North Bend, O=Internet Widgits Pty Ltd, "
+ "CN=a.example.com",
+ "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.10 2.5.4.3",
+ "CN=Internet Widgits Intermediate CA",
+ "2.5.4.3",
+ "2015-01-29T00:33:55.000000Z",
+ "2016-01-29T00:33:55.000000Z",
+ "b.example.com",
+ "47fa5c76fee6e21e37def6da3746bba84a5a09bf"
+ },
+ /* X.509 certificate with multiple Relative Distinguished Names
+ * Borrowed form the Chromium test suite see thier bug here
+ * https://code.google.com/p/chromium/issues/detail?id=101009
+ */
+ { "MIICsDCCAhmgAwIBAgIJAO9sL1fZ/VoPMA0GCSqGSIb3DQEBBQUAMHExbzAJBgNV"
+ "BAYTAlVTMA8GA1UECgwIQ2hyb21pdW0wFgYKCZImiZPyLGQBGRYIQ2hyb21pdW0w"
+ "GgYDVQQDDBNNdWx0aXZhbHVlIFJETiBUZXN0MB0GA1UECwwWQ2hyb21pdW0gbmV0"
+ "X3VuaXR0ZXN0czAeFw0xMTEyMDIwMzQ3MzlaFw0xMjAxMDEwMzQ3MzlaMHExbzAJ"
+ "BgNVBAYTAlVTMA8GA1UECgwIQ2hyb21pdW0wFgYKCZImiZPyLGQBGRYIQ2hyb21p"
+ "dW0wGgYDVQQDDBNNdWx0aXZhbHVlIFJETiBUZXN0MB0GA1UECwwWQ2hyb21pdW0g"
+ "bmV0X3VuaXR0ZXN0czCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAnSMQ7YeC"
+ "sOuk+0n128F7TfDtG/X48sG10oTe65SC8N6LBLfo7YYiQZlWVHEzjsFpaiv0dx4k"
+ "cIFbVghXAky/r5qgM1XiAGuzzFw7R27cBTC9DPlRwHArP3CiEKO3iz8i+qu9x0il"
+ "/9N70LcSSAu/kGLxikDbHRoM9d2SKhy2LGsCAwEAAaNQME4wHQYDVR0OBBYEFI1e"
+ "cfoqc7qfjmMyHF2rh9CrR6u3MB8GA1UdIwQYMBaAFI1ecfoqc7qfjmMyHF2rh9Cr"
+ "R6u3MAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADgYEAGKwN01A47nxVHOkw"
+ "wFdbT8t9FFkY3pIg5meoqO3aATNaSEzkZoUljWtWgWfzr+n4ElwZBxeYv9cPurVk"
+ "a+wXygzWzsOzCUMKBI/aS8ijRervyvh6LpGojPGn1HttnXNLmhy+BLECs7cq6f0Z"
+ "hvImrEWhD5uZGlOxaZk+bFEjQHA=",
+ "C=US, O=Chromium, 0.9.2342.19200300.100.1.25=Chromium, "
+ "CN=Multivalue RDN Test, OU=Chromium net_unittests",
+ "2.5.4.6 2.5.4.10 0.9.2342.19200300.100.1.25 2.5.4.3 2.5.4.11",
+ "C=US, O=Chromium, 0.9.2342.19200300.100.1.25=Chromium, "
+ "CN=Multivalue RDN Test, OU=Chromium net_unittests",
+ "2.5.4.6 2.5.4.10 0.9.2342.19200300.100.1.25 2.5.4.3 2.5.4.11",
+ "2011-12-02T03:47:39.000000Z",
+ "2012-01-01T03:47:39.000000Z",
+ NULL,
+ "99302ca2824f585a117bb41302a388daa0519765"
+ },
+ /* certificate with subject that includes an attribute that has an
+ * object id that has leading zeros. This isn't technically legal
+ * but a simplistic parser might parser it the same as an object
+ * id that doesn't have a leading zero. In this case the object id
+ * with a leading zero could parse to the same object id as the
+ * Common Name. Make sure we don't treat it as such. */
+ { "MIIDDjCCAfYCAQEwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQVUxEzARBgNV"
+ "BAgTClNvbWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0"
+ "ZDAeFw0xNTAxMjcwNzQ5MDhaFw0xNjAxMjcwNzQ5MDhaMFUxCzAJBgNVBAYTAlVT"
+ "MRMwEQYDVQQIEwpXYXNoaW5ndG9uMRMwEQYDVQQHEwpOb3J0aCBCZW5kMRwwGgYE"
+ "VQSAAxMSbm90YWNuLmV4YW1wbGUuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A"
+ "MIIBCgKCAQEAvXCJv0gr9d3GNYiukPrbse0FdXmuBx2mPf665WyZVHk9JiPnDcb2"
+ "ng8gHLgJe8izou6I0vN2iJgy91rUPvX9zA3qVhml+cboVY2jHCPWo/v5PQsXAgLV"
+ "5gVjp2POn3N0O1xcS1yNe249LkP0Di3kAMp5gkzdprm3fD3JDW1Q+ocQylnbjzG0"
+ "FtNQSUJLITvPXjR7ny46Fci2mv8scHOvlEXTK5/2RoBaoK2jWQimqGfFj1sr1vqZ"
+ "Wcb6NAdZso64Xg1V6CWX8zymlA7gAhTQWveq+ovUWcXpmR8aj9pYNuy0aZW3BANz"
+ "N6L0G7OZiVUvvzpfnn0V3Z/sR/iQs7q3nQIDAQABMA0GCSqGSIb3DQEBBQUAA4IB"
+ "AQACZwruCiesCRkT08AtHl0WQnQui58e9/7En+iqxNQO6+fx84SfWGcUFYZtvzdO"
+ "KkHNTs06km+471OjLSDcotRkdqO1JxQCkNxbrPat7T6FrO9n2JFivx6eijRqK/jB"
+ "cBYW92dK4BfXU4+FyeB2OIpyPjuqLU2j7S5p7qNU50i/1J7Qt669nXeaPINIfZdW"
+ "sDjjWkFR1VOgXS/zeu/GOxlQFmmcde+X/qkFI+L352VX7Ktf95j4ms4vG2yZgNfe"
+ "jbNb9a7LMcqlop/PlX5WBGv8GGKUNZO0LvukFYOULf1oL8VQsN0x/gRHGC7m9kVM"
+ "3hojWZDXAY4mYqdBCRX7/gkt",
+ "C=US, ST=Washington, L=North Bend, 2.5.4.03=notacn.example.com",
+ "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.03",
+ "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd",
+ "2.5.4.6 2.5.4.8 2.5.4.10",
+ "2015-01-27T07:49:08.000000Z",
+ "2016-01-27T07:49:08.000000Z",
+ NULL,
+ "6f24b834ba00fb4ef863df63b8fbeddab25e4838"
+ },
+ /* certificate with subject that includes an attribute that has an
+ * object id that has an overflow such that it calculates to
+ * the same object id as the Common Name (2.5.4.3). OpenSSL
+ * with its bignum support shows this as 2.5.4.2361183241434822606851.
+ * It would be wrong to display this as a Common Name to the user. */
+ { "MIIDGTCCAgECAQEwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQVUxEzARBgNV"
+ "BAgTClNvbWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0"
+ "ZDAeFw0xNTAxMjcwODMxNDNaFw0xNjAxMjcwODMxNDNaMGAxCzAJBgNVBAYTAlVT"
+ "MRMwEQYDVQQIEwpXYXNoaW5ndG9uMRMwEQYDVQQHEwpOb3J0aCBCZW5kMScwJQYN"
+ "VQSCgICAgICAgICAAxMUb3ZlcmZsb3cuZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3"
+ "DQEBAQUAA4IBDwAwggEKAoIBAQDHL1e8zSPyRND3tI42Vqca2FoCiWn881Czv2ct"
+ "tGFwyjUM8R1yHXEP+doS9KN9L29xRWZRxyCQ18S+QbjNQCh6Ay22qnkBu0uPdVB6"
+ "iIVKiW9RzU8dZSFMnveUZYLloG12kK++ooJGIstTJwkI8Naw1X1D29gZaY9oSKAc"
+ "Gs5c92po61RoetB744dUfUbAXi8eEd4ShdsdnCoswpEI4WTLdYLZ/cH/sU1a5Djm"
+ "cAfEBzZSOseEQSG7Fa/HvHyW+jDNnKG2r73M45TDcXAunSFcAYl1ioBaRwwdcTbK"
+ "SMGORThIX5UwpJDZI5sTVmTTRuCjbMxXXki/g9fTYD6mlaavAgMBAAEwDQYJKoZI"
+ "hvcNAQEFBQADggEBABvZSzFniMK4lqJcubzzk410NqZQEDBxdNZTNGrQYIDV8fDU"
+ "LLoQ2/2Y6kOQbx8r3RNcaJ6JtJeVqAq05It9oR5lMJFA2r0YMl4eB2V6o35+eaKY"
+ "FXrJzwx0rki2mX+iKsgRbJTv6mFb4I7vny404WKHNgYIfB8Z5jgbwWgrXH9M6BMb"
+ "FL9gZHMmU+6uqvCPYeIIZaAjT4J4E9322gpcumI9KGVApmbQhi5lC1hBh+eUprG7"
+ "4Brl9GeCLSTnTTf4GHIpqaUsKMtJ1sN/KJGwEB7Z4aszr80P5/sjHXOyqJ78tx46"
+ "pwH7/Fx0pM7nZjJVGvcxGBBOMeKy/o2QUVvEYPU=",
+ "C=US, ST=Washington, L=North Bend, \?\?=overflow.example.com",
+ "2.5.4.6 2.5.4.8 2.5.4.7 \?\?",
+ "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd",
+ "2.5.4.6 2.5.4.8 2.5.4.10",
+ "2015-01-27T08:31:43.000000Z",
+ "2016-01-27T08:31:43.000000Z",
+ NULL,
+ "c1f063daf23e402fe58bab1a3fa2ba05c1106158"
+ },
+ /* certificate with multiple common names, make sure this behaves
+ * the same way as serf. */
+ { "MIIDJjCCAg4CAQEwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQVUxEzARBgNV"
+ "BAgTClNvbWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0"
+ "ZDAeFw0xNTAxMjExNzUwMDZaFw0xNjAxMjExNzUwMDZaMG0xCzAJBgNVBAYTAlVT"
+ "MRMwEQYDVQQIEwpXYXNoaW5ndG9uMRMwEQYDVQQHEwpOb3J0aCBCZW5kMRkwFwYD"
+ "VQQDExBnb29kLmV4YW1wbGUuY29tMRkwFwYDVQQDExBldmlsLmV4YW1wbGUuY29t"
+ "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5pfrXkiiDGCWSYhMQNHJ"
+ "gNBLEBNcFzsGpW8i6rMKVephwG7p4VqIvc0pSsmpD9IYuIxxq/2E2cziaTWyqCBp"
+ "hKKipqt8eMcu6u45LduHGiCcnN7rHORbQZTdvwzTmiVN1eI1oCVejB4zgHNkHUko"
+ "DyaALCHGRz8l7Qq6hSbiOnhH1qlscIIEsgQEyDlMZpbsWVTQKPxluhtgqVEn7wPN"
+ "qScrf2evq050NuNYYFzCmuqOGKq2gKbD/BlUqCNmEM2JPg/bdcAQxFCf0HcvDiS9"
+ "e29suMKWZAzJkbzrWhlDMG1Xt5c7dd82PcGwnL//Q7muE57luCw38Gp2vQQ3/Uki"
+ "vQIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBry9wfxYia/dCSKvDXOBKUgWFQtI8j"
+ "7vYHuouTvIb5m6b62kiUdtuaVKi3jnUbHUFohOi/6o+HIwbXSgz5CbiLjgUvONBU"
+ "BLekaguIYX9tTmg+vhWchcmVMHufj6HdQkzWtyojSQD9GjHGInNDG102KlN1cdL8"
+ "jGTrru4vnef+xA24EvYPdcS2+H2yYH0THL3JPKo1GtO4NCEGWQbS6Ygwcy+BQpbU"
+ "TBIWhlbleuCalB8qhWyijcHeszT7mFR0CarEaSLeZj6FaQpZB636iHuELmxcgiFw"
+ "j3r3QZyAMEGvPPBPKYSTgmol31pX9LYvuFGA9ADQ2in/n9WdMfYzFzOn",
+ "C=US, ST=Washington, L=North Bend, "
+ "CN=good.example.com, CN=evil.example.com",
+ "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.3 2.5.4.3",
+ "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd",
+ "2.5.4.6 2.5.4.8 2.5.4.10",
+ "2015-01-21T17:50:06.000000Z",
+ "2016-01-21T17:50:06.000000Z",
+ "good.example.com",
+ "9693f17e59205f41ca2e14450d151b945651b2d7"
+ },
+ /* Signed using RSASSA-PSS algorithm with algorithm parameters */
+ {
+ "MIICsjCCAWkCCQDHslXYA8hCxTA+BgkqhkiG9w0BAQowMaANMAsGCWCGSAFlAwQC"
+ "AaEaMBgGCSqGSIb3DQEBCDALBglghkgBZQMEAgGiBAICAN4wKjEUMBIGA1UECgwL"
+ "TXkgTG9jYWwgQ0ExEjAQBgNVBAMMCWxvY2FsaG9zdDAeFw0xODAyMDIxNjQ4MzVa"
+ "Fw0xODAyMDMxNjQ4MzVaMC4xGDAWBgNVBAoMD015IExvY2FsIFNlcnZlcjESMBAG"
+ "A1UEAwwJbG9jYWxob3N0MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCues61"
+ "JXXpLQI5yeg4aCLWRfvnJY7wnuU6FSA++3wwCJREx1/7ebnP9RRRqqKM+ZeeFMC+"
+ "UlJE3ft2tJTDOVk9j6qjvKrJUKM1YkIe0lARxs4RtZKDGfOdBhw/+iD+6fZzhL0n"
+ "+w+dIJGzl6ADWsE/x9yjDTkdgbtxHrx/76K0KQIDAQABMD4GCSqGSIb3DQEBCjAx"
+ "oA0wCwYJYIZIAWUDBAIBoRowGAYJKoZIhvcNAQEIMAsGCWCGSAFlAwQCAaIEAgIA"
+ "3gOCAQEABYRAijCSGyFdSuUYALUnNzPylqYXlW+dMKPywlUrFEhKnvS+FD9twerI"
+ "8kT4MDW6XvhScmL1MCDPNAkFY92UqaUrgT80oyrbpuakVrxFSS1i28xy8+kXAWYq"
+ "RNQVaME1NqnATYF0ZMD5xQK4rpa76gvWj3K8Lt++9EjjbkNiirIIMQEOxh1lwnDQ"
+ "81q1Rk6iujlnVDGHDQ+w8reE6fKfSWfv1EaQRcjNKCuzrW8WNN387G2byvwaaKeL"
+ "M7lV7wiV6PwrTNTZzVG3cWKDOEP1mGE7gyMu66siLECo8U95+ahK7O6vfeT3m3gv"
+ "7kzWNYozAQtBSC7b0WqWbVrzWI4HSg==",
+ "O=My Local Server, CN=localhost",
+ "2.5.4.10 2.5.4.3",
+ "O=My Local CA, CN=localhost",
+ "2.5.4.10 2.5.4.3",
+ "2018-02-02T16:48:35.000000Z ",
+ "2018-02-03T16:48:35.000000Z ",
+ "localhost",
+ "25ab5a059acfc793fc0d3734d426794a4ca7b631"
+ },
+ { NULL }
+};
+
+static svn_error_t *
+compare_dates(const char *expected,
+ apr_time_t actual,
+ const char *type,
+ const char *subject,
+ apr_pool_t *pool)
+{
+ apr_time_t expected_tm;
+
+ if (!actual)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "No %s for cert '%s'", type, subject);
+
+ SVN_ERR(svn_time_from_cstring(&expected_tm, expected, pool));
+ if (!expected_tm)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Problem converting expected %s '%s' to text "
+ "output for cert '%s'", type, expected,
+ subject);
+
+ if (expected_tm != actual)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "The %s didn't match expected '%s',"
+ " got '%s' for cert '%s'",
+ type, expected,
+ svn_time_to_cstring(actual, pool),
+ subject);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+compare_hostnames(const char *expected,
+ const apr_array_header_t *actual,
+ const char *subject,
+ apr_pool_t *pool)
+{
+
+ int i;
+ svn_stringbuf_t *buf;
+
+ if (!actual)
+ {
+ if (expected)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "The hostnames didn't match expected '%s',"
+ " got NULL for cert '%s'",
+ expected, subject);
+ return SVN_NO_ERROR;
+ }
+
+ buf = svn_stringbuf_create_empty(pool);
+ for (i = 0; i < actual->nelts; ++i)
+ {
+ const char *hostname = APR_ARRAY_IDX(actual, i, const char*);
+ if (i > 0)
+ svn_stringbuf_appendbytes(buf, ", ", 2);
+ svn_stringbuf_appendbytes(buf, hostname, strlen(hostname));
+ }
+
+ if (strcmp(expected, buf->data))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "The hostnames didn't match expected '%s',"
+ " got '%s' for cert '%s'",
+ expected, buf->data, subject);
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+compare_oids(const char *expected,
+ const apr_array_header_t *actual,
+ const char *subject,
+ apr_pool_t *pool)
+{
+ int i;
+ svn_stringbuf_t *buf;
+
+ if (!actual)
+ {
+ if (expected)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "The oids didn't match expected '%s',"
+ " got NULL for cert '%s'",
+ expected, subject);
+ return SVN_NO_ERROR;
+ }
+
+ buf = svn_stringbuf_create_empty(pool);
+ for (i = 0; i < actual->nelts; ++i)
+ {
+ apr_size_t len;
+ const svn_x509_name_attr_t *attr = APR_ARRAY_IDX(actual, i, const svn_x509_name_attr_t *);
+ const void *oid = svn_x509_name_attr_get_oid(attr, &len);
+ const char *oid_string = svn_x509_oid_to_string(oid, len, pool, pool);
+ if (i > 0)
+ svn_stringbuf_appendbyte(buf, ' ');
+ if (oid_string)
+ svn_stringbuf_appendcstr(buf, oid_string);
+ else
+ svn_stringbuf_appendcstr(buf, "??");
+ }
+
+ if (strcmp(expected, buf->data))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "The oids didn't match expected '%s',"
+ " got '%s' for cert '%s'",
+ expected, buf->data, subject);
+ return SVN_NO_ERROR;
+
+}
+
+
+static svn_error_t *
+compare_results(struct x509_test *xt,
+ svn_x509_certinfo_t *certinfo,
+ apr_pool_t *pool)
+{
+ const char *v;
+
+ v = svn_x509_certinfo_get_subject(certinfo, pool);
+ if (!v)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "No subject for cert '%s'", xt->subject);
+ if (strcmp(v, xt->subject))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Subject didn't match for cert '%s', "
+ "expected '%s', got '%s'", xt->subject,
+ xt->subject, v);
+
+ SVN_ERR(compare_oids(xt->subject_oids, svn_x509_certinfo_get_subject_attrs(certinfo),
+ xt->subject, pool));
+
+ v = svn_x509_certinfo_get_issuer(certinfo, pool);
+ if (!v)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "No issuer for cert '%s'", xt->subject);
+ if (strcmp(v, xt->issuer))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Issuer didn't match for cert '%s', "
+ "expected '%s', got '%s'", xt->subject,
+ xt->issuer, v);
+
+ SVN_ERR(compare_oids(xt->issuer_oids, svn_x509_certinfo_get_issuer_attrs(certinfo),
+ xt->subject, pool));
+
+ SVN_ERR(compare_dates(xt->valid_from,
+ svn_x509_certinfo_get_valid_from(certinfo),
+ "valid-from",
+ xt->subject,
+ pool));
+
+ SVN_ERR(compare_dates(xt->valid_to,
+ svn_x509_certinfo_get_valid_to(certinfo),
+ "valid-to",
+ xt->subject,
+ pool));
+
+ SVN_ERR(compare_hostnames(xt->hostnames,
+ svn_x509_certinfo_get_hostnames(certinfo),
+ xt->subject,
+ pool));
+
+ v = svn_checksum_to_cstring_display(
+ svn_x509_certinfo_get_digest(certinfo), pool);
+ if (!v)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "No SHA1 digest for cert '%s'", xt->subject);
+ if (strcmp(v, xt->sha1_digest))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "SHA1 digest didn't match for cert '%s', "
+ "expected '%s', got '%s'", xt->subject,
+ xt->sha1_digest, v);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_x509_parse_cert(apr_pool_t *pool)
+{
+ struct x509_test *xt;
+ apr_pool_t *iterpool = svn_pool_create(pool);
+
+ for (xt = cert_tests; xt->base64_cert; xt++)
+ {
+ const svn_string_t *der_cert;
+ svn_x509_certinfo_t *certinfo;
+
+ svn_pool_clear(iterpool);
+
+ /* Convert header-less PEM to DER by undoing base64 encoding. */
+ der_cert = svn_base64_decode_string(svn_string_create(xt->base64_cert,
+ pool),
+ iterpool);
+
+ SVN_ERR(svn_x509_parse_cert(&certinfo, der_cert->data, der_cert->len,
+ iterpool, iterpool));
+
+ SVN_ERR(compare_results(xt, certinfo, iterpool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+struct x509_broken {
+ const char *base64_cert;
+ apr_status_t apr_err;
+};
+static struct x509_broken broken_cert_tests[] = {
+ /* Invalid zero-length name that caused a SEGV, found using AFL. */
+ {
+ "MIIDDTCCAfUCAQEwDQYJKoZIhvcNAQEFBQAwRTEAMAkGA1UEBhMCQVUxEzARBgNV"
+ "BAgTClNvbWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0"
+ "ZDAeFw0xNTAxMTkyMjEyNDhaFw0xNjAxMTkyMjEyNDhaMFQxCzAJBgNVBAYTAlVT"
+ "MRMwEQYDVQQIEwpXYXNoaW5ndG9uMRMwEQYDVQQHEwpOb3J0aCBCZW5kMRswGQYD"
+ "VQQDExJ4NTA5djEuZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw"
+ "ggEKAoIBAQDniW3DmGGtA0MoYqE9H55/RmjtTJD2WVmM/STEsw+RW74UGsZ62qfi"
+ "ADedl4ukZYKlk3TwJrGEwDBKOMWHuzCYVxhclyHkHwX7QqamvZRgaOonEu82KHuE"
+ "dZo4FhOWDC9D0yS4RFbfqvSu/JG19FYsnRQn1RPFYji6jG9TRwavplVBiMhR68kc"
+ "8HTW1Wu7uJ5SV0UtTicFes8MGek3+zWceGt+Egwd2UlIYXwTPzB5m7UPuufEdvFL"
+ "ED3pusVatohFzjCbYsuJIR5ppYd49uTxPWGvRidJ2C8GbDf9PCgDduS0Gz91Txnw"
+ "h+WiVYCQ6SxAJWp/xeZWE71k88N0vJEzAgMBAAEwDQYJKoZIhvcNAQEFBQADggEB"
+ "ABoBaObsHnIrkd3RvvGb5q7fnEfiT1DXsufS3ypf4Z8IST/z+NeaUaiRN1oLcvDz"
+ "qC7ygTYZ2BZoEw3ReCGqQWT4iYET+lH8DM+U5val3gVlSWqx1jj/wiV1OAxQsakM"
+ "BnmNs/MDshiv54irvSlqnxEp2o/BU/vMrN656C5DJkZpYoMpIWxdFnd+bzNzuN1k"
+ "pJfTjzWlGckKfdblNPOfdtccTqtQ5d4mWtYNJ8DfL5rRRwCuzXvZtbVHKxqkXaXr"
+ "CYUfFUobapgPfvvMc1QcDY+2nvhC2ij+HAPIHgZPuzJsjZRC1zwg074cfgjZbgbm"
+ "R0HVF486p3vS8HFv4lndRZA=",
+ SVN_ERR_X509_CERT_INVALID_NAME,
+ },
+ { NULL }
+};
+
+static svn_error_t *
+test_x509_parse_cert_broken(apr_pool_t *pool)
+{
+ struct x509_broken *xt;
+ apr_pool_t *iterpool = svn_pool_create(pool);
+
+ for (xt = broken_cert_tests; xt->base64_cert; xt++)
+ {
+ const svn_string_t *der_cert;
+ svn_x509_certinfo_t *certinfo;
+ svn_error_t *err;
+
+ svn_pool_clear(iterpool);
+
+ /* Convert header-less PEM to DER by undoing base64 encoding. */
+ der_cert = svn_base64_decode_string(svn_string_create(xt->base64_cert,
+ pool),
+ iterpool);
+
+ err = svn_x509_parse_cert(&certinfo, der_cert->data, der_cert->len,
+ iterpool, iterpool);
+ if (!err)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Expected parse error E%d got SUCCESS",
+ xt->apr_err);
+ if (err && err->apr_err != xt->apr_err)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, err,
+ "Expected parse error E%d got E%d",
+ xt->apr_err, err->apr_err);
+ svn_error_clear(err);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* The test table. */
+
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_x509_parse_cert,
+ "test svn_x509_parse_cert"),
+ SVN_TEST_PASS2(test_x509_parse_cert_broken,
+ "test broken certs"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/xml-test.c b/subversion/tests/libsvn_subr/xml-test.c
new file mode 100644
index 0000000..77d7c65
--- /dev/null
+++ b/subversion/tests/libsvn_subr/xml-test.c
@@ -0,0 +1,360 @@
+/* xml-test.c --- tests for the XML parser
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <apr.h>
+
+#include "svn_pools.h"
+#include "svn_string.h"
+#include "svn_xml.h"
+
+#include "../svn_test.h"
+
+typedef struct xml_callbacks_baton_t
+{
+ svn_stringbuf_t *buf;
+ svn_xml_parser_t *parser;
+} xml_callbacks_baton_t;
+
+/* Implements svn_xml_start_elem. Logs all invocations to svn_stringbuf_t
+ * provided via BATON. */
+static void
+strbuf_start_elem(void *baton, const char *name, const char **atts)
+{
+ xml_callbacks_baton_t *b = baton;
+ svn_stringbuf_appendcstr(b->buf, "<");
+ svn_stringbuf_appendcstr(b->buf, name);
+ while (*atts)
+ {
+ svn_stringbuf_appendcstr(b->buf, " ");
+ svn_stringbuf_appendcstr(b->buf, atts[0]);
+ svn_stringbuf_appendcstr(b->buf, "=");
+ svn_stringbuf_appendcstr(b->buf, atts[1]);
+ atts += 2;
+ }
+ svn_stringbuf_appendcstr(b->buf, ">");
+}
+
+/* Implements svn_xml_end_elem. Logs all invocations to svn_stringbuf_t
+ * provided via BATON. */
+static void
+strbuf_end_elem(void *baton, const char *name)
+{
+ xml_callbacks_baton_t *b = baton;
+ svn_stringbuf_appendcstr(b->buf, "</");
+ svn_stringbuf_appendcstr(b->buf, name);
+ svn_stringbuf_appendcstr(b->buf, ">");
+}
+
+/* Implements svn_xml_char_data. Logs all invocations to svn_stringbuf_t
+ * provided via BATON. */
+static void
+strbuf_cdata(void *baton, const char *data, apr_size_t len)
+{
+ xml_callbacks_baton_t *b = baton;
+ svn_stringbuf_appendbytes(b->buf, data, len);
+}
+
+/* Implements svn_xml_end_elem. Calls strbuf_end_elem() but also
+ * signals XML parser bailout. */
+static void
+err_end_elem(void *baton, const char *name)
+{
+ xml_callbacks_baton_t *b = baton;
+
+ /* Log invocation first. */
+ strbuf_end_elem(baton, name);
+
+ svn_xml_signal_bailout(svn_error_create(APR_EGENERAL, NULL, NULL),
+ b->parser);
+}
+
+static svn_error_t *
+test_simple(apr_pool_t *pool)
+{
+ const char *xml = "<root><tag1>value</tag1><tag2 a='v' /></root>";
+ const char *p;
+ xml_callbacks_baton_t b;
+
+ /* Test parsing XML in one chunk.*/
+ b.buf = svn_stringbuf_create_empty(pool);
+ b.parser = svn_xml_make_parser(&b, strbuf_start_elem, strbuf_end_elem,
+ strbuf_cdata, pool);
+
+ SVN_ERR(svn_xml_parse(b.parser, xml, strlen(xml), TRUE));
+
+ SVN_TEST_STRING_ASSERT(b.buf->data,
+ "<root><tag1>value</tag1><tag2 a=v></tag2></root>");
+ svn_xml_free_parser(b.parser);
+
+ /* Test parsing XML byte by byte.*/
+ b.buf = svn_stringbuf_create_empty(pool);
+ b.parser = svn_xml_make_parser(&b, strbuf_start_elem, strbuf_end_elem,
+ strbuf_cdata, pool);
+
+ for (p = xml; *p; p++)
+ {
+ SVN_ERR(svn_xml_parse(b.parser, p, 1, FALSE));
+ }
+ SVN_ERR(svn_xml_parse(b.parser, NULL, 0, TRUE));
+ svn_xml_free_parser(b.parser);
+
+ SVN_TEST_STRING_ASSERT(b.buf->data,
+ "<root><tag1>value</tag1><tag2 a=v></tag2></root>");
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_invalid_xml(apr_pool_t *pool)
+{
+ /* Invalid XML (missing </root>) */
+ const char *xml = "<root><tag1>value</tag1>";
+ xml_callbacks_baton_t b;
+ svn_error_t *err;
+
+ b.buf = svn_stringbuf_create_empty(pool);
+ b.parser = svn_xml_make_parser(&b, strbuf_start_elem, strbuf_end_elem,
+ strbuf_cdata, pool);
+
+ err = svn_xml_parse(b.parser, xml, strlen(xml), TRUE);
+
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_XML_MALFORMED);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_signal_bailout(apr_pool_t *pool)
+{
+  /* Well-formed XML; err_end_elem() signals bailout on the first end tag. */
+ const char *xml = "<root><tag1></tag1></root>";
+ xml_callbacks_baton_t b;
+ svn_error_t *err;
+
+ b.buf = svn_stringbuf_create_empty(pool);
+ b.parser = svn_xml_make_parser(&b, strbuf_start_elem, err_end_elem,
+ strbuf_cdata, pool);
+ err = svn_xml_parse(b.parser, xml, strlen(xml), TRUE);
+ SVN_TEST_ASSERT_ERROR(err, APR_EGENERAL);
+ SVN_TEST_STRING_ASSERT(b.buf->data,
+ "<root><tag1></tag1>");
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_invalid_xml_signal_bailout(apr_pool_t *pool)
+{
+ /* Invalid XML (missing </root>) */
+ const char *xml = "<root><tag1></tag1>";
+ xml_callbacks_baton_t b;
+ svn_error_t *err;
+ apr_status_t status;
+
+ b.buf = svn_stringbuf_create_empty(pool);
+ b.parser = svn_xml_make_parser(&b, NULL, err_end_elem, NULL, pool);
+ err = svn_xml_parse(b.parser, xml, strlen(xml), TRUE);
+
+ /* We may get SVN_ERR_XML_MALFORMED or error from err_end_elem() callback.
+ * This behavior depends how XML parser works: it may pre-parse data before
+ * callback invocation. */
+ status = err->apr_err;
+ SVN_TEST_ASSERT_ANY_ERROR(err); /* This clears err! */
+
+ if (status != SVN_ERR_XML_MALFORMED && status != APR_EGENERAL)
+ {
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Got unexpected error '%s'",
+ svn_error_symbolic_name(status));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_parser_free(apr_pool_t *pool)
+{
+ int i;
+ apr_pool_t *iterpool;
+
+ /* Test explicit svn_xml_free_parser() calls. */
+ iterpool = svn_pool_create(pool);
+ for (i = 0; i < 100; i++)
+ {
+ svn_xml_parser_t *parser;
+
+ svn_pool_clear(iterpool);
+
+ parser = svn_xml_make_parser(&parser, NULL, NULL, NULL, iterpool);
+ svn_xml_free_parser(parser);
+ }
+ svn_pool_destroy(iterpool);
+
+ /* Test parser free using pool cleanup. */
+ iterpool = svn_pool_create(pool);
+ for (i = 0; i < 100; i++)
+ {
+ svn_xml_parser_t *parser;
+
+ svn_pool_clear(iterpool);
+
+ parser = svn_xml_make_parser(&parser, NULL, NULL, NULL, iterpool);
+ /* We didn't call svn_xml_free_parser(): the parser will be freed on
+ pool cleanup. */
+ }
+ svn_pool_destroy(iterpool);
+
+ return SVN_NO_ERROR;
+}
+
+/* Test that builtin XML entities are expanded as expected. */
+static svn_error_t *
+test_xml_builtin_entity_expansion(apr_pool_t *pool)
+{
+ const char *xml =
+ "<?xml version='1.0'?>\n"
+ "<root a='&amp;'>&amp;&#9;</root>";
+
+ xml_callbacks_baton_t b;
+
+ b.buf = svn_stringbuf_create_empty(pool);
+ b.parser = svn_xml_make_parser(&b, strbuf_start_elem, strbuf_end_elem,
+ strbuf_cdata, pool);
+
+ SVN_ERR(svn_xml_parse(b.parser, xml, strlen(xml), TRUE));
+
+ SVN_TEST_STRING_ASSERT(b.buf->data,
+ "<root a=&>&\t</root>");
+
+ return SVN_NO_ERROR;
+}
+
+/* Test that custom XML entities are not allowed. */
+static svn_error_t *
+test_xml_custom_entity_expansion(apr_pool_t *pool)
+{
+ const char *xml =
+ "<?xml version='1.0'?>\n"
+ "<!DOCTYPE test ["
+ "<!ELEMENT root (#PCDATA)>"
+ "<!ENTITY xmlentity 'val'>"
+ "]>"
+ "<root>&xmlentity;</root>";
+
+ xml_callbacks_baton_t b;
+ svn_error_t *err;
+
+ b.buf = svn_stringbuf_create_empty(pool);
+ b.parser = svn_xml_make_parser(&b, strbuf_start_elem, strbuf_end_elem,
+ strbuf_cdata, pool);
+
+ err = svn_xml_parse(b.parser, xml, strlen(xml), TRUE);
+
+ /* XML entity declarations will be either silently ignored or error
+ will be returned depending on Expat version. */
+ if (err)
+ {
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_XML_MALFORMED);
+ SVN_TEST_STRING_ASSERT(b.buf->data,
+ "");
+ }
+ else
+ {
+ SVN_TEST_STRING_ASSERT(b.buf->data,
+ "<root></root>");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_xml_doctype_declaration(apr_pool_t *pool)
+{
+ const char *xml =
+ "<?xml version='1.0'?>\n"
+ "<?xml-stylesheet type='text/xsl' href='/svnindex.xsl'?>"
+ "<!DOCTYPE svn ["
+ " <!ELEMENT svn (index)>"
+ " <!ATTLIST svn version CDATA #REQUIRED"
+ " href CDATA #REQUIRED>"
+ " <!ELEMENT index (updir?, (file | dir)*)>"
+ " <!ATTLIST index name CDATA #IMPLIED"
+ " path CDATA #IMPLIED"
+ " rev CDATA #IMPLIED"
+ " base CDATA #IMPLIED>"
+ " <!ELEMENT updir EMPTY>"
+ " <!ATTLIST updir href CDATA #REQUIRED>"
+ " <!ELEMENT file EMPTY>"
+ " <!ATTLIST file name CDATA #REQUIRED"
+ " href CDATA #REQUIRED>"
+ " <!ELEMENT dir EMPTY>"
+ " <!ATTLIST dir name CDATA #REQUIRED"
+ " href CDATA #REQUIRED>"
+ "]>"
+ "<svn version='1.9.4'>"
+ " <index rev='0' path='Collection of Repositories'>"
+ " </index>"
+ "</svn>";
+
+ xml_callbacks_baton_t b;
+
+ b.buf = svn_stringbuf_create_empty(pool);
+ b.parser = svn_xml_make_parser(&b, strbuf_start_elem, strbuf_end_elem,
+ strbuf_cdata, pool);
+
+ SVN_ERR(svn_xml_parse(b.parser, xml, strlen(xml), TRUE));
+
+ SVN_TEST_STRING_ASSERT(b.buf->data,
+ "<svn version=1.9.4>"
+ " <index rev=0 path=Collection of Repositories>"
+ " </index>"
+ "</svn>");
+
+ return SVN_NO_ERROR;
+}
+
+/* The test table. */
+static int max_threads = 1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_simple,
+ "simple XML parser test"),
+ SVN_TEST_PASS2(test_invalid_xml,
+ "invalid XML test"),
+ SVN_TEST_PASS2(test_signal_bailout,
+ "test svn_xml_signal_bailout()"),
+ SVN_TEST_PASS2(test_invalid_xml_signal_bailout,
+ "test svn_xml_signal_bailout() for invalid XML"),
+ SVN_TEST_PASS2(test_parser_free,
+ "test svn_xml_parser_free()"),
+ SVN_TEST_PASS2(test_xml_builtin_entity_expansion,
+ "test XML builtin entity expansion"),
+ SVN_TEST_PASS2(test_xml_custom_entity_expansion,
+ "test XML custom entity expansion"),
+ SVN_TEST_PASS2(test_xml_doctype_declaration,
+ "test XML doctype declaration"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_subr/zlib.deflated b/subversion/tests/libsvn_subr/zlib.deflated
new file mode 100644
index 0000000..73dc6ae
--- /dev/null
+++ b/subversion/tests/libsvn_subr/zlib.deflated
Binary files differ
diff --git a/subversion/tests/libsvn_wc/conflict-data-test.c b/subversion/tests/libsvn_wc/conflict-data-test.c
new file mode 100644
index 0000000..7d89825
--- /dev/null
+++ b/subversion/tests/libsvn_wc/conflict-data-test.c
@@ -0,0 +1,981 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+/*
+ * conflict-data-test.c -- test the storage of tree conflict data
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <apr_hash.h>
+#include <apr_tables.h>
+
+#include "svn_props.h"
+#include "svn_pools.h"
+#include "svn_hash.h"
+#include "svn_types.h"
+#include "svn_wc.h"
+#include "private/svn_wc_private.h"
+#include "utils.h"
+#include "../svn_test.h"
+#include "../../libsvn_wc/tree_conflicts.h"
+#include "../../libsvn_wc/wc.h"
+#include "../../libsvn_wc/wc_db.h"
+#include "../../libsvn_wc/conflicts.h"
+
+/* A quick way to create error messages. */
+static svn_error_t *
+fail(apr_pool_t *pool, const char *fmt, ...)
+{
+ va_list ap;
+ char *msg;
+
+ va_start(ap, fmt);
+ msg = apr_pvsprintf(pool, fmt, ap);
+ va_end(ap);
+
+ return svn_error_create(SVN_ERR_TEST_FAILED, 0, msg);
+}
+
+/* Assert that two integers are equal. Return an error if not. */
+#define ASSERT_INT_EQ(a, b) \
+ do { \
+ if ((a) != (b)) \
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, \
+ "failed: ASSERT_INT_EQ(" #a ", " #b ") " \
+ "-> (%d == %d)", a, b); \
+ } while (0)
+
+/* Assert that two strings are equal or both null. Return an error if not. */
+#define ASSERT_STR_EQ(a, b) \
+ SVN_TEST_STRING_ASSERT(a, b)
+
+/* Assert that two version_t's are equal or both null. Return an error if not. */
+static svn_error_t *
+compare_version(const svn_wc_conflict_version_t *actual,
+ const svn_wc_conflict_version_t *expected)
+{
+ if (actual == NULL && expected == NULL)
+ return SVN_NO_ERROR;
+
+ SVN_TEST_ASSERT(actual && expected);
+ ASSERT_STR_EQ(actual->repos_url, expected->repos_url);
+ ASSERT_INT_EQ((int)actual->peg_rev, (int)expected->peg_rev);
+ ASSERT_STR_EQ(actual->path_in_repos, expected->path_in_repos);
+ ASSERT_INT_EQ(actual->node_kind, expected->node_kind);
+ return SVN_NO_ERROR;
+}
+
+/* Assert that two conflict descriptions contain exactly the same data
+ * (including names of temporary files), or are both NULL. Return an
+ * error if not. */
+static svn_error_t *
+compare_conflict(const svn_wc_conflict_description2_t *actual,
+ const svn_wc_conflict_description2_t *expected)
+{
+ if (actual == NULL && expected == NULL)
+ return SVN_NO_ERROR;
+
+ SVN_TEST_ASSERT(actual && expected);
+
+ ASSERT_INT_EQ(actual->kind, expected->kind);
+ ASSERT_STR_EQ(actual->local_abspath, expected->local_abspath);
+ ASSERT_INT_EQ(actual->node_kind, expected->node_kind);
+ ASSERT_STR_EQ(actual->property_name, expected->property_name);
+ ASSERT_INT_EQ(actual->is_binary, expected->is_binary);
+ ASSERT_STR_EQ(actual->mime_type, expected->mime_type);
+ ASSERT_INT_EQ(actual->action, expected->action);
+ ASSERT_INT_EQ(actual->reason, expected->reason);
+ ASSERT_STR_EQ(actual->base_abspath, expected->base_abspath);
+ ASSERT_STR_EQ(actual->their_abspath, expected->their_abspath);
+ ASSERT_STR_EQ(actual->my_abspath, expected->my_abspath);
+ ASSERT_STR_EQ(actual->merged_file, expected->merged_file);
+ ASSERT_INT_EQ(actual->operation, expected->operation);
+ SVN_ERR(compare_version(actual->src_left_version,
+ expected->src_left_version));
+ SVN_ERR(compare_version(actual->src_right_version,
+ expected->src_right_version));
+ return SVN_NO_ERROR;
+}
+
+/* Assert that a file contains the expected data. Return an
+ * error if not. */
+static svn_error_t *
+compare_file_content(const char *file_abspath,
+ const char *expected_val,
+ apr_pool_t *scratch_pool)
+{
+ svn_stringbuf_t *actual_val;
+
+ SVN_ERR(svn_stringbuf_from_file2(&actual_val, file_abspath, scratch_pool));
+ ASSERT_STR_EQ(actual_val->data, expected_val);
+ return SVN_NO_ERROR;
+}
+
+/* Assert that ACTUAL and EXPECTED both represent the same property
+ * conflict, or are both NULL. Return an error if not.
+ *
+ * Compare the property values found in files named by
+ * ACTUAL->base_abspath, ACTUAL->my_abspath, ACTUAL->merged_file
+ * with EXPECTED_BASE_VAL, EXPECTED_MY_VAL, EXPECTED_THEIR_VAL
+ * respectively, ignoring the corresponding fields in EXPECTED. */
+static svn_error_t *
+compare_prop_conflict(const svn_wc_conflict_description2_t *actual,
+ const svn_wc_conflict_description2_t *expected,
+ const char *expected_base_val,
+ const char *expected_my_val,
+ const char *expected_their_val,
+ apr_pool_t *scratch_pool)
+{
+ if (actual == NULL && expected == NULL)
+ return SVN_NO_ERROR;
+
+ SVN_TEST_ASSERT(actual && expected);
+ ASSERT_INT_EQ(actual->kind, svn_wc_conflict_kind_property);
+ ASSERT_INT_EQ(expected->kind, svn_wc_conflict_kind_property);
+
+ ASSERT_STR_EQ(actual->local_abspath, expected->local_abspath);
+ ASSERT_INT_EQ(actual->node_kind, expected->node_kind);
+ ASSERT_STR_EQ(actual->property_name, expected->property_name);
+ ASSERT_INT_EQ(actual->action, expected->action);
+ ASSERT_INT_EQ(actual->reason, expected->reason);
+ ASSERT_INT_EQ(actual->operation, expected->operation);
+ SVN_ERR(compare_version(actual->src_left_version,
+ expected->src_left_version));
+ SVN_ERR(compare_version(actual->src_right_version,
+ expected->src_right_version));
+
+ SVN_ERR(compare_file_content(actual->base_abspath, expected_base_val,
+ scratch_pool));
+ SVN_ERR(compare_file_content(actual->my_abspath, expected_my_val,
+ scratch_pool));
+ /* Historical wart: for a prop conflict, 'theirs' is in the 'merged_file'
+ * field, and the conflict artifact file is in the 'theirs_abspath' field. */
+ SVN_ERR(compare_file_content(actual->merged_file, expected_their_val,
+ scratch_pool));
+ /*ASSERT_STR_EQ(actual->theirs_abspath, conflict_artifact_file));*/
+
+ /* These are 'undefined' for a prop conflict */
+ /*ASSERT_INT_EQ(actual->is_binary, expected->is_binary);*/
+ /*ASSERT_STR_EQ(actual->mime_type, expected->mime_type);*/
+
+ return SVN_NO_ERROR;
+}
+
+/* Create and return a tree conflict description */
+static svn_wc_conflict_description2_t *
+tree_conflict_create(const char *local_abspath,
+ svn_node_kind_t node_kind,
+ svn_wc_operation_t operation,
+ svn_wc_conflict_action_t action,
+ svn_wc_conflict_reason_t reason,
+ const char *left_repo,
+ const char *left_path,
+ svn_revnum_t left_revnum,
+ svn_node_kind_t left_kind,
+ const char *right_repo,
+ const char *right_path,
+ svn_revnum_t right_revnum,
+ svn_node_kind_t right_kind,
+ apr_pool_t *result_pool)
+{
+ svn_wc_conflict_version_t *left, *right;
+ svn_wc_conflict_description2_t *conflict;
+
+ left = svn_wc_conflict_version_create2(left_repo, NULL, left_path,
+ left_revnum, left_kind, result_pool);
+ right = svn_wc_conflict_version_create2(right_repo, NULL, right_path,
+ right_revnum, right_kind,
+ result_pool);
+ conflict = svn_wc_conflict_description_create_tree2(
+ local_abspath, node_kind, operation,
+ left, right, result_pool);
+ conflict->action = action;
+ conflict->reason = reason;
+ return conflict;
+}
+
+static svn_error_t *
+test_deserialize_tree_conflict(apr_pool_t *pool)
+{
+ const svn_wc_conflict_description2_t *conflict;
+ svn_wc_conflict_description2_t *exp_conflict;
+ const char *tree_conflict_data;
+ const char *local_abspath;
+ const svn_skel_t *skel;
+
+ tree_conflict_data = "(conflict Foo.c file update deleted edited "
+ "(version 0 2 -1 0 0 ) (version 0 2 -1 0 0 ))";
+
+ SVN_ERR(svn_dirent_get_absolute(&local_abspath, "Foo.c", pool));
+ exp_conflict = svn_wc_conflict_description_create_tree2(
+ local_abspath, svn_node_file, svn_wc_operation_update,
+ NULL, NULL, pool);
+ exp_conflict->action = svn_wc_conflict_action_delete;
+ exp_conflict->reason = svn_wc_conflict_reason_edited;
+
+ skel = svn_skel__parse(tree_conflict_data, strlen(tree_conflict_data), pool);
+ SVN_ERR(svn_wc__deserialize_conflict(&conflict, skel, "", pool, pool));
+
+ if ((conflict->node_kind != exp_conflict->node_kind) ||
+ (conflict->action != exp_conflict->action) ||
+ (conflict->reason != exp_conflict->reason) ||
+ (conflict->operation != exp_conflict->operation) ||
+ (strcmp(conflict->local_abspath, exp_conflict->local_abspath) != 0))
+ return fail(pool, "Unexpected tree conflict");
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_serialize_tree_conflict_data(apr_pool_t *pool)
+{
+ svn_wc_conflict_description2_t *conflict;
+ const char *tree_conflict_data;
+ const char *expected;
+ const char *local_abspath;
+ svn_skel_t *skel;
+
+ SVN_ERR(svn_dirent_get_absolute(&local_abspath, "Foo.c", pool));
+
+ conflict = svn_wc_conflict_description_create_tree2(
+ local_abspath, svn_node_file, svn_wc_operation_update,
+ NULL, NULL, pool);
+ conflict->action = svn_wc_conflict_action_delete;
+ conflict->reason = svn_wc_conflict_reason_edited;
+
+ SVN_ERR(svn_wc__serialize_conflict(&skel, conflict, pool, pool));
+ tree_conflict_data = svn_skel__unparse(skel, pool)->data;
+
+ expected = "(conflict Foo.c file update deleted edited "
+ "(version 0 2 -1 0 0 ) (version 0 2 -1 0 0 ))";
+
+ if (strcmp(expected, tree_conflict_data) != 0)
+ return fail(pool, "Unexpected text from tree conflict\n"
+ " Expected: %s\n"
+ " Actual: %s\n", expected, tree_conflict_data);
+
+ return SVN_NO_ERROR;
+}
+
+/* Test WC-DB-level conflict APIs. Especially tree conflicts. */
+static svn_error_t *
+test_read_write_tree_conflicts(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t sbox;
+
+ const char *parent_abspath;
+ const char *child1_abspath, *child2_abspath;
+ svn_wc_conflict_description2_t *conflict1, *conflict2;
+
+ SVN_ERR(svn_test__sandbox_create(&sbox, "read_write_tree_conflicts", opts, pool));
+ parent_abspath = svn_dirent_join(sbox.wc_abspath, "A", pool);
+ child1_abspath = svn_dirent_join(parent_abspath, "foo", pool); /* file victim */
+ child2_abspath = svn_dirent_join(parent_abspath, "bar", pool); /* dir victim */
+ SVN_ERR(sbox_wc_mkdir(&sbox, "A"));
+ SVN_ERR(sbox_wc_mkdir(&sbox, "A/bar"));
+ SVN_ERR(sbox_file_write(&sbox, "A/foo", ""));
+ SVN_ERR(sbox_wc_add(&sbox, "A/foo"));
+
+ conflict1 = tree_conflict_create(child1_abspath, svn_node_file,
+ svn_wc_operation_merge,
+ svn_wc_conflict_action_delete,
+ svn_wc_conflict_reason_edited,
+ "dummy://localhost", "path/to/foo",
+ 51, svn_node_file,
+ "dummy://localhost", "path/to/foo",
+ 52, svn_node_none,
+ pool);
+
+ conflict2 = tree_conflict_create(child2_abspath, svn_node_dir,
+ svn_wc_operation_merge,
+ svn_wc_conflict_action_replace,
+ svn_wc_conflict_reason_edited,
+ "dummy://localhost", "path/to/bar",
+ 51, svn_node_dir,
+ "dummy://localhost", "path/to/bar",
+ 52, svn_node_file,
+ pool);
+
+ /* Write */
+ SVN_ERR(svn_wc__add_tree_conflict(sbox.wc_ctx, /*child1_abspath,*/
+ conflict1, pool));
+ SVN_ERR(svn_wc__add_tree_conflict(sbox.wc_ctx, /*child2_abspath,*/
+ conflict2, pool));
+
+ /* Query (conflict1 through WC-DB API, conflict2 through WC API) */
+ {
+ svn_boolean_t text_c, prop_c, tree_c;
+
+ SVN_ERR(svn_wc__internal_conflicted_p(&text_c, &prop_c, &tree_c,
+ sbox.wc_ctx->db, child1_abspath, pool));
+ SVN_TEST_ASSERT(tree_c);
+ SVN_TEST_ASSERT(! text_c && ! prop_c); /* tree conflict only */
+
+ SVN_ERR(svn_wc_conflicted_p3(&text_c, &prop_c, &tree_c,
+ sbox.wc_ctx, child2_abspath, pool));
+ SVN_TEST_ASSERT(tree_c);
+ SVN_TEST_ASSERT(! text_c && ! prop_c);
+ }
+
+ /* Read conflicts back */
+ {
+ const svn_wc_conflict_description2_t *read_conflict;
+
+ SVN_ERR(svn_wc__get_tree_conflict(&read_conflict, sbox.wc_ctx,
+ child1_abspath, pool, pool));
+ SVN_ERR(compare_conflict(read_conflict, conflict1));
+
+ SVN_ERR(svn_wc__get_tree_conflict(&read_conflict, sbox.wc_ctx,
+ child2_abspath, pool, pool));
+ SVN_ERR(compare_conflict(read_conflict, conflict2));
+ }
+
+ /* Read many */
+ {
+ const apr_array_header_t *victims;
+
+ SVN_ERR(svn_wc__db_read_conflict_victims(&victims,
+ sbox.wc_ctx->db, parent_abspath,
+ pool, pool));
+ SVN_TEST_ASSERT(victims->nelts == 2); /* foo and bar */
+ }
+
+ /* ### TODO: to test...
+ * svn_wc__db_read_conflicts
+ * svn_wc__node_get_conflict_info
+ * svn_wc__del_tree_conflict
+ */
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_serialize_prop_conflict(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t sbox;
+ svn_skel_t *conflict_skel;
+ svn_boolean_t complete;
+
+ SVN_ERR(svn_test__sandbox_create(&sbox, "test_serialize_prop_conflict", opts, pool));
+
+ conflict_skel = svn_wc__conflict_skel_create(pool);
+
+ SVN_TEST_ASSERT(conflict_skel != NULL);
+ SVN_TEST_ASSERT(svn_skel__list_length(conflict_skel) == 2); /* a fresh skel has exactly two elements */
+
+ SVN_ERR(svn_wc__conflict_skel_is_complete(&complete, conflict_skel));
+ SVN_TEST_ASSERT(!complete); /* Nothing set */
+
+ {
+ apr_hash_t *mine = apr_hash_make(pool);
+ apr_hash_t *their_old = apr_hash_make(pool);
+ apr_hash_t *theirs = apr_hash_make(pool);
+ apr_hash_t *conflicts = apr_hash_make(pool);
+ const char *marker_abspath;
+
+ apr_hash_set(mine, "prop", APR_HASH_KEY_STRING,
+ svn_string_create("Mine", pool));
+
+ apr_hash_set(their_old, "prop", APR_HASH_KEY_STRING,
+ svn_string_create("Their-Old", pool));
+
+ apr_hash_set(theirs, "prop", APR_HASH_KEY_STRING,
+ svn_string_create("Theirs", pool));
+
+ apr_hash_set(conflicts, "prop", APR_HASH_KEY_STRING, ""); /* value unused; the key names the conflicted prop */
+
+ SVN_ERR(svn_io_open_unique_file3(NULL, &marker_abspath, sbox.wc_abspath,
+ svn_io_file_del_on_pool_cleanup, pool,
+ pool));
+
+ SVN_ERR(svn_wc__conflict_skel_add_prop_conflict(conflict_skel,
+ sbox.wc_ctx->db,
+ sbox.wc_abspath,
+ marker_abspath,
+ mine, their_old,
+ theirs, conflicts,
+ pool, pool));
+ }
+
+ SVN_ERR(svn_wc__conflict_skel_is_complete(&complete, conflict_skel));
+ SVN_TEST_ASSERT(!complete); /* Still incomplete: no operation recorded yet */
+
+ SVN_ERR(svn_wc__conflict_skel_set_op_update(
+ conflict_skel,
+ svn_wc_conflict_version_create2("http://my-repos/svn",
+ "uuid", "trunk", 12,
+ svn_node_dir, pool),
+ NULL /* wc_only */,
+ pool, pool));
+
+ SVN_ERR(svn_wc__conflict_skel_is_complete(&complete, conflict_skel));
+ SVN_TEST_ASSERT(complete); /* Everything available */
+
+ {
+ apr_hash_t *mine;
+ apr_hash_t *their_old;
+ apr_hash_t *theirs;
+ apr_hash_t *conflicts;
+ const char *marker_abspath;
+ svn_string_t *v;
+
+ SVN_ERR(svn_wc__conflict_read_prop_conflict(&marker_abspath,
+ &mine,
+ &their_old,
+ &theirs,
+ &conflicts,
+ sbox.wc_ctx->db,
+ sbox.wc_abspath,
+ conflict_skel,
+ pool, pool));
+
+ SVN_TEST_ASSERT(svn_dirent_is_ancestor(sbox.wc_abspath, marker_abspath)); /* marker lives inside the WC */
+
+ v = apr_hash_get(mine, "prop", APR_HASH_KEY_STRING);
+ SVN_TEST_STRING_ASSERT(v->data, "Mine");
+
+ v = apr_hash_get(their_old, "prop", APR_HASH_KEY_STRING);
+ SVN_TEST_STRING_ASSERT(v->data, "Their-Old");
+
+ v = apr_hash_get(theirs, "prop", APR_HASH_KEY_STRING);
+ SVN_TEST_STRING_ASSERT(v->data, "Theirs");
+
+ SVN_TEST_ASSERT(apr_hash_count(conflicts) == 1);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_serialize_text_conflict(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t sbox;
+ svn_skel_t *conflict_skel;
+ svn_boolean_t complete;
+
+ SVN_ERR(svn_test__sandbox_create(&sbox, "test_serialize_text_conflict", opts, pool));
+
+ conflict_skel = svn_wc__conflict_skel_create(pool);
+
+ SVN_ERR(svn_wc__conflict_skel_add_text_conflict(
+ conflict_skel,
+ sbox.wc_ctx->db, sbox.wc_abspath,
+ svn_dirent_join(sbox.wc_abspath, "mine", pool),
+ svn_dirent_join(sbox.wc_abspath, "old-theirs", pool),
+ svn_dirent_join(sbox.wc_abspath, "theirs", pool),
+ pool, pool));
+
+ SVN_ERR(svn_wc__conflict_skel_set_op_merge(
+ conflict_skel,
+ svn_wc_conflict_version_create2("http://my-repos/svn",
+ "uuid", "trunk", 12,
+ svn_node_dir, pool),
+ svn_wc_conflict_version_create2("http://my-repos/svn",
+ "uuid", "branch/my", 8,
+ svn_node_dir, pool),
+ pool, pool));
+
+ SVN_ERR(svn_wc__conflict_skel_is_complete(&complete, conflict_skel));
+ SVN_TEST_ASSERT(complete); /* Everything available */
+
+ {
+ const char *mine_abspath;
+ const char *old_their_abspath;
+ const char *their_abspath;
+
+ SVN_ERR(svn_wc__conflict_read_text_conflict(&mine_abspath,
+ &old_their_abspath,
+ &their_abspath,
+ sbox.wc_ctx->db,
+ sbox.wc_abspath,
+ conflict_skel,
+ pool, pool));
+
+ SVN_TEST_STRING_ASSERT(
+ svn_dirent_skip_ancestor(sbox.wc_abspath, mine_abspath),
+ "mine");
+
+ SVN_TEST_STRING_ASSERT(
+ svn_dirent_skip_ancestor(sbox.wc_abspath, old_their_abspath),
+ "old-theirs");
+
+ SVN_TEST_STRING_ASSERT(
+ svn_dirent_skip_ancestor(sbox.wc_abspath, their_abspath),
+ "theirs");
+ }
+
+ {
+ svn_wc_operation_t operation;
+ svn_boolean_t text_conflicted;
+ const apr_array_header_t *locs;
+ SVN_ERR(svn_wc__conflict_read_info(&operation, &locs,
+ &text_conflicted, NULL, NULL,
+ sbox.wc_ctx->db, sbox.wc_abspath,
+ conflict_skel, pool, pool));
+
+ SVN_TEST_ASSERT(text_conflicted);
+ SVN_TEST_ASSERT(operation == svn_wc_operation_merge);
+
+ SVN_TEST_ASSERT(locs != NULL && locs->nelts == 2); /* left and right merge sources */
+ SVN_TEST_ASSERT(APR_ARRAY_IDX(locs, 0, svn_wc_conflict_version_t*) != NULL);
+ SVN_TEST_ASSERT(APR_ARRAY_IDX(locs, 1, svn_wc_conflict_version_t*) != NULL);
+ }
+
+ {
+ const apr_array_header_t *markers;
+ const char *old_their_abspath;
+ const char *their_abspath;
+ const char *mine_abspath;
+
+ SVN_ERR(svn_wc__conflict_read_markers(&markers,
+ sbox.wc_ctx->db, sbox.wc_abspath,
+ conflict_skel, pool, pool));
+
+ SVN_TEST_ASSERT(markers != NULL);
+ SVN_TEST_ASSERT(markers->nelts == 3);
+
+ old_their_abspath = APR_ARRAY_IDX(markers, 0, const char *); /* marker order: old-theirs, mine, theirs */
+ mine_abspath = APR_ARRAY_IDX(markers, 1, const char *);
+ their_abspath = APR_ARRAY_IDX(markers, 2, const char *);
+
+ SVN_TEST_STRING_ASSERT(
+ svn_dirent_skip_ancestor(sbox.wc_abspath, mine_abspath),
+ "mine");
+
+ SVN_TEST_STRING_ASSERT(
+ svn_dirent_skip_ancestor(sbox.wc_abspath, old_their_abspath),
+ "old-theirs");
+
+ SVN_TEST_STRING_ASSERT(
+ svn_dirent_skip_ancestor(sbox.wc_abspath, their_abspath),
+ "theirs");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_serialize_tree_conflict(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t sbox;
+ svn_skel_t *conflict_skel;
+ svn_boolean_t complete;
+
+ SVN_ERR(svn_test__sandbox_create(&sbox, "test_serialize_tree_conflict", opts, pool));
+
+ conflict_skel = svn_wc__conflict_skel_create(pool);
+
+ SVN_ERR(svn_wc__conflict_skel_add_tree_conflict(
+ conflict_skel,
+ sbox.wc_ctx->db, sbox_wc_path(&sbox, "A/B"),
+ svn_wc_conflict_reason_moved_away,
+ svn_wc_conflict_action_delete,
+ sbox_wc_path(&sbox, "A/B"), /* move source op-root */
+ pool, pool));
+
+ SVN_ERR(svn_wc__conflict_skel_set_op_switch(
+ conflict_skel,
+ svn_wc_conflict_version_create2("http://my-repos/svn",
+ "uuid", "trunk", 12,
+ svn_node_dir, pool),
+ NULL /* wc_only */,
+ pool, pool));
+
+ SVN_ERR(svn_wc__conflict_skel_is_complete(&complete, conflict_skel));
+ SVN_TEST_ASSERT(complete); /* Everything available */
+
+ {
+ svn_wc_conflict_reason_t reason;
+ svn_wc_conflict_action_t action;
+ const char *moved_away_op_root_abspath;
+
+ SVN_ERR(svn_wc__conflict_read_tree_conflict(&reason,
+ &action,
+ &moved_away_op_root_abspath,
+ sbox.wc_ctx->db,
+ sbox.wc_abspath,
+ conflict_skel,
+ pool, pool));
+
+ SVN_TEST_ASSERT(reason == svn_wc_conflict_reason_moved_away);
+ SVN_TEST_ASSERT(action == svn_wc_conflict_action_delete);
+ SVN_TEST_STRING_ASSERT(moved_away_op_root_abspath,
+ sbox_wc_path(&sbox, "A/B"));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* A conflict resolver callback baton for test_prop_conflicts(). */
+typedef struct test_prop_conflict_baton_t
+{
+ /* Sets of properties. */
+ apr_hash_t *mine;
+ apr_hash_t *their_old;
+ apr_hash_t *theirs;
+ /* The set of prop names in conflict. */
+ apr_hash_t *conflicts;
+
+ /* We use all the fields of DESC except the base/theirs/mine/merged paths. */
+ svn_wc_conflict_description2_t *desc;
+
+ int conflicts_seen; /* incremented by prop_conflict_cb() per conflict reported */
+} test_prop_conflict_baton_t;
+
+/* Set *CONFLICT_SKEL_P to a new property conflict skel reflecting the
+ * conflict details given in B. */
+static svn_error_t *
+create_prop_conflict_skel(svn_skel_t **conflict_skel_p,
+ svn_wc_context_t *wc_ctx,
+ const test_prop_conflict_baton_t *b,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ svn_skel_t *conflict_skel = svn_wc__conflict_skel_create(result_pool);
+ const char *marker_abspath;
+ svn_boolean_t complete;
+
+ SVN_ERR(svn_io_write_unique(&marker_abspath,
+ b->desc->local_abspath,
+ "conflict-artifact-file-content\n", 6, /* NOTE(review): writes only the first 6 bytes -- presumably only the marker file's existence matters; confirm */
+ svn_io_file_del_none, scratch_pool));
+
+ SVN_ERR(svn_wc__conflict_skel_add_prop_conflict(conflict_skel,
+ wc_ctx->db,
+ b->desc->local_abspath,
+ marker_abspath,
+ b->mine, b->their_old,
+ b->theirs, b->conflicts,
+ result_pool, scratch_pool));
+
+ switch (b->desc->operation)
+ {
+ case svn_wc_operation_update:
+ SVN_ERR(svn_wc__conflict_skel_set_op_update(
+ conflict_skel,
+ b->desc->src_left_version, b->desc->src_right_version,
+ result_pool, scratch_pool));
+ break;
+ case svn_wc_operation_switch:
+ SVN_ERR(svn_wc__conflict_skel_set_op_switch(
+ conflict_skel,
+ b->desc->src_left_version, b->desc->src_right_version,
+ result_pool, scratch_pool));
+ break;
+ case svn_wc_operation_merge:
+ SVN_ERR(svn_wc__conflict_skel_set_op_merge(
+ conflict_skel,
+ b->desc->src_left_version, b->desc->src_right_version,
+ result_pool, scratch_pool));
+ break;
+ default:
+ SVN_ERR_MALFUNCTION(); /* unsupported operation kind */
+ }
+
+ SVN_ERR(svn_wc__conflict_skel_is_complete(&complete, conflict_skel));
+ SVN_TEST_ASSERT(complete);
+ *conflict_skel_p = conflict_skel;
+ return SVN_NO_ERROR;
+}
+
+/* A conflict resolver callback for test_prop_conflicts(), that checks
+ * that the conflict described to it matches the one described in BATON,
+ * and also counts the number of times it is called. */
+static svn_error_t *
+prop_conflict_cb(svn_wc_conflict_result_t **result_p,
+ const svn_wc_conflict_description2_t *desc,
+ void *baton,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ test_prop_conflict_baton_t *b = baton;
+
+ SVN_ERR(compare_prop_conflict(
+ desc, b->desc,
+ svn_prop_get_value(b->their_old, desc->property_name),
+ svn_prop_get_value(b->mine, desc->property_name),
+ svn_prop_get_value(b->theirs, desc->property_name),
+ scratch_pool));
+ b->conflicts_seen++;
+
+ *result_p = svn_wc_create_conflict_result(svn_wc_conflict_choose_postpone,
+ NULL /*merged_file*/, result_pool); /* postpone: leave the conflict recorded */
+ return SVN_NO_ERROR;
+}
+
+/* Test for correct retrieval of property conflict descriptions from
+ * the WC DB.
+ *
+ * Presently it tests just one prop conflict, and only during the
+ * 'resolve' operation. We should also test during the 'update'/
+ * 'switch'/'merge' operations.
+ */
+static svn_error_t *
+test_prop_conflicts(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t sbox;
+ svn_skel_t *conflict_skel;
+ svn_error_t *err;
+ const char *lock_abspath;
+ test_prop_conflict_baton_t *b = apr_pcalloc(pool, sizeof(*b));
+ svn_wc_conflict_description2_t *desc = apr_pcalloc(pool, sizeof(*desc));
+
+ SVN_ERR(svn_test__sandbox_create(&sbox, "test_prop_conflicts", opts, pool));
+
+ /* Describe a property conflict */
+ b->mine = apr_hash_make(pool);
+ b->their_old = apr_hash_make(pool);
+ b->theirs = apr_hash_make(pool);
+ b->conflicts = apr_hash_make(pool);
+ svn_hash_sets(b->mine, "prop", svn_string_create("Mine", pool));
+ svn_hash_sets(b->their_old, "prop", svn_string_create("Their-Old", pool));
+ svn_hash_sets(b->theirs, "prop", svn_string_create("Theirs", pool));
+ svn_hash_sets(b->conflicts, "prop", ""); /* mark "prop" as conflicted */
+
+ b->desc = desc;
+ desc->local_abspath = sbox.wc_abspath;
+ desc->kind = svn_wc_conflict_kind_property;
+ desc->node_kind = svn_node_dir;
+ desc->operation = svn_wc_operation_update;
+ desc->action = svn_wc_conflict_action_edit;
+ desc->reason = svn_wc_conflict_reason_edited;
+ desc->mime_type = NULL;
+ desc->is_binary = FALSE;
+ desc->property_name = "prop";
+ desc->src_left_version
+ = svn_wc_conflict_version_create2(sbox.repos_url, "uuid",
+ "trunk", 12, svn_node_dir, pool);
+ desc->src_right_version = NULL; /* WC only */
+
+ b->conflicts_seen = 0;
+
+ /* Record a conflict */
+ {
+ apr_pool_t *subpool = svn_pool_create(pool);
+ SVN_ERR(create_prop_conflict_skel(&conflict_skel, sbox.wc_ctx, b,
+ pool, subpool));
+ svn_pool_clear(subpool); /* conflict_skel was allocated in POOL, so it survives */
+ SVN_ERR(svn_wc__db_op_mark_conflict(sbox.wc_ctx->db,
+ sbox.wc_abspath,
+ conflict_skel, NULL, subpool));
+ svn_pool_destroy(subpool);
+ }
+
+ /* Test the API for resolving the conflict: check that correct details
+ * of the conflict are returned. */
+ SVN_ERR(svn_wc__acquire_write_lock_for_resolve(&lock_abspath, sbox.wc_ctx,
+ sbox.wc_abspath, pool, pool));
+ err = svn_wc__resolve_conflicts(sbox.wc_ctx, sbox.wc_abspath,
+ svn_depth_empty,
+ FALSE /* resolve_text */,
+ "" /* resolve_prop (ALL props) */,
+ FALSE /* resolve_tree */,
+ svn_wc_conflict_choose_unspecified,
+ prop_conflict_cb, b,
+ NULL, NULL, /* cancellation */
+ NULL, NULL, /* notification */
+ pool);
+
+ SVN_ERR(svn_error_compose_create(err,
+ svn_wc__release_write_lock(sbox.wc_ctx,
+ lock_abspath,
+ pool)));
+
+ ASSERT_INT_EQ(b->conflicts_seen, 1); /* the resolver callback ran exactly once */
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_prop_conflict_resolving(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ svn_skel_t *conflict;
+ const char *A_abspath;
+ const char *marker_abspath;
+ apr_hash_t *conflicted_props;
+ apr_hash_t *props;
+ const char *value;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "test_prop_resolving", opts, pool));
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+
+ SVN_ERR(sbox_wc_propset(&b, "prop-1", "r1", "A"));
+ SVN_ERR(sbox_wc_propset(&b, "prop-2", "r1", "A"));
+ SVN_ERR(sbox_wc_propset(&b, "prop-3", "r1", "A"));
+ SVN_ERR(sbox_wc_propset(&b, "prop-4", "r1", "A"));
+ SVN_ERR(sbox_wc_propset(&b, "prop-5", "r1", "A"));
+ SVN_ERR(sbox_wc_propset(&b, "prop-6", "r1", "A"));
+
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_propset(&b, "prop-1", "r2", "A"));
+ SVN_ERR(sbox_wc_propset(&b, "prop-2", "r2", "A"));
+ SVN_ERR(sbox_wc_propset(&b, "prop-3", "r2", "A"));
+ SVN_ERR(sbox_wc_propset(&b, "prop-4", NULL, "A"));
+ SVN_ERR(sbox_wc_propset(&b, "prop-5", NULL, "A"));
+ SVN_ERR(sbox_wc_propset(&b, "prop-7", "r2", "A"));
+ SVN_ERR(sbox_wc_propset(&b, "prop-8", "r2", "A"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ SVN_ERR(sbox_wc_propset(&b, "prop-1", "mod", "A"));
+ SVN_ERR(sbox_wc_propset(&b, "prop-2", "mod", "A"));
+ SVN_ERR(sbox_wc_propset(&b, "prop-3", "mod", "A"));
+ SVN_ERR(sbox_wc_propset(&b, "prop-4", "mod", "A"));
+ SVN_ERR(sbox_wc_propset(&b, "prop-5", "mod", "A"));
+ SVN_ERR(sbox_wc_propset(&b, "prop-6", "mod", "A"));
+ SVN_ERR(sbox_wc_propset(&b, "prop-7", "mod", "A"));
+ SVN_ERR(sbox_wc_propset(&b, "prop-8", "mod", "A"));
+
+ SVN_ERR(sbox_wc_update(&b, "", 1)); /* update back to r1 against the local mods */
+
+ A_abspath = sbox_wc_path(&b, "A");
+ SVN_ERR(svn_wc__db_read_conflict(&conflict, NULL, NULL,
+ b.wc_ctx->db, A_abspath,
+ pool, pool));
+
+ /* The update recorded property conflicts on A... */
+ SVN_TEST_ASSERT(conflict != NULL);
+
+ SVN_ERR(svn_wc__conflict_read_prop_conflict(&marker_abspath,
+ NULL, NULL, NULL,
+ &conflicted_props,
+ b.wc_ctx->db, A_abspath,
+ conflict,
+ pool, pool));
+
+ SVN_TEST_ASSERT(conflicted_props != NULL);
+ /* All properties but prop-6 are conflicted */
+ SVN_TEST_ASSERT(apr_hash_count(conflicted_props) == 7);
+ SVN_TEST_ASSERT(! svn_hash_gets(conflicted_props, "prop-6"));
+
+ /* Let's resolve a few conflicts */
+ SVN_ERR(sbox_wc_resolve_prop(&b, "A", "prop-1",
+ svn_wc_conflict_choose_mine_conflict));
+ SVN_ERR(sbox_wc_resolve_prop(&b, "A", "prop-2",
+ svn_wc_conflict_choose_theirs_conflict));
+ SVN_ERR(sbox_wc_resolve_prop(&b, "A", "prop-3",
+ svn_wc_conflict_choose_merged));
+
+ SVN_ERR(svn_wc__db_read_conflict(&conflict, NULL, NULL,
+ b.wc_ctx->db, A_abspath,
+ pool, pool));
+
+ /* The property conflict on A must still exist: four props remain unresolved */
+ SVN_TEST_ASSERT(conflict != NULL);
+
+ SVN_ERR(svn_wc__conflict_read_prop_conflict(&marker_abspath,
+ NULL, NULL, NULL,
+ &conflicted_props,
+ b.wc_ctx->db, A_abspath,
+ conflict,
+ pool, pool));
+
+ SVN_TEST_ASSERT(conflicted_props != NULL);
+ SVN_TEST_ASSERT(apr_hash_count(conflicted_props) == 4);
+
+ SVN_ERR(svn_wc__db_read_props(&props, b.wc_ctx->db, A_abspath,
+ pool, pool));
+
+ value = svn_prop_get_value(props, "prop-1");
+ SVN_TEST_STRING_ASSERT(value, "mod"); /* mine-conflict kept the local mod */
+ value = svn_prop_get_value(props, "prop-2");
+ SVN_TEST_STRING_ASSERT(value, "r1"); /* theirs-conflict took the incoming value */
+ value = svn_prop_get_value(props, "prop-3");
+ SVN_TEST_STRING_ASSERT(value, "mod"); /* merged == the local mod */
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_binary_file_conflict(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t sbox;
+ const apr_array_header_t *conflicts;
+ svn_wc_conflict_description2_t *desc;
+
+ SVN_ERR(svn_test__sandbox_create(&sbox, "test_binary_file_conflict", opts, pool));
+
+ /* Create and add a binary file. */
+ SVN_ERR(sbox_file_write(&sbox, "binary-file", "\xff\xff"));
+ SVN_ERR(sbox_wc_add(&sbox, "binary-file"));
+ SVN_ERR(sbox_wc_propset(&sbox, SVN_PROP_MIME_TYPE,
+ "application/octet-stream", "binary-file"));
+ SVN_ERR(sbox_wc_commit(&sbox, "binary-file")); /* r1 */
+
+ /* Make a change to the binary file. */
+ SVN_ERR(sbox_file_write(&sbox, "binary-file", "\xfc\xfc\xfc\xfc\xfc\xfc"));
+ SVN_ERR(sbox_wc_commit(&sbox, "binary-file")); /* r2 */
+
+ /* Update back to r1, make a conflicting change to binary file. */
+ SVN_ERR(sbox_wc_update(&sbox, "binary-file", 1));
+ SVN_ERR(sbox_file_write(&sbox, "binary-file", "\xfd\xfd\xfd\xfd"));
+
+ /* Update to HEAD and ensure the conflict is marked as binary. */
+ SVN_ERR(sbox_wc_update(&sbox, "binary-file", 2));
+ SVN_ERR(svn_wc__read_conflicts(&conflicts, NULL, sbox.wc_ctx->db,
+ sbox_wc_path(&sbox, "binary-file"),
+ FALSE /* create_tempfiles */,
+ FALSE /* only_tree_conflict */,
+ pool, pool));
+ SVN_TEST_ASSERT(conflicts->nelts == 1);
+ desc = APR_ARRAY_IDX(conflicts, 0, svn_wc_conflict_description2_t *);
+ SVN_TEST_ASSERT(desc->is_binary); /* expect binary: svn:mime-type was set above */
+
+ return SVN_NO_ERROR;
+}
+
+
+/* The test table. */
+
+static int max_threads = 1; /* run the tests one at a time */
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_PASS2(test_deserialize_tree_conflict,
+ "deserialize tree conflict"),
+ SVN_TEST_PASS2(test_serialize_tree_conflict_data,
+ "serialize tree conflict data"),
+ SVN_TEST_OPTS_PASS(test_read_write_tree_conflicts,
+ "read and write tree conflict data"),
+ SVN_TEST_OPTS_PASS(test_serialize_prop_conflict,
+ "read and write a property conflict"),
+ SVN_TEST_OPTS_PASS(test_serialize_text_conflict,
+ "read and write a text conflict"),
+ SVN_TEST_OPTS_PASS(test_serialize_tree_conflict,
+ "read and write a tree conflict"),
+ SVN_TEST_OPTS_PASS(test_prop_conflicts,
+ "test prop conflicts"),
+ SVN_TEST_OPTS_PASS(test_prop_conflict_resolving,
+ "test property conflict resolving"),
+ SVN_TEST_OPTS_PASS(test_binary_file_conflict,
+ "test binary file conflict"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_wc/create_wc_for_upgrade.sh b/subversion/tests/libsvn_wc/create_wc_for_upgrade.sh
new file mode 100755
index 0000000..622595c
--- /dev/null
+++ b/subversion/tests/libsvn_wc/create_wc_for_upgrade.sh
@@ -0,0 +1,108 @@
+#!/bin/sh
+#
+# create_wc_for_upgrade.sh : create a working copy for upgrade testing
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org/ for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+#
+# USAGE:
+# $ ./create_wc_for_upgrade.sh /path/to/svn-1.x /tmp/wc-1.x
+#
+# At this point, the working copy will be constructed for *property*
+# testing. Future changes may set up other testing scenarios.
+#
+# Note that 'svnadmin' must be in PATH
+#
+
+if test $# != 2; then
+ echo "ERROR: not enough parameters"
+ exit 1
+fi
+
+SVN="$1"
+WC="$2"
+
+if test -e "${WC}"; then
+ echo "ERROR: ${WC} exists."
+ exit 1
+fi
+
+# Some jiggery-pokery: briefly create ${WC} so we can cd into its parent
+# directory, where the repository is created as a sibling of the WC.
+mkdir "${WC}"
+cd "${WC}"
+cd ..
+rmdir "${WC}"
+
+# create a repo usable by any SVN version; bail out if the checkout fails.
+svnadmin create --pre-1.4-compatible repo
+"${SVN}" co file://`pwd`/repo "${WC}"
+cd "${WC}" || exit 1
+
+# we need some files with properties for copy sources and for deletions
+echo alpha > alpha
+echo beta > beta
+echo gamma > gamma
+echo delta > delta
+echo epsilon > epsilon
+"${SVN}" add alpha beta gamma delta epsilon
+"${SVN}" propset a-prop a-value1 alpha
+"${SVN}" propset b-prop b-value1 beta
+"${SVN}" propset g-prop g-value1 gamma
+"${SVN}" propset d-prop d-value1 delta
+"${SVN}" propset e-prop e-value1 epsilon
+"${SVN}" commit -m "commit files"
+
+### the code below needs to be rejiggered for svn <= 1.3. revert base is not
+### available, so some of the operations are not allowed.
+
+# a file with .base and .revert
+"${SVN}" delete alpha
+"${SVN}" copy epsilon alpha
+### whoops. there is an alpha.working (when using 1.7-dev, tho no ACTUAL_NODE
+### row is created).
+
+# a file with just .working
+# zeta = epsilon+1
+echo zeta > zeta
+"${SVN}" add zeta
+"${SVN}" propset z-prop z-value zeta
+
+# a file with .base and .working
+"${SVN}" propset b-more b-value2 beta
+
+# a file with .base, .revert, and .working
+"${SVN}" delete gamma
+"${SVN}" copy epsilon gamma
+"${SVN}" propset g-more g-value2 gamma
+
+# a file with .revert and .working
+"${SVN}" delete delta
+echo delta revisited > delta
+"${SVN}" add delta
+"${SVN}" propset d-more d-value2 delta
+
+# a file with just .revert
+"${SVN}" delete epsilon
+echo epsilon revisited > epsilon
+"${SVN}" add epsilon
diff --git a/subversion/tests/libsvn_wc/db-test.c b/subversion/tests/libsvn_wc/db-test.c
new file mode 100644
index 0000000..76ec893
--- /dev/null
+++ b/subversion/tests/libsvn_wc/db-test.c
@@ -0,0 +1,1556 @@
+/*
+ * db-test.c : test the wc_db subsystem
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <apr_pools.h>
+#include <apr_general.h>
+
+#include "svn_types.h"
+
+/* Make sure SVN_DEPRECATED is defined as empty before including svn_io.h.
+ We don't want to trigger deprecation warnings. */
+#ifdef SVN_DEPRECATED
+#undef SVN_DEPRECATED
+#endif
+#define SVN_DEPRECATED
+#include "svn_io.h"
+
+#include "svn_dirent_uri.h"
+#include "svn_pools.h"
+
+#include "private/svn_sqlite.h"
+
+#include "../../libsvn_wc/wc_db.h"
+
+#include "private/svn_wc_private.h"
+
+#include "../svn_test.h"
+#include "utils.h"
+
+
+#define ROOT_ONE "http://example.com/one"
+#define ROOT_TWO "http://example.com/two"
+#define ROOT_THREE "http://example.com/three"
+
+#define UUID_ONE "uuid1"
+#define UUID_TWO "uuid2"
+#define UUID_THREE "uuid3"
+
+#define TIME_1 1235142208
+#define TIME_2 1235142268
+#define TIME_3 1235142328
+
+#define TIME_1s APR_STRINGIFY(TIME_1) "000000"
+#define TIME_2s APR_STRINGIFY(TIME_2) "000000"
+#define TIME_3s APR_STRINGIFY(TIME_3) "000000"
+
+#define TIME_1a apr_time_from_sec(TIME_1)
+#define TIME_2a apr_time_from_sec(TIME_2)
+#define TIME_3a apr_time_from_sec(TIME_3)
+
+#define AUTHOR_1 "johndoe"
+#define AUTHOR_2 "janedoe"
+
+/* Some arbitrary checksum values */
+#define MD5_1 "2d18c5e57e84c5b8a5e9a6e13fa394dc"
+#define SHA1_1 "aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d"
+
+#define F_TC_DATA "(conflict F file update edited deleted (version 22 " ROOT_ONE " 1 2 branch1/ft/F none) (version 22 " ROOT_ONE " 1 3 branch1/ft/F file))"
+#define G_TC_DATA "(conflict G file update edited deleted (version 22 " ROOT_ONE " 1 2 branch1/ft/F none) (version 22 " ROOT_ONE " 1 3 branch1/ft/F file))"
+
+static const char * const TESTING_DATA = (
+ /* Load our test data.
+
+ Note: do not use named-column insertions. This allows us to test
+ the column count in the schema matches our expectation here. */
+
+ "insert into repository values (1, '" ROOT_ONE "', '" UUID_ONE "'); "
+ "insert into repository values (2, '" ROOT_TWO "', '" UUID_TWO "'); "
+ "insert into wcroot values (1, null); "
+
+ "insert into pristine values ('$sha1$" SHA1_1 "', NULL, 15, 1, '$md5 $" MD5_1 "'); "
+);
+
+#define NOT_MOVED FALSE, NULL
+#define NO_COPY_FROM 0, NULL, SVN_INVALID_REVNUM
+
+static const svn_test__nodes_data_t nodes_init_data[] = {
+ /* load the base nodes into the nodes table */
+ { 0, "", "normal", 1, "", 1, NOT_MOVED,
+ svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1 },
+
+ { 0, "A", "normal", 1, "A", 1, NOT_MOVED,
+ svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1,
+ FALSE, NULL, 10, 10 },
+
+ { 0, "B", "excluded", 1, "B", SVN_INVALID_REVNUM, NOT_MOVED,
+ svn_node_symlink},
+
+ { 0, "C", "server-excluded", 1, "C", 0, NOT_MOVED,
+ svn_node_unknown},
+
+ { 0, "D", "not-present", 1, "D", 0, NOT_MOVED,
+ svn_node_unknown},
+
+ { 0, "E", "incomplete", 1, "E", 1, NOT_MOVED,
+ svn_node_unknown},
+
+ { 0, "F", "normal", 1, "F", 1, NOT_MOVED,
+ svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2},
+
+ { 0, "G", "normal", 2, "G-alt", 1, NOT_MOVED,
+ svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 0, "H", "normal", 1, "H", 1, NOT_MOVED,
+ svn_node_symlink, "()", NULL, NULL, "H-target", 1, TIME_1a, AUTHOR_1 },
+
+ { 0, "I", "normal", 1, "I", 1, NOT_MOVED,
+ svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1 },
+
+ { 0, "J", "normal", 1, "J", 1, NOT_MOVED,
+ svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1 },
+
+ { 0, "J/J-e", "normal", 1, "J/J-e", 1, NOT_MOVED,
+ svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1 },
+
+ { 0, "J/J-e/J-e-a", "normal", 1, "J/J-e/J-e-a", 1, NOT_MOVED,
+ svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1 },
+
+ { 0, "J/J-e/J-e-b", "normal", 1, "J/J-e/J-e-b", 1, NOT_MOVED,
+ svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1 },
+
+ { 0, "J/J-e/J-e-b/Jeba", "normal", 1, "J/J-e/J-e-b/Jeba", 1, NOT_MOVED,
+ svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1 },
+
+ { 0, "J/J-f", "normal", 1, "J/J-f", 1, NOT_MOVED,
+ svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1 },
+
+ { 0, "J/J-f/J-f-a", "normal", 1, "J/J-f/J-f-a", 1, NOT_MOVED,
+ svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1 },
+
+ { 0, "J", "normal", 1, "J", 1, NOT_MOVED,
+ svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1 },
+
+ { 0, "K", "normal", 1, "K", 1, NOT_MOVED,
+ svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1 },
+
+ { 0, "K/K-a", "normal", 2, "K/K-a", 1, NOT_MOVED,
+ svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2,
+ FALSE, NULL, 15, 14},
+
+ { 0, "K/K-b", "normal", 2, "K/K-b", 1, NOT_MOVED,
+ svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2,
+ FALSE, NULL, 15, 14},
+
+ /* Load data into the working layers of NODES */
+
+ { 1, "I", "normal", 2, "some/dir", 2, NOT_MOVED,
+ svn_node_dir, "()", "immediates", NULL, NULL, 2, TIME_2a, AUTHOR_2 },
+
+  /* J was originally a local addition, but its descendants are replaced,
+     so let's turn J into a copy */
+ { 1, "J", "normal", 2, "q", 2, NOT_MOVED,
+ svn_node_dir, "()", "immediates", NULL, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 1, "J/J-a", "normal", 2, "q/J-a", 2, NOT_MOVED,
+ svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 1, "J/J-b", "normal", 2, "q/J-b", 2, NOT_MOVED,
+ svn_node_dir, "()", "infinity", NULL, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 3, "J/J-b/J-b-a", "normal", 2, "another/dir", 2, NOT_MOVED,
+ svn_node_dir, "()", "infinity", NULL, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 1, "J/J-b/J-b-b", "normal", 2, "q/J-b/J-b-b", 2, NOT_MOVED,
+ svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 1, "J/J-c", "normal", 2, "q/J-c", 2, NOT_MOVED,
+ svn_node_dir, "()", "infinity", NULL, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 1, "J/J-c/J-c-a", "normal", 2, "q/J-c/J-c-a", 2, NOT_MOVED,
+ svn_node_dir, "()", "infinity", NULL, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 2, "J/J-c", "base-deleted", NO_COPY_FROM, NOT_MOVED,
+ svn_node_dir},
+
+ { 2, "J/J-c/J-c-a", "base-deleted", NO_COPY_FROM, NOT_MOVED,
+ svn_node_dir},
+
+ { 2, "J/J-d", "normal", 2, "moved/file", 2, TRUE, NULL,
+ svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 0, "moved", "normal", 2, "moved", 2, NOT_MOVED,
+ svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1 },
+
+ { 0, "moved/file", "normal", 2, "moved/file", 2, NOT_MOVED,
+ svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 2, "moved/file", "base-deleted", NO_COPY_FROM, FALSE, "J/J-d",
+ svn_node_file},
+
+ { 1, "J/J-e", "normal", 2, "q/J-e", 2, NOT_MOVED,
+ svn_node_dir, "()", "infinity", NULL, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 1, "J/J-e/J-e-a", "normal", 2, "q/J-e/J-e-a", 2, NOT_MOVED,
+ svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 1, "J/J-e/J-e-b", "normal", 2, "q/J-e/J-e-b", 2, NOT_MOVED,
+ svn_node_dir, "()", "infinity", NULL, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 2, "J/J-e", "base-deleted", NO_COPY_FROM, FALSE, "other/place",
+ svn_node_dir},
+
+ { 2, "J/J-e/J-e-a", "base-deleted", NO_COPY_FROM, NOT_MOVED,
+ svn_node_file},
+
+ { 2, "J/J-e/J-e-b", "base-deleted", NO_COPY_FROM, NOT_MOVED,
+ svn_node_dir},
+
+ { 1, "J/J-e/J-e-b/Jeba", "base-deleted", NO_COPY_FROM, NOT_MOVED,
+ svn_node_file},
+
+ { 1, "J/J-f", "normal", 2, "q/J-f", 2, NOT_MOVED,
+ svn_node_dir, "()", "immediates", NULL, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 1, "J/J-f/J-f-a", "base-deleted", NO_COPY_FROM, NOT_MOVED,
+ svn_node_dir},
+
+ { 1, "K", "base-deleted", NO_COPY_FROM, NOT_MOVED,
+ svn_node_dir},
+
+ { 1, "K/K-a", "base-deleted", NO_COPY_FROM, NOT_MOVED,
+ svn_node_file},
+
+ { 1, "K/K-b", "base-deleted", NO_COPY_FROM, FALSE, "moved/away",
+ svn_node_file},
+
+ { 1, "L", "normal", 2, "from", 2, NOT_MOVED,
+ svn_node_dir, "()", "immediates", NULL, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 1, "L/L-a", "normal", 2, "from/L-a", 2, NOT_MOVED,
+ svn_node_dir, "()", "immediates", NULL, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 1, "L/L-a/L-a-a", "normal", 2, "from/L-a/L-a-a", 2, NOT_MOVED,
+ svn_node_dir, "()", "immediates", NULL, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 2, "L/L-a", "base-deleted", NO_COPY_FROM, NOT_MOVED,
+ svn_node_dir},
+
+ { 2, "L/L-a/L-a-a", "base-deleted", NO_COPY_FROM, NOT_MOVED,
+ svn_node_dir},
+
+ { 0, "other", "normal", 2, "other", 2, NOT_MOVED,
+ svn_node_dir, "()", "infinity", NULL, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 2, "other/place", "normal", 2, "q/J-e", 2, TRUE, NULL,
+ svn_node_dir, "()", "immediates", NULL, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 2, "other/place/J-e-a", "normal", 2, "q/J-e/J-e-a", 2, TRUE, NULL,
+ svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 2, "other/place/J-e-b", "normal", 2, "q/J-e/J-e-b", 2, TRUE, NULL,
+ svn_node_dir, "()", "infinity", NULL, NULL, 2, TIME_2a, AUTHOR_2 },
+
+ { 2, "other/place/J-e-b/Jeba", "normal", 2, "q/J-e/J-e-b/Jeba", 2, TRUE, NULL,
+ svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1 },
+
+ /*** NEW ****/
+ { 2, "moved/away", "normal", 2, "K/K-b", 1, TRUE, NULL,
+ svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2,
+ FALSE, NULL, 15, 14},
+ { 0 }
+};
+
+static const svn_test__actual_data_t actual_init_data[] = {
+ { "A", NULL, "changelist", NULL },
+ { "F", NULL, NULL, F_TC_DATA },
+ { "G", NULL, NULL, F_TC_DATA },
+
+ { 0 }
+};
+
+static svn_error_t *
+create_open(svn_wc__db_t **db,
+ const char **local_abspath,
+ const char *subdir,
+ apr_pool_t *pool)
+{
+ SVN_ERR(svn_dirent_get_absolute(local_abspath,
+ svn_dirent_join(
+ svn_test_data_path("db-test", pool),
+ subdir, pool),
+ pool));
+
+ SVN_ERR(svn_io_remove_dir2(*local_abspath, TRUE, NULL, NULL, pool));
+
+ SVN_ERR(svn_wc__db_open(db, NULL, FALSE, TRUE, pool, pool));
+ SVN_ERR(svn_test__create_fake_wc(*local_abspath, TESTING_DATA,
+ nodes_init_data, actual_init_data, pool));
+
+ svn_test_add_dir_cleanup(*local_abspath);
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Convert VALUE to a const svn_string_t *, and create a mapping from
+ NAME to the converted data type in PROPS. */
+static void
+set_prop(apr_hash_t *props, const char *name, const char *value,
+ apr_pool_t *result_pool)
+{
+ const svn_string_t *propval = svn_string_create(value, result_pool);
+
+ apr_hash_set(props, name, APR_HASH_KEY_STRING, propval);
+}
+
+
+static svn_error_t *
+validate_abspath(const char *wcroot_abspath,
+ const char *expected_relpath,
+ const char *actual_abspath,
+ apr_pool_t *scratch_pool)
+{
+ SVN_TEST_STRING_ASSERT(actual_abspath,
+ svn_dirent_join(wcroot_abspath,
+ expected_relpath,
+ scratch_pool));
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_getting_info(apr_pool_t *pool)
+{
+ const char *local_abspath;
+ svn_node_kind_t kind;
+ svn_wc__db_status_t status;
+ svn_revnum_t revision;
+ const char *repos_relpath;
+ const char *repos_root_url;
+ const char *repos_uuid;
+ svn_revnum_t changed_rev;
+ apr_time_t changed_date;
+ const char *changed_author;
+ svn_depth_t depth;
+ const svn_checksum_t *checksum;
+ const char *target;
+ svn_boolean_t had_props;
+ apr_hash_t *props;
+ svn_boolean_t update_root;
+ svn_wc__db_lock_t *lock;
+ svn_wc__db_t *db;
+ svn_error_t *err;
+
+ SVN_ERR(create_open(&db, &local_abspath, "test_getting_info", pool));
+
+ /* Test: basic fetching of data. */
+ SVN_ERR(svn_wc__db_base_get_info(
+ &status, &kind, &revision,
+ &repos_relpath, &repos_root_url, &repos_uuid,
+ &changed_rev, &changed_date, &changed_author,&depth, &checksum,
+ &target, &lock, &had_props, &props,
+ &update_root,
+ db, local_abspath,
+ pool, pool));
+ SVN_TEST_ASSERT(kind == svn_node_dir);
+ SVN_TEST_ASSERT(status == svn_wc__db_status_normal);
+ SVN_TEST_ASSERT(revision == 1);
+ SVN_TEST_STRING_ASSERT(repos_relpath, "");
+ SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_ONE);
+ SVN_TEST_STRING_ASSERT(repos_uuid, UUID_ONE);
+ SVN_TEST_ASSERT(changed_rev == 1);
+ SVN_TEST_ASSERT(changed_date == TIME_1a);
+ SVN_TEST_STRING_ASSERT(changed_author, AUTHOR_1);
+ SVN_TEST_ASSERT(depth == svn_depth_infinity);
+ SVN_TEST_ASSERT(checksum == NULL);
+ SVN_TEST_ASSERT(target == NULL);
+ SVN_TEST_ASSERT(lock == NULL);
+ SVN_TEST_ASSERT(!had_props);
+ SVN_TEST_ASSERT(apr_hash_count(props) == 0);
+ /* SVN_TEST_ASSERT(update_root == ???); */
+
+ /* Test: file-specific values. */
+ SVN_ERR(svn_wc__db_base_get_info(
+ NULL, &kind, NULL,
+ &repos_relpath, &repos_root_url, &repos_uuid,
+ NULL, NULL, NULL, NULL,
+ &checksum, NULL, NULL,
+ NULL, NULL, NULL,
+ db, svn_dirent_join(local_abspath, "A", pool),
+ pool, pool));
+ SVN_TEST_ASSERT(kind == svn_node_file);
+ SVN_TEST_STRING_ASSERT(SHA1_1, svn_checksum_to_cstring(checksum, pool));
+ SVN_TEST_STRING_ASSERT(repos_relpath, "A");
+ SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_ONE);
+ SVN_TEST_STRING_ASSERT(repos_uuid, UUID_ONE);
+
+ /* Test: symlink kind, excluded presence, default values for columns. */
+ SVN_ERR(svn_wc__db_base_get_info(
+ &status, &kind, &revision,
+ &repos_relpath, &repos_root_url, &repos_uuid,
+ &changed_rev, &changed_date, &changed_author,
+ &depth, &checksum, &target, &lock,
+ NULL, NULL, NULL,
+ db, svn_dirent_join(local_abspath, "B", pool),
+ pool, pool));
+ SVN_TEST_ASSERT(kind == svn_node_symlink);
+ SVN_TEST_ASSERT(status == svn_wc__db_status_excluded);
+ SVN_TEST_ASSERT(!SVN_IS_VALID_REVNUM(revision));
+ SVN_TEST_STRING_ASSERT(repos_relpath, "B");
+ SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_ONE);
+ SVN_TEST_STRING_ASSERT(repos_uuid, UUID_ONE);
+ SVN_TEST_ASSERT(!SVN_IS_VALID_REVNUM(changed_rev));
+ SVN_TEST_ASSERT(changed_date == 0);
+ SVN_TEST_ASSERT(changed_author == NULL);
+ SVN_TEST_ASSERT(depth == svn_depth_unknown);
+ SVN_TEST_ASSERT(checksum == NULL);
+ SVN_TEST_ASSERT(target == NULL);
+ SVN_TEST_ASSERT(lock == NULL);
+
+ /* Test: unknown kind, server-excluded presence. */
+ SVN_ERR(svn_wc__db_base_get_info(
+ &status, &kind, NULL,
+ NULL, NULL, NULL,
+ NULL, NULL, NULL,
+ NULL, NULL, NULL,
+ NULL, NULL, NULL, NULL,
+ db, svn_dirent_join(local_abspath, "C", pool),
+ pool, pool));
+ SVN_TEST_ASSERT(kind == svn_node_unknown);
+ SVN_TEST_ASSERT(status == svn_wc__db_status_server_excluded);
+
+ /* Test: not-present presence. */
+ SVN_ERR(svn_wc__db_base_get_info(
+ &status, NULL, NULL,
+ NULL, NULL, NULL,
+ NULL, NULL, NULL,
+ NULL, NULL, NULL,
+ NULL, NULL, NULL, NULL,
+ db, svn_dirent_join(local_abspath, "D", pool),
+ pool, pool));
+ SVN_TEST_ASSERT(status == svn_wc__db_status_not_present);
+
+ /* Test: incomplete presence. */
+ SVN_ERR(svn_wc__db_base_get_info(
+ &status, NULL, NULL,
+ NULL, NULL, NULL,
+ NULL, NULL, NULL,
+ NULL, NULL, NULL,
+ NULL, NULL, NULL, NULL,
+ db, svn_dirent_join(local_abspath, "E", pool),
+ pool, pool));
+ SVN_TEST_ASSERT(status == svn_wc__db_status_incomplete);
+
+ /* Test: SHA1 checksum. */
+ SVN_ERR(svn_wc__db_base_get_info(
+ NULL, NULL, NULL,
+ NULL, NULL, NULL,
+ NULL, NULL, NULL,
+ NULL, &checksum, NULL,
+ NULL, NULL, NULL, NULL,
+ db, svn_dirent_join(local_abspath, "F", pool),
+ pool, pool));
+ SVN_TEST_STRING_ASSERT(SHA1_1,
+ svn_checksum_to_cstring(checksum, pool));
+
+ /* Test: alternate repository (switched file). */
+ SVN_ERR(svn_wc__db_base_get_info(
+ NULL, NULL, NULL,
+ &repos_relpath, &repos_root_url, &repos_uuid,
+ &changed_rev, &changed_date, &changed_author,
+ NULL, NULL, NULL,
+ NULL, NULL, NULL, NULL,
+ db, svn_dirent_join(local_abspath, "G", pool),
+ pool, pool));
+ SVN_TEST_STRING_ASSERT(repos_relpath, "G-alt");
+ SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_TWO);
+ SVN_TEST_STRING_ASSERT(repos_uuid, UUID_TWO);
+ SVN_TEST_ASSERT(changed_rev == 2);
+ SVN_TEST_ASSERT(changed_date == TIME_2a);
+ SVN_TEST_STRING_ASSERT(changed_author, AUTHOR_2);
+
+ /* Test: symlink target. */
+ SVN_ERR(svn_wc__db_base_get_info(
+ NULL, NULL, NULL,
+ NULL, NULL, NULL,
+ NULL, NULL, NULL,
+ NULL, &checksum, &target,
+ NULL, NULL, NULL, NULL,
+ db, svn_dirent_join(local_abspath, "H", pool),
+ pool, pool));
+ SVN_TEST_ASSERT(checksum == NULL);
+ SVN_TEST_STRING_ASSERT(target, "H-target");
+
+ /* Test: missing node. */
+ err = svn_wc__db_base_get_info(
+ NULL, NULL, NULL,
+ NULL, NULL, NULL,
+ NULL, NULL, NULL,
+ NULL, NULL, NULL,
+ NULL, NULL, NULL, NULL,
+ db, svn_dirent_join(local_abspath, "missing-file", pool),
+ pool, pool);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_PATH_NOT_FOUND);
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+validate_node(svn_wc__db_t *db,
+ const char *local_abspath,
+ const char *relpath,
+ svn_node_kind_t expected_kind,
+ svn_wc__db_status_t expected_status,
+ apr_pool_t *scratch_pool)
+{
+ const char *path = svn_dirent_join(local_abspath, relpath, scratch_pool);
+ svn_node_kind_t kind;
+ svn_wc__db_status_t status;
+ apr_hash_t *props;
+ const svn_string_t *value;
+
+ SVN_ERR(svn_wc__db_base_get_info(
+ &status, &kind, NULL,
+ NULL, NULL, NULL,
+ NULL, NULL, NULL,
+ NULL, NULL, NULL,
+ NULL, NULL, NULL, NULL,
+ db, path,
+ scratch_pool, scratch_pool));
+ SVN_TEST_ASSERT(kind == expected_kind);
+ SVN_TEST_ASSERT(status == expected_status);
+
+ switch (status)
+ {
+ case svn_wc__db_status_server_excluded:
+ case svn_wc__db_status_excluded:
+ case svn_wc__db_status_incomplete:
+ case svn_wc__db_status_not_present:
+ /* Our tests aren't setting properties on these node types, so
+ short-circuit examination of name/value pairs, to avoid having
+ to handle the error from svn_wc__db_base_get_props(). */
+ return SVN_NO_ERROR;
+ default:
+ break; /* Fall through */
+ }
+
+ SVN_ERR(svn_wc__db_base_get_props(&props, db, path,
+ scratch_pool, scratch_pool));
+
+ SVN_TEST_ASSERT(props != NULL);
+
+ value = apr_hash_get(props, "p1", APR_HASH_KEY_STRING);
+ SVN_TEST_STRING_ASSERT(value->data, "v1");
+
+ value = apr_hash_get(props, "for-file", APR_HASH_KEY_STRING);
+ SVN_TEST_STRING_ASSERT(value->data, relpath);
+
+ SVN_ERR(svn_wc__db_read_props(&props, db, path,
+ scratch_pool, scratch_pool));
+ SVN_TEST_ASSERT(props != NULL);
+ value = apr_hash_get(props, "p1", APR_HASH_KEY_STRING);
+ SVN_TEST_STRING_ASSERT(value->data, "v1");
+
+ SVN_ERR(svn_wc__db_read_pristine_props(&props, db, path,
+ scratch_pool, scratch_pool));
+ SVN_TEST_ASSERT(props != NULL);
+ value = apr_hash_get(props, "p1", APR_HASH_KEY_STRING);
+ SVN_TEST_STRING_ASSERT(value->data, "v1");
+
+ /* Now add a property value and read it back (all on actual) */
+ {
+ apr_hash_t *actual_props = apr_hash_copy(scratch_pool, props);
+ apr_hash_set(actual_props, "p999", APR_HASH_KEY_STRING, value);
+ SVN_ERR(svn_wc__db_op_set_props(db, path, actual_props, FALSE,
+ NULL, NULL, scratch_pool));
+ SVN_ERR(svn_wc__db_read_props(&props, db, path,
+ scratch_pool, scratch_pool));
+ SVN_TEST_ASSERT(props != NULL);
+ value = apr_hash_get(props, "p999", APR_HASH_KEY_STRING);
+ SVN_TEST_STRING_ASSERT(value->data, "v1");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_inserting_nodes(apr_pool_t *pool)
+{
+ const char *local_abspath;
+ svn_checksum_t *checksum;
+ svn_wc__db_t *db;
+ apr_hash_t *props;
+ const apr_array_header_t *children;
+
+ SVN_ERR(create_open(&db, &local_abspath, "test_insert_nodes", pool));
+
+ props = apr_hash_make(pool);
+ set_prop(props, "p1", "v1", pool);
+
+ children = svn_cstring_split("N-a N-b N-c", " ", FALSE, pool);
+
+ SVN_ERR(svn_checksum_parse_hex(&checksum, svn_checksum_sha1, SHA1_1, pool));
+
+ /* Create a new directory and several child nodes. */
+ set_prop(props, "for-file", "N", pool);
+ SVN_ERR(svn_wc__db_base_add_directory(
+ db, svn_dirent_join(local_abspath, "N", pool),
+ local_abspath,
+ "N", ROOT_ONE, UUID_ONE, 3,
+ props,
+ 1, TIME_1a, AUTHOR_1,
+ children, svn_depth_infinity,
+ NULL, FALSE, NULL, NULL, NULL, NULL,
+ pool));
+
+ /* Replace an incomplete node with a file node. */
+ set_prop(props, "for-file", "N/N-a", pool);
+ SVN_ERR(svn_wc__db_base_add_file(
+ db, svn_dirent_join(local_abspath, "N/N-a", pool),
+ local_abspath,
+ "N/N-a", ROOT_ONE, UUID_ONE, 3,
+ props,
+ 1, TIME_1a, AUTHOR_1,
+ checksum,
+ NULL, FALSE, FALSE, NULL, NULL, FALSE, FALSE,
+ NULL, NULL,
+ pool));
+
+ /* Create a new symlink node. */
+ set_prop(props, "for-file", "O", pool);
+ SVN_ERR(svn_wc__db_base_add_symlink(
+ db, svn_dirent_join(local_abspath, "O", pool),
+ local_abspath,
+ "O", ROOT_ONE, UUID_ONE, 3,
+ props,
+ 1, TIME_1a, AUTHOR_1,
+ "O-target",
+ NULL, FALSE, FALSE, NULL, NULL, FALSE, FALSE,
+ NULL, NULL,
+ pool));
+
+  /* Replace an incomplete node with a server-excluded file node. */
+ SVN_ERR(svn_wc__db_base_add_excluded_node(
+ db, svn_dirent_join(local_abspath, "N/N-b", pool),
+ "N/N-b", ROOT_ONE, UUID_ONE, 3,
+ svn_node_file, svn_wc__db_status_server_excluded,
+ NULL, NULL,
+ pool));
+
+ /* Create a new excluded directory node. */
+ SVN_ERR(svn_wc__db_base_add_excluded_node(
+ db, svn_dirent_join(local_abspath, "P", pool),
+ "P", ROOT_ONE, UUID_ONE, 3,
+ svn_node_dir, svn_wc__db_status_excluded,
+ NULL, NULL,
+ pool));
+
+ /* Create a new not-present symlink node. */
+ SVN_ERR(svn_wc__db_base_add_not_present_node(
+ db, svn_dirent_join(local_abspath, "Q", pool),
+ "Q", ROOT_ONE, UUID_ONE, 3,
+ svn_node_symlink,
+ NULL, NULL,
+ pool));
+
+ /* Create a new server-excluded unknown-kind node. */
+ SVN_ERR(svn_wc__db_base_add_excluded_node(
+ db, svn_dirent_join(local_abspath, "R", pool),
+ "R", ROOT_ONE, UUID_ONE, 3,
+ svn_node_unknown, svn_wc__db_status_server_excluded,
+ NULL, NULL,
+ pool));
+
+
+ /* Are all the nodes where we expect them to be? */
+ SVN_ERR(validate_node(db, local_abspath, "N",
+ svn_node_dir, svn_wc__db_status_normal,
+ pool));
+ SVN_ERR(validate_node(db, local_abspath, "N/N-a",
+ svn_node_file, svn_wc__db_status_normal,
+ pool));
+ SVN_ERR(validate_node(db, local_abspath, "N/N-b",
+ svn_node_file,
+ svn_wc__db_status_server_excluded,
+ pool));
+ SVN_ERR(validate_node(db, local_abspath, "N/N-c",
+ svn_node_unknown, svn_wc__db_status_incomplete,
+ pool));
+ SVN_ERR(validate_node(db, local_abspath, "O",
+ svn_node_symlink, svn_wc__db_status_normal,
+ pool));
+ SVN_ERR(validate_node(db, local_abspath, "P",
+ svn_node_dir, svn_wc__db_status_excluded,
+ pool));
+ SVN_ERR(validate_node(db, local_abspath, "Q",
+ svn_node_symlink, svn_wc__db_status_not_present,
+ pool));
+ SVN_ERR(validate_node(db, local_abspath, "R",
+ svn_node_unknown,
+ svn_wc__db_status_server_excluded,
+ pool));
+
+ /* ### do we need to test any attributes of the node? */
+
+ /* ### yes: test the repos inheritance stuff (at least) */
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_children(apr_pool_t *pool)
+{
+ const char *local_abspath;
+ svn_wc__db_t *db;
+ const apr_array_header_t *children;
+ int i;
+
+ SVN_ERR(create_open(&db, &local_abspath, "test_children", pool));
+
+ SVN_ERR(svn_wc__db_base_get_children(&children,
+ db, local_abspath,
+ pool, pool));
+ SVN_TEST_ASSERT(children->nelts == 13);
+ for (i = children->nelts; i--; )
+ {
+ const char *name = APR_ARRAY_IDX(children, i, const char *);
+
+ if (strcmp(name, "moved") == 0
+ || strcmp(name, "other") == 0)
+ {
+ continue;
+ }
+
+ SVN_TEST_ASSERT(strlen(name) == 1);
+ /* ### check the actual values */
+ }
+
+ SVN_ERR(svn_wc__db_read_children(&children,
+ db, local_abspath,
+ pool, pool));
+ SVN_TEST_ASSERT(children->nelts == 14);
+ for (i = children->nelts; i--; )
+ {
+ const char *name = APR_ARRAY_IDX(children, i, const char *);
+
+ if (strcmp(name, "moved") == 0
+ || strcmp(name, "other") == 0)
+ {
+ continue;
+ }
+
+ SVN_TEST_ASSERT(strlen(name) == 1);
+ /* ### check the actual values */
+ }
+
+ /* ### insert some more children. replace some nodes. check values. */
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_working_info(apr_pool_t *pool)
+{
+ const char *local_abspath;
+ svn_node_kind_t kind;
+ svn_wc__db_status_t status;
+ svn_revnum_t revision;
+ const char *repos_relpath;
+ const char *repos_root_url;
+ const char *repos_uuid;
+ svn_revnum_t changed_rev;
+ apr_time_t changed_date;
+ const char *changed_author;
+ apr_time_t recorded_time;
+ svn_depth_t depth;
+ const svn_checksum_t *checksum;
+ svn_filesize_t recorded_size;
+ const char *target;
+ const char *changelist;
+ const char *original_repos_relpath;
+ const char *original_root_url;
+ const char *original_uuid;
+ svn_revnum_t original_revnum;
+ svn_boolean_t op_root;
+ svn_boolean_t had_props;
+ svn_boolean_t props_mod;
+ svn_boolean_t have_base;
+ svn_boolean_t have_more_work;
+ svn_boolean_t have_work;
+ svn_boolean_t conflicted;
+ svn_wc__db_lock_t *lock;
+ svn_wc__db_t *db;
+
+ SVN_ERR(create_open(&db, &local_abspath, "test_working_info", pool));
+
+ /* Test: basic fetching of data. */
+ SVN_ERR(svn_wc__db_read_info(
+ &status, &kind, &revision,
+ &repos_relpath, &repos_root_url, &repos_uuid,
+ &changed_rev, &changed_date, &changed_author,
+ &depth, &checksum, &target, &original_repos_relpath,
+ &original_root_url, &original_uuid, &original_revnum,
+ &lock, &recorded_size, &recorded_time, &changelist,
+ &conflicted, &op_root, &had_props, &props_mod,
+ &have_base, &have_more_work, &have_work,
+ db, svn_dirent_join(local_abspath, "I", pool),
+ pool, pool));
+ SVN_TEST_ASSERT(status == svn_wc__db_status_added);
+ SVN_TEST_ASSERT(kind == svn_node_dir);
+ SVN_TEST_ASSERT(revision == SVN_INVALID_REVNUM);
+ SVN_TEST_ASSERT(repos_relpath == NULL);
+ SVN_TEST_ASSERT(repos_root_url == NULL);
+ SVN_TEST_ASSERT(repos_uuid == NULL);
+ SVN_TEST_ASSERT(changed_rev == 2);
+ SVN_TEST_ASSERT(changed_date == TIME_2a);
+ SVN_TEST_STRING_ASSERT(changed_author, AUTHOR_2);
+ SVN_TEST_ASSERT(depth == svn_depth_immediates);
+ SVN_TEST_ASSERT(checksum == NULL);
+ SVN_TEST_ASSERT(recorded_size == SVN_INVALID_FILESIZE);
+ SVN_TEST_ASSERT(target == NULL);
+ SVN_TEST_STRING_ASSERT(changelist, NULL);
+ SVN_TEST_STRING_ASSERT(original_repos_relpath, "some/dir");
+ SVN_TEST_STRING_ASSERT(original_root_url, ROOT_TWO);
+ SVN_TEST_STRING_ASSERT(original_uuid, UUID_TWO);
+ SVN_TEST_ASSERT(original_revnum == 2);
+ SVN_TEST_ASSERT(!had_props);
+ SVN_TEST_ASSERT(!props_mod);
+ SVN_TEST_ASSERT(have_base);
+ /* SVN_TEST_ASSERT(have_more_work...); */
+ SVN_TEST_ASSERT(have_work);
+ SVN_TEST_ASSERT(!conflicted);
+ SVN_TEST_ASSERT(lock == NULL);
+ /* SVN_TEST_ASSERT(last_mod_time...); */
+ /* SVN_TEST_ASSERT(op_root...); */
+
+
+ /* ### we need a hojillion more tests in here. I just want to get this
+ ### round checked in, so I'm skipping more tests at this point. */
+ SVN_ERR(svn_wc__db_read_info(
+ &status, &kind, &revision,
+ &repos_relpath, &repos_root_url, &repos_uuid,
+ &changed_rev, &changed_date, &changed_author,
+ &depth, &checksum, &target, &original_repos_relpath,
+ &original_root_url, &original_uuid, &original_revnum,
+ &lock, &recorded_size, &recorded_time, &changelist,
+ &conflicted, &op_root, &had_props, &props_mod,
+ &have_base, &have_more_work, &have_work,
+ db, svn_dirent_join(local_abspath, "A", pool),
+ pool, pool));
+ SVN_TEST_ASSERT(status == svn_wc__db_status_normal);
+ SVN_TEST_ASSERT(kind == svn_node_file);
+ SVN_TEST_STRING_ASSERT(changelist, "changelist");
+ SVN_TEST_ASSERT(revision == 1);
+ SVN_TEST_STRING_ASSERT(repos_relpath, "A");
+ SVN_TEST_STRING_ASSERT(repos_root_url, "http://example.com/one");
+ SVN_TEST_STRING_ASSERT(repos_uuid, "uuid1");
+ SVN_TEST_ASSERT(changed_rev == 1);
+ SVN_TEST_ASSERT(changed_date == TIME_1a);
+ SVN_TEST_STRING_ASSERT(changed_author, AUTHOR_1);
+ SVN_TEST_ASSERT(depth == svn_depth_unknown);
+ SVN_TEST_ASSERT(checksum != NULL);
+ SVN_TEST_ASSERT(recorded_size == 10);
+ SVN_TEST_ASSERT(target == NULL);
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_pdh(apr_pool_t *pool)
+{
+ const char *local_abspath;
+ svn_wc__db_t *db;
+
+ SVN_ERR(create_open(&db, &local_abspath, "test_pdh", pool));
+
+ /* NOTE: this test doesn't do anything apparent -- it simply exercises
+ some internal functionality of wc_db. This is a handy driver for
+ debugging wc_db to ensure it manages per-directory handles properly. */
+
+ SVN_ERR(svn_wc__db_base_add_excluded_node(
+ db, svn_dirent_join(local_abspath, "sub", pool),
+ "sub", ROOT_ONE, UUID_ONE, 1,
+ svn_node_file, svn_wc__db_status_server_excluded,
+ NULL, NULL,
+ pool));
+
+ SVN_ERR(svn_wc__db_base_add_directory(
+ db, svn_dirent_join(local_abspath, "sub2", pool),
+ local_abspath, "sub2", ROOT_ONE, UUID_ONE, 1,
+ apr_hash_make(pool), 1, 1, "me", NULL,
+ svn_depth_infinity, NULL, FALSE, NULL, NULL,
+ NULL, NULL, pool));
+
+ SVN_ERR(svn_wc__db_base_add_excluded_node(
+ db, svn_dirent_join(local_abspath, "sub2/A", pool),
+ "sub2/A", ROOT_ONE, UUID_ONE, 1,
+ svn_node_file, svn_wc__db_status_server_excluded,
+ NULL, NULL,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+static svn_error_t *
+test_scan_addition(apr_pool_t *pool)
+{
+ const char *local_abspath;
+ svn_wc__db_t *db;
+ svn_wc__db_status_t status;
+ const char *op_root_abspath;
+ const char *repos_relpath;
+ const char *repos_root_url;
+ const char *repos_uuid;
+ const char *original_repos_relpath;
+ const char *original_root_url;
+ const char *original_uuid;
+ svn_revnum_t original_revision;
+ const char *moved_from_abspath;
+ const char *move_op_root_abspath;
+ const char *move_op_root_src;
+ const char *delete_op_root_abspath;
+
+ SVN_ERR(create_open(&db, &local_abspath, "test_scan_addition", pool));
+
+ /* Simple addition of a directory. */
+ SVN_ERR(svn_wc__db_scan_addition(
+ &status, &op_root_abspath,
+ &repos_relpath, &repos_root_url, &repos_uuid,
+ &original_repos_relpath, &original_root_url, &original_uuid,
+ &original_revision,
+ db, svn_dirent_join(local_abspath, "J", pool),
+ pool, pool));
+ SVN_TEST_ASSERT(status == svn_wc__db_status_copied);
+ SVN_ERR(validate_abspath(local_abspath, "J", op_root_abspath, pool));
+ SVN_TEST_STRING_ASSERT(repos_relpath, "J");
+ SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_ONE);
+ SVN_TEST_STRING_ASSERT(repos_uuid, UUID_ONE);
+ SVN_TEST_STRING_ASSERT(original_repos_relpath, "q");
+ SVN_TEST_STRING_ASSERT(original_root_url, ROOT_TWO);
+ SVN_TEST_STRING_ASSERT(original_uuid, UUID_TWO);
+ SVN_TEST_ASSERT(original_revision == 2);
+
+ /* Simple copy (affects how scan-up is started). */
+ SVN_ERR(svn_wc__db_scan_addition(
+ &status, &op_root_abspath,
+ &repos_relpath, &repos_root_url, &repos_uuid,
+ &original_repos_relpath, &original_root_url, &original_uuid,
+ &original_revision,
+ db, svn_dirent_join(local_abspath, "J/J-a", pool),
+ pool, pool));
+ SVN_TEST_ASSERT(status == svn_wc__db_status_copied);
+ SVN_ERR(validate_abspath(local_abspath, "J", op_root_abspath, pool));
+ SVN_TEST_STRING_ASSERT(repos_relpath, "J/J-a");
+ SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_ONE);
+ SVN_TEST_STRING_ASSERT(repos_uuid, UUID_ONE);
+ SVN_TEST_STRING_ASSERT(original_repos_relpath, "q");
+ SVN_TEST_STRING_ASSERT(original_root_url, ROOT_TWO);
+ SVN_TEST_STRING_ASSERT(original_uuid, UUID_TWO);
+ SVN_TEST_ASSERT(original_revision == 2);
+
+ /* Node was moved here. */
+ SVN_ERR(svn_wc__db_scan_addition(
+ &status, &op_root_abspath,
+ &repos_relpath, &repos_root_url, &repos_uuid,
+ &original_repos_relpath, &original_root_url, &original_uuid,
+ &original_revision,
+ db, svn_dirent_join(local_abspath, "J/J-d", pool),
+ pool, pool));
+ SVN_ERR(svn_wc__db_scan_moved(
+ &moved_from_abspath,
+ &move_op_root_abspath,
+ &move_op_root_src,
+ &delete_op_root_abspath,
+ db, svn_dirent_join(local_abspath, "J/J-d", pool),
+ pool, pool));
+ SVN_TEST_ASSERT(status == svn_wc__db_status_moved_here);
+ SVN_ERR(validate_abspath(local_abspath, "J/J-d",
+ op_root_abspath, pool));
+ SVN_ERR(validate_abspath(local_abspath, "moved/file",
+ moved_from_abspath, pool));
+ SVN_ERR(validate_abspath(local_abspath, "J/J-d",
+ move_op_root_abspath, pool));
+ SVN_ERR(validate_abspath(local_abspath, "moved/file",
+ move_op_root_src, pool));
+ SVN_ERR(validate_abspath(local_abspath, "moved/file",
+ delete_op_root_abspath, pool));
+ SVN_TEST_STRING_ASSERT(repos_relpath, "J/J-d");
+ SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_ONE);
+ SVN_TEST_STRING_ASSERT(repos_uuid, UUID_ONE);
+ SVN_TEST_STRING_ASSERT(original_repos_relpath, "moved/file");
+ SVN_TEST_STRING_ASSERT(original_root_url, ROOT_TWO);
+ SVN_TEST_STRING_ASSERT(original_uuid, UUID_TWO);
+ SVN_TEST_ASSERT(original_revision == 2);
+
+ /* Check root of a copy. */
+ SVN_ERR(svn_wc__db_scan_addition(
+ &status, &op_root_abspath,
+ &repos_relpath, &repos_root_url, &repos_uuid,
+ &original_repos_relpath, &original_root_url, &original_uuid,
+ &original_revision,
+ db, svn_dirent_join(local_abspath, "J/J-b", pool),
+ pool, pool));
+ SVN_TEST_ASSERT(status == svn_wc__db_status_copied);
+ SVN_ERR(validate_abspath(local_abspath, "J",
+ op_root_abspath, pool));
+ SVN_TEST_STRING_ASSERT(repos_relpath, "J/J-b");
+ SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_ONE);
+ SVN_TEST_STRING_ASSERT(repos_uuid, UUID_ONE);
+ SVN_TEST_STRING_ASSERT(original_repos_relpath, "q");
+ SVN_TEST_STRING_ASSERT(original_root_url, ROOT_TWO);
+ SVN_TEST_STRING_ASSERT(original_uuid, UUID_TWO);
+ SVN_TEST_ASSERT(original_revision == 2);
+
+ /* Ignore parent copy. Use copy closest to target. */
+ SVN_ERR(svn_wc__db_scan_addition(
+ &status, &op_root_abspath,
+ &repos_relpath, &repos_root_url, &repos_uuid,
+ &original_repos_relpath, &original_root_url, &original_uuid,
+ &original_revision,
+ db, svn_dirent_join(local_abspath, "J/J-b/J-b-a", pool),
+ pool, pool));
+ SVN_TEST_ASSERT(status == svn_wc__db_status_copied);
+ SVN_ERR(validate_abspath(local_abspath, "J/J-b/J-b-a",
+ op_root_abspath, pool));
+ SVN_TEST_STRING_ASSERT(repos_relpath, "J/J-b/J-b-a");
+ SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_ONE);
+ SVN_TEST_STRING_ASSERT(repos_uuid, UUID_ONE);
+ SVN_TEST_STRING_ASSERT(original_repos_relpath, "another/dir");
+ SVN_TEST_STRING_ASSERT(original_root_url, ROOT_TWO);
+ SVN_TEST_STRING_ASSERT(original_uuid, UUID_TWO);
+ SVN_TEST_ASSERT(original_revision == 2);
+
+ /* Inherit parent copy. */
+ SVN_ERR(svn_wc__db_scan_addition(
+ &status, &op_root_abspath,
+ &repos_relpath, &repos_root_url, &repos_uuid,
+ &original_repos_relpath, &original_root_url, &original_uuid,
+ &original_revision,
+ db, svn_dirent_join(local_abspath, "J/J-b/J-b-b", pool),
+ pool, pool));
+ SVN_TEST_ASSERT(status == svn_wc__db_status_copied);
+ SVN_ERR(validate_abspath(local_abspath, "J",
+ op_root_abspath, pool));
+ SVN_TEST_STRING_ASSERT(repos_relpath, "J/J-b/J-b-b");
+ SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_ONE);
+ SVN_TEST_STRING_ASSERT(repos_uuid, UUID_ONE);
+ SVN_TEST_STRING_ASSERT(original_repos_relpath, "q");
+ SVN_TEST_STRING_ASSERT(original_root_url, ROOT_TWO);
+ SVN_TEST_STRING_ASSERT(original_uuid, UUID_TWO);
+ SVN_TEST_ASSERT(original_revision == 2);
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Exercise svn_wc__db_scan_deletion() over the deleted, moved and
+   replaced subtrees ("J", "K" and "L") of the fixture working copy
+   built by create_open(), checking all four out-parameters for scans
+   started at various depths.  The expected paths are hard-coded to
+   match the NODES table defined earlier in this file. */
+static svn_error_t *
+test_scan_deletion(apr_pool_t *pool)
+{
+  const char *local_abspath;
+  svn_wc__db_t *db;
+  const char *base_del_abspath;
+  const char *work_del_abspath;
+  const char *moved_to_abspath;
+  const char *copy_op_root_abspath;
+
+  SVN_ERR(create_open(&db, &local_abspath, "test_scan_deletion", pool));
+
+  /* Node was moved elsewhere. */
+  SVN_ERR(svn_wc__db_scan_deletion(
+            &base_del_abspath,
+            &moved_to_abspath,
+            &work_del_abspath,
+            &copy_op_root_abspath,
+            db, svn_dirent_join(local_abspath, "J/J-e", pool),
+            pool, pool));
+  SVN_ERR(validate_abspath(local_abspath, "J",
+                           base_del_abspath, pool));
+  SVN_ERR(validate_abspath(local_abspath, "other/place",
+                           moved_to_abspath, pool));
+  SVN_ERR(validate_abspath(local_abspath, "J/J-e",
+                           work_del_abspath, pool));
+  SVN_ERR(validate_abspath(local_abspath, "other/place",
+                           copy_op_root_abspath, pool));
+
+  /* Node was moved elsewhere (child of operation root). */
+  SVN_ERR(svn_wc__db_scan_deletion(
+            &base_del_abspath,
+            &moved_to_abspath,
+            &work_del_abspath,
+            &copy_op_root_abspath,
+            db, svn_dirent_join(local_abspath, "J/J-e/J-e-a", pool),
+            pool, pool));
+  SVN_ERR(validate_abspath(local_abspath, "J",
+                           base_del_abspath, pool));
+  SVN_ERR(validate_abspath(local_abspath, "other/place/J-e-a",
+                           moved_to_abspath, pool));
+  SVN_ERR(validate_abspath(local_abspath, "J/J-e",
+                           work_del_abspath, pool));
+  SVN_ERR(validate_abspath(local_abspath, "other/place",
+                           copy_op_root_abspath, pool));
+
+  /* Root of delete. Parent is a WORKING node. */
+  SVN_ERR(svn_wc__db_scan_deletion(
+            &base_del_abspath,
+            &moved_to_abspath,
+            &work_del_abspath,
+            NULL,
+            db, svn_dirent_join(local_abspath, "J/J-c", pool),
+            pool, pool));
+  /* Implicit delete of "J" (via replacement). */
+  SVN_ERR(validate_abspath(local_abspath, "J",
+                           base_del_abspath, pool));
+  SVN_TEST_ASSERT(moved_to_abspath == NULL);
+  SVN_ERR(validate_abspath(local_abspath, "J/J-c",
+                           work_del_abspath, pool));
+
+  /* Child of a deleted root. */
+  SVN_ERR(svn_wc__db_scan_deletion(
+            &base_del_abspath,
+            &moved_to_abspath,
+            &work_del_abspath,
+            NULL,
+            db, svn_dirent_join(local_abspath, "J/J-c/J-c-a", pool),
+            pool, pool));
+  /* Implicit delete of "J" (via replacement). */
+  SVN_ERR(validate_abspath(local_abspath, "J",
+                           base_del_abspath, pool));
+  SVN_TEST_ASSERT(moved_to_abspath == NULL);
+  SVN_ERR(validate_abspath(local_abspath, "J/J-c",
+                           work_del_abspath, pool));
+
+  /* Base-deleted tree extending past deleted WORKING subtree. */
+  SVN_ERR(svn_wc__db_scan_deletion(
+            &base_del_abspath,
+            &moved_to_abspath,
+            &work_del_abspath,
+            NULL,
+            db, svn_dirent_join(local_abspath, "J/J-e/J-e-b/Jeba", pool),
+            pool, pool));
+  /* ### I don't understand this. "J/J-e/J-e-b/Jeba" is a deleted
+     base node that is not overlayed by the replacement rooted at "J".
+     Why does base_del_abspath refer to "J-e"? */
+  SVN_ERR(validate_abspath(local_abspath, "J",
+                           base_del_abspath, pool));
+  SVN_ERR(validate_abspath(local_abspath, "other/place/J-e-b/Jeba",
+                           moved_to_abspath, pool));
+  SVN_TEST_STRING_ASSERT(work_del_abspath, NULL);
+
+  /* Base-deleted tree extending past added WORKING tree. */
+  SVN_ERR(svn_wc__db_scan_deletion(
+            &base_del_abspath,
+            &moved_to_abspath,
+            &work_del_abspath,
+            NULL,
+            db, svn_dirent_join(local_abspath, "J/J-f/J-f-a", pool),
+            pool, pool));
+  /* Implicit delete of "J" (via replacement). */
+  SVN_ERR(validate_abspath(local_abspath, "J",
+                           base_del_abspath, pool));
+  SVN_TEST_STRING_ASSERT(moved_to_abspath, NULL);
+  SVN_TEST_STRING_ASSERT(work_del_abspath, NULL);
+
+  /* Root of delete. Parent is a BASE node. */
+  SVN_ERR(svn_wc__db_scan_deletion(
+            &base_del_abspath,
+            &moved_to_abspath,
+            &work_del_abspath,
+            NULL,
+            db, svn_dirent_join(local_abspath, "K", pool),
+            pool, pool));
+  SVN_ERR(validate_abspath(local_abspath, "K",
+                           base_del_abspath, pool));
+  SVN_TEST_STRING_ASSERT(moved_to_abspath, NULL);
+  SVN_TEST_STRING_ASSERT(work_del_abspath, NULL);
+
+  /* Base-deleted tree. Start below root. */
+  SVN_ERR(svn_wc__db_scan_deletion(
+            &base_del_abspath,
+            &moved_to_abspath,
+            &work_del_abspath,
+            NULL,
+            db, svn_dirent_join(local_abspath, "K/K-a", pool),
+            pool, pool));
+  SVN_ERR(validate_abspath(local_abspath, "K",
+                           base_del_abspath, pool));
+  SVN_TEST_STRING_ASSERT(moved_to_abspath, NULL);
+  SVN_TEST_STRING_ASSERT(work_del_abspath, NULL);
+
+  /* Base-deleted tree via move. */
+  SVN_ERR(svn_wc__db_scan_deletion(
+            &base_del_abspath,
+            &moved_to_abspath,
+            &work_del_abspath,
+            &copy_op_root_abspath,
+            db, svn_dirent_join(local_abspath, "K/K-b", pool),
+            pool, pool));
+  SVN_ERR(validate_abspath(local_abspath, "K",
+                           base_del_abspath, pool));
+  SVN_ERR(validate_abspath(local_abspath, "moved/away",
+                           moved_to_abspath, pool));
+  SVN_ERR(validate_abspath(local_abspath, "moved/away",
+                           copy_op_root_abspath, pool));
+  SVN_TEST_STRING_ASSERT(work_del_abspath, NULL);
+
+  /* Subtree deletion of added tree. Start at child. */
+  SVN_ERR(svn_wc__db_scan_deletion(
+            &base_del_abspath,
+            &moved_to_abspath,
+            &work_del_abspath,
+            NULL,
+            db, svn_dirent_join(local_abspath, "L/L-a/L-a-a", pool),
+            pool, pool));
+  SVN_TEST_STRING_ASSERT(base_del_abspath, NULL);
+  SVN_TEST_STRING_ASSERT(moved_to_abspath, NULL);
+  SVN_ERR(validate_abspath(local_abspath, "L/L-a",
+                           work_del_abspath, pool));
+
+  /* Subtree deletion of added tree. Start at root. */
+  SVN_ERR(svn_wc__db_scan_deletion(
+            &base_del_abspath,
+            &moved_to_abspath,
+            &work_del_abspath,
+            NULL,
+            db, svn_dirent_join(local_abspath, "L/L-a", pool),
+            pool, pool));
+  SVN_TEST_STRING_ASSERT(base_del_abspath, NULL);
+  SVN_TEST_STRING_ASSERT(moved_to_abspath, NULL);
+  SVN_ERR(validate_abspath(local_abspath, "L/L-a",
+                           work_del_abspath, pool));
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Exercise svn_wc__db_global_relocate(): after relocating the working
+   copy root to ROOT_THREE, the root and its child "F" must report the
+   new repository root URL while keeping the original UUID, whereas the
+   child "G" -- which the NODES table attaches to the alternate
+   repository (ROOT_TWO) -- must be left untouched. */
+static svn_error_t *
+test_global_relocate(apr_pool_t *pool)
+{
+  const char *local_abspath;
+  svn_wc__db_t *db;
+  const char *repos_relpath;
+  const char *repos_root_url;
+  const char *repos_uuid;
+
+  SVN_ERR(create_open(&db, &local_abspath, "test_global_relocate", pool));
+
+  /* Initial sanity check. */
+  SVN_ERR(svn_wc__db_read_info(NULL, NULL, NULL,
+                               &repos_relpath, &repos_root_url, &repos_uuid,
+                               NULL, NULL, NULL, NULL,
+                               NULL, NULL, NULL, NULL,
+                               NULL, NULL, NULL, NULL, NULL,
+                               NULL, NULL, NULL, NULL, NULL,
+                               NULL, NULL, NULL,
+                               db, local_abspath,
+                               pool, pool));
+
+  SVN_TEST_STRING_ASSERT(repos_relpath, "");
+  SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_ONE);
+  SVN_TEST_STRING_ASSERT(repos_uuid, UUID_ONE);
+
+  /* Test relocating to a repos not existent in the db */
+  SVN_ERR(svn_wc__db_global_relocate(db, local_abspath, ROOT_THREE, pool));
+  SVN_ERR(svn_wc__db_read_info(NULL, NULL, NULL,
+                               &repos_relpath, &repos_root_url, &repos_uuid,
+                               NULL, NULL, NULL, NULL,
+                               NULL, NULL, NULL, NULL,
+                               NULL, NULL, NULL, NULL, NULL,
+                               NULL, NULL, NULL, NULL, NULL,
+                               NULL, NULL, NULL,
+                               db, local_abspath,
+                               pool, pool));
+  SVN_TEST_STRING_ASSERT(repos_relpath, "");
+  SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_THREE);
+  /* The UUID should still be the same. */
+  SVN_TEST_STRING_ASSERT(repos_uuid, UUID_ONE);
+
+  /* While we're at it, let's see if the children have been relocated, too. */
+  SVN_ERR(svn_wc__db_read_info(NULL, NULL, NULL,
+                               &repos_relpath, &repos_root_url, &repos_uuid,
+                               NULL, NULL, NULL, NULL,
+                               NULL, NULL, NULL, NULL,
+                               NULL, NULL, NULL, NULL, NULL,
+                               NULL, NULL, NULL, NULL, NULL,
+                               NULL, NULL, NULL,
+                               db, svn_dirent_join(local_abspath, "F",
+                                                   pool),
+                               pool, pool));
+  SVN_TEST_STRING_ASSERT(repos_relpath, "F");
+  SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_THREE);
+  SVN_TEST_STRING_ASSERT(repos_uuid, UUID_ONE);
+
+  /* Alternate repository is not relocated. */
+  SVN_ERR(svn_wc__db_read_info(NULL, NULL, NULL,
+                               &repos_relpath, &repos_root_url, &repos_uuid,
+                               NULL, NULL, NULL, NULL,
+                               NULL, NULL, NULL, NULL,
+                               NULL, NULL, NULL, NULL, NULL,
+                               NULL, NULL, NULL, NULL, NULL,
+                               NULL, NULL, NULL,
+                               db, svn_dirent_join(local_abspath, "G",
+                                                   pool),
+                               pool, pool));
+  SVN_TEST_STRING_ASSERT(repos_relpath, "G-alt");
+  SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_TWO);
+  SVN_TEST_STRING_ASSERT(repos_uuid, UUID_TWO);
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Decode a test work item created by test_work_queue(): a skel list
+   whose sole child is a one-character integer atom naming the
+   operation.  Return that operation digit, or -1 when the skel does
+   not have the expected shape. */
+static int
+detect_work_item(const svn_skel_t *work_item)
+{
+  const svn_skel_t *op = work_item->children;
+
+  if (op == NULL || !op->is_atom || op->len != 1)
+    return -1;
+
+  return op->data[0] - '0';
+}
+
+
+/* Exercise the work-queue API (svn_wc__db_wq_*): enqueue three test
+   work items, then repeatedly fetch the next item, "run" it, and only
+   report it completed once its per-item run count is exhausted.  Each
+   item must keep reappearing until completed, and the queue must drain
+   after exactly 13 fetches (4 + 7 + 2). */
+static svn_error_t *
+test_work_queue(apr_pool_t *pool)
+{
+  svn_wc__db_t *db;
+  const char *local_abspath;
+  svn_skel_t *work_item;
+  int run_count[3] = { 4, 7, 2 }; /* run the work 13 times, total. */
+  int fetches = 0;
+  /* Id of the most recently completed work item (0 = none).  Declared
+     apr_uint64_t to match the ID out-parameter filled in by
+     svn_wc__db_wq_fetch_next() below; the previous apr_int64_t forced
+     an implicit signed/unsigned conversion on every call. */
+  apr_uint64_t last_id = 0;
+
+  SVN_ERR(create_open(&db, &local_abspath, "test_work_queue", pool));
+
+  /* Create three work items, whose operation atoms are 0, 1 and 2. */
+  work_item = svn_skel__make_empty_list(pool);
+  svn_skel__prepend_int(0, work_item, pool);
+  SVN_ERR(svn_wc__db_wq_add(db, local_abspath, work_item, pool));
+
+  work_item = svn_skel__make_empty_list(pool);
+  svn_skel__prepend_int(1, work_item, pool);
+  SVN_ERR(svn_wc__db_wq_add(db, local_abspath, work_item, pool));
+
+  work_item = svn_skel__make_empty_list(pool);
+  svn_skel__prepend_int(2, work_item, pool);
+  SVN_ERR(svn_wc__db_wq_add(db, local_abspath, work_item, pool));
+
+  while (TRUE)
+    {
+      apr_uint64_t id;
+      int which;
+
+      /* Fetch the next work item, or break when the work queue is empty.
+         A non-zero LAST_ID tells the db that that item has completed. */
+      SVN_ERR(svn_wc__db_wq_fetch_next(&id, &work_item, db, local_abspath,
+                                       last_id, pool, pool));
+      if (work_item == NULL)
+        break;
+
+      /* Got one. We should never fetch work more than 13 times. */
+      ++fetches;
+      SVN_TEST_ASSERT(fetches <= 13);
+
+      /* Parse the work item to see which of the three we found. */
+      which = detect_work_item(work_item);
+      SVN_TEST_ASSERT(which >= 0 && which <= 2);
+
+      /* We should not see an item after we've run it enough times.
+
+         Note: strictly speaking, "in the wild" a work item could remain
+         after a call to wq_completed (ie. crash while that function was
+         processing), but we don't really have a way to test that here. */
+      SVN_TEST_ASSERT(run_count[which] > 0);
+
+      /* If we have run this particular item enough times, then go ahead
+         and remove it from the work queue. */
+      if (--run_count[which] == 0)
+        last_id = id;
+      else
+        last_id = 0;
+    }
+
+  /* Should have run precisely 13 work items. */
+  SVN_TEST_ASSERT(fetches == 13);
+
+  return SVN_NO_ERROR;
+}
+
+/* Store one file external and one directory external below the "I"
+   subdirectory, then read them back with svn_wc__db_external_read()
+   and verify every recorded field, including the pristine properties
+   installed for the file external. */
+static svn_error_t *
+test_externals_store(apr_pool_t *pool)
+{
+  svn_wc__db_t *db;
+  const char *local_abspath;
+  svn_checksum_t *orig_checksum;
+  const char *file_external_path;
+  const char *dir_external_path;
+  const char *subdir;
+  apr_hash_t *props = apr_hash_make(pool);
+  svn_string_t *value = svn_string_create("value-data", pool);
+
+  apr_hash_set(props, "key", APR_HASH_KEY_STRING, value);
+
+  SVN_ERR(create_open(&db, &local_abspath, "test_externals_store", pool));
+
+  /* Directory I exists in the standard test db */
+  subdir = svn_dirent_join(local_abspath, "I", pool);
+
+  SVN_ERR(svn_checksum_parse_hex(&orig_checksum, svn_checksum_sha1, SHA1_1,
+                                 pool));
+
+  file_external_path = svn_dirent_join(subdir, "file-external", pool);
+  dir_external_path = svn_dirent_join(subdir, "dir-external", pool);
+
+  SVN_ERR(svn_wc__db_external_add_file(db,
+                                       file_external_path,
+                                       local_abspath /* wri_abspath */,
+                                       "some/location",
+                                       "svn://some-repos/svn",
+                                       "not-a-uuid",
+                                       12,
+                                       props,
+                                       NULL,
+                                       10,
+                                       987654,
+                                       "somebody",
+                                       orig_checksum,
+                                       NULL,
+                                       subdir,
+                                       "some/new-location",
+                                       90,
+                                       12,
+                                       FALSE, NULL,
+                                       FALSE,
+                                       NULL,
+                                       NULL,
+                                       pool));
+
+  SVN_ERR(svn_wc__db_external_add_dir(db,
+                                      dir_external_path,
+                                      local_abspath /* wri_abspath */,
+                                      "svn://other-repos/nsv",
+                                      "no-uuid-either",
+                                      subdir,
+                                      "some/other-location",
+                                      70,
+                                      32,
+                                      NULL,
+                                      pool));
+
+  {
+    svn_wc__db_status_t status;
+    svn_node_kind_t kind;
+    const char *repos_root_url;
+    const char *repos_uuid;
+    const char *defining_abspath;
+    const char *recorded_repos_relpath;
+    svn_revnum_t recorded_peg_revision;
+    svn_revnum_t recorded_revision;
+
+    /* Read back the file external and check the recorded location. */
+    SVN_ERR(svn_wc__db_external_read(&status, &kind, &defining_abspath,
+                                     &repos_root_url, &repos_uuid,
+                                     &recorded_repos_relpath,
+                                     &recorded_peg_revision,
+                                     &recorded_revision,
+                                     db, file_external_path, local_abspath,
+                                     pool, pool));
+
+    SVN_TEST_ASSERT(status == svn_wc__db_status_normal);
+    SVN_TEST_ASSERT(kind == svn_node_file);
+    SVN_TEST_STRING_ASSERT(repos_root_url, "svn://some-repos/svn");
+    SVN_TEST_STRING_ASSERT(repos_uuid, "not-a-uuid");
+    SVN_TEST_STRING_ASSERT(defining_abspath, subdir);
+    SVN_TEST_STRING_ASSERT(recorded_repos_relpath, "some/new-location");
+    SVN_TEST_ASSERT(recorded_peg_revision == 90);
+    SVN_TEST_ASSERT(recorded_revision == 12);
+
+    /* The properties passed to external_add_file must be retrievable
+       as the node's BASE props. */
+    {
+      apr_hash_t *new_props;
+      svn_string_t *v;
+
+      SVN_ERR(svn_wc__db_base_get_props(&new_props, db,
+                                        file_external_path,
+                                        pool, pool));
+
+      SVN_TEST_ASSERT(new_props != NULL);
+      v = apr_hash_get(new_props, "key", APR_HASH_KEY_STRING);
+      SVN_TEST_ASSERT(v != NULL);
+      SVN_TEST_STRING_ASSERT(v->data, "value-data");
+    }
+
+    /* Read back the directory external and check its recorded fields. */
+    SVN_ERR(svn_wc__db_external_read(&status, &kind, &defining_abspath,
+                                     &repos_root_url, &repos_uuid,
+                                     &recorded_repos_relpath,
+                                     &recorded_peg_revision,
+                                     &recorded_revision,
+                                     db, dir_external_path, local_abspath,
+                                     pool, pool));
+
+    SVN_TEST_ASSERT(status == svn_wc__db_status_normal);
+    SVN_TEST_ASSERT(kind == svn_node_dir);
+    SVN_TEST_STRING_ASSERT(repos_root_url, "svn://other-repos/nsv");
+    SVN_TEST_STRING_ASSERT(repos_uuid, "no-uuid-either");
+    SVN_TEST_STRING_ASSERT(defining_abspath, subdir);
+    SVN_TEST_STRING_ASSERT(recorded_repos_relpath, "some/other-location");
+    SVN_TEST_ASSERT(recorded_peg_revision == 70);
+    SVN_TEST_ASSERT(recorded_revision == 32);
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Maximum number of threads the test harness may use for these tests;
+   presumably consumed by the SVN_TEST_MAIN boilerplate below. */
+static int max_threads = 2;
+
+/* The test table: one descriptor per test function, bracketed by
+   SVN_TEST_NULL sentinels. */
+static struct svn_test_descriptor_t test_funcs[] =
+  {
+    SVN_TEST_NULL,
+    SVN_TEST_PASS2(test_getting_info,
+                   "get information from wc.db"),
+    SVN_TEST_PASS2(test_inserting_nodes,
+                   "insert different nodes into wc.db"),
+    SVN_TEST_PASS2(test_children,
+                   "getting the list of BASE or WORKING children"),
+    SVN_TEST_PASS2(test_working_info,
+                   "reading information about the WORKING tree"),
+    SVN_TEST_PASS2(test_pdh,
+                   "creation of per-directory handles"),
+    SVN_TEST_PASS2(test_scan_addition,
+                   "scanning added working nodes"),
+    SVN_TEST_PASS2(test_scan_deletion,
+                   "deletion introspection functions"),
+    SVN_TEST_PASS2(test_global_relocate,
+                   "relocating a node"),
+    SVN_TEST_PASS2(test_work_queue,
+                   "work queue processing"),
+    SVN_TEST_PASS2(test_externals_store,
+                   "externals store"),
+    SVN_TEST_NULL
+  };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_wc/entries-compat.c b/subversion/tests/libsvn_wc/entries-compat.c
new file mode 100644
index 0000000..3470c06
--- /dev/null
+++ b/subversion/tests/libsvn_wc/entries-compat.c
@@ -0,0 +1,647 @@
+/*
+ * entries-compat.c : test backwards compatibility issues for entries
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <apr_pools.h>
+#include <apr_general.h>
+
+#include "svn_types.h"
+
+/* Make sure SVN_DEPRECATED is defined as empty before including svn_io.h.
+ We don't want to trigger deprecation warnings. */
+#ifdef SVN_DEPRECATED
+#undef SVN_DEPRECATED
+#endif
+#define SVN_DEPRECATED
+#include "svn_io.h"
+
+#include "svn_dirent_uri.h"
+#include "svn_pools.h"
+#include "svn_wc.h"
+
+#include "../../libsvn_wc/wc.h"
+#include "../../libsvn_wc/wc_db.h"
+
+#include "../svn_test.h"
+#include "utils.h"
+
+
+/* NOTE: these must be canonical! */
+#define ROOT_ONE "http://example.com/one"
+#define ROOT_TWO "http://example.com/two"
+#define ROOT_THREE "http://example.com/three"
+
+#define UUID_ONE "uuid1"
+#define UUID_TWO "uuid2"
+#define UUID_THREE "uuid3"
+
+#define TIME_1 1235142208
+#define TIME_2 1235142268
+#define TIME_3 1235142328
+
+#define TIME_1s APR_STRINGIFY(TIME_1) "000000"
+#define TIME_2s APR_STRINGIFY(TIME_2) "000000"
+#define TIME_3s APR_STRINGIFY(TIME_3) "000000"
+
+#define TIME_1a apr_time_from_sec(TIME_1)
+#define TIME_2a apr_time_from_sec(TIME_2)
+#define TIME_3a apr_time_from_sec(TIME_3)
+
+#define AUTHOR_1 "johndoe"
+#define AUTHOR_2 "janedoe"
+
+/* Some arbitrary checksum values */
+#define MD5_1 "2d18c5e57e84c5b8a5e9a6e13fa394dc"
+#define SHA1_1 "aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d"
+
+#define F_TC_DATA "(conflict F file update edited deleted (version 22 " ROOT_ONE " 1 2 branch1/ft/F none) (version 22 " ROOT_ONE " 1 3 branch1/ft/F file))"
+#define G_TC_DATA "(conflict G file update edited deleted (version 22 " ROOT_ONE " 1 2 branch1/ft/F none) (version 22 " ROOT_ONE " 1 3 branch1/ft/F file))"
+
+/* SQL executed against each freshly created wc.db to seed the
+   REPOSITORY, WCROOT and PRISTINE tables for the fake working copies
+   created below. */
+static const char * const TESTING_DATA = (
+  /* Load our test data.
+
+     Note: do not use named-column insertions. This allows us to test
+     the column count in the schema matches our expectation here. */
+
+  "insert into repository values (1, '" ROOT_ONE "', '" UUID_ONE "'); "
+  "insert into repository values (2, '" ROOT_TWO "', '" UUID_TWO "'); "
+  "insert into wcroot values (1, null); "
+
+  "insert into pristine values ('$sha1$" SHA1_1 "', NULL, 15, 1, '$md5 $" MD5_1 "'); "
+  );
+
+#define NOT_MOVED FALSE, NULL
+#define NO_COPY_FROM 0, NULL, SVN_INVALID_REVNUM
+
+/* Rows loaded into the NODES table of the fake wc.db.  Rows with
+   op_depth 0 describe the BASE tree; rows with op_depth >= 1 are
+   WORKING layers (see the ### note below about how those depths were
+   chosen).  The array is terminated by a zeroed sentinel row. */
+static const svn_test__nodes_data_t nodes[] =
+{
+  /* load the base nodes into the nodes table */
+  { 0, "", "normal", 1, "", 1, NOT_MOVED,
+    svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1},
+
+  { 0, "A", "normal", 1, "A", 1, NOT_MOVED,
+    svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1},
+
+  { 0, "B", "excluded", 1, "B", 1, NOT_MOVED,
+    svn_node_symlink},
+
+  { 0, "C", "server-excluded",1, "C", 0, NOT_MOVED,
+    svn_node_unknown},
+
+  { 0, "D", "not-present", 1, "D", 0, NOT_MOVED,
+    svn_node_unknown},
+
+  { 0, "E", "incomplete", 1, "E", SVN_INVALID_REVNUM, NOT_MOVED,
+    svn_node_unknown},
+
+  { 0, "F", "normal", 1, "G-alt", 1, NOT_MOVED,
+    svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1},
+
+  { 0, "G", "normal", 1, "G-alt", 1, NOT_MOVED,
+    svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1},
+
+  { 0, "H", "normal", 1, "I", 1, NOT_MOVED,
+    svn_node_symlink, NULL, NULL, NULL, "H-target", 1, TIME_1a, AUTHOR_1},
+
+  { 0, "I", "normal", 1, "I", 1, NOT_MOVED,
+    svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1},
+
+  { 0, "J", "normal", 1, "J", 1, NOT_MOVED,
+    svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1},
+
+  { 0, "J/J-c", "normal", 1, "J/J-c", 1, NOT_MOVED,
+    svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1},
+
+  { 0, "J/J-c/J-c-a", "not-present", 1, "J/J-c/J-c-a", 1, NOT_MOVED,
+    svn_node_dir},
+
+  { 0, "J/J-e", "normal", 1, "J/J-e", 1, NOT_MOVED,
+    svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1},
+
+  { 0, "J/J-e/J-e-a", "normal", 1, "J/J-e/J-e-a", 1, NOT_MOVED,
+    svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1},
+
+  { 0, "J/J-e/J-e-b", "normal", 1, "J/J-e/J-e-b", 1, NOT_MOVED,
+    svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1},
+
+  { 0, "J/J-e/J-e-b/Jeba", "normal", 1, "J/J-e/J-e-b/Jeba", 1, NOT_MOVED,
+    svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1},
+
+  { 0, "J/J-f", "normal", 1, "J/J-f", 1, NOT_MOVED,
+    svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1},
+
+  { 0, "J/J-f/J-f-a", "normal", 1, "J/J-f/J-f-a", 1, NOT_MOVED,
+    svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1},
+
+  { 0, "K", "normal", 1, "K", 1, NOT_MOVED,
+    svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1},
+
+  { 0, "K/K-a", "normal", 1, "K/K-a", 1, NOT_MOVED,
+    svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1},
+
+  { 0, "K/K-b", "normal", 1, "K/K-b", 1, NOT_MOVED,
+    svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1},
+
+  { 0, "L", "normal", 1, "switched", 1, NOT_MOVED,
+    svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1},
+
+  { 0, "L/L-a", "normal", 1, "switched/L-a", 1, NOT_MOVED,
+    svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1},
+
+  { 0, "L/L-a/L-a-a", "normal", 1, "switched/L-a/L-a-a", 1, NOT_MOVED,
+    svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1},
+
+  /* Load data into NODES table;
+     ### op_depths have not been calculated by me yet;
+     the value 1 is just 'good enough' to make the nodes WORKING nodes. */
+
+  { 1, "I", "normal", 2, "some/file", 2, NOT_MOVED,
+    svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2},
+
+  { 1, "J", "normal", NO_COPY_FROM, NOT_MOVED,
+    svn_node_dir, NULL, "immediates"},
+
+  { 2, "J/J-a", "normal", NO_COPY_FROM, NOT_MOVED,
+    svn_node_file},
+
+  { 2, "J/J-b", "normal", 2, "some/dir", 2, NOT_MOVED,
+    svn_node_dir, "()", "infinity", NULL, NULL, 2, TIME_2a, AUTHOR_2},
+
+  { 3, "J/J-b/J-b-a", "normal", 2, "another/dir", 2, NOT_MOVED,
+    svn_node_dir, "()", "infinity", NULL, NULL, 2, TIME_2a, AUTHOR_2},
+
+  { 3, "J/J-b/J-b-b", "normal", NO_COPY_FROM, NOT_MOVED,
+    svn_node_file},
+
+  /* This triggers a validation warning: bad delete */
+  { 1, "J/J-c", "base-deleted", NO_COPY_FROM, NOT_MOVED,
+    svn_node_dir},
+
+  { 1, "J/J-d", "normal", 2, "moved/file", 2, NOT_MOVED,
+    svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2},
+
+  { 1, "J/J-e", "base-deleted", NO_COPY_FROM, FALSE, "other/place",
+    svn_node_dir},
+
+  { 1, "J/J-e/J-e-a", "base-deleted", NO_COPY_FROM, NOT_MOVED,
+    svn_node_file},
+
+  { 1, "J/J-e/J-e-b", "base-deleted", NO_COPY_FROM, NOT_MOVED,
+    svn_node_dir},
+
+  { 1, "J/J-e/J-e-b/Jeba", "base-deleted", NO_COPY_FROM, NOT_MOVED,
+    svn_node_file},
+
+  { 1, "J/J-f", "base-deleted", NO_COPY_FROM, NOT_MOVED,
+    svn_node_dir},
+
+  { 2, "J/J-f", "normal", NO_COPY_FROM, NOT_MOVED,
+    svn_node_dir, NULL, "immediates"},
+
+  { 1, "J/J-f/J-f-a", "base-deleted", NO_COPY_FROM, NOT_MOVED,
+    svn_node_dir},
+
+  { 1, "K", "base-deleted", NO_COPY_FROM, NOT_MOVED,
+    svn_node_dir},
+
+  { 1, "K/K-a", "base-deleted", NO_COPY_FROM, NOT_MOVED,
+    svn_node_file},
+
+  { 1, "K/K-b", "base-deleted", NO_COPY_FROM, FALSE, "moved/away",
+    svn_node_file},
+
+  { 1, "L", "normal", NO_COPY_FROM, NOT_MOVED,
+    svn_node_dir, NULL, "immediates"},
+
+  { 1, "L/L-a", "base-deleted", NO_COPY_FROM, NOT_MOVED,
+    svn_node_dir},
+
+  { 1, "L/L-a/L-a-a", "base-deleted", NO_COPY_FROM, NOT_MOVED,
+    svn_node_dir},
+
+  { 1, "M", "normal", 1, "M", 1, NOT_MOVED,
+    svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1},
+
+  { 1, "M/M-a", "not-present", 1, "M/M-a", 1, NOT_MOVED,
+    svn_node_file},
+
+  /**** Move target of K/K-b ****/
+  { 1, "moved", "normal", NO_COPY_FROM, NOT_MOVED,
+    svn_node_dir, NULL, "infinity" },
+  { 2, "moved/away", "normal", 1, "??", 1, TRUE, NULL,
+    svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1},
+
+  /**** Move target of J/J-e ****/
+  { 1, "other", "normal", NO_COPY_FROM, NOT_MOVED,
+    svn_node_dir, NULL, "empty"},
+
+  { 2, "other/place", "normal", 1, "??", 1, TRUE, NULL,
+    svn_node_dir, "()", "infinity"},
+
+  { 0 },
+};
+
+/* Rows loaded into the ACTUAL table: "I" carries a changelist; the
+   trailing NULLs on "F" and "G" apparently reserve slots for
+   tree-conflict data (cf. the unused F_TC_DATA/G_TC_DATA macros
+   above). */
+static const svn_test__actual_data_t actuals[] =
+{
+  { "I", NULL, "changelist", NULL },
+  { "F", NULL, NULL, NULL /* TC-DATA */ },
+  { "G", NULL, NULL, NULL /* TC-DATA */ },
+  { 0 },
+};
+
+
+/* Seed data for the nested working copy that test_stubs() creates
+   inside the "M" subdirectory: the repository rows, a wcroot, and a
+   single "normal" BASE directory node. */
+static const char * const M_TESTING_DATA = (
+  /* Load our test data.
+
+     Note: do not use named-column insertions. This allows us to test
+     the column count in the schema matches our expectation here. */
+
+  "insert into repository values (1, '" ROOT_ONE "', '" UUID_ONE "'); "
+  "insert into repository values (2, '" ROOT_TWO "', '" UUID_TWO "'); "
+  "insert into wcroot values (1, null); "
+
+  "insert into nodes values ("
+  "  1, '', 0, null, 1, 'M', 1, 'normal',"
+  "  null, null, 'dir', '()', 'infinity', null, null, 1, " TIME_1s ", '" AUTHOR_1 "',"
+  "  null, null, null, null, null);"
+  );
+
+
+/* Create a fresh fake working copy at "fake-wc/SUBDIR", seeding its
+   wc.db from TESTING_DATA, nodes and actuals.  Any copy left over
+   from a previous run is removed first. */
+static svn_error_t *
+create_fake_wc(const char *subdir, apr_pool_t *pool)
+{
+  const char *wc_path = svn_dirent_join("fake-wc", subdir, pool);
+  const char *wc_abspath;
+
+  /* Remove any leftovers from a previous test run. */
+  SVN_ERR(svn_io_remove_dir2(wc_path, TRUE, NULL, NULL, pool));
+
+  SVN_ERR(svn_dirent_get_absolute(&wc_abspath, wc_path, pool));
+  SVN_ERR(svn_test__create_fake_wc(wc_abspath, TESTING_DATA, nodes, actuals,
+                                   pool));
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Create the fake working copy SUBDIR (see create_fake_wc) and open a
+   wc_db handle onto it.  Sets *DB to the open handle and *LOCAL_ABSPATH
+   to the absolute path of the working copy root.  The directory is
+   registered for cleanup when the test program exits. */
+static svn_error_t *
+create_open(svn_wc__db_t **db,
+            const char **local_abspath,
+            const char *subdir,
+            apr_pool_t *pool)
+{
+  SVN_ERR(create_fake_wc(subdir, pool));
+
+  SVN_ERR(svn_dirent_get_absolute(local_abspath,
+                                  svn_dirent_join("fake-wc", subdir, pool),
+                                  pool));
+  SVN_ERR(svn_wc__db_open(db,
+                          NULL /* config */,
+                          FALSE /* not_upgraded_ok */,
+                          TRUE /* enforce_empty_wq */,
+                          pool, pool));
+
+  svn_test_add_dir_cleanup(*local_abspath);
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Check entry allocation/sharing in the deprecated entries API: the
+   pointer handed back by svn_wc_entry() must be the very pointer stored
+   in the hash returned by svn_wc_entries_read() for the same access
+   baton, and a nonexistent path must yield a NULL entry. */
+static svn_error_t *
+test_entries_alloc(apr_pool_t *pool)
+{
+  svn_wc__db_t *db;
+  const char *local_abspath;
+  svn_wc_adm_access_t *adm_access;
+  apr_hash_t *entries;
+  const svn_wc_entry_t *entry;
+  const char *local_relpath;
+
+#undef WC_NAME
+#define WC_NAME "test_entries_alloc"
+
+  SVN_ERR(create_open(&db, &local_abspath, WC_NAME, pool));
+
+  SVN_ERR(svn_wc_adm_open3(&adm_access,
+                           NULL /* associated */,
+                           svn_dirent_join("fake-wc", WC_NAME, pool),
+                           FALSE /* write_lock */,
+                           0 /* levels_to_lock */,
+                           NULL /* cancel_func */,
+                           NULL /* cancel_baton */,
+                           pool));
+  SVN_ERR(svn_wc_entries_read(&entries, adm_access, TRUE /* show_hidden */,
+                              pool));
+
+  /* The wcroot has 12 BASE children + 3 WORKING children + "this dir". */
+  SVN_TEST_ASSERT(apr_hash_count(entries) == 16);
+
+  /* The "D" entry in the entries hash should be what we get from the
+     svn_wc_entry() entrypoint. */
+  local_relpath = svn_dirent_join_many(pool,
+                                       "fake-wc",
+                                       WC_NAME,
+                                       "D",
+                                       SVN_VA_NULL);
+  SVN_ERR(svn_wc_entry(&entry, local_relpath, adm_access, TRUE, pool));
+  SVN_TEST_ASSERT(entry == apr_hash_get(entries, "D", APR_HASH_KEY_STRING));
+
+  /* This entry should be missing. */
+  SVN_ERR(svn_wc_entry(&entry, "missing", adm_access, TRUE, pool));
+  SVN_TEST_ASSERT(entry == NULL);
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Verify "stub entry" behaviour for the subdirectory "M", which gets
+   its own nested fake working copy (seeded from M_TESTING_DATA):
+   reading through the parent's access baton yields the stub entry,
+   reading through the subdir's own baton yields the real entry, and
+   each baton's entries hash shares pointers with svn_wc_entry(). */
+static svn_error_t *
+test_stubs(apr_pool_t *pool)
+{
+  svn_wc__db_t *db;
+  const char *local_abspath;
+  const char *local_relpath;
+  svn_wc_adm_access_t *adm_access;
+  svn_wc_adm_access_t *subdir_access;
+  const svn_wc_entry_t *stub_entry;
+  const svn_wc_entry_t *entry;
+  const svn_wc_entry_t *test_entry;
+  const char *M_dir;
+  apr_hash_t *entries;
+
+#undef WC_NAME
+#define WC_NAME "test_stubs"
+
+  SVN_ERR(create_open(&db, &local_abspath, WC_NAME, pool));
+
+  M_dir = svn_dirent_join(local_abspath, "M", pool);
+  SVN_ERR(svn_test__create_fake_wc(M_dir, M_TESTING_DATA, NULL, NULL, pool));
+
+  /* The "M" entry is a subdir. Let's ensure we can reach its stub,
+     and the actual contents. */
+  local_relpath = svn_dirent_join_many(pool,
+                                       "fake-wc",
+                                       WC_NAME,
+                                       "M",
+                                       SVN_VA_NULL);
+
+  SVN_ERR(svn_wc_adm_open3(&adm_access,
+                           NULL /* associated */,
+                           svn_dirent_join("fake-wc", WC_NAME, pool),
+                           FALSE /* write_lock */,
+                           0 /* levels_to_lock */,
+                           NULL /* cancel_func */,
+                           NULL /* cancel_baton */,
+                           pool));
+
+  /* Ensure we get the stub. NOTE: do this before we have associated the
+     subdir baton with ADM_ACCESS. */
+  SVN_ERR(svn_wc_entry(&stub_entry, local_relpath, adm_access, TRUE, pool));
+  SVN_TEST_STRING_ASSERT(stub_entry->name, "M");
+  /* Schedule add in parent-wc. Schedule normal in obstructing working copy */
+  SVN_TEST_ASSERT(stub_entry->schedule == svn_wc_schedule_add);
+
+  SVN_ERR(svn_wc_adm_open3(&subdir_access,
+                           adm_access,
+                           local_relpath,
+                           FALSE /* write_lock */,
+                           0 /* levels_to_lock */,
+                           NULL /* cancel_func */,
+                           NULL /* cancel_baton */,
+                           pool));
+
+  /* Ensure we get the real entry. */
+  SVN_ERR(svn_wc_entry(&entry, local_relpath, subdir_access, TRUE, pool));
+  SVN_TEST_STRING_ASSERT(entry->name, "");
+  SVN_TEST_ASSERT(entry->schedule == svn_wc_schedule_normal);
+
+  /* Ensure that we get the SAME entry, even using the parent baton. */
+  SVN_ERR(svn_wc_entry(&test_entry, local_relpath, adm_access, TRUE, pool));
+  SVN_TEST_ASSERT(test_entry == entry);
+
+  /* Ensure we get the stub when reading entries with ADM_ACCESS. */
+  SVN_ERR(svn_wc_entries_read(&entries, adm_access, TRUE /* show_hidden */,
+                              pool));
+  SVN_TEST_ASSERT(stub_entry
+                  == apr_hash_get(entries, "M", APR_HASH_KEY_STRING));
+
+  /* Ensure we get the real entry when reading entries with SUBDIR_ACCESS. */
+  SVN_ERR(svn_wc_entries_read(&entries, subdir_access, TRUE /* show_hidden */,
+                              pool));
+  SVN_TEST_ASSERT(entry
+                  == apr_hash_get(entries, "", APR_HASH_KEY_STRING));
+
+  return SVN_NO_ERROR;
+}
+
+/* Test that wc-ng write locks behave like the old access-baton locks:
+   1) drive the deprecated svn_wc_adm_* / svn_wc_add3 interface and check
+      svn_wc_locked() at each step (add extends locks, delete/revert
+      releases them);
+   2) repeat via svn_wc_context_t / svn_wc_add4 and the private
+      svn_wc__db_wclock_* API, including releasing, re-obtaining and
+      stealing a lock from a second context;
+   3) verify a freshly created sub-working-copy nested inside this WC is
+      treated as a separate WC root by both contexts. */
+static svn_error_t *
+test_access_baton_like_locking(apr_pool_t *pool)
+{
+  svn_wc__db_t *db;
+  svn_wc_context_t *wc_ctx, *wc_ctx2;
+  const char *local_abspath;
+  const char *D, *D1, *D2, *D3, *D4;  /* nested dirs DD/DD/DD/DD/DD */
+  svn_boolean_t locked_here, locked;
+  svn_error_t *err;
+  svn_wc_adm_access_t *adm_access, *subdir_access;
+
+#undef WC_NAME
+#define WC_NAME "test_access_batons"
+  SVN_ERR(create_open(&db, &local_abspath, WC_NAME, pool));
+
+  D = svn_dirent_join(local_abspath, "DD", pool);
+
+  D1 = svn_dirent_join(D, "DD", pool);
+  D2 = svn_dirent_join(D1, "DD", pool);
+  D3 = svn_dirent_join(D2, "DD", pool);
+  D4 = svn_dirent_join(D3, "DD", pool);
+
+  SVN_ERR(svn_io_make_dir_recursively(D4, pool));
+
+  /* Use the legacy interface */
+  SVN_ERR(svn_wc_adm_open3(&adm_access, NULL, local_abspath, TRUE, 0,
+                           NULL, NULL, pool));
+  SVN_ERR(svn_wc_add3(D, adm_access, svn_depth_infinity, NULL,
+                      SVN_INVALID_REVNUM, NULL, NULL, NULL, NULL, pool));
+  SVN_ERR(svn_wc_adm_retrieve(&subdir_access, adm_access, D, pool));
+  SVN_ERR(svn_wc_add3(D1, subdir_access, svn_depth_infinity, NULL,
+                      SVN_INVALID_REVNUM, NULL, NULL, NULL, NULL, pool));
+  SVN_ERR(svn_wc_adm_retrieve(&subdir_access, adm_access, D1, pool));
+  SVN_ERR(svn_wc_add3(D2, subdir_access, svn_depth_infinity, NULL,
+                      SVN_INVALID_REVNUM, NULL, NULL, NULL, NULL, pool));
+  SVN_ERR(svn_wc_adm_retrieve(&subdir_access, adm_access, D2, pool));
+  SVN_ERR(svn_wc_add3(D3, subdir_access, svn_depth_infinity, NULL,
+                      SVN_INVALID_REVNUM, NULL, NULL, NULL, NULL, pool));
+  SVN_ERR(svn_wc_add3(D4, subdir_access, svn_depth_infinity, NULL,
+                      SVN_INVALID_REVNUM, NULL, NULL, NULL, NULL, pool));
+  /* Adding a dir should have locked it. */
+  SVN_ERR(svn_wc_locked(&locked, D3, pool));
+  SVN_TEST_ASSERT(locked);
+  SVN_ERR(svn_wc_locked(&locked, D4, pool));
+  SVN_TEST_ASSERT(locked);
+  /* Deleting the just-added D4 should drop its lock... */
+  SVN_ERR(svn_wc_delete3(D4, subdir_access, NULL, NULL, NULL, NULL, FALSE,
+                         pool));
+  SVN_ERR(svn_wc_locked(&locked, D4, pool));
+  SVN_TEST_ASSERT(!locked);
+  /* ...and reverting the whole tree drops the nested locks, while the
+     root baton's own lock remains. */
+  SVN_ERR(svn_wc_revert3(D, adm_access, svn_depth_infinity, FALSE,
+                         NULL, NULL, NULL, NULL, NULL, pool));
+  SVN_ERR(svn_wc_locked(&locked, D3, pool));
+  SVN_TEST_ASSERT(!locked);
+  SVN_ERR(svn_wc_locked(&locked, local_abspath, pool));
+  SVN_TEST_ASSERT(locked);
+  SVN_ERR(svn_wc_adm_close2(adm_access, pool));
+
+  /* Now the same dance through the modern context API. */
+  SVN_ERR(svn_wc_context_create(&wc_ctx, NULL, pool, pool));
+
+  /* Obtain a lock for the root, which is extended on each level */
+  SVN_ERR(svn_wc__db_wclock_obtain(wc_ctx->db, local_abspath, 0, FALSE, pool));
+  SVN_ERR(svn_io_make_dir_recursively(D4, pool));
+  SVN_ERR(svn_wc_add4(wc_ctx, D, svn_depth_infinity, NULL, SVN_INVALID_REVNUM,
+                      NULL, NULL, NULL, NULL, pool));
+  SVN_ERR(svn_wc_add4(wc_ctx, D1, svn_depth_infinity, NULL, SVN_INVALID_REVNUM,
+                      NULL, NULL, NULL, NULL, pool));
+  SVN_ERR(svn_wc_add4(wc_ctx, D2, svn_depth_infinity, NULL, SVN_INVALID_REVNUM,
+                      NULL, NULL, NULL, NULL, pool));
+  SVN_ERR(svn_wc_add4(wc_ctx, D3, svn_depth_infinity, NULL, SVN_INVALID_REVNUM,
+                      NULL, NULL, NULL, NULL, pool));
+
+  SVN_ERR(svn_wc_locked2(&locked_here, &locked, wc_ctx, D3, pool));
+  SVN_TEST_ASSERT(locked_here && locked);
+
+  /* Test if the not added path is already locked */
+  SVN_ERR(svn_wc_locked2(&locked_here, &locked, wc_ctx, D4, pool));
+  SVN_TEST_ASSERT(!locked_here && !locked);
+
+  SVN_ERR(svn_wc_add4(wc_ctx, D4, svn_depth_infinity, NULL, SVN_INVALID_REVNUM,
+                      NULL, NULL, NULL, NULL, pool));
+
+  SVN_ERR(svn_wc_locked2(&locked_here, &locked, wc_ctx, D4, pool));
+  SVN_TEST_ASSERT(locked_here && locked);
+
+  SVN_ERR(svn_wc__db_wclock_release(wc_ctx->db, local_abspath, pool));
+  /* Should be unlocked */
+  SVN_ERR(svn_wc_locked2(&locked_here, &locked, wc_ctx, local_abspath, pool));
+  SVN_TEST_ASSERT(!locked_here && !locked);
+
+  /* Lock shouldn't be released */
+  SVN_ERR(svn_wc_locked2(&locked_here, &locked, wc_ctx, D, pool));
+  SVN_TEST_ASSERT(locked_here && locked);
+
+  SVN_ERR(svn_wc__db_wclock_release(wc_ctx->db, D, pool));
+  SVN_ERR(svn_wc__db_wclock_release(wc_ctx->db, D1, pool));
+  SVN_ERR(svn_wc__db_wclock_release(wc_ctx->db, D2, pool));
+  SVN_ERR(svn_wc__db_wclock_release(wc_ctx->db, D3, pool));
+
+  /* Try reobtaining lock on D3; should succeed */
+  SVN_ERR(svn_wc__db_wclock_obtain(wc_ctx->db, D3, 0, FALSE, pool));
+  SVN_ERR(svn_wc__db_wclock_release(wc_ctx->db, D4, pool));
+
+
+  /* D3 should still be locked; try stealing in a different context */
+  SVN_ERR(svn_wc_context_create(&wc_ctx2, NULL, pool, pool));
+  SVN_ERR(svn_wc_locked2(&locked_here, &locked, wc_ctx2, D3, pool));
+  SVN_TEST_ASSERT(!locked_here && locked);
+
+  err = svn_wc__db_wclock_obtain(wc_ctx2->db, D3, 0, FALSE, pool);
+
+  /* Only SVN_ERR_WC_LOCKED is the expected outcome; anything else is a
+     genuine failure. */
+  if (err && err->apr_err != SVN_ERR_WC_LOCKED)
+    return svn_error_trace(err);
+  svn_error_clear(err);
+
+  /* Comparing the (cleared) pointer against NULL is safe; it is never
+     dereferenced after svn_error_clear(). */
+  SVN_TEST_ASSERT(err != NULL); /* Can't lock, as it is still locked */
+
+  err = svn_wc__db_wclock_release(wc_ctx2->db, D4, pool);
+  if (err && err->apr_err != SVN_ERR_WC_NOT_LOCKED)
+    return svn_error_trace(err);
+  svn_error_clear(err);
+
+  SVN_TEST_ASSERT(err != NULL); /* Can't unlock, as it is not ours */
+
+  /* Now steal the lock (steal_lock = TRUE) */
+  SVN_ERR(svn_wc__db_wclock_obtain(wc_ctx2->db, D3, 0, TRUE, pool));
+
+  /* We should own the lock now */
+  SVN_ERR(svn_wc_locked2(&locked_here, &locked, wc_ctx2, D3, pool));
+  SVN_TEST_ASSERT(locked_here && locked);
+
+  err = svn_wc__db_wclock_release(wc_ctx2->db, D4, pool);
+  if (err && err->apr_err != SVN_ERR_WC_NOT_LOCKED)
+    return svn_error_trace(err);
+  svn_error_clear(err);
+
+  SVN_TEST_ASSERT(err != NULL); /* Can't unlock a not locked path */
+
+  /* Now create a separate working copy from the same repository directly
+     below this WC and test if our code really sees it as a separate wc,
+     for locking and normal operation */
+  {
+    const char *url, *repos_root_url, *repos_uuid;
+    const char *subdir = svn_dirent_join(local_abspath, "sub-wc", pool);
+    const char *repos_relpath;
+
+    svn_boolean_t is_root;
+    SVN_ERR(svn_wc__node_get_repos_info(NULL, &repos_relpath,
+                                        &repos_root_url, &repos_uuid,
+                                        wc_ctx, local_abspath,
+                                        pool, pool));
+    url = svn_path_url_add_component2(repos_root_url, repos_relpath, pool);
+
+    SVN_ERR(svn_io_make_dir_recursively(subdir, pool));
+    SVN_ERR(svn_wc_ensure_adm3(subdir, repos_uuid,
+                               svn_path_url_add_component2(url, "sub-wc", pool),
+                               repos_root_url, 0, svn_depth_infinity,
+                               pool));
+
+    SVN_ERR(svn_wc__db_is_switched(&is_root, NULL, NULL, wc_ctx->db, subdir,
+                                   pool));
+
+    SVN_TEST_ASSERT(is_root);
+
+    SVN_ERR(svn_wc__db_is_switched(&is_root, NULL, NULL, wc_ctx2->db, subdir,
+                                   pool));
+
+    /* This test was added to show a regression where the next check failed,
+       but the check above this succeeded */
+    SVN_TEST_ASSERT(is_root);
+
+    SVN_ERR(svn_wc_locked2(&locked_here, &locked, wc_ctx2, subdir, pool));
+    SVN_TEST_ASSERT(!locked_here && !locked);
+  }
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Maximum concurrent test threads; -1 presumably lets the test harness
+   pick its default -- TODO confirm against svn_test_main(). */
+static int max_threads = -1;
+
+/* The test table consumed by SVN_TEST_MAIN; terminated (and started)
+   with SVN_TEST_NULL sentinels. */
+static struct svn_test_descriptor_t test_funcs[] =
+  {
+    SVN_TEST_NULL,
+    SVN_TEST_PASS2(test_entries_alloc,
+                   "entries are allocated in access baton"),
+    SVN_TEST_PASS2(test_stubs,
+                   "access baton mojo can return stubs"),
+    SVN_TEST_PASS2(test_access_baton_like_locking,
+                   "access baton like locks must work with wc-ng"),
+    SVN_TEST_NULL
+  };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_wc/op-depth-test.c b/subversion/tests/libsvn_wc/op-depth-test.c
new file mode 100644
index 0000000..bf6592d
--- /dev/null
+++ b/subversion/tests/libsvn_wc/op-depth-test.c
@@ -0,0 +1,12104 @@
+/*
+ * op-depth-test.c : test layered tree changes
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* To avoid warnings... */
+#define SVN_DEPRECATED
+
+#include <apr_pools.h>
+#include <apr_general.h>
+
+#include "svn_types.h"
+#include "svn_hash.h"
+#include "svn_io.h"
+#include "svn_dirent_uri.h"
+#include "svn_pools.h"
+#include "svn_repos.h"
+#include "svn_wc.h"
+#include "svn_client.h"
+#include "svn_hash.h"
+#include "svn_sorts.h"
+
+#include "utils.h"
+
+#include "private/svn_dep_compat.h"
+#include "private/svn_sorts_private.h"
+#include "private/svn_sqlite.h"
+#include "private/svn_wc_private.h"
+#include "../../libsvn_wc/wc.h"
+#include "../../libsvn_wc/wc_db.h"
+#include "../../libsvn_wc/workqueue.h"
+#include "../../libsvn_wc/conflicts.h"
+#define SVN_WC__I_AM_WC_DB
+#include "../../libsvn_wc/wc_db_private.h"
+
+#include "../svn_test.h"
+
+#include "wc-test-queries.h"
+
+WC_TEST_QUERIES_SQL_DECLARE_STATEMENTS(op_depth_statements);
+
+/* Compare strings, like strcmp but either or both may be NULL, which
+ * compares equal to NULL and not equal to any non-NULL string.
+ * Returns 0 for "equal" and 1 for "not equal"; unlike strcmp there is
+ * no ordering (-1) result when only one side is NULL. */
+static int
+strcmp_null(const char *s1, const char *s2)
+{
+  if (s1 && s2)
+    return strcmp(s1, s2);
+  else if (s1 || s2)
+    return 1;   /* exactly one is NULL: never equal */
+  else
+    return 0;   /* both NULL: equal */
+}
+
+
+/* ---------------------------------------------------------------------- */
+/* Reading the WC DB */
+
+/* Open the "wc.db" SQLite database of the working copy rooted at
+ * WC_ROOT_ABSPATH in read-write mode, preparing this test's statements
+ * (op_depth_statements).  *SDB is allocated in RESULT_POOL; temporary
+ * allocations use SCRATCH_POOL. */
+static svn_error_t *
+open_wc_db(svn_sqlite__db_t **sdb,
+           const char *wc_root_abspath,
+           apr_pool_t *result_pool,
+           apr_pool_t *scratch_pool)
+{
+  SVN_ERR(svn_wc__db_util_open_db(sdb, wc_root_abspath, "wc.db",
+                                  svn_sqlite__mode_readwrite,
+                                  FALSE /* exclusive */, 0 /* timeout */,
+                                  op_depth_statements,
+                                  result_pool, scratch_pool));
+  return SVN_NO_ERROR;
+}
+
+
+/* ---------------------------------------------------------------------- */
+/* Functions for comparing expected and found WC DB data. */
+
+/* Some of the fields from a NODES table row. */
+typedef struct nodes_row_t {
+  int op_depth;                   /* layer depth; 0 == BASE */
+  const char *local_relpath;
+  const char *presence;           /* e.g. "normal", "base-deleted" */
+  svn_revnum_t repo_revnum;
+  const char *repo_relpath;
+  svn_boolean_t file_external;
+  const char *moved_to;
+  svn_boolean_t moved_here;
+  const char *props; /* comma-separated list of prop names */
+} nodes_row_t;
+
+/* Tree conflict details */
+typedef struct tree_conflict_info
+{
+  svn_wc_conflict_action_t action;
+  svn_wc_conflict_reason_t reason;
+  const char *delete_path;
+  svn_boolean_t conflicted_fb; /* fallback for reason, action and path 0 */
+} tree_conflict_info;
+
+/* What conflicts are on a path. */
+typedef struct conflict_info_t {
+  const char *local_relpath;
+  svn_boolean_t text_conflicted;
+  svn_boolean_t prop_conflicted;
+
+  tree_conflict_info tc;
+} conflict_info_t;
+
+/* Macro for filling in the REPO_* fields of a non-base NODES_ROW_T
+ * that has no copy-from info. */
+#define NO_COPY_FROM SVN_INVALID_REVNUM, NULL, FALSE
+/* Shorthands for the trailing (file_external, moved_to, moved_here)
+ * fields of a NODES_ROW_T initializer. */
+#define MOVED_HERE FALSE, NULL, TRUE
+#define NOT_MOVED  FALSE, NULL, FALSE
+#define FILE_EXTERNAL TRUE
+
+/* Return a comma-separated list of the prop names in PROPS, in lexically
+ * ascending order, or NULL if PROPS is empty or NULL.  (Here, we don't
+ * care about the difference between 'has no props' and 'can't have props',
+ * and we choose to represent both of those as NULL.)
+ * The result is allocated in POOL. */
+static const char *
+props_hash_to_text(apr_hash_t *props, apr_pool_t *pool)
+{
+  apr_array_header_t *props_sorted;
+  svn_stringbuf_t *str;
+  int i;
+
+  if (! props)
+    return NULL;
+
+  str = svn_stringbuf_create_empty(pool);
+  props_sorted = svn_sort__hash(props, svn_sort_compare_items_lexically, pool);
+  for (i = 0; i < props_sorted->nelts; i++)
+    {
+      const svn_sort__item_t *item
+        = &APR_ARRAY_IDX(props_sorted, i, svn_sort__item_t);
+
+      /* Join with commas; no comma before the first name. */
+      if (str->len)
+        svn_stringbuf_appendbyte(str, ',');
+      svn_stringbuf_appendcstr(str, item->key);
+    }
+  /* Empty hash degenerates to NULL, same as a NULL hash. */
+  return str->len ? str->data : NULL;
+}
+
+/* Return a human-readable string representing ROW.  With a tiny bit of
+   editing this can be used to create expected results, which is why the
+   output mimics the C initializer syntax of NODES_ROW_T (including the
+   NO_COPY_FROM / MOVED_HERE / FILE_EXTERNAL macro spellings). */
+static const char *
+print_row(const nodes_row_t *row,
+          apr_pool_t *result_pool)
+{
+  const char *relpath_str, *presence_str;
+  const char *file_external_str, *moved_here_str, *moved_to_str, *props;
+
+  if (row == NULL)
+    return "(null)";
+
+  relpath_str = apr_psprintf(result_pool, "\"%s\",", row->local_relpath);
+  presence_str = apr_psprintf(result_pool, "\"%s\",", row->presence);
+  if (row->moved_to)
+    moved_to_str = apr_psprintf(result_pool, ", \"%s\"", row->moved_to);
+  else
+    moved_to_str = "";
+
+  /* When moved_to is printed, moved_here must be spelled out positionally
+     as TRUE rather than via the MOVED_HERE macro. */
+  if (row->moved_here && !row->file_external && !row->moved_to)
+    moved_here_str = ", MOVED_HERE";
+  else if (row->moved_to)
+    moved_here_str = ", TRUE";
+  else
+    moved_here_str = "";
+
+  /* Similarly, file_external must be printed positionally (as FALSE)
+     whenever a later field is present. */
+  if (row->file_external)
+    file_external_str = ", FILE_EXTERNAL";
+  else if (row->moved_to || row->props)
+    file_external_str = ", FALSE";
+  else
+    file_external_str = "";
+
+  if (row->props)
+    props = apr_psprintf(result_pool, ", p=(%s)", row->props);
+  else
+    props = "";
+
+  if (row->repo_revnum == SVN_INVALID_REVNUM)
+    return apr_psprintf(result_pool, "%d, %-20s%-15s NO_COPY_FROM%s%s%s%s",
+                        row->op_depth, relpath_str, presence_str,
+                        file_external_str, moved_here_str, moved_to_str,
+                        props);
+  else
+    return apr_psprintf(result_pool, "%d, %-20s%-15s %d, \"%s\"%s%s%s%s",
+                        row->op_depth, relpath_str, presence_str,
+                        (int)row->repo_revnum, row->repo_relpath,
+                        file_external_str, moved_here_str, moved_to_str,
+                        props);
+}
+/* A baton to pass through svn_hash_diff() to compare_nodes_rows().
+   (Also reused by compare_conflict_info(), where the hashes map
+   local_relpath to conflict_info_t instead.) */
+typedef struct comparison_baton_t {
+  apr_hash_t *expected_hash;  /* Maps "OP_DEPTH PATH" to nodes_row_t. */
+  apr_hash_t *found_hash;     /* Maps "OP_DEPTH PATH" to nodes_row_t. */
+  apr_pool_t *scratch_pool;
+  svn_error_t *errors;        /* Chain of errors found in comparison. */
+} comparison_baton_t;
+
+/* Compare two hash entries indexed by KEY, in the two hashes in BATON.
+ * Append an error message to BATON->errors if they differ or are not both
+ * present.  An expected row with a NULL props field matches any found
+ * props (props are only checked when the expectation spells them out).
+ *
+ * Implements svn_hash_diff_func_t. */
+static svn_error_t *
+compare_nodes_rows(const void *key, apr_ssize_t klen,
+                   enum svn_hash_diff_key_status status,
+                   void *baton)
+{
+  comparison_baton_t *b = baton;
+  nodes_row_t *expected = apr_hash_get(b->expected_hash, key, klen);
+  nodes_row_t *found = apr_hash_get(b->found_hash, key, klen);
+
+  if (! expected)
+    {
+      b->errors = svn_error_createf(
+                    SVN_ERR_TEST_FAILED, b->errors,
+                    "found {%s}",
+                    print_row(found, b->scratch_pool));
+    }
+  else if (! found)
+    {
+      b->errors = svn_error_createf(
+                    SVN_ERR_TEST_FAILED, b->errors,
+                    "expected {%s}",
+                    print_row(expected, b->scratch_pool));
+    }
+  else if (expected->repo_revnum != found->repo_revnum
+           || (strcmp_null(expected->repo_relpath, found->repo_relpath) != 0)
+           || (strcmp_null(expected->presence, found->presence) != 0)
+           || (expected->file_external != found->file_external)
+           || (expected->moved_here != found->moved_here)
+           || (expected->moved_to && !found->moved_to)
+           || (!expected->moved_to && found->moved_to)
+           || (expected->moved_to
+               && strcmp(expected->moved_to, found->moved_to))
+           || (expected->props != NULL
+               && strcmp_null(expected->props, found->props) != 0))
+    {
+      b->errors = svn_error_createf(
+                    SVN_ERR_TEST_FAILED, b->errors,
+                    "expected {%s}; found {%s}",
+                    print_row(expected, b->scratch_pool),
+                    print_row(found, b->scratch_pool));
+    }
+
+  /* Don't terminate the comparison: accumulate all differences. */
+  return SVN_NO_ERROR;
+}
+
+
+/* Examine the WC DB for paths ROOT_PATH and below, and check that their
+ * rows in the 'NODES' table (only those at op_depth > 0) match EXPECTED_ROWS
+ * (which is terminated by a row of null fields).
+ *
+ * Rows on both sides are keyed by "OP_DEPTH PATH" so that the comparison
+ * is order-independent.
+ *
+ * Return a chain of errors describing any and all mismatches. */
+static svn_error_t *
+check_db_rows(svn_test__sandbox_t *b,
+              const char *root_path,
+              const nodes_row_t *expected_rows)
+{
+  svn_sqlite__db_t *sdb;
+  int i;
+  svn_sqlite__stmt_t *stmt;
+
+  svn_boolean_t have_row;
+  apr_hash_t *found_hash = apr_hash_make(b->pool);
+  apr_hash_t *expected_hash = apr_hash_make(b->pool);
+  comparison_baton_t comparison_baton;
+
+  comparison_baton.expected_hash = expected_hash;
+  comparison_baton.found_hash = found_hash;
+  comparison_baton.scratch_pool = b->pool;
+  comparison_baton.errors = NULL;
+
+  /* Fill FOUND_HASH with data from the WC DB. */
+  SVN_ERR(open_wc_db(&sdb, b->wc_abspath, b->pool, b->pool));
+  SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, STMT_SELECT_NODES_INFO));
+  SVN_ERR(svn_sqlite__bindf(stmt, "is", (apr_int64_t)1 /* wc_id */,
+                            root_path));
+  SVN_ERR(svn_sqlite__step(&have_row, stmt));
+  while (have_row)
+    {
+      const char *key;
+      nodes_row_t *row = apr_palloc(b->pool, sizeof(*row));
+      apr_hash_t *props_hash;
+
+      /* Column order is fixed by STMT_SELECT_NODES_INFO (defined in
+         wc-test-queries.sql, not visible here). */
+      row->op_depth = svn_sqlite__column_int(stmt, 0);
+      row->presence = svn_sqlite__column_text(stmt, 1, b->pool);
+      row->local_relpath = svn_sqlite__column_text(stmt, 2, b->pool);
+      row->repo_revnum = svn_sqlite__column_revnum(stmt, 3);
+      row->repo_relpath = svn_sqlite__column_text(stmt, 4, b->pool);
+      row->file_external = !svn_sqlite__column_is_null(stmt, 5);
+      row->moved_to = svn_sqlite__column_text(stmt, 7, b->pool);
+      row->moved_here = svn_sqlite__column_boolean(stmt, 8);
+      SVN_ERR(svn_sqlite__column_properties(&props_hash, stmt, 9,
+                                            b->pool, b->pool));
+      row->props = props_hash_to_text(props_hash, b->pool);
+
+      /* A file external whose column 6 is NULL indicates an incomplete
+         row; record it as a failure rather than silently comparing. */
+      if (row->file_external && svn_sqlite__column_is_null(stmt, 6))
+        comparison_baton.errors
+          = svn_error_createf(SVN_ERR_TEST_FAILED, comparison_baton.errors,
+                              "incomplete {%s}", print_row(row, b->pool));
+
+      key = apr_psprintf(b->pool, "%d %s", row->op_depth, row->local_relpath);
+      apr_hash_set(found_hash, key, APR_HASH_KEY_STRING, row);
+
+      SVN_ERR(svn_sqlite__step(&have_row, stmt));
+    }
+  SVN_ERR(svn_sqlite__reset(stmt));
+
+  /* Fill EXPECTED_HASH with data from EXPECTED_ROWS. */
+  for (i = 0; expected_rows[i].local_relpath != NULL; i++)
+    {
+      const char *key;
+      const nodes_row_t *row = &expected_rows[i];
+
+      key = apr_psprintf(b->pool, "%d %s", row->op_depth, row->local_relpath);
+      apr_hash_set(expected_hash, key, APR_HASH_KEY_STRING, row);
+    }
+
+  /* Compare EXPECTED_HASH with FOUND_HASH and return any errors. */
+  SVN_ERR(svn_hash_diff(expected_hash, found_hash,
+                        compare_nodes_rows, &comparison_baton, b->pool));
+  SVN_ERR(svn_sqlite__close(sdb));
+  return comparison_baton.errors;
+}
+
+/* Shorthand for a tree conflict whose reason/action/path details fall
+   back to the "edited vs. edit" default (conflicted_fb set). */
+#define EDIT_EDIT_TC {svn_wc_conflict_reason_edited, \
+                      svn_wc_conflict_action_edit, \
+                      NULL, TRUE}
+/* Shorthand for "no tree conflict at all". */
+#define NO_TC { 0 }
+
+/* Return a human-readable rendering of ROW, in a form close to the C
+   initializer syntax of CONFLICT_INFO_T, allocated in RESULT_POOL. */
+static const char *
+print_conflict(const conflict_info_t *row,
+               apr_pool_t *result_pool)
+{
+  const char *tc_text;
+
+  if (!row->tc.reason && !row->tc.action && !row->tc.delete_path)
+    {
+      if (row->tc.conflicted_fb)
+        tc_text = "EDIT_EDIT_TC";
+      else
+        tc_text = "NO_TC";
+    }
+  else
+    {
+      const char *action;
+      const char *reason;
+      const char *path;
+
+/* Map an enum value to its own identifier spelled as a string. */
+#define CASE_ENUM_STRVAL(x, y) case y: x = #y; break
+      switch(row->tc.action)
+        {
+          CASE_ENUM_STRVAL(action, svn_wc_conflict_action_edit);
+          CASE_ENUM_STRVAL(action, svn_wc_conflict_action_add);
+          CASE_ENUM_STRVAL(action, svn_wc_conflict_action_delete);
+          CASE_ENUM_STRVAL(action, svn_wc_conflict_action_replace);
+          default:
+            SVN_ERR_MALFUNCTION_NO_RETURN();
+        }
+      switch(row->tc.reason)
+        {
+          CASE_ENUM_STRVAL(reason, svn_wc_conflict_reason_edited);
+          CASE_ENUM_STRVAL(reason, svn_wc_conflict_reason_obstructed);
+          CASE_ENUM_STRVAL(reason, svn_wc_conflict_reason_deleted);
+          CASE_ENUM_STRVAL(reason, svn_wc_conflict_reason_missing);
+          CASE_ENUM_STRVAL(reason, svn_wc_conflict_reason_unversioned);
+          CASE_ENUM_STRVAL(reason, svn_wc_conflict_reason_added);
+          CASE_ENUM_STRVAL(reason, svn_wc_conflict_reason_replaced);
+          CASE_ENUM_STRVAL(reason, svn_wc_conflict_reason_moved_away);
+          CASE_ENUM_STRVAL(reason, svn_wc_conflict_reason_moved_here);
+          default:
+            SVN_ERR_MALFUNCTION_NO_RETURN();
+        }
+
+      if (row->tc.delete_path)
+        path = apr_psprintf(result_pool, ", \"%s\"", row->tc.delete_path);
+      else
+        path = "";
+
+      tc_text = apr_psprintf(result_pool, "{%s, %s%s}", action,
+                             reason, path);
+    }
+
+  return apr_psprintf(result_pool, "\"%s\", %s, %s, %s",
+                      row->local_relpath,
+                      row->text_conflicted ? "TRUE" : "FALSE",
+                      row->prop_conflicted ? "TRUE" : "FALSE",
+                      tc_text);
+}
+
+/* Return TRUE iff EXPECTED and ACTUAL describe the same tree conflict:
+   same action, same reason, same delete_path (NULL-tolerant compare)
+   and same fallback flag. */
+static svn_boolean_t
+tree_conflicts_match(const tree_conflict_info *expected,
+                     const tree_conflict_info *actual)
+{
+  if (expected->action != actual->action)
+    return FALSE;
+  else if (expected->reason != actual->reason)
+    return FALSE;
+  else if (strcmp_null(expected->delete_path, actual->delete_path) != 0)
+    return FALSE;
+  else if (expected->conflicted_fb != actual->conflicted_fb)
+    return FALSE;
+
+  return TRUE;
+}
+
+/* Compare the conflict_info_t entries indexed by KEY in the two hashes
+ * in BATON, appending an error to BATON->errors when they differ or are
+ * not both present.  Companion to compare_nodes_rows().
+ *
+ * Implements svn_hash_diff_func_t. */
+static svn_error_t *
+compare_conflict_info(const void *key, apr_ssize_t klen,
+                      enum svn_hash_diff_key_status status,
+                      void *baton)
+{
+  comparison_baton_t *b = baton;
+  conflict_info_t *expected = apr_hash_get(b->expected_hash, key, klen);
+  conflict_info_t *found = apr_hash_get(b->found_hash, key, klen);
+
+  if (! expected)
+    {
+      b->errors = svn_error_createf(
+                    SVN_ERR_TEST_FAILED, b->errors,
+                    "found {%s}",
+                    print_conflict(found, b->scratch_pool));
+    }
+  else if (! found)
+    {
+      b->errors = svn_error_createf(
+                    SVN_ERR_TEST_FAILED, b->errors,
+                    "expected {%s}",
+                    print_conflict(expected, b->scratch_pool));
+    }
+  else if (expected->text_conflicted != found->text_conflicted
+           || expected->prop_conflicted != found->prop_conflicted
+           || !tree_conflicts_match(&expected->tc, &found->tc))
+    {
+      b->errors = svn_error_createf(
+                    SVN_ERR_TEST_FAILED, b->errors,
+                    "expected {%s}; found {%s}",
+                    print_conflict(expected, b->scratch_pool),
+                    print_conflict(found, b->scratch_pool));
+    }
+
+  /* Don't terminate the comparison: accumulate all differences. */
+  return SVN_NO_ERROR;
+}
+
+/* Check that the conflicts recorded in the WC DB for ROOT_PATH and below
+ * match EXPECTED_CONFLICTS (NULL, or terminated by a null-relpath entry).
+ * The ACTUAL table supplies the conflicted paths; the conflict details
+ * are then decoded via the svn_wc__conflict_read_* APIs.
+ *
+ * Return a chain of errors describing any and all mismatches. */
+static svn_error_t *
+check_db_conflicts(svn_test__sandbox_t *b,
+                   const char *root_path,
+                   const conflict_info_t *expected_conflicts)
+{
+  svn_sqlite__db_t *sdb;
+  int i;
+  svn_sqlite__stmt_t *stmt;
+
+  svn_boolean_t have_row;
+  apr_hash_t *found_hash = apr_hash_make(b->pool);
+  apr_hash_t *expected_hash = apr_hash_make(b->pool);
+  /* NOTE(review): iterpool is never destroyed before return; harmless in
+     a test (it dies with b->pool) but svn_pool_destroy(iterpool) at the
+     end would be tidier. */
+  apr_pool_t *iterpool = svn_pool_create(b->pool);
+  apr_hash_index_t *hi;
+  comparison_baton_t comparison_baton;
+
+  comparison_baton.expected_hash = expected_hash;
+  comparison_baton.found_hash = found_hash;
+  comparison_baton.scratch_pool = b->pool;
+  comparison_baton.errors = NULL;
+
+  /* Fill FOUND_HASH with the conflicted relpaths from the WC DB. */
+  SVN_ERR(open_wc_db(&sdb, b->wc_abspath, b->pool, b->pool));
+  SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, STMT_SELECT_ACTUAL_INFO));
+  SVN_ERR(svn_sqlite__bindf(stmt, "is", (apr_int64_t)1 /* wc_id */,
+                            root_path));
+  SVN_ERR(svn_sqlite__step(&have_row, stmt));
+  while (have_row)
+    {
+      conflict_info_t *row = apr_pcalloc(b->pool, sizeof(*row));
+
+      row->local_relpath = svn_sqlite__column_text(stmt, 0, b->pool);
+
+      svn_hash_sets(found_hash, row->local_relpath, row);
+
+      SVN_ERR(svn_sqlite__step(&have_row, stmt));
+    }
+  SVN_ERR(svn_sqlite__reset(stmt));
+  SVN_ERR(svn_sqlite__close(sdb));
+
+  /* Decode each recorded conflict skel into text/prop/tree flags. */
+  for (hi = apr_hash_first(b->pool, found_hash); hi; hi = apr_hash_next(hi))
+    {
+      svn_skel_t *conflict;
+      conflict_info_t *info = apr_hash_this_val(hi);
+      const char *local_abspath;
+      svn_boolean_t tree_conflicted;
+
+      svn_pool_clear(iterpool);
+
+      local_abspath = svn_dirent_join(b->wc_abspath, info->local_relpath,
+                                      iterpool);
+
+      SVN_ERR(svn_wc__db_read_conflict(&conflict, NULL, NULL,
+                                       b->wc_ctx->db, local_abspath,
+                                       iterpool, iterpool));
+
+      /* Every row in ACTUAL that we selected must carry a conflict. */
+      SVN_TEST_ASSERT(conflict != NULL);
+
+      SVN_ERR(svn_wc__conflict_read_info(NULL, NULL,
+                                         &info->text_conflicted,
+                                         &info->prop_conflicted,
+                                         &tree_conflicted,
+                                         b->wc_ctx->db, local_abspath,
+                                         conflict,
+                                         iterpool, iterpool));
+
+      if (tree_conflicted)
+        {
+          const char *move_src_abspath;
+          SVN_ERR(svn_wc__conflict_read_tree_conflict(&info->tc.reason,
+                                                      &info->tc.action,
+                                                      &move_src_abspath,
+                                                      b->wc_ctx->db,
+                                                      local_abspath,
+                                                      conflict,
+                                                      b->pool, iterpool));
+
+          if (move_src_abspath)
+            info->tc.delete_path =
+              svn_dirent_skip_ancestor(b->wc_abspath, move_src_abspath);
+
+          /* No details at all: record the fallback flag so it can be
+             matched against EDIT_EDIT_TC expectations. */
+          if (!info->tc.reason
+              && !info->tc.action
+              && !info->tc.delete_path)
+            {
+              info->tc.conflicted_fb = TRUE;
+            }
+        }
+    }
+
+  /* Fill EXPECTED_HASH with data from EXPECTED_CONFLICTS. */
+  if (expected_conflicts)
+    for (i = 0; expected_conflicts[i].local_relpath != NULL; i++)
+      {
+        const conflict_info_t *row = &expected_conflicts[i];
+
+        svn_hash_sets(expected_hash, row->local_relpath, row);
+      }
+
+  /* Compare EXPECTED_HASH with FOUND_HASH and return any errors. */
+  SVN_ERR(svn_hash_diff(expected_hash, found_hash,
+                        compare_conflict_info, &comparison_baton, b->pool));
+  return comparison_baton.errors;
+}
+
+/* Callback for svn_wc__db_verify_db_full(): turn every reported
+ * verification problem into an SVN_ERR_WC_CORRUPT error, so any DB
+ * inconsistency fails the test immediately.  Messages differ depending
+ * on whether the problem is tied to a NODES layer (op_depth >= 0). */
+static svn_error_t *
+verify_db_callback(void *baton,
+                   const char *wc_abspath,
+                   const char *local_relpath,
+                   int op_depth,
+                   int id,
+                   const char *msg,
+                   apr_pool_t *scratch_pool)
+{
+  if (op_depth >= 0)
+    return svn_error_createf(SVN_ERR_WC_CORRUPT, NULL,
+                             "Verify: %s: %s (%d): SV%04d %s",
+                             wc_abspath, local_relpath, op_depth, id, msg);
+  else
+    return svn_error_createf(SVN_ERR_WC_CORRUPT, NULL,
+                             "DB-VRFY: %s: %s: SV%04d %s",
+                             wc_abspath, local_relpath, id, msg);
+}
+
+/* Run a full consistency verification of sandbox B's WC DB, failing
+ * (via verify_db_callback) on the first reported problem. */
+static svn_error_t *
+verify_db(svn_test__sandbox_t *b)
+{
+  SVN_ERR(svn_wc__db_verify_db_full(b->wc_ctx->db, b->wc_abspath,
+                                    verify_db_callback, NULL, b->pool));
+
+  return SVN_NO_ERROR;
+}
+
+
+/* ---------------------------------------------------------------------- */
+/* The test functions */
+
+/* Definition of a copy sub-test and its expected results. */
+struct copy_subtest_t
+{
+  /* WC-relative or repo-relative source and destination paths. */
+  const char *from_path;
+  const char *to_path;
+  /* All the expected nodes table rows within the destination sub-tree.
+   * Terminated by an all-zero row. */
+  nodes_row_t expected[20];
+};
+
+/* Source paths used by the copy tests below: existing (base) nodes from
+   the Greek tree, plus freshly added and copied nodes created by
+   wc_wc_copies(). */
+#define source_everything   "A/B"
+
+#define source_base_file    "A/B/lambda"
+#define source_base_dir     "A/B/E"
+
+#define source_added_file   "A/B/file-added"
+#define source_added_dir    "A/B/D-added"
+#define source_added_dir2   "A/B/D-added/D2"
+
+#define source_copied_file  "A/B/lambda-copied"
+#define source_copied_dir   "A/B/E-copied"
+
+/* Check that all kinds of WC-to-WC copies give correct op_depth results:
+ * create a Greek tree, make copies in it, and check the resulting DB rows.
+ * Shared driver: called by test wrappers with a prepared sandbox B. */
+static svn_error_t *
+wc_wc_copies(svn_test__sandbox_t *b)
+{
+  SVN_ERR(sbox_add_and_commit_greek_tree(b));
+
+  /* Create the various kinds of source node which will be copied */
+
+  SVN_ERR(sbox_file_write(b, source_added_file, "New file"));
+  SVN_ERR(sbox_wc_add(b, source_added_file));
+  SVN_ERR(sbox_wc_mkdir(b, source_added_dir));
+  SVN_ERR(sbox_wc_mkdir(b, source_added_dir2));
+
+  SVN_ERR(sbox_wc_copy(b, source_base_file, source_copied_file));
+  SVN_ERR(sbox_wc_copy(b, source_base_dir, source_copied_dir));
+
+  /* Delete some nodes so that we can test copying onto these paths */
+
+  SVN_ERR(sbox_wc_delete(b, "A/D/gamma"));
+  SVN_ERR(sbox_wc_delete(b, "A/D/G"));
+
+  /* Test copying various things */
+
+  {
+    struct copy_subtest_t subtests[] =
+      {
+        /* base file */
+        { source_base_file, "A/C/copy1", {
+            { 3, "", "normal", 1, source_base_file }
+          } },
+
+        /* base dir */
+        { source_base_dir, "A/C/copy2", {
+            { 3, "", "normal", 1, source_base_dir },
+            { 3, "alpha", "normal", 1, "A/B/E/alpha" },
+            { 3, "beta", "normal", 1, "A/B/E/beta" }
+          } },
+
+        /* added file */
+        { source_added_file, "A/C/copy3", {
+            { 3, "", "normal", NO_COPY_FROM }
+          } },
+
+        /* added dir */
+        { source_added_dir, "A/C/copy4", {
+            { 3, "", "normal", NO_COPY_FROM },
+            { 4, "D2", "normal", NO_COPY_FROM }
+          } },
+
+        /* copied file */
+        { source_copied_file, "A/C/copy5", {
+            { 3, "", "normal", 1, source_base_file }
+          } },
+
+        /* copied dir */
+        { source_copied_dir, "A/C/copy6", {
+            { 3, "", "normal", 1, source_base_dir },
+            { 3, "alpha", "normal", 1, "A/B/E/alpha" },
+            { 3, "beta", "normal", 1, "A/B/E/beta" }
+          } },
+
+        /* copied tree with everything in it */
+        { source_everything, "A/C/copy7", {
+            { 3, "", "normal", 1, source_everything },
+            { 3, "lambda", "normal", 1, "A/B/lambda" },
+            { 3, "E", "normal", 1, "A/B/E" },
+            { 3, "E/alpha", "normal", 1, "A/B/E/alpha" },
+            { 3, "E/beta", "normal", 1, "A/B/E/beta" },
+            { 3, "F", "normal", 1, "A/B/F" },
+            /* Each add is an op_root */
+            { 4, "file-added", "normal", NO_COPY_FROM },
+            { 4, "D-added", "normal", NO_COPY_FROM },
+            { 5, "D-added/D2", "normal", NO_COPY_FROM },
+            /* Each copied-copy subtree is an op_root */
+            { 4, "lambda-copied", "normal", 1, source_base_file },
+            { 4, "E-copied", "normal", 1, source_base_dir },
+            { 4, "E-copied/alpha", "normal", 1, "A/B/E/alpha" },
+            { 4, "E-copied/beta", "normal", 1, "A/B/E/beta" }
+          } },
+
+        /* dir onto a schedule-delete file */
+        { source_base_dir, "A/D/gamma", {
+            { 0, "", "normal", 1, "A/D/gamma" },
+            { 3, "", "normal", 1, source_base_dir },
+            { 3, "alpha", "normal", 1, "A/B/E/alpha" },
+            { 3, "beta", "normal", 1, "A/B/E/beta" }
+          } },
+
+        /* file onto a schedule-delete dir */
+        { source_base_file, "A/D/G", {
+            { 0, "", "normal", 1, "A/D/G" },
+            { 0, "pi", "normal", 1, "A/D/G/pi" },
+            { 0, "rho", "normal", 1, "A/D/G/rho" },
+            { 0, "tau", "normal", 1, "A/D/G/tau" },
+            { 3, "", "normal", 1, source_base_file },
+            { 3, "pi", "base-deleted", NO_COPY_FROM },
+            { 3, "rho", "base-deleted", NO_COPY_FROM },
+            { 3, "tau", "base-deleted", NO_COPY_FROM }
+          } },
+
+        { 0 }
+      };
+    struct copy_subtest_t *subtest;
+
+    /* Fix up the expected->local_relpath fields in the subtest data to be
+     * relative to the WC root rather than to the copy destination dir. */
+    for (subtest = subtests; subtest->from_path; subtest++)
+      {
+        nodes_row_t *row;
+        for (row = &subtest->expected[0]; row->local_relpath; row++)
+          row->local_relpath = svn_dirent_join(subtest->to_path,
+                                               row->local_relpath, b->pool);
+      }
+
+    /* Perform each subtest in turn. */
+    for (subtest = subtests; subtest->from_path; subtest++)
+      {
+        SVN_ERR(sbox_wc_copy(b, subtest->from_path, subtest->to_path));
+        SVN_ERR(check_db_rows(b, subtest->to_path, subtest->expected));
+      }
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Check that all kinds of repo-to-WC copies give correct op_depth results:
+ * create a Greek tree, make copies in it, and check the resulting DB rows. */
+static svn_error_t *
+repo_wc_copies(svn_test__sandbox_t *b)
+{
+ SVN_ERR(sbox_add_and_commit_greek_tree(b));
+
+ /* Delete some nodes so that we can test copying onto these paths */
+
+ SVN_ERR(sbox_wc_delete(b, "A/B/lambda"));
+ SVN_ERR(sbox_wc_delete(b, "A/D/gamma"));
+ SVN_ERR(sbox_wc_delete(b, "A/D/G"));
+ SVN_ERR(sbox_wc_delete(b, "A/D/H"));
+
+ /* Test copying various things */
+
+ {
+ /* Each expected row appears to be { op_depth, local_relpath, presence,
+ revision, repo_relpath } -- nodes_row_t is declared earlier in this
+ file; confirm the field order there (TODO confirm). */
+ struct copy_subtest_t subtests[] =
+ {
+ /* file onto nothing */
+ { "iota", "A/C/copy1", {
+ { 3, "", "normal", 1, "iota" },
+ } },
+
+ /* dir onto nothing */
+ { "A/B/E", "A/C/copy2", {
+ { 3, "", "normal", 1, "A/B/E" },
+ { 3, "alpha", "normal", 1, "A/B/E/alpha" },
+ { 3, "beta", "normal", 1, "A/B/E/beta" },
+ } },
+
+ /* file onto a schedule-delete file */
+ { "iota", "A/B/lambda", {
+ { 0, "", "normal", 1, "A/B/lambda" },
+ { 3, "", "normal", 1, "iota" },
+ } },
+
+ /* dir onto a schedule-delete dir */
+ { "A/B/E", "A/D/G", {
+ { 0, "", "normal", 1, "A/D/G" },
+ { 0, "pi", "normal", 1, "A/D/G/pi" },
+ { 0, "rho", "normal", 1, "A/D/G/rho" },
+ { 0, "tau", "normal", 1, "A/D/G/tau" },
+ { 3, "", "normal", 1, "A/B/E" },
+ { 3, "pi", "base-deleted", NO_COPY_FROM },
+ { 3, "rho", "base-deleted", NO_COPY_FROM },
+ { 3, "tau", "base-deleted", NO_COPY_FROM },
+ { 3, "alpha", "normal", 1, "A/B/E/alpha" },
+ { 3, "beta", "normal", 1, "A/B/E/beta" },
+ } },
+
+ /* dir onto a schedule-delete file */
+ { "A/B/E", "A/D/gamma", {
+ { 0, "", "normal", 1, "A/D/gamma" },
+ { 3, "", "normal", 1, "A/B/E" },
+ { 3, "alpha", "normal", 1, "A/B/E/alpha" },
+ { 3, "beta", "normal", 1, "A/B/E/beta" },
+ } },
+
+ /* file onto a schedule-delete dir */
+ { "iota", "A/D/H", {
+ { 0, "", "normal", 1, "A/D/H" },
+ { 0, "chi", "normal", 1, "A/D/H/chi" },
+ { 0, "psi", "normal", 1, "A/D/H/psi" },
+ { 0, "omega", "normal", 1, "A/D/H/omega" },
+ { 3, "", "normal", 1, "iota" },
+ { 3, "chi", "base-deleted", NO_COPY_FROM },
+ { 3, "psi", "base-deleted", NO_COPY_FROM },
+ { 3, "omega", "base-deleted", NO_COPY_FROM },
+ } },
+
+ { 0 }
+ };
+ struct copy_subtest_t *subtest;
+ svn_client_ctx_t *ctx;
+
+ /* Fix up the expected->local_relpath fields in the subtest data to be
+ * relative to the WC root rather than to the copy destination dir. */
+ for (subtest = subtests; subtest->from_path; subtest++)
+ {
+ nodes_row_t *row;
+ for (row = &subtest->expected[0]; row->local_relpath; row++)
+ row->local_relpath = svn_dirent_join(subtest->to_path,
+ row->local_relpath, b->pool);
+ }
+
+ /* Perform each copy. */
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+ for (subtest = subtests; subtest->from_path; subtest++)
+ {
+ /* Both the peg and operative revision are pinned to r1 for every
+ copy source below. */
+ svn_opt_revision_t rev = { svn_opt_revision_number, { 1 } };
+ svn_client_copy_source_t source;
+ apr_array_header_t *sources
+ = apr_array_make(b->pool, 0, sizeof(svn_client_copy_source_t *));
+
+ source.path = svn_path_url_add_component2(b->repos_url,
+ subtest->from_path,
+ b->pool);
+ source.revision = &rev;
+ source.peg_revision = &rev;
+ APR_ARRAY_PUSH(sources, svn_client_copy_source_t *) = &source;
+ SVN_ERR(svn_client_copy6(sources,
+ sbox_wc_path(b, subtest->to_path),
+ FALSE, FALSE, FALSE,
+ NULL, NULL, NULL, ctx, b->pool));
+ }
+
+ /* Check each result. */
+ for (subtest = subtests; subtest->from_path; subtest++)
+ {
+ SVN_ERR(check_db_rows(b, subtest->to_path, subtest->expected));
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Test driver: run the wc_wc_copies() scenarios in a fresh sandbox WC. */
+static svn_error_t *
+test_wc_wc_copies(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "wc_wc_copies", opts, pool));
+
+ return wc_wc_copies(&b);
+}
+
+/* Build a WC full of copy changes via wc_wc_copies(), then revert a
+ * copied subtree and check that no NODES rows remain for it. */
+static svn_error_t *
+test_reverts(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ nodes_row_t no_node_rows_expected[] = { { 0 } };
+
+ SVN_ERR(svn_test__sandbox_create(&b, "reverts", opts, pool));
+
+ SVN_ERR(wc_wc_copies(&b));
+
+
+ /* Implement revert tests below, now that we have a wc with lots of
+ copy-changes */
+
+ SVN_ERR(sbox_wc_revert(&b, "A/B/D-added", svn_depth_infinity));
+ SVN_ERR(check_db_rows(&b, "A/B/D-added", no_node_rows_expected));
+
+ return SVN_NO_ERROR;
+}
+
+/* Delete an added file, a base file, an empty base dir and finally a
+ * whole base subtree, checking op_depth/presence rows after each step. */
+static svn_error_t *
+test_deletes(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "deletes", opts, pool));
+ SVN_ERR(sbox_add_and_commit_greek_tree(&b));
+
+ /* Delete a locally-added file: its single added row simply goes away,
+ so deleting it afterwards is exercised via the add first. */
+ SVN_ERR(sbox_file_write(&b, "A/B/E/new-file", "New file"));
+ SVN_ERR(sbox_wc_add(&b, "A/B/E/new-file"));
+ {
+ nodes_row_t rows[] = {
+ { 4, "A/B/E/new-file", "normal", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A/B/E/new-file", rows));
+ }
+
+ SVN_ERR(sbox_wc_delete(&b, "A/B/E/alpha"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "A/B/E/alpha", "normal", 1, "A/B/E/alpha" },
+ { 4, "A/B/E/alpha", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A/B/E/alpha", rows));
+ }
+
+ SVN_ERR(sbox_wc_delete(&b, "A/B/F"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "A/B/F", "normal", 1, "A/B/F" },
+ { 3, "A/B/F", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A/B/F", rows));
+ }
+
+ /* Deleting the whole of A/B adds a base-deleted row at op_depth 2 for
+ every base node in the subtree. */
+ SVN_ERR(sbox_wc_delete(&b, "A/B"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "A/B", "normal", 1, "A/B", },
+ { 2, "A/B/lambda", "base-deleted", NO_COPY_FROM },
+ { 0, "A/B/lambda", "normal", 1, "A/B/lambda", },
+ { 2, "A/B", "base-deleted", NO_COPY_FROM },
+ { 0, "A/B/E", "normal", 1, "A/B/E", },
+ { 2, "A/B/E", "base-deleted", NO_COPY_FROM },
+ { 0, "A/B/E/alpha", "normal", 1, "A/B/E/alpha" },
+ { 2, "A/B/E/alpha", "base-deleted", NO_COPY_FROM },
+ { 0, "A/B/E/beta", "normal", 1, "A/B/E/beta" },
+ { 2, "A/B/E/beta", "base-deleted", NO_COPY_FROM },
+ { 0, "A/B/F", "normal", 1, "A/B/F", },
+ { 2, "A/B/F", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A/B", rows));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Add a file, add a dir tree, and replace a base file and a base dir,
+ * checking the op_depth rows produced by each kind of add. */
+static svn_error_t *
+test_adds(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "adds", opts, pool));
+ SVN_ERR(sbox_add_and_commit_greek_tree(&b));
+
+ /* add file */
+ SVN_ERR(sbox_file_write(&b, "new-file", "New file"));
+ SVN_ERR(sbox_wc_add(&b, "new-file"));
+ {
+ nodes_row_t rows[] = {
+ { 1, "new-file", "normal", NO_COPY_FROM },
+ { 0 } };
+ SVN_ERR(check_db_rows(&b, "new-file", rows));
+ }
+
+ /* add dir */
+ SVN_ERR(sbox_wc_mkdir(&b, "new-dir"));
+ SVN_ERR(sbox_wc_mkdir(&b, "new-dir/D2"));
+ {
+ nodes_row_t rows[] = {
+ { 1, "new-dir", "normal", NO_COPY_FROM },
+ { 2, "new-dir/D2", "normal", NO_COPY_FROM },
+ { 0 } };
+ SVN_ERR(check_db_rows(&b, "new-dir", rows));
+ }
+
+ /* replace file */
+ SVN_ERR(sbox_wc_delete(&b, "iota"));
+ SVN_ERR(sbox_file_write(&b, "iota", "New iota file"));
+ SVN_ERR(sbox_wc_add(&b, "iota"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "iota", "normal", 1, "iota" },
+ { 1, "iota", "normal", NO_COPY_FROM },
+ { 0 } };
+ SVN_ERR(check_db_rows(&b, "iota", rows));
+ }
+
+ /* replace dir */
+ SVN_ERR(sbox_wc_delete(&b, "A/B/E"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/E"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/E/D2"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "A/B/E", "normal", 1, "A/B/E" },
+ { 0, "A/B/E/alpha", "normal", 1, "A/B/E/alpha" },
+ { 0, "A/B/E/beta", "normal", 1, "A/B/E/beta" },
+ { 3, "A/B/E", "normal", NO_COPY_FROM },
+ { 4, "A/B/E/D2", "normal", NO_COPY_FROM },
+ { 3, "A/B/E/alpha", "base-deleted", NO_COPY_FROM },
+ { 3, "A/B/E/beta", "base-deleted", NO_COPY_FROM },
+ { 0 } };
+ SVN_ERR(check_db_rows(&b, "A/B/E", rows));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Replace a base dir with a file and a base file with a dir, checking
+ * that the replacement rows have the right op_depth and presence. */
+static svn_error_t *
+test_adds_change_kind(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "test_adds_change_kind", opts, pool));
+ SVN_ERR(sbox_add_and_commit_greek_tree(&b));
+
+ /* replace dir with file */
+ SVN_ERR(sbox_wc_delete(&b, "A/B/E"));
+ SVN_ERR(sbox_file_write(&b, "A/B/E", "New E file"));
+ SVN_ERR(sbox_wc_add(&b, "A/B/E"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "A/B/E", "normal", 1, "A/B/E" },
+ { 0, "A/B/E/alpha", "normal", 1, "A/B/E/alpha" },
+ { 0, "A/B/E/beta", "normal", 1, "A/B/E/beta" },
+ { 3, "A/B/E", "normal", NO_COPY_FROM },
+ { 3, "A/B/E/alpha", "base-deleted", NO_COPY_FROM },
+ { 3, "A/B/E/beta", "base-deleted", NO_COPY_FROM },
+ { 0 } };
+ SVN_ERR(check_db_rows(&b, "A/B/E", rows));
+ }
+
+ /* replace file with dir */
+ SVN_ERR(sbox_wc_delete(&b, "iota"));
+ SVN_ERR(sbox_wc_mkdir(&b, "iota"));
+ SVN_ERR(sbox_wc_mkdir(&b, "iota/D2"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "iota", "normal", 1, "iota" },
+ { 1, "iota", "normal", NO_COPY_FROM },
+ { 2, "iota/D2", "normal", NO_COPY_FROM },
+ { 0 } };
+ SVN_ERR(check_db_rows(&b, "iota", rows));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Delete nodes inside a copied tree, re-copy onto the deleted paths and
+ * delete again, checking the layered op_depth rows after each operation. */
+static svn_error_t *
+test_delete_of_copies(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "deletes_of_copies", opts, pool));
+ SVN_ERR(sbox_add_and_commit_greek_tree(&b));
+ SVN_ERR(sbox_wc_copy(&b, "A/B", "A/B-copied"));
+
+ SVN_ERR(sbox_wc_delete(&b, "A/B-copied/E"));
+ {
+ nodes_row_t rows[] = {
+ { 2, "A/B-copied/E", "normal", 1, "A/B/E" },
+ { 2, "A/B-copied/E/alpha", "normal", 1, "A/B/E/alpha" },
+ { 2, "A/B-copied/E/beta", "normal", 1, "A/B/E/beta" },
+ { 3, "A/B-copied/E", "base-deleted", NO_COPY_FROM },
+ { 3, "A/B-copied/E/alpha", "base-deleted", NO_COPY_FROM },
+ { 3, "A/B-copied/E/beta", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A/B-copied/E", rows));
+ }
+
+ /* Copying onto the deleted path replaces the op_depth 3 delete rows
+ with the new copy's rows. */
+ SVN_ERR(sbox_wc_copy(&b, "A/D/G", "A/B-copied/E"));
+ {
+ nodes_row_t rows[] = {
+ { 2, "A/B-copied/E", "normal", 1, "A/B/E" },
+ { 2, "A/B-copied/E/alpha", "normal", 1, "A/B/E/alpha" },
+ { 2, "A/B-copied/E/beta", "normal", 1, "A/B/E/beta" },
+ { 3, "A/B-copied/E", "normal", 1, "A/D/G" },
+ { 3, "A/B-copied/E/alpha", "base-deleted", NO_COPY_FROM },
+ { 3, "A/B-copied/E/beta", "base-deleted", NO_COPY_FROM },
+ { 3, "A/B-copied/E/pi", "normal", 1, "A/D/G/pi" },
+ { 3, "A/B-copied/E/rho", "normal", 1, "A/D/G/rho" },
+ { 3, "A/B-copied/E/tau", "normal", 1, "A/D/G/tau" },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A/B-copied/E", rows));
+ }
+
+ SVN_ERR(sbox_wc_delete(&b, "A/B-copied/E/rho"));
+ {
+ nodes_row_t rows[] = {
+ { 2, "A/B-copied/E", "normal", 1, "A/B/E" },
+ { 2, "A/B-copied/E/alpha", "normal", 1, "A/B/E/alpha" },
+ { 2, "A/B-copied/E/beta", "normal", 1, "A/B/E/beta" },
+ { 3, "A/B-copied/E", "normal", 1, "A/D/G" },
+ { 3, "A/B-copied/E/alpha", "base-deleted", NO_COPY_FROM },
+ { 3, "A/B-copied/E/beta", "base-deleted", NO_COPY_FROM },
+ { 3, "A/B-copied/E/pi", "normal", 1, "A/D/G/pi" },
+ { 3, "A/B-copied/E/rho", "normal", 1, "A/D/G/rho" },
+ { 3, "A/B-copied/E/tau", "normal", 1, "A/D/G/tau" },
+ { 4, "A/B-copied/E/rho", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A/B-copied/E", rows));
+ }
+
+ SVN_ERR(sbox_wc_delete(&b, "A/B-copied/E"));
+ {
+ nodes_row_t rows[] = {
+ { 2, "A/B-copied/E", "normal", 1, "A/B/E" },
+ { 2, "A/B-copied/E/alpha", "normal", 1, "A/B/E/alpha" },
+ { 2, "A/B-copied/E/beta", "normal", 1, "A/B/E/beta" },
+ { 3, "A/B-copied/E", "base-deleted", NO_COPY_FROM },
+ { 3, "A/B-copied/E/alpha", "base-deleted", NO_COPY_FROM },
+ { 3, "A/B-copied/E/beta", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A/B-copied/E", rows));
+ }
+
+ SVN_ERR(sbox_wc_copy(&b, "A/B", "A/B-copied/E"));
+
+ SVN_ERR(sbox_wc_delete(&b, "A/B-copied/E/F"));
+ {
+ nodes_row_t rows[] = {
+ { 3, "A/B-copied/E/F", "normal", 1, "A/B/F" },
+ { 4, "A/B-copied/E/F", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A/B-copied/E/F", rows));
+ }
+
+ /* Deleting the copy root removes every row for the copied tree. */
+ SVN_ERR(sbox_wc_delete(&b, "A/B-copied"));
+ {
+ nodes_row_t rows[] = { { 0 } };
+ SVN_ERR(check_db_rows(&b, "A/B-copied", rows));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Delete and replace a base dir that contains a not-present child
+ * (beta, deleted and committed in r2), checking that the not-present
+ * row never gains a base-deleted or copy layer. */
+static svn_error_t *
+test_delete_with_base(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "deletes_with_base", opts, pool));
+ SVN_ERR(sbox_add_and_commit_greek_tree(&b));
+ SVN_ERR(sbox_wc_delete(&b, "A/B/E/beta"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ SVN_ERR(sbox_wc_delete(&b, "A/B/E"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "A/B/E", "normal", 1, "A/B/E"},
+ { 0, "A/B/E/alpha", "normal", 1, "A/B/E/alpha"},
+ { 0, "A/B/E/beta", "not-present", 2, "A/B/E/beta"},
+ { 3, "A/B/E", "base-deleted", NO_COPY_FROM},
+ { 3, "A/B/E/alpha", "base-deleted", NO_COPY_FROM},
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A/B/E", rows));
+ }
+
+ /* Replace the deleted dir with a copy and add copied children. */
+ SVN_ERR(sbox_wc_copy(&b, "A/B/F", "A/B/E"));
+ SVN_ERR(sbox_wc_copy(&b, "A/mu", "A/B/E/alpha"));
+ SVN_ERR(sbox_wc_copy(&b, "A/mu", "A/B/E/beta"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "A/B/E", "normal", 1, "A/B/E"},
+ { 0, "A/B/E/alpha", "normal", 1, "A/B/E/alpha"},
+ { 0, "A/B/E/beta", "not-present", 2, "A/B/E/beta"},
+ { 3, "A/B/E", "base-deleted", NO_COPY_FROM},
+ { 3, "A/B/E/alpha", "base-deleted", NO_COPY_FROM},
+ { 3, "A/B/E", "normal", 1, "A/B/F"},
+ { 4, "A/B/E/alpha", "normal", 1, "A/mu"},
+ { 4, "A/B/E/beta", "normal", 1, "A/mu"},
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A/B/E", rows));
+ }
+
+ /* Deleting again drops the replacement layers back to plain delete. */
+ SVN_ERR(sbox_wc_delete(&b, "A/B/E"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "A/B/E", "normal", 1, "A/B/E"},
+ { 0, "A/B/E/alpha", "normal", 1, "A/B/E/alpha"},
+ { 0, "A/B/E/beta", "not-present", 2, "A/B/E/beta"},
+ { 3, "A/B/E", "base-deleted", NO_COPY_FROM},
+ { 3, "A/B/E/alpha", "base-deleted", NO_COPY_FROM},
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A/B/E", rows));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Test driver: run the repo_wc_copies() scenarios in a fresh sandbox WC. */
+static svn_error_t *
+test_repo_wc_copies(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "repo_wc_copies", opts, pool));
+
+ return repo_wc_copies(&b);
+}
+
+/* Replace a base dir locally, then update forward over incoming children
+ * and back again, checking that the local replacement survives both
+ * updates with the correct op_depth rows. */
+static svn_error_t *
+test_delete_with_update(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "delete_with_update", opts, pool));
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ /* Local replacement of A at r1. */
+ SVN_ERR(sbox_wc_delete(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "A", "normal", 1, "A"},
+ { 1, "A", "normal", NO_COPY_FROM},
+ { 2, "A/B", "normal", NO_COPY_FROM},
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A", rows));
+ }
+ SVN_ERR(sbox_wc_update(&b, "", 2));
+ {
+ nodes_row_t rows[] = {
+ { 0, "A", "normal", 2, "A"},
+ { 0, "A/B", "normal", 2, "A/B"},
+ { 0, "A/B/C", "normal", 2, "A/B/C"},
+ { 1, "A", "normal", NO_COPY_FROM},
+ { 1, "A/B", "base-deleted", NO_COPY_FROM},
+ { 1, "A/B/C", "base-deleted", NO_COPY_FROM},
+ { 2, "A/B", "normal", NO_COPY_FROM},
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A", rows));
+ }
+ /* NOTE(review): the update presumably raises a tree conflict on the
+ replaced A which must be resolved before updating back -- confirm. */
+ SVN_ERR(sbox_wc_resolved(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ {
+ nodes_row_t rows[] = {
+ { 0, "A", "normal", 1, "A"},
+ { 1, "A", "normal", NO_COPY_FROM},
+ { 2, "A/B", "normal", NO_COPY_FROM},
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A", rows));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Clear the sandbox B's node rows (STMT_DELETE_NODES) and insert the
+ * rows described by the null-terminated array NODES, writing straight
+ * into wc.db with prepared SQL statements. */
+static svn_error_t *
+insert_dirs(svn_test__sandbox_t *b,
+ nodes_row_t *nodes)
+{
+ svn_sqlite__db_t *sdb;
+ svn_sqlite__stmt_t *stmt;
+
+ SVN_ERR(open_wc_db(&sdb, b->wc_abspath, b->pool, b->pool));
+
+ SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, STMT_DELETE_NODES));
+ SVN_ERR(svn_sqlite__step_done(stmt));
+
+ while(nodes->local_relpath)
+ {
+ SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, STMT_INSERT_NODE));
+ /* Bind order: local_relpath, op_depth, presence, repo_relpath,
+ revision, parent_relpath (NULL for the WC root itself). */
+ SVN_ERR(svn_sqlite__bindf(stmt, "sdssrs",
+ nodes->local_relpath,
+ nodes->op_depth,
+ nodes->presence,
+ nodes->repo_relpath,
+ nodes->repo_revnum,
+ nodes->local_relpath[0]
+ ? svn_relpath_dirname(nodes->local_relpath,
+ b->pool)
+ : NULL));
+
+ if (nodes->moved_to)
+ SVN_ERR(svn_sqlite__bind_text(stmt, 7, nodes->moved_to));
+ if (nodes->moved_here)
+ SVN_ERR(svn_sqlite__bind_int(stmt, 8, 1));
+ if (nodes->props)
+ {
+ /* Build a dummy property hash where each comma-separated name
+ maps to a same-named string value. */
+ int i;
+ apr_hash_t *props = apr_hash_make(b->pool);
+ apr_array_header_t *names = svn_cstring_split(nodes->props, ",",
+ TRUE, b->pool);
+
+ for (i = 0; i < names->nelts; i++)
+ {
+ const char *name = APR_ARRAY_IDX(names, i, const char *);
+ svn_hash_sets(props, name, svn_string_create(name, b->pool));
+ }
+
+ SVN_ERR(svn_sqlite__bind_properties(stmt, 9, props, b->pool));
+ }
+ else if (nodes->repo_relpath
+ && strcmp(nodes->presence, "normal") == 0)
+ {
+ /* "normal" rows get an empty skel "()" as their property blob. */
+ SVN_ERR(svn_sqlite__bind_text(stmt, 9, "()"));
+ }
+
+ /* File externals? */
+
+ SVN_ERR(svn_sqlite__step_done(stmt));
+ ++nodes;
+ }
+
+ SVN_ERR(svn_sqlite__close(sdb));
+
+ return SVN_NO_ERROR;
+}
+
+/* Return the number of rows in the null-terminated array ROWS
+ (the terminator row is not counted). */
+static apr_int64_t count_rows(nodes_row_t *rows)
+{
+ nodes_row_t *first = rows;
+ while(rows->local_relpath)
+ ++rows;
+ return rows - first;
+}
+
+/* Run one scenario: seed B's DB with BEFORE, add a base directory node
+ * at LOCAL_RELPATH@REVISION, verify the rows are BEFORE followed by
+ * ADDED, then remove the base node again and verify we are back to
+ * exactly BEFORE. Both arrays are null-terminated. */
+static svn_error_t *
+base_dir_insert_remove(svn_test__sandbox_t *b,
+ const char *local_relpath,
+ svn_revnum_t revision,
+ nodes_row_t *before,
+ nodes_row_t *added)
+{
+ nodes_row_t *after;
+ const char *dir_abspath = sbox_wc_path(b, local_relpath);
+ int i;
+ apr_int64_t num_before = count_rows(before), num_added = count_rows(added);
+
+ SVN_ERR(insert_dirs(b, before));
+
+ SVN_ERR(svn_wc__db_base_add_directory(b->wc_ctx->db, dir_abspath,
+ dir_abspath,
+ local_relpath, b->repos_url,
+ "not-even-a-uuid", revision,
+ apr_hash_make(b->pool), revision,
+ 0, NULL, NULL, svn_depth_infinity,
+ NULL, FALSE, NULL, NULL, NULL, NULL,
+ b->pool));
+
+ /* Expected rows after the add = BEFORE ++ ADDED, null-terminated. */
+ after = apr_palloc(b->pool, sizeof(*after) * (apr_size_t)(num_before + num_added + 1));
+ for (i = 0; i < num_before; ++i)
+ after[i] = before[i];
+ for (i = 0; i < num_added; ++i)
+ after[num_before+i] = added[i];
+ after[num_before+num_added].local_relpath = NULL;
+
+ SVN_ERR(check_db_rows(b, "", after));
+
+ SVN_ERR(svn_wc__db_base_remove(b->wc_ctx->db, dir_abspath,
+ FALSE, FALSE, FALSE,
+ SVN_INVALID_REVNUM,
+ NULL, NULL, b->pool));
+ /* Removing a base node may queue work items; run them before checking. */
+ SVN_ERR(svn_wc__wq_run(b->wc_ctx->db, dir_abspath,
+ NULL, NULL, b->pool));
+
+ SVN_ERR(check_db_rows(b, "", before));
+
+ return SVN_NO_ERROR;
+}
+
+/* Exercise base_dir_insert_remove() across many NODES layouts. The
+ * ASCII table above each case shows the op_depth layers per path:
+ * left of the gap is the state before the insert, right is after. */
+static svn_error_t *
+test_base_dir_insert_remove(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "base_dir_insert_remove", opts, pool));
+
+ {
+ /* / normal / normal
+ A normal A normal
+ A/B normal
+ */
+ nodes_row_t before[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 0 }
+ };
+ nodes_row_t added[] = {
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 0 }
+ };
+ SVN_ERR(base_dir_insert_remove(&b, "A/B", 2, before, added));
+ }
+ {
+ /* / normal / normal
+ A normal base-del A normal base-del
+ A/B normal base-del
+ */
+ nodes_row_t before[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 1, "A", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ nodes_row_t added[] = {
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 1, "A/B", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(base_dir_insert_remove(&b, "A/B", 2, before, added));
+ }
+ {
+ /* / normal / normal
+ A normal normal A normal normal
+ A/B normal base-del
+ */
+ nodes_row_t before[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 1, "A", "normal", 1, "X" },
+ { 0 }
+ };
+ nodes_row_t added[] = {
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 1, "A/B", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(base_dir_insert_remove(&b, "A/B", 2, before, added));
+ }
+ {
+ /* / normal / normal
+ A normal normal A normal normal
+ A/B normal not-pres A/B normal not-pres
+ A/B/C normal base-del
+ */
+ nodes_row_t before[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 1, "A", "normal", 1, "X" },
+ { 1, "A/B", "not-present", NO_COPY_FROM },
+ { 0 }
+ };
+ nodes_row_t added[] = {
+ { 0, "A/B/C", "normal", 2, "A/B/C" },
+ { 1, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(base_dir_insert_remove(&b, "A/B/C", 2, before, added));
+ }
+ {
+ /* / normal / normal
+ A normal normal A normal normal
+ A/B normal A/B normal normal
+ */
+ nodes_row_t before[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 1, "A", "normal", 1, "X" },
+ { 1, "A/B", "normal", NO_COPY_FROM },
+ { 0 }
+ };
+ nodes_row_t added[] = {
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 0 }
+ };
+ SVN_ERR(base_dir_insert_remove(&b, "A/B", 2, before, added));
+ }
+ {
+ /* / normal / normal
+ A normal normal A normal normal
+ A/B not-pres A/B normal not-pres
+ */
+ nodes_row_t before[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 1, "A", "normal", 1, "X" },
+ { 1, "A/B", "not-present", NO_COPY_FROM },
+ { 0 }
+ };
+ nodes_row_t added[] = {
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 0 }
+ };
+ SVN_ERR(base_dir_insert_remove(&b, "A/B", 2, before, added));
+ }
+ {
+ /* / normal / normal
+ A normal normal A normal normal
+ A/B normal A/B normal base-del normal
+ */
+ nodes_row_t before[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 1, "A", "normal", 1, "X" },
+ { 2, "A/B", "normal", 1, "Y" },
+ { 0 }
+ };
+ nodes_row_t added[] = {
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 1, "A/B", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(base_dir_insert_remove(&b, "A/B", 2, before, added));
+ }
+ {
+ /* / normal / normal
+ A normal normal A normal normal
+ A/B normal base-del normal A/B normal base-del normal
+ A/B/C normal A/B/C normal base-del normal
+ */
+ nodes_row_t before[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 1, "A", "normal", 1, "X" },
+ { 1, "A/B", "base-deleted", NO_COPY_FROM },
+ { 2, "A/B", "normal", 1, "Y" },
+ { 0 }
+ };
+ nodes_row_t added[] = {
+ { 0, "A/B/C", "normal", 2, "A/B/C" },
+ { 1, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(base_dir_insert_remove(&b, "A/B/C", 2, before, added));
+ }
+ {
+ /* / normal / normal
+ A normal normal A normal normal
+ A/B normal not-pres normal A/B normal not-pres normal
+ A/B/C normal A/B/C normal base-del normal
+ */
+ nodes_row_t before[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 1, "A", "normal", 1, "X" },
+ { 1, "A/B", "not-present", NO_COPY_FROM },
+ { 2, "A/B", "normal", 1, "Y" },
+ { 2, "A/B/C", "normal", NO_COPY_FROM },
+ { 0 }
+ };
+ nodes_row_t added[] = {
+ { 0, "A/B/C", "normal", 2, "A/B/C" },
+ { 1, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(base_dir_insert_remove(&b, "A/B/C", 2, before, added));
+ }
+ {
+ /* / normal /
+ A normal normal A normal normal
+ A/B normal base-del normal A/B normal base-del normal
+ A/B/C not-pres A/B/C normal base-del not-pres
+ */
+ nodes_row_t before[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 1, "A", "normal", 1, "X" },
+ { 1, "A/B", "base-deleted", NO_COPY_FROM },
+ { 2, "A/B", "normal", 1, "Y" },
+ { 2, "A/B/C", "not-present", NO_COPY_FROM },
+ { 0 }
+ };
+ nodes_row_t added[] = {
+ { 0, "A/B/C", "normal", 2, "A/B/C" },
+ { 1, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(base_dir_insert_remove(&b, "A/B/C", 2, before, added));
+ }
+ {
+ /* / normal /
+ A normal normal A normal normal
+ A/B normal not-pres normal A/B normal not-pres normal
+ A/B/C not-pres A/B/C normal base-del not-pres
+ */
+ nodes_row_t before[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 1, "A", "normal", 1, "X" },
+ { 1, "A/B", "not-present", NO_COPY_FROM },
+ { 2, "A/B", "normal", 1, "Y" },
+ { 2, "A/B/C", "not-present", NO_COPY_FROM },
+ { 0 }
+ };
+ nodes_row_t added[] = {
+ { 0, "A/B/C", "normal", 2, "A/B/C" },
+ { 1, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(base_dir_insert_remove(&b, "A/B/C", 2, before, added));
+ }
+ {
+ /* / norm /
+ A norm norm A norm norm
+ A/B norm not-p norm A/B norm not-p norm
+ A/B/C norm A/B/C norm b-del norm
+ */
+ nodes_row_t before[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 1, "A", "normal", 1, "X" },
+ { 1, "A/B", "not-present", NO_COPY_FROM },
+ { 2, "A/B", "normal", 1, "Y" },
+ { 3, "A/B/C", "normal", NO_COPY_FROM },
+ { 0 }
+ };
+ nodes_row_t added[] = {
+ { 0, "A/B/C", "normal", 2, "A/B/C" },
+ { 1, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(base_dir_insert_remove(&b, "A/B/C", 2, before, added));
+ }
+ {
+ /* / norm / norm
+ A norm A norm
+ A/B norm A/B norm
+ A/B/C norm - - norm A/B/C norm - - norm
+ A/B/C/D norm - - b-del
+ */
+ nodes_row_t before[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 0, "A/B/C", "normal", 2, "A/B/C" },
+ { 3, "A/B/C", "normal", NO_COPY_FROM },
+ { 0 }
+ };
+ nodes_row_t added[] = {
+ { 0, "A/B/C/D", "normal", 2, "A/B/C/D" },
+ { 3, "A/B/C/D", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(base_dir_insert_remove(&b, "A/B/C/D", 2, before, added));
+ }
+ {
+ /* / norm / norm
+ A norm A norm
+ A/B norm A/B norm
+ A/B/C norm - - norm A/B/C norm - - norm
+ A/B/C/D norm A/B/C/D norm - - b-del norm
+ */
+ nodes_row_t before[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 0, "A/B/C", "normal", 2, "A/B/C" },
+ { 3, "A/B/C", "normal", NO_COPY_FROM },
+ { 4, "A/B/C/D", "normal", NO_COPY_FROM },
+ { 0 }
+ };
+ nodes_row_t added[] = {
+ { 0, "A/B/C/D", "normal", 2, "A/B/C/D" },
+ { 3, "A/B/C/D", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(base_dir_insert_remove(&b, "A/B/C/D", 2, before, added));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Seed the DB directly, call svn_wc__db_op_make_copy() on A, and check
+ * that every base node under A gains a matching op_depth 1 copy layer
+ * while existing higher layers are preserved. */
+static svn_error_t *
+test_db_make_copy(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "make_copy", opts, pool));
+
+ {
+ /* / norm -
+ A norm -
+ A/B norm - norm
+ A/B/C norm - base-del norm
+ A/F norm - norm
+ A/F/G norm - norm
+ A/F/H norm - not-pres
+ A/F/E norm - base-del
+ A/X norm -
+ A/X/Y incomplete -
+ */
+ nodes_row_t before[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 0, "A/B/C", "normal", 2, "A/B/C" },
+ { 0, "A/F", "normal", 2, "A/F" },
+ { 0, "A/F/G", "normal", 2, "A/F/G" },
+ { 0, "A/F/H", "normal", 2, "A/F/H" },
+ { 0, "A/F/E", "normal", 2, "A/F/E" },
+ { 0, "A/X", "normal", 2, "A/X" },
+ { 0, "A/X/Y", "incomplete", 2, "A/X/Y" },
+ { 2, "A/B", "normal", NO_COPY_FROM },
+ { 2, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 3, "A/B/C", "normal", NO_COPY_FROM },
+ { 2, "A/F", "normal", 1, "S2" },
+ { 2, "A/F/G", "normal", 1, "S2/G" },
+ { 2, "A/F/H", "not-present", 1, "S2/H" },
+ { 2, "A/F/E", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ /* / norm -
+ A norm norm
+ A/B norm norm norm
+ A/B/C norm norm base-del norm
+ A/F norm norm norm
+ A/F/G norm norm norm
+ A/F/H norm norm not-pres
+ A/F/E norm norm base-del
+ A/X norm norm
+ A/X/Y incomplete incomplete
+ */
+ nodes_row_t after[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 0, "A/B/C", "normal", 2, "A/B/C" },
+ { 0, "A/F", "normal", 2, "A/F" },
+ { 0, "A/F/G", "normal", 2, "A/F/G" },
+ { 0, "A/F/H", "normal", 2, "A/F/H" },
+ { 0, "A/F/E", "normal", 2, "A/F/E" },
+ { 0, "A/X", "normal", 2, "A/X" },
+ { 0, "A/X/Y", "incomplete", 2, "A/X/Y" },
+ { 1, "A", "normal", 2, "A" },
+ { 1, "A/B", "normal", 2, "A/B" },
+ { 1, "A/B/C", "normal", 2, "A/B/C" },
+ { 1, "A/F", "normal", 2, "A/F" },
+ { 1, "A/F/G", "normal", 2, "A/F/G" },
+ { 1, "A/F/H", "normal", 2, "A/F/H" },
+ { 1, "A/F/E", "normal", 2, "A/F/E" },
+ { 1, "A/X", "normal", 2, "A/X" },
+ { 1, "A/X/Y", "incomplete", 2, "A/X/Y" },
+ { 2, "A/B", "normal", NO_COPY_FROM },
+ /* NOTE(review): this row duplicates the previous one -- looks like
+ a copy/paste duplicate; presumably harmless to check_db_rows,
+ but confirm against its matching logic. */
+ { 2, "A/B", "normal", NO_COPY_FROM },
+ { 2, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 2, "A/F", "normal", 1, "S2" },
+ { 2, "A/F/E", "base-deleted", NO_COPY_FROM },
+ { 2, "A/F/G", "normal", 1, "S2/G" },
+ { 2, "A/F/H", "not-present", 1, "S2/H" },
+ { 3, "A/B/C", "normal", NO_COPY_FROM },
+ { 0 }
+ };
+
+ SVN_ERR(insert_dirs(&b, before));
+ SVN_ERR(svn_wc__db_op_make_copy(b.wc_ctx->db, sbox_wc_path(&b, "A"),
+ NULL, NULL, pool));
+
+ SVN_ERR(check_db_rows(&b, "", after));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Move a dir, then move its parent, checking moved_to/MOVED_HERE rows
+ * including the nested move inside the moved parent. */
+static svn_error_t *
+test_wc_move(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "wc_move", opts, pool));
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ SVN_ERR(sbox_wc_move(&b, "A/B/C", "A/B/C-move"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "", "normal", 1, "" },
+ { 0, "A", "normal", 1, "A" },
+ { 0, "A/B", "normal", 1, "A/B" },
+ { 0, "A/B/C", "normal", 1, "A/B/C"},
+ { 3, "A/B/C", "base-deleted", NO_COPY_FROM, "A/B/C-move" },
+ { 3, "A/B/C-move", "normal", 1, "A/B/C", MOVED_HERE },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "", rows));
+ }
+
+ /* Moving A/B carries the earlier C move along inside the destination. */
+ SVN_ERR(sbox_wc_move(&b, "A/B", "A/B-move"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "", "normal", 1, "" },
+ { 0, "A", "normal", 1, "A" },
+ { 0, "A/B", "normal", 1, "A/B"},
+ { 0, "A/B/C", "normal", 1, "A/B/C"},
+ { 2, "A/B", "base-deleted", NO_COPY_FROM, "A/B-move" },
+ { 2, "A/B/C", "base-deleted", NO_COPY_FROM},
+ { 2, "A/B-move", "normal", 1, "A/B", MOVED_HERE },
+ { 2, "A/B-move/C", "normal", 1, "A/B/C", MOVED_HERE },
+ { 3, "A/B-move/C", "base-deleted", NO_COPY_FROM, "A/B-move/C-move" },
+ { 3, "A/B-move/C-move", "normal", 1, "A/B/C", MOVED_HERE },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "", rows));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Copy a mixed-revision tree (A@1, A/B@2, A/B/C@3) and check that each
+ * revision boundary produces a not-present marker in the parent layer
+ * plus a deeper layer for the child's own revision. */
+static svn_error_t *
+test_mixed_rev_copy(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "mixed_rev_copy", opts, pool));
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ SVN_ERR(sbox_wc_copy(&b, "A", "X"));
+ {
+ nodes_row_t rows[] = {
+ { 1, "X", "normal", 1, "A" },
+ { 1, "X/B", "not-present", 2, "A/B" },
+ { 2, "X/B", "normal", 2, "A/B" },
+ { 2, "X/B/C", "not-present", 3, "A/B/C" },
+ { 3, "X/B/C", "normal", 3, "A/B/C" },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "X", rows));
+ }
+
+ SVN_ERR(sbox_wc_copy(&b, "A/B", "X/Y"));
+ {
+ nodes_row_t rows[] = {
+ { 1, "X", "normal", 1, "A" },
+ { 1, "X/B", "not-present", 2, "A/B" },
+ { 2, "X/B", "normal", 2, "A/B" },
+ { 2, "X/B/C", "not-present", 3, "A/B/C" },
+ { 3, "X/B/C", "normal", 3, "A/B/C" },
+ { 2, "X/Y", "normal", 2, "A/B" },
+ { 2, "X/Y/C", "not-present", 3, "A/B/C" },
+ { 3, "X/Y/C", "normal", 3, "A/B/C" },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "X", rows));
+ }
+
+ SVN_ERR(sbox_wc_delete(&b, "X/B/C"));
+ {
+ nodes_row_t rows[] = {
+ { 1, "X", "normal", 1, "A" },
+ { 1, "X/B", "not-present", 2, "A/B" },
+ { 2, "X/B", "normal", 2, "A/B" },
+ { 2, "X/B/C", "not-present", 3, "A/B/C" },
+ { 2, "X/Y", "normal", 2, "A/B" },
+ { 2, "X/Y/C", "not-present", 3, "A/B/C" },
+ { 3, "X/Y/C", "normal", 3, "A/B/C" },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "X", rows));
+ }
+
+ /* Update A/B/C back to r0 (nonexistent) and copy again: the child now
+ appears only as a not-present marker at r0. */
+ SVN_ERR(sbox_wc_delete(&b, "X"));
+ SVN_ERR(sbox_wc_update(&b, "A/B/C", 0));
+ {
+ nodes_row_t rows[] = {
+ { 0, "", "normal", 0, "" },
+ { 0, "A", "normal", 1, "A" },
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 0, "A/B/C", "not-present", 0, "A/B/C" },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "", rows));
+ }
+
+ SVN_ERR(sbox_wc_copy(&b, "A", "X"));
+ {
+ nodes_row_t rows[] = {
+ { 1, "X", "normal", 1, "A" },
+ { 1, "X/B", "not-present", 2, "A/B" },
+ { 2, "X/B", "normal", 2, "A/B" },
+ { 2, "X/B/C", "not-present", 0, "A/B/C" },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "X", rows));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Replace a base dir with a copy of a structurally different tree
+ * (X/B, whose F was moved to H), then delete the replacement and check
+ * only base-deleted rows for the original base nodes remain. */
+static svn_error_t *
+test_delete_of_replace(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "delete_of_replace", opts, pool));
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/F"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/F/K"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/G"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/G/K"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ /* Create X as a copy of A with F moved to H, committed in r2. */
+ SVN_ERR(sbox_wc_copy(&b, "A", "X"));
+ SVN_ERR(sbox_wc_move(&b, "X/B/C/F", "X/B/C/H"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 2));
+
+ SVN_ERR(sbox_wc_delete(&b, "A/B"));
+ SVN_ERR(sbox_wc_copy(&b, "X/B", "A/B"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "A", "normal", 2, "A" },
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 0, "A/B/C", "normal", 2, "A/B/C" },
+ { 0, "A/B/C/F", "normal", 2, "A/B/C/F" },
+ { 0, "A/B/C/F/K", "normal", 2, "A/B/C/F/K" },
+ { 0, "A/B/C/G", "normal", 2, "A/B/C/G" },
+ { 0, "A/B/C/G/K", "normal", 2, "A/B/C/G/K" },
+ { 2, "A/B", "normal", 2, "X/B" },
+ { 2, "A/B/C", "normal", 2, "X/B/C" },
+ { 2, "A/B/C/F", "base-deleted", NO_COPY_FROM },
+ { 2, "A/B/C/F/K", "base-deleted", NO_COPY_FROM },
+ { 2, "A/B/C/G", "normal", 2, "X/B/C/G" },
+ { 2, "A/B/C/G/K", "normal", 2, "X/B/C/G/K" },
+ { 2, "A/B/C/H", "normal", 2, "X/B/C/H" },
+ { 2, "A/B/C/H/K", "normal", 2, "X/B/C/H/K" },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A", rows));
+ }
+
+ SVN_ERR(sbox_wc_delete(&b, "A/B"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "A", "normal", 2, "A" },
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 0, "A/B/C", "normal", 2, "A/B/C" },
+ { 0, "A/B/C/F", "normal", 2, "A/B/C/F" },
+ { 0, "A/B/C/F/K", "normal", 2, "A/B/C/F/K" },
+ { 0, "A/B/C/G", "normal", 2, "A/B/C/G" },
+ { 0, "A/B/C/G/K", "normal", 2, "A/B/C/G/K" },
+ { 2, "A/B", "base-deleted", NO_COPY_FROM },
+ { 2, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 2, "A/B/C/F", "base-deleted", NO_COPY_FROM },
+ { 2, "A/B/C/F/K", "base-deleted", NO_COPY_FROM },
+ { 2, "A/B/C/G", "base-deleted", NO_COPY_FROM },
+ { 2, "A/B/C/G/K", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A", rows));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Delete and replace a tree that contains not-present children
+ * (created by updating individual children to r0).  Checks that the
+ * not-present BASE rows survive the delete, the replacing copy brings
+ * its own not-present rows, and a second delete returns to the first
+ * deleted state. */
+static svn_error_t *
+test_del_replace_not_present(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "del_replace_not_present", opts, pool));
+ /* r1: A/B with children X, Y, Z. */
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/X"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/Y"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/Z"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ /* r2: X is a copy of A plus a new child X/B/W. */
+ SVN_ERR(sbox_wc_copy(&b, "A", "X"));
+ SVN_ERR(sbox_wc_mkdir(&b, "X/B/W"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ /* Updating a child to r0 leaves a not-present row for it in BASE. */
+ SVN_ERR(sbox_wc_update(&b, "", 2));
+ SVN_ERR(sbox_wc_update(&b, "A/B/X", 0));
+ SVN_ERR(sbox_wc_update(&b, "A/B/Y", 0));
+ SVN_ERR(sbox_wc_update(&b, "X/B/W", 0));
+ SVN_ERR(sbox_wc_update(&b, "X/B/Y", 0));
+ SVN_ERR(sbox_wc_update(&b, "X/B/Z", 0));
+
+ /* Delete A: only the present BASE nodes get base-deleted rows; the
+ * not-present rows need no working-layer counterpart. */
+ SVN_ERR(sbox_wc_delete(&b, "A"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "A", "normal", 2, "A" },
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 0, "A/B/X", "not-present", 0, "A/B/X" },
+ { 0, "A/B/Y", "not-present", 0, "A/B/Y" },
+ { 0, "A/B/Z", "normal", 2, "A/B/Z" },
+ { 1, "A", "base-deleted", NO_COPY_FROM },
+ { 1, "A/B", "base-deleted", NO_COPY_FROM },
+ { 1, "A/B/Z", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A", rows));
+ }
+
+ /* Replace A by a copy of X: the copy's own not-present children
+ * (W, Y, Z were updated to r0 in X) appear at op-depth 1. */
+ SVN_ERR(sbox_wc_copy(&b, "X", "A"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "A", "normal", 2, "A" },
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 0, "A/B/X", "not-present", 0, "A/B/X" },
+ { 0, "A/B/Y", "not-present", 0, "A/B/Y" },
+ { 0, "A/B/Z", "normal", 2, "A/B/Z" },
+ { 1, "A", "normal", 2, "X" },
+ { 1, "A/B", "normal", 2, "X/B" },
+ { 1, "A/B/W", "not-present", 0, "X/B/W" },
+ { 1, "A/B/X", "normal", 2, "X/B/X" },
+ { 1, "A/B/Y", "not-present", 0, "X/B/Y" },
+ { 1, "A/B/Z", "not-present", 0, "X/B/Z" },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A", rows));
+ }
+
+ /* Deleting again discards the copied layer entirely. */
+ SVN_ERR(sbox_wc_delete(&b, "A"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "A", "normal", 2, "A" },
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 0, "A/B/X", "not-present", 0, "A/B/X" },
+ { 0, "A/B/Y", "not-present", 0, "A/B/Y" },
+ { 0, "A/B/Z", "normal", 2, "A/B/Z" },
+ { 1, "A", "base-deleted", NO_COPY_FROM },
+ { 1, "A/B", "base-deleted", NO_COPY_FROM },
+ { 1, "A/B/Z", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A", rows));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* One expected/seeded row of the ACTUAL_NODE table: the node's
+ * relpath and its changelist (NULL when no changelist is set).
+ * Arrays of these are terminated by a { 0 } sentinel. */
+typedef struct actual_row_t {
+ const char *local_relpath;
+ const char *changelist;
+} actual_row_t;
+
+/* Replace the contents of B's ACTUAL_NODE table with the rows in the
+ * { 0 }-terminated array ACTUAL (a no-op when ACTUAL is NULL).  Rows
+ * that carry a changelist additionally get their NODES row converted
+ * to a file with an empty pristine, since changelists only apply to
+ * files. */
+static svn_error_t *
+insert_actual(svn_test__sandbox_t *b,
+ actual_row_t *actual)
+{
+ svn_sqlite__db_t *sdb;
+ svn_sqlite__stmt_t *stmt;
+
+ if (!actual)
+ return SVN_NO_ERROR;
+
+ SVN_ERR(open_wc_db(&sdb, b->wc_abspath, b->pool, b->pool));
+
+ /* Start from an empty ACTUAL table. */
+ SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, STMT_DELETE_ACTUAL));
+ SVN_ERR(svn_sqlite__step_done(stmt));
+
+ while(actual->local_relpath)
+ {
+ SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, STMT_INSERT_ACTUAL));
+ /* Bind relpath, parent relpath (NULL for the wc root), changelist. */
+ SVN_ERR(svn_sqlite__bindf(stmt, "sss",
+ actual->local_relpath,
+ actual->local_relpath[0]
+ ? svn_relpath_dirname(actual->local_relpath,
+ b->pool)
+ : NULL,
+ actual->changelist));
+ SVN_ERR(svn_sqlite__step_done(stmt));
+ if (actual->changelist)
+ {
+ /* Changelist members must be files: give the node an empty
+ pristine and flip its NODES row to kind file. */
+ SVN_ERR(svn_sqlite__get_statement(&stmt, sdb,
+ STMT_ENSURE_EMPTY_PRISTINE));
+ SVN_ERR(svn_sqlite__step_done(stmt));
+ SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, STMT_NODES_SET_FILE));
+ SVN_ERR(svn_sqlite__bindf(stmt, "s", actual->local_relpath));
+ SVN_ERR(svn_sqlite__step_done(stmt));
+ }
+ ++actual;
+ }
+ SVN_ERR(svn_sqlite__close(sdb));
+
+ return SVN_NO_ERROR;
+}
+
+/* Verify that B's ACTUAL_NODE table contains exactly the relpaths in
+ * the { 0 }-terminated array ROWS (a no-op when ROWS is NULL).  Only
+ * relpaths are compared; changelist values are not checked here.
+ * Returns SVN_ERR_TEST_FAILED naming the first unexpected or missing
+ * relpath. */
+static svn_error_t *
+check_db_actual(svn_test__sandbox_t* b, actual_row_t *rows)
+{
+ svn_sqlite__db_t *sdb;
+ svn_sqlite__stmt_t *stmt;
+ svn_boolean_t have_row;
+ apr_hash_t *path_hash = apr_hash_make(b->pool);
+
+ if (!rows)
+ return SVN_NO_ERROR;
+
+ /* Collect the expected relpaths as hash keys. */
+ while(rows->local_relpath)
+ {
+ apr_hash_set(path_hash, rows->local_relpath, APR_HASH_KEY_STRING,
+ (void*)1);
+ ++rows;
+ }
+
+ SVN_ERR(open_wc_db(&sdb, b->wc_abspath, b->pool, b->pool));
+
+ SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, STMT_SELECT_ALL_ACTUAL));
+ SVN_ERR(svn_sqlite__step(&have_row, stmt));
+ while (have_row)
+ {
+ const char *local_relpath = svn_sqlite__column_text(stmt, 0, b->pool);
+ if (!apr_hash_get(path_hash, local_relpath, APR_HASH_KEY_STRING))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, svn_sqlite__reset(stmt),
+ "actual '%s' unexpected", local_relpath);
+ /* Tick off each found relpath; leftovers are "expected, missing". */
+ apr_hash_set(path_hash, local_relpath, APR_HASH_KEY_STRING, NULL);
+ SVN_ERR(svn_sqlite__step(&have_row, stmt));
+ }
+
+ if (apr_hash_count(path_hash))
+ {
+ const char *local_relpath
+ = apr_hash_this_key(apr_hash_first(b->pool, path_hash));
+ return svn_error_createf(SVN_ERR_TEST_FAILED, svn_sqlite__reset(stmt),
+ "actual '%s' expected", local_relpath);
+ }
+
+ SVN_ERR(svn_sqlite__reset(stmt));
+ SVN_ERR(svn_sqlite__close(sdb));
+
+ return SVN_NO_ERROR;
+}
+
+/* Seed the db with BEFORE_NODES/BEFORE_ACTUAL, run
+ * svn_wc__db_op_revert() on LOCAL_RELPATH at DEPTH, and verify the db
+ * then matches AFTER_NODES/AFTER_ACTUAL.  When the revert itself
+ * errors, verify the db is unchanged and propagate the revert error
+ * (composed with any verification error).  NULL actual arrays mean
+ * "empty ACTUAL table". */
+static svn_error_t *
+revert(svn_test__sandbox_t *b,
+ const char *local_relpath,
+ svn_depth_t depth,
+ nodes_row_t *before_nodes,
+ nodes_row_t *after_nodes,
+ actual_row_t *before_actual,
+ actual_row_t *after_actual)
+{
+ const char *local_abspath = sbox_wc_path(b, local_relpath);
+ svn_error_t *err;
+
+ /* insert_actual() treats NULL as a no-op, so explicitly clear the
+ table when the caller wants it empty. */
+ if (!before_actual)
+ {
+ actual_row_t actual[] = { { 0 } };
+ SVN_ERR(insert_actual(b, actual));
+ }
+
+ SVN_ERR(insert_dirs(b, before_nodes));
+ SVN_ERR(insert_actual(b, before_actual));
+ SVN_ERR(check_db_rows(b, "", before_nodes));
+ SVN_ERR(check_db_actual(b, before_actual));
+ err = svn_wc__db_op_revert(b->wc_ctx->db, local_abspath, depth, FALSE,
+ b->pool, b->pool);
+ if (err)
+ {
+ /* If db_op_revert returns an error the DB should be unchanged so
+ verify and return a verification error if a change is detected
+ or the revert error if unchanged. */
+ err = svn_error_compose_create(check_db_rows(b, "", before_nodes), err);
+ err = svn_error_compose_create(check_db_actual(b, before_actual), err);
+ return err;
+ }
+ SVN_ERR(check_db_rows(b, "", after_nodes));
+ SVN_ERR(check_db_actual(b, after_actual));
+
+ return SVN_NO_ERROR;
+}
+
+/* Drive svn_wc__db_op_revert() (via the revert() helper) over a series
+ * of synthetic NODES/ACTUAL states: simple adds, copies, replacements
+ * and base-deletes, at svn_depth_empty and svn_depth_infinity.  Cases
+ * where a depth-empty revert would orphan modified descendants must
+ * fail with SVN_ERR_WC_INVALID_OPERATION_DEPTH and leave the db
+ * untouched. */
+static svn_error_t *
+test_op_revert(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ svn_error_t *err;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "test_op_revert", opts, pool));
+
+ /* Revert of a plain working-only add A/B, with various ACTUAL rows
+ present; depth-empty revert fails when ACTUAL descendants exist
+ below the revert target. */
+ {
+ nodes_row_t before[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 2, "A/B", "normal", NO_COPY_FROM },
+ { 0 },
+ };
+ nodes_row_t after[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0 },
+ };
+ actual_row_t before_actual1[] = {
+ { "A", NULL },
+ { "A/B", NULL },
+ { 0 }
+ };
+ actual_row_t after_actual1[] = {
+ { "A", NULL },
+ { 0 }
+ };
+ actual_row_t before_actual2[] = {
+ { "A/B", NULL },
+ { "A/B/C", NULL },
+ { 0 }
+ };
+ actual_row_t after_actual2[] = {
+ { "A/B", NULL },
+ { 0 }
+ };
+ actual_row_t before_actual3[] = {
+ { "", NULL },
+ { "A", NULL },
+ { "A/B", NULL },
+ { 0 }
+ };
+ actual_row_t after_actual3[] = {
+ { "", NULL },
+ { "A/B", NULL },
+ { 0 }
+ };
+ actual_row_t before_actual4[] = {
+ { "", NULL },
+ { "A/B", NULL },
+ { 0 }
+ };
+ actual_row_t after_actual4[] = {
+ { "A/B", NULL },
+ { 0 }
+ };
+ actual_row_t common_actual5[] = {
+ { "A/B", NULL },
+ { "A/B/C", NULL },
+ { 0 }
+ };
+ actual_row_t common_actual6[] = {
+ { "A/B", NULL },
+ { "A/B/C", NULL },
+ { "A/B/C/D", NULL },
+ { 0 }
+ };
+ SVN_ERR(revert(&b, "A/B", svn_depth_empty,
+ before, after, NULL, NULL));
+ SVN_ERR(revert(&b, "A/B", svn_depth_empty,
+ before, after, before_actual1, after_actual1));
+ SVN_ERR(revert(&b, "A/B/C", svn_depth_empty,
+ before, before, before_actual2, after_actual2));
+ SVN_ERR(revert(&b, "A", svn_depth_empty,
+ before, before, before_actual3, after_actual3));
+ SVN_ERR(revert(&b, "", svn_depth_empty,
+ before, before, before_actual4, after_actual4));
+ err = revert(&b, "A/B", svn_depth_empty,
+ before, before, common_actual5, common_actual5);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_INVALID_OPERATION_DEPTH);
+ err = revert(&b, "A/B/C", svn_depth_empty,
+ before, before, common_actual6, common_actual6);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_INVALID_OPERATION_DEPTH);
+ }
+
+ /* Depth-empty reverts of op-roots that still carry modified
+ children (copies/adds with descendants) must fail; reverting the
+ deepest nodes succeeds and only trims their ACTUAL rows. */
+ {
+ nodes_row_t common[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "P", "normal", 4, "P" },
+ { 0, "P/Q", "normal", 4, "P/Q" },
+ { 1, "P", "normal", 3, "V" },
+ { 1, "P/Q", "normal", 3, "V/Q" },
+ { 2, "A/B", "normal", 2, "X/B" },
+ { 2, "A/B/C", "normal", 2, "X/B/C" },
+ { 2, "A/B/C/D", "normal", 2, "X/B/C/D" },
+ { 1, "X", "normal", NO_COPY_FROM },
+ { 2, "X/Y", "normal", NO_COPY_FROM },
+ { 0 },
+ };
+ actual_row_t common_actual[] = {
+ { "A/B/C/D", NULL },
+ { "A/B/C", NULL },
+ { "A/B", NULL },
+ { "P", NULL },
+ { "X", NULL },
+ { 0 }
+ };
+ actual_row_t actual1[] = {
+ { "A/B/C", NULL },
+ { "A/B", NULL },
+ { "P", NULL },
+ { "X", NULL },
+ { 0 }
+ };
+ actual_row_t actual2[] = {
+ { "A/B/C/D", NULL },
+ { "A/B", NULL },
+ { "P", NULL },
+ { "X", NULL },
+ { 0 }
+ };
+
+ SVN_ERR(revert(&b, "A/B/C/D", svn_depth_empty,
+ common, common, NULL, NULL));
+ SVN_ERR(revert(&b, "A/B/C/D", svn_depth_empty,
+ common, common, common_actual, actual1));
+
+ SVN_ERR(revert(&b, "A/B/C", svn_depth_empty,
+ common, common, NULL, NULL));
+ SVN_ERR(revert(&b, "A/B/C", svn_depth_empty,
+ common, common, common_actual, actual2));
+
+ err = revert(&b, "A/B", svn_depth_empty,
+ common, common, NULL, NULL);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_INVALID_OPERATION_DEPTH);
+ err = revert(&b, "A/B", svn_depth_empty,
+ common, common, common_actual, common_actual);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_INVALID_OPERATION_DEPTH);
+
+ err = revert(&b, "P", svn_depth_empty,
+ common, common, NULL, NULL);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_INVALID_OPERATION_DEPTH);
+ err = revert(&b, "P", svn_depth_empty,
+ common, common, common_actual, common_actual);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_INVALID_OPERATION_DEPTH);
+
+ err = revert(&b, "X", svn_depth_empty,
+ common, common, NULL, NULL);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_INVALID_OPERATION_DEPTH);
+ err = revert(&b, "X", svn_depth_empty,
+ common, common, common_actual, common_actual);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_INVALID_OPERATION_DEPTH);
+ }
+
+ /* Revert of a base-delete leaf: the base-deleted row disappears. */
+ {
+ nodes_row_t before[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/B", "normal", 4, "A/B" },
+ { 0, "A/B/C", "normal", 4, "A/B/C" },
+ { 3, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 0 },
+ };
+ nodes_row_t after[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/B", "normal", 4, "A/B" },
+ { 0, "A/B/C", "normal", 4, "A/B/C" },
+ { 0 },
+ };
+ actual_row_t before_actual[] = {
+ { "A/B", NULL },
+ { "A/B/C", NULL },
+ { 0 }
+ };
+ actual_row_t after_actual[] = {
+ { "A/B", NULL },
+ { 0 }
+ };
+ SVN_ERR(revert(&b, "A/B/C", svn_depth_empty,
+ before, after, NULL, NULL));
+ SVN_ERR(revert(&b, "A/B/C", svn_depth_empty,
+ before, after, before_actual, after_actual));
+ }
+
+ /* Revert of a delete inside a copy (no BASE underneath): the copied
+ child reappears. */
+ {
+ nodes_row_t before[] = {
+ { 0, "", "normal", 4, "" },
+ { 1, "A", "normal", 2, "X" },
+ { 1, "A/B", "normal", 2, "X/B" },
+ { 2, "A/B", "base-deleted", NO_COPY_FROM },
+ { 0 },
+ };
+ nodes_row_t after[] = {
+ { 0, "", "normal", 4, "" },
+ { 1, "A", "normal", 2, "X" },
+ { 1, "A/B", "normal", 2, "X/B" },
+ { 0 },
+ };
+ actual_row_t before_actual[] = {
+ { "A", NULL },
+ { "A/B", NULL },
+ { 0 }
+ };
+ actual_row_t after_actual[] = {
+ { "A", NULL },
+ { 0 }
+ };
+ SVN_ERR(revert(&b, "A/B", svn_depth_empty,
+ before, after, NULL, NULL));
+ SVN_ERR(revert(&b, "A/B", svn_depth_empty,
+ before, after, before_actual, after_actual));
+ }
+
+ /* Same, but with BASE nodes shadowed by the replacing copy. */
+ {
+ nodes_row_t before[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/B", "normal", 4, "A/B" },
+ { 1, "A", "normal", 2, "X" },
+ { 1, "A/B", "normal", 2, "X/B" },
+ { 2, "A/B", "base-deleted", NO_COPY_FROM },
+ { 0 },
+ };
+ nodes_row_t after[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/B", "normal", 4, "A/B" },
+ { 1, "A", "normal", 2, "X" },
+ { 1, "A/B", "normal", 2, "X/B" },
+ { 0 },
+ };
+ actual_row_t before_actual[] = {
+ { "A", NULL },
+ { "A/B", NULL },
+ { 0 },
+ };
+ actual_row_t after_actual[] = {
+ { "A", NULL },
+ { 0 },
+ };
+ SVN_ERR(revert(&b, "A/B", svn_depth_empty,
+ before, after, NULL, NULL));
+ SVN_ERR(revert(&b, "A/B", svn_depth_empty,
+ before, after, before_actual, after_actual));
+ }
+
+ /* Depth-empty revert of a deleted dir A/B raises the delete of its
+ children from op-depth 2 to op-depth 3. */
+ {
+ nodes_row_t before[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/B", "normal", 4, "A/B" },
+ { 0, "A/B/C", "normal", 4, "A/B/C" },
+ { 2, "A/B", "base-deleted", NO_COPY_FROM },
+ { 2, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 2, "A/B/C/D", "base-deleted", NO_COPY_FROM },
+ { 0 },
+ };
+ nodes_row_t after[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/B", "normal", 4, "A/B" },
+ { 0, "A/B/C", "normal", 4, "A/B/C" },
+ { 3, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 3, "A/B/C/D", "base-deleted", NO_COPY_FROM },
+ { 0 },
+ };
+ SVN_ERR(revert(&b, "A/B", svn_depth_empty,
+ before, after, NULL, NULL));
+ }
+
+ /* Nested adds on top of a replaced tree, reverted innermost-first. */
+ {
+ nodes_row_t before[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/B", "normal", 4, "A/B" },
+ { 0, "A/B/C", "normal", 4, "A/B/C" },
+ { 1, "A", "normal", NO_COPY_FROM },
+ { 1, "A/B", "base-deleted", NO_COPY_FROM },
+ { 1, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 2, "A/B", "normal", NO_COPY_FROM },
+ { 3, "A/B/C", "normal", NO_COPY_FROM },
+ { 0 },
+ };
+ nodes_row_t after1[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/B", "normal", 4, "A/B" },
+ { 0, "A/B/C", "normal", 4, "A/B/C" },
+ { 1, "A", "normal", NO_COPY_FROM },
+ { 1, "A/B", "base-deleted", NO_COPY_FROM },
+ { 1, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 2, "A/B", "normal", NO_COPY_FROM },
+ { 0 },
+ };
+ nodes_row_t after2[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/B", "normal", 4, "A/B" },
+ { 0, "A/B/C", "normal", 4, "A/B/C" },
+ { 1, "A", "normal", NO_COPY_FROM },
+ { 1, "A/B", "base-deleted", NO_COPY_FROM },
+ { 1, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 0 },
+ };
+ SVN_ERR(revert(&b, "A/B/C", svn_depth_empty,
+ before, after1, NULL, NULL));
+ SVN_ERR(revert(&b, "A/B", svn_depth_empty,
+ after1, after2, NULL, NULL));
+ }
+
+ /* Revert of an added dir that shadows deleted BASE children: the
+ children's delete is re-rooted at op-depth 3. */
+ {
+ nodes_row_t before[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/B", "normal", 4, "A/B" },
+ { 0, "A/B/C", "normal", 4, "A/B/C" },
+ { 0, "A/B/C/D", "normal", 4, "A/B/C/D" },
+ { 2, "A/B", "normal", NO_COPY_FROM },
+ { 2, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 2, "A/B/C/D", "base-deleted", NO_COPY_FROM },
+ { 0 },
+ };
+ nodes_row_t after[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/B", "normal", 4, "A/B" },
+ { 0, "A/B/C", "normal", 4, "A/B/C" },
+ { 0, "A/B/C/D", "normal", 4, "A/B/C/D" },
+ { 3, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 3, "A/B/C/D", "base-deleted", NO_COPY_FROM },
+ { 0 },
+ };
+ SVN_ERR(revert(&b, "A/B", svn_depth_empty,
+ before, after, NULL, NULL));
+ }
+
+ /* Depth-empty revert of a copy root whose children belong to the
+ same copy is invalid. */
+ {
+ nodes_row_t common[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/B", "normal", 4, "A/B" },
+ { 0, "A/B/C", "normal", 4, "A/B/C" },
+ { 0, "A/B/C/D", "normal", 4, "A/B/C/D" },
+ { 1, "A", "normal", 2, "X/Y" },
+ { 1, "A/B", "normal", 2, "X/Y/B" },
+ { 1, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 1, "A/B/C/D", "base-deleted", NO_COPY_FROM },
+ { 0 },
+ };
+ err = revert(&b, "A", svn_depth_empty,
+ common, common, NULL, NULL);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_INVALID_OPERATION_DEPTH);
+ }
+
+ /* Depth-infinity reverts clear a whole replacement; reverting a
+ nested add alone leaves the outer replacement intact. */
+ {
+ nodes_row_t before[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/B", "normal", 4, "A/B" },
+ { 0, "A/B/C", "normal", 4, "A/B/C" },
+ { 1, "A", "normal", NO_COPY_FROM },
+ { 1, "A/B", "base-deleted", NO_COPY_FROM },
+ { 1, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 2, "A/B", "normal", NO_COPY_FROM },
+ { 0 },
+ };
+ nodes_row_t after1[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/B", "normal", 4, "A/B" },
+ { 0, "A/B/C", "normal", 4, "A/B/C" },
+ { 0 },
+ };
+ nodes_row_t after2[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/B", "normal", 4, "A/B" },
+ { 0, "A/B/C", "normal", 4, "A/B/C" },
+ { 1, "A", "normal", NO_COPY_FROM },
+ { 1, "A/B", "base-deleted", NO_COPY_FROM },
+ { 1, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 0 },
+ };
+ SVN_ERR(revert(&b, "", svn_depth_infinity,
+ before, after1, NULL, NULL));
+ SVN_ERR(revert(&b, "A", svn_depth_infinity,
+ before, after1, NULL, NULL));
+ SVN_ERR(revert(&b, "A/B", svn_depth_infinity,
+ before, after2, NULL, NULL));
+ SVN_ERR(revert(&b, "A/B/C", svn_depth_empty,
+ before, before, NULL, NULL));
+ }
+
+ /* Delete inside a copied tree: depth-empty revert re-roots the
+ child delete; depth-infinity removes it entirely. */
+ {
+ nodes_row_t before[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/B", "normal", 4, "A/B" },
+ { 1, "A", "normal", 2, "X" },
+ { 1, "A/B", "normal", 2, "X/B" },
+ { 1, "A/B/C", "normal", 2, "X/B/C" },
+ { 2, "A/B", "base-deleted", NO_COPY_FROM },
+ { 2, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 0 },
+ };
+ nodes_row_t after1[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/B", "normal", 4, "A/B" },
+ { 1, "A", "normal", 2, "X" },
+ { 1, "A/B", "normal", 2, "X/B" },
+ { 1, "A/B/C", "normal", 2, "X/B/C" },
+ { 3, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 0 },
+ };
+ nodes_row_t after2[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/B", "normal", 4, "A/B" },
+ { 1, "A", "normal", 2, "X" },
+ { 1, "A/B", "normal", 2, "X/B" },
+ { 1, "A/B/C", "normal", 2, "X/B/C" },
+ { 0 },
+ };
+ SVN_ERR(revert(&b, "A/B", svn_depth_empty,
+ before, after1, NULL, NULL));
+ SVN_ERR(revert(&b, "A/B", svn_depth_infinity,
+ before, after2, NULL, NULL));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Check how svn_wc__db_op_revert() treats ACTUAL rows whose only
+ * content is a changelist ("qq"): the changelist row is dropped when
+ * the node itself was added, but preserved when the node still exists
+ * (deleted-and-reverted, or unmodified). */
+static svn_error_t *
+test_op_revert_changelist(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "test_op_revert_changelist", opts, pool));
+
+ /* Reverting an added file removes its changelist row as well. */
+ {
+ nodes_row_t before[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 2, "A/f", "normal", NO_COPY_FROM },
+ { 0 },
+ };
+ nodes_row_t after[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0 },
+ };
+ actual_row_t before_actual[] = {
+ { "A/f", "qq" },
+ { 0 },
+ };
+ actual_row_t after_actual[] = {
+ { 0 },
+ };
+ SVN_ERR(revert(&b, "A/f", svn_depth_empty,
+ before, after, before_actual, after_actual));
+ SVN_ERR(revert(&b, "A/f", svn_depth_infinity,
+ before, after, before_actual, after_actual));
+ SVN_ERR(revert(&b, "", svn_depth_infinity,
+ before, after, before_actual, after_actual));
+ }
+
+ /* Reverting a deleted file keeps its changelist row. */
+ {
+ nodes_row_t before[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/f", "normal", 4, "A/f" },
+ { 2, "A/f", "base-deleted", NO_COPY_FROM },
+ { 0 },
+ };
+ nodes_row_t after[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/f", "normal", 4, "A/f" },
+ { 0 },
+ };
+ actual_row_t common_actual[] = {
+ { "A/f", "qq" },
+ { 0 },
+ };
+ SVN_ERR(revert(&b, "A/f", svn_depth_empty,
+ before, after, common_actual, common_actual));
+ SVN_ERR(revert(&b, "A/f", svn_depth_infinity,
+ before, after, common_actual, common_actual));
+ SVN_ERR(revert(&b, "", svn_depth_infinity,
+ before, after, common_actual, common_actual));
+ }
+
+ /* Reverting an unmodified file also keeps the changelist row. */
+ {
+ nodes_row_t before[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/f", "normal", 4, "A/f" },
+ { 0 },
+ };
+ nodes_row_t after[] = {
+ { 0, "", "normal", 4, "" },
+ { 0, "A", "normal", 4, "A" },
+ { 0, "A/f", "normal", 4, "A/f" },
+ { 0 },
+ };
+ actual_row_t common_actual[] = {
+ { "A/f", "qq" },
+ { 0 },
+ };
+ SVN_ERR(revert(&b, "A/f", svn_depth_empty,
+ before, after, common_actual, common_actual));
+ SVN_ERR(revert(&b, "A/f", svn_depth_infinity,
+ before, after, common_actual, common_actual));
+ SVN_ERR(revert(&b, "", svn_depth_infinity,
+ before, after, common_actual, common_actual));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Check that the (const char *) keys of HASH are exactly the
+ * EXPECTED_NUM strings in EXPECTED_STRINGS. Return an error if not.
+ *
+ * NOTE: consumes HASH destructively -- matched keys are removed so
+ * that any leftovers can be reported as unexpected.  All mismatches
+ * are composed into a single returned error chain. */
+static svn_error_t *
+check_hash_keys(apr_hash_t *hash,
+ int expected_num,
+ const char **expected_strings,
+ apr_pool_t *scratch_pool)
+{
+ svn_error_t *err = SVN_NO_ERROR;
+ int i;
+ apr_hash_index_t *hi;
+
+ /* Remove each expected key; missing ones become errors. */
+ for (i = 0; i < expected_num; i++)
+ {
+ const char *name = expected_strings[i];
+
+ if (apr_hash_get(hash, name, APR_HASH_KEY_STRING))
+ apr_hash_set(hash, name, APR_HASH_KEY_STRING, NULL);
+ else
+ err = svn_error_compose_create(
+ err, svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ _("Expected, not found: '%s'"), name));
+ }
+ /* Whatever remains in the hash was not expected. */
+ for (hi = apr_hash_first(scratch_pool, hash); hi;
+ hi = apr_hash_next(hi))
+ {
+ const char *name = apr_hash_this_key(hi);
+ err = svn_error_compose_create(
+ err, svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ _("Found, not expected: '%s'"), name));
+ }
+ return err;
+}
+
+/* Check that the (const char *) keys of APR_HASH are exactly the
+ * strings in (const char *[]) C_ARRAY. Return an error if not.
+ * C_ARRAY must be a true array (not a pointer) so sizeof yields its
+ * element count.  APR_HASH is emptied as a side effect. */
+#define CHECK_HASH(apr_hash, c_array, scratch_pool) \
+ check_hash_keys(apr_hash, sizeof(c_array) / sizeof(c_array[0]), \
+ c_array, scratch_pool)
+
+/* Check that the basenames of the (const char *) paths in ARRAY are exactly
+ * the EXPECTED_NUM strings in EXPECTED_STRINGS. Return an error if not.
+ * Order is ignored; duplicates collapse, since comparison goes through
+ * a hash of basenames. */
+static svn_error_t *
+check_array_strings(const apr_array_header_t *array,
+ int expected_num,
+ const char **expected_strings,
+ apr_pool_t *scratch_pool)
+{
+ int i;
+ apr_hash_t *hash = apr_hash_make(scratch_pool);
+
+ /* Index the array's basenames, then reuse the hash-key checker. */
+ for (i = 0; i < array->nelts; i++)
+ {
+ const char *path = APR_ARRAY_IDX(array, i, const char *);
+
+ apr_hash_set(hash, svn_path_basename(path, scratch_pool),
+ APR_HASH_KEY_STRING, "");
+ }
+
+ return check_hash_keys(hash, expected_num, expected_strings, scratch_pool);
+}
+
+/* Check that the basenames of the (const char *) paths in APR_ARRAY are
+ * exactly the strings in (const char *[]) C_ARRAY. Return an error if not.
+ * C_ARRAY must be a true array (not a pointer) so sizeof yields its
+ * element count. */
+#define CHECK_ARRAY(apr_array, c_array, scratch_pool) \
+ check_array_strings(apr_array, sizeof(c_array) / sizeof(c_array[0]), \
+ c_array, scratch_pool)
+
+
+/* The purpose of this test is to check whether a child of a deleted-and-
+ * replaced directory is reported by various "list the children" APIs. */
+static svn_error_t *
+test_children_of_replaced_dir(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ const apr_array_header_t *children_array;
+ apr_hash_t *children_hash, *conflicts_hash;
+ const char *A_abspath;
+ /* Expected child sets; see the key below for what F..L are. */
+ const char *working_children_exc_hidden[] = { "G", "H", "I", "J", "K", "L" };
+ const char *working_children_inc_hidden[] = { "G", "H", "I", "J", "K", "L" };
+ const char *all_children_inc_hidden[] = { "F", "G", "H", "I", "J", "K", "L" };
+
+ /*
+ * F - base only
+ * G - base, working (from copy of X; schedule-delete)
+ * H - base, working (from copy of X)
+ * I - working only (from copy of X)
+ * J - working only (schedule-add)
+ * K - working only (from copy of X; schedule-delete)
+ * L - base, working (not in copy; schedule-add)
+ */
+
+ SVN_ERR(svn_test__sandbox_create(&b, "children_of_replaced_dir", opts, pool));
+ A_abspath = svn_dirent_join(b.wc_abspath, "A", pool);
+
+ /* Set up the base state as revision 1. */
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/F"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/G"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/H"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/L"));
+ SVN_ERR(sbox_wc_mkdir(&b, "X"));
+ SVN_ERR(sbox_wc_mkdir(&b, "X/G"));
+ SVN_ERR(sbox_wc_mkdir(&b, "X/H"));
+ SVN_ERR(sbox_wc_mkdir(&b, "X/I"));
+ SVN_ERR(sbox_wc_mkdir(&b, "X/K"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ /* Replace A with a copy of X. */
+ SVN_ERR(sbox_wc_delete(&b, "A"));
+ SVN_ERR(sbox_wc_copy(&b, "X", "A"));
+
+ /* Make other local mods. */
+ SVN_ERR(sbox_wc_delete(&b, "A/G"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/J"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/L"));
+
+ /* Test several variants of "list the children of 'A'". */
+
+ SVN_ERR(svn_wc__db_read_children(&children_array, b.wc_ctx->db, A_abspath,
+ pool, pool));
+ SVN_ERR(CHECK_ARRAY(children_array, all_children_inc_hidden, pool));
+
+ SVN_ERR(svn_wc__db_read_children_of_working_node(
+ &children_array, b.wc_ctx->db, A_abspath, pool, pool));
+ SVN_ERR(CHECK_ARRAY(children_array, working_children_inc_hidden, pool));
+
+ /* Repeated deliberately -- presumably to confirm the first call did
+ not perturb cached state; TODO confirm intent. */
+ SVN_ERR(svn_wc__db_read_children(&children_array, b.wc_ctx->db, A_abspath,
+ pool, pool));
+ SVN_ERR(CHECK_ARRAY(children_array, all_children_inc_hidden, pool));
+
+ /* I am not testing svn_wc__node_get_children(show_hidden=FALSE) because
+ * I'm not sure what result we should expect if a certain child path is a
+ * child of a deleted-and-replaced dir (so should be included) and is also
+ * a 'hidden' child of the working dir (so should be excluded). */
+
+ SVN_ERR(svn_wc__node_get_children_of_working_node(
+ &children_array, b.wc_ctx, A_abspath,
+ pool, pool));
+ SVN_ERR(CHECK_ARRAY(children_array, working_children_exc_hidden, pool));
+
+ SVN_ERR(svn_wc__db_read_children_info(&children_hash, &conflicts_hash,
+ b.wc_ctx->db, A_abspath,
+ FALSE /* base_tree_only */,
+ pool, pool));
+ SVN_ERR(CHECK_HASH(children_hash, all_children_inc_hidden, pool));
+
+ /* We don't yet have a svn_wc__db_read_children_info2() to test. */
+
+ return SVN_NO_ERROR;
+}
+
+/* Seed the db with BEFORE/ACTUAL_BEFORE, run svn_wc__db_op_delete()
+ * on LOCAL_RELPATH, and verify the db then matches AFTER/ACTUAL_AFTER.
+ * NULL actual arrays skip the corresponding seeding/verification. */
+static svn_error_t *
+do_delete(svn_test__sandbox_t *b,
+ const char *local_relpath,
+ nodes_row_t *before,
+ nodes_row_t *after,
+ actual_row_t *actual_before,
+ actual_row_t *actual_after)
+{
+ const char *local_abspath = sbox_wc_path(b, local_relpath);
+
+ SVN_ERR(insert_dirs(b, before));
+ SVN_ERR(insert_actual(b, actual_before));
+ /* Sanity-check the seeded state before operating on it. */
+ SVN_ERR(check_db_rows(b, "", before));
+ SVN_ERR(check_db_actual(b, actual_before));
+ SVN_ERR(svn_wc__db_op_delete(b->wc_ctx->db, local_abspath, NULL,
+ TRUE /* delete_dir_externals */,
+ NULL /* conflict */, NULL /* work_item */,
+ NULL, NULL /* cancellation */,
+ NULL, NULL /* notification */,
+ b->pool));
+ SVN_ERR(check_db_rows(b, "", after));
+ SVN_ERR(check_db_actual(b, actual_after));
+
+ return SVN_NO_ERROR;
+}
+
+/* Drive svn_wc__db_op_delete() (via do_delete()) over synthetic NODES
+ * states: deletes of BASE trees, of copies, of replacements, and of
+ * nodes carrying ACTUAL rows, verifying the resulting base-deleted
+ * layers and the removal of working-only layers. */
+static svn_error_t *
+test_op_delete(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ SVN_ERR(svn_test__sandbox_create(&b, "op_delete", opts, pool));
+
+ /* Deleting a BASE dir yields base-deleted rows at op-depth 1; any
+ pre-existing working adds over it are discarded. */
+ {
+ nodes_row_t before1[] = {
+ { 0, "", "normal", 5, "" },
+ { 0, "A", "normal", 5, "A" },
+ { 0, "A/B", "normal", 5, "A/B" },
+ { 0 }
+ };
+ nodes_row_t before2[] = {
+ { 0, "", "normal", 5, "" },
+ { 0, "A", "normal", 5, "A" },
+ { 0, "A/B", "normal", 5, "A/B" },
+ { 1, "A", "normal", NO_COPY_FROM },
+ { 2, "A/B", "normal", NO_COPY_FROM },
+ { 0 }
+ };
+ nodes_row_t after[] = {
+ { 0, "", "normal", 5, "" },
+ { 0, "A", "normal", 5, "A" },
+ { 0, "A/B", "normal", 5, "A/B" },
+ { 1, "A", "base-deleted", NO_COPY_FROM },
+ { 1, "A/B", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(do_delete(&b, "A", before1, after, NULL, NULL));
+ SVN_ERR(do_delete(&b, "A", before2, after, NULL, NULL));
+ }
+
+ /* Deleting a copy op-root with no BASE underneath removes the
+ copied rows outright -- nothing to base-delete. */
+ {
+ nodes_row_t before[] = {
+ { 0, "", "normal", 5, "" },
+ { 0, "A", "normal", 5, "A" },
+ { 2, "A/B", "normal", 3, "X/B" },
+ { 2, "A/B/C", "normal", 3, "X/B/C" },
+ { 0 }
+ };
+ nodes_row_t after[] = {
+ { 0, "", "normal", 5, "" },
+ { 0, "A", "normal", 5, "A" },
+ { 0 }
+ };
+ SVN_ERR(do_delete(&b, "A/B", before, after, NULL, NULL));
+ }
+
+ /* Deleting inside a replacing copy adds base-deletes for the copy's
+ children; deleting the whole replacement reverts to a plain
+ base-delete of the BASE tree. */
+ {
+ nodes_row_t before[] = {
+ { 0, "", "normal", 5, "" },
+ { 0, "A", "normal", 5, "A" },
+ { 0, "A/B", "normal", 5, "A/B" },
+ { 0, "A/B/C", "normal", 5, "A/B/C" },
+ { 1, "A", "normal", 3, "X" },
+ { 1, "A/B", "normal", 3, "X/B" },
+ { 1, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 1, "A/B/D", "normal", 3, "X/B/D" },
+ { 0 }
+ };
+ nodes_row_t after1[] = {
+ { 0, "", "normal", 5, "" },
+ { 0, "A", "normal", 5, "A" },
+ { 0, "A/B", "normal", 5, "A/B" },
+ { 0, "A/B/C", "normal", 5, "A/B/C" },
+ { 1, "A", "normal", 3, "X" },
+ { 1, "A/B", "normal", 3, "X/B" },
+ { 1, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 1, "A/B/D", "normal", 3, "X/B/D" },
+ { 2, "A/B", "base-deleted", NO_COPY_FROM },
+ { 2, "A/B/D", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ nodes_row_t after2[] = {
+ { 0, "", "normal", 5, "" },
+ { 0, "A", "normal", 5, "A" },
+ { 0, "A/B", "normal", 5, "A/B" },
+ { 0, "A/B/C", "normal", 5, "A/B/C" },
+ { 1, "A", "base-deleted", NO_COPY_FROM },
+ { 1, "A/B", "base-deleted", NO_COPY_FROM },
+ { 1, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(do_delete(&b, "A/B", before, after1, NULL, NULL));
+ SVN_ERR(do_delete(&b, "A", before, after2, NULL, NULL));
+ }
+
+ /* Deleting an ancestor collapses deeper replacements into one
+ base-deleted layer at op-depth 1. */
+ {
+ nodes_row_t before[] = {
+ { 0, "", "normal", 5, "" },
+ { 0, "A", "normal", 5, "A" },
+ { 0, "A/B", "normal", 5, "A/B" },
+ { 0, "A/B/C", "normal", 5, "A/B/C" },
+ { 3, "A/B/C", "normal", 3, "X" },
+ { 3, "A/B/C/D", "normal", 3, "X/D" },
+ { 4, "A/B/C/D", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ nodes_row_t after[] = {
+ { 0, "", "normal", 5, "" },
+ { 0, "A", "normal", 5, "A" },
+ { 0, "A/B", "normal", 5, "A/B" },
+ { 0, "A/B/C", "normal", 5, "A/B/C" },
+ { 1, "A", "base-deleted", NO_COPY_FROM },
+ { 1, "A/B", "base-deleted", NO_COPY_FROM },
+ { 1, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(do_delete(&b, "A", before, after, NULL, NULL));
+ }
+
+ /* Chained deletes at increasing depth next to a local add A/B/C/X:
+ each delete shifts the base-deleted layer to a shallower op-root
+ and drops working-only rows it covers. */
+ {
+ nodes_row_t state1[] = {
+ { 0, "", "normal", 5, "" },
+ { 0, "A", "normal", 5, "A" },
+ { 0, "A/B", "normal", 5, "A/B" },
+ { 0, "A/B/C", "normal", 5, "A/B/C" },
+ { 0, "A/B/C/D", "normal", 5, "A/B/C" },
+ { 4, "A/B/C/X", "normal", NO_COPY_FROM },
+ { 0 }
+ };
+ nodes_row_t state2[] = {
+ { 0, "", "normal", 5, "" },
+ { 0, "A", "normal", 5, "A" },
+ { 0, "A/B", "normal", 5, "A/B" },
+ { 0, "A/B/C", "normal", 5, "A/B/C" },
+ { 0, "A/B/C/D", "normal", 5, "A/B/C" },
+ { 4, "A/B/C/D", "base-deleted", NO_COPY_FROM },
+ { 4, "A/B/C/X", "normal", NO_COPY_FROM },
+ { 0 }
+ };
+ nodes_row_t state3[] = {
+ { 0, "", "normal", 5, "" },
+ { 0, "A", "normal", 5, "A" },
+ { 0, "A/B", "normal", 5, "A/B" },
+ { 0, "A/B/C", "normal", 5, "A/B/C" },
+ { 0, "A/B/C/D", "normal", 5, "A/B/C" },
+ { 2, "A/B", "base-deleted", NO_COPY_FROM },
+ { 2, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 2, "A/B/C/D", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ nodes_row_t state4[] = {
+ { 0, "", "normal", 5, "" },
+ { 0, "A", "normal", 5, "A" },
+ { 0, "A/B", "normal", 5, "A/B" },
+ { 0, "A/B/C", "normal", 5, "A/B/C" },
+ { 0, "A/B/C/D", "normal", 5, "A/B/C" },
+ { 1, "A", "base-deleted", NO_COPY_FROM },
+ { 1, "A/B", "base-deleted", NO_COPY_FROM },
+ { 1, "A/B/C", "base-deleted", NO_COPY_FROM },
+ { 1, "A/B/C/D", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(do_delete(&b, "A/B/C/D", state1, state2, NULL, NULL));
+ SVN_ERR(do_delete(&b, "A/B", state2, state3, NULL, NULL));
+ SVN_ERR(do_delete(&b, "A", state3, state4, NULL, NULL));
+ }
+
+ /* Delete with ACTUAL rows present: rows of removed working nodes go
+ away, but the changelist row of the still-present file A/f stays. */
+ {
+ nodes_row_t before[] = {
+ { 0, "", "normal", 5, "" },
+ { 0, "A", "normal", 5, "" },
+ { 0, "A/f", "normal", 5, "" },
+ { 2, "A/B", "normal", 5, "" },
+ { 0 }
+ };
+ nodes_row_t after[] = {
+ { 0, "", "normal", 5, "" },
+ { 0, "A", "normal", 5, "" },
+ { 0, "A/f", "normal", 5, "" },
+ { 1, "A", "base-deleted", NO_COPY_FROM},
+ { 1, "A/f", "base-deleted", NO_COPY_FROM},
+ { 0 }
+ };
+ actual_row_t before_actual[] = {
+ { "A", NULL },
+ { "A/f", "qq" },
+ { "A/B", NULL },
+ { "A/B/C", NULL },
+ { 0 },
+ };
+ actual_row_t after_actual[] = {
+ { "A/f", "qq" },
+ { 0 },
+ };
+ SVN_ERR(do_delete(&b, "A", before, after, before_actual, after_actual));
+ }
+
+ /* Deleting a child of an op-depth-1 copy that itself shadows BASE:
+ base-deletes for the copy's children are added at op-depth 2. */
+ {
+ nodes_row_t before[] = {
+ { 0, "", "normal", 5, "" },
+ { 0, "A", "normal", 5, "A" },
+ { 0, "A/B", "normal", 5, "A/B" },
+ { 0, "A/B/f", "normal", 5, "A/B/f" },
+ { 0, "A/B/g", "normal", 5, "A/B/g" },
+ { 1, "A", "normal", 4, "A" },
+ { 1, "A/B", "normal", 4, "A/B" },
+ { 1, "A/B/f", "normal", 4, "A/B/f" },
+ { 1, "A/B/g", "base-deleted", NO_COPY_FROM},
+ { 0 }
+ };
+ nodes_row_t after[] = {
+ { 0, "", "normal", 5, "" },
+ { 0, "A", "normal", 5, "A" },
+ { 0, "A/B", "normal", 5, "A/B" },
+ { 0, "A/B/f", "normal", 5, "A/B/f" },
+ { 0, "A/B/g", "normal", 5, "A/B/g" },
+ { 1, "A", "normal", 4, "A" },
+ { 1, "A/B", "normal", 4, "A/B" },
+ { 1, "A/B/f", "normal", 4, "A/B/f" },
+ { 1, "A/B/g", "base-deleted", NO_COPY_FROM},
+ { 2, "A/B", "base-deleted", NO_COPY_FROM},
+ { 2, "A/B/f", "base-deleted", NO_COPY_FROM},
+ { 0 }
+ };
+ SVN_ERR(do_delete(&b, "A/B", before, after, NULL, NULL));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* The purpose of this test is to check what happens if a deleted child is
+   replaced by the same nodes (same copyfrom origin and revision): does the
+   replacement produce a new op_depth layer, or can it be collapsed into
+   the layer it shadows? */
+static svn_error_t *
+test_child_replace_with_same_origin(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "child_replace_with_same", opts, pool));
+
+ /* Set up the base state as revision 1. */
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ /* Copy A@1 to X: expect a single op_depth 1 copied layer. */
+ SVN_ERR(sbox_wc_copy(&b, "A", "X"));
+
+ {
+ nodes_row_t rows[] = {
+ {1, "X", "normal", 1, "A"},
+ {1, "X/B", "normal", 1, "A/B"},
+ {1, "X/B/C", "normal", 1, "A/B/C"},
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "X", rows));
+ }
+
+ /* Delete the copied child: base-deleted rows appear at op_depth 2,
+    shadowing the op_depth 1 copy. */
+ SVN_ERR(sbox_wc_delete(&b, "X/B"));
+ {
+ nodes_row_t rows[] = {
+ {1, "X", "normal", 1, "A"},
+ {1, "X/B", "normal", 1, "A/B"},
+ {1, "X/B/C", "normal", 1, "A/B/C"},
+
+ {2, "X/B", "base-deleted", NO_COPY_FROM },
+ {2, "X/B/C", "base-deleted", NO_COPY_FROM },
+
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "X", rows));
+ }
+
+ /* Replace the deleted child with a copy from the same origin. */
+ SVN_ERR(sbox_wc_copy(&b, "A/B", "X/B"));
+ {
+ /* The revisions match what was here, so for an optimal commit
+ this should have exactly the same behavior as reverting X/B.
+
+ Another copy would be fine, as that is really what the user
+ did. */
+ nodes_row_t rows[] = {
+ {1, "X", "normal", 1, "A"},
+ {1, "X/B", "normal", 1, "A/B"},
+ {1, "X/B/C", "normal", 1, "A/B/C"},
+
+ /* We either expect this */
+ {2, "X/B", "normal", 1, "A/B" },
+ {2, "X/B/C", "normal", 1, "A/B/C" },
+
+ /* Or we expect that op_depth 2 does not exist */
+
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "X", rows));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* The purpose of this test is to check what happens below a shadowed update,
+   in a few scenarios: locally replaced trees (via copy/move or mkdir) that
+   shadow incoming BASE changes, updates that delete and restore shadowed
+   children, and updates of an individual shadowed path to a revision where
+   it no longer exists. */
+static svn_error_t *
+test_shadowed_update(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "shadowed_update", opts, pool));
+
+ /* Set up the base state as revision 1. */
+ SVN_ERR(sbox_file_write(&b, "iota", "This is iota"));
+ SVN_ERR(sbox_wc_add(&b, "iota"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ /* And create two trees in r2 */
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "K"));
+ SVN_ERR(sbox_wc_mkdir(&b, "K/L"));
+ SVN_ERR(sbox_wc_mkdir(&b, "K/L/M"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ /* And change something in r3 */
+ SVN_ERR(sbox_file_write(&b, "iota", "This is a new iota"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ /* And delete C & M */
+ SVN_ERR(sbox_wc_delete(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_delete(&b, "K/L/M"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ /* And now create the shadowed situation: A is a copy of A@2 replacing
+    the r1 tree, K is a freshly added (not copied) tree. */
+ SVN_ERR(sbox_wc_update(&b, "", 2));
+ SVN_ERR(sbox_wc_copy(&b, "A", "A_tmp"));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ SVN_ERR(sbox_wc_move(&b, "A_tmp", "A"));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "K"));
+ SVN_ERR(sbox_wc_mkdir(&b, "K/L"));
+ SVN_ERR(sbox_wc_mkdir(&b, "K/L/M"));
+
+ /* Verify situation before update */
+ {
+ nodes_row_t rows[] = {
+ {0, "", "normal", 1, ""},
+ {0, "iota", "normal", 1, "iota"},
+
+ {1, "A", "normal", 2, "A"},
+ {1, "A/B", "normal", 2, "A/B"},
+ {1, "A/B/C", "normal", 2, "A/B/C"},
+
+ {1, "K", "normal", NO_COPY_FROM},
+ {2, "K/L", "normal", NO_COPY_FROM},
+ {3, "K/L/M", "normal", NO_COPY_FROM},
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "", rows));
+ }
+
+ /* And now bring in A and K below the local information */
+ SVN_ERR(sbox_wc_update(&b, "", 3));
+
+ {
+ nodes_row_t rows[] = {
+
+ {0, "", "normal", 3, ""},
+ {0, "iota", "normal", 3, "iota"},
+
+ {0, "A", "normal", 3, "A"},
+ {0, "A/B", "normal", 3, "A/B"},
+ {0, "A/B/C", "normal", 3, "A/B/C"},
+
+ {1, "A", "normal", 2, "A"},
+ {1, "A/B", "normal", 2, "A/B"},
+ {1, "A/B/C", "normal", 2, "A/B/C"},
+
+ {0, "K", "normal", 3, "K"},
+ {0, "K/L", "normal", 3, "K/L"},
+ {0, "K/L/M", "normal", 3, "K/L/M"},
+
+ /* The added K now replaces BASE, so its children gain
+    base-deleted rows at op_depth 1. */
+ {1, "K", "normal", NO_COPY_FROM},
+ {1, "K/L", "base-deleted", NO_COPY_FROM},
+ {1, "K/L/M", "base-deleted", NO_COPY_FROM},
+
+ {2, "K/L", "normal", NO_COPY_FROM},
+ {3, "K/L/M", "normal", NO_COPY_FROM},
+
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "", rows));
+ }
+
+ /* Update again to remove C and M */
+ SVN_ERR(sbox_wc_resolved(&b, "A"));
+ SVN_ERR(sbox_wc_resolved(&b, "K"));
+ SVN_ERR(sbox_wc_update(&b, "", 4));
+
+ {
+ nodes_row_t rows[] = {
+
+ {0, "", "normal", 4, ""},
+ {0, "iota", "normal", 4, "iota"},
+
+ {0, "A", "normal", 4, "A"},
+ {0, "A/B", "normal", 4, "A/B"},
+
+ /* The working layers keep the shadowed children even though
+    their BASE rows are gone. */
+ {1, "A", "normal", 2, "A"},
+ {1, "A/B", "normal", 2, "A/B"},
+ {1, "A/B/C", "normal", 2, "A/B/C"},
+
+ {0, "K", "normal", 4, "K"},
+ {0, "K/L", "normal", 4, "K/L"},
+
+ {1, "K", "normal", NO_COPY_FROM},
+ {1, "K/L", "base-deleted", NO_COPY_FROM},
+
+ {2, "K/L", "normal", NO_COPY_FROM},
+ {3, "K/L/M", "normal", NO_COPY_FROM},
+
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "", rows));
+ }
+
+ /* Update again to bring C and M back */
+ SVN_ERR(sbox_wc_resolved(&b, "A"));
+ SVN_ERR(sbox_wc_resolved(&b, "K"));
+ SVN_ERR(sbox_wc_update(&b, "", 3));
+
+ /* Deleting the locally added K/L/M removes its op_depth 3 row; the
+    base-deleted row at op_depth 1 already covers the BASE node. */
+ SVN_ERR(sbox_wc_delete(&b, "K/L/M"));
+ {
+ nodes_row_t rows[] = {
+ {0, "", "normal", 3, ""},
+ {0, "iota", "normal", 3, "iota"},
+
+ {0, "A", "normal", 3, "A"},
+ {0, "A/B", "normal", 3, "A/B"},
+ {0, "A/B/C", "normal", 3, "A/B/C"},
+
+ {1, "A", "normal", 2, "A"},
+ {1, "A/B", "normal", 2, "A/B"},
+ {1, "A/B/C", "normal", 2, "A/B/C"},
+
+ {0, "K", "normal", 3, "K"},
+ {0, "K/L", "normal", 3, "K/L"},
+ {0, "K/L/M", "normal", 3, "K/L/M"},
+
+ {1, "K", "normal", NO_COPY_FROM},
+ {1, "K/L", "base-deleted", NO_COPY_FROM},
+ {1, "K/L/M", "base-deleted", NO_COPY_FROM},
+
+ {2, "K/L", "normal", NO_COPY_FROM},
+
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "", rows));
+ }
+
+ /* Resolve conflict on K and go back to r1 */
+ SVN_ERR(sbox_wc_revert(&b, "K", svn_depth_infinity));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "K"));
+ SVN_ERR(sbox_wc_mkdir(&b, "K/L"));
+
+ SVN_ERR(sbox_wc_update(&b, "", 3));
+
+ {
+ nodes_row_t rows[] = {
+
+ {0, "K", "normal", 3, "K"},
+ {0, "K/L", "normal", 3, "K/L"},
+ {0, "K/L/M", "normal", 3, "K/L/M"},
+
+ {1, "K", "normal", NO_COPY_FROM},
+ {1, "K/L", "base-deleted", NO_COPY_FROM},
+ {1, "K/L/M", "base-deleted", NO_COPY_FROM},
+
+ {2, "K/L", "normal", NO_COPY_FROM},
+
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "K", rows));
+ }
+
+ /* Update the shadowed K/L/M and A/B/C to r4 where they do not exist;
+    their BASE rows become not-present while the working layers above
+    them are unaffected. */
+ SVN_ERR(sbox_wc_resolved(&b, "K"));
+ SVN_ERR(sbox_wc_update(&b, "K/L/M", 4));
+ SVN_ERR(sbox_wc_resolved(&b, "A"));
+ SVN_ERR(sbox_wc_update(&b, "A/B/C", 4));
+
+ {
+ nodes_row_t rows[] = {
+
+ {0, "", "normal", 3, ""},
+ {0, "iota", "normal", 3, "iota"},
+
+ {0, "A", "normal", 3, "A"},
+ {0, "A/B", "normal", 3, "A/B"},
+ {0, "A/B/C", "not-present", 4, "A/B/C"},
+
+ {1, "A", "normal", 2, "A"},
+ {1, "A/B", "normal", 2, "A/B"},
+ {1, "A/B/C", "normal", 2, "A/B/C"},
+
+ {0, "K", "normal", 3, "K"},
+ {0, "K/L", "normal", 3, "K/L"},
+ {0, "K/L/M", "not-present", 4, "K/L/M"},
+
+ {1, "K", "normal", NO_COPY_FROM},
+ {1, "K/L", "base-deleted", NO_COPY_FROM},
+
+ {2, "K/L", "normal", NO_COPY_FROM},
+
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "", rows));
+ }
+
+
+ return SVN_NO_ERROR;
+}
+
+/* The purpose of this test is to check how copying behaves when the source
+   tree contains deleted, not-present and excluded nodes: each layer of the
+   source should be copied where possible, rather than flattened into a
+   single layer with not-present markers. */
+static svn_error_t *
+test_copy_of_deleted(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "copy_of_deleted", opts, pool));
+ SVN_ERR(sbox_add_and_commit_greek_tree(&b));
+
+ /* Recreate the test scenario from copy_tests.py copy_wc_url_with_server_excluded */
+
+ /* Delete A/B */
+ SVN_ERR(sbox_wc_delete(&b, "A/B"));
+
+ /* A/no not-present but in HEAD */
+ SVN_ERR(sbox_wc_copy(&b, "A/mu", "A/no"));
+ SVN_ERR(sbox_wc_commit(&b, "A/no"));
+ SVN_ERR(sbox_wc_update(&b, "A/no", 1));
+
+ /* A/mu not-present and not in HEAD */
+ SVN_ERR(sbox_wc_delete(&b, "A/mu"));
+ SVN_ERR(sbox_wc_commit(&b, "A/mu"));
+
+ /* A/D excluded */
+ SVN_ERR(sbox_wc_exclude(&b, "A/D"));
+
+ /* This should have created this structure */
+ {
+ nodes_row_t rows[] = {
+
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/E", "normal", 1, "A/B/E"},
+ {0, "A/B/E/alpha", "normal", 1, "A/B/E/alpha"},
+ {0, "A/B/E/beta", "normal", 1, "A/B/E/beta"},
+ {0, "A/B/F", "normal", 1, "A/B/F"},
+ {0, "A/B/lambda", "normal", 1, "A/B/lambda"},
+ {0, "A/C", "normal", 1, "A/C"},
+ {0, "A/D", "excluded", 1, "A/D"},
+ {0, "A/mu", "not-present", 3, "A/mu"},
+ {0, "A/no", "not-present", 1, "A/no"},
+
+ {2, "A/B", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/E", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/E/alpha", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/E/beta", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/lambda", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/F", "base-deleted", NO_COPY_FROM},
+
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A", rows));
+ }
+
+ SVN_ERR(sbox_wc_copy(&b, "A", "A_copied"));
+
+ /* I would expect this behavior, as this copies all layers where possible
+ instead of just constructing a top level layer with not-present nodes
+ whenever we find a deletion. */
+ {
+ nodes_row_t rows[] = {
+
+ {1, "A_copied", "normal", 1, "A"},
+ {1, "A_copied/B", "normal", 1, "A/B"},
+ {1, "A_copied/B/E", "normal", 1, "A/B/E"},
+ {1, "A_copied/B/E/alpha", "normal", 1, "A/B/E/alpha"},
+ {1, "A_copied/B/E/beta", "normal", 1, "A/B/E/beta"},
+ {1, "A_copied/B/F", "normal", 1, "A/B/F"},
+ {1, "A_copied/B/lambda", "normal", 1, "A/B/lambda"},
+ {1, "A_copied/C", "normal", 1, "A/C"},
+ {1, "A_copied/D", "excluded", 1, "A/D"},
+ {1, "A_copied/mu", "not-present", 3, "A/mu"},
+ {1, "A_copied/no", "not-present", 1, "A/no"},
+
+ {2, "A_copied/B", "base-deleted", NO_COPY_FROM},
+ {2, "A_copied/B/E", "base-deleted", NO_COPY_FROM},
+ {2, "A_copied/B/E/alpha", "base-deleted", NO_COPY_FROM},
+ {2, "A_copied/B/E/beta", "base-deleted", NO_COPY_FROM},
+ {2, "A_copied/B/lambda", "base-deleted", NO_COPY_FROM},
+ {2, "A_copied/B/F", "base-deleted", NO_COPY_FROM},
+
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "A_copied", rows));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Part of issue #3702, #3865: a case-only rename (A -> a, iota -> iotA)
+   must actually rename the item on disk, even on case-insensitive
+   filesystems.  Verified by listing the real dirents. */
+static svn_error_t *
+test_case_rename(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ apr_hash_t *dirents;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "case_rename", opts, pool));
+ SVN_ERR(sbox_add_and_commit_greek_tree(&b));
+
+ SVN_ERR(sbox_wc_move(&b, "A", "a"));
+ SVN_ERR(sbox_wc_move(&b, "iota", "iotA"));
+
+ /* Read the on-disk directory entries (not the DB) to check the rename. */
+ SVN_ERR(svn_io_get_dirents3(&dirents, sbox_wc_path(&b, ""), TRUE, pool, pool));
+
+ /* A shouldn't be there, but a should */
+ SVN_TEST_ASSERT(apr_hash_get(dirents, "a", APR_HASH_KEY_STRING));
+ SVN_TEST_ASSERT(apr_hash_get(dirents, "A", APR_HASH_KEY_STRING) == NULL);
+ /* iota shouldn't be there, but iotA should */
+ SVN_TEST_ASSERT(apr_hash_get(dirents, "iotA", APR_HASH_KEY_STRING));
+ SVN_TEST_ASSERT(apr_hash_get(dirents, "iota", APR_HASH_KEY_STRING) == NULL);
+
+ return SVN_NO_ERROR;
+}
+
+/* Check that a modification made via a file external ("g", defined by an
+   svn:externals property pointing at ^/f) can be committed, and that after
+   updating, the external's DB row records the origin file "f". */
+static svn_error_t *
+commit_file_external(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "commit_file_external", opts, pool));
+ SVN_ERR(sbox_file_write(&b, "f", "this is f\n"));
+ SVN_ERR(sbox_wc_add(&b, "f"));
+ SVN_ERR(sbox_wc_propset(&b, "svn:externals", "^/f g", ""));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ /* Modify the file through its external name and commit from there. */
+ SVN_ERR(sbox_file_write(&b, "g", "this is f\nmodified via g\n"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 2));
+
+ {
+ nodes_row_t rows[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "f", "normal", 2, "f" },
+ /* Trailing TRUE marks "g" as a file external of "f". */
+ { 0, "g", "normal", 2, "f", TRUE },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "", rows));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Check how file externals react to changing and reverting the
+   svn:externals definitions: moving the external from "g" to "h" (and into
+   a subdir "A"), reverting the property change, and updating until the
+   stale externals are cleaned up again. */
+static svn_error_t *
+revert_file_externals(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "revert_file_externals", opts, pool));
+ SVN_ERR(sbox_file_write(&b, "f", "this is f\n"));
+ SVN_ERR(sbox_wc_add(&b, "f"));
+ SVN_ERR(sbox_wc_propset(&b, "svn:externals", "^/f g", ""));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ /* Locally redefine the external as "h" and add one under new dir A. */
+ SVN_ERR(sbox_wc_propset(&b, "svn:externals", "^/f h", ""));
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_propset(&b, "svn:externals", "^/f g", "A"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "", "normal", 1, "" },
+ { 0, "f", "normal", 1, "f" },
+ { 0, "g", "normal", 1, "f", TRUE },
+ { 1, "A", "normal", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "", rows));
+ }
+
+ /* Updating applies the new definitions: h and A/g appear, old g becomes
+    not-present. */
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ {
+ nodes_row_t rows[] = {
+ { 0, "", "normal", 1, "" },
+ { 0, "f", "normal", 1, "f" },
+ { 1, "A", "normal", NO_COPY_FROM },
+ { 0, "h", "normal", 1, "f", TRUE },
+ { 0, "A/g", "normal", 1, "f", TRUE },
+
+ { 0, "g", "not-present", 0, "g"},
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "", rows));
+ }
+
+ /* Revert removes the local property change and the added A, but the
+    already-fetched externals stay until the next update. */
+ SVN_ERR(sbox_wc_revert(&b, "", svn_depth_infinity));
+ {
+ nodes_row_t rows[] = {
+ { 0, "", "normal", 1, "" },
+ { 0, "f", "normal", 1, "f" },
+ { 0, "h", "normal", 1, "f", TRUE },
+ { 0, "A/g", "normal", 1, "f", TRUE },
+
+ { 0, "g", "not-present", 0, "g"},
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "", rows));
+ }
+
+ /* First update restores g per the committed definition; h lingers as
+    not-present. */
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ {
+ nodes_row_t rows[] = {
+ { 0, "", "normal", 1, "" },
+ { 0, "f", "normal", 1, "f" },
+ { 0, "g", "normal", 1, "f", TRUE },
+
+ { 0, "h", "not-present", 0, "h"},
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "", rows));
+ }
+
+ /* Second update removes the stale not-present marker entirely. */
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ {
+ nodes_row_t rows[] = {
+ { 0, "", "normal", 1, "" },
+ { 0, "f", "normal", 1, "f" },
+ { 0, "g", "normal", 1, "f", TRUE },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "", rows));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Check how file externals behave when their parent directory is copied
+   and later deleted: the copy X does not take A's externals with it until
+   an update re-evaluates the definitions, and deleting X drops them again. */
+static svn_error_t *
+copy_file_externals(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "copy_file_externals", opts, pool));
+ SVN_ERR(sbox_file_write(&b, "f", "this is f\n"));
+ SVN_ERR(sbox_wc_add(&b, "f"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_propset(&b, "svn:externals", "^/f g", "A"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ /* A/B is only added locally, with its own externals definition. */
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_propset(&b, "svn:externals", "^/f g", "A/B"));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ {
+ nodes_row_t rows[] = {
+ { 0, "", "normal", 1, "" },
+ { 0, "f", "normal", 1, "f" },
+ { 0, "A", "normal", 1, "A" },
+ { 2, "A/B", "normal", NO_COPY_FROM },
+ { 0, "A/g", "normal", 1, "f", TRUE },
+ { 0, "A/B/g", "normal", 1, "f", TRUE },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "", rows));
+ }
+
+ /* Copying A does not copy the external g; only the versioned nodes. */
+ SVN_ERR(sbox_wc_copy(&b, "A", "X"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "", "normal", 1, "" },
+ { 0, "f", "normal", 1, "f" },
+ { 0, "A", "normal", 1, "A" },
+ { 2, "A/B", "normal", NO_COPY_FROM },
+ { 0, "A/g", "normal", 1, "f", TRUE },
+ { 0, "A/B/g", "normal", 1, "f", TRUE },
+ { 1, "X", "normal", 1, "A" },
+ { 2, "X/B", "normal", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "", rows));
+ }
+
+ /* An update re-evaluates svn:externals and fetches X's externals too. */
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ {
+ nodes_row_t rows[] = {
+ { 0, "", "normal", 1, "" },
+ { 0, "f", "normal", 1, "f" },
+ { 0, "A", "normal", 1, "A" },
+ { 2, "A/B", "normal", NO_COPY_FROM },
+ { 0, "A/g", "normal", 1, "f", TRUE },
+ { 0, "A/B/g", "normal", 1, "f", TRUE },
+ { 1, "X", "normal", 1, "A" },
+ { 2, "X/B", "normal", NO_COPY_FROM },
+ { 0, "X/g", "normal", 1, "f", TRUE },
+ { 0, "X/B/g", "normal", 1, "f", TRUE },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "", rows));
+ }
+
+ /* Deleting X removes the copied tree and its externals' rows. */
+ SVN_ERR(sbox_wc_delete(&b, "X"));
+ {
+ nodes_row_t rows[] = {
+ { 0, "", "normal", 1, "" },
+ { 0, "f", "normal", 1, "f" },
+ { 0, "A", "normal", 1, "A" },
+ { 2, "A/B", "normal", NO_COPY_FROM },
+ { 0, "A/g", "normal", 1, "f", TRUE },
+ { 0, "A/B/g", "normal", 1, "f", TRUE },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "", rows));
+ }
+
+ /* A further update leaves the remaining state unchanged. */
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ {
+ nodes_row_t rows[] = {
+ { 0, "", "normal", 1, "" },
+ { 0, "f", "normal", 1, "f" },
+ { 0, "A", "normal", 1, "A" },
+ { 2, "A/B", "normal", NO_COPY_FROM },
+ { 0, "A/g", "normal", 1, "f", TRUE },
+ { 0, "A/B/g", "normal", 1, "f", TRUE },
+ { 0 }
+ };
+ SVN_ERR(check_db_rows(&b, "", rows));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Check that copying a tree containing a server-excluded node fails with
+   SVN_ERR_WC_PATH_UNEXPECTED_STATUS, and that the partially performed copy
+   leaves the excluded child behind as an "incomplete" node.  The BASE rows
+   are injected directly via insert_dirs rather than driven by an update. */
+static svn_error_t *
+copy_wc_wc_server_excluded(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ nodes_row_t before[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/E", "server-excluded", 1, "A/B/E"},
+ {0}
+ };
+ nodes_row_t after[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/E", "server-excluded", 1, "A/B/E"},
+ {1, "X", "normal", 1, "A"},
+ {1, "X/B", "normal", 1, "A/B"},
+ {1, "X/B/E", "incomplete", 1, "A/B/E"},
+ {0}
+ };
+ svn_error_t *err;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "copy_wc_wc_server_excluded", opts, pool));
+ SVN_ERR(insert_dirs(&b, before));
+ SVN_ERR(check_db_rows(&b, "", before));
+ /* The copy needs an actual directory on disk to operate on. */
+ SVN_ERR(sbox_disk_mkdir(&b, "A"));
+ err = sbox_wc_copy(&b, "A", "X");
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_PATH_UNEXPECTED_STATUS);
+ SVN_ERR(check_db_rows(&b, "", after));
+
+ return SVN_NO_ERROR;
+}
+
+/* Issue 4040: an update over an interrupted (incomplete) switch must be
+   able to finish cleanly.  The incomplete state is injected directly via
+   insert_dirs rather than by actually interrupting a switch. */
+static svn_error_t *
+incomplete_switch(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "incomplete_switch", opts, pool));
+
+ /* r1: A/B/C/D; r2: copy A to X; r3: copy A to X/A; r4: delete X/A. */
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/D"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_copy(&b, "A", "X"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_copy(&b, "A", "X/A"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_delete(&b, "X/A"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ {
+ /* Interrupted switch from A@1 to X@3 */
+ nodes_row_t before[] = {
+ {0, "", "incomplete", 3, "X"},
+ {0, "A", "incomplete", 3, "X/A"},
+ {0, "A/B", "incomplete", 3, "X/A/B"},
+ {0, "A/B/C", "incomplete", 3, "X/A/B/C"},
+ {0, "B", "normal", 1, "A/B"},
+ {0, "B/C", "normal", 1, "A/B/C"},
+ {0, "B/C/D", "normal", 1, "A/B/C/D"},
+ {0}
+ };
+
+ /* After updating to r4 the incomplete X/A subtree (deleted in r4)
+    is gone and the remaining nodes are complete at r4. */
+ nodes_row_t after_update[] = {
+ {0, "", "normal", 4, "X"},
+ {0, "B", "normal", 4, "A/B"},
+ {0, "B/C", "normal", 4, "A/B/C"},
+ {0, "B/C/D", "normal", 4, "A/B/C/D"},
+ {0}
+ };
+
+ SVN_ERR(insert_dirs(&b, before));
+ /* Make disk match the incomplete DB state before updating. */
+ SVN_ERR(svn_io_remove_dir2(sbox_wc_path(&b, "A/B/C/D"), FALSE,
+ NULL, NULL, pool));
+ SVN_ERR(check_db_rows(&b, "", before));
+ SVN_ERR(sbox_wc_update(&b, "", 4));
+ SVN_ERR(check_db_rows(&b, "", after_update));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Nested moves performed child-first (C, then B, then A): each outer move
+   must re-layer the inner moves' rows, and reverting the inner moves must
+   leave the outermost A -> A2 move intact.  Counterpart of
+   nested_moves_child_last below. */
+static svn_error_t *
+nested_moves_child_first(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "nested_moves_child_first", opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+ /* Innermost move first: the extra column pairs record moved-to
+    ("A/B/C2") on the delete half and MOVED_HERE on the add half. */
+ SVN_ERR(sbox_wc_move(&b, "A/B/C", "A/B/C2"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {3, "A/B/C", "base-deleted", NO_COPY_FROM, "A/B/C2"},
+ {3, "A/B/C2", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+ SVN_ERR(sbox_wc_move(&b, "A/B", "A/B2"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM, "A/B2"},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {2, "A/B2", "normal", 1, "A/B", MOVED_HERE},
+ {2, "A/B2/C", "normal", 1, "A/B/C", MOVED_HERE},
+ {3, "A/B2/C", "base-deleted", NO_COPY_FROM, "A/B2/C2"},
+ {3, "A/B2/C2", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+ SVN_ERR(sbox_wc_move(&b, "A", "A2"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+ {1, "A/B", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {1, "A2", "normal", 1, "A", MOVED_HERE},
+ {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+ {1, "A2/B/C", "normal", 1, "A/B/C", MOVED_HERE},
+ {2, "A2/B", "base-deleted", NO_COPY_FROM, "A2/B2"},
+ {2, "A2/B/C", "base-deleted", NO_COPY_FROM},
+ {2, "A2/B2", "normal", 1, "A/B", MOVED_HERE},
+ {2, "A2/B2/C", "normal", 1, "A/B/C", MOVED_HERE},
+ {3, "A2/B2/C", "base-deleted", NO_COPY_FROM, "A2/B2/C2"},
+ {3, "A2/B2/C2","normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* Revert should leave the A to A2 move */
+ SVN_ERR(sbox_wc_revert(&b, "A2/B2", svn_depth_infinity));
+ SVN_ERR(sbox_wc_revert(&b, "A2/B", svn_depth_infinity));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+ {1, "A/B", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {1, "A2", "normal", 1, "A", MOVED_HERE},
+ {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+ {1, "A2/B/C", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Nested moves performed parent-first (A, then B inside A2, then C inside
+   A2/B2): the final DB state must match nested_moves_child_first above,
+   and reverting the inner moves must again leave the A -> A2 move. */
+static svn_error_t *
+nested_moves_child_last(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "nested_moves_child_last", opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+ /* Outermost move first. */
+ SVN_ERR(sbox_wc_move(&b, "A", "A2"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+ {1, "A/B", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {1, "A2", "normal", 1, "A", MOVED_HERE},
+ {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+ {1, "A2/B/C", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+ SVN_ERR(sbox_wc_move(&b, "A2/B", "A2/B2"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+ {1, "A/B", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {1, "A2", "normal", 1, "A", MOVED_HERE},
+ {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+ {1, "A2/B/C", "normal", 1, "A/B/C", MOVED_HERE},
+ {2, "A2/B", "base-deleted", NO_COPY_FROM, "A2/B2"},
+ {2, "A2/B/C", "base-deleted", NO_COPY_FROM},
+ {2, "A2/B2", "normal", 1, "A/B", MOVED_HERE},
+ {2, "A2/B2/C", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+ SVN_ERR(sbox_wc_move(&b, "A2/B2/C", "A2/B2/C2"));
+ {
+ /* Same final state as nested_moves_child_first. */
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+ {1, "A/B", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {1, "A2", "normal", 1, "A", MOVED_HERE},
+ {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+ {1, "A2/B/C", "normal", 1, "A/B/C", MOVED_HERE},
+ {2, "A2/B", "base-deleted", NO_COPY_FROM, "A2/B2"},
+ {2, "A2/B/C", "base-deleted", NO_COPY_FROM},
+ {2, "A2/B2", "normal", 1, "A/B", MOVED_HERE},
+ {2, "A2/B2/C", "normal", 1, "A/B/C", MOVED_HERE},
+ {3, "A2/B2/C", "base-deleted", NO_COPY_FROM, "A2/B2/C2"},
+ {3, "A2/B2/C2","normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* Revert should leave the A to A2 move */
+ SVN_ERR(sbox_wc_revert(&b, "A2/B2", svn_depth_infinity));
+ SVN_ERR(sbox_wc_revert(&b, "A2/B", svn_depth_infinity));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+ {1, "A/B", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {1, "A2", "normal", 1, "A", MOVED_HERE},
+ {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+ {1, "A2/B/C", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* A move inside a copied tree: the copy A2 itself is not a move, but the
+   internal rename A2/B -> A2/B2 is still recorded with moved-to/MOVED_HERE
+   markers at op_depth 2. */
+static svn_error_t *
+move_in_copy(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "move_in_copy", opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ SVN_ERR(sbox_wc_copy(&b, "A", "A2"));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {1, "A2", "normal", 1, "A"},
+ {1, "A2/B", "normal", 1, "A/B"},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+ SVN_ERR(sbox_wc_move(&b, "A2/B", "A2/B2"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {1, "A2", "normal", 1, "A"},
+ {1, "A2/B", "normal", 1, "A/B"},
+ {2, "A2/B", "base-deleted", NO_COPY_FROM, "A2/B2"},
+ {2, "A2/B2", "normal", 1, "A/B", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* A move inside a replacement: A's BASE is deleted and replaced by a copy
+   of X; a subsequent move of the replacing tree's child A/B -> A/B2 gets
+   moved-to/MOVED_HERE markers at op_depth 2, with copyfrom pointing at
+   X/B (the replacement's origin), not A/B. */
+static svn_error_t *
+move_in_replace(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "move_in_replace", opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "X"));
+ SVN_ERR(sbox_wc_mkdir(&b, "X/B"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ SVN_ERR(sbox_wc_delete(&b, "A"));
+ SVN_ERR(sbox_wc_copy(&b, "X", "A"));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "X", "normal", 1, "X"},
+ {0, "X/B", "normal", 1, "X/B"},
+ {1, "A", "normal", 1, "X"},
+ {1, "A/B", "normal", 1, "X/B"},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+ SVN_ERR(sbox_wc_move(&b, "A/B", "A/B2"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "X", "normal", 1, "X"},
+ {0, "X/B", "normal", 1, "X/B"},
+ {1, "A", "normal", 1, "X"},
+ {1, "A/B", "normal", 1, "X/B"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM, "A/B2"},
+ {2, "A/B2", "normal", 1, "X/B", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Copy a tree that contains a move (A/B/C moved to A/C2, then A copied to
+   A2): the copy reproduces the rows but without moved-to/MOVED_HERE
+   markers, so the result differs from performing the move inside the copy.
+   The trailing question comments record that this behavior is debatable. */
+static svn_error_t *
+copy_a_move(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "copy_a_move", opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+ SVN_ERR(sbox_wc_move(&b, "A/B/C", "A/C2"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {2, "A/C2", "normal", 1, "A/B/C", MOVED_HERE},
+ {3, "A/B/C", "base-deleted", NO_COPY_FROM, "A/C2"},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+ /* Copying a move doesn't copy any moved-to/here artifacts, which
+ means that moving inside a copy is not the same as copying
+ something that contains a move? Is this behaviour correct? */
+ SVN_ERR(sbox_wc_copy(&b, "A", "A2"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {2, "A/C2", "normal", 1, "A/B/C", MOVED_HERE},
+ {3, "A/B/C", "base-deleted", NO_COPY_FROM, "A/C2"},
+ {1, "A2", "normal", 1, "A"},
+ {1, "A2/B", "normal", 1, "A/B"},
+ {1, "A2/B/C", "normal", 1, "A/B/C"},
+ {2, "A2/C2", "normal", 1, "A/B/C"}, /* MOVED_HERE? */
+ {3, "A2/B/C", "base-deleted", NO_COPY_FROM}, /* "A2/C2"? */
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Swap the children of two sibling trees (A/B <-> X/Y), then swap the
+   parents themselves (A <-> X), verifying the expected wc-db NODES rows
+   after every move.  Finally undo the moves in the opposite order and
+   check that the db returns exactly to the earlier states. */
+static svn_error_t *
+move_to_swap(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "move_to_swap", opts, pool));
+
+  /* r1: two sibling directories, each with one child. */
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_wc_mkdir(&b, "X"));
+  SVN_ERR(sbox_wc_mkdir(&b, "X/Y"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+
+  /* Swap the children: B moves under X, Y moves under A. */
+  SVN_ERR(sbox_wc_move(&b, "A/B", "X/B"));
+  SVN_ERR(sbox_wc_move(&b, "X/Y", "A/Y"));
+
+  {
+    /* Both child moves are recorded at op_depth 2. */
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {0, "X", "normal", 1, "X"},
+      {0, "X/Y", "normal", 1, "X/Y"},
+      {2, "A/B", "base-deleted", NO_COPY_FROM, "X/B"},
+      {2, "A/Y", "normal", 1, "X/Y", MOVED_HERE},
+      {2, "X/Y", "base-deleted", NO_COPY_FROM, "A/Y"},
+      {2, "X/B", "normal", 1, "A/B", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Begin swapping the parents: move A aside to A2. */
+  SVN_ERR(sbox_wc_move(&b, "A", "A2"));
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {0, "X", "normal", 1, "X"},
+      {0, "X/Y", "normal", 1, "X/Y"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 1, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+      {2, "A2/B", "base-deleted", NO_COPY_FROM, "X/B"},
+      {2, "A2/Y", "normal", 1, "X/Y", MOVED_HERE},
+      {2, "X/Y", "base-deleted", NO_COPY_FROM, "A2/Y"},
+      {2, "X/B", "normal", 1, "A/B", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* X takes A's place... */
+  SVN_ERR(sbox_wc_move(&b, "X", "A"));
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {0, "X", "normal", 1, "X"},
+      {0, "X/Y", "normal", 1, "X/Y"},
+      {1, "A", "normal", 1, "X", FALSE, "A2", TRUE},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/Y", "normal", 1, "X/Y", MOVED_HERE},
+      {1, "A2", "normal", 1, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+      {1, "X", "base-deleted", NO_COPY_FROM, "A"},
+      {1, "X/Y", "base-deleted", NO_COPY_FROM},
+      {2, "A/B", "normal", 1, "A/B", MOVED_HERE},
+      {2, "A/Y", "base-deleted", NO_COPY_FROM, "A2/Y"},
+      {2, "A2/B", "base-deleted", NO_COPY_FROM, "A/B"},
+      {2, "A2/Y", "normal", 1, "X/Y", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* ...and A2 completes the swap by becoming X. */
+  SVN_ERR(sbox_wc_move(&b, "A2", "X"));
+
+  {
+    /* Fully swapped: both parents and both children replace each other. */
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {0, "X", "normal", 1, "X"},
+      {0, "X/Y", "normal", 1, "X/Y"},
+      {1, "A", "normal", 1, "X", FALSE, "X", TRUE},
+      {1, "A/Y", "normal", 1, "X/Y", MOVED_HERE},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "X", "normal", 1, "A", FALSE, "A", TRUE},
+      {1, "X/B", "normal", 1, "A/B", MOVED_HERE},
+      {1, "X/Y", "base-deleted", NO_COPY_FROM},
+      {2, "A/Y", "base-deleted", NO_COPY_FROM, "X/Y"},
+      {2, "X/B", "base-deleted", NO_COPY_FROM, "A/B"},
+      {2, "A/B", "normal", 1, "A/B", MOVED_HERE},
+      {2, "X/Y", "normal", 1, "X/Y", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Revert and try in different order */
+  SVN_ERR(sbox_wc_revert(&b, "", svn_depth_infinity));
+
+  /* This time swap the parents first... */
+  SVN_ERR(sbox_wc_move(&b, "A", "A2"));
+  SVN_ERR(sbox_wc_move(&b, "X", "A"));
+  SVN_ERR(sbox_wc_move(&b, "A2", "X"));
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {0, "X", "normal", 1, "X"},
+      {0, "X/Y", "normal", 1, "X/Y"},
+      {1, "A", "normal", 1, "X", FALSE, "X", TRUE},
+      {1, "A/Y", "normal", 1, "X/Y", MOVED_HERE},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "X", "normal", 1, "A", FALSE, "A", TRUE},
+      {1, "X/B", "normal", 1, "A/B", MOVED_HERE},
+      {1, "X/Y", "base-deleted", NO_COPY_FROM},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* ...then swap the children back under their original parents. */
+  SVN_ERR(sbox_wc_move(&b, "A/Y", "X/Y"));
+  SVN_ERR(sbox_wc_move(&b, "X/B", "A/B"));
+
+  {
+    /* Expected to match the fully-swapped state reached the first time. */
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {0, "X", "normal", 1, "X"},
+      {0, "X/Y", "normal", 1, "X/Y"},
+      {1, "A", "normal", 1, "X", FALSE, "X", TRUE},
+      {1, "A/Y", "normal", 1, "X/Y", MOVED_HERE},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "X", "normal", 1, "A", FALSE, "A", TRUE},
+      {1, "X/B", "normal", 1, "A/B", MOVED_HERE},
+      {1, "X/Y", "base-deleted", NO_COPY_FROM},
+      {2, "A/Y", "base-deleted", NO_COPY_FROM, "X/Y"},
+      {2, "X/B", "base-deleted", NO_COPY_FROM, "A/B"},
+      {2, "A/B", "normal", 1, "A/B", MOVED_HERE},
+      {2, "X/Y", "normal", 1, "X/Y", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* And move this last bit back and check if the db state is restored */
+  SVN_ERR(sbox_wc_move(&b, "A/B", "X/B"));
+  SVN_ERR(sbox_wc_move(&b, "X/Y", "A/Y"));
+
+  {
+    /* Exact the same as before the initial moves */
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {0, "X", "normal", 1, "X"},
+      {0, "X/Y", "normal", 1, "X/Y"},
+      {1, "A", "normal", 1, "X", FALSE, "X", TRUE},
+      {1, "A/Y", "normal", 1, "X/Y", MOVED_HERE},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "X", "normal", 1, "A", FALSE, "A", TRUE},
+      {1, "X/B", "normal", 1, "A/B", MOVED_HERE},
+      {1, "X/Y", "base-deleted", NO_COPY_FROM},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* And try to undo the rest */
+  SVN_ERR(sbox_wc_move(&b, "A", "A2"));
+  SVN_ERR(sbox_wc_move(&b, "X", "A"));
+  SVN_ERR(sbox_wc_move(&b, "A2", "X"));
+
+  {
+    /* Everything back in place: only pristine op_depth-0 rows remain. */
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {0, "X", "normal", 1, "X"},
+      {0, "X/Y", "normal", 1, "X/Y"},
+
+      {0}
+    };
+
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test reverting moves that are nested inside other moves
+   (A->A2, then A2/B->A2/B2, then A2/B2/C->A2/B2/C2).  Verifies that
+   reverting an inner move leaves the outer move's rows intact, and that
+   reverting only one end of a move leaves a plain copy behind (the
+   nodes_*_copied tables below). */
+static svn_error_t *
+revert_nested_move(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+  /* Expected rows after just A->A2. */
+  nodes_row_t nodes_A_moved[] = {
+    {0, "", "normal", 1, ""},
+    {0, "A", "normal", 1, "A"},
+    {0, "A/B", "normal", 1, "A/B"},
+    {0, "A/B/C", "normal", 1, "A/B/C"},
+    {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+    {1, "A/B", "base-deleted", NO_COPY_FROM},
+    {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+    {1, "A2", "normal", 1, "A", MOVED_HERE},
+    {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+    {1, "A2/B/C", "normal", 1, "A/B/C", MOVED_HERE},
+    {0}
+  };
+  /* Expected rows after A->A2 plus A2/B->A2/B2. */
+  nodes_row_t nodes_AB_moved[] = {
+    {0, "", "normal", 1, ""},
+    {0, "A", "normal", 1, "A"},
+    {0, "A/B", "normal", 1, "A/B"},
+    {0, "A/B/C", "normal", 1, "A/B/C"},
+    {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+    {1, "A/B", "base-deleted", NO_COPY_FROM},
+    {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+    {1, "A2", "normal", 1, "A", MOVED_HERE},
+    {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+    {1, "A2/B/C", "normal", 1, "A/B/C", MOVED_HERE},
+    {2, "A2/B", "base-deleted", NO_COPY_FROM, "A2/B2"},
+    {2, "A2/B/C", "base-deleted", NO_COPY_FROM},
+    {2, "A2/B2", "normal", 1, "A/B", MOVED_HERE},
+    {2, "A2/B2/C", "normal", 1, "A/B/C", MOVED_HERE},
+    {0}
+  };
+  /* Expected rows after all three nested moves. */
+  nodes_row_t nodes_ABC_moved[] = {
+    {0, "", "normal", 1, ""},
+    {0, "A", "normal", 1, "A"},
+    {0, "A/B", "normal", 1, "A/B"},
+    {0, "A/B/C", "normal", 1, "A/B/C"},
+    {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+    {1, "A/B", "base-deleted", NO_COPY_FROM},
+    {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+    {1, "A2", "normal", 1, "A", MOVED_HERE},
+    {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+    {1, "A2/B/C", "normal", 1, "A/B/C", MOVED_HERE},
+    {2, "A2/B", "base-deleted", NO_COPY_FROM, "A2/B2"},
+    {2, "A2/B/C", "base-deleted", NO_COPY_FROM},
+    {2, "A2/B2", "normal", 1, "A/B", MOVED_HERE},
+    {2, "A2/B2/C", "normal", 1, "A/B/C", MOVED_HERE},
+    {3, "A2/B2/C", "base-deleted", NO_COPY_FROM, "A2/B2/C2"},
+    {3, "A2/B2/C2", "normal", 1, "A/B/C", MOVED_HERE},
+    {0}
+  };
+  /* As nodes_AB_moved, but with the C->C2 move degraded to a plain copy
+     at the destination (source end of the move was reverted). */
+  nodes_row_t nodes_AB_moved_C_copied[] = {
+    {0, "", "normal", 1, ""},
+    {0, "A", "normal", 1, "A"},
+    {0, "A/B", "normal", 1, "A/B"},
+    {0, "A/B/C", "normal", 1, "A/B/C"},
+    {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+    {1, "A/B", "base-deleted", NO_COPY_FROM},
+    {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+    {1, "A2", "normal", 1, "A", MOVED_HERE},
+    {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+    {1, "A2/B/C", "normal", 1, "A/B/C", MOVED_HERE},
+    {2, "A2/B", "base-deleted", NO_COPY_FROM, "A2/B2"},
+    {2, "A2/B/C", "base-deleted", NO_COPY_FROM},
+    {2, "A2/B2", "normal", 1, "A/B", MOVED_HERE},
+    {2, "A2/B2/C", "normal", 1, "A/B/C", MOVED_HERE},
+    {3, "A2/B2/C2", "normal", 1, "A/B/C"},
+    {0}
+  };
+  /* A and C still moved; the intermediate B move degraded to a copy. */
+  nodes_row_t nodes_AC_moved_B_copied[] = {
+    {0, "", "normal", 1, ""},
+    {0, "A", "normal", 1, "A"},
+    {0, "A/B", "normal", 1, "A/B"},
+    {0, "A/B/C", "normal", 1, "A/B/C"},
+    {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+    {1, "A/B", "base-deleted", NO_COPY_FROM},
+    {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+    {1, "A2", "normal", 1, "A", MOVED_HERE},
+    {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+    {1, "A2/B/C", "normal", 1, "A/B/C", MOVED_HERE},
+    {2, "A2/B2", "normal", 1, "A/B"},
+    {2, "A2/B2/C", "normal", 1, "A/B/C"},
+    {3, "A2/B2/C", "base-deleted", NO_COPY_FROM, "A2/B2/C2"},
+    {3, "A2/B2/C2", "normal", 1, "A/B/C", MOVED_HERE},
+    {0}
+  };
+
+  SVN_ERR(svn_test__sandbox_create(&b, "revert_nested_move", opts, pool));
+
+  /* r1: a three-level tree. */
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+
+  /* Apply the three nested moves, outermost first. */
+  SVN_ERR(sbox_wc_move(&b, "A", "A2"));
+  SVN_ERR(check_db_rows(&b, "", nodes_A_moved));
+
+  SVN_ERR(sbox_wc_move(&b, "A2/B", "A2/B2"));
+  SVN_ERR(check_db_rows(&b, "", nodes_AB_moved));
+
+  SVN_ERR(sbox_wc_move(&b, "A2/B2/C", "A2/B2/C2"));
+  SVN_ERR(check_db_rows(&b, "", nodes_ABC_moved));
+
+  /* Reverting both ends of the inner moves restores the A-only state. */
+  SVN_ERR(sbox_wc_revert(&b, "A2/B", svn_depth_infinity));
+  SVN_ERR(sbox_wc_revert(&b, "A2/B2", svn_depth_infinity));
+  SVN_ERR(check_db_rows(&b, "", nodes_A_moved));
+
+  SVN_ERR(sbox_wc_move(&b, "A2/B", "A2/B2"));
+  SVN_ERR(sbox_wc_move(&b, "A2/B2/C", "A2/B2/C2"));
+  SVN_ERR(check_db_rows(&b, "", nodes_ABC_moved));
+
+  /* Reverting only the move source turns the destination into a copy. */
+  SVN_ERR(sbox_wc_revert(&b, "A2/B2/C", svn_depth_empty));
+  SVN_ERR(check_db_rows(&b, "", nodes_AB_moved_C_copied));
+  SVN_ERR(sbox_wc_revert(&b, "A2/B2/C2", svn_depth_infinity));
+  SVN_ERR(check_db_rows(&b, "", nodes_AB_moved));
+
+  SVN_ERR(sbox_wc_move(&b, "A2/B2/C", "A2/B2/C2"));
+  SVN_ERR(check_db_rows(&b, "", nodes_ABC_moved));
+
+  /* Same source-only revert, this time with infinite depth. */
+  SVN_ERR(sbox_wc_revert(&b, "A2/B2/C", svn_depth_infinity));
+  SVN_ERR(check_db_rows(&b, "", nodes_AB_moved_C_copied));
+  SVN_ERR(sbox_wc_revert(&b, "A2/B2/C2", svn_depth_infinity));
+  SVN_ERR(check_db_rows(&b, "", nodes_AB_moved));
+
+  SVN_ERR(sbox_wc_revert(&b, "A2/B", svn_depth_infinity));
+  SVN_ERR(sbox_wc_revert(&b, "A2/B2", svn_depth_infinity));
+  SVN_ERR(check_db_rows(&b, "", nodes_A_moved));
+
+  /* Check moves in reverse order */
+  SVN_ERR(sbox_wc_revert(&b, "", svn_depth_infinity));
+  SVN_ERR(sbox_wc_move(&b, "A/B/C", "A/B/C2"));
+  SVN_ERR(sbox_wc_move(&b, "A/B", "A/B2"));
+  SVN_ERR(sbox_wc_move(&b, "A", "A2"));
+  SVN_ERR(check_db_rows(&b, "", nodes_ABC_moved));
+
+  /* Reverting the middle move leaves B2 as a copy but keeps the
+     A and C moves. */
+  SVN_ERR(sbox_wc_revert(&b, "A2/B", svn_depth_infinity));
+  SVN_ERR(check_db_rows(&b, "", nodes_AC_moved_B_copied));
+
+  return SVN_NO_ERROR;
+}
+
+/* Move A/B away, delete A, and replace A with a copy of X; then move the
+   copied A/B as well.  Verifies that the second move of the same path is
+   recorded against the replacing copy (op_depth 2 row pointing at "B3")
+   while the first move's destination (B2) still refers to the original
+   BASE node. */
+static svn_error_t *
+move_on_move(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "move_on_move", opts, pool));
+
+  /* r1: A/B and X/B. */
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_wc_mkdir(&b, "X"));
+  SVN_ERR(sbox_wc_mkdir(&b, "X/B"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+
+  /* Move the child away, then replace its parent with a copy of X. */
+  SVN_ERR(sbox_wc_move(&b, "A/B", "B2"));
+  SVN_ERR(sbox_wc_delete(&b, "A"));
+  SVN_ERR(sbox_wc_copy(&b, "X", "A"));
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {0, "X", "normal", 1, "X"},
+      {0, "X/B", "normal", 1, "X/B"},
+      {1, "B2", "normal", 1, "A/B", MOVED_HERE},
+      {1, "A", "normal", 1, "X"},
+      {1, "A/B", "normal", 1, "X/B", FALSE, "B2"},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Now move the copied A/B; this is a second, independent move of the
+     same working path. */
+  SVN_ERR(sbox_wc_move(&b, "A/B", "B3"));
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {0, "X", "normal", 1, "X"},
+      {0, "X/B", "normal", 1, "X/B"},
+      {1, "B2", "normal", 1, "A/B", MOVED_HERE},
+      {1, "B3", "normal", 1, "X/B", MOVED_HERE},
+      {1, "A", "normal", 1, "X"},
+      {1, "A/B", "normal", 1, "X/B", FALSE, "B2"},
+      {2, "A/B", "base-deleted", NO_COPY_FROM, "B3"},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Variant of move_on_move: this time the whole of A is moved (A->A2)
+   before A is deleted and replaced by a copy of X.  Then the copied
+   A/B is moved, checking that the new move is tracked against the
+   replacing copy rather than the moved-away original. */
+static svn_error_t *
+move_on_move2(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "move_on_move2", opts, pool));
+
+  /* r1: A/B and X/B. */
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_wc_mkdir(&b, "X"));
+  SVN_ERR(sbox_wc_mkdir(&b, "X/B"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+
+  /* Move A aside, then replace it with a copy of X. */
+  SVN_ERR(sbox_wc_move(&b, "A", "A2"));
+  SVN_ERR(sbox_wc_delete(&b, "A"));
+  SVN_ERR(sbox_wc_copy(&b, "X", "A"));
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {0, "X", "normal", 1, "X"},
+      {0, "X/B", "normal", 1, "X/B"},
+      {1, "A2", "normal", 1, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+      {1, "A", "normal", 1, "X", FALSE, "A2"},
+      {1, "A/B", "normal", 1, "X/B"},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Move the copied child; recorded at op_depth 2 against the copy. */
+  SVN_ERR(sbox_wc_move(&b, "A/B", "B3"));
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {0, "X", "normal", 1, "X"},
+      {0, "X/B", "normal", 1, "X/B"},
+      {1, "A2", "normal", 1, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+      {1, "B3", "normal", 1, "X/B", MOVED_HERE},
+      {1, "A", "normal", 1, "X", FALSE, "A2"},
+      {1, "A/B", "normal", 1, "X/B"},
+      {2, "A/B", "base-deleted", NO_COPY_FROM, "B3"},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Move a directory that contains a locally-added (uncommitted) child,
+   then add another child at the destination.  Both added children end
+   up as plain adds at op_depth 3 -- see the in-body comment questioning
+   whether C should instead carry moved-here info. */
+static svn_error_t *
+move_added(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "move_added", opts, pool));
+
+  /* r1: just A/B. */
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+
+  /* C is added before the move, C2 after it. */
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+  SVN_ERR(sbox_wc_move(&b, "A", "A2"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A2/B/C2"));
+
+  /* Both A2/B/C and A2/B/C2 are simple adds inside the move.  It
+     doesn't seem right for A2/B/C to be marked moved_here. */
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 1, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+      {3, "A2/B/C", "normal", NO_COPY_FROM},
+      {3, "A2/B/C2", "normal", NO_COPY_FROM},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test the result of 'update' when the incoming changes are inside a
+ * directory that is locally moved.  Builds five revisions, moves A to A2
+ * at r1, then updates step by step; each update raises a tree conflict
+ * on A which is resolved with 'mine-conflict', after which the incoming
+ * changes are expected to be replayed into the move destination A2. */
+static svn_error_t *
+move_update(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "move_update", opts, pool));
+
+  /* r1: Create files 'f', 'h' */
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_file_write(&b, "A/B/f", "r1 content\n"));
+  SVN_ERR(sbox_file_write(&b, "A/B/h", "r1 content\n"));
+  SVN_ERR(sbox_wc_add(&b, "A/B/f"));
+  SVN_ERR(sbox_wc_add(&b, "A/B/h"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+
+  /* r2: Modify 'f' */
+  SVN_ERR(sbox_file_write(&b, "A/B/f", "r1 content\nr2 content\n"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+
+  /* r3: Delete 'h', add 'g' */
+  SVN_ERR(sbox_file_write(&b, "A/B/g", "r3 content\n"));
+  SVN_ERR(sbox_wc_add(&b, "A/B/g"));
+  SVN_ERR(sbox_wc_delete(&b, "A/B/h"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+
+  /* r4: Add a new subtree 'X' */
+  SVN_ERR(sbox_wc_mkdir(&b, "X"));
+  SVN_ERR(sbox_file_write(&b, "X/f", "r4 content\n"));
+  SVN_ERR(sbox_file_write(&b, "X/g", "r4 content\n"));
+  SVN_ERR(sbox_file_write(&b, "X/h", "r4 content\n"));
+  SVN_ERR(sbox_wc_add(&b, "X/f"));
+  SVN_ERR(sbox_wc_add(&b, "X/g"));
+  SVN_ERR(sbox_wc_add(&b, "X/h"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+
+  /* r5: Add a subtree 'A/B/C' */
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+
+  /* A is single-revision so A2 is a single-revision copy */
+  SVN_ERR(sbox_wc_move(&b, "A", "A2"));
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {0, "A/B/f", "normal", 1, "A/B/f"},
+      {0, "A/B/h", "normal", 1, "A/B/h"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/f", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/h", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 1, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+      {1, "A2/B/f", "normal", 1, "A/B/f", MOVED_HERE},
+      {1, "A2/B/h", "normal", 1, "A/B/h", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Update causes a tree-conflict on A due to incoming text-change. */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  {
+    /* BASE is now r2 but the conflicted move destination stays at r1. */
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 2, ""},
+      {0, "A", "normal", 2, "A"},
+      {0, "A/B", "normal", 2, "A/B"},
+      {0, "A/B/f", "normal", 2, "A/B/f"},
+      {0, "A/B/h", "normal", 2, "A/B/h"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/f", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/h", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 1, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+      {1, "A2/B/f", "normal", 1, "A/B/f", MOVED_HERE},
+      {1, "A2/B/h", "normal", 1, "A/B/h", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Resolve should update the move. */
+  SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty,
+                          svn_wc_conflict_choose_mine_conflict));
+  {
+    /* The A2 subtree has caught up to r2. */
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 2, ""},
+      {0, "A", "normal", 2, "A"},
+      {0, "A/B", "normal", 2, "A/B"},
+      {0, "A/B/f", "normal", 2, "A/B/f"},
+      {0, "A/B/h", "normal", 2, "A/B/h"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/f", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/h", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 2, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 2, "A/B", MOVED_HERE},
+      {1, "A2/B/f", "normal", 2, "A/B/f", MOVED_HERE},
+      {1, "A2/B/h", "normal", 2, "A/B/h", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Update causes a tree-conflict on due to incoming add. */
+  SVN_ERR(sbox_wc_update(&b, "", 3));
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 3, ""},
+      {0, "A", "normal", 3, "A"},
+      {0, "A/B", "normal", 3, "A/B"},
+      {0, "A/B/f", "normal", 3, "A/B/f"},
+      {0, "A/B/g", "normal", 3, "A/B/g"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/f", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/g", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 2, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 2, "A/B", MOVED_HERE},
+      {1, "A2/B/f", "normal", 2, "A/B/f", MOVED_HERE},
+      {1, "A2/B/h", "normal", 2, "A/B/h", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Resolving replays r3's delete of 'h' and add of 'g' into A2. */
+  SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty,
+                          svn_wc_conflict_choose_mine_conflict));
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 3, ""},
+      {0, "A", "normal", 3, "A"},
+      {0, "A/B", "normal", 3, "A/B"},
+      {0, "A/B/f", "normal", 3, "A/B/f"},
+      {0, "A/B/g", "normal", 3, "A/B/g"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/f", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/g", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 3, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 3, "A/B", MOVED_HERE},
+      {1, "A2/B/f", "normal", 3, "A/B/f", MOVED_HERE},
+      {1, "A2/B/g", "normal", 3, "A/B/g", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Delete inside the move destination; rows appear at op_depth 2. */
+  SVN_ERR(sbox_wc_delete(&b, "A2/B"));
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 3, ""},
+      {0, "A", "normal", 3, "A"},
+      {0, "A/B", "normal", 3, "A/B"},
+      {0, "A/B/f", "normal", 3, "A/B/f"},
+      {0, "A/B/g", "normal", 3, "A/B/g"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/f", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/g", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 3, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 3, "A/B", MOVED_HERE},
+      {1, "A2/B/f", "normal", 3, "A/B/f", MOVED_HERE},
+      {1, "A2/B/g", "normal", 3, "A/B/g", MOVED_HERE},
+      {2, "A2/B", "base-deleted", NO_COPY_FROM},
+      {2, "A2/B/f", "base-deleted", NO_COPY_FROM},
+      {2, "A2/B/g", "base-deleted", NO_COPY_FROM},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Update backwards to r2; the local delete survives the resolve. */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty,
+                          svn_wc_conflict_choose_mine_conflict));
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 2, ""},
+      {0, "A", "normal", 2, "A"},
+      {0, "A/B", "normal", 2, "A/B"},
+      {0, "A/B/f", "normal", 2, "A/B/f"},
+      {0, "A/B/h", "normal", 2, "A/B/h"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/f", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/h", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 2, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 2, "A/B", MOVED_HERE},
+      {1, "A2/B/f", "normal", 2, "A/B/f", MOVED_HERE},
+      {1, "A2/B/h", "normal", 2, "A/B/h", MOVED_HERE},
+      {2, "A2/B", "base-deleted", NO_COPY_FROM},
+      {2, "A2/B/f", "base-deleted", NO_COPY_FROM},
+      {2, "A2/B/h", "base-deleted", NO_COPY_FROM},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Replace the deleted A2/B with a copy of X (at r4). */
+  SVN_ERR(sbox_wc_update(&b, "X", 4));
+  SVN_ERR(sbox_wc_copy(&b, "X", "A2/B"));
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 2, ""},
+      {0, "A", "normal", 2, "A"},
+      {0, "A/B", "normal", 2, "A/B"},
+      {0, "A/B/f", "normal", 2, "A/B/f"},
+      {0, "A/B/h", "normal", 2, "A/B/h"},
+      {0, "X", "normal", 4, "X"},
+      {0, "X/f", "normal", 4, "X/f"},
+      {0, "X/g", "normal", 4, "X/g"},
+      {0, "X/h", "normal", 4, "X/h"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/f", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/h", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 2, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 2, "A/B", MOVED_HERE},
+      {1, "A2/B/f", "normal", 2, "A/B/f", MOVED_HERE},
+      {1, "A2/B/h", "normal", 2, "A/B/h", MOVED_HERE},
+      {2, "A2/B", "normal", 4, "X"},
+      {2, "A2/B/f", "normal", 4, "X/f"},
+      {2, "A2/B/g", "normal", 4, "X/g"},
+      {2, "A2/B/h", "normal", 4, "X/h"},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Updating to r4 and resolving bumps the move to r4; the local
+     replacement copy of B is retained on top. */
+  SVN_ERR(sbox_wc_update(&b, "", 4));
+  SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty,
+                          svn_wc_conflict_choose_mine_conflict));
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 4, ""},
+      {0, "A", "normal", 4, "A"},
+      {0, "A/B", "normal", 4, "A/B"},
+      {0, "A/B/f", "normal", 4, "A/B/f"},
+      {0, "A/B/g", "normal", 4, "A/B/g"},
+      {0, "X", "normal", 4, "X"},
+      {0, "X/f", "normal", 4, "X/f"},
+      {0, "X/g", "normal", 4, "X/g"},
+      {0, "X/h", "normal", 4, "X/h"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/f", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/g", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 4, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 4, "A/B", MOVED_HERE},
+      {1, "A2/B/f", "normal", 4, "A/B/f", MOVED_HERE},
+      {1, "A2/B/g", "normal", 4, "A/B/g", MOVED_HERE},
+      {2, "A2/B", "normal", 4, "X"},
+      {2, "A2/B/f", "normal", 4, "X/f"},
+      {2, "A2/B/g", "normal", 4, "X/g"},
+      {2, "A2/B/h", "normal", 4, "X/h"},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* r5 adds A/B/C; after resolve it is moved-here under A2/B but
+     shadowed (base-deleted) by the local replacement of B. */
+  SVN_ERR(sbox_wc_update(&b, "", 5));
+  SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty,
+                          svn_wc_conflict_choose_mine_conflict));
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 5, ""},
+      {0, "A", "normal", 5, "A"},
+      {0, "A/B", "normal", 5, "A/B"},
+      {0, "A/B/f", "normal", 5, "A/B/f"},
+      {0, "A/B/g", "normal", 5, "A/B/g"},
+      {0, "A/B/C", "normal", 5, "A/B/C"},
+      {0, "X", "normal", 5, "X"},
+      {0, "X/f", "normal", 5, "X/f"},
+      {0, "X/g", "normal", 5, "X/g"},
+      {0, "X/h", "normal", 5, "X/h"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/f", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/g", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 5, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 5, "A/B", MOVED_HERE},
+      {1, "A2/B/f", "normal", 5, "A/B/f", MOVED_HERE},
+      {1, "A2/B/g", "normal", 5, "A/B/g", MOVED_HERE},
+      {1, "A2/B/C", "normal", 5, "A/B/C", MOVED_HERE},
+      {2, "A2/B", "normal", 4, "X"},
+      {2, "A2/B/f", "normal", 4, "X/f"},
+      {2, "A2/B/g", "normal", 4, "X/g"},
+      {2, "A2/B/h", "normal", 4, "X/h"},
+      {2, "A2/B/C", "base-deleted", NO_COPY_FROM},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+
+  return SVN_NO_ERROR;
+}
+
+/* Test helper: assert that element I of MOVED_TOS (an array of
+   struct svn_wc__db_moved_to_t *, as returned by
+   svn_wc__db_follow_moved_to) has the given OP_DEPTH and LOCAL_RELPATH.
+   Returns an SVN_ERR_TEST_FAILED error describing the mismatch (or the
+   missing index) otherwise. */
+static svn_error_t *
+check_moved_to(apr_array_header_t *moved_tos,
+               int i,
+               int op_depth,
+               const char *local_relpath)
+{
+  struct svn_wc__db_moved_to_t *moved_to;
+
+  /* Guard against reading past the end of the array. */
+  if (i >= moved_tos->nelts)
+    return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                             "moved-to %d not found", i);
+
+  moved_to = APR_ARRAY_IDX(moved_tos, i, struct svn_wc__db_moved_to_t *);
+
+  /* Both fields must match exactly. */
+  if (moved_to->op_depth != op_depth
+      || strcmp(moved_to->local_relpath, local_relpath))
+    return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                             "expected: {%d '%s'} found[%d]: {%d '%s'}",
+                             op_depth, local_relpath, i,
+                             moved_to->op_depth, moved_to->local_relpath);
+
+  return SVN_NO_ERROR;
+}
+
+/* Build a chain of overlapping moves and verify that
+   svn_wc__db_scan_deletion() reports, for each deleted source path, the
+   nearest moved-to path and the op-root of that move, and that
+   svn_wc__db_follow_moved_to() returns the full multi-step move chain
+   for a path moved more than once. */
+static svn_error_t *
+test_scan_delete(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+  const char *moved_to_abspath, *moved_to_op_root_abspath;
+  apr_array_header_t *moved_tos;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "scan_delete", opts, pool));
+
+  /* r1: A/B/C, A2/B and C2. */
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A2"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A2/B"));
+  SVN_ERR(sbox_wc_mkdir(&b, "C2"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+
+  /* Chain the moves so several destinations are themselves replaced:
+     A2->X, X/B->Z, A/B->X/B, X/B/C->Y, C2->X/B/C. */
+  SVN_ERR(sbox_wc_move(&b, "A2", "X"));
+  SVN_ERR(sbox_wc_move(&b, "X/B", "Z"));
+  SVN_ERR(sbox_wc_move(&b, "A/B", "X/B"));
+  SVN_ERR(sbox_wc_move(&b, "X/B/C", "Y"));
+  SVN_ERR(sbox_wc_move(&b, "C2", "X/B/C"));
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {0, "A/B/C", "normal", 1, "A/B/C"},
+      {0, "A2", "normal", 1, "A2"},
+      {0, "A2/B", "normal", 1, "A2/B"},
+      {0, "C2", "normal", 1, "C2"},
+      {1, "A2", "base-deleted", NO_COPY_FROM, "X"},
+      {1, "A2/B", "base-deleted", NO_COPY_FROM},
+      {1, "Z", "normal", 1, "A2/B", MOVED_HERE},
+      {1, "X", "normal", 1, "A2", MOVED_HERE},
+      {1, "X/B", "normal", 1, "A2/B", MOVED_HERE},
+      {2, "A/B", "base-deleted", NO_COPY_FROM, "X/B"},
+      {2, "A/B/C", "base-deleted", NO_COPY_FROM},
+      {2, "X/B", "normal", 1, "A/B", FALSE, "Z", TRUE},
+      {2, "X/B/C", "normal", 1, "A/B/C", MOVED_HERE},
+      {1, "Y", "normal", 1, "A/B/C", MOVED_HERE},
+      {1, "C2", "base-deleted", NO_COPY_FROM, "X/B/C"},
+      {3, "X/B/C", "normal", 1, "C2", FALSE, "Y", TRUE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* C2 moved directly to X/B/C, which is its own op-root. */
+  SVN_ERR(svn_wc__db_scan_deletion(NULL, &moved_to_abspath,
+                                   NULL, &moved_to_op_root_abspath,
+                                   b.wc_ctx->db, sbox_wc_path(&b, "C2"),
+                                   pool, pool));
+  SVN_TEST_STRING_ASSERT(moved_to_abspath, sbox_wc_path(&b, "X/B/C"));
+  SVN_TEST_STRING_ASSERT(moved_to_op_root_abspath, sbox_wc_path(&b, "X/B/C"));
+
+  /* A/B moved directly to X/B. */
+  SVN_ERR(svn_wc__db_scan_deletion(NULL, &moved_to_abspath,
+                                   NULL, &moved_to_op_root_abspath,
+                                   b.wc_ctx->db, sbox_wc_path(&b, "A/B"),
+                                   pool, pool));
+  SVN_TEST_STRING_ASSERT(moved_to_abspath, sbox_wc_path(&b, "X/B"));
+  SVN_TEST_STRING_ASSERT(moved_to_op_root_abspath, sbox_wc_path(&b, "X/B"));
+
+  /* A/B/C was moved twice: with A/B to X/B/C, then on to Y. */
+  SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db,
+                                     sbox_wc_path(&b, "A/B/C"), pool, pool));
+  SVN_ERR(check_moved_to(moved_tos, 0, 2, "X/B/C"));
+  SVN_ERR(check_moved_to(moved_tos, 1, 3, "Y"));
+  SVN_TEST_ASSERT(moved_tos->nelts == 2);
+
+  /* scan_deletion reports only the first hop for A/B/C; its op-root is
+     the parent move X/B. */
+  SVN_ERR(svn_wc__db_scan_deletion(NULL, &moved_to_abspath,
+                                   NULL, &moved_to_op_root_abspath,
+                                   b.wc_ctx->db, sbox_wc_path(&b, "A/B/C"),
+                                   pool, pool));
+  SVN_TEST_STRING_ASSERT(moved_to_abspath, sbox_wc_path(&b, "X/B/C"));
+  SVN_TEST_STRING_ASSERT(moved_to_op_root_abspath, sbox_wc_path(&b, "X/B"));
+
+  /* A2 moved to X; its own move is the op-root. */
+  SVN_ERR(svn_wc__db_scan_deletion(NULL, &moved_to_abspath,
+                                   NULL, &moved_to_op_root_abspath,
+                                   b.wc_ctx->db, sbox_wc_path(&b, "A2"),
+                                   pool, pool));
+  SVN_TEST_STRING_ASSERT(moved_to_abspath, sbox_wc_path(&b, "X"));
+  SVN_TEST_STRING_ASSERT(moved_to_op_root_abspath, sbox_wc_path(&b, "X"));
+
+  /* A2/B moved along with its parent; op-root is the parent's move. */
+  SVN_ERR(svn_wc__db_scan_deletion(NULL, &moved_to_abspath,
+                                   NULL, &moved_to_op_root_abspath,
+                                   b.wc_ctx->db, sbox_wc_path(&b, "A2/B"),
+                                   pool, pool));
+  SVN_TEST_STRING_ASSERT(moved_to_abspath, sbox_wc_path(&b, "X/B"));
+  SVN_TEST_STRING_ASSERT(moved_to_op_root_abspath, sbox_wc_path(&b, "X"));
+
+  return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_follow_moved_to(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ apr_array_header_t *moved_tos;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "follow_moved_to", opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A1"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A1/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A1/B/C"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A1/B/C/D"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A1/B/C/D/E"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A2"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A2/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A2/B/C"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A2/B/C/D"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A2/B/C/D/E"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A3"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A3/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A3/B/C"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A3/B/C/D"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A3/B/C/D/E"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A1", "normal", 1, "A1"},
+ {0, "A1/B", "normal", 1, "A1/B"},
+ {0, "A1/B/C", "normal", 1, "A1/B/C"},
+ {0, "A1/B/C/D", "normal", 1, "A1/B/C/D"},
+ {0, "A1/B/C/D/E", "normal", 1, "A1/B/C/D/E"},
+ {0, "A2", "normal", 1, "A2"},
+ {0, "A2/B", "normal", 1, "A2/B"},
+ {0, "A2/B/C", "normal", 1, "A2/B/C"},
+ {0, "A2/B/C/D", "normal", 1, "A2/B/C/D"},
+ {0, "A2/B/C/D/E", "normal", 1, "A2/B/C/D/E"},
+ {0, "A3", "normal", 1, "A3"},
+ {0, "A3/B", "normal", 1, "A3/B"},
+ {0, "A3/B/C", "normal", 1, "A3/B/C"},
+ {0, "A3/B/C/D", "normal", 1, "A3/B/C/D"},
+ {0, "A3/B/C/D/E", "normal", 1, "A3/B/C/D/E"},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(sbox_wc_move(&b, "A1", "X"));
+ SVN_ERR(sbox_wc_move(&b, "A2", "A1"));
+ SVN_ERR(sbox_wc_move(&b, "A3", "A2"));
+ SVN_ERR(sbox_wc_move(&b, "X", "A3"));
+ SVN_ERR(sbox_wc_move(&b, "A1/B", "X"));
+ SVN_ERR(sbox_wc_move(&b, "A2/B", "A1/B"));
+ SVN_ERR(sbox_wc_move(&b, "A3/B", "A2/B"));
+ SVN_ERR(sbox_wc_move(&b, "X", "A3/B"));
+ SVN_ERR(sbox_wc_move(&b, "A1/B/C/D", "X"));
+ SVN_ERR(sbox_wc_move(&b, "A2/B/C/D", "A1/B/C/D"));
+ SVN_ERR(sbox_wc_move(&b, "A3/B/C/D", "A2/B/C/D"));
+ SVN_ERR(sbox_wc_move(&b, "X", "A3/B/C/D"));
+ SVN_ERR(sbox_wc_move(&b, "A1/B/C/D/E", "X"));
+ SVN_ERR(sbox_wc_move(&b, "A2/B/C/D/E", "A1/B/C/D/E"));
+ SVN_ERR(sbox_wc_move(&b, "A3/B/C/D/E", "A2/B/C/D/E"));
+ SVN_ERR(sbox_wc_move(&b, "X", "A3/B/C/D/E"));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A1", "normal", 1, "A1"},
+ {0, "A1/B", "normal", 1, "A1/B"},
+ {0, "A1/B/C", "normal", 1, "A1/B/C"},
+ {0, "A1/B/C/D", "normal", 1, "A1/B/C/D"},
+ {0, "A1/B/C/D/E", "normal", 1, "A1/B/C/D/E"},
+ {0, "A2", "normal", 1, "A2"},
+ {0, "A2/B", "normal", 1, "A2/B"},
+ {0, "A2/B/C", "normal", 1, "A2/B/C"},
+ {0, "A2/B/C/D", "normal", 1, "A2/B/C/D"},
+ {0, "A2/B/C/D/E", "normal", 1, "A2/B/C/D/E"},
+ {0, "A3", "normal", 1, "A3"},
+ {0, "A3/B", "normal", 1, "A3/B"},
+ {0, "A3/B/C", "normal", 1, "A3/B/C"},
+ {0, "A3/B/C/D", "normal", 1, "A3/B/C/D"},
+ {0, "A3/B/C/D/E", "normal", 1, "A3/B/C/D/E"},
+
+ {1, "A1", "normal", 1, "A2", FALSE, "A3", TRUE},
+ {1, "A1/B", "normal", 1, "A2/B", MOVED_HERE},
+ {1, "A1/B/C", "normal", 1, "A2/B/C", MOVED_HERE},
+ {1, "A1/B/C/D", "normal", 1, "A2/B/C/D", MOVED_HERE},
+ {1, "A1/B/C/D/E", "normal", 1, "A2/B/C/D/E", MOVED_HERE},
+
+ {1, "A2", "normal", 1, "A3", FALSE, "A1", TRUE},
+ {1, "A2/B", "normal", 1, "A3/B", MOVED_HERE},
+ {1, "A2/B/C", "normal", 1, "A3/B/C", MOVED_HERE},
+ {1, "A2/B/C/D", "normal", 1, "A3/B/C/D", MOVED_HERE},
+ {1, "A2/B/C/D/E", "normal", 1, "A3/B/C/D/E", MOVED_HERE},
+
+ {1, "A3", "normal", 1, "A1", FALSE, "A2", TRUE},
+ {1, "A3/B", "normal", 1, "A1/B", MOVED_HERE},
+ {1, "A3/B/C", "normal", 1, "A1/B/C", MOVED_HERE},
+ {1, "A3/B/C/D", "normal", 1, "A1/B/C/D", MOVED_HERE},
+ {1, "A3/B/C/D/E", "normal", 1, "A1/B/C/D/E", MOVED_HERE},
+
+ {2, "A1/B", "normal", 1, "A3/B", FALSE, "A3/B", TRUE},
+ {2, "A1/B/C", "normal", 1, "A3/B/C", MOVED_HERE},
+ {2, "A1/B/C/D", "normal", 1, "A3/B/C/D", MOVED_HERE},
+ {2, "A1/B/C/D/E", "normal", 1, "A3/B/C/D/E", MOVED_HERE},
+
+ {2, "A2/B", "normal", 1, "A1/B", FALSE, "A1/B", TRUE},
+ {2, "A2/B/C", "normal", 1, "A1/B/C", MOVED_HERE},
+ {2, "A2/B/C/D", "normal", 1, "A1/B/C/D", MOVED_HERE},
+ {2, "A2/B/C/D/E", "normal", 1, "A1/B/C/D/E", MOVED_HERE},
+
+ {2, "A3/B", "normal", 1, "A2/B", FALSE, "A2/B", TRUE},
+ {2, "A3/B/C", "normal", 1, "A2/B/C", MOVED_HERE},
+ {2, "A3/B/C/D", "normal", 1, "A2/B/C/D", MOVED_HERE},
+ {2, "A3/B/C/D/E", "normal", 1, "A2/B/C/D/E", MOVED_HERE},
+
+ {4, "A1/B/C/D", "normal", 1, "A1/B/C/D", FALSE, "A3/B/C/D", TRUE},
+ {4, "A1/B/C/D/E", "normal", 1, "A1/B/C/D/E", MOVED_HERE},
+
+ {4, "A2/B/C/D", "normal", 1, "A2/B/C/D", FALSE, "A1/B/C/D", TRUE},
+ {4, "A2/B/C/D/E", "normal", 1, "A2/B/C/D/E", MOVED_HERE},
+
+ {4, "A3/B/C/D", "normal", 1, "A3/B/C/D", FALSE, "A2/B/C/D", TRUE},
+ {4, "A3/B/C/D/E", "normal", 1, "A3/B/C/D/E", MOVED_HERE},
+
+ {5, "A1/B/C/D/E", "normal", 1, "A2/B/C/D/E", FALSE, "A3/B/C/D/E", TRUE},
+ {5, "A2/B/C/D/E", "normal", 1, "A3/B/C/D/E", FALSE, "A1/B/C/D/E", TRUE},
+ {5, "A3/B/C/D/E", "normal", 1, "A1/B/C/D/E", FALSE, "A2/B/C/D/E", TRUE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* A1->A3, A3/B->A2/B, A2/B/C/D->A1/B/C/D, A1/B/C/D/E->A3/B/C/D/E */
+ SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db,
+ sbox_wc_path(&b, "A1"), pool, pool));
+ SVN_ERR(check_moved_to(moved_tos, 0, 1, "A3"));
+ SVN_TEST_ASSERT(moved_tos->nelts == 1);
+
+ SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db,
+ sbox_wc_path(&b, "A1/B"), pool, pool));
+ SVN_ERR(check_moved_to(moved_tos, 0, 1, "A3/B"));
+ SVN_ERR(check_moved_to(moved_tos, 1, 2, "A2/B"));
+ SVN_TEST_ASSERT(moved_tos->nelts == 2);
+
+ SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db,
+ sbox_wc_path(&b, "A1/B/C"), pool, pool));
+ SVN_ERR(check_moved_to(moved_tos, 0, 1, "A3/B/C"));
+ SVN_ERR(check_moved_to(moved_tos, 1, 2, "A2/B/C"));
+ SVN_TEST_ASSERT(moved_tos->nelts == 2);
+
+ SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db,
+ sbox_wc_path(&b, "A1/B/C/D"), pool, pool));
+ SVN_ERR(check_moved_to(moved_tos, 0, 1, "A3/B/C/D"));
+ SVN_ERR(check_moved_to(moved_tos, 1, 2, "A2/B/C/D"));
+ SVN_ERR(check_moved_to(moved_tos, 2, 4, "A1/B/C/D"));
+ SVN_TEST_ASSERT(moved_tos->nelts == 3);
+
+ SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db,
+ sbox_wc_path(&b, "A1/B/C/D/E"), pool, pool));
+ SVN_ERR(check_moved_to(moved_tos, 0, 1, "A3/B/C/D/E"));
+ SVN_ERR(check_moved_to(moved_tos, 1, 2, "A2/B/C/D/E"));
+ SVN_ERR(check_moved_to(moved_tos, 2, 4, "A1/B/C/D/E"));
+ SVN_ERR(check_moved_to(moved_tos, 3, 5, "A3/B/C/D/E"));
+ SVN_TEST_ASSERT(moved_tos->nelts == 4);
+
+ SVN_ERR(sbox_wc_delete(&b, "A3/B/C/D/E"));
+ SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db,
+ sbox_wc_path(&b, "A1/B/C/D/E"), pool, pool));
+ SVN_ERR(check_moved_to(moved_tos, 0, 1, "A3/B/C/D/E"));
+ SVN_ERR(check_moved_to(moved_tos, 1, 2, "A2/B/C/D/E"));
+ SVN_ERR(check_moved_to(moved_tos, 2, 4, "A1/B/C/D/E"));
+ SVN_TEST_ASSERT(moved_tos->nelts == 3);
+
+ return SVN_NO_ERROR;
+}
+
+/* Test moving a mixed-revision tree.  Each level of the tree is committed
+   in a different revision (r1..r3), then the whole tree is moved.  Since
+   the move uses the 1.7-compatibility allow_mixed_revisions=TRUE path it
+   is transformed into copy+delete (see the in-line note below), and this
+   test pins both the resulting NODES rows and what
+   svn_wc__db_follow_moved_to() reports for such a transformed move. */
+static svn_error_t *
+mixed_rev_move(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+  apr_array_header_t *moved_tos;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "mixed_rev_move", opts, pool));
+
+  /* r1: A; r2: A/B and A/D; r3: A/B/C and A/D/E -- deeper nodes carry
+     higher committed revisions, producing the mixed-revision BASE. */
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/D"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/D/E"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 0, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 2, "A/B"},
+      {0, "A/B/C", "normal", 3, "A/B/C"},
+      {0, "A/D", "normal", 2, "A/D"},
+      {0, "A/D/E", "normal", 3, "A/D/E"},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* We don't allow mixed-rev move in 1.8 and the command line client
+     will return an error, but for compatibility with 1.7 move has an
+     allow_mixed_revisions=TRUE flag which is being used here so the
+     move transforms automatically into copy+delete. This test was
+     written before that transforming was implemented so still expects
+     some move information. */
+  SVN_ERR(sbox_wc_move(&b, "A", "X"));
+
+  /* Only the r1 root is copied at op-depth 1; children from other
+     revisions appear there as "not-present" stubs, completed by copies
+     at their own op-depths (2 and 3). */
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 0, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 2, "A/B"},
+      {0, "A/B/C", "normal", 3, "A/B/C"},
+      {0, "A/D", "normal", 2, "A/D"},
+      {0, "A/D/E", "normal", 3, "A/D/E"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "X"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+      {1, "A/D", "base-deleted", NO_COPY_FROM},
+      {1, "A/D/E", "base-deleted", NO_COPY_FROM},
+      {1, "X", "normal", 1, "A", MOVED_HERE},
+      {1, "X/B", "not-present", 2, "A/B"},
+      {1, "X/D", "not-present", 2, "A/D"},
+      {2, "X/B", "normal", 2, "A/B"},
+      {2, "X/B/C", "not-present", 3, "A/B/C"},
+      {2, "X/D", "normal", 2, "A/D"},
+      {2, "X/D/E", "not-present", 3, "A/D/E"},
+      {3, "X/B/C", "normal", 3, "A/B/C"},
+      {3, "X/D/E", "normal", 3, "A/D/E"},
+
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* ### These values PASS but I'm not sure they are correct. */
+  /* A/B/C doesn't exist as X/B/C at op depth 1, but is reported */
+  SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db,
+                                     sbox_wc_path(&b, "A/B/C"), pool, pool));
+  SVN_ERR(check_moved_to(moved_tos, 0, 1, "X/B/C"));
+  SVN_TEST_ASSERT(moved_tos->nelts == 1);
+
+  SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db,
+                                     sbox_wc_path(&b, "A/B"), pool, pool));
+  SVN_ERR(check_moved_to(moved_tos, 0, 1, "X/B"));
+  SVN_TEST_ASSERT(moved_tos->nelts == 1);
+
+  SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db,
+                                     sbox_wc_path(&b, "A"), pool, pool));
+  SVN_ERR(check_moved_to(moved_tos, 0, 1, "X"));
+  SVN_TEST_ASSERT(moved_tos->nelts == 1);
+
+
+  /* This move doesn't record moved-to */
+  SVN_ERR(sbox_wc_move(&b, "X/B", "X/Y"));
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 0, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 2, "A/B"},
+      {0, "A/B/C", "normal", 3, "A/B/C"},
+      {0, "A/D", "normal", 2, "A/D"},
+      {0, "A/D/E", "normal", 3, "A/D/E"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "X"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+      {1, "A/D", "base-deleted", NO_COPY_FROM},
+      {1, "A/D/E", "base-deleted", NO_COPY_FROM},
+      {1, "X", "normal", 1, "A", MOVED_HERE},
+      {1, "X/B", "not-present", 2, "A/B"},
+      {1, "X/D", "not-present", 2, "A/D"},
+      {2, "X/D", "normal", 2, "A/D"},
+      {2, "X/D/E", "not-present", 3, "A/D/E"},
+      {2, "X/Y", "normal", 2, "A/B"},
+      {2, "X/Y/C", "not-present", NO_COPY_FROM},
+      {3, "X/Y/C", "normal", 3, "A/B/C"},
+      {3, "X/D/E", "normal", 3, "A/D/E"},
+
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* A/B/C still doesn't exist as X/B/C at op depth 1 */
+  SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db,
+                                     sbox_wc_path(&b, "A/B/C"), pool, pool));
+  SVN_ERR(check_moved_to(moved_tos, 0, 1, "X/B/C"));
+  SVN_TEST_ASSERT(moved_tos->nelts == 1);
+
+  /* A/B doesn't exist exist as X/B and the move to Y can't be tracked in
+     the current scheme */
+  SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db,
+                                     sbox_wc_path(&b, "A/B"), pool, pool));
+  SVN_ERR(check_moved_to(moved_tos, 0, 1, "X/B"));
+  SVN_TEST_ASSERT(moved_tos->nelts == 1);
+
+  SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db,
+                                     sbox_wc_path(&b, "A"), pool, pool));
+  SVN_ERR(check_moved_to(moved_tos, 0, 1, "X"));
+  SVN_TEST_ASSERT(moved_tos->nelts == 1);
+
+
+  SVN_ERR(sbox_wc_mkdir(&b, "Z"));
+  SVN_ERR(sbox_wc_commit(&b, "Z")); /* r4 */
+
+  SVN_ERR(sbox_wc_update(&b, "", 4));
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 4, ""},
+      {0, "A", "normal", 4, "A"},
+      {0, "A/B", "normal", 4, "A/B"},
+      {0, "A/B/C", "normal", 4, "A/B/C"},
+      {0, "A/D", "normal", 4, "A/D"},
+      {0, "A/D/E", "normal", 4, "A/D/E"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "X"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+      {1, "A/D", "base-deleted", NO_COPY_FROM},
+      {1, "A/D/E", "base-deleted", NO_COPY_FROM},
+      /* X is expanded on update. The not-present nodes are now here */
+      {1, "X", "normal", 4, "A", MOVED_HERE},
+      {1, "X/B", "normal", 4, "A/B", MOVED_HERE},
+      {1, "X/B/C", "normal", 4, "A/B/C", MOVED_HERE},
+      {1, "X/D", "normal", 4, "A/D", MOVED_HERE},
+      {1, "X/D/E", "normal", 4, "A/D/E", MOVED_HERE},
+      {2, "X/D", "normal", 2, "A/D"},
+      {2, "X/D/E", "not-present", 3, "A/D/E"},
+      {2, "X/Y", "normal", 2, "A/B"},
+      {2, "X/Y/C", "not-present", NO_COPY_FROM},
+      {3, "X/D/E", "normal", 3, "A/D/E"},
+      {3, "X/Y/C", "normal", 3, "A/B/C"},
+
+      {0, "Z", "normal", 4, "Z"},
+      {0}
+    };
+
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db,
+                                     sbox_wc_path(&b, "A/B/C"), pool, pool));
+  SVN_ERR(check_moved_to(moved_tos, 0, 1, "X/B/C"));
+  SVN_TEST_ASSERT(moved_tos->nelts == 1);
+
+  SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db,
+                                     sbox_wc_path(&b, "A/B"), pool, pool));
+  SVN_ERR(check_moved_to(moved_tos, 0, 1, "X/B"));
+  SVN_TEST_ASSERT(moved_tos->nelts == 1);
+
+  SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db,
+                                     sbox_wc_path(&b, "A"), pool, pool));
+  SVN_ERR(check_moved_to(moved_tos, 0, 1, "X"));
+  SVN_TEST_ASSERT(moved_tos->nelts == 1);
+
+  {
+    conflict_info_t conflicts[] = {
+      { "X/D", FALSE, FALSE, {0 /* ### Needs fixing */} },
+      {0}
+    };
+
+    SVN_ERR(check_db_conflicts(&b, "", conflicts));
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test the result of 'update' when the incoming changes are inside a
+ * directory that is locally moved.  An incoming r2 text/prop change on a
+ * file under the moved-away directory must raise a tree conflict on the
+ * move source, and resolving with 'mine-conflict' must replay the change
+ * onto the move destination. */
+static svn_error_t *
+update_prop_mod_into_moved(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "update_prop_mod_into_moved", opts, pool));
+
+  /* r1: Create files 'f', 'h' */
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_file_write(&b, "A/B/f", "r1 content\n"));
+  SVN_ERR(sbox_file_write(&b, "A/B/h", "r1 content\n"));
+  SVN_ERR(sbox_wc_add(&b, "A/B/f"));
+  SVN_ERR(sbox_wc_add(&b, "A/B/h"));
+  /* Prop names encode their fate in r2: pd=deleted, pn=unchanged,
+     pm=modified (pa is added in r2). */
+  SVN_ERR(sbox_wc_propset(&b, "pd", "f1", "A/B/f"));
+  SVN_ERR(sbox_wc_propset(&b, "pn", "f1", "A/B/f"));
+  SVN_ERR(sbox_wc_propset(&b, "pm", "f1", "A/B/f"));
+  SVN_ERR(sbox_wc_propset(&b, "p", "h1", "A/B/h"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+
+  /* r2: Modify 'f'. Delete prop 'pd', modify prop 'pm', add prop 'pa',
+   * leave prop 'pn' unchanged. */
+  SVN_ERR(sbox_file_write(&b, "A/B/f", "r1 content\nr2 content\n"));
+  SVN_ERR(sbox_wc_propset(&b, "pd", NULL, "A/B/f"));
+  SVN_ERR(sbox_wc_propset(&b, "pm", "f2", "A/B/f"));
+  SVN_ERR(sbox_wc_propset(&b, "pa", "f2", "A/B/f"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+
+  /* r3: Delete 'h', add 'g' */
+  SVN_ERR(sbox_file_write(&b, "A/B/g", "r3 content\n"));
+  SVN_ERR(sbox_wc_add(&b, "A/B/g"));
+  SVN_ERR(sbox_wc_propset(&b, "p", "g3", "A/B/g"));
+  SVN_ERR(sbox_wc_delete(&b, "A/B/h"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {0, "A/B/f", "normal", 1, "A/B/f", NOT_MOVED, "pd,pm,pn"},
+      {0, "A/B/h", "normal", 1, "A/B/h", NOT_MOVED, "p"},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* A is single-revision so A2 is a single-revision copy */
+  SVN_ERR(sbox_wc_move(&b, "A", "A2"));
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {0, "A/B/f", "normal", 1, "A/B/f", NOT_MOVED, "pd,pm,pn"},
+      {0, "A/B/h", "normal", 1, "A/B/h", NOT_MOVED, "p"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/f", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/h", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 1, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+      {1, "A2/B/f", "normal", 1, "A/B/f", MOVED_HERE, "pd,pm,pn"},
+      {1, "A2/B/h", "normal", 1, "A/B/h", MOVED_HERE, "p"},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Update causes a tree-conflict on A due to incoming text-change. */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 2, ""},
+      {0, "A", "normal", 2, "A"},
+      {0, "A/B", "normal", 2, "A/B"},
+      {0, "A/B/f", "normal", 2, "A/B/f", NOT_MOVED, "pa,pm,pn"},
+      {0, "A/B/h", "normal", 2, "A/B/h", NOT_MOVED, "p"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/f", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/h", "base-deleted", NO_COPY_FROM},
+      /* The moved-here tree still reflects r1 until the conflict is
+         resolved below. */
+      {1, "A2", "normal", 1, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+      {1, "A2/B/f", "normal", 1, "A/B/f", MOVED_HERE, "pd,pm,pn"},
+      {1, "A2/B/h", "normal", 1, "A/B/h", MOVED_HERE, "p"},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  {
+    conflict_info_t conflicts[] = {
+      { "A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                            svn_wc_conflict_reason_moved_away, "A"}},
+      {0}
+    };
+
+    SVN_ERR(check_db_conflicts(&b, "", conflicts));
+  }
+
+  /* Resolve should update the move. */
+  SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty,
+                          svn_wc_conflict_choose_mine_conflict));
+
+  SVN_ERR(check_db_conflicts(&b, "", NULL));
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 2, ""},
+      {0, "A", "normal", 2, "A"},
+      {0, "A/B", "normal", 2, "A/B"},
+      {0, "A/B/f", "normal", 2, "A/B/f", NOT_MOVED, "pa,pm,pn"},
+      {0, "A/B/h", "normal", 2, "A/B/h", NOT_MOVED, "p"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/f", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/h", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 2, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 2, "A/B", MOVED_HERE},
+      {1, "A2/B/f", "normal", 2, "A/B/f", MOVED_HERE, "pa,pm,pn"},
+      {1, "A2/B/h", "normal", 2, "A/B/h", MOVED_HERE, "p"},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test updating a working copy that contains a nested move: A is moved
+   to A2 and, inside the destination, A2/B/C is moved to A2/B/C2.  An
+   incoming r2 edit must be propagated through both moves after the
+   corresponding tree conflicts are resolved with 'mine-conflict'. */
+static svn_error_t *
+nested_move_update(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "nested_move_update", opts, pool));
+
+  /* r1: Create file 'f' */
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+  SVN_ERR(sbox_file_write(&b, "A/B/C/f", "r1 content\n"));
+  SVN_ERR(sbox_wc_add(&b, "A/B/C/f"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+
+  /* r2: Modify 'f' */
+  SVN_ERR(sbox_file_write(&b, "A/B/C/f", "r1 content\nr2 content\n"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+
+  /* r3: Create 'X' */
+  SVN_ERR(sbox_wc_mkdir(&b, "X"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+
+  /* Outer move at op-depth 1, nested move at op-depth 3. */
+  SVN_ERR(sbox_wc_move(&b, "A", "A2"));
+  SVN_ERR(sbox_wc_move(&b, "A2/B/C", "A2/B/C2"));
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {0, "A/B/C", "normal", 1, "A/B/C"},
+      {0, "A/B/C/f", "normal", 1, "A/B/C/f"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C/f", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 1, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+      {1, "A2/B/C", "normal", 1, "A/B/C", MOVED_HERE},
+      {1, "A2/B/C/f", "normal", 1, "A/B/C/f", MOVED_HERE},
+      {3, "A2/B/C", "base-deleted", NO_COPY_FROM, "A2/B/C2"},
+      {3, "A2/B/C/f", "base-deleted", NO_COPY_FROM},
+      {3, "A2/B/C2", "normal", 1, "A/B/C", MOVED_HERE},
+      {3, "A2/B/C2/f", "normal", 1, "A/B/C/f", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+
+  /* Only the BASE layer is bumped to r2 by the update; the moved layers
+     stay at r1 until the conflicts are resolved. */
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 2, ""},
+      {0, "A", "normal", 2, "A"},
+      {0, "A/B", "normal", 2, "A/B"},
+      {0, "A/B/C", "normal", 2, "A/B/C"},
+      {0, "A/B/C/f", "normal", 2, "A/B/C/f"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C/f", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 1, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+      {1, "A2/B/C", "normal", 1, "A/B/C", MOVED_HERE},
+      {1, "A2/B/C/f", "normal", 1, "A/B/C/f", MOVED_HERE},
+      {3, "A2/B/C", "base-deleted", NO_COPY_FROM, "A2/B/C2"},
+      {3, "A2/B/C/f", "base-deleted", NO_COPY_FROM},
+      {3, "A2/B/C2", "normal", 1, "A/B/C", MOVED_HERE},
+      {3, "A2/B/C2/f", "normal", 1, "A/B/C/f", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Following the A->A2 move should raise a tree-conflict on A2/B/C,
+     resolving that may require an explicit resolve. */
+  SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty,
+                          svn_wc_conflict_choose_mine_conflict));
+  SVN_ERR(sbox_wc_resolve(&b, "A2/B/C", svn_depth_empty,
+                          svn_wc_conflict_choose_mine_conflict));
+  SVN_ERR(check_db_conflicts(&b, "", NULL /* no conflicts */));
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 2, ""},
+      {0, "A", "normal", 2, "A"},
+      {0, "A/B", "normal", 2, "A/B"},
+      {0, "A/B/C", "normal", 2, "A/B/C"},
+      {0, "A/B/C/f", "normal", 2, "A/B/C/f"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C/f", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 2, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 2, "A/B", MOVED_HERE},
+      {1, "A2/B/C", "normal", 2, "A/B/C", MOVED_HERE},
+      {1, "A2/B/C/f", "normal", 2, "A/B/C/f", MOVED_HERE},
+      {3, "A2/B/C", "base-deleted", NO_COPY_FROM, "A2/B/C2"},
+      {3, "A2/B/C/f", "base-deleted", NO_COPY_FROM},
+      {3, "A2/B/C2", "normal", 2, "A/B/C", MOVED_HERE},
+      {3, "A2/B/C2/f", "normal", 2, "A/B/C/f", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Update A to r3 brings no changes but updates the revisions. */
+  SVN_ERR(sbox_wc_update(&b, "A", 3));
+  SVN_ERR(check_db_conflicts(&b, "", NULL /* no conflicts */));
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 2, ""},
+      {0, "A", "normal", 3, "A"},
+      {0, "A/B", "normal", 3, "A/B"},
+      {0, "A/B/C", "normal", 3, "A/B/C"},
+      {0, "A/B/C/f", "normal", 3, "A/B/C/f"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C/f", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 3, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 3, "A/B", MOVED_HERE},
+      {1, "A2/B/C", "normal", 3, "A/B/C", MOVED_HERE},
+      {1, "A2/B/C/f", "normal", 3, "A/B/C/f", MOVED_HERE},
+      {3, "A2/B/C", "base-deleted", NO_COPY_FROM, "A2/B/C2"},
+      {3, "A2/B/C/f", "base-deleted", NO_COPY_FROM},
+      {3, "A2/B/C2", "normal", 3, "A/B/C", MOVED_HERE},
+      {3, "A2/B/C2/f", "normal", 3, "A/B/C/f", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test committing a nested move (A/B/C moved to C2, then A moved to A2)
+   and verify what svn_wc__db_scan_deletion() / svn_wc__db_scan_moved()
+   report for the move source and destination before and after the
+   commit. */
+static svn_error_t *
+nested_move_commit(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "nested_move_commit", opts, pool));
+
+  /* r1: Create file 'f' */
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+  SVN_ERR(sbox_file_write(&b, "A/B/C/f", "r1 content\n"));
+  SVN_ERR(sbox_wc_add(&b, "A/B/C/f"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+
+  SVN_ERR(sbox_wc_move(&b, "A/B/C", "C2"));
+
+  /* Before the second move: A/B/C's deletion records C2 as moved-to... */
+  {
+    const char *moved_to;
+    const char *expected_to;
+    SVN_ERR(svn_wc__db_scan_deletion(NULL, NULL, NULL, &moved_to,
+                                     b.wc_ctx->db, sbox_wc_path(&b, "A/B/C"),
+                                     pool, pool));
+
+    expected_to = sbox_wc_path(&b, "C2");
+
+    if (strcmp(moved_to, expected_to) != 0)
+      return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                               "Expected moved to %s, but was %s",
+                               expected_to, moved_to);
+  }
+  /* ... and C2 records A/B/C as moved-from. */
+  {
+    const char *moved_from;
+    const char *expected_from;
+    SVN_ERR(svn_wc__db_scan_moved(&moved_from, NULL, NULL, NULL,
+                                  b.wc_ctx->db, sbox_wc_path(&b, "C2"),
+                                  pool, pool));
+
+    expected_from = sbox_wc_path(&b, "A/B/C");
+
+    if (strcmp(moved_from, expected_from) != 0)
+      return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                               "Expected moved from %s, but was %s",
+                               expected_from, moved_from);
+  }
+
+  SVN_ERR(sbox_wc_move(&b, "A", "A2"));
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {0, "A/B/C", "normal", 1, "A/B/C"},
+      {0, "A/B/C/f", "normal", 1, "A/B/C/f"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C/f", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 1, "A", MOVED_HERE},
+      {1, "A2/B", "normal", 1, "A/B", MOVED_HERE},
+      {1, "A2/B/C", "normal", 1, "A/B/C", MOVED_HERE},
+      {1, "A2/B/C/f", "normal", 1, "A/B/C/f", MOVED_HERE},
+      {3, "A2/B/C", "base-deleted", NO_COPY_FROM, "C2"},
+      {3, "A2/B/C/f", "base-deleted", NO_COPY_FROM},
+      {1, "C2", "normal", 1, "A/B/C", MOVED_HERE},
+      {1, "C2/f", "normal", 1, "A/B/C/f", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  {
+    const char *moved_to;
+    const char *expected_to;
+    SVN_ERR(svn_wc__db_scan_deletion(NULL, NULL, NULL, &moved_to,
+                                     b.wc_ctx->db, sbox_wc_path(&b, "A/B/C"),
+                                     pool, pool));
+
+    /* A/B/C is part of the A->A2 move. */
+    expected_to = sbox_wc_path(&b, "A2");
+    if (strcmp(moved_to, expected_to) != 0)
+      return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                               "Expected moved to %s, but was %s",
+                               expected_to, moved_to);
+
+    SVN_ERR(svn_wc__db_scan_deletion(NULL, NULL, NULL, &moved_to,
+                                     b.wc_ctx->db, sbox_wc_path(&b, "A2/B/C"),
+                                     pool, pool));
+
+    /* A2/B/C is the A2/B/C->C2 move. */
+    expected_to = sbox_wc_path(&b, "C2");
+    if (strcmp(moved_to, expected_to) != 0)
+      return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                               "Expected moved to %s, but was %s",
+                               expected_to, moved_to);
+  }
+  {
+    const char *moved_from;
+    const char *expected_from;
+    SVN_ERR(svn_wc__db_scan_moved(&moved_from, NULL, NULL, NULL,
+                                  b.wc_ctx->db, sbox_wc_path(&b, "C2"),
+                                  pool, pool));
+
+    /* C2 is the A2/B/C->C2 move. */
+    expected_from = sbox_wc_path(&b, "A2/B/C");
+    if (strcmp(moved_from, expected_from) != 0)
+      return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                               "Expected moved from %s, but was %s",
+                               expected_from, moved_from);
+  }
+
+  /* Commit only the outer move (A and A2, depth empty); the nested
+     A2/B/C->C2 move stays a local modification. */
+  {
+    apr_array_header_t *targets = apr_array_make(pool, 2, sizeof(const char *));
+
+    APR_ARRAY_PUSH(targets, const char *) = sbox_wc_path(&b, "A");
+    APR_ARRAY_PUSH(targets, const char *) = sbox_wc_path(&b, "A2");
+
+    SVN_ERR(sbox_wc_commit_ex(&b, targets, svn_depth_empty));
+  }
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "not-present", 2, "A"},
+      {0, "A2", "normal", 2, "A2"},
+      {0, "A2/B", "normal", 2, "A2/B"},
+      {0, "A2/B/C", "normal", 2, "A2/B/C"},
+      {0, "A2/B/C/f", "normal", 2, "A2/B/C/f"},
+      {3, "A2/B/C", "base-deleted", NO_COPY_FROM, "C2"},
+      {3, "A2/B/C/f", "base-deleted", NO_COPY_FROM},
+
+      /* These need to have their copyfrom information updated */
+      {1, "C2", "normal", 2, "A2/B/C", MOVED_HERE},
+      {1, "C2/f", "normal", 2, "A2/B/C/f", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* After the commit the remaining move is A2/B/C->C2 in both
+     directions. */
+  {
+    const char *moved_to;
+    const char *expected_to;
+    SVN_ERR(svn_wc__db_scan_deletion(NULL, NULL, NULL, &moved_to,
+                                     b.wc_ctx->db, sbox_wc_path(&b, "A2/B/C"),
+                                     pool, pool));
+
+    expected_to = sbox_wc_path(&b, "C2");
+
+    if (strcmp(moved_to, expected_to) != 0)
+      return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                               "Expected moved to %s, but was %s",
+                               expected_to, moved_to);
+  }
+
+  {
+    const char *moved_from;
+    const char *expected_from;
+    SVN_ERR(svn_wc__db_scan_moved(&moved_from, NULL, NULL, NULL,
+                                  b.wc_ctx->db, sbox_wc_path(&b, "C2"),
+                                  pool, pool));
+
+    expected_from = sbox_wc_path(&b, "A2/B/C");
+
+    if (strcmp(moved_from, expected_from) != 0)
+      return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                               "Expected moved from %s, but was %s",
+                               expected_from, moved_from);
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test updating one target ("A") of a three-way move rotation
+   (A->A2, P->A, A2->P) that also has nested moves inside each rotated
+   tree.  The update must bump revisions only for moves whose source
+   lies inside the update target. */
+static svn_error_t *
+nested_move_update2(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "nested_move_update2", opts, pool));
+
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_wc_mkdir(&b, "P"));
+  SVN_ERR(sbox_wc_mkdir(&b, "P/Q"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_mkdir(&b, "X"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+  /* Swap A and P via the temporary name A2, then move a child inside
+     each swapped tree. */
+  SVN_ERR(sbox_wc_move(&b, "A", "A2"));
+  SVN_ERR(sbox_wc_move(&b, "P", "A"));
+  SVN_ERR(sbox_wc_move(&b, "A2", "P"));
+  SVN_ERR(sbox_wc_move(&b, "A/Q", "A/Q2"));
+  SVN_ERR(sbox_wc_move(&b, "P/B", "P/B2"));
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {0, "P", "normal", 1, "P"},
+      {0, "P/Q", "normal", 1, "P/Q"},
+      {1, "A", "normal", 1, "P", FALSE, "P", TRUE},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/Q", "normal", 1, "P/Q", MOVED_HERE},
+      {1, "P", "normal", 1, "A", FALSE, "A", TRUE},
+      {1, "P/Q", "base-deleted", NO_COPY_FROM},
+      {1, "P/B", "normal", 1, "A/B", MOVED_HERE},
+      {2, "A/Q", "base-deleted", NO_COPY_FROM, "A/Q2"},
+      {2, "A/Q2", "normal", 1, "P/Q", MOVED_HERE},
+      {2, "P/B", "base-deleted", NO_COPY_FROM, "P/B2"},
+      {2, "P/B2", "normal", 1, "A/B", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Update A bumps revisions but only for moves originating in A. In
+     particular A/Q to A/Q2 does not get bumped. */
+  SVN_ERR(sbox_wc_update(&b, "A", 2));
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 2, "A"},
+      {0, "A/B", "normal", 2, "A/B"},
+      {0, "P", "normal", 1, "P"},
+      {0, "P/Q", "normal", 1, "P/Q"},
+      {1, "A", "normal", 1, "P", FALSE, "P", TRUE},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/Q", "normal", 1, "P/Q", MOVED_HERE},
+      {1, "P", "normal", 2, "A", FALSE, "A", TRUE},
+      {1, "P/Q", "base-deleted", NO_COPY_FROM},
+      {1, "P/B", "normal", 2, "A/B", MOVED_HERE},
+      {2, "A/Q", "base-deleted", NO_COPY_FROM, "A/Q2"},
+      {2, "A/Q2", "normal", 1, "P/Q", MOVED_HERE},
+      {2, "P/B", "base-deleted", NO_COPY_FROM, "P/B2"},
+      {2, "P/B2", "normal", 2, "A/B", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test helper: assert that WC_PATH (relative to sandbox B) carries a
+   tree conflict, and that the conflict's recorded locations have
+   REPOS_PATH1 as the path-in-repository of location 0 and REPOS_PATH2
+   as that of location 1.  Pass NULL for REPOS_PATH1 and/or REPOS_PATH2
+   to skip checking that location.  Returns an SVN_ERR_TEST_FAILED-style
+   error (via the assertion macros) on mismatch. */
+static svn_error_t *
+check_tree_conflict_repos_path(svn_test__sandbox_t *b,
+                               const char *wc_path,
+                               const char *repos_path1,
+                               const char *repos_path2)
+{
+  svn_skel_t *conflict;
+  svn_wc_operation_t operation;
+  const apr_array_header_t *locations;
+  svn_boolean_t text_conflicted, prop_conflicted, tree_conflicted;
+
+  /* A conflict skel must exist on the node at all. */
+  SVN_ERR(svn_wc__db_read_conflict(&conflict, NULL, NULL,
+                                   b->wc_ctx->db, sbox_wc_path(b, wc_path),
+                                   b->pool, b->pool));
+
+  SVN_TEST_ASSERT(conflict != NULL);
+
+  SVN_ERR(svn_wc__conflict_read_info(&operation, &locations,
+                                     &text_conflicted, &prop_conflicted,
+                                     &tree_conflicted,
+                                     b->wc_ctx->db, b->wc_abspath,
+                                     conflict, b->pool, b->pool));
+
+  /* ...and it must specifically be a tree conflict. */
+  SVN_ERR_ASSERT(tree_conflicted);
+
+  if (repos_path1)
+    {
+      svn_wc_conflict_version_t *version
+        = APR_ARRAY_IDX(locations, 0, svn_wc_conflict_version_t *);
+
+      SVN_TEST_ASSERT(version != NULL);
+
+      SVN_TEST_STRING_ASSERT(version->path_in_repos, repos_path1);
+    }
+
+  if (repos_path2)
+    {
+      svn_wc_conflict_version_t *version
+        = APR_ARRAY_IDX(locations, 1, svn_wc_conflict_version_t *);
+
+      SVN_TEST_ASSERT(version != NULL);
+
+      SVN_TEST_STRING_ASSERT(version->path_in_repos, repos_path2);
+    }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test the repository paths recorded in tree conflicts raised by an
+   update into a switched, locally-moved tree.  The working copy is
+   switched to /X, so the conflict locations must use the X/... repos
+   paths, and an unversioned obstruction (A2/B/F) must yield a conflict
+   with only the second location filled in. */
+static svn_error_t *
+move_update_conflicts(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "move_update_conflicts", opts, pool));
+
+  /* r1: X/A/B/C/D; r2 adds X/A/B/C/D/E and X/A/B/F. */
+  SVN_ERR(sbox_wc_mkdir(&b, "X"));
+  SVN_ERR(sbox_wc_mkdir(&b, "X/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "X/A/B"));
+  SVN_ERR(sbox_wc_mkdir(&b, "X/A/B/C"));
+  SVN_ERR(sbox_wc_mkdir(&b, "X/A/B/C/D"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_mkdir(&b, "X/A/B/C/D/E"));
+  SVN_ERR(sbox_wc_mkdir(&b, "X/A/B/F"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  /* Make the WC root a switch to /X at r1, then move inside it and put
+     an unversioned file where r2 will add X/A/B/F. */
+  SVN_ERR(sbox_wc_switch(&b, "", "/X", svn_depth_infinity));
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+  SVN_ERR(sbox_wc_move(&b, "A", "A2"));
+  SVN_ERR(sbox_wc_move(&b, "A2/B/C", "A2/B/C2"));
+  SVN_ERR(sbox_file_write(&b, "A2/B/F", "obstruction\n"));
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, "X"},
+      {0, "A", "normal", 1, "X/A"},
+      {0, "A/B", "normal", 1, "X/A/B"},
+      {0, "A/B/C", "normal", 1, "X/A/B/C"},
+      {0, "A/B/C/D", "normal", 1, "X/A/B/C/D"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 1, "X/A", MOVED_HERE},
+      {1, "A2/B", "normal", 1, "X/A/B", MOVED_HERE},
+      {1, "A2/B/C", "normal", 1, "X/A/B/C", MOVED_HERE},
+      {1, "A2/B/C/D", "normal", 1, "X/A/B/C/D", MOVED_HERE},
+      {3, "A2/B/C", "base-deleted", NO_COPY_FROM, "A2/B/C2"},
+      {3, "A2/B/C/D", "base-deleted", NO_COPY_FROM},
+      {3, "A2/B/C2", "normal", 1, "X/A/B/C", MOVED_HERE},
+      {3, "A2/B/C2/D", "normal", 1, "X/A/B/C/D", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  SVN_ERR(sbox_wc_update(&b, "A", 2));
+  /* Both conflict locations use the switched (X/...) repository path. */
+  SVN_ERR(check_tree_conflict_repos_path(&b, "A", "X/A", "X/A"));
+  SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty,
+                          svn_wc_conflict_choose_mine_conflict));
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, "X"},
+      {0, "A", "normal", 2, "X/A"},
+      {0, "A/B", "normal", 2, "X/A/B"},
+      {0, "A/B/C", "normal", 2, "X/A/B/C"},
+      {0, "A/B/C/D", "normal", 2, "X/A/B/C/D"},
+      {0, "A/B/C/D/E", "normal", 2, "X/A/B/C/D/E"},
+      {0, "A/B/F", "normal", 2, "X/A/B/F"},
+      {1, "A", "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C/D/E", "base-deleted", NO_COPY_FROM},
+      {1, "A/B/F", "base-deleted", NO_COPY_FROM},
+      {1, "A2", "normal", 2, "X/A", MOVED_HERE},
+      {1, "A2/B", "normal", 2, "X/A/B", MOVED_HERE},
+      {1, "A2/B/C", "normal", 2, "X/A/B/C", MOVED_HERE},
+      {1, "A2/B/C/D", "normal", 2, "X/A/B/C/D", MOVED_HERE},
+      {1, "A2/B/C/D/E", "normal", 2, "X/A/B/C/D/E", MOVED_HERE},
+      {1, "A2/B/F", "normal", 2, "X/A/B/F", MOVED_HERE},
+      {3, "A2/B/C", "base-deleted", NO_COPY_FROM, "A2/B/C2"},
+      {3, "A2/B/C/D", "base-deleted", NO_COPY_FROM},
+      {3, "A2/B/C/D/E", "base-deleted", NO_COPY_FROM},
+      {3, "A2/B/C2", "normal", 1, "X/A/B/C", MOVED_HERE},
+      {3, "A2/B/C2/D", "normal", 1, "X/A/B/C/D", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* The obstructed incoming add A2/B/F has no first (older) location. */
+  SVN_ERR(check_tree_conflict_repos_path(&b, "A2/B/C", "X/A/B/C", "X/A/B/C"));
+  SVN_ERR(check_tree_conflict_repos_path(&b, "A2/B/F", NULL, "X/A/B/F"));
+
+  return SVN_NO_ERROR;
+}
+
+/* Move A/B to B2, locally edit B2/C/f and delete B2/D, then update A to a
+   revision in which A/B/C and A/B/D were deleted.  Verify the NODES rows
+   and that resolving A/B with 'mine-conflict' leaves tree conflicts on
+   B2/C (incoming delete vs. local edit) and B2/D (incoming delete vs.
+   local delete). */
+static svn_error_t *
+move_update_delete_mods(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "move_update_delete_mods", opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/D"));
+ SVN_ERR(sbox_file_write(&b, "A/B/C/f", "r1 content\n"));
+ SVN_ERR(sbox_wc_add(&b, "A/B/C/f"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_delete(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_delete(&b, "A/B/D"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ /* Local changes on the move destination: edit under B2/C, delete B2/D. */
+ SVN_ERR(sbox_wc_move(&b, "A/B", "B2"));
+ SVN_ERR(sbox_file_write(&b, "B2/C/f", "modified content\n"));
+ SVN_ERR(sbox_wc_delete(&b, "B2/D"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {0, "A/B/C/f", "normal", 1, "A/B/C/f"},
+ {0, "A/B/D", "normal", 1, "A/B/D"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM, "B2"},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/C/f", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/D", "base-deleted", NO_COPY_FROM},
+ {1, "B2", "normal", 1, "A/B", MOVED_HERE},
+ {1, "B2/C", "normal", 1, "A/B/C", MOVED_HERE},
+ {1, "B2/C/f", "normal", 1, "A/B/C/f", MOVED_HERE},
+ {1, "B2/D", "normal", 1, "A/B/D", MOVED_HERE},
+ {2, "B2/D", "base-deleted", NO_COPY_FROM},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(sbox_wc_update(&b, "A", 2));
+ SVN_ERR(sbox_wc_resolve(&b, "A/B", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 2, "A"},
+ {0, "A/B", "normal", 2, "A/B"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM, "B2"},
+ {1, "B2", "normal", 2, "A/B", MOVED_HERE},
+ {2, "B2/C", "normal", 1, "A/B/C"},
+ {2, "B2/C/f", "normal", 1, "A/B/C/f"},
+ {0}
+ };
+ /* The incoming deletes land as tree conflicts on the move target. */
+ conflict_info_t conflicts[] = {
+ {"B2/C", FALSE, FALSE, {svn_wc_conflict_action_delete,
+ svn_wc_conflict_reason_edited}},
+ {"B2/D", FALSE, FALSE, {svn_wc_conflict_action_delete,
+ svn_wc_conflict_reason_deleted}},
+ { 0 }
+ };
+
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ SVN_ERR(check_db_conflicts(&b, "", conflicts));
+ }
+
+ SVN_ERR(check_tree_conflict_repos_path(&b, "B2/C", "A/B/C", "A/B/C"));
+ SVN_ERR(check_tree_conflict_repos_path(&b, "B2/D", "A/B/D", "A/B/D"));
+
+ return SVN_NO_ERROR;
+}
+
+/* Three nested moves out of a deep tree (A/A/A/A/A/A -> C, A/A/A/A -> D,
+   A/A -> E), then update A to r2 -- a revision that only added X, so the
+   update is a pure revision bump for A.  Verify that all layers of the
+   nested moves are bumped to r2 while the op-depth structure (moved-to
+   chains at depths 2 and 3) is preserved. */
+static svn_error_t *
+nested_moves2(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "nested_moves2", opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/A/A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/A/A/A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/A/A/A/A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/A/A/A/A/A"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_mkdir(&b, "X"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ /* Innermost move first, then progressively shallower ancestors. */
+ SVN_ERR(sbox_wc_move(&b, "A/A/A/A/A/A", "C"));
+ SVN_ERR(sbox_wc_move(&b, "A/A/A/A", "D"));
+ SVN_ERR(sbox_wc_move(&b, "A/A", "E"));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 0, ""},
+ {0, "X", "normal", 2, "X"},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/A", "normal", 1, "A/A"},
+ {0, "A/A/A", "normal", 1, "A/A/A"},
+ {0, "A/A/A/A", "normal", 1, "A/A/A/A"},
+ {0, "A/A/A/A/A", "normal", 1, "A/A/A/A/A"},
+ {0, "A/A/A/A/A/A", "normal", 1, "A/A/A/A/A/A"},
+ {2, "A/A", "base-deleted", NO_COPY_FROM, "E"},
+ {2, "A/A/A", "base-deleted", NO_COPY_FROM},
+ {2, "A/A/A/A", "base-deleted", NO_COPY_FROM},
+ {2, "A/A/A/A/A", "base-deleted", NO_COPY_FROM},
+ {2, "A/A/A/A/A/A", "base-deleted", NO_COPY_FROM},
+ {1, "E", "normal", 1, "A/A", MOVED_HERE},
+ {1, "E/A", "normal", 1, "A/A/A", MOVED_HERE},
+ {1, "E/A/A", "normal", 1, "A/A/A/A", MOVED_HERE},
+ {1, "E/A/A/A", "normal", 1, "A/A/A/A/A", MOVED_HERE},
+ {1, "E/A/A/A/A", "normal", 1, "A/A/A/A/A/A", MOVED_HERE},
+ {3, "E/A/A", "base-deleted", NO_COPY_FROM, "D"},
+ {3, "E/A/A/A", "base-deleted", NO_COPY_FROM},
+ {3, "E/A/A/A/A", "base-deleted", NO_COPY_FROM},
+ {1, "D", "normal", 1, "A/A/A/A", MOVED_HERE},
+ {1, "D/A", "normal", 1, "A/A/A/A/A", MOVED_HERE},
+ {1, "D/A/A", "normal", 1, "A/A/A/A/A/A", MOVED_HERE},
+ {3, "D/A/A", "base-deleted", NO_COPY_FROM, "C"},
+ {1, "C", "normal", 1, "A/A/A/A/A/A", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(sbox_wc_update(&b, "A", 2));
+ {
+ /* Same structure as above, with every copyfrom revision bumped to 2. */
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 0, ""},
+ {0, "X", "normal", 2, "X"},
+ {0, "A", "normal", 2, "A"},
+ {0, "A/A", "normal", 2, "A/A"},
+ {0, "A/A/A", "normal", 2, "A/A/A"},
+ {0, "A/A/A/A", "normal", 2, "A/A/A/A"},
+ {0, "A/A/A/A/A", "normal", 2, "A/A/A/A/A"},
+ {0, "A/A/A/A/A/A", "normal", 2, "A/A/A/A/A/A"},
+ {2, "A/A", "base-deleted", NO_COPY_FROM, "E"},
+ {2, "A/A/A", "base-deleted", NO_COPY_FROM},
+ {2, "A/A/A/A", "base-deleted", NO_COPY_FROM},
+ {2, "A/A/A/A/A", "base-deleted", NO_COPY_FROM},
+ {2, "A/A/A/A/A/A", "base-deleted", NO_COPY_FROM},
+ {1, "E", "normal", 2, "A/A", MOVED_HERE},
+ {1, "E/A", "normal", 2, "A/A/A", MOVED_HERE},
+ {1, "E/A/A", "normal", 2, "A/A/A/A", MOVED_HERE},
+ {1, "E/A/A/A", "normal", 2, "A/A/A/A/A", MOVED_HERE},
+ {1, "E/A/A/A/A", "normal", 2, "A/A/A/A/A/A", MOVED_HERE},
+ {3, "E/A/A", "base-deleted", NO_COPY_FROM, "D"},
+ {3, "E/A/A/A", "base-deleted", NO_COPY_FROM},
+ {3, "E/A/A/A/A", "base-deleted", NO_COPY_FROM},
+ {1, "D", "normal", 2, "A/A/A/A", MOVED_HERE},
+ {1, "D/A", "normal", 2, "A/A/A/A/A", MOVED_HERE},
+ {1, "D/A/A", "normal", 2, "A/A/A/A/A/A", MOVED_HERE},
+ {3, "D/A/A", "base-deleted", NO_COPY_FROM, "C"},
+ {1, "C", "normal", 2, "A/A/A/A/A/A", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Move A/B/C away (to C2) inside a locally deleted parent A/B, then pull
+   in updates r2 and r3 which each add a new descendant under A/B/C.
+   Verifies that resolving with 'mine-conflict' pulls the incoming
+   additions into the move destination C2, that a later update raises a
+   tree conflict on the deleted A/B, and what reverting A/B (depth empty)
+   leaves behind. */
+static svn_error_t *
+move_in_delete(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "move_in_delete", opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/D"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/D/E"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_mkdir(&b, "X"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ /* Move a child out, then delete its (former) parent. */
+ SVN_ERR(sbox_wc_move(&b, "A/B/C", "C2"));
+ SVN_ERR(sbox_wc_delete(&b, "A/B"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM, "C2"},
+ {1, "C2", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(sbox_wc_update(&b, "", 2));
+ SVN_ERR(sbox_wc_resolve(&b, "A/B", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ SVN_ERR(sbox_wc_resolve(&b, "A/B/C", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ {
+ /* The r2 addition A/B/C/D now also appears at the destination C2/D. */
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 2, ""},
+ {0, "A", "normal", 2, "A"},
+ {0, "A/B", "normal", 2, "A/B"},
+ {0, "A/B/C", "normal", 2, "A/B/C"},
+ {0, "A/B/C/D", "normal", 2, "A/B/C/D"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM, "C2"},
+ {2, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {1, "C2", "normal", 2, "A/B/C", MOVED_HERE},
+ {1, "C2/D", "normal", 2, "A/B/C/D", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(sbox_wc_update(&b, "", 3));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 3, ""},
+ {0, "A", "normal", 3, "A"},
+ {0, "A/B", "normal", 3, "A/B"},
+ {0, "A/B/C", "normal", 3, "A/B/C"},
+ {0, "A/B/C/D", "normal", 3, "A/B/C/D"},
+ {0, "A/B/C/D/E", "normal", 3, "A/B/C/D/E"},
+
+ {1, "C2", "normal", 2, "A/B/C", MOVED_HERE},
+ {1, "C2/D", "normal", 2, "A/B/C/D", MOVED_HERE},
+
+ {2, "A/B", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM, "C2"},
+ {2, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/C/D/E", "base-deleted", NO_COPY_FROM},
+
+ {0}
+ };
+ /* Incoming edit on the locally deleted A/B is a tree conflict. */
+ conflict_info_t conflicts[] = {
+ {"A/B", FALSE, FALSE, {svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_deleted}},
+ {0}
+ };
+
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ SVN_ERR(check_db_conflicts(&b, "", conflicts));
+ }
+ SVN_ERR(sbox_wc_revert(&b, "A/B", svn_depth_empty));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 3, ""},
+ {0, "A", "normal", 3, "A"},
+ {0, "A/B", "normal", 3, "A/B"},
+ {0, "A/B/C", "normal", 3, "A/B/C"},
+ {0, "A/B/C/D", "normal", 3, "A/B/C/D"},
+ {0, "A/B/C/D/E", "normal", 3, "A/B/C/D/E"},
+ {3, "A/B/C", "base-deleted", NO_COPY_FROM, "C2"},
+ {3, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {3, "A/B/C/D/E", "base-deleted", NO_COPY_FROM},
+ {1, "C2", "normal", 2, "A/B/C", MOVED_HERE},
+ {1, "C2/D", "normal", 2, "A/B/C/D", MOVED_HERE},
+ {0}
+ };
+ conflict_info_t conflicts[] = {
+ {"A/B/C", FALSE, FALSE, {svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_moved_away, "A/B/C"}},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ /* Where did this conflict come from? */
+ SVN_ERR(check_db_conflicts(&b, "", conflicts));
+ }
+
+ /* Revert should have left a tree-conflict (or broken the move). */
+ SVN_ERR(sbox_wc_resolve(&b, "A/B/C", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ {
+ /* After resolving, C2 is fully bumped to r3 including C2/D/E. */
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 3, ""},
+ {0, "A", "normal", 3, "A"},
+ {0, "A/B", "normal", 3, "A/B"},
+ {0, "A/B/C", "normal", 3, "A/B/C"},
+ {0, "A/B/C/D", "normal", 3, "A/B/C/D"},
+ {0, "A/B/C/D/E", "normal", 3, "A/B/C/D/E"},
+ {3, "A/B/C", "base-deleted", NO_COPY_FROM, "C2"},
+ {3, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {3, "A/B/C/D/E", "base-deleted", NO_COPY_FROM},
+ {1, "C2", "normal", 3, "A/B/C", MOVED_HERE},
+ {1, "C2/D", "normal", 3, "A/B/C/D", MOVED_HERE},
+ {1, "C2/D/E", "normal", 3, "A/B/C/D/E", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ SVN_ERR(check_db_conflicts(&b, "", NULL));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Local moves (B/C -> C2, B/D -> D2, D2/E -> D2/E2) in a working copy
+   switched to /A, followed by a switch to /X -- a copy of A that gained
+   B/D/E/F in r3.  Verifies that switch bumps revisions/paths and raises
+   conflicts just like update, and that resolving with 'mine-conflict'
+   propagates the switched-in content through each move in turn. */
+static svn_error_t *
+switch_move(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "switch_move", opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/D"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/D/E"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_copy(&b, "A", "X"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_mkdir(&b, "X/B/D/E/F"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_switch(&b, "", "/A", svn_depth_infinity));
+ SVN_ERR(sbox_wc_update(&b, "", 2));
+
+ SVN_ERR(sbox_wc_move(&b, "B/C", "C2"));
+ SVN_ERR(sbox_wc_move(&b, "B/D", "D2"));
+ SVN_ERR(sbox_wc_move(&b, "D2/E", "D2/E2"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 2, "A"},
+ {0, "B", "normal", 2, "A/B"},
+ {0, "B/C", "normal", 2, "A/B/C"},
+ {0, "B/D", "normal", 2, "A/B/D"},
+ {0, "B/D/E", "normal", 2, "A/B/D/E"},
+ {2, "B/C", "base-deleted", NO_COPY_FROM, "C2"},
+ {2, "B/D", "base-deleted", NO_COPY_FROM, "D2"},
+ {2, "B/D/E", "base-deleted", NO_COPY_FROM},
+ {1, "C2", "normal", 2, "A/B/C", MOVED_HERE},
+ {1, "D2", "normal", 2, "A/B/D", MOVED_HERE},
+ {1, "D2/E", "normal", 2, "A/B/D/E", MOVED_HERE},
+ {2, "D2/E", "base-deleted", NO_COPY_FROM, "D2/E2"},
+ {2, "D2/E2", "normal", 2, "A/B/D/E", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* Switch "bumps" revisions and paths and raises conflicts just like
+ update. */
+ SVN_ERR(sbox_wc_switch(&b, "", "/X", svn_depth_infinity));
+ {
+ /* C2 is bumped, but D2 (and below) keeps its pre-switch origin until
+ the conflict on B/D is resolved. */
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 3, "X"},
+ {0, "B", "normal", 3, "X/B"},
+ {0, "B/C", "normal", 3, "X/B/C"},
+ {0, "B/D", "normal", 3, "X/B/D"},
+ {0, "B/D/E", "normal", 3, "X/B/D/E"},
+ {0, "B/D/E/F", "normal", 3, "X/B/D/E/F"},
+ {2, "B/C", "base-deleted", NO_COPY_FROM, "C2"},
+ {2, "B/D", "base-deleted", NO_COPY_FROM, "D2"},
+ {2, "B/D/E", "base-deleted", NO_COPY_FROM},
+ {2, "B/D/E/F", "base-deleted", NO_COPY_FROM},
+ {1, "C2", "normal", 3, "X/B/C", MOVED_HERE},
+ {1, "D2", "normal", 2, "A/B/D", MOVED_HERE},
+ {1, "D2/E", "normal", 2, "A/B/D/E", MOVED_HERE},
+ {2, "D2/E", "base-deleted", NO_COPY_FROM, "D2/E2"},
+ {2, "D2/E2", "normal", 2, "A/B/D/E", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* Conflicts from switch are resolved just like those from update. */
+ SVN_ERR(sbox_wc_resolve(&b, "B/D", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 3, "X"},
+ {0, "B", "normal", 3, "X/B"},
+ {0, "B/C", "normal", 3, "X/B/C"},
+ {0, "B/D", "normal", 3, "X/B/D"},
+ {0, "B/D/E", "normal", 3, "X/B/D/E"},
+ {0, "B/D/E/F", "normal", 3, "X/B/D/E/F"},
+ {2, "B/C", "base-deleted", NO_COPY_FROM, "C2"},
+ {2, "B/D", "base-deleted", NO_COPY_FROM, "D2"},
+ {2, "B/D/E", "base-deleted", NO_COPY_FROM},
+ {2, "B/D/E/F", "base-deleted", NO_COPY_FROM},
+ {1, "C2", "normal", 3, "X/B/C", MOVED_HERE},
+ {1, "D2", "normal", 3, "X/B/D", MOVED_HERE},
+ {1, "D2/E", "normal", 3, "X/B/D/E", MOVED_HERE},
+ {1, "D2/E/F", "normal", 3, "X/B/D/E/F", MOVED_HERE},
+ {2, "D2/E", "base-deleted", NO_COPY_FROM, "D2/E2"},
+ {2, "D2/E/F", "base-deleted", NO_COPY_FROM},
+ {2, "D2/E2", "normal", 2, "A/B/D/E", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(sbox_wc_resolve(&b, "D2/E", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 3, "X"},
+ {0, "B", "normal", 3, "X/B"},
+ {0, "B/C", "normal", 3, "X/B/C"},
+ {0, "B/D", "normal", 3, "X/B/D"},
+ {0, "B/D/E", "normal", 3, "X/B/D/E"},
+ {0, "B/D/E/F", "normal", 3, "X/B/D/E/F"},
+ {2, "B/C", "base-deleted", NO_COPY_FROM, "C2"},
+ {2, "B/D", "base-deleted", NO_COPY_FROM, "D2"},
+ {2, "B/D/E", "base-deleted", NO_COPY_FROM},
+ {2, "B/D/E/F", "base-deleted", NO_COPY_FROM},
+ {1, "C2", "normal", 3, "X/B/C", MOVED_HERE},
+ {1, "D2", "normal", 3, "X/B/D", MOVED_HERE},
+ {1, "D2/E", "normal", 3, "X/B/D/E", MOVED_HERE},
+ {1, "D2/E/F", "normal", 3, "X/B/D/E/F", MOVED_HERE},
+ {2, "D2/E", "base-deleted", NO_COPY_FROM, "D2/E2"},
+ {2, "D2/E/F", "base-deleted", NO_COPY_FROM},
+ {2, "D2/E2", "normal", 3, "X/B/D/E", MOVED_HERE},
+ {2, "D2/E2/F", "normal", 3, "X/B/D/E/F", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Swap A and B via a three-way move (A -> X, B -> A, X -> B), producing
+   two replacements that are both moved-here and moved-away.  Then update
+   to r2 (which added B/X in the repository) and resolve B with
+   'mine-conflict'; the incoming child X must follow the move to A/X. */
+static svn_error_t *
+move_replace(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "move_replace", opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "B"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_mkdir(&b, "B/X"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ /* Rotate through a temporary name X to swap A and B. */
+ SVN_ERR(sbox_wc_move(&b, "A", "X"));
+ SVN_ERR(sbox_wc_move(&b, "B", "A"));
+ SVN_ERR(sbox_wc_move(&b, "X", "B"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "B", "normal", 1, "B"},
+ {1, "A", "normal", 1, "B", FALSE, "B", TRUE},
+ {1, "B", "normal", 1, "A", FALSE, "A", TRUE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(sbox_wc_update(&b, "", 2));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 2, ""},
+ {0, "A", "normal", 2, "A"},
+ {0, "B", "normal", 2, "B"},
+ {0, "B/X", "normal", 2, "B/X"},
+ {1, "A", "normal", 1, "B", FALSE, "B", TRUE},
+ {1, "B", "normal", 2, "A", FALSE, "A", TRUE},
+ {1, "B/X", "base-deleted", NO_COPY_FROM},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(sbox_wc_resolve(&b, "B", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ {
+ /* The new child B/X now appears at the move destination as A/X. */
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 2, ""},
+ {0, "A", "normal", 2, "A"},
+ {0, "B", "normal", 2, "B"},
+ {0, "B/X", "normal", 2, "B/X"},
+ {1, "A", "normal", 2, "B", FALSE, "B", TRUE},
+ {1, "A/X", "normal", 2, "B/X", MOVED_HERE},
+ {1, "B", "normal", 2, "A", FALSE, "A", TRUE},
+ {1, "B/X", "base-deleted", NO_COPY_FROM},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Layered moves: A -> X, X/B/C/D/E -> E2, delete X/B/C, C -> X/B/C,
+   X/B/C/D/E -> E3, so one working-copy path (X/B/C/D/E) carries moved-to
+   information at two different op-depths (3 and 5).  Updates of A and C
+   (r2/r3 add properties; r4 is a no-op bump; r5 adds more properties) must
+   route their changes through the correct layer.  Also checks resolve
+   ordering: C cannot be resolved before A's resolution is complete. */
+static svn_error_t *
+layered_moved_to(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ svn_error_t *err;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "layered_moved_to", opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/D"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/D/E"));
+ SVN_ERR(sbox_wc_mkdir(&b, "C"));
+ SVN_ERR(sbox_wc_mkdir(&b, "C/D"));
+ SVN_ERR(sbox_wc_mkdir(&b, "C/D/E"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_propset(&b, "property", "value", "A/B/C/D/E"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_propset(&b, "property", "value", "C/D/E"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_mkdir(&b, "P"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_propset(&b, "property2", "value", "A/B/C/D/E"));
+ SVN_ERR(sbox_wc_propset(&b, "property2", "value", "C/D/E"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ /* Build the layered structure described above. */
+ SVN_ERR(sbox_wc_move(&b, "A", "X"));
+ SVN_ERR(sbox_wc_move(&b, "X/B/C/D/E", "E2"));
+ SVN_ERR(sbox_wc_delete(&b, "X/B/C"));
+ SVN_ERR(sbox_wc_move(&b, "C", "X/B/C"));
+ SVN_ERR(sbox_wc_move(&b, "X/B/C/D/E", "E3"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {0, "A/B/C/D", "normal", 1, "A/B/C/D"},
+ {0, "A/B/C/D/E", "normal", 1, "A/B/C/D/E"},
+ {0, "C", "normal", 1, "C"},
+ {0, "C/D", "normal", 1, "C/D"},
+ {0, "C/D/E", "normal", 1, "C/D/E"},
+ {1, "A", "base-deleted", NO_COPY_FROM, "X"},
+ {1, "A/B", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C/D/E", "base-deleted", NO_COPY_FROM},
+ {1, "C", "base-deleted", NO_COPY_FROM, "X/B/C"},
+ {1, "C/D", "base-deleted", NO_COPY_FROM},
+ {1, "C/D/E", "base-deleted", NO_COPY_FROM},
+ {1, "X", "normal", 1, "A", MOVED_HERE},
+ {1, "X/B", "normal", 1, "A/B", MOVED_HERE},
+ {1, "X/B/C", "normal", 1, "A/B/C", MOVED_HERE},
+ {1, "X/B/C/D", "normal", 1, "A/B/C/D", MOVED_HERE},
+ {1, "X/B/C/D/E", "normal", 1, "A/B/C/D/E", MOVED_HERE},
+ {3, "X/B/C", "normal", 1, "C", MOVED_HERE},
+ {3, "X/B/C/D", "normal", 1, "C/D", MOVED_HERE},
+ {3, "X/B/C/D/E", "normal", 1, "C/D/E", FALSE, "E2", TRUE},
+ {5, "X/B/C/D/E", "base-deleted", NO_COPY_FROM, "E3"},
+ {1, "E2", "normal", 1, "A/B/C/D/E", MOVED_HERE},
+ {1, "E3", "normal", 1, "C/D/E", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* Update A to r2: the property change on A/B/C/D/E must reach E2
+ (the destination of the op-depth 1 layer), not E3. */
+ SVN_ERR(sbox_wc_update(&b, "A", 2));
+ SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ SVN_ERR(sbox_wc_resolve(&b, "X/B/C", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ SVN_ERR(sbox_wc_resolve(&b, "X/B/C/D/E", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 2, "A"},
+ {0, "A/B", "normal", 2, "A/B"},
+ {0, "A/B/C", "normal", 2, "A/B/C"},
+ {0, "A/B/C/D", "normal", 2, "A/B/C/D"},
+ {0, "A/B/C/D/E", "normal", 2, "A/B/C/D/E"},
+ {0, "C", "normal", 1, "C"},
+ {0, "C/D", "normal", 1, "C/D"},
+ {0, "C/D/E", "normal", 1, "C/D/E"},
+ {1, "A", "base-deleted", NO_COPY_FROM, "X"},
+ {1, "A/B", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C/D/E", "base-deleted", NO_COPY_FROM},
+ {1, "C", "base-deleted", NO_COPY_FROM, "X/B/C"},
+ {1, "C/D", "base-deleted", NO_COPY_FROM},
+ {1, "C/D/E", "base-deleted", NO_COPY_FROM},
+ {1, "X", "normal", 2, "A", MOVED_HERE},
+ {1, "X/B", "normal", 2, "A/B", MOVED_HERE},
+ {1, "X/B/C", "normal", 2, "A/B/C", MOVED_HERE},
+ {1, "X/B/C/D", "normal", 2, "A/B/C/D", MOVED_HERE},
+ {1, "X/B/C/D/E", "normal", 2, "A/B/C/D/E", MOVED_HERE},
+ {3, "X/B/C", "normal", 1, "C", MOVED_HERE},
+ {3, "X/B/C/D", "normal", 1, "C/D", MOVED_HERE},
+ {3, "X/B/C/D/E", "normal", 1, "C/D/E", FALSE, "E2", TRUE},
+ {5, "X/B/C/D/E", "base-deleted", NO_COPY_FROM, "E3"},
+ {1, "E2", "normal", 2, "A/B/C/D/E", MOVED_HERE},
+ {1, "E3", "normal", 1, "C/D/E", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* Update C to r3: its property change must flow through the op-depth 3
+ layer into E3, leaving E2 untouched. */
+ SVN_ERR(sbox_wc_update(&b, "C", 3));
+ SVN_ERR(sbox_wc_resolve(&b, "C", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ SVN_ERR(sbox_wc_resolve(&b, "X/B/C/D/E", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 2, "A"},
+ {0, "A/B", "normal", 2, "A/B"},
+ {0, "A/B/C", "normal", 2, "A/B/C"},
+ {0, "A/B/C/D", "normal", 2, "A/B/C/D"},
+ {0, "A/B/C/D/E", "normal", 2, "A/B/C/D/E"},
+ {0, "C", "normal", 3, "C"},
+ {0, "C/D", "normal", 3, "C/D"},
+ {0, "C/D/E", "normal", 3, "C/D/E"},
+ {1, "A", "base-deleted", NO_COPY_FROM, "X"},
+ {1, "A/B", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C/D/E", "base-deleted", NO_COPY_FROM},
+ {1, "C", "base-deleted", NO_COPY_FROM, "X/B/C"},
+ {1, "C/D", "base-deleted", NO_COPY_FROM},
+ {1, "C/D/E", "base-deleted", NO_COPY_FROM},
+ {1, "X", "normal", 2, "A", MOVED_HERE},
+ {1, "X/B", "normal", 2, "A/B", MOVED_HERE},
+ {1, "X/B/C", "normal", 2, "A/B/C", MOVED_HERE},
+ {1, "X/B/C/D", "normal", 2, "A/B/C/D", MOVED_HERE},
+ {1, "X/B/C/D/E", "normal", 2, "A/B/C/D/E", MOVED_HERE},
+ {3, "X/B/C", "normal", 3, "C", MOVED_HERE},
+ {3, "X/B/C/D", "normal", 3, "C/D", MOVED_HERE},
+ {3, "X/B/C/D/E", "normal", 3, "C/D/E", FALSE, "E2", TRUE},
+ {5, "X/B/C/D/E", "base-deleted", NO_COPY_FROM, "E3"},
+ {1, "E2", "normal", 2, "A/B/C/D/E", MOVED_HERE},
+ {1, "E3", "normal", 3, "C/D/E", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* An update with no text/property/tree changes in A, just a revision bump. */
+ SVN_ERR(sbox_wc_update(&b, "A", 4));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 4, "A"},
+ {0, "A/B", "normal", 4, "A/B"},
+ {0, "A/B/C", "normal", 4, "A/B/C"},
+ {0, "A/B/C/D", "normal", 4, "A/B/C/D"},
+ {0, "A/B/C/D/E", "normal", 4, "A/B/C/D/E"},
+ {0, "C", "normal", 3, "C"},
+ {0, "C/D", "normal", 3, "C/D"},
+ {0, "C/D/E", "normal", 3, "C/D/E"},
+ {1, "A", "base-deleted", NO_COPY_FROM, "X"},
+ {1, "A/B", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C/D/E", "base-deleted", NO_COPY_FROM},
+ {1, "C", "base-deleted", NO_COPY_FROM, "X/B/C"},
+ {1, "C/D", "base-deleted", NO_COPY_FROM},
+ {1, "C/D/E", "base-deleted", NO_COPY_FROM},
+ {1, "X", "normal", 4, "A", MOVED_HERE},
+ {1, "X/B", "normal", 4, "A/B", MOVED_HERE},
+ {1, "X/B/C", "normal", 4, "A/B/C", MOVED_HERE},
+ {1, "X/B/C/D", "normal", 4, "A/B/C/D", MOVED_HERE},
+ {1, "X/B/C/D/E", "normal", 4, "A/B/C/D/E", MOVED_HERE},
+ {3, "X/B/C", "normal", 3, "C", MOVED_HERE},
+ {3, "X/B/C/D", "normal", 3, "C/D", MOVED_HERE},
+ {3, "X/B/C/D/E", "normal", 3, "C/D/E", FALSE, "E2", TRUE},
+ {5, "X/B/C/D/E", "base-deleted", NO_COPY_FROM, "E3"},
+ {1, "E2", "normal", 4, "A/B/C/D/E", MOVED_HERE},
+ {1, "E3", "normal", 3, "C/D/E", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* Update for conflicts on A and C */
+ SVN_ERR(sbox_wc_update(&b, "", 5));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 5, ""},
+ {0, "A", "normal", 5, "A"},
+ {0, "A/B", "normal", 5, "A/B"},
+ {0, "A/B/C", "normal", 5, "A/B/C"},
+ {0, "A/B/C/D", "normal", 5, "A/B/C/D"},
+ {0, "A/B/C/D/E", "normal", 5, "A/B/C/D/E"},
+ {0, "P", "normal", 5, "P"},
+ {0, "C", "normal", 5, "C"},
+ {0, "C/D", "normal", 5, "C/D"},
+ {0, "C/D/E", "normal", 5, "C/D/E"},
+ {1, "A", "base-deleted", NO_COPY_FROM, "X"},
+ {1, "A/B", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C/D/E", "base-deleted", NO_COPY_FROM},
+ {1, "C", "base-deleted", NO_COPY_FROM, "X/B/C"},
+ {1, "C/D", "base-deleted", NO_COPY_FROM},
+ {1, "C/D/E", "base-deleted", NO_COPY_FROM},
+ {1, "X", "normal", 4, "A", MOVED_HERE},
+ {1, "X/B", "normal", 4, "A/B", MOVED_HERE},
+ {1, "X/B/C", "normal", 4, "A/B/C", MOVED_HERE},
+ {1, "X/B/C/D", "normal", 4, "A/B/C/D", MOVED_HERE},
+ {1, "X/B/C/D/E", "normal", 4, "A/B/C/D/E", MOVED_HERE},
+ {3, "X/B/C", "normal", 3, "C", MOVED_HERE},
+ {3, "X/B/C/D", "normal", 3, "C/D", MOVED_HERE},
+ {3, "X/B/C/D/E", "normal", 3, "C/D/E", FALSE, "E2", TRUE},
+ {5, "X/B/C/D/E", "base-deleted", NO_COPY_FROM, "E3"},
+ {1, "E2", "normal", 4, "A/B/C/D/E", MOVED_HERE},
+ {1, "E3", "normal", 3, "C/D/E", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* Partially resolve A */
+ SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ SVN_ERR(sbox_wc_resolve(&b, "X/B/C", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+
+ /* Cannot resolve C */
+ err = sbox_wc_resolve(&b, "C", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_CONFLICT_RESOLVER_FAILURE);
+
+ /* Complete resolving A and then resolve C */
+ SVN_ERR(sbox_wc_resolve(&b, "X/B/C/D/E", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ SVN_ERR(sbox_wc_resolve(&b, "C", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 5, ""},
+ {0, "A", "normal", 5, "A"},
+ {0, "A/B", "normal", 5, "A/B"},
+ {0, "A/B/C", "normal", 5, "A/B/C"},
+ {0, "A/B/C/D", "normal", 5, "A/B/C/D"},
+ {0, "A/B/C/D/E", "normal", 5, "A/B/C/D/E"},
+ {0, "P", "normal", 5, "P"},
+ {0, "C", "normal", 5, "C"},
+ {0, "C/D", "normal", 5, "C/D"},
+ {0, "C/D/E", "normal", 5, "C/D/E"},
+ {1, "A", "base-deleted", NO_COPY_FROM, "X"},
+ {1, "A/B", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C/D/E", "base-deleted", NO_COPY_FROM},
+ {1, "C", "base-deleted", NO_COPY_FROM, "X/B/C"},
+ {1, "C/D", "base-deleted", NO_COPY_FROM},
+ {1, "C/D/E", "base-deleted", NO_COPY_FROM},
+ {1, "X", "normal", 5, "A", MOVED_HERE},
+ {1, "X/B", "normal", 5, "A/B", MOVED_HERE},
+ {1, "X/B/C", "normal", 5, "A/B/C", MOVED_HERE},
+ {1, "X/B/C/D", "normal", 5, "A/B/C/D", MOVED_HERE},
+ {1, "X/B/C/D/E", "normal", 5, "A/B/C/D/E", MOVED_HERE},
+ {3, "X/B/C", "normal", 5, "C", MOVED_HERE},
+ {3, "X/B/C/D", "normal", 5, "C/D", MOVED_HERE},
+ {3, "X/B/C/D/E", "normal", 5, "C/D/E", FALSE, "E2", TRUE},
+ {5, "X/B/C/D/E", "base-deleted", NO_COPY_FROM, "E3"},
+ {1, "E2", "normal", 5, "A/B/C/D/E", MOVED_HERE},
+ {1, "E3", "normal", 3, "C/D/E", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Update only a subtree (A/B/C) of a moved-away source A, creating a
+   mixed-revision move source.  Resolving that with 'mine-conflict' must
+   fail; resolving with 'merged' is allowed but degrades the move -- in
+   the expected rows below the MOVED_HERE flags and the moved-to targets
+   are gone, leaving a plain copy plus delete. */
+static svn_error_t *
+update_within_move(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ svn_error_t *err;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "update_within_move", opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/D"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ SVN_ERR(sbox_wc_move(&b, "A", "X"));
+ SVN_ERR(sbox_wc_update(&b, "A/B/C", 2));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 2, "A/B/C"},
+ {0, "A/B/C/D", "normal", 2, "A/B/C/D"},
+ {1, "A", "base-deleted", NO_COPY_FROM, "X"},
+ {1, "A/B", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {1, "X", "normal", 1, "A", MOVED_HERE},
+ {1, "X/B", "normal", 1, "A/B", MOVED_HERE},
+ {1, "X/B/C", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* Can't resolve mixed-revision source to mine-conflict. */
+ err = sbox_wc_resolve(&b, "A", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_CONFLICT_RESOLVER_FAILURE);
+
+ SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty,
+ svn_wc_conflict_choose_merged));
+ {
+ /* No MOVED_HERE / moved-to columns any more: the move is broken. */
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 2, "A/B/C"},
+ {0, "A/B/C/D", "normal", 2, "A/B/C/D"},
+ {1, "A", "base-deleted", NO_COPY_FROM},
+ {1, "A/B", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {1, "X", "normal", 1, "A"},
+ {1, "X/B", "normal", 1, "A/B"},
+ {1, "X/B/C", "normal", 1, "A/B/C"},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+
+ return SVN_NO_ERROR;
+}
+
+/* Commit a working copy containing a copy (A -> A_copied), a move of a
+   descendant (A/A/A -> AAA_moved) and a replacement of A/A by a copy of
+   A_copied/A, then verify the resulting base rows.  The upstream ###
+   comments below discuss commit-harvester limitations around the move
+   filter; NOTE(review): the "This fails" remark appears to predate the
+   SVN_ERR'd commit that follows -- confirm against upstream history. */
+static svn_error_t *
+commit_moved_descendant(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ SVN_ERR(svn_test__sandbox_create(&b, "commit_moved_descendant", opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/A/A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/A/A/A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/A/A/A/A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/A/A/A/A/A"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_copy(&b, "A", "A_copied"));
+ SVN_ERR(sbox_wc_move(&b, "A/A/A", "AAA_moved"));
+ SVN_ERR(sbox_wc_delete(&b, "A/A"));
+ SVN_ERR(sbox_wc_copy(&b, "A_copied/A", "A/A"));
+
+ /* And now I want to commit AAA_moved (the entire move), but not
+ the replacement of A/A */
+
+ /* For now, just start committing directly */
+ /* ### This fails, because A/A/A is not collected by the commit
+ harvester (it doesn't need committing, but our move filter
+ blocks on it) */
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ /* It would be nicer if we could just do a: */
+ /* SVN_ERR(sbox_wc_commit(&b, "AAA_moved")); */
+ /* Which then includes the delete half of the move, when it is
+ shadowed, like in this case. The commit processing doesn't
+ support this yet though*/
+
+ {
+ /* Everything is committed: all rows are op-depth 0 at r2 (except the
+ unchanged root and A). */
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 0, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/A", "normal", 2, "A/A"},
+ {0, "A/A/A", "normal", 2, "A/A/A"},
+ {0, "A/A/A/A", "normal", 2, "A/A/A/A"},
+ {0, "A/A/A/A/A", "normal", 2, "A/A/A/A/A"},
+ {0, "A/A/A/A/A/A", "normal", 2, "A/A/A/A/A/A"},
+ {0, "A_copied", "normal", 2, "A_copied"},
+ {0, "A_copied/A", "normal", 2, "A_copied/A"},
+ {0, "A_copied/A/A", "normal", 2, "A_copied/A/A"},
+ {0, "A_copied/A/A/A", "normal", 2, "A_copied/A/A/A"},
+ {0, "A_copied/A/A/A/A", "normal", 2, "A_copied/A/A/A/A"},
+ {0, "A_copied/A/A/A/A/A","normal", 2, "A_copied/A/A/A/A/A"},
+ {0, "AAA_moved", "normal", 2, "AAA_moved"},
+ {0, "AAA_moved/A", "normal", 2, "AAA_moved/A"},
+ {0, "AAA_moved/A/A", "normal", 2, "AAA_moved/A/A"},
+ {0, "AAA_moved/A/A/A", "normal", 2, "AAA_moved/A/A/A"},
+
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+commit_moved_away_descendant(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ SVN_ERR(svn_test__sandbox_create(&b, "commit_moved_away_descendant",
+ opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/A/A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/A/A/A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/A/A/A/A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/A/A/A/A/A"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_copy(&b, "A", "A_copied"));
+ SVN_ERR(sbox_wc_move(&b, "A/A/A", "AAA_moved"));
+ SVN_ERR(sbox_wc_delete(&b, "A/A"));
+ SVN_ERR(sbox_wc_copy(&b, "A_copied/A", "A/A"));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 0, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/A", "normal", 1, "A/A"},
+ {0, "A/A/A", "normal", 1, "A/A/A"},
+ {0, "A/A/A/A", "normal", 1, "A/A/A/A"},
+ {0, "A/A/A/A/A", "normal", 1, "A/A/A/A/A"},
+ {0, "A/A/A/A/A/A", "normal", 1, "A/A/A/A/A/A"},
+ {1, "A_copied", "normal", 1, "A"},
+ {1, "A_copied/A", "normal", 1, "A/A"},
+ {1, "A_copied/A/A", "normal", 1, "A/A/A"},
+ {1, "A_copied/A/A/A", "normal", 1, "A/A/A/A"},
+ {1, "A_copied/A/A/A/A", "normal", 1, "A/A/A/A/A"},
+ {1, "A_copied/A/A/A/A/A", "normal", 1, "A/A/A/A/A/A"},
+ {1, "AAA_moved", "normal", 1, "A/A/A", MOVED_HERE},
+ {1, "AAA_moved/A", "normal", 1, "A/A/A/A", MOVED_HERE},
+ {1, "AAA_moved/A/A", "normal", 1, "A/A/A/A/A", MOVED_HERE},
+ {1, "AAA_moved/A/A/A", "normal", 1, "A/A/A/A/A/A", MOVED_HERE},
+ {2, "A/A", "normal", 1, "A/A"},
+ {2, "A/A/A", "normal", 1, "A/A/A", FALSE, "AAA_moved"},
+ {2, "A/A/A/A", "normal", 1, "A/A/A/A"},
+ {2, "A/A/A/A/A", "normal", 1, "A/A/A/A/A"},
+ {2, "A/A/A/A/A/A", "normal", 1, "A/A/A/A/A/A"},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+  /* And now I want to make sure that I can't commit A without also
+     committing AAA_moved, as that would break the move. */
+ SVN_ERR(sbox_wc_commit(&b, "A"));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 0, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/A", "normal", 2, "A/A"},
+ {0, "A/A/A", "normal", 2, "A/A/A"},
+ {0, "A/A/A/A", "normal", 2, "A/A/A/A"},
+ {0, "A/A/A/A/A", "normal", 2, "A/A/A/A/A"},
+ {0, "A/A/A/A/A/A", "normal", 2, "A/A/A/A/A/A"},
+ {1, "A_copied", "normal", 1, "A"},
+ {1, "A_copied/A", "normal", 1, "A/A"},
+ {1, "A_copied/A/A", "normal", 1, "A/A/A"},
+ {1, "A_copied/A/A/A", "normal", 1, "A/A/A/A"},
+ {1, "A_copied/A/A/A/A", "normal", 1, "A/A/A/A/A"},
+ {1, "A_copied/A/A/A/A/A", "normal", 1, "A/A/A/A/A/A"},
+ {1, "AAA_moved", "normal", 1, "A/A/A"},
+ {1, "AAA_moved/A", "normal", 1, "A/A/A/A"},
+ {1, "AAA_moved/A/A", "normal", 1, "A/A/A/A/A"},
+ {1, "AAA_moved/A/A/A", "normal", 1, "A/A/A/A/A/A"},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "The commit should have failed");
+
+ /*return SVN_NO_ERROR;*/
+}
+
+static svn_error_t *
+finite_move_update_bump(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ svn_error_t *err;
+ SVN_ERR(svn_test__sandbox_create(&b, "finite_move_update_bump",
+ opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_mkdir(&b, "P"));
+ SVN_ERR(sbox_wc_mkdir(&b, "P/Q"));
+ SVN_ERR(sbox_file_write(&b, "P/Q/f", "r1 content\n"));
+ SVN_ERR(sbox_wc_add(&b, "P/Q/f"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_mkdir(&b, "X"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ SVN_ERR(sbox_wc_move(&b, "A/B", "B2"));
+ SVN_ERR(sbox_wc_update(&b, "A/B/C", 2));
+ SVN_ERR(check_tree_conflict_repos_path(&b, "A/B", NULL, NULL));
+ err = sbox_wc_resolve(&b, "A/B", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_CONFLICT_RESOLVER_FAILURE);
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 2, "A/B/C"},
+ {0, "P", "normal", 1, "P"},
+ {0, "P/Q", "normal", 1, "P/Q"},
+ {0, "P/Q/f", "normal", 1, "P/Q/f"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM, "B2"},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {1, "B2", "normal", 1, "A/B", MOVED_HERE},
+ {1, "B2/C", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(sbox_wc_revert(&b, "", svn_depth_infinity));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ SVN_ERR(sbox_wc_move(&b, "A/B", "B2"));
+ SVN_ERR(sbox_wc_move(&b, "P/Q", "Q2"));
+ SVN_ERR(sbox_wc_update_depth(&b, "A/B", 2, svn_depth_files, FALSE));
+ SVN_ERR(sbox_wc_update_depth(&b, "P/Q", 2, svn_depth_files, FALSE));
+ {
+ conflict_info_t conflicts[] = {
+ {"A/B", FALSE, FALSE, {svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_moved_away, "A/B"}},
+ {"P/Q", FALSE, FALSE, {svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_moved_away, "P/Q"}},
+ {0}
+ };
+ SVN_ERR(check_db_conflicts(&b, "", conflicts));
+ }
+
+ SVN_ERR(check_tree_conflict_repos_path(&b, "A/B", "A/B", "A/B"));
+ SVN_ERR(check_tree_conflict_repos_path(&b, "P/Q", "P/Q", "P/Q"));
+ err = sbox_wc_resolve(&b, "A/B", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_CONFLICT_RESOLVER_FAILURE);
+
+ /* sbox_wc_resolve() obtains a lock on the target path, so now it
+ will apply the change on the target */
+ SVN_ERR(sbox_wc_resolve(&b, "P/Q", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 2, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {0, "P", "normal", 1, "P"},
+ {0, "P/Q", "normal", 2, "P/Q"},
+ {0, "P/Q/f", "normal", 2, "P/Q/f"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM, "B2"},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {2, "P/Q", "base-deleted", NO_COPY_FROM, "Q2"},
+ {2, "P/Q/f", "base-deleted", NO_COPY_FROM},
+ {1, "B2", "normal", 1, "A/B", MOVED_HERE},
+ {1, "B2/C", "normal", 1, "A/B/C", MOVED_HERE},
+ {1, "Q2", "normal", 2, "P/Q", MOVED_HERE},
+ {1, "Q2/f", "normal", 2, "P/Q/f", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(sbox_wc_revert(&b, "", svn_depth_infinity));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ SVN_ERR(sbox_wc_move(&b, "A/B", "B2"));
+ SVN_ERR(sbox_wc_move(&b, "P", "P2"));
+ SVN_ERR(sbox_wc_update_depth(&b, "A/B", 2, svn_depth_immediates, FALSE));
+ SVN_ERR(sbox_wc_update_depth(&b, "P", 2, svn_depth_immediates, FALSE));
+ {
+ conflict_info_t conflicts[] = {
+ {"A/B", FALSE, FALSE, {svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_moved_away, "A/B"}},
+ {"P", FALSE, FALSE, {svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_moved_away, "P"}},
+ {0}
+ };
+ SVN_ERR(check_db_conflicts(&b, "", conflicts));
+ }
+ SVN_ERR(check_tree_conflict_repos_path(&b, "P", "P", "P"));
+ SVN_ERR(check_tree_conflict_repos_path(&b, "A/B", "A/B", "A/B"));
+ err = sbox_wc_resolve(&b, "P", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_CONFLICT_RESOLVER_FAILURE);
+ SVN_ERR(sbox_wc_resolve(&b, "A/B", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 2, "A/B"},
+ {0, "A/B/C", "normal", 2, "A/B/C"},
+ {0, "P", "normal", 2, "P"},
+ {0, "P/Q", "normal", 2, "P/Q"},
+ {0, "P/Q/f", "normal", 1, "P/Q/f"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM, "B2"},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {1, "P", "base-deleted", NO_COPY_FROM, "P2"},
+ {1, "P/Q", "base-deleted", NO_COPY_FROM},
+ {1, "P/Q/f", "base-deleted", NO_COPY_FROM},
+ {1, "B2", "normal", 2, "A/B", MOVED_HERE},
+ {1, "B2/C", "normal", 2, "A/B/C", MOVED_HERE},
+ {1, "P2", "normal", 1, "P", MOVED_HERE},
+ {1, "P2/Q", "normal", 1, "P/Q", MOVED_HERE},
+ {1, "P2/Q/f", "normal", 1, "P/Q/f", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(sbox_wc_revert(&b, "", svn_depth_infinity));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ SVN_ERR(sbox_wc_move(&b, "A/B/C", "C2"));
+ SVN_ERR(sbox_wc_move(&b, "P/Q", "Q2"));
+ SVN_ERR(sbox_wc_update_depth(&b, "A/B/C", 2, svn_depth_empty, FALSE));
+ SVN_ERR(sbox_wc_update_depth(&b, "P/Q", 2, svn_depth_empty, FALSE));
+ {
+ conflict_info_t conflicts[] = {
+ {"A/B/C", FALSE, FALSE, {svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_moved_away, "A/B/C"}},
+ {"P/Q", FALSE, FALSE, {svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_moved_away, "P/Q"}},
+
+ {0}
+ };
+ SVN_ERR(check_db_conflicts(&b, "", conflicts));
+ }
+ SVN_ERR(check_tree_conflict_repos_path(&b, "A/B/C", "A/B/C", "A/B/C"));
+ SVN_ERR(check_tree_conflict_repos_path(&b, "P/Q", "P/Q", "P/Q"));
+ err = sbox_wc_resolve(&b, "P/Q", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_CONFLICT_RESOLVER_FAILURE);
+ SVN_ERR(sbox_wc_resolve(&b, "A/B/C", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 2, "A/B/C"},
+ {0, "P", "normal", 1, "P"},
+ {0, "P/Q", "normal", 2, "P/Q"},
+ {0, "P/Q/f", "normal", 1, "P/Q/f"},
+ {3, "A/B/C", "base-deleted", NO_COPY_FROM, "C2"},
+ {2, "P/Q", "base-deleted", NO_COPY_FROM, "Q2"},
+ {2, "P/Q/f", "base-deleted", NO_COPY_FROM},
+ {1, "C2", "normal", 2, "A/B/C", MOVED_HERE},
+ {1, "Q2", "normal", 1, "P/Q", MOVED_HERE},
+ {1, "Q2/f", "normal", 1, "P/Q/f", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+move_away_delete_update(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ SVN_ERR(svn_test__sandbox_create(&b, "move_away_delete_update",
+ opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_mkdir(&b, "P"));
+ SVN_ERR(sbox_wc_mkdir(&b, "P/Q"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_delete(&b, "A/B"));
+ SVN_ERR(sbox_wc_delete(&b, "P/Q"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ SVN_ERR(sbox_wc_move(&b, "A/B/C", "C2"));
+ SVN_ERR(sbox_wc_move(&b, "P/Q", "Q2"));
+
+ /* Update to r2 removes the move sources and clears moved_here from
+ the move destinations. */
+ SVN_ERR(sbox_wc_update(&b, "", 2));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 2, ""},
+ {0, "A", "normal", 2, "A"},
+ {0, "P", "normal", 2, "P"},
+ {1, "C2", "normal", 1, "A/B/C", MOVED_HERE},
+ {1, "Q2", "normal", 1, "P/Q"},
+
+ {2, "A/B", "normal", 1, "A/B"},
+ {2, "A/B/C", "normal", 1, "A/B/C"},
+ {3, "A/B/C", "base-deleted", NO_COPY_FROM, "C2"},
+ {0}
+ };
+ conflict_info_t conflicts[] = {
+ {"A/B", FALSE, FALSE, {svn_wc_conflict_action_delete,
+ svn_wc_conflict_reason_edited}},
+ {"P/Q", FALSE, FALSE, {svn_wc_conflict_action_delete,
+ svn_wc_conflict_reason_moved_away, "P/Q"}},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ SVN_ERR(check_db_conflicts(&b, "", conflicts));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+move_not_present_variants(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ SVN_ERR(svn_test__sandbox_create(&b, "move_not_present_variants",
+ opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "B/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "C"));
+ SVN_ERR(sbox_wc_mkdir(&b, "C/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "D"));
+ SVN_ERR(sbox_wc_mkdir(&b, "D/B"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ SVN_ERR(sbox_wc_delete(&b, "A/B"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ SVN_ERR(sbox_wc_delete(&b, "B/B"));
+ SVN_ERR(sbox_wc_update(&b, "C/B", 0));
+ SVN_ERR(sbox_wc_exclude(&b, "D/B"));
+
+ SVN_ERR(sbox_wc_copy(&b, "A", "cA"));
+ SVN_ERR(sbox_wc_copy(&b, "B", "cB"));
+ SVN_ERR(sbox_wc_copy(&b, "C", "cC"));
+ SVN_ERR(sbox_wc_copy(&b, "D", "cD"));
+
+ SVN_ERR(sbox_wc_copy(&b, "cA", "ccA"));
+ SVN_ERR(sbox_wc_copy(&b, "cB", "ccB"));
+ SVN_ERR(sbox_wc_copy(&b, "cC", "ccC"));
+ SVN_ERR(sbox_wc_copy(&b, "cD", "ccD"));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+
+ /* Copy of a deleted + committed node */
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "not-present", 2, "A/B"},
+
+ {1, "cA", "normal", 1, "A"},
+ {1, "cA/B", "not-present", 2, "A/B"},
+
+ {1, "ccA", "normal", 1, "A"},
+ {1, "ccA/B", "not-present", 2, "A/B"},
+
+ /* Copy of a local deleted node */
+ {0, "B", "normal", 1, "B"},
+ {0, "B/B", "normal", 1, "B/B"},
+ {2, "B/B", "base-deleted", NO_COPY_FROM},
+
+ {1, "cB", "normal", 1, "B",},
+ {1, "cB/B", "normal", 1, "B/B"},
+ {2, "cB/B", "base-deleted", NO_COPY_FROM},
+
+ {1, "ccB", "normal", 1, "B"},
+ {1, "ccB/B", "normal", 1, "B/B"},
+ {2, "ccB/B", "base-deleted", NO_COPY_FROM},
+
+      /* Copy of a node that was updated to r0 */
+ {0, "C", "normal", 1, "C"},
+ {0, "C/B", "not-present", 0, "C/B"},
+
+ {1, "cC", "normal", 1, "C"},
+ {1, "cC/B", "not-present", 0, "C/B"},
+
+ {1, "ccC", "normal", 1, "C"},
+ {1, "ccC/B", "not-present", 0, "C/B"},
+
+ /* Copy of an excluded node */
+ {0, "D", "normal", 1, "D"},
+ {0, "D/B", "excluded", 1, "D/B"},
+
+ {1, "cD", "normal", 1, "D"},
+ {1, "cD/B", "excluded", 1, "D/B"},
+
+ {1, "ccD", "normal", 1, "D"},
+ {1, "ccD/B", "excluded", 1, "D/B"},
+
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(sbox_wc_revert(&b, "", svn_depth_infinity));
+ SVN_ERR(sbox_wc_delete(&b, "B/B"));
+
+ /* And now do the same thing with moves */
+
+ SVN_ERR(sbox_wc_move(&b, "A", "mA"));
+ SVN_ERR(sbox_wc_move(&b, "B", "mB"));
+ SVN_ERR(sbox_wc_move(&b, "C", "mC"));
+ SVN_ERR(sbox_wc_move(&b, "D", "mD"));
+
+ SVN_ERR(sbox_wc_move(&b, "mA", "mmA"));
+ SVN_ERR(sbox_wc_move(&b, "mB", "mmB"));
+ SVN_ERR(sbox_wc_move(&b, "mC", "mmC"));
+ SVN_ERR(sbox_wc_move(&b, "mD", "mmD"));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+
+ /* Move of a deleted + committed node */
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "not-present", 2, "A/B"},
+ {1, "A", "base-deleted", NO_COPY_FROM, "mmA"},
+
+ {1, "mmA", "normal", 1, "A", MOVED_HERE},
+ {1, "mmA/B", "not-present", 2, "A/B", MOVED_HERE},
+
+
+ /* Move of a local deleted node */
+ {0, "B", "normal", 1, "B"},
+ {0, "B/B", "normal", 1, "B/B"},
+ {1, "B", "base-deleted", NO_COPY_FROM, "mmB"},
+ {1, "B/B", "base-deleted", NO_COPY_FROM},
+
+ {1, "mmB", "normal", 1, "B", MOVED_HERE},
+ {1, "mmB/B", "normal", 1, "B/B", MOVED_HERE},
+ {2, "mmB/B", "base-deleted", NO_COPY_FROM},
+
+      /* Move of a node that was updated to r0 */
+ {0, "C", "normal", 1, "C"},
+ {0, "C/B", "not-present", 0, "C/B"},
+ {1, "C", "base-deleted", NO_COPY_FROM, "mmC"},
+
+ {1, "mmC", "normal", 1, "C", MOVED_HERE},
+ {1, "mmC/B", "not-present", 0, "C/B", MOVED_HERE},
+
+ /* Move of an excluded node */
+ {0, "D", "normal", 1, "D",},
+ {0, "D/B", "excluded", 1, "D/B", },
+ {1, "D", "base-deleted", NO_COPY_FROM, "mmD"},
+
+ {1, "mmD", "normal", 1, "D", MOVED_HERE},
+ {1, "mmD/B", "excluded", 1, "D/B", MOVED_HERE},
+
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* And move everything back */
+ SVN_ERR(sbox_wc_move(&b, "mmA", "A"));
+ SVN_ERR(sbox_wc_move(&b, "mmB", "B"));
+ SVN_ERR(sbox_wc_move(&b, "mmC", "C"));
+ SVN_ERR(sbox_wc_move(&b, "mmD", "D"));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+
+ /* deleted + committed node */
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "not-present", 2, "A/B"},
+
+ /* local deleted node */
+ {0, "B", "normal", 1, "B"},
+ {0, "B/B", "normal", 1, "B/B"},
+ {2, "B/B", "base-deleted", NO_COPY_FROM},
+
+      /* Node that was updated to r0 */
+ {0, "C", "normal", 1, "C"},
+ {0, "C/B", "not-present", 0, "C/B"},
+
+ /* Move of an excluded node */
+ {0, "D", "normal", 1, "D",},
+ {0, "D/B", "excluded", 1, "D/B", },
+
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+update_child_under_add(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ svn_error_t *err;
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "not-present", 0, "A/B"},
+ {2, "A/B", "normal", NO_COPY_FROM},
+ {3, "A/B/C", "normal", NO_COPY_FROM},
+ {4, "A/B/C/D", "normal", NO_COPY_FROM},
+ {0}
+ };
+
+ SVN_ERR(svn_test__sandbox_create(&b, "update_child_under_add",
+ opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/D"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ SVN_ERR(sbox_wc_update(&b, "A/B", 0));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/D"));
+ SVN_ERR(check_db_rows(&b, "", nodes));
+
+ /* A/B/C/D is skipped as it has no base node parent */
+ SVN_ERR(sbox_wc_update(&b, "A/B/C/D", 1));
+ SVN_ERR(check_db_rows(&b, "", nodes));
+
+ /* A/B/C should be skipped as it has a not-present base node parent */
+ err = sbox_wc_update(&b, "A/B/C", 1);
+ svn_error_clear(err); /* Allow any error and always check NODES. */
+ SVN_ERR(check_db_rows(&b, "", nodes));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+delete_over_moved_away(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "delete_over_moved_away",
+ opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ SVN_ERR(sbox_wc_move(&b, "A/B", "B"));
+ SVN_ERR(sbox_wc_delete(&b, "A"));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+
+ {0, "A", "normal", 1, "A"},
+ {1, "A", "base-deleted", NO_COPY_FROM},
+ {0, "A/B", "normal", 1, "A/B"},
+ {1, "A/B", "base-deleted", NO_COPY_FROM, "B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+
+ {1, "B", "normal", 1, "A/B", MOVED_HERE},
+ {1, "B/C", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* Now replace A with a similar tree */
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+
+ {0, "A", "normal", 1, "A"},
+ {1, "A", "normal", NO_COPY_FROM},
+ {0, "A/B", "normal", 1, "A/B"},
+ {1, "A/B", "base-deleted", NO_COPY_FROM, "B"},
+ {2, "A/B", "normal", NO_COPY_FROM},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {3, "A/B/C", "normal", NO_COPY_FROM},
+
+ {1, "B", "normal", 1, "A/B", MOVED_HERE},
+ {1, "B/C", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* And delete the new A */
+ SVN_ERR(sbox_wc_delete(&b, "A"));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+
+ {0, "A", "normal", 1, "A"},
+ {1, "A", "base-deleted", NO_COPY_FROM},
+ {0, "A/B", "normal", 1, "A/B"},
+ /* And here the moved-to information is lost */
+ {1, "A/B", "base-deleted", NO_COPY_FROM, "B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+
+ /* But the moved-here is still there */
+ {1, "B", "normal", 1, "A/B", MOVED_HERE},
+ {1, "B/C", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+movedto_opdepth(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "movedto_opdepth",
+ opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ SVN_ERR(sbox_wc_move(&b, "A/B/C", "C"));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+ {3, "A/B/C", "base-deleted", NO_COPY_FROM, "C"},
+
+ {1, "C", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* And now the moved_to information has to switch op-depths */
+ SVN_ERR(sbox_wc_delete(&b, "A/B"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+
+ {2, "A/B", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM, "C"},
+
+ {1, "C", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* And again */
+ SVN_ERR(sbox_wc_delete(&b, "A"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+
+ {1, "A", "base-deleted", NO_COPY_FROM},
+ {1, "A/B", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C", "base-deleted", NO_COPY_FROM, "C"},
+
+ {1, "C", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* And now stay at the depth of A */
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+
+ {1, "A", "normal", NO_COPY_FROM},
+ {1, "A/B", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C", "base-deleted", NO_COPY_FROM, "C"},
+
+ {2, "A/B", "normal", NO_COPY_FROM},
+
+ {1, "C", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* And see if it can jump back to B again? */
+ SVN_ERR(sbox_wc_delete(&b, "A"));
+ SVN_ERR(sbox_wc_revert(&b, "A", svn_depth_empty));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+
+ {2, "A/B", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM, "C"},
+
+ {1, "C", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* And can we bump it back to C itself? */
+ SVN_ERR(sbox_wc_revert(&b, "A", svn_depth_immediates));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+
+ {3, "A/B/C", "base-deleted", NO_COPY_FROM, "C"},
+
+ {1, "C", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+new_basemove(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "new_basemove",
+ opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ /* We keep track of moved children of copies */
+ SVN_ERR(sbox_wc_copy(&b, "A", "Copy"));
+ SVN_ERR(sbox_wc_move(&b, "Copy/B/C", "C"));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+
+ {1, "Copy", "normal", 1, "A"},
+ {1, "Copy/B", "normal", 1, "A/B"},
+ {1, "Copy/B/C", "normal", 1, "A/B/C"},
+
+ {3, "Copy/B/C", "base-deleted", NO_COPY_FROM, "C"},
+
+ /* C is a copy of A/B/C */
+ {1, "C", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ {
+ apr_array_header_t *targets = apr_array_make(pool, 1, sizeof(char *));
+ APR_ARRAY_PUSH(targets, const char*) = sbox_wc_path(&b, "Copy");
+
+ SVN_ERR(sbox_wc_commit_ex(&b, targets, svn_depth_empty));
+ }
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 1, "A/B/C"},
+
+ {0, "Copy", "normal", 2, "Copy"},
+ {0, "Copy/B", "normal", 2, "Copy/B"},
+ {0, "Copy/B/C", "normal", 2, "Copy/B/C"},
+
+ {3, "Copy/B/C", "base-deleted", NO_COPY_FROM, "C"},
+
+ /* And this node is now a copy of Copy/B/C at r2 */
+ {1, "C", "normal", 2, "Copy/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+move_back(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "move_back", opts, pool));
+
+ /* X just so we don't always test with local_relpath == repos_path */
+ SVN_ERR(sbox_wc_mkdir(&b, "X"));
+ SVN_ERR(sbox_wc_mkdir(&b, "X/A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "X/A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "X/A/B/C"));
+ SVN_ERR(sbox_wc_mkdir(&b, "X/A/B/D"));
+ SVN_ERR(sbox_wc_mkdir(&b, "X/E"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_switch(&b, "", "/X", svn_depth_infinity));
+
+ SVN_ERR(sbox_wc_move(&b, "A/B", "A/B2"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, "X"},
+ {0, "A", "normal", 1, "X/A"},
+ {0, "A/B", "normal", 1, "X/A/B"},
+ {0, "A/B/C", "normal", 1, "X/A/B/C"},
+ {0, "A/B/D", "normal", 1, "X/A/B/D"},
+ {0, "E", "normal", 1, "X/E"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM, "A/B2"},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/D", "base-deleted", NO_COPY_FROM},
+ {2, "A/B2", "normal", 1, "X/A/B", MOVED_HERE},
+ {2, "A/B2/C", "normal", 1, "X/A/B/C", MOVED_HERE},
+ {2, "A/B2/D", "normal", 1, "X/A/B/D", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+ SVN_ERR(sbox_wc_move(&b, "A/B2", "A/B"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, "X"},
+ {0, "A", "normal", 1, "X/A"},
+ {0, "A/B", "normal", 1, "X/A/B"},
+ {0, "A/B/C", "normal", 1, "X/A/B/C"},
+ {0, "A/B/D", "normal", 1, "X/A/B/D"},
+ {0, "E", "normal", 1, "X/E"},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(sbox_wc_move(&b, "A/B", "A/B2"));
+ SVN_ERR(sbox_wc_move(&b, "A/B2/C", "A/B2/C2"));
+ SVN_ERR(sbox_wc_move(&b, "A/B2/D", "D2"));
+ SVN_ERR(sbox_wc_move(&b, "E", "A/B2/E2"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, "X"},
+ {0, "A", "normal", 1, "X/A"},
+ {0, "A/B", "normal", 1, "X/A/B"},
+ {0, "A/B/C", "normal", 1, "X/A/B/C"},
+ {0, "A/B/D", "normal", 1, "X/A/B/D"},
+ {0, "E", "normal", 1, "X/E"},
+ {1, "D2", "normal", 1, "X/A/B/D", MOVED_HERE},
+ {1, "E", "base-deleted", NO_COPY_FROM, "A/B2/E2"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM, "A/B2"},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/D", "base-deleted", NO_COPY_FROM},
+ {2, "A/B2", "normal", 1, "X/A/B", MOVED_HERE},
+ {2, "A/B2/C", "normal", 1, "X/A/B/C", MOVED_HERE},
+ {2, "A/B2/D", "normal", 1, "X/A/B/D", MOVED_HERE},
+ {3, "A/B2/C", "base-deleted", NO_COPY_FROM, "A/B2/C2"},
+ {3, "A/B2/D", "base-deleted", NO_COPY_FROM, "D2"},
+ {3, "A/B2/C2", "normal", 1, "X/A/B/C", MOVED_HERE},
+ {3, "A/B2/E2", "normal", 1, "X/E", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+ SVN_ERR(sbox_wc_move(&b, "A/B2", "A/B"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, "X"},
+ {0, "A", "normal", 1, "X/A"},
+ {0, "A/B", "normal", 1, "X/A/B"},
+ {0, "A/B/C", "normal", 1, "X/A/B/C"},
+ {0, "A/B/D", "normal", 1, "X/A/B/D"},
+ {0, "E", "normal", 1, "X/E"},
+ {1, "D2", "normal", 1, "X/A/B/D", MOVED_HERE},
+ {1, "E", "base-deleted", NO_COPY_FROM, "A/B/E2"},
+ {3, "A/B/C", "base-deleted", NO_COPY_FROM, "A/B/C2"},
+ {3, "A/B/D", "base-deleted", NO_COPY_FROM, "D2"},
+ {3, "A/B/C2", "normal", 1, "X/A/B/C", MOVED_HERE},
+ {3, "A/B/E2", "normal", 1, "X/E", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+move_update_subtree(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ svn_error_t *err;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "move_update_subtree", opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/D"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_mkdir(&b, "X"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 3));
+ SVN_ERR(sbox_wc_copy(&b, "A", "P"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+  /* A subtree update is like an interrupted update; it leaves a
+     mixed-revision move source. */
+ SVN_ERR(sbox_wc_move(&b, "A/B", "A/B2"));
+ SVN_ERR(sbox_wc_update(&b, "A/B/C", 2));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/C", "normal", 2, "A/B/C"},
+ {0, "A/B/C/D", "normal", 2, "A/B/C/D"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM, "A/B2"},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {2, "A/B2", "normal", 1, "A/B", MOVED_HERE},
+ {2, "A/B2/C", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* Resolve fails because of the mixed-revision. */
+ err = sbox_wc_resolve(&b, "A/B", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_CONFLICT_RESOLVER_FAILURE);
+
+ /* Update to single-revision to allow resolve; this requires update
+ while the tree-conflict on A/B is present. */
+ SVN_ERR(sbox_wc_update(&b, "A/B", 2));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 2, "A/B"},
+ {0, "A/B/C", "normal", 2, "A/B/C"},
+ {0, "A/B/C/D", "normal", 2, "A/B/C/D"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM, "A/B2"},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {2, "A/B2", "normal", 1, "A/B", MOVED_HERE},
+ {2, "A/B2/C", "normal", 1, "A/B/C", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* Now resolve is possible. */
+ SVN_ERR(sbox_wc_resolve(&b, "A/B", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 2, "A/B"},
+ {0, "A/B/C", "normal", 2, "A/B/C"},
+ {0, "A/B/C/D", "normal", 2, "A/B/C/D"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM, "A/B2"},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {2, "A/B2", "normal", 2, "A/B", MOVED_HERE},
+ {2, "A/B2/C", "normal", 2, "A/B/C", MOVED_HERE},
+ {2, "A/B2/C/D", "normal", 2, "A/B/C/D", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* Subtree update that only bumps. */
+ SVN_ERR(sbox_wc_update(&b, "A/B/C", 3));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 2, "A/B"},
+ {0, "A/B/C", "normal", 3, "A/B/C"},
+ {0, "A/B/C/D", "normal", 3, "A/B/C/D"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM, "A/B2"},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {2, "A/B2", "normal", 2, "A/B", MOVED_HERE},
+ {2, "A/B2/C", "normal", 2, "A/B/C", MOVED_HERE},
+ {2, "A/B2/C/D", "normal", 2, "A/B/C/D", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* Resolve fails because of the mixed-revision. */
+ err = sbox_wc_resolve(&b, "A/B", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_CONFLICT_RESOLVER_FAILURE);
+
+ /* Update allowed while tree-conflict is present. */
+ SVN_ERR(sbox_wc_update(&b, "A/B", 3));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 3, "A/B"},
+ {0, "A/B/C", "normal", 3, "A/B/C"},
+ {0, "A/B/C/D", "normal", 3, "A/B/C/D"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM, "A/B2"},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {2, "A/B2", "normal", 2, "A/B", MOVED_HERE},
+ {2, "A/B2/C", "normal", 2, "A/B/C", MOVED_HERE},
+ {2, "A/B2/C/D", "normal", 2, "A/B/C/D", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* Now resolve works. */
+ SVN_ERR(sbox_wc_resolve(&b, "A/B", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 3, "A/B"},
+ {0, "A/B/C", "normal", 3, "A/B/C"},
+ {0, "A/B/C/D", "normal", 3, "A/B/C/D"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM, "A/B2"},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {2, "A/B2", "normal", 3, "A/B", MOVED_HERE},
+ {2, "A/B2/C", "normal", 3, "A/B/C", MOVED_HERE},
+ {2, "A/B2/C/D", "normal", 3, "A/B/C/D", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* Partial switch of source. */
+ SVN_ERR(sbox_wc_switch(&b, "A", "/P", svn_depth_immediates));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 4, "P"},
+ {0, "A/B", "normal", 4, "P/B"},
+ {0, "A/B/C", "normal", 3, "A/B/C"},
+ {0, "A/B/C/D", "normal", 3, "A/B/C/D"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM, "A/B2"},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {2, "A/B2", "normal", 3, "A/B", MOVED_HERE},
+ {2, "A/B2/C", "normal", 3, "A/B/C", MOVED_HERE},
+ {2, "A/B2/C/D", "normal", 3, "A/B/C/D", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* Resolve fails because of the subtree-switch. */
+ err = sbox_wc_resolve(&b, "A/B", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_CONFLICT_RESOLVER_FAILURE);
+
+ /* Switch works while tree-conflict is present. */
+ SVN_ERR(sbox_wc_switch(&b, "A", "/P", svn_depth_infinity));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 4, "P"},
+ {0, "A/B", "normal", 4, "P/B"},
+ {0, "A/B/C", "normal", 4, "P/B/C"},
+ {0, "A/B/C/D", "normal", 4, "P/B/C/D"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM, "A/B2"},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {2, "A/B2", "normal", 3, "A/B", MOVED_HERE},
+ {2, "A/B2/C", "normal", 3, "A/B/C", MOVED_HERE},
+ {2, "A/B2/C/D", "normal", 3, "A/B/C/D", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* Now resolve works. */
+ SVN_ERR(sbox_wc_resolve(&b, "A/B", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 4, "P"},
+ {0, "A/B", "normal", 4, "P/B"},
+ {0, "A/B/C", "normal", 4, "P/B/C"},
+ {0, "A/B/C/D", "normal", 4, "P/B/C/D"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM, "A/B2"},
+ {2, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {2, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {2, "A/B2", "normal", 4, "P/B", MOVED_HERE},
+ {2, "A/B2/C", "normal", 4, "P/B/C", MOVED_HERE},
+ {2, "A/B2/C/D", "normal", 4, "P/B/C/D", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Test moving a child out of its parent and then moving the parent
+   underneath the moved child, checking the recorded NODES rows after
+   each step.  Exercises nested move bookkeeping (moved-to/moved-here
+   columns at increasing op-depths).  */
+static svn_error_t *
+move_parent_into_child(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  /* r1: plain tree A, A/B; working copy at r1. */
+  SVN_ERR(svn_test__sandbox_create(&b, "move_parent_into_child", opts, pool));
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+
+  /* Move the child away, then move its former parent below it. */
+  SVN_ERR(sbox_wc_move(&b, "A/B", "B2"));
+  SVN_ERR(sbox_wc_move(&b, "A", "B2/A"));
+  {
+    nodes_row_t nodes[] = {
+      {0, "",       "normal",       1, ""},
+      {0, "A",      "normal",       1, "A"},
+      {0, "A/B",    "normal",       1, "A/B"},
+      {1, "A",      "base-deleted", NO_COPY_FROM, "B2/A"},
+      {1, "A/B",    "base-deleted", NO_COPY_FROM},
+      {1, "B2",     "normal",       1, "A/B", MOVED_HERE},
+      {2, "B2/A",   "normal",       1, "A", MOVED_HERE},
+      {2, "B2/A/B", "normal",       1, "A/B", MOVED_HERE},
+      {3, "B2/A/B", "base-deleted", NO_COPY_FROM, "B2"},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Move the whole nest back on top of the original parent's path,
+     replacing base "A". */
+  SVN_ERR(sbox_wc_move(&b, "B2", "A"));
+  {
+    nodes_row_t nodes[] = {
+      {0, "",      "normal",       1, ""},
+      {0, "A",     "normal",       1, "A"},
+      {0, "A/B",   "normal",       1, "A/B"},
+      {1, "A",     "normal",       1, "A/B", FALSE, "A/A", TRUE},
+      {1, "A/B",   "base-deleted", NO_COPY_FROM},
+      {2, "A/A",   "normal",       1, "A", MOVED_HERE},
+      {2, "A/A/B", "normal",       1, "A/B", MOVED_HERE},
+      {3, "A/A/B", "base-deleted", NO_COPY_FROM, "A"},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Rename the nested copy of the parent; moved-to targets must follow. */
+  SVN_ERR(sbox_wc_move(&b, "A/A", "A/B"));
+  {
+    nodes_row_t nodes[] = {
+      {0, "",      "normal",       1, ""},
+      {0, "A",     "normal",       1, "A"},
+      {0, "A/B",   "normal",       1, "A/B"},
+      {1, "A",     "normal",       1, "A/B", FALSE, "A/B", TRUE},
+      {1, "A/B",   "base-deleted", NO_COPY_FROM},
+      {2, "A/B",   "normal",       1, "A", MOVED_HERE},
+      {2, "A/B/B", "normal",       1, "A/B", MOVED_HERE},
+      {3, "A/B/B", "base-deleted", NO_COPY_FROM, "A"},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test updating a sparse (depth-limited) working copy whose root was
+   moved away, so the update must expand the move destination as new
+   children stream in.  Includes local-add and copied obstructions at
+   the destination, and a resolve step that historically segfaulted
+   (see comment below).  */
+static svn_error_t *
+move_depth_expand(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  /* r1: two sibling subtrees A/A/A/A and A/B/A/A; then shrink the
+     working copy to depth-empty at the root. */
+  SVN_ERR(svn_test__sandbox_create(&b, "move_depth_expand", opts, pool));
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/A/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/A/A/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B/A/A"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_update(&b, "", 0));
+
+  /* Pull in "" and "A" at depth immediates... */
+  SVN_ERR(sbox_wc_update_depth(&b, "", 1, svn_depth_immediates, TRUE));
+  SVN_ERR(sbox_wc_update_depth(&b, "A", 1, svn_depth_immediates, TRUE));
+  /* Make A/B not present */
+  SVN_ERR(sbox_wc_update_depth(&b, "A/B", 0, svn_depth_immediates, TRUE));
+
+  SVN_ERR(sbox_wc_move(&b, "A", "C"));
+  SVN_ERR(sbox_wc_mkdir(&b, "C/A/A")); /* Local addition obstruction */
+  SVN_ERR(sbox_wc_copy(&b, "C/A", "C/B")); /* Copied obstruction */
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "",           "normal",       1, "" },
+
+      {0, "A",          "normal",       1, "A" },
+      {1, "A",          "base-deleted", NO_COPY_FROM, "C" },
+      {0, "A/A",        "normal",       1, "A/A" },
+      {1, "A/A",        "base-deleted", NO_COPY_FROM },
+      {0, "A/B",        "not-present",  0, "A/B" },
+
+      {1, "C",          "normal",       1, "A", MOVED_HERE },
+
+      {1, "C/A",        "normal",       1, "A/A", MOVED_HERE },
+      {3, "C/A/A",      "normal",       NO_COPY_FROM },
+
+      {1, "C/B",        "not-present",  0, "A/B", MOVED_HERE},
+      {2, "C/B",        "normal",       1, "A/A" },
+      {3, "C/B/A",      "normal",       NO_COPY_FROM },
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Deepen the whole working copy; the move source gains its full tree. */
+  SVN_ERR(sbox_wc_update_depth(&b, "", 1, svn_depth_infinity, TRUE));
+
+  /* And now verify that there are no not-present nodes left and a
+     consistent working copy */
+  {
+    nodes_row_t nodes[] = {
+      {0, "",           "normal",       1, "" },
+
+      {0, "A",          "normal",       1, "A" },
+      {0, "A/A",        "normal",       1, "A/A" },
+      {0, "A/A/A",      "normal",       1, "A/A/A" },
+      {0, "A/A/A/A",    "normal",       1, "A/A/A/A" },
+      {0, "A/B",        "normal",       1, "A/B" },
+      {0, "A/B/A",      "normal",       1, "A/B/A" },
+      {0, "A/B/A/A",    "normal",       1, "A/B/A/A" },
+
+      {1, "A",          "base-deleted", NO_COPY_FROM, "C" },
+      {1, "A/A",        "base-deleted", NO_COPY_FROM },
+      {1, "A/A/A",      "base-deleted", NO_COPY_FROM },
+      {1, "A/B",        "base-deleted", NO_COPY_FROM },
+      {1, "A/B/A",      "base-deleted", NO_COPY_FROM },
+      {1, "A/B/A/A",    "base-deleted", NO_COPY_FROM },
+      {1, "A/A/A/A",    "base-deleted", NO_COPY_FROM },
+
+      {1, "C",          "normal",       1, "A", MOVED_HERE },
+      {1, "C/A",        "normal",       1, "A/A", MOVED_HERE },
+      {1, "C/B",        "not-present",  0, "A/B", MOVED_HERE},
+
+      {2, "C/B",        "normal",       1, "A/A" },
+
+      {3, "C/A/A",      "normal",       NO_COPY_FROM },
+      {3, "C/B/A",      "normal",       NO_COPY_FROM },
+
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* This used to cause a segfault. Then it asserted in a different place */
+  SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty,
+                          svn_wc_conflict_choose_mine_conflict));
+
+  /* And now verify that there are no not-present nodes left and a
+     consistent working copy */
+  {
+    nodes_row_t nodes[] = {
+      {0, "",           "normal",       1, "" },
+
+      {0, "A",          "normal",       1, "A" },
+      {0, "A/A",        "normal",       1, "A/A" },
+      {0, "A/A/A",      "normal",       1, "A/A/A" },
+      {0, "A/A/A/A",    "normal",       1, "A/A/A/A" },
+      {0, "A/B",        "normal",       1, "A/B" },
+      {0, "A/B/A",      "normal",       1, "A/B/A" },
+      {0, "A/B/A/A",    "normal",       1, "A/B/A/A" },
+
+      {1, "A",          "base-deleted", NO_COPY_FROM, "C" },
+      {1, "A/A",        "base-deleted", NO_COPY_FROM },
+      {1, "A/A/A",      "base-deleted", NO_COPY_FROM },
+      {1, "A/B",        "base-deleted", NO_COPY_FROM },
+      {1, "A/B/A",      "base-deleted", NO_COPY_FROM },
+      {1, "A/B/A/A",    "base-deleted", NO_COPY_FROM },
+      {1, "A/A/A/A",    "base-deleted", NO_COPY_FROM },
+
+
+      {1, "C",          "normal",       1, "A", MOVED_HERE },
+      {1, "C/A",        "normal",       1, "A/A", MOVED_HERE },
+      {1, "C/A/A",      "normal",       1, "A/A/A", MOVED_HERE },
+      {1, "C/A/A/A",    "normal",       1, "A/A/A/A", MOVED_HERE },
+      {1, "C/B",        "normal",       1, "A/B", MOVED_HERE },
+      {1, "C/B/A",      "normal",       1, "A/B/A", MOVED_HERE },
+      {1, "C/B/A/A",    "normal",       1, "A/B/A/A", MOVED_HERE },
+
+      {2, "C/B",        "normal",       1, "A/A" },
+      {2, "C/B/A",      "base-deleted", NO_COPY_FROM },
+      {2, "C/B/A/A",    "base-deleted", NO_COPY_FROM },
+
+      {3, "C/A/A",      "normal",       NO_COPY_FROM },
+      {3, "C/A/A/A",    "base-deleted", NO_COPY_FROM },
+      {3, "C/B/A",      "normal",       NO_COPY_FROM },
+
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test updating over a move whose source tree shrank in the repository
+   (children deleted in r2), so the move-update has to retract nodes at
+   the move destination.  Verifies the intermediate tree conflicts and
+   their resolution.  */
+static svn_error_t *
+move_retract(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "move_retract", opts, pool));
+
+  /* r1: full tree; r2: delete A/A/A/{A,C,D} and A/B/A/C. */
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/A/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/A/A/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/A/A/C"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/A/A/D"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B/A/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B/A/C"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B/A/D"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_delete(&b, "A/A/A/A"));
+  SVN_ERR(sbox_wc_delete(&b, "A/A/A/C"));
+  SVN_ERR(sbox_wc_delete(&b, "A/A/A/D"));
+  SVN_ERR(sbox_wc_delete(&b, "A/B/A/C"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+
+  /* At r1: move a grandchild out, replace A/B with moved A/A. */
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+  SVN_ERR(sbox_wc_move(&b, "A/B/A/D", "D"));
+  SVN_ERR(sbox_wc_delete(&b, "A/B"));
+  SVN_ERR(sbox_wc_move(&b, "A/A", "A/B"));
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "",           "normal",       1, "" },
+
+      {0, "A",          "normal",       1, "A" },
+      {0, "A/A",        "normal",       1, "A/A" },
+      {0, "A/A/A",      "normal",       1, "A/A/A" },
+      {0, "A/A/A/A",    "normal",       1, "A/A/A/A" },
+      {0, "A/A/A/C",    "normal",       1, "A/A/A/C" },
+      {0, "A/A/A/D",    "normal",       1, "A/A/A/D" },
+
+      {2, "A/A",        "base-deleted", NO_COPY_FROM, "A/B"},
+      {2, "A/A/A",      "base-deleted", NO_COPY_FROM },
+      {2, "A/A/A/A",    "base-deleted", NO_COPY_FROM },
+      {2, "A/A/A/C",    "base-deleted", NO_COPY_FROM },
+      {2, "A/A/A/D",    "base-deleted", NO_COPY_FROM },
+
+      {0, "A/B",        "normal",       1, "A/B" },
+      {0, "A/B/A",      "normal",       1, "A/B/A" },
+      {0, "A/B/A/A",    "normal",       1, "A/B/A/A" },
+      {0, "A/B/A/C",    "normal",       1, "A/B/A/C" },
+      {0, "A/B/A/D",    "normal",       1, "A/B/A/D" },
+
+      {2, "A/B",        "normal",       1, "A/A", MOVED_HERE },
+      {2, "A/B/A",      "normal",       1, "A/A/A", MOVED_HERE },
+      {2, "A/B/A/A",    "normal",       1, "A/A/A/A", MOVED_HERE },
+      {2, "A/B/A/C",    "normal",       1, "A/A/A/C", MOVED_HERE },
+      {2, "A/B/A/D",    "normal",       1, "A/A/A/D", FALSE, "D", TRUE },
+
+      {1, "D",          "normal",       1, "A/B/A/D", MOVED_HERE },
+
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Update to r2: bases shrink, local moves raise tree conflicts. */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "",           "normal",       2, "" },
+
+      {0, "A",          "normal",       2, "A" },
+      {0, "A/A",        "normal",       2, "A/A" },
+      {0, "A/A/A",      "normal",       2, "A/A/A" },
+
+      {2, "A/A",        "base-deleted", NO_COPY_FROM, "A/B"},
+      {2, "A/A/A",      "base-deleted", NO_COPY_FROM },
+
+      {0, "A/B",        "normal",       2, "A/B" },
+      {0, "A/B/A",      "normal",       2, "A/B/A" },
+      {0, "A/B/A/A",    "normal",       2, "A/B/A/A" },
+      {0, "A/B/A/D",    "normal",       2, "A/B/A/D" },
+
+      {2, "A/B",        "normal",       1, "A/A", MOVED_HERE },
+      {2, "A/B/A",      "normal",       1, "A/A/A", MOVED_HERE },
+      {2, "A/B/A/A",    "normal",       1, "A/A/A/A", MOVED_HERE },
+      {2, "A/B/A/C",    "normal",       1, "A/A/A/C", MOVED_HERE },
+      {2, "A/B/A/D",    "normal",       1, "A/A/A/D", FALSE, "D", TRUE },
+
+      {1, "D",          "normal",       1, "A/B/A/D", MOVED_HERE },
+
+      {0}
+    };
+    conflict_info_t conflicts[] = {
+      {"A/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                             svn_wc_conflict_reason_moved_away, "A/A"}},
+      {"A/B", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                             svn_wc_conflict_reason_replaced}},
+      { 0 },
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+    SVN_ERR(check_db_conflicts(&b, "", conflicts));
+  }
+
+
+  /* Resolve the moved-away conflict on A/A: the move destination is
+     updated to the r2 shape, retracting the deleted children. */
+  SVN_ERR(sbox_wc_resolve(&b, "A/A", svn_depth_empty,
+                          svn_wc_conflict_choose_mine_conflict));
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "",           "normal",       2, "" },
+
+      {0, "A",          "normal",       2, "A" },
+      {0, "A/A",        "normal",       2, "A/A" },
+      {0, "A/A/A",      "normal",       2, "A/A/A" },
+
+      {0, "A/B",        "normal",       2, "A/B" },
+      {0, "A/B/A",      "normal",       2, "A/B/A" },
+      {0, "A/B/A/A",    "normal",       2, "A/B/A/A" },
+      {0, "A/B/A/D",    "normal",       2, "A/B/A/D" },
+
+      {2, "A/A",        "base-deleted", NO_COPY_FROM, "A/B"},
+      {2, "A/A/A",      "base-deleted", NO_COPY_FROM },
+
+      {2, "A/B",        "normal",       2, "A/A", MOVED_HERE },
+      {2, "A/B/A",      "normal",       2, "A/A/A", MOVED_HERE },
+      {2, "A/B/A/A",    "base-deleted", NO_COPY_FROM }, /* ### MISSING! */
+      {2, "A/B/A/D",    "base-deleted", NO_COPY_FROM, "D" }, /* ### MISSING! */
+
+      /* Still conflicted */
+      {1, "D",          "normal",       1, "A/B/A/D", MOVED_HERE },
+
+      {4, "A/B/A/C",    "normal",       1, "A/A/A/C"},
+
+
+      {0}
+    };
+    conflict_info_t conflicts[] = {
+      {"A/B", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                             svn_wc_conflict_reason_replaced}},
+      {"A/B/A/C", FALSE, FALSE, {svn_wc_conflict_action_delete,
+                                 svn_wc_conflict_reason_edited}},
+      {0}
+    };
+
+    SVN_ERR(check_db_rows(&b, "", nodes));
+    SVN_ERR(check_db_conflicts(&b, "", conflicts));
+  }
+
+  /* ### TODO: Resolve via which specific target? */
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity,
+                          svn_wc_conflict_choose_mine_conflict));
+
+  /* After full resolution the extracted D subtree is still a move. */
+  {
+    nodes_row_t nodes[] = {
+
+      {1, "D",          "normal",       1, "A/B/A/D", MOVED_HERE },
+
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "D", nodes));
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test the interaction of file externals (svn:externals pulling in file
+   "f" at several paths under A/B) with delete, revert, update and move
+   of the parent directory: delete/move drop the external rows, revert
+   does not restore them, update re-creates them.  */
+static svn_error_t *
+move_delete_file_externals(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "move_delete_file_externals", opts,
+                                   pool));
+
+  /* r1: A/B plus file f, with externals mapping ^/f into B/P/g, Q/g, g. */
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_file_write(&b, "f", "New file"));
+  SVN_ERR(sbox_wc_add(&b, "f"));
+  SVN_ERR(sbox_wc_propset(&b, "svn:externals", "^/f B/P/g", "A"));
+  SVN_ERR(sbox_wc_propset(&b, "svn:externals", "^/f Q/g\n^/f g", "A/B"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "",        "normal", 1, ""},
+      {0, "A",       "normal", 1, "A"},
+      {0, "A/B",     "normal", 1, "A/B"},
+      {0, "f",       "normal", 1, "f"},
+      {0, "A/B/g",   "normal", 1, "f", TRUE},
+      {0, "A/B/P/g", "normal", 1, "f", TRUE},
+      {0, "A/B/Q/g", "normal", 1, "f", TRUE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Delete removes the file external rows. */
+  SVN_ERR(sbox_wc_delete(&b, "A"));
+  {
+    nodes_row_t nodes[] = {
+      {0, "",    "normal",       1, ""},
+      {0, "A",   "normal",       1, "A"},
+      {0, "A/B", "normal",       1, "A/B"},
+      {0, "f",   "normal",       1, "f"},
+      {1, "A",   "base-deleted", NO_COPY_FROM},
+      {1, "A/B", "base-deleted", NO_COPY_FROM},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Revert doesn't restore the file external rows... */
+  SVN_ERR(sbox_wc_revert(&b, "A", svn_depth_infinity));
+  {
+    nodes_row_t nodes[] = {
+      {0, "",    "normal", 1, ""},
+      {0, "A",   "normal", 1, "A"},
+      {0, "A/B", "normal", 1, "A/B"},
+      {0, "f",   "normal", 1, "f"},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+  /* ... but update does. */
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+  {
+    nodes_row_t nodes[] = {
+      {0, "",        "normal", 1, ""},
+      {0, "A",       "normal", 1, "A"},
+      {0, "A/B",     "normal", 1, "A/B"},
+      {0, "f",       "normal", 1, "f"},
+      {0, "A/B/g",   "normal", 1, "f", TRUE},
+      {0, "A/B/P/g", "normal", 1, "f", TRUE},
+      {0, "A/B/Q/g", "normal", 1, "f", TRUE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Move removes the file external rows. */
+  SVN_ERR(sbox_wc_move(&b, "A", "A2"));
+  {
+    nodes_row_t nodes[] = {
+      {0, "",     "normal",       1, ""},
+      {0, "A",    "normal",       1, "A"},
+      {0, "A/B",  "normal",       1, "A/B"},
+      {0, "f",    "normal",       1, "f"},
+      {1, "A",    "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B",  "base-deleted", NO_COPY_FROM},
+      {1, "A2",   "normal",       1, "A", MOVED_HERE},
+      {1, "A2/B", "normal",       1, "A/B", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+  /* Update adds file external rows to the copy. */
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+  {
+    nodes_row_t nodes[] = {
+      {0, "",         "normal",       1, ""},
+      {0, "A",        "normal",       1, "A"},
+      {0, "A/B",      "normal",       1, "A/B"},
+      {0, "f",        "normal",       1, "f"},
+      {1, "A",        "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B",      "base-deleted", NO_COPY_FROM},
+      {1, "A2",       "normal",       1, "A", MOVED_HERE},
+      {1, "A2/B",     "normal",       1, "A/B", MOVED_HERE},
+      {0, "A2/B/g",   "normal",       1, "f", TRUE},
+      {0, "A2/B/P/g", "normal",       1, "f", TRUE},
+      {0, "A2/B/Q/g", "normal",       1, "f", TRUE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test running a second update over a move source that already carries
+   a tree conflict from a first (depth-empty) update: the second update
+   overwrites the existing tree-conflict, which used to make the later
+   move-update assert.  Exercised twice: once updating "A" (the
+   open_directory path) and once updating "A/B" (the open_root path).  */
+static svn_error_t *
+update_with_tree_conflict(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "update_with_tree_conflict", opts,
+                                   pool));
+
+  /* r1: A/B; r2 adds A/B/C.  Work at r1 and move A away. */
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+  SVN_ERR(sbox_wc_move(&b, "A", "A2"))
+
+  SVN_ERR(sbox_wc_update_depth(&b, "A", 2, svn_depth_empty, FALSE));
+  {
+    nodes_row_t nodes[] = {
+      {0, "",     "normal",       1, ""},
+      {0, "A",    "normal",       2, "A"},
+      {0, "A/B",  "normal",       1, "A/B"},
+      {1, "A",    "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B",  "base-deleted", NO_COPY_FROM},
+      {1, "A2",   "normal",       1, "A", MOVED_HERE},
+      {1, "A2/B", "normal",       1, "A/B", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  SVN_ERR(sbox_wc_update(&b, "A", 2));
+  {
+    nodes_row_t nodes[] = {
+      {0, "",      "normal",       1, ""},
+      {0, "A",     "normal",       2, "A"},
+      {0, "A/B",   "normal",       2, "A/B"},
+      {0, "A/B/C", "normal",       2, "A/B/C"},
+      {1, "A",     "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B",   "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+      {1, "A2",    "normal",       1, "A", MOVED_HERE},
+      {1, "A2/B",  "normal",       1, "A/B", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Second update overwrote the existing tree-conflict and that
+     causes the move-update to assert. */
+  SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_infinity,
+                          svn_wc_conflict_choose_mine_conflict));
+  {
+    nodes_row_t nodes[] = {
+      {0, "",       "normal",       1, ""},
+      {0, "A",      "normal",       2, "A"},
+      {0, "A/B",    "normal",       2, "A/B"},
+      {0, "A/B/C",  "normal",       2, "A/B/C"},
+      {1, "A",      "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B",    "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C",  "base-deleted", NO_COPY_FROM},
+      {1, "A2",     "normal",       2, "A", MOVED_HERE},
+      {1, "A2/B",   "normal",       2, "A/B", MOVED_HERE},
+      {1, "A2/B/C", "normal",       2, "A/B/C", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Same again but second update is A/B rather than A which triggers
+     the problem through open_root rather than open_directory. */
+  SVN_ERR(sbox_wc_revert(&b, "", svn_depth_infinity));
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+  SVN_ERR(sbox_wc_move(&b, "A", "A2"));
+  SVN_ERR(sbox_wc_update_depth(&b, "A", 2, svn_depth_empty, FALSE));
+  SVN_ERR(sbox_wc_update(&b, "A/B", 2));
+  {
+    nodes_row_t nodes[] = {
+      {0, "",      "normal",       1, ""},
+      {0, "A",     "normal",       2, "A"},
+      {0, "A/B",   "normal",       2, "A/B"},
+      {0, "A/B/C", "normal",       2, "A/B/C"},
+      {1, "A",     "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B",   "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+      {1, "A2",    "normal",       1, "A", MOVED_HERE},
+      {1, "A2/B",  "normal",       1, "A/B", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+  SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_infinity,
+                          svn_wc_conflict_choose_mine_conflict));
+  {
+    nodes_row_t nodes[] = {
+      {0, "",       "normal",       1, ""},
+      {0, "A",      "normal",       2, "A"},
+      {0, "A/B",    "normal",       2, "A/B"},
+      {0, "A/B/C",  "normal",       2, "A/B/C"},
+      {1, "A",      "base-deleted", NO_COPY_FROM, "A2"},
+      {1, "A/B",    "base-deleted", NO_COPY_FROM},
+      {1, "A/B/C",  "base-deleted", NO_COPY_FROM},
+      {1, "A2",     "normal",       2, "A", MOVED_HERE},
+      {1, "A2/B",   "normal",       2, "A/B", MOVED_HERE},
+      {1, "A2/B/C", "normal",       2, "A/B/C", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test updating over a move whose parent directory was replaced in the
+   repository (A/B deleted and re-added in r2).  The update breaks the
+   move and leaves a tree conflict, which is then resolved by keeping
+   the merged (local) state.  */
+static svn_error_t *
+move_update_parent_replace(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "move_update_parent_replace", opts,
+                                   pool));
+
+  /* r1: A/B/C; r2: replace A/B (delete + mkdir).  Move C at r1. */
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_delete(&b, "A/B"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+  SVN_ERR(sbox_wc_move(&b, "A/B/C", "A/C"));
+
+  /* Update breaks the move and leaves a conflict. */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  {
+    nodes_row_t nodes[] = {
+      {0, "",       "normal",       2, ""},
+      {0, "A",      "normal",       2, "A"},
+      {0, "A/B",    "normal",       2, "A/B"},
+
+      {2, "A/C",    "normal",       1, "A/B/C", MOVED_HERE},
+
+      {2, "A/B",    "normal",       1, "A/B"},
+      {2, "A/B/C",  "normal",       1, "A/B/C", FALSE},
+
+      {3, "A/B/C",  "base-deleted", NO_COPY_FROM, "A/C"},
+
+      {0}
+    };
+    conflict_info_t conflicts[] = {
+      {"A/B", FALSE, FALSE, {svn_wc_conflict_action_replace,
+                             svn_wc_conflict_reason_edited}},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+    SVN_ERR(check_db_conflicts(&b, "", conflicts));
+  }
+
+  /* Choosing "merged" keeps the local replacement and the move intact,
+     and clears the conflict marker. */
+  SVN_ERR(sbox_wc_resolve(&b, "A/B", svn_depth_infinity,
+                          svn_wc_conflict_choose_merged));
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "",       "normal",       2, ""},
+      {0, "A",      "normal",       2, "A"},
+      {0, "A/B",    "normal",       2, "A/B"},
+      {2, "A/C",    "normal",       1, "A/B/C", MOVED_HERE},
+      {2, "A/B",    "normal",       1, "A/B"},
+      {2, "A/B/C",  "normal",       1, "A/B/C", FALSE},
+      {3, "A/B/C",  "base-deleted", NO_COPY_FROM, "A/C"},
+
+      {0}
+    };
+
+    SVN_ERR(check_db_rows(&b, "", nodes));
+    SVN_ERR(check_db_conflicts(&b, "", NULL));
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test copying a mixed-revision tree that contains a local replacement:
+   A is at r1 but A/B at r2 and locally replaced.  The copy records the
+   mixed-rev child as not-present overlaid by the local add, and the
+   committed result is a clean tree.  */
+static svn_error_t *
+copy_mixed_rev_mods(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "copy_mixed_rev_mods", opts,
+                                   pool));
+
+  /* r1: A/B; r2 adds A/B/C.  Update only A/B to r2, then replace it. */
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+  SVN_ERR(sbox_wc_update(&b, "A/B", 2));
+  SVN_ERR(sbox_wc_delete(&b, "A/B"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+
+  {
+    nodes_row_t nodes[] = {
+      {0, "",      "normal",       1, ""},
+      {0, "A",     "normal",       1, "A"},
+      {0, "A/B",   "normal",       2, "A/B"},
+      {0, "A/B/C", "normal",       2, "A/B/C"},
+      {2, "A/B",   "normal",       NO_COPY_FROM},
+      {2, "A/B/C", "base-deleted", NO_COPY_FROM},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Copy: the r2 child becomes not-present in the copy, with the local
+     plain add layered on top. */
+  SVN_ERR(sbox_wc_copy(&b, "A", "X"));
+  {
+    nodes_row_t nodes[] = {
+      {1, "X",   "normal",      1, "A"},
+      {1, "X/B", "not-present", 2, "A/B"},
+      {2, "X/B", "normal",      NO_COPY_FROM},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "X", nodes));
+  }
+
+  /* Commit of the copy yields a clean single-revision tree. */
+  SVN_ERR(sbox_wc_commit(&b, "X"));
+  {
+    nodes_row_t nodes[] = {
+      {0, "X",   "normal", 3, "X"},
+      {0, "X/B", "normal", 3, "X/B"},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "X", nodes));
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test that reverting the deleted move source turns the move into a
+   plain copy: move A/B out to B, delete A, then revert A and check
+   that B is no longer recorded as moved-here.  */
+static svn_error_t *
+move_child_to_parent_revert(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "move_child_to_parent_revert", opts,
+                                   pool));
+
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+
+
+  SVN_ERR(sbox_wc_move(&b, "A/B", "B"));
+  SVN_ERR(sbox_wc_delete(&b, "A"));
+
+  /* Verify that the move is still recorded correctly */
+  {
+    nodes_row_t nodes[] = {
+      {0, "",    "normal",       0, ""},
+      {0, "A",   "normal",       1, "A"},
+      {0, "A/B", "normal",       1, "A/B"},
+
+      {1, "A",   "base-deleted", NO_COPY_FROM},
+      {1, "A/B", "base-deleted", NO_COPY_FROM, "B"},
+
+      {1, "B",   "normal",       1, "A/B", MOVED_HERE},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  SVN_ERR(sbox_wc_revert(&b, "A", svn_depth_infinity));
+
+  /* Verify that the move is now just a copy */
+  {
+    nodes_row_t nodes[] = {
+      {0, "",    "normal",       0, ""},
+      {0, "A",   "normal",       1, "A"},
+      {0, "A/B", "normal",       1, "A/B"},
+
+      {1, "B",   "normal",       1, "A/B"},
+      {0}
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+
+  return SVN_NO_ERROR;
+}
+
+/* Test deleting a tree under which the same path (A/A/A) has been the
+   source of three successive moves (AAA_1/2/3, each from a different
+   replacement layer).  After the delete, all three destinations must
+   remain moves, each retargeted to its original source path.  */
+static svn_error_t *
+move_delete_intermediate(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "move_delete_intermediate", opts,
+                                   pool));
+
+  /* r1: three identical 4-level trees A, B, C. */
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/A/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/A/A/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "B"));
+  SVN_ERR(sbox_wc_mkdir(&b, "B/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "B/A/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "B/A/A/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "C"));
+  SVN_ERR(sbox_wc_mkdir(&b, "C/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "C/A/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "C/A/A/A"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+
+  /* Move A/A/A away three times, each time re-filling the path from a
+     different tree (B, then C/A). */
+  SVN_ERR(sbox_wc_move(&b, "A/A/A", "AAA_1"));
+
+  SVN_ERR(sbox_wc_delete(&b, "A"));
+  SVN_ERR(sbox_wc_move(&b, "B", "A"));
+
+  SVN_ERR(sbox_wc_move(&b, "A/A/A", "AAA_2"));
+
+  SVN_ERR(sbox_wc_delete(&b, "A/A"));
+  SVN_ERR(sbox_wc_move(&b, "C/A", "A/A"));
+
+  SVN_ERR(sbox_wc_move(&b, "A/A/A", "AAA_3"));
+
+  /* Verify that the move is still recorded correctly */
+  {
+    nodes_row_t nodes[] = {
+
+      {0, "",         "normal",       0, ""},
+
+      {1, "AAA_1",    "normal",       1, "A/A/A",              MOVED_HERE},
+      {1, "AAA_1/A",  "normal",       1, "A/A/A/A",            MOVED_HERE},
+      {1, "AAA_2",    "normal",       1, "B/A/A",              MOVED_HERE},
+      {1, "AAA_2/A",  "normal",       1, "B/A/A/A",            MOVED_HERE},
+      {1, "AAA_3",    "normal",       1, "C/A/A",              MOVED_HERE},
+      {1, "AAA_3/A",  "normal",       1, "C/A/A/A",            MOVED_HERE},
+
+      {0, "A",        "normal",       1, "A"},
+      {0, "A/A",      "normal",       1, "A/A"},
+      {0, "A/A/A",    "normal",       1, "A/A/A"},
+      {0, "A/A/A/A",  "normal",       1, "A/A/A/A"},
+
+      {1, "A",        "normal",       1, "B",                  MOVED_HERE},
+      {1, "A/A",      "normal",       1, "B/A",                MOVED_HERE},
+      {1, "A/A/A",    "normal",       1, "B/A/A", FALSE, "AAA_1", TRUE},
+      {1, "A/A/A/A",  "normal",       1, "B/A/A/A",            MOVED_HERE},
+
+      {2, "A/A",      "normal",       1, "C/A",                MOVED_HERE},
+      {2, "A/A/A",    "normal",       1, "C/A/A", FALSE, "AAA_2", TRUE},
+      {2, "A/A/A/A",  "normal",       1, "C/A/A/A",            MOVED_HERE},
+
+      {3, "A/A/A",    "base-deleted", NO_COPY_FROM,      "AAA_3"},
+      {3, "A/A/A/A",  "base-deleted", NO_COPY_FROM},
+
+      {0, "B",        "normal",       1, "B"},
+      {0, "B/A",      "normal",       1, "B/A"},
+      {0, "B/A/A",    "normal",       1, "B/A/A"},
+      {0, "B/A/A/A",  "normal",       1, "B/A/A/A"},
+
+      {1, "B",        "base-deleted", NO_COPY_FROM, "A"},
+      {1, "B/A",      "base-deleted", NO_COPY_FROM},
+      {1, "B/A/A",    "base-deleted", NO_COPY_FROM},
+      {1, "B/A/A/A",  "base-deleted", NO_COPY_FROM},
+
+      {0, "C",        "normal",       1, "C"},
+      {0, "C/A",      "normal",       1, "C/A"},
+      {0, "C/A/A",    "normal",       1, "C/A/A"},
+      {0, "C/A/A/A",  "normal",       1, "C/A/A/A"},
+
+      {2, "C/A",      "base-deleted", NO_COPY_FROM, "A/A"},
+      {2, "C/A/A",    "base-deleted", NO_COPY_FROM},
+      {2, "C/A/A/A",  "base-deleted", NO_COPY_FROM},
+
+      {0},
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Ok, now we are in the very ugly case where A/A/A is moved away 3 times */
+
+  /* Let's delete A */
+  SVN_ERR(sbox_wc_delete(&b, "A"));
+
+  /* AAA_1, AAA_2 and AAA_3 should still be moves after deleting A */
+  {
+    nodes_row_t nodes[] = {
+
+      {0, "",         "normal",       0, ""},
+
+      {1, "AAA_1",    "normal",       1, "A/A/A",              MOVED_HERE},
+      {1, "AAA_1/A",  "normal",       1, "A/A/A/A",            MOVED_HERE},
+
+      {1, "AAA_2",    "normal",       1, "B/A/A",              MOVED_HERE},
+      {1, "AAA_2/A",  "normal",       1, "B/A/A/A",            MOVED_HERE},
+
+      {1, "AAA_3",    "normal",       1, "C/A/A",              MOVED_HERE},
+      {1, "AAA_3/A",  "normal",       1, "C/A/A/A",            MOVED_HERE},
+
+      {0, "A",        "normal",       1, "A"},
+      {0, "A/A",      "normal",       1, "A/A"},
+      {0, "A/A/A",    "normal",       1, "A/A/A"},
+      {0, "A/A/A/A",  "normal",       1, "A/A/A/A"},
+
+      {1, "A",        "base-deleted", NO_COPY_FROM},
+      {1, "A/A",      "base-deleted", NO_COPY_FROM},
+      {1, "A/A/A",    "base-deleted", NO_COPY_FROM, "AAA_1"},
+      {1, "A/A/A/A",  "base-deleted", NO_COPY_FROM},
+
+      {0, "B",        "normal",       1, "B"},
+      {0, "B/A",      "normal",       1, "B/A"},
+      {0, "B/A/A",    "normal",       1, "B/A/A"},
+      {0, "B/A/A/A",  "normal",       1, "B/A/A/A"},
+
+      {1, "B",        "base-deleted", NO_COPY_FROM},
+      {1, "B/A",      "base-deleted", NO_COPY_FROM},
+      {1, "B/A/A",    "base-deleted", NO_COPY_FROM, "AAA_2"},
+      {1, "B/A/A/A",  "base-deleted", NO_COPY_FROM},
+
+      {0, "C",        "normal",       1, "C"},
+      {0, "C/A",      "normal",       1, "C/A"},
+      {0, "C/A/A",    "normal",       1, "C/A/A"},
+      {0, "C/A/A/A",  "normal",       1, "C/A/A/A"},
+
+      {2, "C/A",      "base-deleted", NO_COPY_FROM},
+      {2, "C/A/A",    "base-deleted", NO_COPY_FROM, "AAA_3"},
+      {2, "C/A/A/A",  "base-deleted", NO_COPY_FROM},
+
+      {0},
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Companion to move_delete_intermediate: same triple-move setup, but
+   the intermediate tree A is reverted instead of deleted.  AAA_1 (whose
+   source layer is restored) degrades to a plain copy, while AAA_2 and
+   AAA_3 stay moves retargeted to their original source paths.  */
+static svn_error_t *
+move_revert_intermediate(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "move_revert_intermediate", opts,
+                                   pool));
+
+  /* r1: three identical 4-level trees A, B, C. */
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/A/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/A/A/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "B"));
+  SVN_ERR(sbox_wc_mkdir(&b, "B/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "B/A/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "B/A/A/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "C"));
+  SVN_ERR(sbox_wc_mkdir(&b, "C/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "C/A/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "C/A/A/A"));
+  SVN_ERR(sbox_wc_commit(&b, ""));
+
+  /* Move A/A/A away three times, re-filling the path from B, then C/A. */
+  SVN_ERR(sbox_wc_move(&b, "A/A/A", "AAA_1"));
+
+  SVN_ERR(sbox_wc_delete(&b, "A"));
+  SVN_ERR(sbox_wc_move(&b, "B", "A"));
+
+  SVN_ERR(sbox_wc_move(&b, "A/A/A", "AAA_2"));
+
+  SVN_ERR(sbox_wc_delete(&b, "A/A"));
+  SVN_ERR(sbox_wc_move(&b, "C/A", "A/A"));
+
+  SVN_ERR(sbox_wc_move(&b, "A/A/A", "AAA_3"));
+
+  /* Verify that the move is still recorded correctly */
+  {
+    nodes_row_t nodes[] = {
+
+      {0, "",         "normal",       0, ""},
+
+      {1, "AAA_1",    "normal",       1, "A/A/A",              MOVED_HERE},
+      {1, "AAA_1/A",  "normal",       1, "A/A/A/A",            MOVED_HERE},
+      {1, "AAA_2",    "normal",       1, "B/A/A",              MOVED_HERE},
+      {1, "AAA_2/A",  "normal",       1, "B/A/A/A",            MOVED_HERE},
+      {1, "AAA_3",    "normal",       1, "C/A/A",              MOVED_HERE},
+      {1, "AAA_3/A",  "normal",       1, "C/A/A/A",            MOVED_HERE},
+
+      {0, "A",        "normal",       1, "A"},
+      {0, "A/A",      "normal",       1, "A/A"},
+      {0, "A/A/A",    "normal",       1, "A/A/A"},
+      {0, "A/A/A/A",  "normal",       1, "A/A/A/A"},
+
+      {1, "A",        "normal",       1, "B",                  MOVED_HERE},
+      {1, "A/A",      "normal",       1, "B/A",                MOVED_HERE},
+      {1, "A/A/A",    "normal",       1, "B/A/A", FALSE, "AAA_1", TRUE},
+      {1, "A/A/A/A",  "normal",       1, "B/A/A/A",            MOVED_HERE},
+
+      {2, "A/A",      "normal",       1, "C/A",                MOVED_HERE},
+      {2, "A/A/A",    "normal",       1, "C/A/A", FALSE, "AAA_2", TRUE},
+      {2, "A/A/A/A",  "normal",       1, "C/A/A/A",            MOVED_HERE},
+
+      {3, "A/A/A",    "base-deleted", NO_COPY_FROM,      "AAA_3"},
+      {3, "A/A/A/A",  "base-deleted", NO_COPY_FROM},
+
+      {0, "B",        "normal",       1, "B"},
+      {0, "B/A",      "normal",       1, "B/A"},
+      {0, "B/A/A",    "normal",       1, "B/A/A"},
+      {0, "B/A/A/A",  "normal",       1, "B/A/A/A"},
+
+      {1, "B",        "base-deleted", NO_COPY_FROM, "A"},
+      {1, "B/A",      "base-deleted", NO_COPY_FROM},
+      {1, "B/A/A",    "base-deleted", NO_COPY_FROM},
+      {1, "B/A/A/A",  "base-deleted", NO_COPY_FROM},
+
+      {0, "C",        "normal",       1, "C"},
+      {0, "C/A",      "normal",       1, "C/A"},
+      {0, "C/A/A",    "normal",       1, "C/A/A"},
+      {0, "C/A/A/A",  "normal",       1, "C/A/A/A"},
+
+      {2, "C/A",      "base-deleted", NO_COPY_FROM, "A/A"},
+      {2, "C/A/A",    "base-deleted", NO_COPY_FROM},
+      {2, "C/A/A/A",  "base-deleted", NO_COPY_FROM},
+
+      {0},
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* Ok, now we are in the very ugly case where A/A/A is moved away 3 times */
+
+  /* Let's revert A */
+  SVN_ERR(sbox_wc_revert(&b, "A", svn_depth_infinity));
+
+  /* AAA_1 should now be a copy, but AAA_2 and AAA_3 should still be moves,
+     but now from the original location instead of from "A/A/A" */
+  {
+    nodes_row_t nodes[] = {
+
+      {0, "",         "normal",       0, ""},
+
+      {1, "AAA_1",    "normal",       1, "A/A/A",},
+      {1, "AAA_1/A",  "normal",       1, "A/A/A/A"},
+      {1, "AAA_2",    "normal",       1, "B/A/A",              MOVED_HERE},
+      {1, "AAA_2/A",  "normal",       1, "B/A/A/A",            MOVED_HERE},
+      {1, "AAA_3",    "normal",       1, "C/A/A",              MOVED_HERE},
+      {1, "AAA_3/A",  "normal",       1, "C/A/A/A",            MOVED_HERE},
+
+      {0, "A",        "normal",       1, "A"},
+      {0, "A/A",      "normal",       1, "A/A"},
+      {0, "A/A/A",    "normal",       1, "A/A/A"},
+      {0, "A/A/A/A",  "normal",       1, "A/A/A/A"},
+
+      {0, "B",        "normal",       1, "B"},
+      {0, "B/A",      "normal",       1, "B/A"},
+      {0, "B/A/A",    "normal",       1, "B/A/A"},
+      {0, "B/A/A/A",  "normal",       1, "B/A/A/A"},
+
+      {1, "B",        "base-deleted", NO_COPY_FROM},
+      {1, "B/A",      "base-deleted", NO_COPY_FROM},
+      {1, "B/A/A",    "base-deleted", NO_COPY_FROM, "AAA_2"},
+      {1, "B/A/A/A",  "base-deleted", NO_COPY_FROM},
+
+      {0, "C",        "normal",       1, "C"},
+      {0, "C/A",      "normal",       1, "C/A"},
+      {0, "C/A/A",    "normal",       1, "C/A/A"},
+      {0, "C/A/A/A",  "normal",       1, "C/A/A/A"},
+
+      {2, "C/A",      "base-deleted", NO_COPY_FROM},
+      {2, "C/A/A",    "base-deleted", NO_COPY_FROM, "AAA_3"},
+      {2, "C/A/A/A",  "base-deleted", NO_COPY_FROM},
+
+      {0},
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test: replace an ancestor with its own (moved) child.
+   Commit A, A/A; move A -> A2; then move A2/A back on top of the
+   now-deleted A; finally delete A2.  After each step the NODES table
+   is checked against the expected op-depth rows. */
+static svn_error_t *
+move_replace_ancestor_with_child(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "move_replace_ancestor_with_child", opts,
+                                   pool));
+
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/A"));
+
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+
+  SVN_ERR(sbox_wc_move(&b, "A", "A2"));
+
+  /* Plain move: A is base-deleted with moved-to A2, A2 is moved-here. */
+  {
+    nodes_row_t nodes[] = {
+
+      { 0, "", "normal", 1, "" },
+
+      { 0, "A", "normal", 1, "A"},
+      { 0, "A/A", "normal", 1, "A/A" },
+
+      { 1, "A", "base-deleted", NO_COPY_FROM , "A2"},
+      { 1, "A/A", "base-deleted", NO_COPY_FROM },
+
+      { 1, "A2", "normal", 1, "A", MOVED_HERE },
+      { 1, "A2/A", "normal", 1, "A/A", MOVED_HERE },
+
+      { 0 },
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  SVN_ERR(sbox_wc_move(&b, "A2/A", "A"));
+
+  /* A is now replaced by what used to be its own child (via A2). */
+  {
+    nodes_row_t nodes[] = {
+      { 0, "", "normal", 1, "" },
+
+      { 0, "A", "normal", 1, "A"},
+      { 0, "A/A", "normal", 1, "A/A" },
+
+      { 1, "A", "normal", 1, "A/A", FALSE, "A2", TRUE },
+      { 1, "A/A", "base-deleted", NO_COPY_FROM },
+
+      { 1, "A2", "normal", 1, "A", MOVED_HERE },
+      { 1, "A2/A", "normal", 1, "A/A", MOVED_HERE },
+
+      { 2, "A2/A", "base-deleted", NO_COPY_FROM, "A" },
+      { 0 },
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  /* ### This currently fails with an assertion in maintainer mode */
+  SVN_ERR(sbox_wc_delete(&b, "A2"));
+
+  /* Deleting A2 collapses the intermediate move; A remains moved-here
+     from its former child A/A. */
+  {
+    nodes_row_t nodes[] = {
+      { 0, "", "normal", 1, "" },
+
+      { 0, "A", "normal", 1, "A"},
+      { 0, "A/A", "normal", 1, "A/A" },
+
+      { 1, "A", "normal", 1, "A/A", MOVED_HERE },
+      { 1, "A/A", "base-deleted", NO_COPY_FROM, "A" },
+
+      { 0 },
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  SVN_ERR(sbox_wc_commit(&b, "A"));
+
+  return SVN_NO_ERROR;
+}
+
+/* Test: a chain of moves nested inside a locally-added directory that is
+   then deleted.  A is moved into new dir B, two more moves pull A/A/A
+   out to AA; deleting B must keep the surviving move A/A/A -> AA intact
+   while discarding the intermediate moves that lived inside B. */
+static svn_error_t *
+move_twice_within_delete(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(svn_test__sandbox_create(&b, "move_twice_within_delete", opts,
+                                   pool));
+
+  SVN_ERR(sbox_wc_mkdir(&b, "A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/A"));
+  SVN_ERR(sbox_wc_mkdir(&b, "A/A/A"));
+
+  SVN_ERR(sbox_wc_commit(&b, ""));
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+
+  SVN_ERR(sbox_wc_mkdir(&b, "B"));
+  SVN_ERR(sbox_wc_move(&b, "A", "B/A"));
+  SVN_ERR(sbox_wc_move(&b, "B/A/A", "B/AA"));
+  SVN_ERR(sbox_wc_move(&b, "B/AA/A", "AA"));
+
+  {
+    nodes_row_t nodes[] = {
+
+      { 0, "", "normal", 1, "" },
+
+      { 0, "A", "normal", 1, "A" },
+      { 0, "A/A", "normal", 1, "A/A" },
+      { 0, "A/A/A", "normal", 1, "A/A/A" },
+
+      { 1, "A", "base-deleted", NO_COPY_FROM, "B/A" },
+      { 1, "A/A", "base-deleted", NO_COPY_FROM },
+      { 1, "A/A/A", "base-deleted", NO_COPY_FROM },
+
+      { 1, "AA", "normal", 1, "A/A/A", MOVED_HERE },
+
+      { 1, "B", "normal", NO_COPY_FROM },
+      { 2, "B/A", "normal", 1, "A", MOVED_HERE },
+      { 2, "B/A/A", "normal", 1, "A/A", MOVED_HERE },
+      { 2, "B/A/A/A", "normal", 1, "A/A/A", MOVED_HERE },
+
+      { 3, "B/A/A", "base-deleted", NO_COPY_FROM, "B/AA" },
+      { 3, "B/A/A/A", "base-deleted", NO_COPY_FROM },
+
+      { 2, "B/AA", "normal", 1, "A/A", MOVED_HERE},
+      { 2, "B/AA/A", "normal", 1, "A/A/A", MOVED_HERE },
+
+      { 3, "B/AA/A", "base-deleted", NO_COPY_FROM, "AA" },
+
+      { 0 },
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  SVN_ERR(sbox_wc_delete(&b, "B"));
+
+  /* After deleting B only the end-to-end move A/A/A -> AA survives. */
+  {
+    nodes_row_t nodes[] = {
+      { 0, "", "normal", 1, "" },
+
+      { 0, "A", "normal", 1, "A" },
+      { 0, "A/A", "normal", 1, "A/A" },
+      { 0, "A/A/A", "normal", 1, "A/A/A" },
+
+      { 1, "A", "base-deleted", NO_COPY_FROM },
+      { 1, "A/A", "base-deleted", NO_COPY_FROM },
+      { 1, "A/A/A", "base-deleted", NO_COPY_FROM, "AA" },
+
+      { 1, "AA", "normal", 1, "A/A/A", MOVED_HERE },
+
+      { 0 },
+    };
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Shared setup helper for the del4_*/move4_* update tests.
+
+   Builds four identical trees A, B, C, D (each X/A/A/A deep) in r1, then
+   commits an r2 whose change is selected by a substring of TEST_NAME
+   ("_edit_", "_delete_", "_add_", "_replace_", "_delself_" or
+   "_replaceself_"), applied identically to all four trees.  After
+   updating back to r1 it performs a cascade of local moves so that each
+   tier of "A/A/A" gets moved away once (to AAA_1, AAA_2, AAA_3) while
+   the ancestor is either moved away (MOVE_AWAY) or deleted. */
+static svn_error_t *
+init_move4(svn_test__sandbox_t *sandbox,
+           const char *test_name,
+           const svn_test_opts_t *opts,
+           svn_boolean_t move_away,
+           apr_pool_t *pool)
+{
+  SVN_ERR(svn_test__sandbox_create(sandbox, test_name, opts, pool));
+
+  SVN_ERR(sbox_wc_mkdir(sandbox, "A"));
+  SVN_ERR(sbox_wc_mkdir(sandbox, "A/A"));
+  SVN_ERR(sbox_wc_mkdir(sandbox, "A/A/A"));
+  SVN_ERR(sbox_wc_mkdir(sandbox, "A/A/A/A"));
+
+  SVN_ERR(sbox_wc_mkdir(sandbox, "B"));
+  SVN_ERR(sbox_wc_mkdir(sandbox, "B/A"));
+  SVN_ERR(sbox_wc_mkdir(sandbox, "B/A/A"));
+  SVN_ERR(sbox_wc_mkdir(sandbox, "B/A/A/A"));
+
+  SVN_ERR(sbox_wc_mkdir(sandbox, "C"));
+  SVN_ERR(sbox_wc_mkdir(sandbox, "C/A"));
+  SVN_ERR(sbox_wc_mkdir(sandbox, "C/A/A"));
+  SVN_ERR(sbox_wc_mkdir(sandbox, "C/A/A/A"));
+
+  SVN_ERR(sbox_wc_mkdir(sandbox, "D"));
+  SVN_ERR(sbox_wc_mkdir(sandbox, "D/A"));
+  SVN_ERR(sbox_wc_mkdir(sandbox, "D/A/A"));
+  SVN_ERR(sbox_wc_mkdir(sandbox, "D/A/A/A"));
+
+  SVN_ERR(sbox_wc_commit(sandbox, "")); /* r1 */
+
+  /* The r2 change is selected by the test name and applied to all four
+     trees so the later update produces the same kind of tree conflict
+     at every moved/deleted location. */
+  if (strstr(test_name, "_edit_"))
+    {
+      SVN_ERR(sbox_wc_propset(sandbox, "key", "value", "A/A/A"));
+      SVN_ERR(sbox_wc_propset(sandbox, "key", "value", "B/A/A"));
+      SVN_ERR(sbox_wc_propset(sandbox, "key", "value", "C/A/A"));
+      SVN_ERR(sbox_wc_propset(sandbox, "key", "value", "D/A/A"));
+      SVN_ERR(sbox_wc_propset(sandbox, "key", "value", "A/A/A/A"));
+      SVN_ERR(sbox_wc_propset(sandbox, "key", "value", "B/A/A/A"));
+      SVN_ERR(sbox_wc_propset(sandbox, "key", "value", "C/A/A/A"));
+      SVN_ERR(sbox_wc_propset(sandbox, "key", "value", "D/A/A/A"));
+    }
+  else if (strstr(test_name, "_delete_"))
+    {
+      SVN_ERR(sbox_wc_delete(sandbox, "A/A/A/A"));
+      SVN_ERR(sbox_wc_delete(sandbox, "B/A/A/A"));
+      SVN_ERR(sbox_wc_delete(sandbox, "C/A/A/A"));
+      SVN_ERR(sbox_wc_delete(sandbox, "D/A/A/A"));
+    }
+  else if (strstr(test_name, "_add_"))
+    {
+      SVN_ERR(sbox_wc_mkdir(sandbox, "A/A/A/NEW"));
+      SVN_ERR(sbox_wc_mkdir(sandbox, "B/A/A/NEW"));
+      SVN_ERR(sbox_wc_mkdir(sandbox, "C/A/A/NEW"));
+      SVN_ERR(sbox_wc_mkdir(sandbox, "D/A/A/NEW"));
+    }
+  else if (strstr(test_name, "_replace_"))
+    {
+      SVN_ERR(sbox_wc_delete(sandbox, "A/A/A/A"));
+      SVN_ERR(sbox_wc_delete(sandbox, "B/A/A/A"));
+      SVN_ERR(sbox_wc_delete(sandbox, "C/A/A/A"));
+      SVN_ERR(sbox_wc_delete(sandbox, "D/A/A/A"));
+      SVN_ERR(sbox_file_write(sandbox, "A/A/A/A", "A"));
+      SVN_ERR(sbox_file_write(sandbox, "B/A/A/A", "A"));
+      SVN_ERR(sbox_file_write(sandbox, "C/A/A/A", "A"));
+      SVN_ERR(sbox_file_write(sandbox, "D/A/A/A", "A"));
+      SVN_ERR(sbox_wc_add(sandbox, "A/A/A/A"));
+      SVN_ERR(sbox_wc_add(sandbox, "B/A/A/A"));
+      SVN_ERR(sbox_wc_add(sandbox, "C/A/A/A"));
+      SVN_ERR(sbox_wc_add(sandbox, "D/A/A/A"));
+    }
+  else if (strstr(test_name, "_delself_"))
+    {
+      SVN_ERR(sbox_wc_delete(sandbox, "A/A/A"));
+      SVN_ERR(sbox_wc_delete(sandbox, "B/A/A"));
+      SVN_ERR(sbox_wc_delete(sandbox, "C/A/A"));
+      SVN_ERR(sbox_wc_delete(sandbox, "D/A/A"));
+    }
+  else if (strstr(test_name, "_replaceself_"))
+    {
+      SVN_ERR(sbox_wc_delete(sandbox, "A/A/A"));
+      SVN_ERR(sbox_wc_delete(sandbox, "B/A/A"));
+      SVN_ERR(sbox_wc_delete(sandbox, "C/A/A"));
+      SVN_ERR(sbox_wc_delete(sandbox, "D/A/A"));
+      SVN_ERR(sbox_file_write(sandbox, "A/A/A", "A"));
+      SVN_ERR(sbox_file_write(sandbox, "B/A/A", "A"));
+      SVN_ERR(sbox_file_write(sandbox, "C/A/A", "A"));
+      SVN_ERR(sbox_file_write(sandbox, "D/A/A", "A"));
+      SVN_ERR(sbox_wc_add(sandbox, "A/A/A"));
+      SVN_ERR(sbox_wc_add(sandbox, "B/A/A"));
+      SVN_ERR(sbox_wc_add(sandbox, "C/A/A"));
+      SVN_ERR(sbox_wc_add(sandbox, "D/A/A"));
+    }
+
+  SVN_ERR(sbox_wc_commit(sandbox, ""));
+  SVN_ERR(sbox_wc_update(sandbox, "", 1));
+
+  /* Move cascade: each A/A/A that lands under "A" is moved away once,
+     and the vacated ancestors are refilled from B, C and D in turn. */
+  SVN_ERR(sbox_wc_move(sandbox, "A/A/A", "AAA_1"));
+
+  if (move_away)
+    SVN_ERR(sbox_wc_move(sandbox, "A", "A_moved"));
+  else
+    SVN_ERR(sbox_wc_delete(sandbox, "A"));
+
+  SVN_ERR(sbox_wc_move(sandbox, "B", "A"));
+
+  SVN_ERR(sbox_wc_move(sandbox, "A/A/A", "AAA_2"));
+
+  if (move_away)
+    SVN_ERR(sbox_wc_move(sandbox, "A/A", "BA_moved"));
+  else
+    SVN_ERR(sbox_wc_delete(sandbox, "A/A"));
+
+  SVN_ERR(sbox_wc_move(sandbox, "C/A", "A/A"));
+
+  SVN_ERR(sbox_wc_move(sandbox, "A/A/A", "AAA_3"));
+
+  SVN_ERR(sbox_wc_move(sandbox, "D/A/A", "A/A/A"));
+
+  return SVN_NO_ERROR;
+}
+
+/* Test: init_move4 "delete" variant with an incoming r2 property edit
+   ("_edit_" scenario).  Checks the stacked op-depth rows for A/A/A,
+   then exercises update + resolve with choose_mine_conflict (moves kept,
+   "key" props merged into the move destinations), a round-trip back to
+   r1, and finally resolve with choose_merged (which breaks each move). */
+static svn_error_t *
+del4_update_edit_AAA(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(init_move4(&b, "del4_update_edit_AAA", opts, FALSE, pool));
+
+  /* Four layers stacked on A/A/A: base plus one replacement per move. */
+  {
+    nodes_row_t nodes[] = {
+
+      { 0, "A/A/A", "normal", 1, "A/A/A" },
+      { 1, "A/A/A", "normal", 1, "B/A/A", FALSE, "AAA_1", TRUE },
+      { 2, "A/A/A", "normal", 1, "C/A/A", FALSE, "AAA_2", TRUE },
+      { 3, "A/A/A", "normal", 1, "D/A/A", FALSE, "AAA_3", TRUE },
+
+      { 0, "A/A/A/A", "normal", 1, "A/A/A/A" },
+      { 1, "A/A/A/A", "normal", 1, "B/A/A/A", FALSE, NULL, TRUE },
+      { 2, "A/A/A/A", "normal", 1, "C/A/A/A", FALSE, NULL, TRUE },
+      { 3, "A/A/A/A", "normal", 1, "D/A/A/A", FALSE, NULL, TRUE },
+
+      { 0 },
+    };
+
+    SVN_ERR(check_db_rows(&b, "A/A/A", nodes));
+  }
+
+  /* Update and resolve via mine strategy */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+
+  /* All moves survive; the r2 "key" property follows each move. */
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 2, ""},
+      {0, "A", "normal", 2, "A"},
+      {0, "A/A", "normal", 2, "A/A"},
+      {0, "A/A/A", "normal", 2, "A/A/A", NOT_MOVED, "key"},
+      {0, "A/A/A/A", "normal", 2, "A/A/A/A", NOT_MOVED, "key"},
+      {0, "B", "normal", 2, "B"},
+      {0, "B/A", "normal", 2, "B/A"},
+      {0, "B/A/A", "normal", 2, "B/A/A", NOT_MOVED, "key"},
+      {0, "B/A/A/A", "normal", 2, "B/A/A/A", NOT_MOVED, "key"},
+      {0, "C", "normal", 2, "C"},
+      {0, "C/A", "normal", 2, "C/A"},
+      {0, "C/A/A", "normal", 2, "C/A/A", NOT_MOVED, "key"},
+      {0, "C/A/A/A", "normal", 2, "C/A/A/A", NOT_MOVED, "key"},
+      {0, "D", "normal", 2, "D"},
+      {0, "D/A", "normal", 2, "D/A"},
+      {0, "D/A/A", "normal", 2, "D/A/A", NOT_MOVED, "key"},
+      {0, "D/A/A/A", "normal", 2, "D/A/A/A", NOT_MOVED, "key"},
+
+      {1, "A", "normal", 2, "B", MOVED_HERE},
+      {1, "A/A", "normal", 2, "B/A", MOVED_HERE},
+      {1, "A/A/A", "normal", 2, "B/A/A", FALSE, "AAA_1", TRUE, "key"},
+      {1, "A/A/A/A", "normal", 2, "B/A/A/A", FALSE, NULL, TRUE, "key"},
+      {1, "AAA_1", "normal", 2, "A/A/A", MOVED_HERE, "key"},
+      {1, "AAA_1/A", "normal", 2, "A/A/A/A", MOVED_HERE, "key"},
+      {1, "AAA_2", "normal", 2, "B/A/A", MOVED_HERE, "key"},
+      {1, "AAA_2/A", "normal", 2, "B/A/A/A", MOVED_HERE, "key"},
+      {1, "AAA_3", "normal", 2, "C/A/A", MOVED_HERE, "key"},
+      {1, "AAA_3/A", "normal", 2, "C/A/A/A", MOVED_HERE, "key"},
+      {1, "B", "base-deleted", NO_COPY_FROM, "A"},
+      {1, "B/A", "base-deleted", NO_COPY_FROM},
+      {1, "B/A/A", "base-deleted", NO_COPY_FROM},
+      {1, "B/A/A/A", "base-deleted", NO_COPY_FROM},
+
+      {2, "A/A", "normal", 2, "C/A", MOVED_HERE},
+      {2, "A/A/A", "normal", 2, "C/A/A", FALSE, "AAA_2", TRUE, "key"},
+      {2, "A/A/A/A", "normal", 2, "C/A/A/A", FALSE, NULL, TRUE, "key"},
+      {2, "C/A", "base-deleted", NO_COPY_FROM, "A/A"},
+      {2, "C/A/A", "base-deleted", NO_COPY_FROM},
+      {2, "C/A/A/A", "base-deleted", NO_COPY_FROM},
+
+      {3, "A/A/A", "normal", 2, "D/A/A", FALSE, "AAA_3", TRUE, "key"},
+      {3, "A/A/A/A", "normal", 2, "D/A/A/A", FALSE, NULL, TRUE, "key"},
+      {3, "D/A/A", "base-deleted", NO_COPY_FROM, "A/A/A"},
+      {3, "D/A/A/A", "base-deleted", NO_COPY_FROM},
+
+      { 0 },
+    };
+
+    SVN_ERR(check_db_rows(&b, "", nodes));
+    SVN_ERR(check_db_conflicts(&b, "", NULL));
+  }
+
+  /* Go back to start position */
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+
+  /* Same layout as before the first update, minus the "key" props. */
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 1, ""},
+      {0, "A", "normal", 1, "A"},
+      {0, "A/A", "normal", 1, "A/A"},
+      {0, "A/A/A", "normal", 1, "A/A/A", NOT_MOVED},
+      {0, "A/A/A/A", "normal", 1, "A/A/A/A", NOT_MOVED},
+      {0, "B", "normal", 1, "B"},
+      {0, "B/A", "normal", 1, "B/A"},
+      {0, "B/A/A", "normal", 1, "B/A/A", NOT_MOVED},
+      {0, "B/A/A/A", "normal", 1, "B/A/A/A", NOT_MOVED},
+      {0, "C", "normal", 1, "C"},
+      {0, "C/A", "normal", 1, "C/A"},
+      {0, "C/A/A", "normal", 1, "C/A/A", NOT_MOVED},
+      {0, "C/A/A/A", "normal", 1, "C/A/A/A", NOT_MOVED},
+      {0, "D", "normal", 1, "D"},
+      {0, "D/A", "normal", 1, "D/A"},
+      {0, "D/A/A", "normal", 1, "D/A/A", NOT_MOVED},
+      {0, "D/A/A/A", "normal", 1, "D/A/A/A", NOT_MOVED},
+
+      {1, "A", "normal", 1, "B", MOVED_HERE},
+      {1, "A/A", "normal", 1, "B/A", MOVED_HERE},
+      {1, "A/A/A", "normal", 1, "B/A/A", FALSE, "AAA_1", TRUE},
+      {1, "A/A/A/A", "normal", 1, "B/A/A/A", FALSE, NULL, TRUE},
+      {1, "AAA_1", "normal", 1, "A/A/A", MOVED_HERE},
+      {1, "AAA_1/A", "normal", 1, "A/A/A/A", MOVED_HERE},
+      {1, "AAA_2", "normal", 1, "B/A/A", MOVED_HERE},
+      {1, "AAA_2/A", "normal", 1, "B/A/A/A", MOVED_HERE},
+      {1, "AAA_3", "normal", 1, "C/A/A", MOVED_HERE},
+      {1, "AAA_3/A", "normal", 1, "C/A/A/A", MOVED_HERE},
+      {1, "B", "base-deleted", NO_COPY_FROM, "A"},
+      {1, "B/A", "base-deleted", NO_COPY_FROM},
+      {1, "B/A/A", "base-deleted", NO_COPY_FROM},
+      {1, "B/A/A/A", "base-deleted", NO_COPY_FROM},
+
+      {2, "A/A", "normal", 1, "C/A", MOVED_HERE},
+      {2, "A/A/A", "normal", 1, "C/A/A", FALSE, "AAA_2", TRUE},
+      {2, "A/A/A/A", "normal", 1, "C/A/A/A", FALSE, NULL, TRUE},
+      {2, "C/A", "base-deleted", NO_COPY_FROM, "A/A"},
+      {2, "C/A/A", "base-deleted", NO_COPY_FROM},
+      {2, "C/A/A/A", "base-deleted", NO_COPY_FROM},
+
+      {3, "A/A/A", "normal", 1, "D/A/A", FALSE, "AAA_3", TRUE},
+      {3, "A/A/A/A", "normal", 1, "D/A/A/A", FALSE, NULL, TRUE},
+      {3, "D/A/A", "base-deleted", NO_COPY_FROM, "A/A/A"},
+      {3, "D/A/A/A", "base-deleted", NO_COPY_FROM},
+
+      { 0 },
+    };
+
+    SVN_ERR(check_db_rows(&b, "", nodes));
+    SVN_ERR(check_db_conflicts(&b, "", NULL));
+  }
+
+  /* Update and resolve via their strategy */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  {
+    conflict_info_t conflicts[] = {
+      {"A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                           svn_wc_conflict_reason_replaced}},
+      {"B", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                           svn_wc_conflict_reason_moved_away, "B"}},
+      {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                             svn_wc_conflict_reason_moved_away, "C/A"}},
+      {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                               svn_wc_conflict_reason_moved_away, "D/A/A"}},
+      {0},
+    };
+
+    SVN_ERR(check_db_conflicts(&b, "", conflicts));
+  }
+  /* This breaks the move A/A/A -> AAA_1 */
+  SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty, svn_wc_conflict_choose_merged));
+  /* This breaks the move B -> A */
+  SVN_ERR(sbox_wc_resolve(&b, "B", svn_depth_empty, svn_wc_conflict_choose_merged));
+  /* This breaks the move C/A/A -> A/A */
+  SVN_ERR(sbox_wc_resolve(&b, "C/A", svn_depth_empty, svn_wc_conflict_choose_merged));
+  /* This breaks the move from D/A/A -> A/A/A */
+  SVN_ERR(sbox_wc_resolve(&b, "D/A/A", svn_depth_empty, svn_wc_conflict_choose_merged));
+
+  /* After choose_merged the moves degrade to plain copies/deletes:
+     most MOVED_HERE flags and moved-to fields are gone, and the layers
+     still reference r1 copyfrom sources while base is at r2. */
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 2, ""},
+      {0, "A", "normal", 2, "A"},
+      {0, "A/A", "normal", 2, "A/A"},
+      {0, "A/A/A", "normal", 2, "A/A/A", NOT_MOVED, "key"},
+      {0, "A/A/A/A", "normal", 2, "A/A/A/A", NOT_MOVED, "key"},
+      {0, "B", "normal", 2, "B"},
+      {0, "B/A", "normal", 2, "B/A"},
+      {0, "B/A/A", "normal", 2, "B/A/A", NOT_MOVED, "key"},
+      {0, "B/A/A/A", "normal", 2, "B/A/A/A", NOT_MOVED, "key"},
+      {0, "C", "normal", 2, "C"},
+      {0, "C/A", "normal", 2, "C/A"},
+      {0, "C/A/A", "normal", 2, "C/A/A", NOT_MOVED, "key"},
+      {0, "C/A/A/A", "normal", 2, "C/A/A/A", NOT_MOVED, "key"},
+      {0, "D", "normal", 2, "D"},
+      {0, "D/A", "normal", 2, "D/A"},
+      {0, "D/A/A", "normal", 2, "D/A/A", NOT_MOVED, "key"},
+      {0, "D/A/A/A", "normal", 2, "D/A/A/A", NOT_MOVED, "key"},
+      {1, "A", "normal", 1, "B"},
+      {1, "A/A", "normal", 1, "B/A"},
+      {1, "A/A/A", "normal", 1, "B/A/A", FALSE},
+      {1, "A/A/A/A", "normal", 1, "B/A/A/A"},
+      {1, "AAA_1", "normal", 1, "A/A/A"},
+      {1, "AAA_1/A", "normal", 1, "A/A/A/A"},
+      {1, "AAA_2", "normal", 1, "B/A/A", MOVED_HERE},
+      {1, "AAA_2/A", "normal", 1, "B/A/A/A", MOVED_HERE},
+      {1, "AAA_3", "normal", 1, "C/A/A", MOVED_HERE},
+      {1, "AAA_3/A", "normal", 1, "C/A/A/A", MOVED_HERE},
+      {1, "B", "base-deleted", NO_COPY_FROM},
+      {1, "B/A", "base-deleted", NO_COPY_FROM},
+      {1, "B/A/A", "base-deleted", NO_COPY_FROM},
+      {1, "B/A/A/A", "base-deleted", NO_COPY_FROM},
+      {2, "A/A", "normal", 1, "C/A"},
+      {2, "A/A/A", "normal", 1, "C/A/A", FALSE, "AAA_2"},
+      {2, "A/A/A/A", "normal", 1, "C/A/A/A"},
+      {2, "C/A", "base-deleted", NO_COPY_FROM},
+      {2, "C/A/A", "base-deleted", NO_COPY_FROM},
+      {2, "C/A/A/A", "base-deleted", NO_COPY_FROM},
+      {3, "A/A/A", "normal", 1, "D/A/A", FALSE, "AAA_3"},
+      {3, "A/A/A/A", "normal", 1, "D/A/A/A"},
+      {3, "D/A/A", "base-deleted", NO_COPY_FROM},
+      {3, "D/A/A/A", "base-deleted", NO_COPY_FROM},
+
+      { 0 },
+    };
+
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+
+
+  return SVN_NO_ERROR;
+}
+
+/* Test: init_move4 "delete" variant with an incoming r2 delete of each
+   X/A/A/A ("_delete_" scenario).  Verifies the same four tree conflicts
+   are raised on each update, then resolves with mine-conflict twice
+   (r2, back to r1) and finally with choose_merged on r2. */
+static svn_error_t *
+del4_update_delete_AAA(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(init_move4(&b, "del4_update_delete_AAA", opts, FALSE, pool));
+
+  /* Update and resolve via mine strategy */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  {
+    conflict_info_t conflicts[] = {
+      {"A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                           svn_wc_conflict_reason_replaced}},
+      {"B", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                           svn_wc_conflict_reason_moved_away, "B"}},
+      {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                             svn_wc_conflict_reason_moved_away, "C/A"}},
+      {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                               svn_wc_conflict_reason_moved_away, "D/A/A"}},
+      {0}
+    };
+
+    SVN_ERR(check_db_conflicts(&b, "", conflicts));
+  }
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+  /* Go back to start position */
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+  {
+    conflict_info_t conflicts[] = {
+      {"A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                           svn_wc_conflict_reason_replaced}},
+      {"B", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                           svn_wc_conflict_reason_moved_away, "B"}},
+      {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                             svn_wc_conflict_reason_moved_away, "C/A"}},
+      {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                               svn_wc_conflict_reason_moved_away, "D/A/A"}},
+      {0}
+    };
+
+    SVN_ERR(check_db_conflicts(&b, "", conflicts));
+  }
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+  /* Update and resolve via their strategy */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  {
+    conflict_info_t conflicts[] = {
+      {"A", FALSE, FALSE, {svn_wc_conflict_action_edit, svn_wc_conflict_reason_replaced}},
+      {"B", FALSE, FALSE, {svn_wc_conflict_action_edit, svn_wc_conflict_reason_moved_away, "B"}},
+      {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit, svn_wc_conflict_reason_moved_away, "C/A"}},
+      {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_edit, svn_wc_conflict_reason_moved_away, "D/A/A"}},
+      {0}
+    };
+
+    SVN_ERR(check_db_conflicts(&b, "", conflicts));
+  }
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged));
+
+  return SVN_NO_ERROR;
+}
+
+/* Test: init_move4 "delete" variant with an incoming r2 add of
+   X/A/A/NEW in each tree ("_add_" scenario).  Same update/resolve
+   cycle as del4_update_delete_AAA; checks only the tree conflicts. */
+static svn_error_t *
+del4_update_add_AAA(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(init_move4(&b, "del4_update_add_AAA", opts, FALSE, pool));
+
+  /* Update and resolve via mine strategy */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  {
+    conflict_info_t conflicts[] = {
+      {"A", FALSE, FALSE, {svn_wc_conflict_action_edit, svn_wc_conflict_reason_replaced}},
+      {"B", FALSE, FALSE, {svn_wc_conflict_action_edit, svn_wc_conflict_reason_moved_away, "B"}},
+      {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit, svn_wc_conflict_reason_moved_away, "C/A"}},
+      {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_edit, svn_wc_conflict_reason_moved_away, "D/A/A"}},
+      {0}
+    };
+
+    SVN_ERR(check_db_conflicts(&b, "", conflicts));
+  }
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+  /* Go back to start position */
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+  {
+    conflict_info_t conflicts[] = {
+      {"A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                           svn_wc_conflict_reason_replaced}},
+      {"B", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                           svn_wc_conflict_reason_moved_away, "B"}},
+      {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                             svn_wc_conflict_reason_moved_away, "C/A"}},
+      {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                               svn_wc_conflict_reason_moved_away, "D/A/A"}},
+      {0}
+    };
+
+    SVN_ERR(check_db_conflicts(&b, "", conflicts));
+  }
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+  /* Update and resolve via their strategy */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  {
+    conflict_info_t conflicts[] = {
+      {"A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                           svn_wc_conflict_reason_replaced}},
+      {"B", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                           svn_wc_conflict_reason_moved_away, "B"}},
+      {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                             svn_wc_conflict_reason_moved_away, "C/A"}},
+      {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                               svn_wc_conflict_reason_moved_away, "D/A/A"}},
+      {0}
+    };
+
+    SVN_ERR(check_db_conflicts(&b, "", conflicts));
+  }
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged));
+
+  return SVN_NO_ERROR;
+}
+
+/* Test: init_move4 "delete" variant with an incoming r2 replacement of
+   each X/A/A/A (dir replaced by file, "_replace_" scenario).  Same
+   update/resolve cycle; checks only the tree conflicts. */
+static svn_error_t *
+del4_update_replace_AAA(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(init_move4(&b, "del4_update_replace_AAA", opts, FALSE, pool));
+
+  /* Update and resolve via mine strategy */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  {
+    conflict_info_t conflicts[] = {
+      {"A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                           svn_wc_conflict_reason_replaced}},
+      {"B", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                           svn_wc_conflict_reason_moved_away, "B"}},
+      {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                             svn_wc_conflict_reason_moved_away, "C/A"}},
+      {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                               svn_wc_conflict_reason_moved_away, "D/A/A"}},
+      {0}
+    };
+
+    SVN_ERR(check_db_conflicts(&b, "", conflicts));
+  }
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+  /* Go back to start position */
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+  {
+    conflict_info_t conflicts[] = {
+      {"A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                           svn_wc_conflict_reason_replaced}},
+      {"B", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                           svn_wc_conflict_reason_moved_away, "B"}},
+      {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                             svn_wc_conflict_reason_moved_away, "C/A"}},
+      {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                               svn_wc_conflict_reason_moved_away, "D/A/A"}},
+      {0}
+    };
+
+    SVN_ERR(check_db_conflicts(&b, "", conflicts));
+  }
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+  /* Update and resolve via their strategy */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  {
+    conflict_info_t conflicts[] = {
+      {"A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                           svn_wc_conflict_reason_replaced}},
+      {"B", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                           svn_wc_conflict_reason_moved_away, "B"}},
+      {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                             svn_wc_conflict_reason_moved_away, "C/A"}},
+      {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                               svn_wc_conflict_reason_moved_away, "D/A/A"}},
+      {0}
+    };
+
+    SVN_ERR(check_db_conflicts(&b, "", conflicts));
+  }
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged));
+
+  return SVN_NO_ERROR;
+}
+
+/* Test: init_move4 "delete" variant where r2 deletes each X/A/A itself
+   ("_delself_" scenario) -- i.e. the incoming change deletes the very
+   nodes that were locally moved away.  Exercises per-path resolution:
+   mine-conflict where the move source still exists, and choose_merged
+   where the incoming delete has already broken the move. */
+static svn_error_t *
+del4_update_delself_AAA(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(init_move4(&b, "del4_update_delself_AAA", opts, FALSE, pool));
+
+  /* Update and resolve via mine strategy */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+
+  /* Note: D/A/A gets an incoming *delete* (not edit) on a moved-away
+     node, unlike the other three conflicts. */
+  {
+    conflict_info_t conflicts[] = {
+      {"A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                           svn_wc_conflict_reason_replaced}},
+      {"B", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                           svn_wc_conflict_reason_moved_away, "B"}},
+      {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                             svn_wc_conflict_reason_moved_away, "C/A"}},
+      {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_delete,
+                               svn_wc_conflict_reason_moved_away, "D/A/A"}},
+      {0}
+    };
+
+    SVN_ERR(check_db_conflicts(&b, "", conflicts));
+  }
+
+
+  /* Resolve a few conflicts manually */
+  SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty,
+                          svn_wc_conflict_choose_mine_conflict));
+  SVN_ERR(sbox_wc_resolve(&b, "B", svn_depth_empty,
+                          svn_wc_conflict_choose_mine_conflict));
+  SVN_ERR(sbox_wc_resolve(&b, "C/A", svn_depth_empty,
+                          svn_wc_conflict_choose_mine_conflict));
+
+  /* The AAA_* destinations survive as plain copies (no MOVED_HERE):
+     their r2 sources were deleted by the incoming change. */
+  {
+    nodes_row_t nodes[] = {
+      {0, "", "normal", 2, ""},
+      {0, "A", "normal", 2, "A"},
+      {0, "A/A", "normal", 2, "A/A"},
+      {0, "B", "normal", 2, "B"},
+      {0, "B/A", "normal", 2, "B/A"},
+      {0, "C", "normal", 2, "C"},
+      {0, "C/A", "normal", 2, "C/A"},
+      {0, "D", "normal", 2, "D"},
+      {0, "D/A", "normal", 2, "D/A"},
+      {1, "A", "normal", 2, "B", MOVED_HERE},
+      {1, "A/A", "normal", 2, "B/A", MOVED_HERE},
+      {1, "AAA_1", "normal", 1, "A/A/A"},
+      {1, "AAA_1/A", "normal", 1, "A/A/A/A"},
+      {1, "AAA_2", "normal", 1, "B/A/A"},
+      {1, "AAA_2/A", "normal", 1, "B/A/A/A"},
+      {1, "AAA_3", "normal", 1, "C/A/A"},
+      {1, "AAA_3/A", "normal", 1, "C/A/A/A"},
+      {1, "B", "base-deleted", NO_COPY_FROM, "A"},
+      {1, "B/A", "base-deleted", NO_COPY_FROM},
+      {2, "A/A", "normal", 2, "C/A", MOVED_HERE},
+      {2, "C/A", "base-deleted", NO_COPY_FROM, "A/A"},
+      {3, "A/A/A", "normal", 1, "D/A/A"},
+      {3, "A/A/A/A", "normal", 1, "D/A/A/A"},
+
+      { 0 },
+    };
+
+    SVN_ERR(check_db_rows(&b, "", nodes));
+  }
+  {
+    conflict_info_t conflicts[] = {
+      /* Not resolved yet */
+      {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_delete,
+                               svn_wc_conflict_reason_moved_away, "D/A/A"}},
+
+      /* New */
+      {"A/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+                             svn_wc_conflict_reason_deleted}},
+      {"A/A/A", FALSE, FALSE, {svn_wc_conflict_action_delete,
+                               svn_wc_conflict_reason_moved_away, "A/A/A"}},
+
+      {0}
+    };
+
+    SVN_ERR(check_db_conflicts(&b, "", conflicts));
+  }
+
+  /* These can only be resolved to merged, as the merge is already broken
+     (because the move source is gone): incoming delete on moved_away */
+  SVN_ERR(sbox_wc_resolve(&b, "D/A/A", svn_depth_empty,
+                          svn_wc_conflict_choose_merged));
+  SVN_ERR(sbox_wc_resolve(&b, "A/A/A", svn_depth_empty,
+                          svn_wc_conflict_choose_merged));
+
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+  /* Go back to start position */
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+  {
+    conflict_info_t conflicts[] = {
+      {"A", FALSE, FALSE, { svn_wc_conflict_action_edit,
+                            svn_wc_conflict_reason_replaced}},
+      {"B", FALSE, FALSE, { svn_wc_conflict_action_edit,
+                            svn_wc_conflict_reason_moved_away, "B"}},
+      {"C/A", FALSE, FALSE, { svn_wc_conflict_action_edit,
+                              svn_wc_conflict_reason_moved_away, "C/A"}},
+      {0}
+    };
+
+    SVN_ERR(check_db_conflicts(&b, "", conflicts));
+  }
+  SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty,
+                          svn_wc_conflict_choose_mine_conflict));
+  SVN_ERR(sbox_wc_resolve(&b, "B", svn_depth_empty,
+                          svn_wc_conflict_choose_mine_conflict));
+  SVN_ERR(sbox_wc_resolve(&b, "C/A", svn_depth_empty,
+                          svn_wc_conflict_choose_mine_conflict));
+  {
+    conflict_info_t conflicts[] = {
+      {"A/A", FALSE, FALSE, { svn_wc_conflict_action_edit,
+                              svn_wc_conflict_reason_deleted}},
+      {"A/A/A", FALSE, FALSE, { svn_wc_conflict_action_add,
+                                svn_wc_conflict_reason_added}},
+      {0}
+    };
+
+    SVN_ERR(check_db_conflicts(&b, "", conflicts));
+  }
+  SVN_ERR(sbox_wc_resolve(&b, "A/A/A", svn_depth_empty,
+                          svn_wc_conflict_choose_merged));
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity,
+                          svn_wc_conflict_choose_mine_conflict));
+  /* Update and resolve via their strategy */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged));
+
+  return SVN_NO_ERROR;
+}
+
+/* Test: init_move4 "delete" variant where r2 replaces each X/A/A itself
+   with a file ("_replaceself_" scenario).  Smoke test only: runs the
+   standard update/resolve cycle without checking intermediate rows. */
+static svn_error_t *
+del4_update_replaceself_AAA(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(init_move4(&b, "del4_update_replaceself_AAA", opts, FALSE, pool));
+
+  /* Update and resolve via mine strategy */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+  /* Go back to start position */
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+  /* Update and resolve via their strategy */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged));
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Test: init_move4 "move away" variant (ancestors moved, not deleted)
+   with an incoming r2 property edit.  Checks the stacked op-depth rows
+   for A/A/A after setup, after mine-conflict resolution (props follow
+   the layers), and after choose_merged (moved-to info dropped). */
+static svn_error_t *
+move4_update_edit_AAA(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(init_move4(&b, "move4_update_edit_AAA", opts, TRUE, pool));
+
+  /* In the move-away variant the moved-to fields of layers 1 and 2 are
+     recorded on the moved ancestors instead (hence the NULL values). */
+  {
+    nodes_row_t nodes[] = {
+
+      { 0, "A/A/A", "normal", 1, "A/A/A" },
+      { 1, "A/A/A", "normal", 1, "B/A/A", FALSE, NULL /*"AAA_1"*/, TRUE },
+      { 2, "A/A/A", "normal", 1, "C/A/A", FALSE, NULL /*"AAA_2"*/, TRUE },
+      { 3, "A/A/A", "normal", 1, "D/A/A", FALSE, "AAA_3", TRUE },
+
+      { 0, "A/A/A/A", "normal", 1, "A/A/A/A" },
+      { 1, "A/A/A/A", "normal", 1, "B/A/A/A", FALSE, NULL, TRUE },
+      { 2, "A/A/A/A", "normal", 1, "C/A/A/A", FALSE, NULL, TRUE },
+      { 3, "A/A/A/A", "normal", 1, "D/A/A/A", FALSE, NULL, TRUE },
+
+
+      { 0 },
+    };
+
+    SVN_ERR(check_db_rows(&b, "A/A/A", nodes));
+  }
+
+  /* Update and resolve via mine strategy */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+
+  /* All layers at r2 with the incoming "key" property merged in. */
+  {
+    nodes_row_t nodes[] = {
+
+      { 0, "A/A/A", "normal", 2, "A/A/A", NOT_MOVED, "key" },
+      { 1, "A/A/A", "normal", 2, "B/A/A", FALSE, NULL /*"AAA_1"*/, TRUE, "key" },
+      { 2, "A/A/A", "normal", 2, "C/A/A", FALSE, NULL /*"AAA_2"*/, TRUE, "key" },
+      { 3, "A/A/A", "normal", 2, "D/A/A", FALSE, "AAA_3", TRUE, "key" },
+
+      { 0, "A/A/A/A", "normal", 2, "A/A/A/A", NOT_MOVED, "key" },
+      { 1, "A/A/A/A", "normal", 2, "B/A/A/A", FALSE, NULL, TRUE, "key" },
+      { 2, "A/A/A/A", "normal", 2, "C/A/A/A", FALSE, NULL, TRUE, "key" },
+      { 3, "A/A/A/A", "normal", 2, "D/A/A/A", FALSE, NULL, TRUE, "key" },
+
+      { 0 },
+    };
+    SVN_ERR(check_db_rows(&b, "A/A/A", nodes));
+  }
+
+  /* Go back to start position */
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+  /* Update and resolve via their strategy */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged));
+
+  /* choose_merged keeps the r1 layers but drops move bookkeeping. */
+  {
+    nodes_row_t nodes[] = {
+
+      { 0, "A/A/A", "normal", 2, "A/A/A", NOT_MOVED, "key" },
+      { 1, "A/A/A", "normal", 1, "B/A/A" },
+      { 2, "A/A/A", "normal", 1, "C/A/A" },
+      { 3, "A/A/A", "normal", 1, "D/A/A", FALSE, "AAA_3"},
+
+      { 0, "A/A/A/A", "normal", 2, "A/A/A/A", NOT_MOVED, "key" },
+      { 1, "A/A/A/A", "normal", 1, "B/A/A/A" },
+      { 2, "A/A/A/A", "normal", 1, "C/A/A/A" },
+      { 3, "A/A/A/A", "normal", 1, "D/A/A/A" },
+
+      { 0 },
+    };
+
+    SVN_ERR(check_db_rows(&b, "A/A/A", nodes));
+  }
+
+  return SVN_NO_ERROR;
+}
+
+/* Test: init_move4 "move away" variant with an incoming r2 delete of
+   each X/A/A/A.  Smoke test: runs the standard update/resolve cycle
+   without checking intermediate rows. */
+static svn_error_t *
+move4_update_delete_AAA(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(init_move4(&b, "move4_update_delete_AAA", opts, TRUE, pool));
+
+  /* Update and resolve via mine strategy */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+  /* Go back to start position */
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+  /* Update and resolve via their strategy */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged));
+
+  return SVN_NO_ERROR;
+}
+
+/* Test: init_move4 "move away" variant with an incoming r2 add of
+   X/A/A/NEW in each tree.  Smoke test: runs the standard update/resolve
+   cycle without checking intermediate rows. */
+static svn_error_t *
+move4_update_add_AAA(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+  svn_test__sandbox_t b;
+
+  SVN_ERR(init_move4(&b, "move4_update_add_AAA", opts, TRUE, pool));
+
+  /* Update and resolve via mine strategy */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+  /* Go back to start position */
+  SVN_ERR(sbox_wc_update(&b, "", 1));
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+  /* Update and resolve via their strategy */
+  SVN_ERR(sbox_wc_update(&b, "", 2));
+  SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged));
+
+  return SVN_NO_ERROR;
+}
+
+/* Test: same update/resolve round trip for the init_move4() "replace"
+ * variant (presumably r2 replaces A/A/A; init_move4 is not visible here --
+ * confirm).  Only checks that the resolve sequence succeeds. */
+static svn_error_t *
+move4_update_replace_AAA(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(init_move4(&b, "move4_update_replace_AAA", opts, TRUE, pool));
+
+ /* Update and resolve via mine strategy */
+ SVN_ERR(sbox_wc_update(&b, "", 2));
+ SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+ /* Go back to start position */
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+ /* Update and resolve via their strategy */
+ SVN_ERR(sbox_wc_update(&b, "", 2));
+ SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged));
+
+ return SVN_NO_ERROR;
+}
+
+/* Test: init_move4() "delself" variant (presumably r2 deletes the moved
+ * A/A/A node itself; init_move4 is not visible in this chunk -- confirm).
+ * Unlike the sibling tests this one asserts the exact NODES table and
+ * conflict set at each stage while resolving the tree conflicts one by
+ * one, mixing mine-conflict and merged choices, then repeats the
+ * r2 -> r1 -> r2 round trip. */
+static svn_error_t *
+move4_update_delself_AAA(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(init_move4(&b, "move4_update_delself_AAA", opts, TRUE, pool));
+
+ /* Update and resolve via mine strategy */
+ SVN_ERR(sbox_wc_update(&b, "", 2));
+
+ /* After the update, the incoming delete shows up as base-deleted rows
+ * under the moved-here A_moved tree. */
+ {
+ nodes_row_t nodes[] = {
+
+ {1, "A_moved", "normal", 1, "A", MOVED_HERE},
+ {1, "A_moved/A", "normal", 1, "A/A", MOVED_HERE},
+ {1, "A_moved/A/A", "normal", 1, "A/A/A", MOVED_HERE},
+ {3, "A_moved/A/A", "base-deleted", NO_COPY_FROM, "AAA_1"},
+ {1, "A_moved/A/A/A", "normal", 1, "A/A/A/A", MOVED_HERE},
+ {3, "A_moved/A/A/A", "base-deleted", NO_COPY_FROM},
+
+ { 0 },
+ };
+ SVN_ERR(check_db_rows(&b, "A_moved", nodes));
+ }
+
+ /* Resolve a few conflicts manually */
+ SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+
+ /* Full expected state of the working copy db plus the remaining
+ * tree conflicts after resolving only "A". */
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 2, ""},
+ {0, "A", "normal", 2, "A"},
+ {0, "A/A", "normal", 2, "A/A"},
+ {0, "B", "normal", 2, "B"},
+ {0, "B/A", "normal", 2, "B/A"},
+ {0, "C", "normal", 2, "C"},
+ {0, "C/A", "normal", 2, "C/A"},
+ {0, "D", "normal", 2, "D"},
+ {0, "D/A", "normal", 2, "D/A"},
+ {1, "A", "normal", 1, "B", FALSE, "A_moved", TRUE},
+ {1, "A/A", "normal", 1, "B/A", MOVED_HERE},
+ {1, "A/A/A", "normal", 1, "B/A/A", MOVED_HERE},
+ {1, "A/A/A/A", "normal", 1, "B/A/A/A", MOVED_HERE},
+ {1, "AAA_1", "normal", 1, "A/A/A"},
+ {1, "AAA_1/A", "normal", 1, "A/A/A/A"},
+ {1, "AAA_2", "normal", 1, "B/A/A", MOVED_HERE},
+ {1, "AAA_2/A", "normal", 1, "B/A/A/A", MOVED_HERE},
+ {1, "AAA_3", "normal", 1, "C/A/A", MOVED_HERE},
+ {1, "AAA_3/A", "normal", 1, "C/A/A/A", MOVED_HERE},
+ {1, "A_moved", "normal", 2, "A", MOVED_HERE},
+ {1, "A_moved/A", "normal", 2, "A/A", MOVED_HERE},
+ {1, "B", "base-deleted", NO_COPY_FROM, "A"},
+ {1, "B/A", "base-deleted", NO_COPY_FROM},
+ {1, "BA_moved", "normal", 1, "B/A", MOVED_HERE},
+ {1, "BA_moved/A", "normal", 1, "B/A/A", MOVED_HERE},
+ {1, "BA_moved/A/A", "normal", 1, "B/A/A/A", MOVED_HERE},
+ {2, "A/A", "normal", 1, "C/A", FALSE, "BA_moved", TRUE},
+ {2, "A/A/A", "normal", 1, "C/A/A", MOVED_HERE},
+ {2, "A/A/A/A", "normal", 1, "C/A/A/A", MOVED_HERE},
+ {2, "BA_moved/A", "base-deleted", NO_COPY_FROM, "AAA_2"},
+ {2, "BA_moved/A/A", "base-deleted", NO_COPY_FROM},
+ {2, "C/A", "base-deleted", NO_COPY_FROM, "A/A"},
+ {3, "A/A/A", "normal", 1, "D/A/A", FALSE, "AAA_3"},
+ {3, "A/A/A/A", "normal", 1, "D/A/A/A"},
+
+ { 0 },
+ };
+ conflict_info_t conflicts[] = {
+ {"A_moved/A/A", FALSE, FALSE, {svn_wc_conflict_action_delete,
+ svn_wc_conflict_reason_moved_away, "A_moved/A/A"}},
+ {"B", FALSE, FALSE, {svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_moved_away, "B"}},
+ {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_moved_away, "C/A"}},
+ {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_delete,
+ svn_wc_conflict_reason_moved_away, "D/A/A"}},
+
+ { 0 },
+ };
+
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ SVN_ERR(check_db_conflicts(&b, "", conflicts));
+ }
+
+ SVN_ERR(sbox_wc_resolve(&b, "B", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ SVN_ERR(sbox_wc_resolve(&b, "C/A", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+
+
+ /* ### These can currently only be resolved to merged ???? */
+ SVN_ERR(sbox_wc_resolve(&b, "D/A/A", svn_depth_empty,
+ svn_wc_conflict_choose_merged));
+ SVN_ERR(sbox_wc_resolve(&b, "A/A/A", svn_depth_empty,
+ svn_wc_conflict_choose_merged));
+ SVN_ERR(sbox_wc_resolve(&b, "A_moved/A/A", svn_depth_empty,
+ svn_wc_conflict_choose_merged));
+ SVN_ERR(sbox_wc_resolve(&b, "A/A", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ SVN_ERR(sbox_wc_resolve(&b, "BA_moved/A", svn_depth_empty,
+ svn_wc_conflict_choose_merged));
+ /* All conflicts should now be gone. */
+ SVN_ERR(check_db_conflicts(&b, "", NULL));
+ /* Go back to start position */
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ /* Downdating to r1 re-raises conflicts against the local moves. */
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/A", "normal", 1, "A/A"},
+ {0, "A/A/A", "normal", 1, "A/A/A"},
+ {0, "A/A/A/A", "normal", 1, "A/A/A/A"},
+ {0, "B", "normal", 1, "B"},
+ {0, "B/A", "normal", 1, "B/A"},
+ {0, "B/A/A", "normal", 1, "B/A/A"},
+ {0, "B/A/A/A", "normal", 1, "B/A/A/A"},
+ {0, "C", "normal", 1, "C"},
+ {0, "C/A", "normal", 1, "C/A"},
+ {0, "C/A/A", "normal", 1, "C/A/A"},
+ {0, "C/A/A/A", "normal", 1, "C/A/A/A"},
+ {0, "D", "normal", 1, "D"},
+ {0, "D/A", "normal", 1, "D/A"},
+ {0, "D/A/A", "normal", 1, "D/A/A"},
+ {0, "D/A/A/A", "normal", 1, "D/A/A/A"},
+ {1, "A", "normal", 2, "B", FALSE, "A_moved", TRUE},
+ {1, "A/A", "normal", 2, "B/A", MOVED_HERE},
+ {1, "A/A/A", "base-deleted", NO_COPY_FROM},
+ {1, "A/A/A/A", "base-deleted", NO_COPY_FROM},
+ {1, "A_moved", "normal", 2, "A", MOVED_HERE},
+ {1, "A_moved/A", "normal", 2, "A/A", MOVED_HERE},
+ {1, "AAA_1", "normal", 1, "A/A/A"},
+ {1, "AAA_1/A", "normal", 1, "A/A/A/A"},
+ {1, "AAA_2", "normal", 1, "B/A/A"},
+ {1, "AAA_2/A", "normal", 1, "B/A/A/A"},
+ {1, "AAA_3", "normal", 1, "C/A/A"},
+ {1, "AAA_3/A", "normal", 1, "C/A/A/A"},
+ {1, "B", "base-deleted", NO_COPY_FROM, "A"},
+ {1, "B/A", "base-deleted", NO_COPY_FROM},
+ {1, "B/A/A", "base-deleted", NO_COPY_FROM},
+ {1, "B/A/A/A", "base-deleted", NO_COPY_FROM},
+ {1, "BA_moved", "normal", 2, "B/A", MOVED_HERE},
+ {2, "A/A", "normal", 2, "C/A", FALSE, "BA_moved", TRUE},
+ {2, "C/A", "base-deleted", NO_COPY_FROM, "A/A"},
+ {2, "C/A/A", "base-deleted", NO_COPY_FROM},
+ {2, "C/A/A/A", "base-deleted", NO_COPY_FROM},
+ {3, "A/A/A", "normal", 1, "D/A/A"},
+ {3, "A/A/A/A", "normal", 1, "D/A/A/A"},
+
+ { 0 },
+ };
+
+ conflict_info_t conflicts[] = {
+ {"A", FALSE, FALSE, { svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_replaced}},
+ {"B", FALSE, FALSE, { svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_moved_away, "B"}},
+ {"C/A", FALSE, FALSE, { svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_moved_away, "C/A"}},
+ {0}
+ };
+
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ SVN_ERR(check_db_conflicts(&b, "", conflicts));
+ }
+
+ SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ SVN_ERR(sbox_wc_resolve(&b, "B", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ SVN_ERR(sbox_wc_resolve(&b, "C/A", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ /* Resolving the parents surfaces two more conflicts further down. */
+ {
+ conflict_info_t conflicts[] = {
+ {"A/A", FALSE, FALSE, { svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_moved_away, "A/A"}},
+ {"A/A/A", FALSE, FALSE, { svn_wc_conflict_action_add,
+ svn_wc_conflict_reason_added}},
+ {0}
+ };
+
+ SVN_ERR(check_db_conflicts(&b, "", conflicts));
+ }
+ SVN_ERR(sbox_wc_resolve(&b, "A/A/A", svn_depth_empty,
+ svn_wc_conflict_choose_merged));
+ SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+ /* Update and resolve via their strategy */
+ SVN_ERR(sbox_wc_update(&b, "", 2));
+ SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged));
+
+ return SVN_NO_ERROR;
+}
+
+/* Test: same update/resolve round trip for the init_move4() "replaceself"
+ * variant (presumably r2 replaces the moved A/A/A node itself; init_move4
+ * is not visible here -- confirm).  Only checks the resolves succeed. */
+static svn_error_t *
+move4_update_replaceself_AAA(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(init_move4(&b, "move4_update_replaceself_AAA", opts, TRUE, pool));
+
+ /* Update and resolve via mine strategy */
+ SVN_ERR(sbox_wc_update(&b, "", 2));
+ SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+ /* Go back to start position */
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict));
+ /* Update and resolve via their strategy */
+ SVN_ERR(sbox_wc_update(&b, "", 2));
+ SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged));
+
+ return SVN_NO_ERROR;
+}
+
+/* Test: property changes committed in r2 must reach nodes that were moved
+ * away locally while the working copy was at r1.  Sets old_* props in r1
+ * and new_* props in r2 on A and A/B, moves A/B -> A/B_mv and A -> A_mv at
+ * r1, updates to r2, then resolves each move conflict with
+ * choose_mine_conflict and checks the props are bumped move by move. */
+static svn_error_t *
+simple_move_bump(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "simple_move_bump", opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+
+ SVN_ERR(sbox_wc_propset(&b, "old_A", "val", "A"));
+ SVN_ERR(sbox_wc_propset(&b, "old_B", "val", "A/B"));
+
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ SVN_ERR(sbox_wc_propset(&b, "new_A", "val", "A"));
+ SVN_ERR(sbox_wc_propset(&b, "new_B", "val", "A/B"));
+
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ /* Nested moves at r1: first the child, then its parent. */
+ SVN_ERR(sbox_wc_move(&b, "A/B", "A/B_mv"));
+ SVN_ERR(sbox_wc_move(&b, "A", "A_mv"));
+
+ /* Before the update the moved nodes carry only the old_* props. */
+ {
+ nodes_row_t nodes[] = {
+
+ { 0, "", "normal", 1, ""},
+ { 0, "A", "normal", 1, "A", NOT_MOVED, "old_A"},
+ { 0, "A/B", "normal", 1, "A/B", NOT_MOVED, "old_B"},
+
+ { 1, "A", "base-deleted", NO_COPY_FROM, "A_mv"},
+ { 1, "A/B", "base-deleted", NO_COPY_FROM},
+
+ { 1, "A_mv", "normal", 1, "A", MOVED_HERE, "old_A" },
+ { 1, "A_mv/B", "normal", 1, "A/B", MOVED_HERE, "old_B" },
+
+ { 2, "A_mv/B", "base-deleted", NO_COPY_FROM, "A_mv/B_mv" },
+ { 2, "A_mv/B_mv", "normal", 1, "A/B", FALSE, NULL, TRUE, "old_B" },
+
+ { 0 },
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(sbox_wc_update(&b, "", 2));
+
+ /* Expect the A tree to be updated */
+ {
+ nodes_row_t nodes[] = {
+
+ { 0, "", "normal", 2, ""},
+ { 0, "A", "normal", 2, "A", NOT_MOVED, "new_A,old_A"},
+ { 0, "A/B", "normal", 2, "A/B", NOT_MOVED, "new_B,old_B"},
+
+ { 1, "A", "base-deleted", NO_COPY_FROM, "A_mv"},
+ { 1, "A/B", "base-deleted", NO_COPY_FROM},
+
+ { 1, "A_mv", "normal", 1, "A", MOVED_HERE, "old_A" },
+ { 1, "A_mv/B", "normal", 1, "A/B", MOVED_HERE, "old_B" },
+
+ { 2, "A_mv/B", "base-deleted", NO_COPY_FROM, "A_mv/B_mv" },
+ { 2, "A_mv/B_mv", "normal", 1, "A/B", FALSE, NULL, TRUE, "old_B" },
+
+ { 0 },
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+
+ /* Resolving "A" bumps A_mv and A_mv/B to r2 with the new_* props,
+ * but the inner move target A_mv/B_mv is still at r1. */
+ {
+ nodes_row_t nodes[] = {
+
+ { 0, "", "normal", 2, ""},
+ { 0, "A", "normal", 2, "A", NOT_MOVED, "new_A,old_A"},
+ { 0, "A/B", "normal", 2, "A/B", NOT_MOVED, "new_B,old_B"},
+
+ { 1, "A", "base-deleted", NO_COPY_FROM, "A_mv"},
+ { 1, "A/B", "base-deleted", NO_COPY_FROM},
+
+ { 1, "A_mv", "normal", 2, "A", MOVED_HERE, "new_A,old_A" },
+ { 1, "A_mv/B", "normal", 2, "A/B", MOVED_HERE, "new_B,old_B" },
+
+ { 2, "A_mv/B", "base-deleted", NO_COPY_FROM, "A_mv/B_mv" },
+ { 2, "A_mv/B_mv", "normal", 1, "A/B", FALSE, NULL, TRUE, "old_B" },
+
+ { 0 },
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(sbox_wc_resolve(&b, "A_mv/B", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+
+ /* Resolving the inner move propagates the bump to A_mv/B_mv too. */
+ {
+ nodes_row_t nodes[] = {
+
+ { 0, "", "normal", 2, ""},
+ { 0, "A", "normal", 2, "A", NOT_MOVED, "new_A,old_A"},
+ { 0, "A/B", "normal", 2, "A/B", NOT_MOVED, "new_B,old_B"},
+
+ { 1, "A", "base-deleted", NO_COPY_FROM, "A_mv"},
+ { 1, "A/B", "base-deleted", NO_COPY_FROM},
+
+ { 1, "A_mv", "normal", 2, "A", MOVED_HERE, "new_A,old_A" },
+ { 1, "A_mv/B", "normal", 2, "A/B", MOVED_HERE, "new_B,old_B" },
+
+ { 2, "A_mv/B", "base-deleted", NO_COPY_FROM, "A_mv/B_mv" },
+ { 2, "A_mv/B_mv", "normal", 2, "A/B", FALSE, NULL, TRUE, "new_B,old_B" },
+
+ { 0 },
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Test: update of a moved-away tree where children of the move destination
+ * were themselves deleted (B1/C1/D1), moved out (B2/C2/D2) or replaced by
+ * new local dirs, while r2 props/deletes/adds hit the same names.  After
+ * resolving each raised conflict (mine-conflict for the prop edits, merged
+ * for the incoming deletes and the add), verifies the final NODES table. */
+static svn_error_t *
+movedhere_extract_retract(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "movedhere_extract_retract",
+ opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B1"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B2"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B3"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/C1"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/C2"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/C3"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/D1"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/D2"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/D3"));
+
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ /* r2: prop edits on B*, deletes of C*, adds of E*. */
+ SVN_ERR(sbox_wc_propset(&b, "k", "v", "A/B1"));
+ SVN_ERR(sbox_wc_propset(&b, "k", "v", "A/B2"));
+ SVN_ERR(sbox_wc_propset(&b, "k", "v", "A/B3"));
+ SVN_ERR(sbox_wc_delete(&b, "A/C1"));
+ SVN_ERR(sbox_wc_delete(&b, "A/C2"));
+ SVN_ERR(sbox_wc_delete(&b, "A/C3"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/E1"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/E2"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/E3"));
+
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ /* Local changes at r1: move A away, then reshuffle its children. */
+ SVN_ERR(sbox_wc_move(&b, "A", "Z"));
+
+ SVN_ERR(sbox_wc_delete(&b, "Z/B1"));
+ SVN_ERR(sbox_wc_delete(&b, "Z/C1"));
+ SVN_ERR(sbox_wc_delete(&b, "Z/D1"));
+
+ SVN_ERR(sbox_wc_move(&b, "Z/B2", "B2"));
+ SVN_ERR(sbox_wc_move(&b, "Z/C2", "C2"));
+ SVN_ERR(sbox_wc_move(&b, "Z/D2", "D2"));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "Z/B2"));
+ SVN_ERR(sbox_wc_mkdir(&b, "Z/C2"));
+ SVN_ERR(sbox_wc_mkdir(&b, "Z/D2"));
+ SVN_ERR(sbox_wc_mkdir(&b, "Z/E2"));
+
+ SVN_ERR(sbox_wc_update(&b, "", 2));
+ /* Only the moved-away root conflicts at first. */
+ {
+ conflict_info_t conflicts[] = {
+ {"A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_moved_away, "A"}},
+ {0}
+ };
+
+ SVN_ERR(check_db_conflicts(&b, "", conflicts));
+ }
+ SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ /* Resolving "A" pushes the incoming edits into Z and raises
+ * per-child conflicts there. */
+ {
+ conflict_info_t conflicts[] = {
+ {"Z/B1", FALSE, FALSE, {svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_deleted}},
+ {"Z/B2", FALSE, FALSE, {svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_moved_away, "Z/B2"}},
+ {"Z/C1", FALSE, FALSE, {svn_wc_conflict_action_delete,
+ svn_wc_conflict_reason_deleted}},
+ {"Z/C2", FALSE, FALSE, {svn_wc_conflict_action_delete,
+ svn_wc_conflict_reason_moved_away, "Z/C2"}},
+ {"Z/E2", FALSE, FALSE, {svn_wc_conflict_action_add,
+ svn_wc_conflict_reason_added}},
+
+ {0}
+ };
+
+ SVN_ERR(check_db_conflicts(&b, "", conflicts));
+ }
+ SVN_ERR(sbox_wc_resolve(&b, "Z/B1", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ SVN_ERR(sbox_wc_resolve(&b, "Z/B2", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+
+ SVN_ERR(sbox_wc_resolve(&b, "Z/C1", svn_depth_empty,
+ svn_wc_conflict_choose_merged));
+ SVN_ERR(sbox_wc_resolve(&b, "Z/C2", svn_depth_empty,
+ svn_wc_conflict_choose_merged));
+
+ SVN_ERR(sbox_wc_resolve(&b, "Z/E2", svn_depth_empty,
+ svn_wc_conflict_choose_merged));
+
+ SVN_ERR(check_db_conflicts(&b, "", NULL));
+ /* Final state: Z at r2 with the "k" props, the extracted B2 bumped to
+ * r2, the extracted C2 (deleted in r2) kept as a plain r1 copy. */
+ {
+ nodes_row_t nodes[] = {
+
+ {0, "", "normal", 2, ""},
+ {0, "A", "normal", 2, "A"},
+ {0, "A/B1", "normal", 2, "A/B1", FALSE, NULL, FALSE, "k"},
+ {0, "A/B2", "normal", 2, "A/B2", FALSE, NULL, FALSE, "k"},
+ {0, "A/B3", "normal", 2, "A/B3", FALSE, NULL, FALSE, "k"},
+ {0, "A/D1", "normal", 2, "A/D1"},
+ {0, "A/D2", "normal", 2, "A/D2"},
+ {0, "A/D3", "normal", 2, "A/D3"},
+ {0, "A/E1", "normal", 2, "A/E1"},
+ {0, "A/E2", "normal", 2, "A/E2"},
+ {0, "A/E3", "normal", 2, "A/E3"},
+
+ {1, "A", "base-deleted", NO_COPY_FROM, "Z"},
+ {1, "A/B1", "base-deleted", NO_COPY_FROM},
+ {1, "A/B2", "base-deleted", NO_COPY_FROM},
+ {1, "A/B3", "base-deleted", NO_COPY_FROM},
+
+ {1, "A/D1", "base-deleted", NO_COPY_FROM},
+ {1, "A/D2", "base-deleted", NO_COPY_FROM},
+ {1, "A/D3", "base-deleted", NO_COPY_FROM},
+
+ {1, "A/E1", "base-deleted", NO_COPY_FROM},
+ {1, "A/E2", "base-deleted", NO_COPY_FROM},
+ {1, "A/E3", "base-deleted", NO_COPY_FROM},
+
+ {1, "B2", "normal", 2, "A/B2", MOVED_HERE, "k"},
+ {1, "C2", "normal", 1, "A/C2"},
+ {1, "D2", "normal", 1, "A/D2", MOVED_HERE},
+
+ {1, "Z", "normal", 2, "A", MOVED_HERE},
+ {1, "Z/B1", "normal", 2, "A/B1", MOVED_HERE, "k"},
+ {1, "Z/B2", "normal", 2, "A/B2", MOVED_HERE, "k"},
+ {1, "Z/B3", "normal", 2, "A/B3", MOVED_HERE, "k"},
+ {1, "Z/D1", "normal", 2, "A/D1", MOVED_HERE},
+ {1, "Z/D2", "normal", 2, "A/D2", MOVED_HERE},
+ {1, "Z/D3", "normal", 2, "A/D3", MOVED_HERE},
+ {1, "Z/E1", "normal", 2, "A/E1", MOVED_HERE},
+ {1, "Z/E2", "normal", 2, "A/E2", MOVED_HERE},
+ {1, "Z/E3", "normal", 2, "A/E3", MOVED_HERE},
+
+ {2, "Z/B2", "normal", NO_COPY_FROM, "B2"},
+ {2, "Z/C2", "normal", NO_COPY_FROM},
+ {2, "Z/D2", "normal", NO_COPY_FROM, "D2"},
+ {2, "Z/E2", "normal", NO_COPY_FROM},
+
+ {2, "Z/B1", "base-deleted", NO_COPY_FROM},
+ {2, "Z/D1", "base-deleted", NO_COPY_FROM},
+
+ { 0 },
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+
+ SVN_ERR(check_db_conflicts(&b, "", NULL));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Test: repository-to-WC copy (svn cp URL WC) both before and after the
+ * working copy is relocated to a byte-identical cloned repository.  The
+ * post-relocate copy produced an invalid (incomplete) copy in
+ * Subversion <= 1.8.8; both copies must yield complete NODES rows. */
+static svn_error_t *
+repo_wc_copy(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ const char *new_repos_dir;
+ const char *new_repos_url;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "repo_wc_copy",
+ opts, pool));
+ SVN_ERR(sbox_add_and_commit_greek_tree(&b));
+
+ /* Copy A/B from the original repository into the WC as "AA". */
+ SVN_ERR(sbox_wc_copy_url(&b,
+ svn_path_url_add_component2(b.repos_url, "A/B",
+ pool),
+ -1, "AA"));
+
+ {
+ nodes_row_t nodes[] = {
+
+ {1, "AA/lambda", "normal", 1, "A/B/lambda"},
+ {1, "AA", "normal", 1, "A/B"},
+ {1, "AA/E/beta", "normal", 1, "A/B/E/beta"},
+ {1, "AA/E/alpha", "normal", 1, "A/B/E/alpha"},
+ {1, "AA/F", "normal", 1, "A/B/F"},
+ {1, "AA/E", "normal", 1, "A/B/E"},
+
+ { 0 },
+ };
+ SVN_ERR(check_db_rows(&b, "AA", nodes));
+ }
+
+ /* Clone the repository on disk under a "-2" suffix and relocate
+ * the working copy to the clone. */
+ new_repos_dir = apr_pstrcat(pool, b.repos_dir, "-2", SVN_VA_NULL);
+ new_repos_url = apr_pstrcat(pool, b.repos_url, "-2", SVN_VA_NULL);
+
+ svn_test_add_dir_cleanup(new_repos_dir);
+
+ SVN_ERR(svn_io_remove_dir2(new_repos_dir, TRUE, NULL, NULL, pool));
+ SVN_ERR(svn_io_copy_dir_recursively(b.repos_dir,
+ svn_dirent_dirname(new_repos_dir, pool),
+ svn_dirent_basename(new_repos_dir, pool),
+ FALSE, NULL, NULL, pool));
+
+ SVN_ERR(sbox_wc_relocate(&b, new_repos_url));
+
+ /* This produced an invalid copy in Subversion <= 1.8.8.
+ Status would show all descendants as incomplete */
+ SVN_ERR(sbox_wc_copy_url(&b,
+ svn_path_url_add_component2(b.repos_url, "A/B",
+ pool),
+ -1, "BB"));
+
+ {
+ nodes_row_t nodes[] = {
+
+ {1, "BB/lambda", "normal", 1, "A/B/lambda"},
+ {1, "BB", "normal", 1, "A/B"},
+ {1, "BB/E/beta", "normal", 1, "A/B/E/beta"},
+ {1, "BB/E/alpha", "normal", 1, "A/B/E/alpha"},
+ {1, "BB/F", "normal", 1, "A/B/F"},
+ {1, "BB/E", "normal", 1, "A/B/E"},
+
+ { 0 },
+ };
+ SVN_ERR(check_db_rows(&b, "BB", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Test: breaking a move whose source lives inside a locally deleted tree.
+ * X/Y/Z is moved to A/Z, then X is deleted; updating to r2 (which edits
+ * X/Y/Z's props) raises conflicts first on X, then on X/Y/Z.  Using
+ * sbox_wc_resolved() (break-the-move resolution) must drop the move
+ * recording: A/Z becomes a plain copy and stays at r1, without props. */
+static svn_error_t *
+break_move_in_delete(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "break_move_in_delete", opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "X"));
+ SVN_ERR(sbox_wc_mkdir(&b, "X/Y"));
+ SVN_ERR(sbox_wc_mkdir(&b, "X/Y/Z"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_propset(&b, "key", "value", "X/Y/Z"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ SVN_ERR(sbox_wc_move(&b, "X/Y/Z", "A/Z"));
+ SVN_ERR(sbox_wc_delete(&b, "X"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "X", "normal", 1, "X"},
+ {0, "X/Y", "normal", 1, "X/Y"},
+ {0, "X/Y/Z", "normal", 1, "X/Y/Z"},
+ {1, "X", "base-deleted", NO_COPY_FROM},
+ {1, "X/Y", "base-deleted", NO_COPY_FROM},
+ {1, "X/Y/Z", "base-deleted", NO_COPY_FROM, "A/Z"},
+ {2, "A/Z", "normal", 1, "X/Y/Z", MOVED_HERE},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(sbox_wc_update(&b, "", 2));
+ /* The rows don't change across the update; only the conflict moves
+ * from X down to X/Y/Z once X is resolved. */
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 2, ""},
+ {0, "A", "normal", 2, "A"},
+ {0, "A/B", "normal", 2, "A/B"},
+ {0, "X", "normal", 2, "X"},
+ {0, "X/Y", "normal", 2, "X/Y"},
+ {0, "X/Y/Z", "normal", 2, "X/Y/Z"},
+ {1, "X", "base-deleted", NO_COPY_FROM},
+ {1, "X/Y", "base-deleted", NO_COPY_FROM},
+ {1, "X/Y/Z", "base-deleted", NO_COPY_FROM, "A/Z"},
+ {2, "A/Z", "normal", 1, "X/Y/Z", MOVED_HERE},
+ {0}
+ };
+ conflict_info_t conflicts1[] = {
+ {"X", FALSE, FALSE, {svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_deleted}},
+ {0}
+ };
+ conflict_info_t conflicts2[] = {
+ {"X/Y/Z", FALSE, FALSE, {svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_moved_away, "X"}},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ SVN_ERR(check_db_conflicts(&b, "", conflicts1));
+ SVN_ERR(sbox_wc_resolve(&b, "X", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ SVN_ERR(check_db_conflicts(&b, "", conflicts2));
+ }
+
+ /* Break the move: afterwards A/Z is an ordinary copy (no MOVED_HERE,
+ * no moved-to on X/Y/Z). */
+ SVN_ERR(sbox_wc_resolved(&b, "X/Y/Z"));
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 2, ""},
+ {0, "A", "normal", 2, "A"},
+ {0, "A/B", "normal", 2, "A/B"},
+ {0, "X", "normal", 2, "X"},
+ {0, "X/Y", "normal", 2, "X/Y"},
+ {0, "X/Y/Z", "normal", 2, "X/Y/Z"},
+ {1, "X", "base-deleted", NO_COPY_FROM},
+ {1, "X/Y", "base-deleted", NO_COPY_FROM},
+ {1, "X/Y/Z", "base-deleted", NO_COPY_FROM},
+ {2, "A/Z", "normal", 1, "X/Y/Z"},
+ {0}
+ };
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ SVN_ERR(check_db_conflicts(&b, "", NULL));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Test: a move (A/B/lambda -> A/Z/lambda) whose destination parent A/Z is
+ * a local addition, followed by deletion of the move source's parent A/B,
+ * then a move of the whole of A to A_moved.  Verifies the move recording
+ * survives both operations: the moved-to/moved-here links are re-rooted
+ * under A_moved (A_moved/B/lambda -> A_moved/Z/lambda). */
+static svn_error_t *
+nested_move_delete(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "nested_move_delete", opts, pool));
+
+ SVN_ERR(sbox_add_and_commit_greek_tree(&b));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A/Z"));
+ SVN_ERR(sbox_wc_move(&b, "A/B/lambda", "A/Z/lambda"));
+
+ SVN_ERR(sbox_wc_delete(&b, "A/B"));
+
+ /* Deleting A/B keeps the lambda move intact (moved-to on the
+ * base-deleted row). */
+ {
+ nodes_row_t nodes_AB[] = {
+ {0, "A/B", "normal", 1, "A/B"},
+ {2, "A/B", "base-deleted", NO_COPY_FROM},
+ {0, "A/B/E", "normal", 1, "A/B/E"},
+ {2, "A/B/E", "base-deleted", NO_COPY_FROM},
+ {0, "A/B/E/alpha", "normal", 1, "A/B/E/alpha"},
+ {2, "A/B/E/alpha", "base-deleted", NO_COPY_FROM},
+ {0, "A/B/E/beta", "normal", 1, "A/B/E/beta"},
+ {2, "A/B/E/beta", "base-deleted", NO_COPY_FROM},
+ {0, "A/B/F", "normal", 1, "A/B/F"},
+ {2, "A/B/F", "base-deleted", NO_COPY_FROM},
+ {0, "A/B/lambda", "normal", 1, "A/B/lambda"},
+ {2, "A/B/lambda", "base-deleted", NO_COPY_FROM, "A/Z/lambda"},
+ {0}
+ };
+ nodes_row_t nodes_AZ[] = {
+ {2, "A/Z", "normal", NO_COPY_FROM},
+ {3, "A/Z/lambda", "normal", 1, "A/B/lambda", MOVED_HERE },
+ {0}
+ };
+
+ SVN_ERR(check_db_rows(&b, "A/B", nodes_AB));
+ SVN_ERR(check_db_rows(&b, "A/Z", nodes_AZ));
+ }
+
+ SVN_ERR(sbox_wc_move(&b, "A", "A_moved"));
+
+ /* After moving A: the nested lambda move is now recorded between
+ * A_moved/B/lambda and A_moved/Z/lambda. */
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 0, ""},
+ {0, "A", "normal", 1, "A"},
+ {1, "A", "base-deleted", NO_COPY_FROM, "A_moved"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {1, "A/B", "base-deleted", NO_COPY_FROM},
+ {0, "A/B/E", "normal", 1, "A/B/E"},
+ {1, "A/B/E", "base-deleted", NO_COPY_FROM},
+ {0, "A/B/E/alpha", "normal", 1, "A/B/E/alpha"},
+ {1, "A/B/E/alpha", "base-deleted", NO_COPY_FROM},
+ {0, "A/B/E/beta", "normal", 1, "A/B/E/beta"},
+ {1, "A/B/E/beta", "base-deleted", NO_COPY_FROM},
+ {0, "A/B/F", "normal", 1, "A/B/F"},
+ {1, "A/B/F", "base-deleted", NO_COPY_FROM},
+ {0, "A/B/lambda", "normal", 1, "A/B/lambda"},
+ {1, "A/B/lambda", "base-deleted", NO_COPY_FROM},
+ {0, "A/C", "normal", 1, "A/C"},
+ {1, "A/C", "base-deleted", NO_COPY_FROM},
+ {0, "A/D", "normal", 1, "A/D"},
+ {1, "A/D", "base-deleted", NO_COPY_FROM},
+ {0, "A/D/G", "normal", 1, "A/D/G"},
+ {1, "A/D/G", "base-deleted", NO_COPY_FROM},
+ {0, "A/D/G/pi", "normal", 1, "A/D/G/pi"},
+ {1, "A/D/G/pi", "base-deleted", NO_COPY_FROM},
+ {0, "A/D/G/rho", "normal", 1, "A/D/G/rho"},
+ {1, "A/D/G/rho", "base-deleted", NO_COPY_FROM},
+ {0, "A/D/G/tau", "normal", 1, "A/D/G/tau"},
+ {1, "A/D/G/tau", "base-deleted", NO_COPY_FROM},
+ {0, "A/D/H", "normal", 1, "A/D/H"},
+ {1, "A/D/H", "base-deleted", NO_COPY_FROM},
+ {0, "A/D/H/chi", "normal", 1, "A/D/H/chi"},
+ {1, "A/D/H/chi", "base-deleted", NO_COPY_FROM},
+ {0, "A/D/H/omega", "normal", 1, "A/D/H/omega"},
+ {1, "A/D/H/omega", "base-deleted", NO_COPY_FROM},
+ {0, "A/D/H/psi", "normal", 1, "A/D/H/psi"},
+ {1, "A/D/H/psi", "base-deleted", NO_COPY_FROM},
+ {0, "A/D/gamma", "normal", 1, "A/D/gamma"},
+ {1, "A/D/gamma", "base-deleted", NO_COPY_FROM},
+ {0, "A/mu", "normal", 1, "A/mu"},
+ {1, "A/mu", "base-deleted", NO_COPY_FROM},
+ {1, "A_moved", "normal", 1, "A", MOVED_HERE},
+ {1, "A_moved/B", "normal", 1, "A/B", MOVED_HERE},
+ {2, "A_moved/B", "base-deleted", NO_COPY_FROM},
+ {2, "A_moved/B/E", "base-deleted", NO_COPY_FROM},
+ {1, "A_moved/B/E", "normal", 1, "A/B/E", MOVED_HERE},
+ {1, "A_moved/B/E/alpha", "normal", 1, "A/B/E/alpha", MOVED_HERE},
+ {2, "A_moved/B/E/alpha", "base-deleted", NO_COPY_FROM},
+ {1, "A_moved/B/E/beta", "normal", 1, "A/B/E/beta", MOVED_HERE},
+ {2, "A_moved/B/E/beta", "base-deleted", NO_COPY_FROM},
+ {1, "A_moved/B/F", "normal", 1, "A/B/F", MOVED_HERE},
+ {2, "A_moved/B/F", "base-deleted", NO_COPY_FROM},
+ {1, "A_moved/B/lambda", "normal", 1, "A/B/lambda", MOVED_HERE},
+ {2, "A_moved/B/lambda", "base-deleted", NO_COPY_FROM, "A_moved/Z/lambda"},
+ {1, "A_moved/C", "normal", 1, "A/C", MOVED_HERE},
+ {1, "A_moved/D", "normal", 1, "A/D", MOVED_HERE},
+ {1, "A_moved/D/G", "normal", 1, "A/D/G", MOVED_HERE},
+ {1, "A_moved/D/G/pi", "normal", 1, "A/D/G/pi", MOVED_HERE},
+ {1, "A_moved/D/G/rho", "normal", 1, "A/D/G/rho", MOVED_HERE},
+ {1, "A_moved/D/G/tau", "normal", 1, "A/D/G/tau", MOVED_HERE},
+ {1, "A_moved/D/H", "normal", 1, "A/D/H", MOVED_HERE},
+ {1, "A_moved/D/H/chi", "normal", 1, "A/D/H/chi", MOVED_HERE},
+ {1, "A_moved/D/H/omega", "normal", 1, "A/D/H/omega", MOVED_HERE},
+ {1, "A_moved/D/H/psi", "normal", 1, "A/D/H/psi", MOVED_HERE},
+ {1, "A_moved/D/gamma", "normal", 1, "A/D/gamma", MOVED_HERE},
+ {2, "A_moved/Z", "normal", NO_COPY_FROM},
+ {3, "A_moved/Z/lambda", "normal", 1, "A/B/lambda", MOVED_HERE},
+ {1, "A_moved/mu", "normal", 1, "A/mu", MOVED_HERE},
+ {0, "iota", "normal", 1, "iota"},
+
+ {0}
+ };
+
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Test: moving a mixed-revision tree breaks the move recordings of moves
+ * nested inside it.  With A/B/E updated to r2, moves A/D -> A/D_mv and
+ * A/C -> C_mv are recorded normally; the subsequent mixed-rev move
+ * A -> A_mv turns A_mv into a plain copy (no MOVED_HERE) and drops the
+ * "A/D" -> "A/D_mv" recording, while the move out of the tree
+ * ("A/C" -> "C_mv") keeps its recording.  Asserts the full NODES table
+ * before and after. */
+static svn_error_t *
+move_within_mixed_move(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "move_within_mixed_move", opts, pool));
+
+ SVN_ERR(sbox_add_and_commit_greek_tree(&b));
+
+ SVN_ERR(sbox_wc_delete(&b, "iota"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ /* Make A mixed revision */
+ SVN_ERR(sbox_wc_update(&b, "A/B/E", 2));
+
+ /* Single rev moves.. ok */
+ SVN_ERR(sbox_wc_move(&b, "A/D", "A/D_mv"));
+ SVN_ERR(sbox_wc_move(&b, "A/C", "C_mv"));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 0, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/E", "normal", 2, "A/B/E"},
+ {0, "A/B/E/alpha", "normal", 2, "A/B/E/alpha"},
+ {0, "A/B/E/beta", "normal", 2, "A/B/E/beta"},
+ {0, "A/B/F", "normal", 1, "A/B/F"},
+ {0, "A/B/lambda", "normal", 1, "A/B/lambda"},
+ {0, "A/C", "normal", 1, "A/C"},
+ {0, "A/D", "normal", 1, "A/D"},
+ {0, "A/D/G", "normal", 1, "A/D/G"},
+ {0, "A/D/G/pi", "normal", 1, "A/D/G/pi"},
+ {0, "A/D/G/rho", "normal", 1, "A/D/G/rho"},
+ {0, "A/D/G/tau", "normal", 1, "A/D/G/tau"},
+ {0, "A/D/gamma", "normal", 1, "A/D/gamma"},
+ {0, "A/D/H", "normal", 1, "A/D/H"},
+ {0, "A/D/H/chi", "normal", 1, "A/D/H/chi"},
+ {0, "A/D/H/omega", "normal", 1, "A/D/H/omega"},
+ {0, "A/D/H/psi", "normal", 1, "A/D/H/psi"},
+ {0, "A/mu", "normal", 1, "A/mu"},
+ {0, "iota", "not-present", 2, "iota"},
+ {1, "C_mv", "normal", 1, "A/C", MOVED_HERE},
+ {2, "A/C", "base-deleted", NO_COPY_FROM, "C_mv"},
+ {2, "A/D", "base-deleted", NO_COPY_FROM, "A/D_mv"},
+ {2, "A/D/G", "base-deleted", NO_COPY_FROM},
+ {2, "A/D/G/pi", "base-deleted", NO_COPY_FROM},
+ {2, "A/D/G/rho", "base-deleted", NO_COPY_FROM},
+ {2, "A/D/G/tau", "base-deleted", NO_COPY_FROM},
+ {2, "A/D/gamma", "base-deleted", NO_COPY_FROM},
+ {2, "A/D/H", "base-deleted", NO_COPY_FROM},
+ {2, "A/D/H/chi", "base-deleted", NO_COPY_FROM},
+ {2, "A/D/H/omega", "base-deleted", NO_COPY_FROM},
+ {2, "A/D/H/psi", "base-deleted", NO_COPY_FROM},
+ {2, "A/D_mv", "normal", 1, "A/D", MOVED_HERE},
+ {2, "A/D_mv/G", "normal", 1, "A/D/G", MOVED_HERE},
+ {2, "A/D_mv/G/pi", "normal", 1, "A/D/G/pi", MOVED_HERE},
+ {2, "A/D_mv/G/rho", "normal", 1, "A/D/G/rho", MOVED_HERE},
+ {2, "A/D_mv/G/tau", "normal", 1, "A/D/G/tau", MOVED_HERE},
+ {2, "A/D_mv/gamma", "normal", 1, "A/D/gamma", MOVED_HERE},
+ {2, "A/D_mv/H", "normal", 1, "A/D/H", MOVED_HERE},
+ {2, "A/D_mv/H/chi", "normal", 1, "A/D/H/chi", MOVED_HERE},
+ {2, "A/D_mv/H/omega", "normal", 1, "A/D/H/omega", MOVED_HERE},
+ {2, "A/D_mv/H/psi", "normal", 1, "A/D/H/psi", MOVED_HERE},
+ {0}
+ };
+
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ /* Mixed rev move... breaks recordings "A/D" -> "A/D_mv" */
+ SVN_ERR(sbox_wc_move(&b, "A", "A_mv"));
+
+ /* Note: A_mv and A_mv/D_mv rows carry no MOVED_HERE, and the mixed-rev
+ * A/B/E subtree becomes not-present in A_mv with a separate op-depth-3
+ * copy; only "A/C" -> "C_mv" keeps its move links. */
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 0, ""},
+ {0, "A", "normal", 1, "A"},
+ {0, "A/B", "normal", 1, "A/B"},
+ {0, "A/B/E", "normal", 2, "A/B/E"},
+ {0, "A/B/E/alpha", "normal", 2, "A/B/E/alpha"},
+ {0, "A/B/E/beta", "normal", 2, "A/B/E/beta"},
+ {0, "A/B/F", "normal", 1, "A/B/F"},
+ {0, "A/B/lambda", "normal", 1, "A/B/lambda"},
+ {0, "A/C", "normal", 1, "A/C"},
+ {0, "A/D", "normal", 1, "A/D"},
+ {0, "A/D/G", "normal", 1, "A/D/G"},
+ {0, "A/D/G/pi", "normal", 1, "A/D/G/pi"},
+ {0, "A/D/G/rho", "normal", 1, "A/D/G/rho"},
+ {0, "A/D/G/tau", "normal", 1, "A/D/G/tau"},
+ {0, "A/D/gamma", "normal", 1, "A/D/gamma"},
+ {0, "A/D/H", "normal", 1, "A/D/H"},
+ {0, "A/D/H/chi", "normal", 1, "A/D/H/chi"},
+ {0, "A/D/H/omega", "normal", 1, "A/D/H/omega"},
+ {0, "A/D/H/psi", "normal", 1, "A/D/H/psi"},
+ {0, "A/mu", "normal", 1, "A/mu"},
+ {0, "iota", "not-present", 2, "iota"},
+ {1, "A", "base-deleted", NO_COPY_FROM },
+ {1, "A/B", "base-deleted", NO_COPY_FROM },
+ {1, "A/B/E", "base-deleted", NO_COPY_FROM },
+ {1, "A/B/E/alpha", "base-deleted", NO_COPY_FROM },
+ {1, "A/B/E/beta", "base-deleted", NO_COPY_FROM },
+ {1, "A/B/F", "base-deleted", NO_COPY_FROM },
+ {1, "A/B/lambda", "base-deleted", NO_COPY_FROM },
+ {1, "A/C", "base-deleted", NO_COPY_FROM, "C_mv"},
+ {1, "A/D", "base-deleted", NO_COPY_FROM, "A/D_mv" },
+ {1, "A/D/G", "base-deleted", NO_COPY_FROM },
+ {1, "A/D/G/pi", "base-deleted", NO_COPY_FROM },
+ {1, "A/D/G/rho", "base-deleted", NO_COPY_FROM },
+ {1, "A/D/G/tau", "base-deleted", NO_COPY_FROM },
+ {1, "A/D/gamma", "base-deleted", NO_COPY_FROM },
+ {1, "A/D/H", "base-deleted", NO_COPY_FROM },
+ {1, "A/D/H/chi", "base-deleted", NO_COPY_FROM },
+ {1, "A/D/H/omega", "base-deleted", NO_COPY_FROM },
+ {1, "A/D/H/psi", "base-deleted", NO_COPY_FROM },
+ {1, "A/mu", "base-deleted", NO_COPY_FROM },
+ {1, "A_mv", "normal", 1, "A"},
+ {1, "A_mv/B", "normal", 1, "A/B"},
+ {1, "A_mv/B/E", "not-present", 2, "A/B/E"},
+ {1, "A_mv/B/F", "normal", 1, "A/B/F"},
+ {1, "A_mv/B/lambda", "normal", 1, "A/B/lambda"},
+ {1, "A_mv/C", "normal", 1, "A/C"},
+ {1, "A_mv/D", "normal", 1, "A/D"},
+ {1, "A_mv/D/G", "normal", 1, "A/D/G"},
+ {1, "A_mv/D/G/pi", "normal", 1, "A/D/G/pi"},
+ {1, "A_mv/D/G/rho", "normal", 1, "A/D/G/rho"},
+ {1, "A_mv/D/G/tau", "normal", 1, "A/D/G/tau"},
+ {1, "A_mv/D/gamma", "normal", 1, "A/D/gamma"},
+ {1, "A_mv/D/H", "normal", 1, "A/D/H"},
+ {1, "A_mv/D/H/chi", "normal", 1, "A/D/H/chi"},
+ {1, "A_mv/D/H/omega", "normal", 1, "A/D/H/omega"},
+ {1, "A_mv/D/H/psi", "normal", 1, "A/D/H/psi"},
+ {1, "A_mv/mu", "normal", 1, "A/mu"},
+ {1, "C_mv", "normal", 1, "A/C", MOVED_HERE},
+ {2, "A_mv/C", "base-deleted", NO_COPY_FROM },
+ {2, "A_mv/D", "base-deleted", NO_COPY_FROM },
+ {2, "A_mv/D/G", "base-deleted", NO_COPY_FROM },
+ {2, "A_mv/D/G/pi", "base-deleted", NO_COPY_FROM },
+ {2, "A_mv/D/G/rho", "base-deleted", NO_COPY_FROM },
+ {2, "A_mv/D/G/tau", "base-deleted", NO_COPY_FROM },
+ {2, "A_mv/D/gamma", "base-deleted", NO_COPY_FROM },
+ {2, "A_mv/D/H", "base-deleted", NO_COPY_FROM },
+ {2, "A_mv/D/H/chi", "base-deleted", NO_COPY_FROM },
+ {2, "A_mv/D/H/omega", "base-deleted", NO_COPY_FROM },
+ {2, "A_mv/D/H/psi", "base-deleted", NO_COPY_FROM },
+ {2, "A_mv/D_mv", "normal", 1, "A/D", MOVED_HERE},
+ {2, "A_mv/D_mv/G", "normal", 1, "A/D/G", MOVED_HERE},
+ {2, "A_mv/D_mv/G/pi", "normal", 1, "A/D/G/pi", MOVED_HERE},
+ {2, "A_mv/D_mv/G/rho", "normal", 1, "A/D/G/rho", MOVED_HERE},
+ {2, "A_mv/D_mv/G/tau", "normal", 1, "A/D/G/tau", MOVED_HERE},
+ {2, "A_mv/D_mv/gamma", "normal", 1, "A/D/gamma", MOVED_HERE},
+ {2, "A_mv/D_mv/H", "normal", 1, "A/D/H", MOVED_HERE},
+ {2, "A_mv/D_mv/H/chi", "normal", 1, "A/D/H/chi", MOVED_HERE},
+ {2, "A_mv/D_mv/H/omega","normal", 1, "A/D/H/omega", MOVED_HERE},
+ {2, "A_mv/D_mv/H/psi", "normal", 1, "A/D/H/psi", MOVED_HERE},
+ {3, "A_mv/B/E", "normal", 2, "A/B/E"},
+ {3, "A_mv/B/E/alpha", "normal", 2, "A/B/E/alpha"},
+ {3, "A_mv/B/E/beta", "normal", 2, "A/B/E/beta"},
+
+ {0}
+ };
+
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+move_edit_obstruction(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "move_edit_obstruction", opts, pool));
+
+ SVN_ERR(sbox_add_and_commit_greek_tree(&b));
+
+ SVN_ERR(sbox_file_write(&b, "A/B/E/alpha", "Update alpha"));
+ SVN_ERR(sbox_wc_propset(&b, "a", "b", "A/B/F"));
+ SVN_ERR(sbox_wc_commit(&b, "")); /* r2 */
+
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ /* Simple move */
+ SVN_ERR(sbox_wc_move(&b, "A", "A_mv"));
+ SVN_ERR(sbox_wc_update(&b, "", 2));
+ SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+
+ {
+ nodes_row_t nodes[] = {
+ {1, "A_mv", "normal", 2, "A", MOVED_HERE},
+ {1, "A_mv/B", "normal", 2, "A/B", MOVED_HERE},
+ {1, "A_mv/B/E", "normal", 2, "A/B/E", MOVED_HERE},
+ {1, "A_mv/B/E/alpha", "normal", 2, "A/B/E/alpha", MOVED_HERE},
+ {1, "A_mv/B/E/beta", "normal", 2, "A/B/E/beta", MOVED_HERE},
+ {1, "A_mv/B/F", "normal", 2, "A/B/F", MOVED_HERE, "a"},
+ {1, "A_mv/B/lambda", "normal", 2, "A/B/lambda", MOVED_HERE},
+ {1, "A_mv/C", "normal", 2, "A/C", MOVED_HERE},
+ {1, "A_mv/D", "normal", 2, "A/D", MOVED_HERE},
+ {1, "A_mv/D/G", "normal", 2, "A/D/G", MOVED_HERE},
+ {1, "A_mv/D/G/pi", "normal", 2, "A/D/G/pi", MOVED_HERE},
+ {1, "A_mv/D/G/rho", "normal", 2, "A/D/G/rho", MOVED_HERE},
+ {1, "A_mv/D/G/tau", "normal", 2, "A/D/G/tau", MOVED_HERE},
+ {1, "A_mv/D/gamma", "normal", 2, "A/D/gamma", MOVED_HERE},
+ {1, "A_mv/D/H", "normal", 2, "A/D/H", MOVED_HERE},
+ {1, "A_mv/D/H/chi", "normal", 2, "A/D/H/chi", MOVED_HERE},
+ {1, "A_mv/D/H/omega", "normal", 2, "A/D/H/omega", MOVED_HERE},
+ {1, "A_mv/D/H/psi", "normal", 2, "A/D/H/psi", MOVED_HERE},
+ {1, "A_mv/mu", "normal", 2, "A/mu", MOVED_HERE},
+ {0}
+ };
+
+ SVN_ERR(check_db_rows(&b, "A_mv", nodes));
+ SVN_ERR(check_db_conflicts(&b, "", NULL));
+ }
+
+ /* Now do the same thing with local obstructions on the edited nodes */
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ SVN_ERR(sbox_wc_revert(&b, "", svn_depth_infinity));
+ SVN_ERR(sbox_wc_move(&b, "A", "A_mv"));
+
+ SVN_ERR(svn_io_remove_file2(sbox_wc_path(&b, "A_mv/B/E/alpha"), FALSE, pool));
+ SVN_ERR(svn_io_dir_make(sbox_wc_path(&b, "A_mv/B/E/alpha"), APR_OS_DEFAULT,
+ pool));
+ SVN_ERR(svn_io_dir_remove_nonrecursive(sbox_wc_path(&b, "A_mv/B/F"), pool));
+ SVN_ERR(sbox_file_write(&b, "A_mv/B/F", "F file"));
+
+ SVN_ERR(sbox_wc_update(&b, "", 2));
+ SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+
+ {
+ nodes_row_t nodes[] = {
+ {1, "A_mv", "normal", 2, "A", MOVED_HERE},
+ {1, "A_mv/B", "normal", 2, "A/B", MOVED_HERE},
+ {1, "A_mv/B/E", "normal", 2, "A/B/E", MOVED_HERE},
+ {1, "A_mv/B/E/alpha", "normal", 2, "A/B/E/alpha", MOVED_HERE},
+ {1, "A_mv/B/E/beta", "normal", 2, "A/B/E/beta", MOVED_HERE},
+ {1, "A_mv/B/F", "normal", 2, "A/B/F", MOVED_HERE, "a"},
+ {1, "A_mv/B/lambda", "normal", 2, "A/B/lambda", MOVED_HERE},
+ {1, "A_mv/C", "normal", 2, "A/C", MOVED_HERE},
+ {1, "A_mv/D", "normal", 2, "A/D", MOVED_HERE},
+ {1, "A_mv/D/G", "normal", 2, "A/D/G", MOVED_HERE},
+ {1, "A_mv/D/G/pi", "normal", 2, "A/D/G/pi", MOVED_HERE},
+ {1, "A_mv/D/G/rho", "normal", 2, "A/D/G/rho", MOVED_HERE},
+ {1, "A_mv/D/G/tau", "normal", 2, "A/D/G/tau", MOVED_HERE},
+ {1, "A_mv/D/gamma", "normal", 2, "A/D/gamma", MOVED_HERE},
+ {1, "A_mv/D/H", "normal", 2, "A/D/H", MOVED_HERE},
+ {1, "A_mv/D/H/chi", "normal", 2, "A/D/H/chi", MOVED_HERE},
+ {1, "A_mv/D/H/omega", "normal", 2, "A/D/H/omega", MOVED_HERE},
+ {1, "A_mv/D/H/psi", "normal", 2, "A/D/H/psi", MOVED_HERE},
+ {1, "A_mv/mu", "normal", 2, "A/mu", MOVED_HERE},
+ {0}
+ };
+ conflict_info_t conflicts[] = {
+ {"A_mv/B/E/alpha", FALSE, FALSE, {svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_obstructed}},
+ {"A_mv/B/F", FALSE, FALSE, {svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_obstructed}},
+ {0}
+ };
+
+ SVN_ERR(check_db_rows(&b, "A_mv", nodes));
+ SVN_ERR(check_db_conflicts(&b, "", conflicts));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+move_deep_bump(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "move_deep_bump", opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "B/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "B/B/A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "B/B/A/A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "B/B/A/A/A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "C"));
+ SVN_ERR(sbox_wc_mkdir(&b, "C/C"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_mkdir(&b, "Z"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_mkdir(&b, "B/B/A/A/A/A"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+
+ SVN_ERR(sbox_wc_move(&b, "B/B/A", "B/B/B"));
+ SVN_ERR(sbox_wc_move(&b, "B/B/B/A", "C/C/A"));
+
+ /* This can't bump C/C/A as that is outside the lock range
+ so we expect a tree conflict.
+
+ This used to cause a node not found during bumping
+ because B/B/B/A doesn't have a BASE node */
+ SVN_ERR(sbox_wc_update(&b, "B/B", 2));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 1, ""},
+ {0, "B", "normal", 1, "B"},
+ {0, "B/B", "normal", 2, "B/B"},
+ {0, "B/B/A", "normal", 2, "B/B/A"},
+ {0, "B/B/A/A", "normal", 2, "B/B/A/A"},
+ {0, "B/B/A/A/A", "normal", 2, "B/B/A/A/A"},
+ {0, "C", "normal", 1, "C"},
+ {0, "C/C", "normal", 1, "C/C"},
+ {3, "B/B/A", "base-deleted", NO_COPY_FROM, "B/B/B"},
+ {3, "B/B/A/A", "base-deleted", NO_COPY_FROM},
+ {3, "B/B/A/A/A", "base-deleted", NO_COPY_FROM},
+ {3, "B/B/B", "normal", 2, "B/B/A", MOVED_HERE},
+ {3, "B/B/B/A", "normal", 2, "B/B/A/A", MOVED_HERE},
+ {3, "B/B/B/A/A", "normal", 2, "B/B/A/A/A", MOVED_HERE},
+ {3, "C/C/A", "normal", 1, "B/B/A/A", MOVED_HERE},
+ {3, "C/C/A/A", "normal", 1, "B/B/A/A/A", MOVED_HERE},
+ {4, "B/B/B/A", "base-deleted", NO_COPY_FROM, "C/C/A"},
+ {4, "B/B/B/A/A", "base-deleted", NO_COPY_FROM},
+ {0}
+ };
+ conflict_info_t conflicts[] = {
+ {"B/B/B/A", FALSE, FALSE, {svn_wc_conflict_action_edit,
+ svn_wc_conflict_reason_moved_away, "B/B/B/A"}},
+ {0}
+ };
+
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ SVN_ERR(check_db_conflicts(&b, "", conflicts));
+ }
+
+ SVN_ERR(sbox_wc_resolve(&b, "B/B/B/A", svn_depth_empty,
+ svn_wc_conflict_choose_mine_conflict));
+ SVN_ERR(check_db_conflicts(&b, "", NULL));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+make_copy_mixed(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "make_copy_mixed", opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/D"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/E"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/F"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/G"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/G/H"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/G/I"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/G/J"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/K"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/K/L"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/K/M"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/N"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/N/O"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/N/P"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/N/Q"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/R"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/R/S"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/R/S/T"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ SVN_ERR(sbox_wc_propset(&b, "k", "r2", ""));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_propset(&b, "k", "r3", ""));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_propset(&b, "k", "r4", ""));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_propset(&b, "k", "r5", ""));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ SVN_ERR(sbox_wc_update(&b, "", 5));
+ SVN_ERR(sbox_wc_update(&b, "A", 4));
+ SVN_ERR(sbox_wc_update(&b, "A/B", 3));
+ SVN_ERR(sbox_wc_update(&b, "A/B/C", 2));
+ SVN_ERR(sbox_wc_update(&b, "A/B/K", 1));
+ SVN_ERR(sbox_wc_update(&b, "A/N/O", 3));
+
+ SVN_ERR(sbox_wc_delete(&b, "A/B/C/F"));
+ SVN_ERR(sbox_wc_delete(&b, "A/B/G/J"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/G/J"));
+
+ SVN_ERR(sbox_wc_update(&b, "A/N/P", 1));
+ SVN_ERR(sbox_wc_update(&b, "A/N/Q", 1));
+ SVN_ERR(sbox_wc_delete(&b, "A/N/P"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/N/P"));
+ SVN_ERR(sbox_wc_move(&b, "A/N/Q", "Q"));
+ SVN_ERR(sbox_wc_move(&b, "A/B/G/H", "H"));
+
+ /* And something that can't be represented */
+ SVN_ERR(sbox_wc_update(&b, "A/B/C/E", 1));
+ SVN_ERR(sbox_wc_move(&b, "A/B/C/E", "E"));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 5, "", NOT_MOVED, "k"},
+ {0, "A", "normal", 4, "A"},
+ {0, "A/B", "normal", 3, "A/B"},
+ {0, "A/B/C", "normal", 2, "A/B/C"},
+ {0, "A/B/C/D", "normal", 2, "A/B/C/D"},
+ {0, "A/B/C/E", "normal", 1, "A/B/C/E"},
+ {0, "A/B/C/F", "normal", 2, "A/B/C/F"},
+ {0, "A/B/G", "normal", 3, "A/B/G"},
+ {0, "A/B/G/H", "normal", 3, "A/B/G/H"},
+ {0, "A/B/G/I", "normal", 3, "A/B/G/I"},
+ {0, "A/B/G/J", "normal", 3, "A/B/G/J"},
+ {0, "A/B/K", "normal", 1, "A/B/K"},
+ {0, "A/B/K/L", "normal", 1, "A/B/K/L"},
+ {0, "A/B/K/M", "normal", 1, "A/B/K/M"},
+ {0, "A/N", "normal", 4, "A/N"},
+ {0, "A/N/O", "normal", 3, "A/N/O"},
+ {0, "A/N/P", "normal", 1, "A/N/P"},
+ {0, "A/N/Q", "normal", 1, "A/N/Q"},
+ {0, "A/R", "normal", 4, "A/R"},
+ {0, "A/R/S", "normal", 4, "A/R/S"},
+ {0, "A/R/S/T", "normal", 4, "A/R/S/T"},
+ {1, "E", "normal", 1, "A/B/C/E", MOVED_HERE},
+ {1, "H", "normal", 3, "A/B/G/H", MOVED_HERE},
+ {1, "Q", "normal", 1, "A/N/Q", MOVED_HERE},
+ {3, "A/N/P", "normal", NO_COPY_FROM},
+ {3, "A/N/Q", "base-deleted", NO_COPY_FROM, "Q"},
+ {4, "A/B/C/E", "base-deleted", NO_COPY_FROM, "E"},
+ {4, "A/B/C/F", "base-deleted", NO_COPY_FROM},
+ {4, "A/B/G/H", "base-deleted", NO_COPY_FROM, "H"},
+ {4, "A/B/G/J", "normal", NO_COPY_FROM},
+
+ {0}
+ };
+
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(svn_wc__db_op_make_copy(b.wc_ctx->db, sbox_wc_path(&b, "A"),
+ NULL, NULL, pool));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 5, "", NOT_MOVED, "k"},
+ {0, "A", "normal", 4, "A"},
+ {0, "A/B", "normal", 3, "A/B"},
+ {0, "A/B/C", "normal", 2, "A/B/C"},
+ {0, "A/B/C/D", "normal", 2, "A/B/C/D"},
+ {0, "A/B/C/E", "normal", 1, "A/B/C/E"},
+ {0, "A/B/C/F", "normal", 2, "A/B/C/F"},
+ {0, "A/B/G", "normal", 3, "A/B/G"},
+ {0, "A/B/G/H", "normal", 3, "A/B/G/H"},
+ {0, "A/B/G/I", "normal", 3, "A/B/G/I"},
+ {0, "A/B/G/J", "normal", 3, "A/B/G/J"},
+ {0, "A/B/K", "normal", 1, "A/B/K"},
+ {0, "A/B/K/L", "normal", 1, "A/B/K/L"},
+ {0, "A/B/K/M", "normal", 1, "A/B/K/M"},
+ {0, "A/N", "normal", 4, "A/N"},
+ {0, "A/N/O", "normal", 3, "A/N/O"},
+ {0, "A/N/P", "normal", 1, "A/N/P"},
+ {0, "A/N/Q", "normal", 1, "A/N/Q"},
+ {0, "A/R", "normal", 4, "A/R"},
+ {0, "A/R/S", "normal", 4, "A/R/S"},
+ {0, "A/R/S/T", "normal", 4, "A/R/S/T"},
+ {1, "A", "normal", 4, "A"},
+ {1, "A/B", "not-present", 3, "A/B"},
+ {1, "A/B/C", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C/D", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/C/E", "base-deleted", NO_COPY_FROM, "E"},
+ {1, "A/B/C/F", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/G", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/G/H", "base-deleted", NO_COPY_FROM, "H"},
+ {1, "A/B/G/I", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/G/J", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/K", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/K/L", "base-deleted", NO_COPY_FROM},
+ {1, "A/B/K/M", "base-deleted", NO_COPY_FROM},
+ {1, "A/N", "normal", 4, "A/N"},
+ {1, "A/N/O", "not-present", 3, "A/N/O"},
+ {1, "A/N/P", "not-present", 1, "A/N/P"},
+ {1, "A/N/Q", "not-present", 1, "A/N/Q", FALSE, "Q"},
+ {1, "A/R", "normal", 4, "A/R"},
+ {1, "A/R/S", "normal", 4, "A/R/S"},
+ {1, "A/R/S/T", "normal", 4, "A/R/S/T"},
+ {1, "E", "normal", 1, "A/B/C/E", MOVED_HERE},
+ {1, "H", "normal", 3, "A/B/G/H", MOVED_HERE},
+ {1, "Q", "normal", 1, "A/N/Q", MOVED_HERE},
+ {2, "A/B", "normal", 3, "A/B"},
+ {2, "A/B/C", "not-present", 2, "A/B/C"},
+ {2, "A/B/G", "normal", 3, "A/B/G"},
+ {2, "A/B/G/H", "normal", 3, "A/B/G/H"},
+ {2, "A/B/G/I", "normal", 3, "A/B/G/I"},
+ {2, "A/B/G/J", "normal", 3, "A/B/G/J"},
+ {2, "A/B/K", "not-present", 1, "A/B/K"},
+ {3, "A/B/C", "normal", 2, "A/B/C"},
+ {3, "A/B/C/D", "normal", 2, "A/B/C/D"},
+ {3, "A/B/C/E", "not-present", 1, "A/B/C/E"},
+ {3, "A/B/C/F", "normal", 2, "A/B/C/F"},
+ {3, "A/B/K", "normal", 1, "A/B/K"},
+ {3, "A/B/K/L", "normal", 1, "A/B/K/L"},
+ {3, "A/B/K/M", "normal", 1, "A/B/K/M"},
+ {3, "A/N/O", "normal", 3, "A/N/O"},
+ {3, "A/N/P", "normal", NO_COPY_FROM},
+ {4, "A/B/C/F", "base-deleted", NO_COPY_FROM},
+ {4, "A/B/G/H", "base-deleted", NO_COPY_FROM},
+ {4, "A/B/G/J", "normal", NO_COPY_FROM},
+
+ {0}
+ };
+
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(verify_db(&b));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+make_copy_and_delete_mixed(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "make_copy_and_del_mixed", opts, pool));
+
+ SVN_ERR(sbox_wc_mkdir(&b, "A"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/D"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/E"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/F"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/G"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/G/H"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/G/I"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/G/J"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/K"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/K/L"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/K/M"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/N"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/N/O"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/N/P"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/N/Q"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/R"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/R/S"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/R/S/T"));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_update(&b, "", 1));
+ SVN_ERR(sbox_wc_propset(&b, "k", "r2", ""));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_propset(&b, "k", "r3", ""));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_propset(&b, "k", "r4", ""));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+ SVN_ERR(sbox_wc_propset(&b, "k", "r5", ""));
+ SVN_ERR(sbox_wc_commit(&b, ""));
+
+ SVN_ERR(sbox_wc_update(&b, "", 5));
+ SVN_ERR(sbox_wc_update(&b, "A", 4));
+ SVN_ERR(sbox_wc_update(&b, "A/B", 3));
+ SVN_ERR(sbox_wc_update(&b, "A/B/C", 2));
+ SVN_ERR(sbox_wc_update(&b, "A/B/K", 1));
+ SVN_ERR(sbox_wc_update(&b, "A/N/O", 3));
+
+ SVN_ERR(sbox_wc_delete(&b, "A/B/C/F"));
+ SVN_ERR(sbox_wc_delete(&b, "A/B/G/J"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/B/G/J"));
+
+ SVN_ERR(sbox_wc_update(&b, "A/N/P", 1));
+ SVN_ERR(sbox_wc_update(&b, "A/N/Q", 1));
+ SVN_ERR(sbox_wc_delete(&b, "A/N/P"));
+ SVN_ERR(sbox_wc_mkdir(&b, "A/N/P"));
+ SVN_ERR(sbox_wc_move(&b, "A/N/Q", "Q"));
+ SVN_ERR(sbox_wc_move(&b, "A/B/G/H", "H"));
+
+ /* And something that can't be represented */
+ SVN_ERR(sbox_wc_update(&b, "A/B/C/E", 1));
+ SVN_ERR(sbox_wc_move(&b, "A/B/C/E", "E"));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 5, "", NOT_MOVED, "k"},
+ {0, "A", "normal", 4, "A"},
+ {0, "A/B", "normal", 3, "A/B"},
+ {0, "A/B/C", "normal", 2, "A/B/C"},
+ {0, "A/B/C/D", "normal", 2, "A/B/C/D"},
+ {0, "A/B/C/E", "normal", 1, "A/B/C/E"},
+ {0, "A/B/C/F", "normal", 2, "A/B/C/F"},
+ {0, "A/B/G", "normal", 3, "A/B/G"},
+ {0, "A/B/G/H", "normal", 3, "A/B/G/H"},
+ {0, "A/B/G/I", "normal", 3, "A/B/G/I"},
+ {0, "A/B/G/J", "normal", 3, "A/B/G/J"},
+ {0, "A/B/K", "normal", 1, "A/B/K"},
+ {0, "A/B/K/L", "normal", 1, "A/B/K/L"},
+ {0, "A/B/K/M", "normal", 1, "A/B/K/M"},
+ {0, "A/N", "normal", 4, "A/N"},
+ {0, "A/N/O", "normal", 3, "A/N/O"},
+ {0, "A/N/P", "normal", 1, "A/N/P"},
+ {0, "A/N/Q", "normal", 1, "A/N/Q"},
+ {0, "A/R", "normal", 4, "A/R"},
+ {0, "A/R/S", "normal", 4, "A/R/S"},
+ {0, "A/R/S/T", "normal", 4, "A/R/S/T"},
+ {1, "E", "normal", 1, "A/B/C/E", MOVED_HERE},
+ {1, "H", "normal", 3, "A/B/G/H", MOVED_HERE},
+ {1, "Q", "normal", 1, "A/N/Q", MOVED_HERE},
+ {3, "A/N/P", "normal", NO_COPY_FROM},
+ {3, "A/N/Q", "base-deleted", NO_COPY_FROM, "Q"},
+ {4, "A/B/C/E", "base-deleted", NO_COPY_FROM, "E"},
+ {4, "A/B/C/F", "base-deleted", NO_COPY_FROM},
+ {4, "A/B/G/H", "base-deleted", NO_COPY_FROM, "H"},
+ {4, "A/B/G/J", "normal", NO_COPY_FROM},
+
+ {0}
+ };
+
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(svn_wc__db_base_remove(b.wc_ctx->db, sbox_wc_path(&b, "A"),
+ TRUE, TRUE, FALSE, 99,
+ NULL, NULL, pool));
+
+ {
+ nodes_row_t nodes[] = {
+ {0, "", "normal", 5, "", NOT_MOVED, "k"},
+ {0, "A", "not-present", 99, "A"},
+ {1, "A", "normal", 4, "A"},
+ {1, "A/B", "not-present", 3, "A/B"},
+ {1, "A/N", "normal", 4, "A/N"},
+ {1, "A/N/O", "not-present", 3, "A/N/O"},
+ {1, "A/N/P", "not-present", 1, "A/N/P"},
+ {1, "A/N/Q", "not-present", 1, "A/N/Q", FALSE},
+ {1, "A/R", "normal", 4, "A/R"},
+ {1, "A/R/S", "normal", 4, "A/R/S"},
+ {1, "A/R/S/T", "normal", 4, "A/R/S/T"},
+ {1, "E", "normal", 1, "A/B/C/E"},
+ {1, "H", "normal", 3, "A/B/G/H", MOVED_HERE},
+ {1, "Q", "normal", 1, "A/N/Q"},
+ {2, "A/B", "normal", 3, "A/B"},
+ {2, "A/B/C", "not-present", 2, "A/B/C"},
+ {2, "A/B/G", "normal", 3, "A/B/G"},
+ {2, "A/B/G/H", "normal", 3, "A/B/G/H"},
+ {2, "A/B/G/I", "normal", 3, "A/B/G/I"},
+ {2, "A/B/G/J", "normal", 3, "A/B/G/J"},
+ {3, "A/B/C", "normal", 2, "A/B/C"},
+ {3, "A/B/C/D", "normal", 2, "A/B/C/D"},
+ {3, "A/B/C/E", "not-present", 1, "A/B/C/E"},
+ {3, "A/B/C/F", "normal", 2, "A/B/C/F"},
+ {2, "A/B/K", "not-present", 1, "A/B/K"},
+ {3, "A/B/K", "normal", 1, "A/B/K"},
+ {3, "A/B/K/L", "normal", 1, "A/B/K/L"},
+ {3, "A/B/K/M", "normal", 1, "A/B/K/M"},
+ {3, "A/N/O", "normal", 3, "A/N/O"},
+ {3, "A/N/P", "normal", NO_COPY_FROM},
+ {4, "A/B/C/F", "base-deleted", NO_COPY_FROM},
+ {4, "A/B/G/H", "base-deleted", NO_COPY_FROM, "H"},
+ {4, "A/B/G/J", "normal", NO_COPY_FROM},
+
+ {0}
+ };
+
+ /* This currently fails because Q and E are still marked as moved,
+ while there is nothing to be moved. */
+ SVN_ERR(check_db_rows(&b, "", nodes));
+ }
+
+ SVN_ERR(verify_db(&b));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_global_commit(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "global_commit", opts, pool));
+
+ {
+ nodes_row_t before[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 0, "A/B", "normal", 2, "A/B" },
+ { 0, "A/B/C", "normal", 2, "A/B/C" },
+ { 0, "A/B/D", "normal", 2, "A/B/D" },
+ { 0, "A/B/D/E", "normal", 2, "A/B/D/E" },
+ { 0, "A/F", "normal", 2, "A/F" },
+ { 0, "A/F/G", "normal", 2, "A/F/G" },
+ { 0, "A/F/H", "normal", 2, "A/F/H" },
+ { 0, "A/F/E", "normal", 2, "A/F/E" },
+ { 0, "A/X", "normal", 2, "A/X" },
+ { 0, "A/X/Y", "incomplete", 2, "A/X/Y" },
+ { 1, "C", "normal", 2, "A/B/C", MOVED_HERE},
+ { 1, "E", "normal", 2, "A/B/D/E", MOVED_HERE},
+ { 2, "A/B", "normal", 3, "some", MOVED_HERE },
+ { 2, "A/B/C", "base-deleted", NO_COPY_FROM, "C" },
+ { 2, "A/B/D", "normal", 3, "some/D", MOVED_HERE},
+ { 2, "A/B/D/E", "not-present", 3, "some/D/E", FALSE, "E", TRUE},
+ { 3, "A/B/C", "normal", NO_COPY_FROM},
+ { 2, "A/F", "normal", 1, "S2" },
+ { 2, "A/F/G", "normal", 1, "S2/G" },
+ { 2, "A/F/H", "not-present", 1, "S2/H" },
+ { 2, "A/F/E", "base-deleted", NO_COPY_FROM },
+ { 1, "some", "normal", 3, "some", FALSE, "A/B"},
+ { 0 }
+ };
+ SVN_ERR(insert_dirs(&b, before));
+ SVN_ERR(check_db_rows(&b, "", before)); /* Check move insertion logic */
+ SVN_ERR(verify_db(&b));
+ }
+
+ /* This should break the moves */
+ SVN_ERR(svn_wc__db_global_commit(b.wc_ctx->db,
+ sbox_wc_path(&b, "A/B"),
+ 5, 5, 700, "me", NULL, NULL,
+ FALSE, FALSE, NULL, pool));
+ {
+ nodes_row_t after[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 0, "A/B", "normal", 5, "A/B" },
+ { 0, "A/B/D", "normal", 5, "A/B/D"},
+ { 0, "A/B/D/E", "not-present", 5, "A/B/D/E"},
+ { 0, "A/F", "normal", 2, "A/F" },
+ { 0, "A/F/G", "normal", 2, "A/F/G" },
+ { 0, "A/F/H", "normal", 2, "A/F/H" },
+ { 0, "A/F/E", "normal", 2, "A/F/E" },
+ { 0, "A/X", "normal", 2, "A/X" },
+ { 0, "A/X/Y", "incomplete", 2, "A/X/Y" },
+ { 1, "C", "normal", 2, "A/B/C"},
+ { 1, "E", "normal", 2, "A/B/D/E"},
+ { 1, "some", "normal", 3, "some"},
+ { 3, "A/B/C", "normal", NO_COPY_FROM},
+ { 2, "A/F", "normal", 1, "S2" },
+ { 2, "A/F/G", "normal", 1, "S2/G" },
+ { 2, "A/F/H", "not-present", 1, "S2/H" },
+ { 2, "A/F/E", "base-deleted", NO_COPY_FROM },
+ { 0 }
+ };
+
+ SVN_ERR(check_db_rows(&b, "", after));
+ SVN_ERR(verify_db(&b));
+ }
+
+ SVN_ERR(svn_wc__db_global_commit(b.wc_ctx->db,
+ sbox_wc_path(&b, "A/F"),
+ 6, 6, 800, "me", NULL, NULL,
+ FALSE, FALSE, NULL, pool));
+
+ {
+ nodes_row_t after[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 0, "A/B", "normal", 5, "A/B" },
+ { 0, "A/B/D", "normal", 5, "A/B/D"},
+ { 0, "A/B/D/E", "not-present", 5, "A/B/D/E"},
+ { 0, "A/F", "normal", 6, "A/F" },
+ { 0, "A/F/G", "normal", 6, "A/F/G" },
+ { 0, "A/F/H", "not-present", 6, "A/F/H" },
+ { 0, "A/X", "normal", 2, "A/X" },
+ { 0, "A/X/Y", "incomplete", 2, "A/X/Y" },
+ { 1, "C", "normal", 2, "A/B/C"},
+ { 1, "E", "normal", 2, "A/B/D/E"},
+ { 1, "some", "normal", 3, "some"},
+ { 3, "A/B/C", "normal", NO_COPY_FROM },
+ { 0 }
+ };
+
+ SVN_ERR(check_db_rows(&b, "", after));
+ SVN_ERR(verify_db(&b));
+ }
+
+ SVN_ERR(svn_wc__db_global_commit(b.wc_ctx->db,
+ sbox_wc_path(&b, "A/B/C"),
+ 7, 7, 900, "me", NULL, NULL,
+ FALSE, FALSE, NULL, pool));
+
+ {
+ nodes_row_t after[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 0, "A/B", "normal", 5, "A/B" },
+ { 0, "A/B/C", "normal", 7, "A/B/C"},
+ { 0, "A/B/D", "normal", 5, "A/B/D"},
+ { 0, "A/B/D/E", "not-present", 5, "A/B/D/E"},
+ { 0, "A/F", "normal", 6, "A/F" },
+ { 0, "A/F/G", "normal", 6, "A/F/G" },
+ { 0, "A/F/H", "not-present", 6, "A/F/H" },
+ { 0, "A/X", "normal", 2, "A/X" },
+ { 0, "A/X/Y", "incomplete", 2, "A/X/Y" },
+ { 1, "some", "normal", 3, "some"},
+ { 1, "E", "normal", 2, "A/B/D/E"},
+ { 1, "C", "normal", 2, "A/B/C"},
+ { 0 }
+ };
+
+ SVN_ERR(check_db_rows(&b, "", after));
+ SVN_ERR(verify_db(&b));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_global_commit_switched(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "global_commit_switched", opts, pool));
+ {
+ nodes_row_t before[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ /* A/B is switched... The libsvn_client layer tries to prevent this,
+ because it has such an unexpected behavior. */
+ { 0, "A/B", "normal", 2, "N/B" },
+ { 0, "A/B/C", "normal", 2, "N/B/C" },
+ { 0, "A/B/C/D", "normal", 2, "N/B/C/D" },
+ { 0, "A/B/C/E", "normal", 2, "N/B/C/E" },
+ { 2, "A/B", "normal", 3, "Z/B" },
+ { 2, "A/B/C", "normal", 3, "Z/B/C" },
+ { 2, "A/B/C/D", "normal", 3, "Z/B/C/D" },
+ { 2, "A/B/C/E", "base-deleted", NO_COPY_FROM },
+ /* not-present nodes have an 'uninteresting path',
+ which doesn't have to be as implied by ancestor at same depth */
+ { 2, "A/B/C/F", "not-present", 3, "ZZ-Z-Z_ZZ_Z_Z" },
+ { 2, "A/B/C/G", "normal", 3, "Z/B/C/G" },
+ { 2, "A/B/C/G/H", "normal", 3, "Z/B/C/G/H" },
+
+ { 3, "A/B/C", "normal", 4, "Q/C" },
+ { 3, "A/B/C/D", "base-deleted", NO_COPY_FROM },
+ { 3, "A/B/C/G", "normal", 4, "Q/C/G" },
+ { 3, "A/B/C/G/H", "base-deleted", NO_COPY_FROM },
+
+ { 4, "A/B/C/F", "normal", NO_COPY_FROM },
+ { 5, "A/B/C/G/H", "normal", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(insert_dirs(&b, before));
+ SVN_ERR(verify_db(&b));
+ }
+
+ SVN_ERR(svn_wc__db_global_commit(b.wc_ctx->db,
+ sbox_wc_path(&b, "A/B"),
+ 7, 7, 12, "me", NULL, NULL,
+ FALSE, FALSE, NULL, pool));
+
+ {
+ nodes_row_t after[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ /* The commit is applied as A/B, because the path is calculated from A,
+ and not the shadowed node at A/B. (Fixed in r1663991) */
+ { 0, "A/B", "normal", 7, "A/B" },
+ { 0, "A/B/C", "normal", 7, "A/B/C" },
+ { 0, "A/B/C/D", "normal", 7, "A/B/C/D" },
+ /* Even calculated path of not-present is fixed */
+ { 0, "A/B/C/F", "not-present", 7, "A/B/C/F" },
+ { 0, "A/B/C/G", "normal", 7, "A/B/C/G" },
+ { 0, "A/B/C/G/H", "normal", 7, "A/B/C/G/H" },
+
+ /* The higher layers are unaffected */
+ { 3, "A/B/C", "normal", 4, "Q/C" },
+ { 3, "A/B/C/D", "base-deleted", NO_COPY_FROM },
+ { 3, "A/B/C/G", "normal", 4, "Q/C/G" },
+ { 3, "A/B/C/G/H", "base-deleted", NO_COPY_FROM },
+
+ { 4, "A/B/C/F", "normal", NO_COPY_FROM },
+ { 5, "A/B/C/G/H", "normal", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(verify_db(&b));
+ SVN_ERR(check_db_rows(&b, "", after));
+ }
+
+ SVN_ERR(svn_wc__db_global_commit(b.wc_ctx->db,
+ sbox_wc_path(&b, "A/B/C"),
+ 8, 8, 12, "me", NULL, NULL,
+ FALSE, FALSE, NULL, pool));
+
+ {
+ nodes_row_t after[] = {
+ { 0, "", "normal", 2, "" },
+ { 0, "A", "normal", 2, "A" },
+ { 0, "A/B", "normal", 7, "A/B" },
+ /* Base deleted and not-present are now gone */
+ { 0, "A/B/C", "normal", 8, "A/B/C" },
+ { 0, "A/B/C/G", "normal", 8, "A/B/C/G" },
+
+ { 4, "A/B/C/F", "normal", NO_COPY_FROM },
+ { 5, "A/B/C/G/H", "normal", NO_COPY_FROM },
+ { 0 }
+ };
+ SVN_ERR(verify_db(&b));
+ SVN_ERR(check_db_rows(&b, "", after));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* ---------------------------------------------------------------------- */
+/* The list of test functions */
+
+static int max_threads = 4;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_OPTS_PASS(test_wc_wc_copies,
+ "test_wc_wc_copies"),
+ SVN_TEST_OPTS_PASS(test_reverts,
+ "test_reverts"),
+ SVN_TEST_OPTS_PASS(test_deletes,
+ "test_deletes"),
+ SVN_TEST_OPTS_PASS(test_delete_of_copies,
+ "test_delete_of_copies"),
+ SVN_TEST_OPTS_PASS(test_delete_with_base,
+ "test_delete_with_base"),
+ SVN_TEST_OPTS_PASS(test_adds,
+ "test_adds"),
+ SVN_TEST_OPTS_PASS(test_repo_wc_copies,
+ "test_repo_wc_copies"),
+ SVN_TEST_OPTS_PASS(test_delete_with_update,
+ "test_delete_with_update"),
+ SVN_TEST_OPTS_PASS(test_adds_change_kind,
+ "test_adds_change_kind"),
+ SVN_TEST_OPTS_PASS(test_base_dir_insert_remove,
+ "test_base_dir_insert_remove"),
+ SVN_TEST_OPTS_PASS(test_db_make_copy,
+ "test_db_make_copy"),
+ SVN_TEST_OPTS_PASS(test_wc_move,
+ "test_wc_move"),
+ SVN_TEST_OPTS_PASS(test_mixed_rev_copy,
+ "test_mixed_rev_copy"),
+ SVN_TEST_OPTS_PASS(test_delete_of_replace,
+ "test_delete_of_replace"),
+ SVN_TEST_OPTS_PASS(test_del_replace_not_present,
+ "test_del_replace_not_present"),
+ SVN_TEST_OPTS_PASS(test_op_revert,
+ "test_op_revert"),
+ SVN_TEST_OPTS_PASS(test_op_revert_changelist,
+ "test_op_revert_changelist"),
+ SVN_TEST_OPTS_PASS(test_children_of_replaced_dir,
+ "test_children_of_replaced_dir"),
+ SVN_TEST_OPTS_PASS(test_op_delete,
+ "test_op_delete"),
+ SVN_TEST_OPTS_PASS(test_child_replace_with_same_origin,
+ "test_child_replace_with_same"),
+ SVN_TEST_OPTS_PASS(test_shadowed_update,
+ "test_shadowed_update"),
+ SVN_TEST_OPTS_PASS(test_copy_of_deleted,
+ "test_copy_of_deleted (issue #3873)"),
+#ifndef DARWIN
+ SVN_TEST_OPTS_PASS(test_case_rename,
+ "test_case_rename on case (in)sensitive system"),
+#else
+ /* apr doesn't implement APR_FILEPATH_TRUENAME for MAC OS yet */
+ SVN_TEST_OPTS_XFAIL(test_case_rename,
+ "test_case_rename on case (in)sensitive system"),
+#endif
+ SVN_TEST_OPTS_PASS(commit_file_external,
+ "commit_file_external (issue #4002)"),
+ SVN_TEST_OPTS_PASS(revert_file_externals,
+ "revert_file_externals"),
+ SVN_TEST_OPTS_PASS(copy_file_externals,
+ "copy_file_externals"),
+ SVN_TEST_OPTS_PASS(copy_wc_wc_server_excluded,
+ "test_wc_wc_copy_server_excluded"),
+ SVN_TEST_OPTS_PASS(incomplete_switch,
+ "incomplete_switch (issue 4040)"),
+ SVN_TEST_OPTS_PASS(nested_moves_child_first,
+ "nested_moves_child_first"),
+ SVN_TEST_OPTS_PASS(nested_moves_child_last,
+ "nested_moves_child_last"),
+ SVN_TEST_OPTS_PASS(move_in_copy,
+ "move_in_copy"),
+ SVN_TEST_OPTS_PASS(move_in_replace,
+ "move_in_replace"),
+ SVN_TEST_OPTS_PASS(copy_a_move,
+ "copy_a_move"),
+ SVN_TEST_OPTS_PASS(move_to_swap,
+ "move_to_swap"),
+ SVN_TEST_OPTS_PASS(revert_nested_move,
+ "revert_nested_move"),
+ SVN_TEST_OPTS_PASS(move_on_move,
+ "move_on_move"),
+ SVN_TEST_OPTS_PASS(move_on_move2,
+ "move_on_move2"),
+ SVN_TEST_OPTS_PASS(move_added,
+ "move_added"),
+ SVN_TEST_OPTS_PASS(move_update,
+ "move_update"),
+ SVN_TEST_OPTS_PASS(test_scan_delete,
+ "scan_delete"),
+ SVN_TEST_OPTS_PASS(test_follow_moved_to,
+ "follow_moved_to"),
+ SVN_TEST_OPTS_WIMP(mixed_rev_move,
+ "mixed_rev_move",
+ "needs different libsvn_wc entry point"),
+ SVN_TEST_OPTS_PASS(update_prop_mod_into_moved,
+ "update_prop_mod_into_moved"),
+ SVN_TEST_OPTS_PASS(nested_move_update,
+ "nested_move_update"),
+ SVN_TEST_OPTS_PASS(nested_move_commit,
+ "nested_move_commit (issue 4291)"),
+ SVN_TEST_OPTS_PASS(nested_move_update2,
+ "nested_move_update2"),
+ SVN_TEST_OPTS_PASS(move_update_conflicts,
+ "move_update_conflicts"),
+ SVN_TEST_OPTS_PASS(move_update_delete_mods,
+ "move_update_delete_mods"),
+ SVN_TEST_OPTS_PASS(nested_moves2,
+ "nested_moves2"),
+ SVN_TEST_OPTS_PASS(move_in_delete,
+ "move_in_delete (issue 4303)"),
+ SVN_TEST_OPTS_PASS(switch_move,
+ "switch_move"),
+ SVN_TEST_OPTS_PASS(move_replace,
+ "move_replace"),
+ SVN_TEST_OPTS_PASS(layered_moved_to,
+ "layered_moved_to"),
+ SVN_TEST_OPTS_PASS(update_within_move,
+ "update_within_move"),
+ SVN_TEST_OPTS_PASS(commit_moved_descendant,
+ "commit_moved_descendant"),
+ SVN_TEST_OPTS_XFAIL(commit_moved_away_descendant,
+ "commit_moved_away_descendant"),
+ SVN_TEST_OPTS_PASS(finite_move_update_bump,
+ "finite_move_update_bump"),
+ SVN_TEST_OPTS_PASS(move_away_delete_update,
+ "move_away_delete_update"),
+ SVN_TEST_OPTS_PASS(move_not_present_variants,
+ "move_not_present_variants"),
+ SVN_TEST_OPTS_PASS(update_child_under_add,
+ "update_child_under_add (issue 4111)"),
+ SVN_TEST_OPTS_PASS(delete_over_moved_away,
+ "delete_over_moved_away"),
+ SVN_TEST_OPTS_PASS(movedto_opdepth,
+ "moved_to op_depth"),
+ SVN_TEST_OPTS_PASS(new_basemove,
+ "new_basemove"),
+ SVN_TEST_OPTS_PASS(move_back,
+ "move_back (issue 4302)"),
+ SVN_TEST_OPTS_PASS(move_update_subtree,
+ "move_update_subtree (issue 4232)"),
+ SVN_TEST_OPTS_PASS(move_parent_into_child,
+ "move_parent_into_child (issue 4333)"),
+ SVN_TEST_OPTS_PASS(move_depth_expand,
+ "move depth expansion"),
+ SVN_TEST_OPTS_XFAIL(move_retract,
+ "move retract (issue 4336)"),
+ SVN_TEST_OPTS_XFAIL(move_delete_file_externals,
+ "move/delete file externals (issue 4293)"),
+ SVN_TEST_OPTS_PASS(update_with_tree_conflict,
+ "update with tree conflict (issue 4347)"),
+ SVN_TEST_OPTS_PASS(move_update_parent_replace,
+ "move update with replaced parent (issue 4388)"),
+ SVN_TEST_OPTS_PASS(copy_mixed_rev_mods,
+ "copy mixed-rev with mods"),
+ SVN_TEST_OPTS_PASS(move_child_to_parent_revert,
+ "move child to parent and revert (issue 4436)"),
+ SVN_TEST_OPTS_PASS(move_delete_intermediate,
+ "move more than once, delete intermediate"),
+ SVN_TEST_OPTS_XFAIL(move_revert_intermediate,
+ "move more than once, revert intermediate"),
+ SVN_TEST_OPTS_PASS(move_replace_ancestor_with_child,
+ "move replace ancestor with child"),
+ SVN_TEST_OPTS_PASS(move_twice_within_delete,
+ "move twice and then delete"),
+ SVN_TEST_OPTS_PASS(del4_update_edit_AAA,
+ "del4: edit AAA"),
+ SVN_TEST_OPTS_XFAIL(del4_update_delete_AAA,
+ "del4: delete AAA"),
+ SVN_TEST_OPTS_XFAIL(del4_update_add_AAA,
+ "del4: add AAA"),
+ SVN_TEST_OPTS_XFAIL(del4_update_replace_AAA,
+ "del4: replace AAA"),
+ SVN_TEST_OPTS_PASS(del4_update_delself_AAA,
+ "del4: delete self AAA"),
+ SVN_TEST_OPTS_XFAIL(del4_update_replaceself_AAA,
+ "del4: replace self AAA"),
+ SVN_TEST_OPTS_PASS(move4_update_edit_AAA,
+ "move4: edit AAA"),
+ SVN_TEST_OPTS_XFAIL(move4_update_delete_AAA,
+ "move4: delete AAA"),
+ SVN_TEST_OPTS_XFAIL(move4_update_add_AAA,
+ "move4: add AAA"),
+ SVN_TEST_OPTS_XFAIL(move4_update_replace_AAA,
+ "move4: replace AAA"),
+ SVN_TEST_OPTS_PASS(move4_update_delself_AAA,
+ "move4: delete self AAA"),
+ SVN_TEST_OPTS_XFAIL(move4_update_replaceself_AAA,
+ "move4: replace self AAA"),
+ SVN_TEST_OPTS_PASS(simple_move_bump,
+ "simple move bump"),
+ SVN_TEST_OPTS_PASS(movedhere_extract_retract,
+ "movedhere extract retract"),
+ SVN_TEST_OPTS_PASS(repo_wc_copy,
+ "repo_wc_copy"),
+ SVN_TEST_OPTS_PASS(break_move_in_delete,
+ "break move in delete (issue 4491)"),
+ SVN_TEST_OPTS_PASS(nested_move_delete,
+ "nested move delete"),
+ SVN_TEST_OPTS_XFAIL(move_within_mixed_move,
+ "move within mixed move"),
+ SVN_TEST_OPTS_PASS(move_edit_obstruction,
+ "move edit obstruction"),
+ SVN_TEST_OPTS_PASS(move_deep_bump,
+ "move deep bump"),
+ SVN_TEST_OPTS_PASS(make_copy_mixed,
+ "make a copy of a mixed revision tree"),
+ SVN_TEST_OPTS_PASS(make_copy_and_delete_mixed,
+ "make a copy of a mixed revision tree and del"),
+ SVN_TEST_OPTS_PASS(test_global_commit,
+ "test global commit"),
+ SVN_TEST_OPTS_PASS(test_global_commit_switched,
+ "test global commit switched"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_wc/pristine-store-test.c b/subversion/tests/libsvn_wc/pristine-store-test.c
new file mode 100644
index 0000000..d9ed077
--- /dev/null
+++ b/subversion/tests/libsvn_wc/pristine-store-test.c
@@ -0,0 +1,324 @@
+/*
+ * pristine-store-test.c : test the pristine-store subsystem
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <apr_pools.h>
+#include <apr_general.h>
+
+#include "svn_types.h"
+
+/* Make sure SVN_DEPRECATED is defined as empty before including svn_io.h.
+ We don't want to trigger deprecation warnings. */
+#ifdef SVN_DEPRECATED
+#undef SVN_DEPRECATED
+#endif
+#define SVN_DEPRECATED
+#include "svn_io.h"
+
+#include "svn_dirent_uri.h"
+#include "svn_pools.h"
+#include "svn_repos.h"
+#include "svn_wc.h"
+#include "svn_client.h"
+
+#include "utils.h"
+
+#include "../../libsvn_wc/wc.h"
+#include "../../libsvn_wc/wc_db.h"
+#include "../../libsvn_wc/wc-queries.h"
+#include "../../libsvn_wc/workqueue.h"
+
+#include "private/svn_wc_private.h"
+
+#include "../svn_test.h"
+
+
+/* Create repos and WC, set *WC_ABSPATH to the WC path, and set *DB to a new
+ * DB context. */
+static svn_error_t *
+create_repos_and_wc(const char **wc_abspath,
+ svn_wc__db_t **db,
+ const char *test_name,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_test__sandbox_t sandbox;
+
+ SVN_ERR(svn_test__sandbox_create(&sandbox, test_name, opts, pool));
+ *wc_abspath = sandbox.wc_abspath;
+ *db = sandbox.wc_ctx->db;
+
+ return SVN_NO_ERROR;
+}
+
+/* Exercise the pristine text API with a simple write and read. */
+static svn_error_t *
+pristine_write_read(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_wc__db_t *db;
+ const char *wc_abspath;
+
+ svn_wc__db_install_data_t *install_data;
+ svn_stream_t *pristine_stream;
+ apr_size_t sz;
+
+ const char data[] = "Blah";
+ svn_string_t *data_string = svn_string_create(data, pool);
+ svn_checksum_t *data_sha1, *data_md5;
+
+ SVN_ERR(create_repos_and_wc(&wc_abspath, &db,
+ "pristine_write_read", opts, pool));
+
+ /* Write DATA into a new temporary pristine file, set INSTALL_DATA to its
+ * installation handle and set DATA_SHA1 and DATA_MD5 to its checksums. */
+ SVN_ERR(svn_wc__db_pristine_prepare_install(&pristine_stream,
+ &install_data,
+ &data_sha1, &data_md5,
+ db, wc_abspath,
+ pool, pool));
+
+ sz = strlen(data);
+ SVN_ERR(svn_stream_write(pristine_stream, data, &sz));
+ SVN_ERR(svn_stream_close(pristine_stream));
+
+ /* Ensure it's not already in the store. */
+ {
+ svn_boolean_t present;
+
+ SVN_ERR(svn_wc__db_pristine_check(&present, db, wc_abspath, data_sha1,
+ pool));
+ SVN_TEST_ASSERT(! present);
+ }
+
+ /* Install the new pristine file, referenced by its checksum. */
+ SVN_ERR(svn_wc__db_pristine_install(install_data,
+ data_sha1, data_md5, pool));
+
+ /* Ensure it is now found in the store. */
+ {
+ svn_boolean_t present;
+
+ SVN_ERR(svn_wc__db_pristine_check(&present, db, wc_abspath, data_sha1,
+ pool));
+ SVN_TEST_ASSERT(present);
+ }
+
+ /* Look up its MD-5 from its SHA-1, and check it's the same MD-5. */
+ {
+ const svn_checksum_t *looked_up_md5;
+
+ SVN_ERR(svn_wc__db_pristine_get_md5(&looked_up_md5, db, wc_abspath,
+ data_sha1, pool, pool));
+ SVN_TEST_ASSERT(looked_up_md5->kind == svn_checksum_md5);
+ SVN_TEST_ASSERT(svn_checksum_match(data_md5, looked_up_md5));
+ }
+
+ /* Read the pristine text back and verify it's the same content. */
+ {
+ svn_stream_t *data_stream = svn_stream_from_string(data_string, pool);
+ svn_stream_t *data_read_back;
+ svn_boolean_t same;
+
+ SVN_ERR(svn_wc__db_pristine_read(&data_read_back, NULL, db, wc_abspath,
+ data_sha1, pool, pool));
+ SVN_ERR(svn_stream_contents_same2(&same, data_read_back, data_stream,
+ pool));
+ SVN_TEST_ASSERT(same);
+ }
+
+ /* Trivially test the "remove if unreferenced" API: it's not referenced
+ so we should be able to remove it. */
+ {
+ svn_error_t *err;
+ svn_stream_t *data_read_back;
+
+ SVN_ERR(svn_wc__db_pristine_remove(db, wc_abspath, data_sha1, pool));
+ err = svn_wc__db_pristine_read(&data_read_back, NULL, db, wc_abspath,
+ data_sha1, pool, pool);
+ SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_PATH_NOT_FOUND);
+ }
+
+ /* Ensure it's no longer found in the store. */
+ {
+ svn_boolean_t present;
+
+ SVN_ERR(svn_wc__db_pristine_check(&present, db, wc_abspath, data_sha1,
+ pool));
+ SVN_TEST_ASSERT(! present);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Test deleting a pristine text while it is open for reading. */
+static svn_error_t *
+pristine_delete_while_open(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_wc__db_t *db;
+ const char *wc_abspath;
+ svn_wc__db_install_data_t *install_data;
+ svn_stream_t *pristine_stream;
+ svn_stream_t *contents;
+ apr_size_t sz;
+
+ const char data[] = "Blah";
+ svn_checksum_t *data_sha1, *data_md5;
+
+ SVN_ERR(create_repos_and_wc(&wc_abspath, &db,
+ "pristine_delete_while_open", opts, pool));
+
+ SVN_ERR(svn_wc__db_pristine_prepare_install(&pristine_stream,
+ &install_data,
+ &data_sha1, &data_md5,
+ db, wc_abspath,
+ pool, pool));
+
+ sz = strlen(data);
+ SVN_ERR(svn_stream_write(pristine_stream, data, &sz));
+ SVN_ERR(svn_stream_close(pristine_stream));
+ SVN_ERR(svn_wc__db_pristine_install(install_data,
+ data_sha1, data_md5, pool));
+
+ /* Open it for reading */
+ SVN_ERR(svn_wc__db_pristine_read(&contents, NULL, db, wc_abspath, data_sha1,
+ pool, pool));
+
+ /* Delete it */
+ SVN_ERR(svn_wc__db_pristine_remove(db, wc_abspath, data_sha1, pool));
+
+ /* Continue to read from it */
+ {
+ char buffer[4];
+ apr_size_t len = 4;
+
+ SVN_ERR(svn_stream_read_full(contents, buffer, &len));
+ SVN_TEST_ASSERT(len == 4);
+ SVN_TEST_ASSERT(memcmp(buffer, data, len) == 0);
+ }
+
+ /* Ensure it's no longer found in the store. (The file may still exist as
+ * an orphan, depending on the implementation.) */
+ {
+ svn_boolean_t present;
+
+ SVN_ERR(svn_wc__db_pristine_check(&present, db, wc_abspath, data_sha1,
+ pool));
+ SVN_TEST_ASSERT(! present);
+ }
+
+ /* Close the read stream */
+ SVN_ERR(svn_stream_close(contents));
+
+ return SVN_NO_ERROR;
+}
+
+/* Check that the store rejects an attempt to replace an existing pristine
+ * text with different text.
+ *
+ * White-box knowledge: The implementation compares the file sizes but
+ * doesn't compare the text itself, so in this test we ensure the second
+ * text is a different size. */
+static svn_error_t *
+reject_mismatching_text(const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+#ifdef SVN_DEBUG /* The pristine store only checks this in debug mode. */
+ svn_wc__db_t *db;
+ const char *wc_abspath;
+
+ const char data[] = "Blah";
+ svn_checksum_t *data_sha1, *data_md5;
+
+ const char data2[] = "Baz";
+
+ SVN_ERR(create_repos_and_wc(&wc_abspath, &db,
+ "reject_mismatching_text", opts, pool));
+
+ /* Install a pristine text. */
+ {
+ svn_wc__db_install_data_t *install_data;
+ svn_stream_t *pristine_stream;
+ apr_size_t sz;
+
+ SVN_ERR(svn_wc__db_pristine_prepare_install(&pristine_stream,
+ &install_data,
+ &data_sha1, &data_md5,
+ db, wc_abspath,
+ pool, pool));
+
+ sz = strlen(data);
+ SVN_ERR(svn_stream_write(pristine_stream, data, &sz));
+ SVN_ERR(svn_stream_close(pristine_stream));
+
+ SVN_ERR(svn_wc__db_pristine_install(install_data,
+ data_sha1, data_md5,
+ pool));
+ }
+
+ /* Try to install the wrong pristine text against the same checksum.
+ * Should fail. */
+ {
+ svn_wc__db_install_data_t *install_data;
+ svn_stream_t *pristine_stream;
+ apr_size_t sz;
+
+ SVN_ERR(svn_wc__db_pristine_prepare_install(&pristine_stream,
+ &install_data,
+ &data_sha1, &data_md5,
+ db, wc_abspath,
+ pool, pool));
+
+ sz = strlen(data2);
+ SVN_ERR(svn_stream_write(pristine_stream, data2, &sz));
+ SVN_ERR(svn_stream_close(pristine_stream));
+
+ SVN_ERR(svn_wc__db_pristine_install(install_data,
+ data_sha1, data_md5,
+ pool));
+ }
+
+ return SVN_NO_ERROR;
+#else
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "The consistency check to be tested is only "
+ "active in debug-mode builds");
+#endif
+}
+
+
+static int max_threads = -1;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_OPTS_PASS(pristine_write_read,
+ "pristine_write_read"),
+ SVN_TEST_OPTS_PASS(pristine_delete_while_open,
+ "pristine_delete_while_open"),
+ SVN_TEST_OPTS_PASS(reject_mismatching_text,
+ "reject_mismatching_text"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_wc/utils.c b/subversion/tests/libsvn_wc/utils.c
new file mode 100644
index 0000000..766f93d
--- /dev/null
+++ b/subversion/tests/libsvn_wc/utils.c
@@ -0,0 +1,707 @@
+/* utils.c --- wc/client test utilities
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "svn_error.h"
+#include "svn_client.h"
+#include "svn_cmdline.h"
+#include "svn_pools.h"
+
+#include "utils.h"
+
+#include "../svn_test_fs.h"
+
+#include "../../libsvn_wc/wc.h"
+#include "../../libsvn_wc/wc-queries.h"
+#define SVN_WC__I_AM_WC_DB
+#include "../../libsvn_wc/wc_db_private.h"
+#include "../../libsvn_wc/token-map.h"
+svn_error_t *
+svn_test__create_client_ctx(svn_client_ctx_t **ctx,
+ svn_test__sandbox_t *sbox,
+ apr_pool_t *result_pool)
+{
+ SVN_ERR(svn_client_create_context2(ctx, NULL, result_pool));
+
+ SVN_ERR(svn_test__init_auth_baton(&(*ctx)->auth_baton,
+ result_pool));
+
+ if (sbox)
+ (*ctx)->wc_ctx = sbox->wc_ctx;
+
+ return SVN_NO_ERROR;
+}
+
+/* Create an empty repository and WC for the test TEST_NAME. Set *REPOS_URL
+ * to the URL of the new repository, *REPOS_DIR to its local path and
+ * *WC_ABSPATH to the root path of the
+ * new WC.
+ *
+ * Create the repository and WC in subdirectories called
+ * REPOSITORIES_WORK_DIR/TEST_NAME and WCS_WORK_DIR/TEST_NAME respectively,
+ * within the current working directory.
+ *
+ * Register the repo and WC to be cleaned up when the test suite exits. */
+static svn_error_t *
+create_repos_and_wc(const char **repos_url,
+ const char **repos_dir,
+ const char **wc_abspath,
+ const char *test_name,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ const char *repos_path = svn_relpath_join(REPOSITORIES_WORK_DIR, test_name,
+ pool);
+ const char *wc_path = svn_relpath_join(WCS_WORK_DIR, test_name, pool);
+
+ /* Remove the repo and WC dirs if they already exist, to ensure the test
+ * will run even if a previous failed attempt was not cleaned up. */
+ SVN_ERR(svn_io_remove_dir2(repos_path, TRUE, NULL, NULL, pool));
+ SVN_ERR(svn_io_remove_dir2(wc_path, TRUE, NULL, NULL, pool));
+
+ /* Create the parent dirs of the repo and WC if necessary. */
+ SVN_ERR(svn_io_make_dir_recursively(REPOSITORIES_WORK_DIR, pool));
+ SVN_ERR(svn_io_make_dir_recursively(WCS_WORK_DIR, pool));
+
+ /* Create a repos. Register it for clean-up. Set *REPOS_URL to its path. */
+ {
+ /* Use a subpool to create the repository and then destroy the subpool
+ so the repository's underlying filesystem is closed. If opts->fs_type
+ is BDB this prevents any attempt to open a second environment handle
+ within the same process when we checkout the WC below. BDB 4.4+ allows
+ only a single environment handle to be open per process. */
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ SVN_ERR(svn_test__create_repos2(NULL, repos_url, repos_dir, repos_path,
+ opts, pool, subpool));
+ svn_pool_destroy(subpool);
+ }
+
+ /* Create a WC. Set *WC_ABSPATH to its path. */
+ {
+ apr_pool_t *subpool = svn_pool_create(pool); /* To cleanup CTX */
+ svn_client_ctx_t *ctx;
+ svn_opt_revision_t head_rev = { svn_opt_revision_head, {0} };
+
+ SVN_ERR(svn_test__create_client_ctx(&ctx, NULL, subpool));
+ SVN_ERR(svn_dirent_get_absolute(wc_abspath, wc_path, pool));
+ SVN_ERR(svn_client_checkout3(NULL, *repos_url, *wc_abspath,
+ &head_rev, &head_rev, svn_depth_infinity,
+ FALSE /* ignore_externals */,
+ FALSE /* allow_unver_obstructions */,
+ ctx, subpool));
+ svn_pool_destroy(subpool);
+ }
+
+ /* Register this WC for cleanup. */
+ svn_test_add_dir_cleanup(*wc_abspath);
+
+ return SVN_NO_ERROR;
+}
+
+WC_QUERIES_SQL_DECLARE_STATEMENTS(statements);
+
+svn_error_t *
+svn_test__create_fake_wc(const char *wc_abspath,
+ const char *extra_statements,
+ const svn_test__nodes_data_t nodes[],
+ const svn_test__actual_data_t actuals[],
+
+ apr_pool_t *scratch_pool)
+{
+ const char *dotsvn_abspath = svn_dirent_join(wc_abspath, ".svn",
+ scratch_pool);
+ svn_sqlite__db_t *sdb;
+ const char **my_statements;
+ int i;
+ svn_sqlite__stmt_t *stmt;
+ const apr_int64_t wc_id = 1;
+
+ /* Allocate MY_STATEMENTS in SCRATCH_POOL because the SDB will continue to
+ * refer to it over its lifetime. */
+ my_statements = apr_palloc(scratch_pool, 7 * sizeof(const char *));
+ my_statements[0] = statements[STMT_CREATE_SCHEMA];
+ my_statements[1] = statements[STMT_INSTALL_SCHEMA_STATISTICS];
+ my_statements[2] = extra_statements;
+ my_statements[3] = NULL;
+
+ /* Create fake-wc/SUBDIR/.svn/ for placing the metadata. */
+ SVN_ERR(svn_io_make_dir_recursively(dotsvn_abspath, scratch_pool));
+ SVN_ERR(svn_wc__db_util_open_db(&sdb, wc_abspath, "wc.db",
+ svn_sqlite__mode_rwcreate,
+ FALSE /* exclusive */, 0 /* timeout */,
+ my_statements,
+ scratch_pool, scratch_pool));
+ for (i = 0; my_statements[i] != NULL; i++)
+ SVN_ERR(svn_sqlite__exec_statements(sdb, /* my_statements[] */ i));
+
+ SVN_ERR(svn_sqlite__close(sdb));
+
+ if (!nodes && !actuals)
+ return SVN_NO_ERROR;
+
+ /* Re-open with normal set of statements */
+ SVN_ERR(svn_wc__db_util_open_db(&sdb, wc_abspath, "wc.db",
+ svn_sqlite__mode_readwrite,
+ FALSE /* exclusive */, 0 /* timeout */,
+ statements,
+ scratch_pool, scratch_pool));
+
+ if (nodes)
+ {
+ SVN_ERR(svn_sqlite__get_statement(&stmt, sdb,
+ STMT_INSERT_NODE));
+
+ for (i = 0; nodes[i].local_relpath; i++)
+ {
+ SVN_ERR(svn_sqlite__bindf(stmt, "isdsnnns",
+ wc_id,
+ nodes[i].local_relpath,
+ nodes[i].op_depth,
+ nodes[i].local_relpath[0]
+ ? svn_relpath_dirname(nodes[i].local_relpath,
+ scratch_pool)
+ : NULL,
+ nodes[i].presence));
+
+ if (nodes[i].repos_relpath)
+ {
+ SVN_ERR(svn_sqlite__bind_int64(stmt, 5, nodes[i].repos_id));
+ SVN_ERR(svn_sqlite__bind_text(stmt, 6, nodes[i].repos_relpath));
+ SVN_ERR(svn_sqlite__bind_revnum(stmt, 7, nodes[i].revision));
+ }
+
+ if (nodes[i].depth)
+ SVN_ERR(svn_sqlite__bind_text(stmt, 9, nodes[i].depth));
+
+ if (nodes[i].kind != 0)
+ SVN_ERR(svn_sqlite__bind_token(stmt, 10, kind_map, nodes[i].kind));
+
+ if (nodes[i].last_author || nodes[i].last_date)
+ {
+ SVN_ERR(svn_sqlite__bind_revnum(stmt, 11, nodes[i].last_revision));
+ SVN_ERR(svn_sqlite__bind_int64(stmt, 12, nodes[i].last_date));
+ SVN_ERR(svn_sqlite__bind_text(stmt, 13, nodes[i].last_author));
+ }
+
+ if (nodes[i].checksum)
+ SVN_ERR(svn_sqlite__bind_text(stmt, 14, nodes[i].checksum));
+
+ if (nodes[i].properties)
+ SVN_ERR(svn_sqlite__bind_text(stmt, 15, nodes[i].properties));
+
+ if (nodes[i].recorded_size || nodes[i].recorded_time)
+ {
+ SVN_ERR(svn_sqlite__bind_int64(stmt, 16, nodes[i].recorded_size));
+ SVN_ERR(svn_sqlite__bind_int64(stmt, 17, nodes[i].recorded_time));
+ }
+
+ /* 18 is DAV cache */
+
+ if (nodes[i].symlink_target)
+ SVN_ERR(svn_sqlite__bind_text(stmt, 19, nodes[i].symlink_target));
+
+ if (nodes[i].file_external)
+ SVN_ERR(svn_sqlite__bind_int(stmt, 20, 1));
+
+ if (nodes[i].moved_to)
+ SVN_ERR(svn_sqlite__bind_text(stmt, 21, nodes[i].moved_to));
+
+ if (nodes[i].moved_here)
+ SVN_ERR(svn_sqlite__bind_int(stmt, 22, 1));
+
+ if (nodes[i].inherited_props)
+ SVN_ERR(svn_sqlite__bind_text(stmt, 23, nodes[i].inherited_props));
+
+ SVN_ERR(svn_sqlite__step_done(stmt));
+ }
+ }
+
+ if (actuals)
+ {
+ SVN_ERR(svn_sqlite__get_statement(&stmt, sdb,
+ STMT_INSERT_ACTUAL_NODE));
+
+ for (i = 0; actuals[i].local_relpath; i++)
+ {
+ SVN_ERR(svn_sqlite__bindf(stmt, "isssss",
+ wc_id,
+ actuals[i].local_relpath,
+ actuals[i].local_relpath[0]
+ ? svn_relpath_dirname(actuals[i].local_relpath,
+ scratch_pool)
+ : NULL,
+ actuals[i].properties,
+ actuals[i].changelist,
+ actuals[i].conflict_data));
+
+ SVN_ERR(svn_sqlite__step_done(stmt));
+ }
+ }
+
+ SVN_ERR(svn_sqlite__close(sdb));
+
+ return SVN_NO_ERROR;
+}
+
+
+svn_error_t *
+svn_test__sandbox_create(svn_test__sandbox_t *sandbox,
+ const char *test_name,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ sandbox->pool = pool;
+ SVN_ERR(create_repos_and_wc(&sandbox->repos_url, &sandbox->repos_dir,
+ &sandbox->wc_abspath,
+ test_name, opts, pool));
+ SVN_ERR(svn_wc_context_create(&sandbox->wc_ctx, NULL, pool, pool));
+ return SVN_NO_ERROR;
+}
+
+svn_error_t *
+sbox_file_write(svn_test__sandbox_t *b, const char *path, const char *text)
+{
+ apr_file_t *f;
+
+ SVN_ERR(svn_io_file_open(&f, sbox_wc_path(b, path),
+ (APR_WRITE | APR_CREATE | APR_TRUNCATE),
+ APR_OS_DEFAULT,
+ b->pool));
+
+ SVN_ERR(svn_io_file_write_full(f, text, strlen(text), NULL, b->pool));
+
+ SVN_ERR(svn_io_file_close(f, b->pool));
+
+ return SVN_NO_ERROR;
+}
+
+svn_error_t *
+sbox_wc_add(svn_test__sandbox_t *b, const char *path)
+{
+ const char *parent_abspath;
+
+ path = sbox_wc_path(b, path);
+ parent_abspath = svn_dirent_dirname(path, b->pool);
+ SVN_ERR(svn_wc__acquire_write_lock(NULL, b->wc_ctx, parent_abspath, FALSE,
+ b->pool, b->pool));
+ SVN_ERR(svn_wc_add_from_disk3(b->wc_ctx, path, NULL /*props*/,
+ FALSE /* skip checks */,
+ NULL, NULL, b->pool));
+ SVN_ERR(svn_wc__release_write_lock(b->wc_ctx, parent_abspath, b->pool));
+ return SVN_NO_ERROR;
+}
+
+svn_error_t *
+sbox_disk_mkdir(svn_test__sandbox_t *b, const char *path)
+{
+ path = sbox_wc_path(b, path);
+ SVN_ERR(svn_io_dir_make(path, APR_FPROT_OS_DEFAULT, b->pool));
+ return SVN_NO_ERROR;
+}
+
+svn_error_t *
+sbox_wc_mkdir(svn_test__sandbox_t *b, const char *path)
+{
+ SVN_ERR(sbox_disk_mkdir(b, path));
+ SVN_ERR(sbox_wc_add(b, path));
+ return SVN_NO_ERROR;
+}
+
+#if 0 /* not used */
+/* Copy the file or directory tree FROM_PATH to TO_PATH which must not exist
+ * beforehand. */
+svn_error_t *
+sbox_disk_copy(svn_test__sandbox_t *b, const char *from_path, const char *to_path)
+{
+ const char *to_dir, *to_name;
+
+ from_path = sbox_wc_path(b, from_path);
+ to_path = sbox_wc_path(b, to_path);
+ svn_dirent_split(&to_dir, &to_name, to_path, b->pool);
+ return svn_io_copy_dir_recursively(from_path, to_dir, to_name,
+ FALSE, NULL, NULL, b->pool);
+}
+#endif
+
+svn_error_t *
+sbox_wc_copy(svn_test__sandbox_t *b, const char *from_path, const char *to_path)
+{
+ const char *parent_abspath;
+
+ from_path = sbox_wc_path(b, from_path);
+ to_path = sbox_wc_path(b, to_path);
+ parent_abspath = svn_dirent_dirname(to_path, b->pool);
+ SVN_ERR(svn_wc__acquire_write_lock(NULL, b->wc_ctx, parent_abspath, FALSE,
+ b->pool, b->pool));
+ SVN_ERR(svn_wc_copy3(b->wc_ctx, from_path, to_path, FALSE,
+ NULL, NULL, NULL, NULL, b->pool));
+ SVN_ERR(svn_wc__release_write_lock(b->wc_ctx, parent_abspath, b->pool));
+ return SVN_NO_ERROR;
+}
+
+svn_error_t *
+sbox_wc_copy_url(svn_test__sandbox_t *b, const char *from_url,
+ svn_revnum_t revision, const char *to_path)
+{
+ apr_pool_t *scratch_pool = b->pool;
+ svn_client_ctx_t *ctx;
+ svn_opt_revision_t rev = { svn_opt_revision_unspecified, {0} };
+ svn_client_copy_source_t* src;
+ apr_array_header_t *sources = apr_array_make(
+ scratch_pool, 1,
+ sizeof(svn_client_copy_source_t *));
+
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, scratch_pool));
+
+ if (SVN_IS_VALID_REVNUM(revision))
+ {
+ rev.kind = svn_opt_revision_number;
+ rev.value.number = revision;
+ }
+
+ src = apr_pcalloc(scratch_pool, sizeof(*src));
+
+ src->path = from_url;
+ src->revision = &rev;
+ src->peg_revision = &rev;
+
+ APR_ARRAY_PUSH(sources, svn_client_copy_source_t *) = src;
+
+ SVN_ERR(svn_client_copy7(sources, sbox_wc_path(b, to_path),
+ FALSE /* copy_as_child */,
+ FALSE /* make_parents */,
+ FALSE /* ignore_externals */,
+ FALSE /* metadata_only */,
+ FALSE, NULL /* pin_external */,
+ NULL /* revprops */,
+ NULL, NULL, /* commit_callback */
+ ctx, scratch_pool));
+
+ ctx->wc_ctx = NULL;
+
+ return SVN_NO_ERROR;
+}
+
+
+svn_error_t *
+sbox_wc_revert(svn_test__sandbox_t *b, const char *path, svn_depth_t depth)
+{
+ const char *abspath = sbox_wc_path(b, path);
+ const char *dir_abspath;
+ const char *lock_root_abspath;
+
+ if (strcmp(abspath, b->wc_abspath))
+ dir_abspath = svn_dirent_dirname(abspath, b->pool);
+ else
+ dir_abspath = abspath;
+
+ SVN_ERR(svn_wc__acquire_write_lock(&lock_root_abspath, b->wc_ctx,
+ dir_abspath, FALSE /* lock_anchor */,
+ b->pool, b->pool));
+ SVN_ERR(svn_wc_revert5(b->wc_ctx, abspath, depth,
+ FALSE /* use_commit_times */,
+ NULL /* changelist_filter */,
+ FALSE /* clear_changelists */,
+ FALSE /* metadata_only */,
+ NULL, NULL, /* cancel baton + func */
+ NULL, NULL, /* notify baton + func */
+ b->pool));
+ SVN_ERR(svn_wc__release_write_lock(b->wc_ctx, lock_root_abspath, b->pool));
+ return SVN_NO_ERROR;
+}
+
+svn_error_t *
+sbox_wc_delete(svn_test__sandbox_t *b, const char *path)
+{
+ const char *abspath = sbox_wc_path(b, path);
+ const char *dir_abspath = svn_dirent_dirname(abspath, b->pool);
+ const char *lock_root_abspath;
+
+ SVN_ERR(svn_wc__acquire_write_lock(&lock_root_abspath, b->wc_ctx,
+ dir_abspath, FALSE,
+ b->pool, b->pool));
+ SVN_ERR(svn_wc_delete4(b->wc_ctx, abspath, FALSE, TRUE,
+ NULL, NULL, /* cancel baton + func */
+ NULL, NULL, /* notify baton + func */
+ b->pool));
+ SVN_ERR(svn_wc__release_write_lock(b->wc_ctx, lock_root_abspath, b->pool));
+ return SVN_NO_ERROR;
+}
+
+svn_error_t *
+sbox_wc_exclude(svn_test__sandbox_t *b, const char *path)
+{
+ const char *abspath = sbox_wc_path(b, path);
+ const char *lock_root_abspath;
+
+ SVN_ERR(svn_wc__acquire_write_lock(&lock_root_abspath, b->wc_ctx,
+ abspath, TRUE,
+ b->pool, b->pool));
+ SVN_ERR(svn_wc_exclude(b->wc_ctx, abspath,
+ NULL, NULL, /* cancel baton + func */
+ NULL, NULL, /* notify baton + func */
+ b->pool));
+ SVN_ERR(svn_wc__release_write_lock(b->wc_ctx, lock_root_abspath, b->pool));
+ return SVN_NO_ERROR;
+}
+
+svn_error_t *
+sbox_wc_commit_ex(svn_test__sandbox_t *b,
+ apr_array_header_t *targets,
+ svn_depth_t depth)
+{
+ svn_client_ctx_t *ctx;
+ apr_pool_t *scratch_pool = svn_pool_create(b->pool);
+ svn_error_t *err;
+
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, scratch_pool));
+
+ /* A successful commit doesn't close the ra session, but leaves that
+ to the caller. This leaves the BDB handle open, which might cause
+ problems in further test code. (op_depth_tests.c's repo_wc_copy) */
+ err = svn_client_commit6(targets, depth,
+ FALSE /* keep_locks */,
+ FALSE /* keep_changelist */,
+ TRUE /* commit_as_operations */,
+ TRUE /* include_file_externals */,
+ FALSE /* include_dir_externals */,
+ NULL, NULL, NULL, NULL, ctx, scratch_pool);
+
+ svn_pool_destroy(scratch_pool);
+
+ return svn_error_trace(err);
+}
+
+svn_error_t *
+sbox_wc_commit(svn_test__sandbox_t *b, const char *path)
+{
+ apr_array_header_t *targets = apr_array_make(b->pool, 1,
+ sizeof(const char *));
+
+ APR_ARRAY_PUSH(targets, const char *) = sbox_wc_path(b, path);
+ return sbox_wc_commit_ex(b, targets, svn_depth_infinity);
+}
+
+svn_error_t *
+sbox_wc_update_depth(svn_test__sandbox_t *b,
+ const char *path,
+ svn_revnum_t revnum,
+ svn_depth_t depth,
+ svn_boolean_t sticky)
+{
+ svn_client_ctx_t *ctx;
+ apr_array_header_t *result_revs;
+ apr_array_header_t *paths = apr_array_make(b->pool, 1,
+ sizeof(const char *));
+ svn_opt_revision_t revision;
+
+ if (SVN_IS_VALID_REVNUM(revnum))
+ {
+ revision.kind = svn_opt_revision_number;
+ revision.value.number = revnum;
+ }
+ else
+ {
+ revision.kind = svn_opt_revision_head;
+ }
+
+ APR_ARRAY_PUSH(paths, const char *) = sbox_wc_path(b, path);
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+
+ /* Note: Tree conflict resolver tests for libsvn_client depend on this
+ * passing FALSE for adds_as_modifications so that tree conflicts are
+ * created in case of add vs add upon update. */
+ return svn_client_update4(&result_revs, paths, &revision, depth,
+ sticky, FALSE, FALSE, FALSE, FALSE,
+ ctx, b->pool);
+}
+
+svn_error_t *
+sbox_wc_update(svn_test__sandbox_t *b, const char *path, svn_revnum_t revnum)
+{
+ SVN_ERR(sbox_wc_update_depth(b, path, revnum, svn_depth_unknown, FALSE));
+ return SVN_NO_ERROR;
+}
+
+svn_error_t *
+sbox_wc_switch(svn_test__sandbox_t *b,
+ const char *path,
+ const char *url,
+ svn_depth_t depth)
+{
+ svn_client_ctx_t *ctx;
+ svn_revnum_t result_rev;
+ svn_opt_revision_t head_rev = { svn_opt_revision_head, {0} };
+
+ url = apr_pstrcat(b->pool, b->repos_url, url, SVN_VA_NULL);
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+
+ return svn_client_switch3(&result_rev, sbox_wc_path(b, path), url,
+ &head_rev, &head_rev, depth,
+ FALSE /* depth_is_sticky */,
+ TRUE /* ignore_externals */,
+ FALSE /* allow_unver_obstructions */,
+ TRUE /* ignore_ancestry */,
+ ctx, b->pool);
+}
+
+svn_error_t *
+sbox_wc_resolved(svn_test__sandbox_t *b, const char *path)
+{
+ return sbox_wc_resolve(b, path, svn_depth_infinity,
+ svn_wc_conflict_choose_merged);
+}
+
+svn_error_t *
+sbox_wc_resolve(svn_test__sandbox_t *b, const char *path, svn_depth_t depth,
+ svn_wc_conflict_choice_t conflict_choice)
+{
+ const char *lock_abspath;
+ svn_error_t *err;
+
+ SVN_ERR(svn_wc__acquire_write_lock_for_resolve(&lock_abspath, b->wc_ctx,
+ sbox_wc_path(b, path),
+ b->pool, b->pool));
+ err = svn_wc__resolve_conflicts(b->wc_ctx, sbox_wc_path(b, path),
+ depth,
+ TRUE /* resolve_text */,
+ "" /* resolve_prop (ALL props) */,
+ TRUE /* resolve_tree */,
+ conflict_choice,
+ NULL, NULL, /* conflict func */
+ NULL, NULL, /* cancellation */
+ NULL, NULL, /* notification */
+ b->pool);
+
+ err = svn_error_compose_create(err, svn_wc__release_write_lock(b->wc_ctx,
+ lock_abspath,
+ b->pool));
+ return err;
+}
+
+svn_error_t *
+sbox_wc_resolve_prop(svn_test__sandbox_t *b, const char *path,
+ const char *propname,
+ svn_wc_conflict_choice_t conflict_choice)
+{
+ const char *lock_abspath;
+ svn_error_t *err;
+
+ SVN_ERR(svn_wc__acquire_write_lock_for_resolve(&lock_abspath, b->wc_ctx,
+ sbox_wc_path(b, path),
+ b->pool, b->pool));
+ err = svn_wc__resolve_conflicts(b->wc_ctx, sbox_wc_path(b, path),
+ svn_depth_empty,
+ FALSE,
+ propname,
+ FALSE,
+ conflict_choice,
+ NULL, NULL, /* conflict func */
+ NULL, NULL, /* cancellation */
+ NULL, NULL, /* notification */
+ b->pool);
+
+ err = svn_error_compose_create(err, svn_wc__release_write_lock(b->wc_ctx,
+ lock_abspath,
+ b->pool));
+ return err;
+}
+
+
+svn_error_t *
+sbox_wc_move(svn_test__sandbox_t *b, const char *src, const char *dst)
+{
+ svn_client_ctx_t *ctx;
+ apr_array_header_t *paths = apr_array_make(b->pool, 1,
+ sizeof(const char *));
+
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+
+ APR_ARRAY_PUSH(paths, const char *) = sbox_wc_path(b, src);
+ return svn_client_move7(paths, sbox_wc_path(b, dst),
+ FALSE /* move_as_child */,
+ FALSE /* make_parents */,
+ TRUE /* allow_mixed_revisions */,
+ FALSE /* metadata_only */,
+ NULL /* revprop_table */,
+ NULL, NULL, /* commit callback */
+ ctx, b->pool);
+}
+
+svn_error_t *
+sbox_wc_propset(svn_test__sandbox_t *b,
+ const char *name,
+ const char *value,
+ const char *path)
+{
+ svn_client_ctx_t *ctx;
+ apr_array_header_t *paths = apr_array_make(b->pool, 1,
+ sizeof(const char *));
+ svn_string_t *pval = value ? svn_string_create(value, b->pool) : NULL;
+
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool));
+
+ APR_ARRAY_PUSH(paths, const char *) = sbox_wc_path(b, path);
+ return svn_client_propset_local(name, pval, paths, svn_depth_empty,
+ TRUE /* skip_checks */,
+ NULL, ctx, b->pool);
+}
+
+svn_error_t *
+sbox_wc_relocate(svn_test__sandbox_t *b,
+ const char *new_repos_url)
+{
+ apr_pool_t *scratch_pool = b->pool;
+ svn_client_ctx_t *ctx;
+
+ SVN_ERR(svn_test__create_client_ctx(&ctx, b, scratch_pool));
+
+ SVN_ERR(svn_client_relocate2(b->wc_abspath, b->repos_url,
+ new_repos_url, FALSE, ctx,scratch_pool));
+
+ b->repos_url = apr_pstrdup(b->pool, new_repos_url);
+
+ return SVN_NO_ERROR;
+}
+
+svn_error_t *
+sbox_add_and_commit_greek_tree(svn_test__sandbox_t *b)
+{
+ const struct svn_test__tree_entry_t *node;
+
+ for (node = svn_test__greek_tree_nodes; node->path; node++)
+ {
+ if (node->contents)
+ {
+ SVN_ERR(sbox_file_write(b, node->path, node->contents));
+ SVN_ERR(sbox_wc_add(b, node->path));
+ }
+ else
+ {
+ SVN_ERR(sbox_wc_mkdir(b, node->path));
+ }
+ }
+
+ SVN_ERR(sbox_wc_commit(b, ""));
+
+ return SVN_NO_ERROR;
+}
diff --git a/subversion/tests/libsvn_wc/utils.h b/subversion/tests/libsvn_wc/utils.h
new file mode 100644
index 0000000..260139d
--- /dev/null
+++ b/subversion/tests/libsvn_wc/utils.h
@@ -0,0 +1,244 @@
+/* utils.h --- wc/client test utilities
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#ifndef SVN_TEST_UTILS_H
+#define SVN_TEST_UTILS_H
+
+#include <apr_pools.h>
+#include "svn_error.h"
+#include "svn_client.h"
+
+#include "../svn_test.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+/*-------------------------------------------------------------------*/
+
+/** Helper routines for creating repositories and WCs. **/
+
+
+#define REPOSITORIES_WORK_DIR "svn-test-work/repositories"
+#define WCS_WORK_DIR "svn-test-work/working-copies"
+
+
+/* The "sandbox" is a work space including a working copy and a repository.
+ * Functions are provided for easy manipulation of the WC. Paths given to
+ * these functions can be relative to the WC root as stored in the sandbox
+ * object, or can be absolute paths. */
+
+/* An object holding the state of a test sandbox. */
+typedef struct svn_test__sandbox_t
+{
+ /* The WC context object. */
+ svn_wc_context_t *wc_ctx;
+ /* The repository URL. */
+ const char *repos_url;
+ /* Local path to the repository */
+ const char *repos_dir;
+ /* The absolute local path of the WC root. */
+ const char *wc_abspath;
+ /* A pool that can be used for all allocations. */
+ apr_pool_t *pool;
+} svn_test__sandbox_t;
+
+
+/* Create an empty repository and WC for the test TEST_NAME. Fill in
+ * *SANDBOX with all the details.
+ *
+ * Create the repository and WC in subdirectories called
+ * REPOSITORIES_WORK_DIR/TEST_NAME and WCS_WORK_DIR/TEST_NAME respectively,
+ * within the current working directory.
+ *
+ * Register the repo and WC to be cleaned up when the test suite exits. */
+svn_error_t *
+svn_test__sandbox_create(svn_test__sandbox_t *sandbox,
+ const char *test_name,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool);
+
+/* ---------------------------------------------------------------------- */
+/* Functions for easy manipulation of a WC. Paths given to these functions
+ * can be relative to the WC root as stored in the WC baton. */
+
+/* Return the abspath of PATH which is absolute or relative to the WC in B. */
+#define sbox_wc_path(b, path) \
+ (svn_dirent_join((b)->wc_abspath, (path), (b)->pool))
+
+/* Create a file on disk at PATH, with TEXT as its content. */
+svn_error_t *
+sbox_file_write(svn_test__sandbox_t *b, const char *path, const char *text);
+
+/* Schedule for addition the single node that exists on disk at PATH,
+ * non-recursively. */
+svn_error_t *
+sbox_wc_add(svn_test__sandbox_t *b, const char *path);
+
+/* Create a single directory on disk. */
+svn_error_t *
+sbox_disk_mkdir(svn_test__sandbox_t *b, const char *path);
+
+/* Create a single directory on disk and schedule it for addition. */
+svn_error_t *
+sbox_wc_mkdir(svn_test__sandbox_t *b, const char *path);
+
+/* Copy the WC file or directory tree FROM_PATH to TO_PATH which must not
+ * exist beforehand. */
+svn_error_t *
+sbox_wc_copy(svn_test__sandbox_t *b, const char *from_path, const char *to_path);
+
+svn_error_t *
+sbox_wc_copy_url(svn_test__sandbox_t *b, const char *from_url,
+ svn_revnum_t revision, const char *to_path);
+
+svn_error_t *
+sbox_wc_relocate(svn_test__sandbox_t *b,
+ const char *new_repos_url);
+
+/* Revert a WC file or directory tree at PATH */
+svn_error_t *
+sbox_wc_revert(svn_test__sandbox_t *b, const char *path, svn_depth_t depth);
+
+/* */
+svn_error_t *
+sbox_wc_delete(svn_test__sandbox_t *b, const char *path);
+
+/* */
+svn_error_t *
+sbox_wc_exclude(svn_test__sandbox_t *b, const char *path);
+
+/* */
+svn_error_t *
+sbox_wc_commit(svn_test__sandbox_t *b, const char *path);
+
+/* */
+svn_error_t *
+sbox_wc_commit_ex(svn_test__sandbox_t *b,
+ apr_array_header_t *targets,
+ svn_depth_t depth);
+
+/* */
+svn_error_t *
+sbox_wc_update(svn_test__sandbox_t *b, const char *path, svn_revnum_t revnum);
+
+svn_error_t *
+sbox_wc_update_depth(svn_test__sandbox_t *b,
+ const char *path,
+ svn_revnum_t revnum,
+ svn_depth_t depth,
+ svn_boolean_t sticky);
+
+svn_error_t *
+sbox_wc_switch(svn_test__sandbox_t *b,
+ const char *path,
+ const char *url,
+ svn_depth_t depth);
+
+/* */
+svn_error_t *
+sbox_wc_resolved(svn_test__sandbox_t *b, const char *path);
+
+/* */
+svn_error_t *
+sbox_wc_resolve(svn_test__sandbox_t *b, const char *path, svn_depth_t depth,
+ svn_wc_conflict_choice_t conflict_choice);
+
+/* */
+svn_error_t *
+sbox_wc_resolve_prop(svn_test__sandbox_t *b, const char *path,
+ const char *propname,
+ svn_wc_conflict_choice_t conflict_choice);
+
+/* */
+svn_error_t *
+sbox_wc_move(svn_test__sandbox_t *b, const char *src, const char *dst);
+
+/* Set property NAME to VALUE on PATH. If VALUE=NULL, delete the property. */
+svn_error_t *
+sbox_wc_propset(svn_test__sandbox_t *b,
+ const char *name,
+ const char *value,
+ const char *path);
+
+/* Create the Greek tree on disk in the WC, and commit it. */
+svn_error_t *
+sbox_add_and_commit_greek_tree(svn_test__sandbox_t *b);
+
+/* Initial data to store in NODES */
+typedef struct svn_test__nodes_data_t
+{
+ int op_depth;
+ const char *local_relpath;
+ const char *presence;
+ int repos_id;
+ const char *repos_relpath;
+ svn_revnum_t revision;
+ svn_boolean_t moved_here;
+ const char *moved_to;
+ svn_node_kind_t kind;
+ const char *properties;
+ const char *depth;
+ const char *checksum;
+ const char *symlink_target;
+ svn_revnum_t last_revision;
+ apr_time_t last_date;
+ const char *last_author;
+ svn_boolean_t file_external;
+ const char *inherited_props;
+ svn_filesize_t recorded_size;
+ apr_time_t recorded_time;
+} svn_test__nodes_data_t;
+
+/* Initial data to store in ACTUAL */
+typedef struct svn_test__actual_data_t
+{
+ const char *local_relpath;
+ const char *properties;
+ const char *changelist;
+ const char *conflict_data;
+} svn_test__actual_data_t;
+
+/* Create a WC directory at WC_ABSPATH containing a fake WC DB, generated by
+ * executing the SQL statements EXTRA_STATEMENTS in addition to the standard
+ * WC DB schema. */
+svn_error_t *
+svn_test__create_fake_wc(const char *wc_abspath,
+ const char *extra_statements,
+ const svn_test__nodes_data_t nodes[],
+ const svn_test__actual_data_t actuals[],
+ apr_pool_t *scratch_pool);
+
+
+/* Create a client context for the specified sandbox */
+svn_error_t *
+svn_test__create_client_ctx(svn_client_ctx_t **ctx,
+ svn_test__sandbox_t *sbox,
+ apr_pool_t *result_pool);
+
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+#endif /* SVN_TEST_UTILS_H */
diff --git a/subversion/tests/libsvn_wc/wc-incomplete-tester.c b/subversion/tests/libsvn_wc/wc-incomplete-tester.c
new file mode 100644
index 0000000..2aac605
--- /dev/null
+++ b/subversion/tests/libsvn_wc/wc-incomplete-tester.c
@@ -0,0 +1,97 @@
+/*
+ * wc-incomplete-tester.c : mark a directory incomplete at a given revision
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "svn_pools.h"
+#include "svn_types.h"
+#include "svn_path.h"
+#include "../../libsvn_wc/wc.h"
+#include "../../libsvn_wc/wc_db.h"
+#include <stdio.h>
+#include <stdlib.h>
+
+static svn_error_t *
+incomplete(const char *wc_path,
+ const char *rev_str,
+ const char *repos_relpath,
+ apr_pool_t *pool)
+{
+ svn_wc_context_t *wc_ctx;
+ const char *local_abspath;
+ apr_int64_t revnum;
+
+ SVN_ERR(svn_wc_context_create(&wc_ctx, NULL, pool, pool));
+
+ SVN_ERR(svn_path_cstring_to_utf8(&wc_path, wc_path, pool));
+ wc_path = svn_dirent_canonicalize(wc_path, pool);
+ SVN_ERR(svn_dirent_get_absolute(&local_abspath, wc_path, pool));
+
+ SVN_ERR(svn_cstring_atoi64(&revnum, rev_str));
+
+ if (repos_relpath)
+ repos_relpath = svn_relpath_canonicalize(repos_relpath, pool);
+ else
+ SVN_ERR(svn_wc__db_read_info(NULL, NULL, NULL,
+ &repos_relpath,
+ NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
+ NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
+ NULL, NULL, NULL, NULL, NULL, NULL, NULL,
+ wc_ctx->db, local_abspath, pool, pool));
+
+ SVN_ERR(svn_wc__db_temp_op_start_directory_update(wc_ctx->db,
+ local_abspath,
+ repos_relpath,
+ (svn_revnum_t)revnum,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+int main(int argc, const char *argv[])
+{
+ apr_pool_t *pool;
+ svn_error_t *err;
+
+ if (argc != 3 && argc != 4)
+ {
+ fprintf(stderr,
+ "Usage: wc-incomplete-tester WCPATH REVISION [REPOS_RELPATH]\n"
+ "Mark WCPATH incomplete at REVISION [and REPOS_RELPATH]\n");
+ exit(EXIT_FAILURE);
+ }
+
+ if (apr_initialize())
+ {
+ fprintf(stderr, "apr_initialize failed\n");
+ exit(EXIT_FAILURE);
+ }
+ pool = svn_pool_create(NULL);
+
+ err = incomplete(argv[1], argv[2], (argc == 4 ? argv[3] : NULL), pool);
+ if (err)
+ svn_handle_error2(err, stderr, TRUE, "wc-incomplete-tester: ");
+
+ svn_pool_destroy(pool);
+ apr_terminate();
+
+ return EXIT_SUCCESS;
+}
diff --git a/subversion/tests/libsvn_wc/wc-lock-tester.c b/subversion/tests/libsvn_wc/wc-lock-tester.c
new file mode 100644
index 0000000..1daee66
--- /dev/null
+++ b/subversion/tests/libsvn_wc/wc-lock-tester.c
@@ -0,0 +1,136 @@
+/*
+ * wc-lock-tester.c : wrapper around svn_wc__acquire_write_lock()
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdlib.h>
+#include <stdio.h>
+
+#include "svn_types.h"
+#include "svn_pools.h"
+
+#include "svn_cmdline.h"
+#include "svn_dirent_uri.h"
+#include "svn_path.h"
+#include "svn_wc.h"
+
+#include "private/svn_wc_private.h"
+#include "../../libsvn_wc/wc.h"
+#include "../../libsvn_wc/wc_db.h"
+#include "../../libsvn_wc/workqueue.h"
+
+#include "svn_private_config.h"
+
+#define USAGE_MSG \
+ "Usage: %s [-1|-r|-w] DIRNAME\n" \
+ "\n" \
+ "Locks one directory (-1), or a tree recursively (-r), or locks\n" \
+ "recursively and creates an outstanding work queue item (-w)\n"
+
+static svn_error_t *
+obtain_lock(const char *path, svn_boolean_t recursive,
+ svn_boolean_t populate_work_queue,
+ apr_pool_t *scratch_pool)
+{
+ const char *local_abspath;
+ svn_wc_context_t *wc_ctx;
+
+ SVN_ERR(svn_path_cstring_to_utf8(&path, path, scratch_pool));
+ SVN_ERR(svn_dirent_get_absolute(&local_abspath, path, scratch_pool));
+ SVN_ERR(svn_wc_context_create(&wc_ctx, NULL, scratch_pool, scratch_pool));
+
+ if (recursive)
+ {
+ /* The WC-NG way */
+ SVN_ERR(svn_wc__acquire_write_lock(NULL, wc_ctx, local_abspath, FALSE,
+ scratch_pool, scratch_pool));
+ }
+ else
+ {
+ SVN_ERR(svn_wc__db_wclock_obtain(wc_ctx->db, local_abspath, 0, FALSE,
+ scratch_pool));
+ }
+
+ if (populate_work_queue)
+ {
+ svn_skel_t *work_item;
+
+ /* Add an arbitrary work item to the work queue for DB, but don't
+ * run the work queue. */
+ SVN_ERR(svn_wc__wq_build_sync_file_flags(&work_item, wc_ctx->db,
+ local_abspath, scratch_pool,
+ scratch_pool));
+ SVN_ERR(svn_wc__db_wq_add(wc_ctx->db, local_abspath, work_item,
+ scratch_pool));
+ }
+
+ SVN_ERR(svn_cmdline_printf(scratch_pool, "Lock on '%s' obtained, and we "
+ "are not going to release it.\n",
+ svn_dirent_local_style(local_abspath,
+ scratch_pool)));
+
+ return SVN_NO_ERROR;
+}
+
+int
+main(int argc, const char *argv[])
+{
+ apr_pool_t *pool;
+ int exit_code = EXIT_SUCCESS;
+ svn_error_t *err;
+ svn_boolean_t recursive;
+ svn_boolean_t populate_work_queue;
+
+ if (argc != 3
+ || (strcmp(argv[1], "-1") && apr_strnatcmp(argv[1], "-r") &&
+ apr_strnatcmp(argv[1], "-w")))
+ {
+ fprintf(stderr, USAGE_MSG, argv[0]);
+ exit(EXIT_FAILURE);
+ }
+
+ if (apr_initialize() != APR_SUCCESS)
+ {
+ fprintf(stderr, "apr_initialize() failed.\n");
+ exit(1);
+ }
+
+ /* set up the global pool */
+ pool = svn_pool_create(NULL);
+
+ populate_work_queue = (strcmp(argv[1], "-w") == 0);
+ recursive = ((strcmp(argv[1], "-1") != 0) || populate_work_queue);
+
+ err = obtain_lock(argv[2], recursive, populate_work_queue, pool);
+
+ if (err)
+ {
+ svn_handle_error2(err, stderr, FALSE, "wc-lock-tester: ");
+ svn_error_clear(err);
+ exit_code = EXIT_FAILURE;
+ }
+
+ /* Clean up, and get outta here */
+ svn_pool_destroy(pool);
+ apr_terminate();
+
+ return exit_code;
+}
diff --git a/subversion/tests/libsvn_wc/wc-queries-test.c b/subversion/tests/libsvn_wc/wc-queries-test.c
new file mode 100644
index 0000000..0a828c5
--- /dev/null
+++ b/subversion/tests/libsvn_wc/wc-queries-test.c
@@ -0,0 +1,1068 @@
+/*
+ * wc-queries-test.c -- test the evaluation of the wc Sqlite queries
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "svn_pools.h"
+#include "svn_hash.h"
+#include "svn_ctype.h"
+#include "private/svn_dep_compat.h"
+
+#include "svn_private_config.h"
+
+#include "../svn_test.h"
+
+#ifdef SVN_SQLITE_INLINE
+/* Import the sqlite3 API vtable from sqlite3wrapper.c */
+# define SQLITE_OMIT_DEPRECATED
+# include <sqlite3ext.h>
+extern const sqlite3_api_routines *const svn_sqlite3__api_funcs;
+extern int (*const svn_sqlite3__api_initialize)(void);
+extern int (*const svn_sqlite3__api_config)(int, ...);
+# define sqlite3_api svn_sqlite3__api_funcs
+# define sqlite3_initialize svn_sqlite3__api_initialize
+# define sqlite3_config svn_sqlite3__api_config
+#else
+# include <sqlite3.h>
+#endif
+
+#include "../../libsvn_wc/wc-queries.h"
+
+WC_QUERIES_SQL_DECLARE_STATEMENTS(wc_queries);
+WC_QUERIES_SQL_DECLARE_STATEMENT_INFO(wc_query_info);
+
+/* The first query after the normal wc queries */
+#define STMT_SCHEMA_FIRST STMT_CREATE_SCHEMA
+
+#define SQLITE_ERR(x) do \
+{ \
+ int sqlite_err__temp = (x); \
+ if (sqlite_err__temp != SQLITE_OK) \
+ return svn_error_createf(SVN_ERR_SQLITE_ERROR, \
+ NULL, "sqlite: %s", \
+ sqlite3_errmsg(sdb)); \
+} while (0)
+
+/* Schema creation statements fail during preparing when the table
+ already exists, and must be evaluated before testing the
+ queries. Statements above STMT_SCHEMA_FIRST only need to be
+ included here when they need to be evaluated before testing the
+ statements */
+static const int schema_statements[] =
+{
+ /* Usual tables */
+ STMT_CREATE_SCHEMA,
+ STMT_INSTALL_SCHEMA_STATISTICS,
+ /* Memory tables */
+ STMT_CREATE_TARGETS_LIST,
+ STMT_CREATE_CHANGELIST_LIST,
+ STMT_CREATE_CHANGELIST_TRIGGER,
+ STMT_CREATE_TARGET_PROP_CACHE,
+ STMT_CREATE_REVERT_LIST,
+ STMT_CREATE_DELETE_LIST,
+ STMT_CREATE_UPDATE_MOVE_LIST,
+ -1 /* final marker */
+};
+
+/* These statements currently trigger warnings. It would be nice if
+ we could annotate these in wc-queries.sql */
+static const int slow_statements[] =
+{
+ /* Operate on the entire WC */
+ STMT_SELECT_ALL_NODES, /* schema validation code */
+
+ /* Updates all records for a repository (designed slow) */
+ STMT_UPDATE_LOCK_REPOS_ID,
+
+ /* Full temporary table read */
+ STMT_INSERT_ACTUAL_EMPTIES,
+ STMT_INSERT_ACTUAL_EMPTIES_FILES,
+ STMT_SELECT_REVERT_LIST_RECURSIVE,
+ STMT_SELECT_DELETE_LIST,
+ STMT_SELECT_UPDATE_MOVE_LIST,
+ STMT_FIND_REPOS_PATH_IN_WC,
+
+ /* Designed as slow to avoid penalty on other queries */
+ STMT_SELECT_UNREFERENCED_PRISTINES,
+
+ /* Slow, but just if foreign keys are enabled:
+ * STMT_DELETE_PRISTINE_IF_UNREFERENCED,
+ */
+ STMT_HAVE_STAT1_TABLE, /* Queries sqlite_master which has no index */
+
+ -1 /* final marker */
+};
+
+/* Statements that just read the first record from a table,
+ using the primary key. Specialized as different sqlite
+ versions produce different results */
+static const int primary_key_statements[] =
+{
+ /* Is there a record? ### Can we somehow check for LIMIT 1,
+ and primary key instead of adding a list? */
+ STMT_LOOK_FOR_WORK,
+ STMT_SELECT_WORK_ITEM,
+
+ -1 /* final marker */
+};
+
+/* Helper function to determine if a statement is in a list */
+static svn_boolean_t
+in_list(const int list[], int stmt_idx)
+{
+ int i;
+
+ for (i = 0; list[i] != -1; i++)
+ {
+ if (list[i] == stmt_idx)
+ return TRUE;
+ }
+ return FALSE;
+}
+
+/* Helpers to determine if a statement is in a common list */
+#define is_slow_statement(stmt_idx) in_list(slow_statements, stmt_idx)
+#define is_schema_statement(stmt_idx) \
+ ((stmt_idx >= STMT_SCHEMA_FIRST) || in_list(schema_statements, stmt_idx))
+
+
+/* Create an in-memory db for evaluating queries */
+static svn_error_t *
+create_memory_db(sqlite3 **db,
+ apr_pool_t *pool)
+{
+ sqlite3 *sdb;
+ int i;
+
+ /* Create an in-memory raw database */
+ SVN_TEST_ASSERT(sqlite3_initialize() == SQLITE_OK);
+ SQLITE_ERR(sqlite3_open_v2("", &sdb,
+ SQLITE_OPEN_READWRITE | SQLITE_OPEN_CREATE,
+ NULL));
+
+ /* Create schema */
+ for (i = 0; schema_statements[i] != -1; i++)
+ {
+ SQLITE_ERR(sqlite3_exec(sdb, wc_queries[schema_statements[i]], NULL, NULL, NULL));
+ }
+
+ *db = sdb;
+ return SVN_NO_ERROR;
+}
+
+/* Verify sqlite3 runtime version */
+static svn_error_t *
+test_sqlite_version(apr_pool_t *scratch_pool)
+{
+ printf("DBG: Using Sqlite %s\n", sqlite3_libversion());
+
+ if (sqlite3_libversion_number() != SQLITE_VERSION_NUMBER)
+ printf("DBG: Compiled against Sqlite %s\n", SQLITE_VERSION);
+
+ if (sqlite3_libversion_number() < SQLITE_VERSION_NUMBER)
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Compiled against Sqlite %s (at runtime we have Sqlite %s)",
+ SQLITE_VERSION, sqlite3_libversion());
+
+#if !SQLITE_VERSION_AT_LEAST(3, 7, 9)
+ return svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Sqlite upgrade recommended:\n"
+ "****************************************************************\n"
+ "* Subversion needs at least SQLite 3.7.9 to work optimally *\n"
+ "* *\n"
+ "* With older versions, at least some queries that are expected *\n"
+ "* to be using an index are not. This makes some operations use *\n"
+ "* every node in the working copy instead of just one. *\n"
+ "* *\n"
+ "* While Subversion works correctly in this case, you may see *\n"
+ "* slowdowns of WELL MORE THAN 1000* in some cases! *\n"
+ "* *\n"
+ "* *\n"
+ "* SQLITE UPGRADE RECOMMENDED *\n"
+ "****************************************************************\n");
+#else
+ return SVN_NO_ERROR;
+#endif
+}
+
+/* Parse all normal queries */
+static svn_error_t *
+test_parsable(apr_pool_t *scratch_pool)
+{
+ sqlite3 *sdb;
+ int i;
+
+ SVN_ERR(create_memory_db(&sdb, scratch_pool));
+
+ for (i=0; i < STMT_SCHEMA_FIRST; i++)
+ {
+ sqlite3_stmt *stmt;
+ const char *text = wc_queries[i];
+
+ if (is_schema_statement(i))
+ continue;
+
+ /* Some of our statement texts contain multiple queries. We prepare
+ them all. */
+ while (*text != '\0')
+ {
+ const char *tail;
+ int r = sqlite3_prepare_v2(sdb, text, -1, &stmt, &tail);
+
+ if (r != SQLITE_OK)
+ return svn_error_createf(SVN_ERR_SQLITE_ERROR, NULL,
+ "Preparing %s failed: %s\n%s",
+ wc_query_info[i][0],
+ sqlite3_errmsg(sdb),
+ text);
+
+ SQLITE_ERR(sqlite3_finalize(stmt));
+
+ /* Continue after the current statement */
+ text = tail;
+ }
+ }
+
+ SQLITE_ERR(sqlite3_close(sdb)); /* Close the DB if ok; otherwise leaked */
+
+ return SVN_NO_ERROR;
+}
+
+/* Contains a parsed record from EXPLAIN QUERY PLAN */
+struct explanation_item
+{
+ const char *operation;
+ const char *table;
+ const char *alias;
+ svn_boolean_t scan;
+ svn_boolean_t search;
+ svn_boolean_t covered_by_index;
+ svn_boolean_t primary_key;
+ svn_boolean_t automatic_index;
+ const char *index;
+ const char *expressions;
+ const char *expected;
+
+ const char *compound_left;
+ const char *compound_right;
+ svn_boolean_t create_btree;
+
+ int expression_vars;
+ int expected_rows;
+};
+
+#define MATCH_TOKEN(x, y) (x && (strcmp(x, y) == 0))
+
+/* Simple parser for the Sqlite textual explanation into an explanation_item.
+ Writes "DBG:" lines when sqlite produces unexpected results. When no
+ valid explanation_item can be parsed sets *PARSED_ITEM to NULL, otherwise
+ to a valid result. */
+static svn_error_t *
+parse_explanation_item(struct explanation_item **parsed_item,
+ const char *text,
+ apr_pool_t *result_pool)
+{
+ struct explanation_item *item = apr_pcalloc(result_pool, sizeof(*item));
+ char *token;
+ char *last;
+ char *tmp = apr_pstrdup(result_pool, text);
+ const char *tmp_end = &tmp[strlen(tmp)];
+
+ *parsed_item = NULL;
+
+ item->operation = apr_strtok(tmp, " ", &last);
+
+ if (!item->operation)
+ {
+ return SVN_NO_ERROR;
+ }
+
+ item->scan = MATCH_TOKEN(item->operation, "SCAN");
+
+ if (item->scan || MATCH_TOKEN(item->operation, "SEARCH"))
+ {
+ item->search = TRUE; /* Search or scan */
+ token = apr_strtok(NULL, " ", &last);
+
+ if (MATCH_TOKEN(token, "TABLE"))
+ {
+ item->table = apr_strtok(NULL, " ", &last);
+ }
+ else if (MATCH_TOKEN(token, "SUBQUERY"))
+ {
+ item->table = apr_psprintf(result_pool, "SUBQUERY-%s",
+ apr_strtok(NULL, " ", &last));
+ }
+ else
+ {
+ printf("DBG: Expected 'TABLE', got '%s' in '%s'\n", token, text);
+ return SVN_NO_ERROR; /* Nothing to parse */
+ }
+
+ token = apr_strtok(NULL, " ", &last);
+
+ /* Skip alias */
+ if (MATCH_TOKEN(token, "AS"))
+ {
+ item->alias = apr_strtok(NULL, " ", &last);
+ token = apr_strtok(NULL, " ", &last);
+ }
+
+ if (MATCH_TOKEN(token, "USING"))
+ {
+ token = apr_strtok(NULL, " ", &last);
+
+ if (MATCH_TOKEN(token, "AUTOMATIC"))
+ {
+ /* Pain: A temporary index is created */
+ item->automatic_index = TRUE;
+ token = apr_strtok(NULL, " ", &last);
+ }
+
+ /* Handle COVERING */
+ if (MATCH_TOKEN(token, "COVERING"))
+ {
+ /* Bonus: Query will be answered by just using the index */
+ item->covered_by_index = TRUE;
+ token = apr_strtok(NULL, " ", &last);
+ }
+
+ if (MATCH_TOKEN(token, "INDEX"))
+ {
+ item->index = apr_strtok(NULL, " ", &last);
+ }
+ else if (MATCH_TOKEN(token, "INTEGER"))
+ {
+ token = apr_strtok(NULL, " ", &last);
+ if (!MATCH_TOKEN(token, "PRIMARY"))
+ {
+ printf("DBG: Expected 'PRIMARY', got '%s' in '%s'\n",
+ token, text);
+ return SVN_NO_ERROR;
+ }
+
+ token = apr_strtok(NULL, " ", &last);
+ if (!MATCH_TOKEN(token, "KEY"))
+ {
+ printf("DBG: Expected 'KEY', got '%s' in '%s'\n",
+ token, text);
+ return SVN_NO_ERROR;
+ }
+
+ item->primary_key = TRUE;
+ }
+ else
+ {
+ printf("DBG: Expected 'INDEX' or 'PRIMARY', got '%s' in '%s'\n",
+ token, text);
+ return SVN_NO_ERROR;
+ }
+
+ token = apr_strtok(NULL, " ", &last);
+ }
+
+ if (token && token[0] == '(' && token[1] != '~')
+ {
+ /* Undo the tokenization to switch parser rules */
+ size_t token_len = strlen(token);
+
+ if (token + token_len < tmp_end)
+ token[token_len] = ' ';
+
+ if (token[token_len] == '\0')
+ last[-1] = ' ';
+
+ token++; /* Skip the '(' */
+
+ item->expressions = apr_strtok(token, ")", &last);
+ token = apr_strtok(NULL, " ", &last);
+ }
+
+ if (token && *token == '(' && token[1] == '~')
+ {
+ /* Undo the tokenization to switch parser rules */
+ size_t token_len = strlen(token);
+
+ if (token + token_len < tmp_end)
+ token[token_len] = ' ';
+
+ if (token[token_len] == '\0')
+ last[-1] = ' ';
+
+ token += 2; /* Skip "(~" */
+
+ item->expected = apr_strtok(token, ")", &last);
+ token = apr_strtok(NULL, " ", &last);
+ }
+
+ if (token)
+ {
+ printf("DBG: Unexpected token '%s' in '%s'\n",
+ token, text);
+ return SVN_NO_ERROR;
+ }
+
+ /* Parsing successful */
+ }
+ else if (MATCH_TOKEN(item->operation, "EXECUTE"))
+ {
+ /* Subquery handling */
+ return SVN_NO_ERROR;
+ }
+ else if (MATCH_TOKEN(item->operation, "COMPOUND"))
+ {
+ /* Handling temporary table (E.g. UNION) */
+
+ token = apr_strtok(NULL, " ", &last);
+ if (!MATCH_TOKEN(token, "SUBQUERIES"))
+ {
+ printf("DBG: Expected 'SUBQUERIES', got '%s' in '%s'\n", token,
+ text);
+ return SVN_NO_ERROR;
+ }
+
+ item->compound_left = apr_strtok(NULL, " ", &last);
+ token = apr_strtok(NULL, " ", &last);
+
+ if (!MATCH_TOKEN(token, "AND"))
+ {
+ printf("DBG: Expected 'AND', got '%s' in '%s'\n", token, text);
+ return SVN_NO_ERROR;
+ }
+
+ item->compound_right = apr_strtok(NULL, " ", &last);
+
+ token = apr_strtok(NULL, " ", &last);
+ if (MATCH_TOKEN(token, "USING"))
+ {
+ token = apr_strtok(NULL, " ", &last);
+ if (!MATCH_TOKEN(token, "TEMP"))
+ {
+ printf("DBG: Expected 'TEMP', got '%s' in '%s'\n", token, text);
+ }
+ token = apr_strtok(NULL, " ", &last);
+ if (!MATCH_TOKEN(token, "B-TREE"))
+ {
+ printf("DBG: Expected 'B-TREE', got '%s' in '%s'\n", token,
+ text);
+ }
+ item->create_btree = TRUE;
+ }
+ }
+ else if (MATCH_TOKEN(item->operation, "USE"))
+ {
+ /* Using a temporary table for ordering results */
+ /* ### Need parsing */
+ item->create_btree = TRUE;
+ }
+ else
+ {
+ printf("DBG: Unhandled sqlite operation '%s' in explanation\n", item->operation);
+ return SVN_NO_ERROR;
+ }
+
+ if (item->expressions)
+ {
+ const char *p;
+
+ for (p = item->expressions; *p; p++)
+ {
+ if (*p == '?')
+ item->expression_vars++;
+ }
+ }
+ if (item->expected)
+ {
+ item->expected_rows = atoi(item->expected);
+ }
+
+ *parsed_item = item;
+ return SVN_NO_ERROR;
+}
+
+/* Sqlite has an SQLITE_OMIT_EXPLAIN compilation flag, which may make
+ explain query just evaluate the query. Some older versions use a
+ different number of columns (and different texts) for
+ EXPLAIN query plan.
+
+ If none of this is true set *SUPPORTED to TRUE, otherwise to FALSE */
+static svn_error_t *
+supported_explain_query_plan(svn_boolean_t *supported,
+ sqlite3 *sdb,
+ apr_pool_t *scratch_pool)
+{
+ sqlite3_stmt *stmt;
+ int r;
+
+ *supported = TRUE;
+
+ r = sqlite3_prepare(sdb, "EXPLAIN QUERY PLAN SELECT 1",
+ -1, &stmt, NULL);
+
+ if (r != SQLITE_OK)
+ {
+ *supported = FALSE;
+ return SVN_NO_ERROR;
+ }
+
+ if (sqlite3_step(stmt) == SQLITE_ROW)
+ {
+ if (sqlite3_column_count(stmt) < 4)
+ {
+ *supported = FALSE;
+ /* Fall through */
+ }
+ }
+
+ SQLITE_ERR(sqlite3_reset(stmt));
+ SQLITE_ERR(sqlite3_finalize(stmt));
+ return SVN_NO_ERROR;
+}
+
+
+/* Returns TRUE if TABLE_NAME specifies a nodes table, which should be indexed
+ by wc_id and either local_relpath or parent_relpath */
+static svn_boolean_t
+is_node_table(const char *table_name)
+{
+ return (apr_strnatcasecmp(table_name, "nodes") == 0
+ || apr_strnatcasecmp(table_name, "actual_node") == 0
+ || apr_strnatcasecmp(table_name, "externals") == 0
+ || apr_strnatcasecmp(table_name, "lock") == 0
+ || apr_strnatcasecmp(table_name, "wc_lock") == 0
+ || FALSE);
+}
+
+/* Returns TRUE if TABLE specifies an intermediate result table, which is
+ allowed to have table scans, etc. */
+static svn_boolean_t
+is_result_table(const char *table_name)
+{
+ return (apr_strnatcasecmp(table_name, "target_prop_cache") == 0
+ || apr_strnatcasecmp(table_name, "changelist_list") == 0
+ || FALSE);
+}
+
+static svn_error_t *
+test_query_expectations(apr_pool_t *scratch_pool)
+{
+ sqlite3 *sdb;
+ int i;
+ apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+ svn_error_t *warnings = NULL;
+ svn_boolean_t supports_query_info;
+
+ SVN_ERR(create_memory_db(&sdb, scratch_pool));
+
+ SVN_ERR(supported_explain_query_plan(&supports_query_info, sdb,
+ scratch_pool));
+ if (!supports_query_info)
+ {
+ SQLITE_ERR(sqlite3_close(sdb));
+ return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+ "Sqlite doesn't support EXPLAIN QUERY PLAN");
+ }
+
+ for (i=0; i < STMT_SCHEMA_FIRST; i++)
+ {
+ sqlite3_stmt *stmt;
+ const char *tail;
+ int r;
+ svn_boolean_t warned = FALSE;
+ apr_array_header_t *rows = NULL;
+
+ if (is_schema_statement(i))
+ continue;
+
+ /* Prepare statement to find if it is a single statement. */
+ r = sqlite3_prepare_v2(sdb, wc_queries[i], -1, &stmt, &tail);
+
+ if (r != SQLITE_OK)
+ continue; /* Parse failure is already reported by 'test_parable' */
+
+ SQLITE_ERR(sqlite3_finalize(stmt));
+ if (tail[0] != '\0')
+ continue; /* Multi-queries are currently not testable */
+
+ svn_pool_clear(iterpool);
+
+ r = sqlite3_prepare_v2(sdb,
+ apr_pstrcat(iterpool,
+ "EXPLAIN QUERY PLAN ",
+ wc_queries[i],
+ SVN_VA_NULL),
+ -1, &stmt, &tail);
+
+ if (r != SQLITE_OK)
+ continue; /* EXPLAIN not enabled or doesn't support this query */
+
+ while (SQLITE_ROW == (r = sqlite3_step(stmt)))
+ {
+ /*int iSelectid;
+ int iOrder;
+ int iFrom;*/
+ const unsigned char *zDetail;
+ char *detail;
+ struct explanation_item *item;
+
+ /* ### The following code is correct for current Sqlite versions
+ ### (tested with 3.7.x), but the EXPLAIN QUERY PLAN output
+ ### is not guaranteed to be stable for future versions. */
+
+ /* Names as in Sqlite documentation */
+ /*iSelectid = sqlite3_column_int(stmt, 0);
+ iOrder = sqlite3_column_int(stmt, 1);
+ iFrom = sqlite3_column_int(stmt, 2);*/
+ zDetail = sqlite3_column_text(stmt, 3);
+
+ if (! zDetail)
+ continue;
+
+ if (!rows)
+ rows = apr_array_make(iterpool, 10, sizeof(const char*));
+
+ detail = apr_pstrdup(iterpool, (const char*)zDetail);
+
+ APR_ARRAY_PUSH(rows, const char *) = detail;
+
+ SVN_ERR(parse_explanation_item(&item, detail, iterpool));
+
+ if (!item)
+ continue; /* Not parsable or not interesting */
+
+ if (item->search
+ && item->automatic_index)
+ {
+ warned = TRUE;
+ if (!is_slow_statement(i))
+ {
+ warnings = svn_error_createf(SVN_ERR_TEST_FAILED, warnings,
+ "%s: "
+ "Creates a temporary index: %s\n",
+ wc_query_info[i][0], wc_queries[i]);
+ }
+ }
+ else if (item->search && item->primary_key)
+ {
+ /* Nice */
+ }
+ else if (item->search
+ && ((item->expression_vars < 2 && is_node_table(item->table))
+ || (item->expression_vars < 1))
+ && !is_result_table(item->table))
+ {
+ if (in_list(primary_key_statements, i))
+ {
+ /* Reported as primary key index usage in Sqlite 3.7,
+ as table scan in 3.8+, while the execution plan is
+ identical: read first record from table */
+ }
+ else if (!is_slow_statement(i))
+ {
+ warned = TRUE;
+ warnings = svn_error_createf(SVN_ERR_TEST_FAILED, warnings,
+ "%s: "
+ "Uses %s with only %d index component: (%s)\n%s",
+ wc_query_info[i][0], item->table,
+ item->expression_vars, item->expressions,
+ wc_queries[i]);
+ }
+ else
+ warned = TRUE;
+ }
+ else if (item->search && !item->index)
+ {
+ warned = TRUE;
+ if (!is_slow_statement(i))
+ warnings = svn_error_createf(SVN_ERR_TEST_FAILED, warnings,
+ "%s: "
+ "Query on %s doesn't use an index:\n%s",
+ wc_query_info[i][0], item->table, wc_queries[i]);
+ }
+ else if (item->scan && !is_result_table(item->table))
+ {
+ warned = TRUE;
+ if (!is_slow_statement(i))
+ warnings = svn_error_createf(SVN_ERR_TEST_FAILED, warnings,
+ "Query %s: "
+ "Performs scan on %s:\n%s",
+ wc_query_info[i][0], item->table, wc_queries[i]);
+ }
+ else if (item->create_btree)
+ {
+ warned = TRUE;
+ if (!is_slow_statement(i))
+ warnings = svn_error_createf(SVN_ERR_TEST_FAILED, warnings,
+ "Query %s: Creates a temporary B-TREE:\n%s",
+ wc_query_info[i][0], wc_queries[i]);
+ }
+ }
+ SQLITE_ERR(sqlite3_reset(stmt));
+ SQLITE_ERR(sqlite3_finalize(stmt));
+
+ if (!warned && is_slow_statement(i))
+ {
+ printf("DBG: Expected %s to be reported as slow, but it wasn't\n",
+ wc_query_info[i][0]);
+ }
+
+ if (rows && warned != is_slow_statement(i))
+ {
+ int w;
+ svn_error_t *info = NULL;
+ for (w = rows->nelts-1; w >= 0; w--)
+ {
+ if (warned)
+ info = svn_error_createf(SVN_ERR_SQLITE_CONSTRAINT, info,
+ "|%s", APR_ARRAY_IDX(rows, w,
+ const char*));
+ else
+ printf("|%s\n", APR_ARRAY_IDX(rows, w, const char*));
+ }
+
+ warnings = svn_error_compose_create(warnings, info);
+ }
+ }
+ SQLITE_ERR(sqlite3_close(sdb)); /* Close the DB if ok; otherwise leaked */
+
+ return warnings;
+}
+
+/* Test that no two non-schema statements in WC_QUERIES compile to an
+   identical SQLite program: the full EXPLAIN listing of each query is
+   serialized and hashed, and two queries hashing to the same value are
+   reported as duplicates (usually a copy-pasted or redundant query). */
+static svn_error_t *
+test_query_duplicates(apr_pool_t *scratch_pool)
+{
+  sqlite3 *sdb;
+  int i;
+  apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+  svn_error_t *warnings = NULL;
+  svn_boolean_t supports_query_info;
+  /* Maps SHA-1 hex of a query's EXPLAIN output to the name of the first
+     statement that produced that output. */
+  apr_hash_t *sha_to_query = apr_hash_make(scratch_pool);
+
+  SVN_ERR(create_memory_db(&sdb, scratch_pool));
+
+  SVN_ERR(supported_explain_query_plan(&supports_query_info, sdb,
+                                       scratch_pool));
+  if (!supports_query_info)
+    {
+      SQLITE_ERR(sqlite3_close(sdb));
+      return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL,
+                              "Sqlite doesn't support EXPLAIN QUERY PLAN");
+    }
+
+  for (i = 0; i < STMT_SCHEMA_FIRST; i++)
+    {
+      sqlite3_stmt *stmt;
+      const char *tail;
+      int r;
+      svn_stringbuf_t *result;
+      svn_checksum_t *checksum;
+
+      if (is_schema_statement(i))
+        continue;
+
+      /* Prepare statement to find if it is a single statement. */
+      r = sqlite3_prepare_v2(sdb, wc_queries[i], -1, &stmt, &tail);
+
+      if (r != SQLITE_OK)
+        continue; /* Parse failure is already reported by 'test_parsable' */
+
+      SQLITE_ERR(sqlite3_finalize(stmt));
+      if (tail[0] != '\0')
+        continue; /* Multi-queries are currently not testable */
+
+      svn_pool_clear(iterpool);
+
+      r = sqlite3_prepare_v2(sdb,
+                             apr_pstrcat(iterpool,
+                                         "EXPLAIN ",
+                                         wc_queries[i],
+                                         SVN_VA_NULL),
+                             -1, &stmt, &tail);
+
+      if (r != SQLITE_OK)
+        continue; /* EXPLAIN not enabled or doesn't support this query */
+
+      /* Serialize the whole EXPLAIN listing: every column '|'-separated,
+         one row per line, so equal programs yield byte-equal text. */
+      result = svn_stringbuf_create_empty(iterpool);
+
+      while (SQLITE_ROW == (r = sqlite3_step(stmt)))
+        {
+          int col;
+
+          for (col = 0; col < sqlite3_column_count(stmt); col++)
+            {
+              const char *txt = (const char*)sqlite3_column_text(stmt, col);
+              if (txt)
+                svn_stringbuf_appendcstr(result, txt);
+
+              svn_stringbuf_appendcstr(result, "|");
+            }
+
+          svn_stringbuf_appendcstr(result, "\n");
+        }
+
+      SQLITE_ERR(sqlite3_reset(stmt));
+      SQLITE_ERR(sqlite3_finalize(stmt));
+
+      SVN_ERR(svn_checksum(&checksum, svn_checksum_sha1,
+                           result->data, result->len,
+                           iterpool));
+
+      {
+        /* HEX lives in SCRATCH_POOL (not ITERPOOL): it is kept as a hash
+           key across loop iterations. */
+        const char *hex = svn_checksum_to_cstring(checksum, scratch_pool);
+        const char *other;
+
+        other = svn_hash_gets(sha_to_query, hex);
+        if (other)
+          {
+            /* Collect rather than abort, so every duplicate pair found
+               is reported in one run. */
+            warnings = svn_error_createf(SVN_ERR_TEST_FAILED, warnings,
+                          "Query %s has an identical execution plan as %s",
+                          wc_query_info[i][0], other);
+          }
+        else
+          svn_hash_sets(sha_to_query, hex, wc_query_info[i][0]);
+      }
+    }
+  SQLITE_ERR(sqlite3_close(sdb)); /* Close the DB if ok; otherwise leaked */
+
+  return warnings;
+}
+
+/* Helper to verify a bit of data in the sqlite3 statistics.
+
+   Parse STAT, a space-separated list of decimal numbers as stored in the
+   'stat' column of the sqlite_stat1 table (produced by ANALYZE).
+
+   Return the number of values parsed.  Return -2 if a token does not
+   begin with a digit, or -1 if a value is invalid: each value must be
+   >= 1 and no greater than the value preceding it (or if apr_strtoi64
+   sets errno). */
+static int
+parse_stat_data(const char *stat)
+{
+  int n = 0;
+  apr_int64_t last = APR_INT64_MAX;  /* sentinel: first value always passes */
+  while (*stat)
+    {
+      apr_int64_t v;
+      char *next;
+
+      if (*stat < '0' || *stat > '9')
+        return -2;
+
+      /* apr_strtoi64 reports overflow via errno, so clear it first. */
+      errno = 0;
+      v = apr_strtoi64(stat, &next, 10);
+
+      /* All numbers specify the average number of rows
+         with the same values in all columns left of it,
+         so the value must be >= 1 and lower than or equal
+         to all previous seen numbers */
+      if (v <= 0 || (v > last) || (errno != 0))
+        return -1;
+
+      last = v;
+
+      n++;
+      stat = next;
+
+      /* Values are separated by a single space. */
+      if (*stat == ' ')
+        stat++;
+    }
+
+  return n;
+}
+
+/* Check that the index statistics installed by the schema stay in sync
+   with what ANALYZE computes: the pre-cooked sqlite_stat1 rows are first
+   copied aside into shadow_stat1 and dropped, each table is given at
+   least one row, and ANALYZE rebuilds sqlite_stat1.  Every shadow row
+   must then still refer to an existing index and carry the same number
+   of statistic columns as the freshly computed row. */
+static svn_error_t *
+test_schema_statistics(apr_pool_t *scratch_pool)
+{
+  sqlite3 *sdb;
+  sqlite3_stmt *stmt;
+
+  SVN_ERR(create_memory_db(&sdb, scratch_pool));
+
+  /* Preserve the statistics shipped with the schema before dropping
+     them, so they can be compared against ANALYZE's output below. */
+  SQLITE_ERR(
+      sqlite3_exec(sdb,
+                   "CREATE TABLE shadow_stat1(tbl TEXT, idx TEXT, stat TEXT)",
+                   NULL, NULL, NULL));
+
+  SQLITE_ERR(
+      sqlite3_exec(sdb,
+                   "INSERT INTO shadow_stat1 (tbl, idx, stat) "
+                   "SELECT tbl, idx, stat FROM sqlite_stat1",
+                   NULL, NULL, NULL));
+
+  SQLITE_ERR(
+      sqlite3_exec(sdb,
+                   "DROP TABLE sqlite_stat1",
+                   NULL, NULL, NULL));
+
+  /* Insert statement to give index at least 1 record */
+  SQLITE_ERR(
+      sqlite3_exec(sdb,
+                   "INSERT INTO nodes (wc_id, local_relpath, op_depth,"
+                   " presence, kind) "
+                   "VALUES (1, '', 0, 'normal', 'dir')",
+                   NULL, NULL, NULL));
+
+  SQLITE_ERR(
+      sqlite3_exec(sdb,
+                   "INSERT INTO actual_node (wc_id, local_relpath) "
+                   "VALUES (1, '')",
+                   NULL, NULL, NULL));
+
+  SQLITE_ERR(
+      sqlite3_exec(sdb,
+                   "INSERT INTO lock (repos_id, repos_relpath, lock_token) "
+                   "VALUES (1, '', '')",
+                   NULL, NULL, NULL));
+
+  SQLITE_ERR(
+      sqlite3_exec(sdb,
+                   "INSERT INTO EXTERNALS (wc_id, local_relpath,"
+                   " parent_relpath, repos_id,"
+                   " presence, kind, def_local_relpath,"
+                   " def_repos_relpath) "
+                   "VALUES (1, 'subdir', '', 1, 'normal', 'dir', '', '')",
+                   NULL, NULL, NULL));
+
+  /* These are currently not necessary for query optimization, but it's better
+     to tell Sqlite how we intend to use this table anyway */
+  SQLITE_ERR(
+      sqlite3_exec(sdb,
+                   "INSERT INTO wc_lock (wc_id, local_dir_relpath) "
+                   "VALUES (1, '')",
+                   NULL, NULL, NULL));
+
+  SQLITE_ERR(
+      sqlite3_exec(sdb,
+                   "INSERT INTO WORK_QUEUE (work) "
+                   "VALUES ('')",
+                   NULL, NULL, NULL));
+
+  /* Recompute sqlite_stat1 from the rows inserted above. */
+  SQLITE_ERR(
+      sqlite3_exec(sdb,
+                   "ANALYZE",
+                   NULL, NULL, NULL));
+
+  /* Pair every saved statistics row with the freshly computed one for
+     the same (table, index); r.stat is NULL when the index no longer
+     exists after ANALYZE. */
+  SQLITE_ERR(
+      sqlite3_prepare(sdb, "SELECT s.tbl, s.idx, s.stat, r.stat "
+                           "FROM shadow_stat1 s "
+                           "LEFT JOIN sqlite_stat1 r ON "
+                           "s.tbl=r.tbl and s.idx=r.idx",
+                      -1, &stmt, NULL));
+
+  while (sqlite3_step(stmt) == SQLITE_ROW)
+    {
+      const char *wc_stat = (const char*)sqlite3_column_text(stmt, 2);
+      const char *sqlite_stat = (const char*)sqlite3_column_text(stmt, 3);
+
+      if (! sqlite_stat)
+        {
+          /* NOTE(review): STMT is not finalized and SDB is not closed on
+             this error path (nor on the one below); the handles leak on
+             test failure.  Acceptable for a test, but worth noting. */
+          return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                   "Schema statistic failure:"
+                                   " Refering to unknown index '%s' on '%s'",
+                                   sqlite3_column_text(stmt, 1),
+                                   sqlite3_column_text(stmt, 0));
+        }
+
+      /* Only the number of statistic columns is compared; the actual row
+         estimates legitimately differ between the schema defaults and
+         what ANALYZE derives from this tiny data set. */
+      if (parse_stat_data(wc_stat) != parse_stat_data(sqlite_stat))
+        {
+          return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+                                   "Schema statistic failure:"
+                                   " Column mismatch for '%s' on '%s'",
+                                   sqlite3_column_text(stmt, 1),
+                                   sqlite3_column_text(stmt, 0));
+        }
+    }
+
+  SQLITE_ERR(sqlite3_reset(stmt));
+  SQLITE_ERR(sqlite3_finalize(stmt));
+
+  SQLITE_ERR(sqlite3_close(sdb)); /* Close the DB if ok; otherwise leaked */
+
+  return SVN_NO_ERROR;
+}
+
+/* An SQLite application defined function that allows SQL queries to
+   use "relpath_depth(local_relpath)".  This is a stub: it exists only
+   so that such queries can be *prepared* (parsed) by the tests; it must
+   never actually execute, and deliberately aborts if it does. */
+static void relpath_depth_sqlite(sqlite3_context* context,
+                                 int argc,
+                                 sqlite3_value* values[])
+{
+  SVN_ERR_MALFUNCTION_NO_RETURN(); /* STUB! */
+}
+
+/* Parse all verify/check queries.  Statement texts from
+   STMT_VERIFICATION_TRIGGERS onward may each contain several SQL
+   statements, so every sub-statement is prepared in turn.  The
+   "relpath_depth" stub is registered first -- presumably because some
+   of these queries reference it and preparation would otherwise fail
+   (TODO confirm against the query definitions). */
+static svn_error_t *
+test_verify_parsable(apr_pool_t *scratch_pool)
+{
+  sqlite3 *sdb;
+  int i;
+
+  SVN_ERR(create_memory_db(&sdb, scratch_pool));
+
+  SQLITE_ERR(sqlite3_create_function(sdb, "relpath_depth", 1, SQLITE_ANY, NULL,
+                                     relpath_depth_sqlite, NULL, NULL));
+
+  for (i=STMT_VERIFICATION_TRIGGERS; wc_queries[i]; i++)
+    {
+      sqlite3_stmt *stmt;
+      const char *text = wc_queries[i];
+
+      /* Some of our statement texts contain multiple queries. We prepare
+         them all. */
+      while (*text != '\0')
+        {
+          const char *tail;
+          int r = sqlite3_prepare_v2(sdb, text, -1, &stmt, &tail);
+
+          /* NOTE(review): this error return leaks SDB (never closed);
+             tolerable in a failing test, but worth noting. */
+          if (r != SQLITE_OK)
+            return svn_error_createf(SVN_ERR_SQLITE_ERROR, NULL,
+                                     "Preparing %s failed: %s\n%s",
+                                     wc_query_info[i][0],
+                                     sqlite3_errmsg(sdb),
+                                     text);
+
+          SQLITE_ERR(sqlite3_finalize(stmt));
+
+          /* Continue after the current statement */
+          text = tail;
+        }
+    }
+
+  SQLITE_ERR(sqlite3_close(sdb)); /* Close the DB if ok; otherwise leaked */
+
+  return SVN_NO_ERROR;
+}
+
+
+/* Maximum number of test functions the harness may run concurrently. */
+static int max_threads = 1;
+
+/* Test descriptor table consumed by SVN_TEST_MAIN; the leading and
+   trailing SVN_TEST_NULL entries delimit the list. */
+static struct svn_test_descriptor_t test_funcs[] =
+  {
+    SVN_TEST_NULL,
+    SVN_TEST_PASS2(test_sqlite_version,
+                   "sqlite up-to-date"),
+    SVN_TEST_PASS2(test_parsable,
+                   "queries are parsable"),
+    SVN_TEST_PASS2(test_query_expectations,
+                   "test query expectations"),
+    SVN_TEST_PASS2(test_query_duplicates,
+                   "test query duplicates"),
+    SVN_TEST_PASS2(test_schema_statistics,
+                   "test schema statistics"),
+    SVN_TEST_PASS2(test_verify_parsable,
+                   "verify queries are parsable"),
+    SVN_TEST_NULL
+  };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/libsvn_wc/wc-test-queries.h b/subversion/tests/libsvn_wc/wc-test-queries.h
new file mode 100644
index 0000000..b454af3
--- /dev/null
+++ b/subversion/tests/libsvn_wc/wc-test-queries.h
@@ -0,0 +1,112 @@
+/* This file is automatically generated from wc-test-queries.sql and subversion/tests/libsvn_wc/token-map.h.
+ * Do not edit this file -- edit the source and rerun gen-make.py */
+
+#define STMT_SELECT_NODES_INFO 0
+#define STMT_0_INFO {"STMT_SELECT_NODES_INFO", NULL}
+#define STMT_0 \
+ "SELECT op_depth, n.presence, n.local_relpath, revision, " \
+ " repos_path, file_external, def_local_relpath, moved_to, moved_here, " \
+ " properties " \
+ "FROM nodes n " \
+ "LEFT OUTER JOIN externals e " \
+ " ON n.wc_id = e.wc_id " \
+ " AND n.local_relpath = e.local_relpath " \
+ "WHERE n.wc_id = ?1 " \
+ " AND (n.local_relpath = ?2 OR (((n.local_relpath) > (CASE (?2) WHEN '' THEN '' ELSE (?2) || '/' END)) AND ((n.local_relpath) < CASE (?2) WHEN '' THEN X'FFFF' ELSE (?2) || '0' END))) " \
+ ""
+
+#define STMT_SELECT_ACTUAL_INFO 1
+#define STMT_1_INFO {"STMT_SELECT_ACTUAL_INFO", NULL}
+#define STMT_1 \
+ "SELECT local_relpath " \
+ "FROM actual_node " \
+ "WHERE wc_id = ?1 " \
+ " AND conflict_data is NOT NULL " \
+ " AND (local_relpath = ?2 OR (((local_relpath) > (CASE (?2) WHEN '' THEN '' ELSE (?2) || '/' END)) AND ((local_relpath) < CASE (?2) WHEN '' THEN X'FFFF' ELSE (?2) || '0' END))) " \
+ ""
+
+#define STMT_DELETE_NODES 2
+#define STMT_2_INFO {"STMT_DELETE_NODES", NULL}
+#define STMT_2 \
+ "DELETE FROM nodes; " \
+ ""
+
+#define STMT_INSERT_NODE 3
+#define STMT_3_INFO {"STMT_INSERT_NODE", NULL}
+#define STMT_3 \
+ "INSERT INTO nodes (local_relpath, op_depth, presence, repos_path, " \
+ " revision, parent_relpath, moved_to, moved_here, " \
+ " properties, wc_id, repos_id, kind, " \
+ " depth) " \
+ " VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, 1, " \
+ " CASE WHEN ?3 != 'base-deleted' THEN 1 END, " \
+ " 'dir', " \
+ " CASE WHEN ?3 in ('normal', 'incomplete') " \
+ " THEN 'infinity' END) " \
+ ""
+
+#define STMT_DELETE_ACTUAL 4
+#define STMT_4_INFO {"STMT_DELETE_ACTUAL", NULL}
+#define STMT_4 \
+ "DELETE FROM actual_node; " \
+ ""
+
+#define STMT_INSERT_ACTUAL 5
+#define STMT_5_INFO {"STMT_INSERT_ACTUAL", NULL}
+#define STMT_5 \
+ "INSERT INTO actual_node (local_relpath, parent_relpath, changelist, wc_id) " \
+ " VALUES (?1, ?2, ?3, 1) " \
+ ""
+
+#define STMT_ENSURE_EMPTY_PRISTINE 6
+#define STMT_6_INFO {"STMT_ENSURE_EMPTY_PRISTINE", NULL}
+#define STMT_6 \
+ "INSERT OR IGNORE INTO pristine (checksum, md5_checksum, size, refcount) " \
+ " VALUES ('$sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709', " \
+ " '$md5 $d41d8cd98f00b204e9800998ecf8427e', " \
+ " 0, 0) " \
+ ""
+
+#define STMT_NODES_SET_FILE 7
+#define STMT_7_INFO {"STMT_NODES_SET_FILE", NULL}
+#define STMT_7 \
+ "UPDATE nodes " \
+ " SET kind = 'file', " \
+ " checksum = '$sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709', " \
+ " depth = NULL " \
+ "WHERE wc_id = 1 and local_relpath = ?1 " \
+ ""
+
+#define STMT_SELECT_ALL_ACTUAL 8
+#define STMT_8_INFO {"STMT_SELECT_ALL_ACTUAL", NULL}
+#define STMT_8 \
+ "SELECT local_relpath FROM actual_node WHERE wc_id = 1 " \
+ ""
+
+#define WC_TEST_QUERIES_SQL_DECLARE_STATEMENTS(varname) \
+ static const char * const varname[] = { \
+ STMT_0, \
+ STMT_1, \
+ STMT_2, \
+ STMT_3, \
+ STMT_4, \
+ STMT_5, \
+ STMT_6, \
+ STMT_7, \
+ STMT_8, \
+ NULL \
+ }
+
+#define WC_TEST_QUERIES_SQL_DECLARE_STATEMENT_INFO(varname) \
+ static const char * const varname[][2] = { \
+ STMT_0_INFO, \
+ STMT_1_INFO, \
+ STMT_2_INFO, \
+ STMT_3_INFO, \
+ STMT_4_INFO, \
+ STMT_5_INFO, \
+ STMT_6_INFO, \
+ STMT_7_INFO, \
+ STMT_8_INFO, \
+ {NULL, NULL} \
+ }
diff --git a/subversion/tests/libsvn_wc/wc-test-queries.sql b/subversion/tests/libsvn_wc/wc-test-queries.sql
new file mode 100644
index 0000000..613819a
--- /dev/null
+++ b/subversion/tests/libsvn_wc/wc-test-queries.sql
@@ -0,0 +1,78 @@
+/* wc-test-queries.sql -- queries used to verify wc metadata from
+ * the C tests.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+-- STMT_SELECT_NODES_INFO
+SELECT op_depth, n.presence, n.local_relpath, revision,
+ repos_path, file_external, def_local_relpath, moved_to, moved_here,
+ properties
+FROM nodes n
+LEFT OUTER JOIN externals e
+ ON n.wc_id = e.wc_id
+ AND n.local_relpath = e.local_relpath
+WHERE n.wc_id = ?1
+ AND (n.local_relpath = ?2 OR IS_STRICT_DESCENDANT_OF(n.local_relpath, ?2))
+
+-- STMT_SELECT_ACTUAL_INFO
+SELECT local_relpath
+FROM actual_node
+WHERE wc_id = ?1
+ AND conflict_data is NOT NULL
+ AND (local_relpath = ?2 OR IS_STRICT_DESCENDANT_OF(local_relpath, ?2))
+
+-- STMT_DELETE_NODES
+DELETE FROM nodes;
+
+-- STMT_INSERT_NODE
+INSERT INTO nodes (local_relpath, op_depth, presence, repos_path,
+ revision, parent_relpath, moved_to, moved_here,
+ properties, wc_id, repos_id, kind,
+ depth)
+ VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, 1,
+ CASE WHEN ?3 != 'base-deleted' THEN 1 END,
+ 'dir',
+ CASE WHEN ?3 in ('normal', 'incomplete')
+ THEN 'infinity' END)
+
+-- STMT_DELETE_ACTUAL
+DELETE FROM actual_node;
+
+-- STMT_INSERT_ACTUAL
+INSERT INTO actual_node (local_relpath, parent_relpath, changelist, wc_id)
+ VALUES (?1, ?2, ?3, 1)
+
+-- STMT_ENSURE_EMPTY_PRISTINE
+INSERT OR IGNORE INTO pristine (checksum, md5_checksum, size, refcount)
+ VALUES ('$sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709',
+ '$md5 $d41d8cd98f00b204e9800998ecf8427e',
+ 0, 0)
+
+-- STMT_NODES_SET_FILE
+UPDATE nodes
+ SET kind = 'file',
+ checksum = '$sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709',
+ depth = NULL
+WHERE wc_id = 1 and local_relpath = ?1
+
+-- STMT_SELECT_ALL_ACTUAL
+SELECT local_relpath FROM actual_node WHERE wc_id = 1
+
diff --git a/subversion/tests/libsvn_wc/wc-test.c b/subversion/tests/libsvn_wc/wc-test.c
new file mode 100644
index 0000000..8ba6c77
--- /dev/null
+++ b/subversion/tests/libsvn_wc/wc-test.c
@@ -0,0 +1,521 @@
+/*
+ * wc-test.c : test WC APIs
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <apr_pools.h>
+#include <apr_general.h>
+#include <apr_md5.h>
+
+#define SVN_DEPRECATED
+
+#include "svn_types.h"
+#include "svn_io.h"
+#include "svn_dirent_uri.h"
+#include "svn_pools.h"
+#include "svn_repos.h"
+#include "svn_wc.h"
+#include "svn_client.h"
+#include "svn_hash.h"
+
+#include "utils.h"
+
+#include "private/svn_wc_private.h"
+#include "private/svn_sqlite.h"
+#include "private/svn_dep_compat.h"
+#include "../../libsvn_wc/wc.h"
+#include "../../libsvn_wc/wc_db.h"
+#define SVN_WC__I_AM_WC_DB
+#include "../../libsvn_wc/wc_db_private.h"
+
+#include "../svn_test.h"
+
+#ifdef _MSC_VER
+#pragma warning(disable: 4221) /* nonstandard extension used */
+#endif
+
+
+/* ---------------------------------------------------------------------- */
+/* The test functions */
+
+/* Structure for testing node_get_base and node_get_origin. */
+struct base_origin_t
+{
+ /* Path to create and test, WC-relative */
+ const char *path;
+ /* Expected base rev. "-1" means no base. (Expected base path
+ * == base_rev valid ? path : NULL) */
+ svn_revnum_t base_rev;
+ /* Path to copy from, WC-relative */
+ const char *src_path;
+ /* Expected "origin" */
+ struct {
+ const char *path;
+ svn_revnum_t rev;
+ } origin;
+};
+
+/* Data for testing node_get_base and node_get_origin. */
+static struct base_origin_t base_origin_subtests[] =
+ {
+ /* file copied onto nothing */
+ { "A/C/copy1", -1, "iota", {"iota", 1} },
+
+ /* dir copied onto nothing */
+ { "A/C/copy2", -1, "A/B/E", {"A/B/E", 1} },
+
+ /* replacement: file copied over a schedule-delete file */
+ { "A/B/lambda", 1, "iota", {"iota", 1} },
+
+ /* replacement: dir copied over a schedule-delete dir */
+ { "A/D/G", 1, "A/B/E", {"A/B/E", 1} },
+
+ /* replacement: dir copied over a schedule-delete file */
+ { "A/D/gamma", 1, "A/B/E", {"A/B/E", 1} },
+
+ /* replacement: file copied over a schedule-delete dir */
+ { "A/D/H", 1, "iota", {"iota", 1} },
+
+ { 0 }
+ };
+
+/* Create a WC containing lots of different node states, in the sandbox B. */
+static svn_error_t *
+create_wc_for_base_and_origin_tests(svn_test__sandbox_t *b)
+{
+ struct base_origin_t *copy;
+
+ SVN_ERR(sbox_add_and_commit_greek_tree(b));
+
+ /* Copy various things */
+ for (copy = base_origin_subtests; copy->src_path; copy++)
+ {
+ if (SVN_IS_VALID_REVNUM(copy->base_rev))
+ SVN_ERR(sbox_wc_delete(b, copy->path));
+ SVN_ERR(sbox_wc_copy(b, copy->src_path, copy->path));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Test svn_wc__node_get_base(). */
+static svn_error_t *
+test_node_get_base(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+
+ SVN_ERR(svn_test__sandbox_create(b, "node_get_base", opts, pool));
+
+ SVN_ERR(create_wc_for_base_and_origin_tests(b));
+
+ {
+ struct base_origin_t *subtest;
+
+ for (subtest = base_origin_subtests; subtest->path; subtest++)
+ {
+ const char *local_abspath
+ = svn_dirent_join(b->wc_abspath, subtest->path, b->pool);
+ svn_revnum_t revision;
+ const char *repos_relpath, *repos_root_url, *repos_uuid;
+
+ SVN_ERR(svn_wc__node_get_base(NULL, &revision, &repos_relpath,
+ &repos_root_url, &repos_uuid,
+ NULL,
+ b->wc_ctx, local_abspath,
+ TRUE /* ignore_enoent */,
+ b->pool, b->pool));
+ SVN_TEST_ASSERT(revision == subtest->base_rev);
+ if (SVN_IS_VALID_REVNUM(subtest->base_rev))
+ {
+ SVN_TEST_STRING_ASSERT(repos_relpath, subtest->path);
+ SVN_TEST_STRING_ASSERT(repos_root_url, b->repos_url);
+ SVN_TEST_ASSERT(repos_uuid != NULL);
+ }
+ else
+ {
+ SVN_TEST_STRING_ASSERT(repos_relpath, NULL);
+ SVN_TEST_STRING_ASSERT(repos_root_url, NULL);
+ SVN_TEST_STRING_ASSERT(repos_uuid, NULL);
+ }
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Test svn_wc__node_get_origin(). */
+static svn_error_t *
+test_node_get_origin(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t *b = apr_palloc(pool, sizeof(*b));
+
+ SVN_ERR(svn_test__sandbox_create(b, "node_get_origin", opts, pool));
+
+ SVN_ERR(create_wc_for_base_and_origin_tests(b));
+
+ {
+ struct base_origin_t *subtest;
+
+ for (subtest = base_origin_subtests; subtest->path; subtest++)
+ {
+ const char *local_abspath
+ = svn_dirent_join(b->wc_abspath, subtest->path, b->pool);
+ svn_revnum_t revision;
+ const char *repos_relpath, *repos_root_url, *repos_uuid;
+
+ SVN_ERR(svn_wc__node_get_origin(NULL, &revision, &repos_relpath,
+ &repos_root_url, &repos_uuid, NULL,
+ NULL,
+ b->wc_ctx, local_abspath, FALSE,
+ b->pool, b->pool));
+ SVN_TEST_ASSERT(revision == subtest->origin.rev);
+ if (SVN_IS_VALID_REVNUM(subtest->origin.rev))
+ {
+ SVN_TEST_STRING_ASSERT(repos_relpath, subtest->origin.path);
+ SVN_TEST_STRING_ASSERT(repos_root_url, b->repos_url);
+ SVN_TEST_ASSERT(repos_uuid != NULL);
+ }
+ else
+ {
+ SVN_TEST_STRING_ASSERT(repos_relpath, NULL);
+ SVN_TEST_STRING_ASSERT(repos_root_url, NULL);
+ SVN_TEST_STRING_ASSERT(repos_uuid, NULL);
+ }
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_externals_parse(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ int i;
+ struct external_info
+ {
+ const char *line;
+ const char *url;
+ const char *local_path;
+ svn_revnum_t peg_rev;
+ svn_revnum_t rev;
+
+ } items[] = {
+ {
+ "dir http://server/svn/a",
+ "http://server/svn/a",
+ "dir"
+ },
+ {
+ "/svn/home dir",
+ "u://svr/svn/home",
+ "dir"
+ },
+ {
+ "//server/home dir",
+ "u://server/home",
+ "dir"
+ },
+ {
+ "../../../../home dir",
+ "u://svr/svn/home",
+ "dir",
+ },
+ {
+ "^/../repB/tools/scripts scripts",
+ "u://svr/svn/cur/repB/tools/scripts",
+ "scripts"
+ },
+ {
+ "^/../repB/tools/README.txt scripts/README.txt",
+ "u://svr/svn/cur/repB/tools/README.txt",
+ "scripts/README.txt"
+ },
+ };
+
+ for (i = 0; i < sizeof(items) / sizeof(items[0]); i++)
+ {
+ apr_array_header_t *results;
+ svn_wc_external_item2_t *external_item;
+ const char *resolved_url;
+ SVN_ERR(svn_wc_parse_externals_description3(&results, "/my/current/dir",
+ items[i].line, FALSE, pool));
+
+ SVN_TEST_ASSERT(results && results->nelts == 1);
+
+ external_item = APR_ARRAY_IDX(results, 0, svn_wc_external_item2_t *);
+
+ SVN_ERR(svn_wc__resolve_relative_external_url(&resolved_url,
+ external_item,
+ "u://svr/svn/cur/dir",
+ "u://svr/svn/cur/dir/sd/fl",
+ pool, pool));
+
+ SVN_TEST_STRING_ASSERT(resolved_url, items[i].url);
+ SVN_TEST_STRING_ASSERT(external_item->target_dir, items[i].local_path);
+
+ if (items[i].peg_rev != 0)
+ SVN_TEST_ASSERT(external_item->peg_revision.value.number
+ == items[i].peg_rev);
+ if (items[i].rev != 0)
+ SVN_TEST_ASSERT(external_item->revision.value.number == items[i].rev);
+ SVN_TEST_ASSERT(svn_uri_is_canonical(resolved_url, pool));
+ }
+
+
+ return SVN_NO_ERROR;
+
+}
+
+static svn_error_t *
+test_externals_parse_erratic(apr_pool_t *pool)
+{
+ svn_error_t *err;
+ apr_array_header_t *list = NULL;
+
+ err = svn_wc_parse_externals_description3(
+ &list, "parent_dir",
+ "^/valid/but/should/not/be/on/record wc_target\n"
+ "because_this_is_an_error",
+ FALSE, pool);
+
+ /* DESC above has an error, so expect one. */
+ SVN_TEST_ASSERT(err != NULL);
+ svn_error_clear(err);
+
+ /* svn_wc_parse_externals_description3() should not
+ touch LIST when DESC had an error.*/
+ SVN_TEST_ASSERT(list == NULL);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_legacy_commit1(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ svn_wc_adm_access_t *adm_access;
+ const char *lambda;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "legacy_commit1", opts, pool));
+ SVN_ERR(sbox_add_and_commit_greek_tree(&b));
+
+ SVN_ERR(sbox_wc_copy(&b, "A", "A_copied"));
+
+ lambda = sbox_wc_path(&b, "A_copied/B/lambda");
+
+
+ SVN_ERR(svn_io_remove_file2(lambda, FALSE, pool));
+ SVN_ERR(svn_io_copy_file(sbox_wc_path(&b, "iota"), lambda, FALSE, pool));
+ SVN_ERR(svn_wc_adm_open3(&adm_access, NULL, b.wc_abspath, TRUE, -1,
+ NULL, NULL, pool));
+
+ {
+ svn_wc_status2_t *status;
+
+ SVN_ERR(svn_wc_status2(&status, lambda, adm_access, pool));
+
+ SVN_TEST_ASSERT(status != NULL);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_modified);
+ SVN_TEST_ASSERT(status->copied == TRUE);
+ }
+
+ /* Simulate a very old style svn ci . -m "QQQ" on the WC root */
+ SVN_ERR(svn_wc_process_committed4(sbox_wc_path(&b, "A_copied"), adm_access,
+ TRUE, 12, "2014-10-01T19:00:50.966679Z",
+ "me", NULL, TRUE, TRUE,
+ NULL, pool));
+
+ {
+ unsigned char digest[APR_MD5_DIGESTSIZE];
+
+ /* Use the fact that iota has the same checksum to ease committing */
+
+ SVN_ERR(svn_io_file_checksum (digest, lambda, pool));
+
+ SVN_ERR(svn_wc_process_committed4(lambda, adm_access,
+ TRUE, 12, "2014-10-01T19:00:50.966679Z",
+ "me", NULL, TRUE, TRUE,
+ digest, pool));
+ }
+
+ {
+ svn_wc_status2_t *status;
+
+ SVN_ERR(svn_wc_status2(&status, lambda, adm_access, pool));
+
+ /* Node is still modified, as we didn't change the text base! */
+ SVN_TEST_ASSERT(status != NULL);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->copied == FALSE);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_legacy_commit2(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ svn_wc_adm_access_t *adm_access;
+ const char *lambda;
+ svn_wc_committed_queue_t *queue;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "legacy_commit2", opts, pool));
+ SVN_ERR(sbox_add_and_commit_greek_tree(&b));
+
+ SVN_ERR(sbox_wc_copy(&b, "A", "A_copied"));
+
+ lambda = sbox_wc_path(&b, "A_copied/B/lambda");
+
+ SVN_ERR(svn_io_remove_file2(lambda, FALSE, pool));
+ SVN_ERR(svn_io_copy_file(sbox_wc_path(&b, "iota"), lambda, FALSE, pool));
+
+ SVN_ERR(svn_wc_adm_open3(&adm_access, NULL, b.wc_abspath, TRUE, -1,
+ NULL, NULL, pool));
+
+ {
+ svn_wc_status2_t *status;
+
+ SVN_ERR(svn_wc_status2(&status, lambda, adm_access, pool));
+
+ SVN_TEST_ASSERT(status != NULL);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_modified);
+ SVN_TEST_ASSERT(status->copied == TRUE);
+ }
+
+ /* Simulate an old style svn ci . -m "QQQ" on the WC root */
+ queue = svn_wc_committed_queue_create(pool);
+ SVN_ERR(svn_wc_queue_committed(&queue, sbox_wc_path(&b, "A_copied"), adm_access,
+ TRUE, NULL, FALSE, FALSE, NULL, pool));
+ {
+ unsigned char digest[APR_MD5_DIGESTSIZE];
+
+ /* Use the fact that iota has the same checksum to ease committing */
+
+ SVN_ERR(svn_io_file_checksum(digest, lambda, pool));
+
+ SVN_ERR(svn_wc_queue_committed(&queue, lambda, adm_access, FALSE, NULL,
+ FALSE, FALSE, digest, pool));
+ }
+
+ SVN_ERR(svn_wc_process_committed_queue(queue, adm_access,
+ 12, "2014-10-01T19:00:50.966679Z",
+ "me", pool));
+
+ {
+ svn_wc_status2_t *status;
+
+ SVN_ERR(svn_wc_status2(&status, lambda, adm_access, pool));
+
+ /* Node is still modified, as we didn't change the text base! */
+ SVN_TEST_ASSERT(status != NULL);
+ SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal);
+ SVN_TEST_ASSERT(status->copied == FALSE);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+test_internal_file_modified(const svn_test_opts_t *opts, apr_pool_t *pool)
+{
+ svn_test__sandbox_t b;
+ svn_boolean_t modified;
+ const char *iota_path;
+ apr_time_t time;
+
+ SVN_ERR(svn_test__sandbox_create(&b, "internal_file_modified_p",
+ opts, pool));
+ SVN_ERR(sbox_add_and_commit_greek_tree(&b));
+
+ iota_path = sbox_wc_path(&b, "iota");
+
+ /* No modification, timestamps match.*/
+ SVN_ERR(svn_wc__internal_file_modified_p(&modified, b.wc_ctx->db,
+ iota_path, FALSE, pool));
+ SVN_TEST_ASSERT(!modified);
+
+ SVN_ERR(svn_wc__internal_file_modified_p(&modified, b.wc_ctx->db,
+ iota_path, TRUE, pool));
+ SVN_TEST_ASSERT(!modified);
+
+ /* Change timestamp on 'iota' and check. */
+ SVN_ERR(svn_io_file_affected_time(&time, iota_path, pool));
+ SVN_ERR(svn_io_set_file_affected_time(time + apr_time_from_sec(1),
+ iota_path, pool));
+ SVN_ERR(svn_wc__internal_file_modified_p(&modified, b.wc_ctx->db,
+ iota_path, FALSE, pool));
+ SVN_TEST_ASSERT(!modified);
+
+ SVN_ERR(svn_wc__internal_file_modified_p(&modified, b.wc_ctx->db,
+ iota_path, TRUE, pool));
+ SVN_TEST_ASSERT(!modified);
+
+ /* Modify 'iota' to be different size. */
+ SVN_ERR(sbox_file_write(&b, iota_path, "new iota"));
+ SVN_ERR(svn_wc__internal_file_modified_p(&modified, b.wc_ctx->db,
+ iota_path, FALSE, pool));
+ SVN_TEST_ASSERT(modified);
+
+ SVN_ERR(svn_wc__internal_file_modified_p(&modified, b.wc_ctx->db,
+ iota_path, TRUE, pool));
+ SVN_TEST_ASSERT(modified);
+
+ /* Working copy is smart and able to detect changes in files of different
+ * size even if timestamp didn't change. */
+ SVN_ERR(svn_io_set_file_affected_time(time, iota_path, pool));
+ SVN_ERR(svn_wc__internal_file_modified_p(&modified, b.wc_ctx->db,
+ iota_path, FALSE, pool));
+ SVN_TEST_ASSERT(modified);
+
+ SVN_ERR(svn_wc__internal_file_modified_p(&modified, b.wc_ctx->db,
+ iota_path, TRUE, pool));
+ SVN_TEST_ASSERT(modified);
+
+ return SVN_NO_ERROR;
+}
+
+/* ---------------------------------------------------------------------- */
+/* The list of test functions */
+
+static int max_threads = 2;
+
+static struct svn_test_descriptor_t test_funcs[] =
+ {
+ SVN_TEST_NULL,
+ SVN_TEST_OPTS_PASS(test_node_get_base,
+ "test_node_get_base"),
+ SVN_TEST_OPTS_PASS(test_node_get_origin,
+ "test_node_get_origin"),
+ SVN_TEST_OPTS_PASS(test_externals_parse,
+ "test svn_wc_parse_externals_description3"),
+ SVN_TEST_PASS2(test_externals_parse_erratic,
+ "parse erratic externals definition"),
+ SVN_TEST_OPTS_PASS(test_legacy_commit1,
+ "test legacy commit1"),
+ SVN_TEST_OPTS_PASS(test_legacy_commit2,
+ "test legacy commit2"),
+ SVN_TEST_OPTS_PASS(test_internal_file_modified,
+ "test internal_file_modified"),
+ SVN_TEST_NULL
+ };
+
+SVN_TEST_MAIN
diff --git a/subversion/tests/manual/README b/subversion/tests/manual/README
new file mode 100644
index 0000000..fa3149a
--- /dev/null
+++ b/subversion/tests/manual/README
@@ -0,0 +1,3 @@
+These tests have not made it into the automatic test suite, probably because
+they have no automatic validation, or because they take an insane amount of
+time to finish.
diff --git a/subversion/tests/manual/tree-conflicts-add-vs-add.py b/subversion/tests/manual/tree-conflicts-add-vs-add.py
new file mode 100755
index 0000000..659a9d2
--- /dev/null
+++ b/subversion/tests/manual/tree-conflicts-add-vs-add.py
@@ -0,0 +1,423 @@
+#!/usr/bin/env python
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Setup your environment so that `which svn` shows the svn you want to test.
+# Just run this file, no parameters. Test files are created in /tmp/...
+# To adjust which tests are run, look at 'p = Permutations(' way below...
+#
+# This runs an insane amount of tests of add-vs.-add situations during update
+# and switch. The output scrolls by, and a summary for all tests with simple
+# greps and wc.db SELECT results is printed in the very end.
+#
+# There is no automatic validation. You have to read the results.
+#
+# To run a gdb in any given place, replace a 'svn()' with 'gdbsvn()',
+# presumably in either up() or sw().
+
+from subprocess import Popen, PIPE, call
+from types import FunctionType, ListType, TupleType
+import tempfile, os
+from itertools import product
+
+
+def run_cmd(cmd, verbose=True, shell=False):
+ if verbose:
+ if shell:
+ print('\n---', cmd)
+ else:
+ print('\n---', ' '.join(cmd))
+ p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=shell)
+ stdout,stderr = p.communicate()[0:2]
+ if verbose and stdout:
+ print stdout,
+ if verbose and stderr:
+ print stderr,
+ return stdout,stderr
+
+def qsvn(*args):
+ return run_cmd(['svn'] + list(args), False)
+
+def svn(*args):
+ return run_cmd(['svn'] + list(args))
+
+def gdbsvn(*args):
+ call(['gdb', '--args', 'svn'] + list(args))
+ return 'gdb', 'gdb'
+
+def shell(script):
+ return run_cmd(script, shell=True)
+
+def rewrite_file(path, contents, auto_newline='\n\n'):
+ dirname = os.path.dirname(path)
+ if dirname and not os.path.lexists(dirname):
+ os.makedirs(dirname)
+ f = open(path, "w")
+ f.write(contents + (auto_newline or ''))
+ f.close()
+
+def read_file(path):
+ try:
+ f = open(path, "r")
+ contents = f.read()
+ f.close()
+ return contents
+ except IOError:
+ return None
+
+def append_file(path, contents):
+ dirname = os.path.dirname(path)
+ if not os.path.lexists(dirname):
+ os.makedirs(dirname)
+ f = open(path, "a")
+ f.write(contents)
+ f.close()
+
+def remove_file(path):
+ if os.path.isfile(path):
+ os.remove(path)
+
+def tempdir():
+ return tempfile.mkdtemp(prefix='tc_add-')
+
+j = os.path.join
+
+f = 'f' # file
+d = 'd' # dir
+l = 'l' # symbolic link
+
+class TestContext:
+ def __init__(self):
+ self.base = tempdir()
+ self.repos = j(self.base, 'repos')
+ self.URL = 'file://' + self.repos
+ shell('svnadmin create "' + self.repos + '"')
+ self.WC = j(self.base, 'wc')
+ svn('checkout', self.URL, self.WC)
+
+ def create_wc2(self):
+ self.WC2 = j(self.base, 'wc2')
+ svn('checkout', self.URL, self.WC2)
+
+ def wc(self, *relpath):
+ if not relpath:
+ return self.WC
+ return j(self.WC, *relpath)
+
+ def wc2(self, *relpath):
+ if not relpath:
+ return self.WC2
+ return j(self.WC2, *relpath)
+
+ def url(self, relpath):
+ if not relpath:
+ return self.URL
+ return self.URL + '/' + relpath
+
+ def head(self):
+ out, err = qsvn('info', self.URL)
+ revstr = 'Revision: '
+ for line in out.split('\n'):
+ if line.startswith(revstr):
+ return int(line.strip()[len(revstr):])
+
+
+
+def unver(ctx, target, kind, content=None):
+ if not content:
+ content = 'content of ' + os.path.basename(target)
+
+ if kind == f:
+ rewrite_file(target, content)
+ shell('cat ' + target)
+ elif kind == l:
+ os.symlink('symlink', target)
+ else:
+ os.mkdir(target)
+
+
+
+def add(ctx, target, kind, content):
+ unver(ctx, target, kind, content)
+ svn('add', target)
+ svn('ps', 'PROP_add_' + content + '_' + kind,
+ 'content_add_' + content + '_' + kind,
+ target)
+
+
+
+def cp(ctx, suffix, target, kind, content=None):
+ if not content:
+ content = 'modified ' + os.path.basename(target)
+
+ if kind == f:
+ src = ctx.url('file' + suffix)
+ elif kind == l:
+ src = ctx.url('symlink' + suffix)
+ else:
+ src = ctx.url('dir' + suffix)
+
+ src = src + '@1'
+ svn('copy', src, target)
+ svn('ps', 'PROP_copied_' + content + '_' + kind + suffix,
+ 'content_' + content + '_' + kind + suffix,
+ target)
+ svn('status', target)
+
+
+def cp1(ctx, target, kind, content=None):
+ return cp(ctx, '1', target, kind, content)
+
+def cp2(ctx, target, kind, content=None):
+ return cp(ctx, '2', target, kind, content)
+
+def prepare_cp(ctx, kind, suffix):
+ if kind == f:
+ target = ctx.wc('file' + suffix)
+ if not os.path.lexists(target):
+ rewrite_file(target, 'copy source ' + suffix)
+ svn('add', target)
+ svn('ps', 'PROP_copy_source_' + kind + suffix,
+ 'content_copy_source_' + kind + suffix,
+ target)
+ elif kind == l:
+ target = ctx.wc('symlink' + suffix)
+ if not os.path.lexists(target):
+ os.symlink('copy_source' + suffix, target)
+ svn('add', target)
+ svn('ps', 'PROP_copy_source_' + kind + suffix,
+ 'content_copy_source_' + kind + suffix,
+ target)
+ else:
+ target = ctx.wc('dir' + suffix)
+ svn('mkdir', target)
+ svn('ps', 'PROP_copy_source_' + kind + suffix,
+ 'content_copy_source_' + kind + suffix,
+ target)
+
+def postpare_cp(ctx, kind, suffix):
+ if kind == f:
+ target = ctx.wc('file' + suffix)
+ rewrite_file(target, 'local mod on copy source ' + suffix)
+ svn('ps', 'PROP_local_copy_src_mod_' + kind + suffix,
+ 'content_local_copy_src_mod_' + kind + suffix,
+ target)
+ elif kind == l:
+ target = ctx.wc('symlink' + suffix)
+ svn('ps', 'PROP_local_copy_src_mod_' + kind + suffix,
+ 'content_local_copy_src_mod_' + kind + suffix,
+ target)
+ else:
+ target = ctx.wc('dir' + suffix)
+ svn('ps', 'PROP_local_copy_src_mod_' + kind + suffix,
+ 'content_local_copy_src_mod_' + kind + suffix,
+ target)
+
+
+def prepare(ctx, action, kind):
+ if action == cp1:
+ prepare_cp(ctx, kind, '1')
+ elif action == cp2:
+ prepare_cp(ctx, kind, '2')
+
+
+def postpare(ctx, action, kind):
+ if action == cp1:
+ postpare_cp(ctx, kind, '1')
+ elif action == cp2:
+ postpare_cp(ctx, kind, '2')
+
+
+
+def co(name, local_action, local_kind, incoming_action, incoming_kind):
+ ctx = TestContext()
+
+ prepare(ctx, local_action, local_kind)
+ prepare(ctx, incoming_action, incoming_kind)
+
+ svn('commit', '-mm', ctx.WC)
+ svn('up', ctx.WC)
+
+ head = ctx.head()
+ print(head)
+
+ ctx.create_wc2()
+ target = ctx.wc2(name)
+ incoming_action(ctx, target, incoming_kind, 'incoming')
+ svn('commit', '-mm', ctx.WC2)
+
+ target = ctx.wc(name)
+ local_action(ctx, target, local_kind, 'local')
+
+ postpare(ctx, local_action, local_kind)
+ postpare(ctx, incoming_action, incoming_kind)
+
+ # get conflicts
+ o1,e1 = shell('yes p | svn checkout "' + ctx.URL + '" ' +
+ '"' + ctx.WC + '"')
+ o2,e2 = svn('status', ctx.WC)
+ o3,e3 = run_cmd(['sqlite3', ctx.wc('.svn', 'wc.db'),
+ 'select local_relpath,properties from base_node; '
+ +'select local_relpath,properties from working_node; '
+ +'select local_relpath,properties from actual_node; '
+ ])
+ return o1, e1, o2, e2, o3, e3
+
+
+def up(name, local_action, local_kind, incoming_action, incoming_kind):
+ ctx = TestContext()
+
+ prepare(ctx, local_action, local_kind)
+ prepare(ctx, incoming_action, incoming_kind)
+
+ svn('commit', '-mm', ctx.WC)
+ svn('up', ctx.WC)
+
+ head = ctx.head()
+ print(head)
+
+ target = ctx.wc(name)
+ incoming_action(ctx, target, incoming_kind, 'incoming')
+ svn('commit', '-mm', ctx.WC)
+
+ # time warp
+ svn('update', '-r', str(head), ctx.WC)
+
+ local_action(ctx, target, local_kind, 'local')
+
+ postpare(ctx, local_action, local_kind)
+ postpare(ctx, incoming_action, incoming_kind)
+
+ # get conflicts
+ o1,e1 = svn('update', '--accept=postpone', ctx.WC)
+ o2,e2 = svn('status', ctx.WC)
+ o3,e3 = run_cmd(['sqlite3', ctx.wc('.svn', 'wc.db'),
+ 'select local_relpath,properties from base_node; '
+ +'select local_relpath,properties from working_node; '
+ +'select local_relpath,properties from actual_node; '
+ ])
+ return o1, e1, o2, e2, o3, e3
+
+
+def sw(name, local_action, local_kind, incoming_action, incoming_kind):
+ ctx = TestContext()
+ prepare(ctx, local_action, local_kind)
+ prepare(ctx, incoming_action, incoming_kind)
+ svn('commit', '-mm', ctx.WC)
+
+ svn('mkdir', '-mm', ctx.url('trunk'))
+ svn('copy', '-mm', ctx.url('trunk'), ctx.url('branch'))
+
+ svn('up', ctx.WC)
+
+ target = ctx.wc('branch', name)
+ incoming_action(ctx, target, incoming_kind, 'incoming')
+ svn('commit', '-mm', ctx.WC)
+ svn('up', ctx.WC)
+
+ target = ctx.wc('trunk', name)
+ local_action(ctx, target, local_kind, 'local')
+
+ postpare(ctx, local_action, local_kind)
+ postpare(ctx, incoming_action, incoming_kind)
+
+ # get conflicts
+ o1,e1 = svn('switch', '--accept=postpone', ctx.url('branch'), ctx.wc('trunk'))
+ o2,e2 = svn('status', ctx.WC)
+ o3,e3 = run_cmd(['sqlite3', ctx.wc('trunk', '.svn', 'wc.db'),
+ 'select local_relpath,properties from base_node; select'
+ + ' local_relpath,properties from working_node;'])
+ # This is a bit stupid. Someone rewire this.
+ return o1, e1, o2, e2, o3, e3
+
+
+# This controls which tests are run. All possible combinations are tested.
+# The elements are functions for up,sw and add,cp1,cp2,unver, and they are
+# simple strings for f (file), l (symlink), d (directory).
+#
+# cmd local action and kind incoming action and kind
+p = product((co,up,sw), (add,cp1,unver), (f,l,d), (add,cp2,cp1), (f,l,d))
+
+# Incoming cp1 is meant to match up only with local cp1. Also, cp1-cp1 is
+# supposed to perform identical copies in both incoming and local, so they
+# only make sense with matching kinds. Skip all rows that don't match this:
+skip = lambda row: (row[3] == cp1 and (row[4] != row[2] or row[1] != cp1)
+ #Select subsets if desired
+ #or (row[0] != up or row[3] not in [cp1, cp2])
+ #or (row[2] != l and row[4] != l)
+ #
+ #or row not in (
+ ##[up, cp1, l, add, l],
+ ##[up, cp1, f, cp2, l],
+ #[up, cp1, f, cp2, f],
+ ##[up, cp1, f, add, f],
+ ##[up, cp1, f, add, l],
+ ##[up, add, l, add, l],
+ #)
+ )
+
+
+
+
+def nameof(thing):
+ if isinstance(thing, FunctionType):
+ return thing.__name__
+ if isinstance(thing, ListType) or isinstance(thing, TupleType):
+ return '_'.join([ nameof(thang) for thang in thing])
+ return str(thing)
+
+
+def analyze(name, outs):
+ stats = []
+ for o in outs:
+ for line in o.split('\n'):
+ if (line.startswith('svn: ') or line.startswith(' > ')
+ or line.find(name) > -1):
+ stats.append(line)
+ return stats
+
+#os.environ['SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_SLEEP_FOR_TIMESTAMPS'] = 'yes'
+
+results = []
+name = None
+try:
+ # there is probably a better way than this:
+ for row in list(p):
+ if skip(row):
+ continue
+ name = nameof(row)
+ print(name)
+ test_func = row[0]
+ results.append( (name, analyze( name, test_func( name, *row[1:] ) )) )
+except:
+ if name:
+ print('Error during', name)
+ raise
+finally:
+ lines = []
+ for result in results:
+ name = result[0]
+ if result[1]:
+ lines.append('----- ' + name)
+ for stat in result[1]:
+ lines.append(stat)
+ else:
+ lines.append('----- ' + name + ': nothing.')
+ dump = '\n'.join(lines)
+ print(dump)
+ rewrite_file('tree-conflicts-add-vs-add.py.results', dump)
diff --git a/subversion/tests/svn_test.h b/subversion/tests/svn_test.h
new file mode 100644
index 0000000..29724b3
--- /dev/null
+++ b/subversion/tests/svn_test.h
@@ -0,0 +1,387 @@
+/*
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#ifndef SVN_TEST_H
+#define SVN_TEST_H
+
+#ifndef SVN_ENABLE_DEPRECATION_WARNINGS_IN_TESTS
+#undef SVN_DEPRECATED
+#define SVN_DEPRECATED
+#endif /* ! SVN_ENABLE_DEPRECATION_WARNINGS_IN_TESTS */
+
+#include <stdio.h>
+
+#include <apr_pools.h>
+
+#include "svn_delta.h"
+#include "svn_path.h"
+#include "svn_types.h"
+#include "svn_error.h"
+#include "svn_string.h"
+#include "svn_auth.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+/** Handy macro to test a condition, returning SVN_ERR_TEST_FAILED if FALSE
+ *
+ * This macro should be used in place of SVN_ERR_ASSERT() since we don't
+ * want to core-dump the test.
+ */
+#define SVN_TEST_ASSERT(expr) \
+ do { \
+ if (!(expr)) \
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, \
+ "assertion '%s' failed at %s:%d", \
+ #expr, __FILE__, __LINE__); \
+ } while (0)
+
+/**
+ * Macro for testing assumptions when the context does not allow
+ * returning an svn_error_t*.
+ *
+ * Will write to stderr and cause a segfault if EXPR is false.
+ */
+#define SVN_TEST_ASSERT_NO_RETURN(expr) \
+ do { \
+ if (!(expr)) \
+ { \
+ unsigned int z_e_r_o_p_a_g_e__; \
+ fprintf(stderr, "TEST ASSERTION FAILED: %s\n", #expr); \
+ z_e_r_o_p_a_g_e__ = *(volatile unsigned int*)0; \
+ *(volatile unsigned int*)0 = z_e_r_o_p_a_g_e__; \
+ } \
+ } while (0)
+
+/** Handy macro for testing an expected svn_error_t return value.
+ * EXPECTED must be a real error (neither SVN_NO_ERROR nor APR_SUCCESS).
+ * The error returned by EXPR will be cleared.
+ */
+#define SVN_TEST_ASSERT_ERROR(expr, expected) \
+ do { \
+ svn_error_t *err__ = (expr); \
+ SVN_ERR_ASSERT((expected)); \
+ if (err__ == SVN_NO_ERROR || err__->apr_err != (expected)) \
+ return err__ ? svn_error_createf(SVN_ERR_TEST_FAILED, err__, \
+ "Expected error %s but got %s", \
+ svn_error_symbolic_name(expected), \
+ svn_error_symbolic_name( \
+ err__->apr_err)) \
+ : svn_error_createf(SVN_ERR_TEST_FAILED, err__, \
+ "Expected error %s but got %s", \
+ svn_error_symbolic_name(expected), \
+ "SVN_NO_ERROR"); \
+ svn_error_clear(err__); \
+ } while (0)
+
+/** Handy macro for testing that an svn_error_t is returned.
+ * The result must be neither SVN_NO_ERROR nor SVN_ERR_ASSERTION_FAIL.
+ * The error returned by EXPR will be cleared.
+ */
+#define SVN_TEST_ASSERT_ANY_ERROR(expr) \
+ do { \
+ svn_error_t *err__ = (expr); \
+ if (err__ == SVN_NO_ERROR || err__->apr_err == SVN_ERR_ASSERTION_FAIL)\
+ return err__ ? svn_error_createf(SVN_ERR_TEST_FAILED, err__, \
+ "Expected error but got %s", \
+ "SVN_ERR_ASSERTION_FAIL") \
+ : svn_error_createf(SVN_ERR_TEST_FAILED, err__, \
+ "Expected error but got %s", \
+ "SVN_NO_ERROR"); \
+ svn_error_clear(err__); \
+ } while (0)
+
+/** Handy macro for testing string equality.
+ *
+ * EXPR and/or EXPECTED_EXPR may be NULL which compares equal to NULL and
+ * not equal to any non-NULL string.
+ */
+#define SVN_TEST_STRING_ASSERT(expr, expected_expr) \
+ do { \
+ const char *tst_str1 = (expr); \
+ const char *tst_str2 = (expected_expr); \
+ \
+ if (tst_str2 == NULL && tst_str1 == NULL) \
+ break; \
+ if ((tst_str1 == NULL) || (tst_str2 == NULL) \
+ || (strcmp(tst_str2, tst_str1) != 0)) \
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, \
+ "Strings not equal\n Expected: '%s'\n Found: '%s'" \
+ "\n at %s:%d", \
+ tst_str2, tst_str1, __FILE__, __LINE__); \
+ } while(0)
+
+ /** Handy macro for testing integer equality.
+ */
+#define SVN_TEST_INT_ASSERT(expr, expected_expr) \
+ do { \
+ apr_int64_t tst_int1 = (expr); \
+ apr_int64_t tst_int2 = (expected_expr); \
+ \
+ if (tst_int1 != tst_int2) \
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, \
+ "Integers not equal\n" \
+ " Expected: %" APR_INT64_T_FMT "\n" \
+ " Found: %" APR_INT64_T_FMT "\n" \
+ " at %s:%d", \
+ tst_int2, tst_int1, __FILE__, __LINE__); \
+ } while(0)
+
+
+/* Baton for any arguments that need to be passed from main() to svn
+ * test functions.
+ */
+typedef struct svn_test_opts_t
+{
+ /* The name of the application (to generate unique names) */
+ const char *prog_name;
+ /* Description of the fs backend that should be used for testing. */
+ const char *fs_type;
+ /* Config file. */
+ const char *config_file;
+ /* Source dir. */
+ const char *srcdir;
+ /* Repository dir: temporary directory to create repositories in as subdir */
+ const char *repos_dir;
+ /* Repository url: The url to access REPOS_DIR as */
+ const char *repos_url;
+ /* Memcached server. */
+ const char *memcached_server;
+ /* Repository template: pre-created repository to copy for tests */
+ const char *repos_template;
+ /* Minor version to use for servers and FS backends, or zero to use
+ the current latest version. */
+ int server_minor_version;
+ svn_boolean_t verbose;
+ /* Add future "arguments" here. */
+} svn_test_opts_t;
+
+/* Prototype for test driver functions. */
+typedef svn_error_t* (*svn_test_driver2_t)(apr_pool_t *pool);
+
+/* Prototype for test driver functions which need options. */
+typedef svn_error_t* (*svn_test_driver_opts_t)(const svn_test_opts_t *opts,
+ apr_pool_t *pool);
+
+/* Prototype for test predicate functions. */
+typedef svn_boolean_t (*svn_test_predicate_func_t)(const svn_test_opts_t *opts,
+ const char *predicate_value,
+ apr_pool_t *pool);
+
+/* Test modes. */
+enum svn_test_mode_t
+ {
+ svn_test_pass,
+ svn_test_xfail,
+ svn_test_skip,
+ svn_test_all
+ };
+
+/* Structure for runtime test predicates. */
+struct svn_test_predicate_t
+{
+ /* The predicate function. */
+ svn_test_predicate_func_t func;
+
+ /* The value that the predicate function tests. */
+ const char *value;
+
+ /* The test mode that's used if the predicate matches. */
+ enum svn_test_mode_t alternate_mode;
+
+ /* Description for the test log */
+ const char *description;
+};
+
+
+/* Each test gets a test descriptor, holding the function and other
+ * associated data.
+ */
+struct svn_test_descriptor_t
+{
+ /* Is the test marked XFAIL? */
+ enum svn_test_mode_t mode;
+
+ /* A pointer to the test driver function. */
+ svn_test_driver2_t func2;
+
+ /* A pointer to the test driver function. */
+ svn_test_driver_opts_t func_opts;
+
+ /* A descriptive message for this test. */
+ const char *msg;
+
+ /* An optional description of a work-in-progress test. */
+ const char *wip;
+
+ /* An optional runtiume predicate. */
+ struct svn_test_predicate_t predicate;
+};
+
+/* All Subversion test programs include an array of svn_test_descriptor_t's
+ * (all of our sub-tests) that begins and ends with a SVN_TEST_NULL entry.
+ * This descriptor must be passed to the svn_test_main function.
+ *
+ * MAX_THREADS is the number of concurrent tests to run. Set to 1 if
+ * all tests must be executed serially. Numbers less than 1 mean
+ * "unbounded".
+ */
+int svn_test_main(int argc, const char *argv[], int max_threads,
+ struct svn_test_descriptor_t *test_funcs);
+
+/* Boilerplate for the main function for each test program. */
+#define SVN_TEST_MAIN \
+ int main(int argc, const char *argv[]) \
+ { \
+ return svn_test_main(argc, argv, \
+ max_threads, test_funcs); \
+ }
+
+/* A null initializer for the test descriptor. */
+#define SVN_TEST_NULL {0}
+
+/* Initializer for PASS tests */
+#define SVN_TEST_PASS2(func, msg) {svn_test_pass, func, NULL, msg}
+
+/* Initializer for XFAIL tests */
+#define SVN_TEST_XFAIL2(func, msg) {svn_test_xfail, func, NULL, msg}
+
+/* Initializer for conditional XFAIL tests */
+#define SVN_TEST_XFAIL_COND2(func, p, msg) \
+ {(p) ? svn_test_xfail : svn_test_pass, func, NULL, msg}
+
+/* Initializer for SKIP tests */
+#define SVN_TEST_SKIP2(func, p, msg) \
+ {(p) ? svn_test_skip : svn_test_pass, func, NULL, msg}
+
+/* Similar macros, but for tests needing options. */
+#define SVN_TEST_OPTS_PASS(func, msg) {svn_test_pass, NULL, func, msg}
+#define SVN_TEST_OPTS_XFAIL(func, msg) {svn_test_xfail, NULL, func, msg}
+#define SVN_TEST_OPTS_XFAIL_COND(func, p, msg) \
+ {(p) ? svn_test_xfail : svn_test_pass, NULL, func, msg}
+#define SVN_TEST_OPTS_XFAIL_OTOH(func, msg, predicate) \
+ {svn_test_xfail, NULL, func, msg, NULL, predicate}
+#define SVN_TEST_OPTS_SKIP(func, p, msg) \
+ {(p) ? svn_test_skip : svn_test_pass, NULL, func, msg}
+
+/* Initializer for XFAIL tests for works-in-progress. */
+#define SVN_TEST_WIMP(func, msg, wip) \
+ {svn_test_xfail, func, NULL, msg, wip}
+#define SVN_TEST_WIMP_COND(func, p, msg, wip) \
+ {(p) ? svn_test_xfail : svn_test_pass, func, NULL, msg, wip}
+#define SVN_TEST_OPTS_WIMP(func, msg, wip) \
+ {svn_test_xfail, NULL, func, msg, wip}
+#define SVN_TEST_OPTS_WIMP_COND(func, p, msg, wip) \
+ {(p) ? svn_test_xfail : svn_test_pass, NULL, func, msg, wip}
+
+
+/* Return a pseudo-random number based on SEED, and modify SEED.
+ *
+ * This is a "good" pseudo-random number generator, intended to replace
+ * all those "bad" rand() implementations out there.
+ */
+apr_uint32_t svn_test_rand(apr_uint32_t *seed);
+
+
+/* Add PATH to the test cleanup list. */
+void svn_test_add_dir_cleanup(const char *path);
+
+
+/* A simple representation for a tree node. */
+typedef struct svn_test__tree_entry_t
+{
+ const char *path; /* relpath of this node */
+ const char *contents; /* text contents, or NULL for a directory */
+}
+svn_test__tree_entry_t;
+
+/* Wrapper for an array of svn_test__tree_entry_t's. */
+typedef struct svn_test__tree_t
+{
+ svn_test__tree_entry_t *entries;
+ int num_entries;
+}
+svn_test__tree_t;
+
+
+/* The standard Greek tree, terminated by a node with path=NULL. */
+extern const svn_test__tree_entry_t svn_test__greek_tree_nodes[21];
+
+
+/* Returns a path to BASENAME within the transient data area for the
+ current test. */
+const char *
+svn_test_data_path(const char* basename, apr_pool_t *result_pool);
+
+
+/* Some tests require the --srcdir option and should use this function
+ * to get it. If not provided, print a warning and attempt to run the
+ * tests under the assumption that --srcdir is the current directory. */
+svn_error_t *
+svn_test_get_srcdir(const char **srcdir,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool);
+
+/* Initializes a standard auth baton for accessing the repositories */
+svn_error_t *
+svn_test__init_auth_baton(svn_auth_baton_t **baton,
+ apr_pool_t *result_pool);
+
+/* Create a temp folder for test & schedule it for automatic cleanup.
+ * Uses POOL for all allocations. */
+svn_error_t *
+svn_test_make_sandbox_dir(const char **sb_dir_p,
+ const char *sb_name,
+ apr_pool_t *pool);
+
+/*
+ * Test predicates
+ */
+
+#define SVN_TEST_PASS_IF_FS_TYPE_IS(fs_type) \
+ { svn_test__fs_type_is, fs_type, svn_test_pass, \
+ "PASS if fs-type = " fs_type }
+
+#define SVN_TEST_PASS_IF_FS_TYPE_IS_NOT(fs_type) \
+ { svn_test__fs_type_not, fs_type, svn_test_pass, \
+ "PASS if fs-type != " fs_type }
+
+/* Return TRUE if the fs-type in OPTS matches PREDICATE_VALUE. */
+svn_boolean_t
+svn_test__fs_type_is(const svn_test_opts_t *opts,
+ const char *predicate_value,
+ apr_pool_t *pool);
+
+
+/* Return TRUE if the fs-type in OPTS does not matches PREDICATE_VALUE. */
+svn_boolean_t
+svn_test__fs_type_not(const svn_test_opts_t *opts,
+ const char *predicate_value,
+ apr_pool_t *pool);
+
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+#endif /* SVN_TEST_H */
diff --git a/subversion/tests/svn_test_fs.c b/subversion/tests/svn_test_fs.c
new file mode 100644
index 0000000..22f0009
--- /dev/null
+++ b/subversion/tests/svn_test_fs.c
@@ -0,0 +1,972 @@
+/* svn_test_fs.c --- test helpers for the filesystem
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdlib.h>
+#include <string.h>
+#include <apr_pools.h>
+
+#include "svn_test.h"
+
+#include "svn_string.h"
+#include "svn_utf.h"
+#include "svn_pools.h"
+#include "svn_error.h"
+#include "svn_fs.h"
+#include "svn_path.h"
+#include "svn_delta.h"
+#include "svn_hash.h"
+
+#include "svn_test_fs.h"
+
+
+/*-------------------------------------------------------------------*/
+
+/** Helper routines. **/
+
+
+static void
+fs_warning_handler(void *baton, svn_error_t *err)
+{
+ svn_handle_warning(stderr, err);
+}
+
+/* This is used only by bdb fs tests. */
+svn_error_t *
+svn_test__fs_new(svn_fs_t **fs_p, apr_pool_t *pool)
+{
+ apr_hash_t *fs_config = apr_hash_make(pool);
+ apr_hash_set(fs_config, SVN_FS_CONFIG_BDB_TXN_NOSYNC,
+ APR_HASH_KEY_STRING, "1");
+
+ *fs_p = svn_fs_new(fs_config, pool);
+ if (! *fs_p)
+ return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+ "Couldn't alloc a new fs object.");
+
+ /* Provide a warning function that just dumps the message to stderr. */
+ svn_fs_set_warning_func(*fs_p, fs_warning_handler, NULL);
+
+ return SVN_NO_ERROR;
+}
+
+
+static apr_hash_t *
+make_fs_config(const char *fs_type,
+ int server_minor_version,
+ apr_pool_t *pool)
+{
+ apr_hash_t *fs_config = apr_hash_make(pool);
+
+ svn_hash_sets(fs_config, SVN_FS_CONFIG_BDB_TXN_NOSYNC, "1");
+ svn_hash_sets(fs_config, SVN_FS_CONFIG_BDB_LOG_AUTOREMOVE, "1");
+ svn_hash_sets(fs_config, SVN_FS_CONFIG_FS_TYPE, fs_type);
+ if (server_minor_version)
+ {
+ svn_hash_sets(fs_config, SVN_FS_CONFIG_COMPATIBLE_VERSION,
+ apr_psprintf(pool, "1.%d.0", server_minor_version));
+ if (server_minor_version == 6 || server_minor_version == 7)
+ svn_hash_sets(fs_config, SVN_FS_CONFIG_PRE_1_8_COMPATIBLE, "1");
+ else if (server_minor_version == 5)
+ apr_hash_set(fs_config, SVN_FS_CONFIG_PRE_1_6_COMPATIBLE,
+ APR_HASH_KEY_STRING, "1");
+ else if (server_minor_version == 4)
+ apr_hash_set(fs_config, SVN_FS_CONFIG_PRE_1_5_COMPATIBLE,
+ APR_HASH_KEY_STRING, "1");
+ else if (server_minor_version == 3)
+ apr_hash_set(fs_config, SVN_FS_CONFIG_PRE_1_4_COMPATIBLE,
+ APR_HASH_KEY_STRING, "1");
+ }
+ return fs_config;
+}
+
+
+static svn_error_t *
+create_fs(svn_fs_t **fs_p,
+ const char *name,
+ const char *fs_type,
+ int server_minor_version,
+ apr_hash_t *overlay_fs_config,
+ apr_pool_t *pool)
+{
+ apr_hash_t *fs_config = make_fs_config(fs_type, server_minor_version, pool);
+
+ if (overlay_fs_config)
+ fs_config = apr_hash_overlay(pool, overlay_fs_config, fs_config);
+
+ /* If there's already a repository named NAME, delete it. Doing
+ things this way means that repositories stick around after a
+ failure for postmortem analysis, but also that tests can be
+ re-run without cleaning out the repositories created by prior
+ runs. */
+ SVN_ERR(svn_io_remove_dir2(name, TRUE, NULL, NULL, pool));
+
+ SVN_ERR(svn_fs_create2(fs_p, name, fs_config, pool, pool));
+ if (! *fs_p)
+ return svn_error_create(SVN_ERR_FS_GENERAL, NULL,
+ "Couldn't alloc a new fs object.");
+
+ /* Provide a warning function that just dumps the message to stderr. */
+ svn_fs_set_warning_func(*fs_p, fs_warning_handler, NULL);
+
+ /* Register this fs for cleanup. */
+ svn_test_add_dir_cleanup(name);
+
+ return SVN_NO_ERROR;
+}
+
+/* If OPTS specifies a filesystem type of 'fsfs' and provides a config file,
+ * copy that file into the filesystem FS and set *MUST_REOPEN to TRUE, else
+ * set *MUST_REOPEN to FALSE. */
+static svn_error_t *
+maybe_install_fs_conf(svn_fs_t *fs,
+ const svn_test_opts_t *opts,
+ svn_boolean_t *must_reopen,
+ apr_pool_t *pool)
+{
+ *must_reopen = FALSE;
+ if (! opts->config_file)
+ return SVN_NO_ERROR;
+
+ if (strcmp(opts->fs_type, "fsfs") == 0)
+ {
+ *must_reopen = TRUE;
+ return svn_io_copy_file(opts->config_file,
+ svn_path_join(svn_fs_path(fs, pool),
+ "fsfs.conf", pool),
+ FALSE /* copy_perms */,
+ pool);
+ }
+
+ if (strcmp(opts->fs_type, "fsx") == 0)
+ {
+ *must_reopen = TRUE;
+ return svn_io_copy_file(opts->config_file,
+ svn_path_join(svn_fs_path(fs, pool),
+ "fsx.conf", pool),
+ FALSE /* copy_perms */,
+ pool);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+svn_error_t *
+svn_test__create_bdb_fs(svn_fs_t **fs_p,
+ const char *name,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ return create_fs(fs_p, name, "bdb", opts->server_minor_version, NULL, pool);
+}
+
+
+svn_error_t *
+svn_test__create_fs2(svn_fs_t **fs_p,
+ const char *name,
+ const svn_test_opts_t *opts,
+ apr_hash_t *fs_config,
+ apr_pool_t *pool)
+{
+ svn_boolean_t must_reopen;
+
+ SVN_ERR(create_fs(fs_p, name, opts->fs_type, opts->server_minor_version,
+ fs_config, pool));
+
+ SVN_ERR(maybe_install_fs_conf(*fs_p, opts, &must_reopen, pool));
+ if (must_reopen)
+ {
+ SVN_ERR(svn_fs_open2(fs_p, name, fs_config, pool, pool));
+ svn_fs_set_warning_func(*fs_p, fs_warning_handler, NULL);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+svn_error_t *
+svn_test__create_fs(svn_fs_t **fs_p,
+ const char *name,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ return svn_test__create_fs2(fs_p, name, opts, NULL, pool);
+}
+
+/* Create a test repository named NAME and optionally return the repos
+   object, its URL and its local abspath.  When OPTS supplies a shared
+   repos_dir + repos_url (and the caller wants a URL back), the
+   repository is created under that shared directory; otherwise it is
+   created in a subdir NAME of the current directory.  See
+   svn_test_fs.h for the public contract. */
+svn_error_t *
+svn_test__create_repos2(svn_repos_t **repos_p,
+ const char **repos_url,
+ const char **repos_dirent,
+ const char *name,
+ const svn_test_opts_t *opts,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ svn_repos_t *repos;
+ svn_boolean_t must_reopen;
+ const char *repos_abspath;
+ /* The repos object must outlive this function only if the caller
+    asked to receive it; otherwise scratch lifetime suffices. */
+ apr_pool_t *repos_pool = repos_p ? result_pool : scratch_pool;
+ svn_boolean_t init_svnserve = FALSE;
+ apr_hash_t *fs_config = make_fs_config(opts->fs_type,
+ opts->server_minor_version,
+ repos_pool);
+
+ /* Shared-repository mode: derive a unique name from the test
+    program and place the repository under OPTS->repos_dir. */
+ if (repos_url && opts->repos_dir && opts->repos_url)
+ {
+ name = apr_psprintf(scratch_pool, "%s-%s", opts->prog_name,
+ svn_dirent_basename(name, NULL));
+
+ repos_abspath = svn_dirent_join(opts->repos_dir, name, scratch_pool);
+
+ SVN_ERR(svn_dirent_get_absolute(&repos_abspath, repos_abspath,
+ scratch_pool));
+
+ SVN_ERR(svn_io_make_dir_recursively(repos_abspath, scratch_pool));
+
+ *repos_url = svn_path_url_add_component2(opts->repos_url, name,
+ result_pool);
+
+ /* An svn:// URL means svnserve will serve this repository, so a
+    minimal svnserve configuration must be written below. */
+ if (strstr(opts->repos_url, "svn://"))
+ init_svnserve = TRUE;
+ }
+ else
+ {
+ SVN_ERR(svn_dirent_get_absolute(&repos_abspath, name, scratch_pool));
+
+ /* Without a shared repos_url, hand back a file:// URL if asked. */
+ if (repos_url)
+ SVN_ERR(svn_uri_get_file_url_from_dirent(repos_url, repos_abspath,
+ result_pool));
+ }
+
+ /* If there's already a repository named NAME, delete it.  Doing
+ things this way means that repositories stick around after a
+ failure for postmortem analysis, but also that tests can be
+ re-run without cleaning out the repositories created by prior
+ runs. */
+ SVN_ERR(svn_io_remove_dir2(repos_abspath, TRUE, NULL, NULL, scratch_pool));
+
+ SVN_ERR(svn_repos_create(&repos, repos_abspath, NULL, NULL, NULL,
+ fs_config, repos_pool));
+
+ /* Register this repo for cleanup. */
+ svn_test_add_dir_cleanup(repos_abspath);
+
+ /* maybe_install_fs_conf() may rewrite the FS config on disk; if it
+    did, the repository has to be reopened to pick the change up. */
+ SVN_ERR(maybe_install_fs_conf(svn_repos_fs(repos), opts, &must_reopen,
+ scratch_pool));
+ if (must_reopen)
+ {
+ SVN_ERR(svn_repos_open3(&repos, repos_abspath, NULL, repos_pool,
+ scratch_pool));
+ }
+
+ svn_fs_set_warning_func(svn_repos_fs(repos), fs_warning_handler, NULL);
+
+ /* Write svnserve.conf and passwd so ra_svn tests can authenticate
+    as the standard test users (jrandom / jconstant). */
+ if (init_svnserve)
+ {
+ const char *cfg;
+ const char *pwd;
+
+ cfg = svn_dirent_join(repos_abspath, "conf/svnserve.conf", scratch_pool);
+ SVN_ERR(svn_io_remove_file2(cfg, FALSE, scratch_pool));
+ SVN_ERR(svn_io_file_create(cfg,
+ "[general]\n"
+ "auth-access = write\n"
+ "password-db = passwd\n",
+ scratch_pool));
+
+ pwd = svn_dirent_join(repos_abspath, "conf/passwd", scratch_pool);
+ SVN_ERR(svn_io_remove_file2(pwd, FALSE, scratch_pool));
+ SVN_ERR(svn_io_file_create(pwd,
+ "[users]\n"
+ "jrandom = rayjandom\n"
+ "jconstant = rayjandom\n",
+ scratch_pool));
+ }
+
+ /* Hand back only what the caller asked for. */
+ if (repos_p)
+ *repos_p = repos;
+ if (repos_dirent)
+ *repos_dirent = apr_pstrdup(result_pool, repos_abspath);
+
+ return SVN_NO_ERROR;
+}
+
+/* Compatibility wrapper: create a repository in subdir NAME without
+   returning its URL or dirent.  Delegates to svn_test__create_repos2()
+   with POOL doing double duty as result and scratch pool. */
+svn_error_t *
+svn_test__create_repos(svn_repos_t **repos_p,
+ const char *name,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ return svn_error_trace(
+ svn_test__create_repos2(repos_p, NULL, NULL, name,
+ opts, pool, pool));
+}
+
+/* Read all of STREAM into *STRING, a new stringbuf allocated in POOL.
+   Reads in deliberately tiny chunks; see the comment on BUF below. */
+svn_error_t *
+svn_test__stream_to_string(svn_stringbuf_t **string,
+ svn_stream_t *stream,
+ apr_pool_t *pool)
+{
+ char buf[10]; /* Making this really small because a) hey, they're
+ just tests, not the prime place to beg for
+ optimization, and b) we've had repository
+ problems in the past that only showed up when
+ reading a file into a buffer that couldn't hold the
+ file's whole contents -- the kind of thing you'd
+ like to catch while testing.
+
+ ### cmpilato todo: Perhaps some day this size can
+ be passed in as a parameter.  Not high on my list
+ of priorities today, though. */
+
+ apr_size_t len;
+ svn_stringbuf_t *str = svn_stringbuf_create_empty(pool);
+
+ do
+ {
+ /* svn_stream_read_full() updates LEN to the bytes actually
+    read, which is less than sizeof(buf) only at end-of-stream. */
+ len = sizeof(buf);
+ SVN_ERR(svn_stream_read_full(stream, buf, &len));
+
+ /* Now copy however many bytes were *actually* read into str. */
+ svn_stringbuf_appendbytes(str, buf, len);
+
+ } while (len); /* Continue until we're told that no bytes were
+ read. */
+
+ *string = str;
+ return SVN_NO_ERROR;
+}
+
+/* Replace the contents of the file at PATH under (txn root) ROOT with
+   the NUL-terminated string CONTENTS, delivered as a single text
+   delta.  Uses a subpool of POOL for all temporary allocations. */
+svn_error_t *
+svn_test__set_file_contents(svn_fs_root_t *root,
+ const char *path,
+ const char *contents,
+ apr_pool_t *pool)
+{
+ svn_txdelta_window_handler_t consumer_func;
+ void *consumer_baton;
+ svn_string_t string;
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ /* Base and result checksums are not verified here (both NULL). */
+ SVN_ERR(svn_fs_apply_textdelta(&consumer_func, &consumer_baton,
+ root, path, NULL, NULL, subpool));
+
+ string.data = contents;
+ string.len = strlen(contents);
+ SVN_ERR(svn_txdelta_send_string(&string, consumer_func,
+ consumer_baton, subpool));
+
+ svn_pool_destroy(subpool);
+ return SVN_NO_ERROR;
+}
+
+
+/* Read the full contents of the file at PATH under ROOT into *STR,
+   allocated in POOL. */
+svn_error_t *
+svn_test__get_file_contents(svn_fs_root_t *root,
+ const char *path,
+ svn_stringbuf_t **str,
+ apr_pool_t *pool)
+{
+ svn_stream_t *stream;
+
+ SVN_ERR(svn_fs_file_contents(&stream, root, path, pool));
+ SVN_ERR(svn_test__stream_to_string(str, stream, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Read all the entries in directory PATH under transaction or
+   revision root ROOT, copying their full paths into the TREE_ENTRIES
+   hash, and recursing when those entries are directories.
+
+   NOTE: the dirent values stored in TREE_ENTRIES are allocated in
+   SCRATCH_POOL; only the keys (allocated in TREE_ENTRIES' own pool)
+   remain valid after SCRATCH_POOL is cleared. */
+static svn_error_t *
+get_dir_entries(apr_hash_t *tree_entries,
+ svn_fs_root_t *root,
+ const char *path,
+ apr_pool_t *scratch_pool)
+{
+ apr_hash_t *entries;
+ apr_hash_index_t *hi;
+ apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+ apr_pool_t *result_pool = apr_hash_pool_get(tree_entries);
+
+ SVN_ERR(svn_fs_dir_entries(&entries, root, path, scratch_pool));
+
+ /* Copy this list to the master list with the path prepended to the
+ names */
+ for (hi = apr_hash_first(scratch_pool, entries); hi; hi = apr_hash_next(hi))
+ {
+ void *val;
+ svn_fs_dirent_t *dirent;
+ const char *full_path;
+ svn_pool_clear(iterpool);
+
+ apr_hash_this(hi, NULL, NULL, &val);
+ dirent = val;
+
+ /* Calculate the full path of this entry (by appending the name
+ to the path thus far) */
+ full_path = svn_path_join(path, dirent->name, result_pool);
+
+ /* Now, copy this dirent to the master hash, but this time, use
+ the full path for the key */
+ apr_hash_set(tree_entries, full_path, APR_HASH_KEY_STRING, dirent);
+
+ /* If this entry is a directory, recurse into the tree. */
+ if (dirent->kind == svn_node_dir)
+ SVN_ERR(get_dir_entries(tree_entries, root, full_path, iterpool));
+ }
+
+ /* Destroy the iteration subpool instead of leaking it into
+    SCRATCH_POOL; the recursion above would otherwise accumulate one
+    live subpool per directory level until SCRATCH_POOL is cleared. */
+ svn_pool_destroy(iterpool);
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Verify that PATH under ROOT is: a directory if contents is NULL;
+   a file with contents CONTENTS otherwise.  Also cross-checks that
+   svn_fs_check_path/is_dir/is_file agree about the node's kind, and
+   that the reported file length matches the expected contents. */
+static svn_error_t *
+validate_tree_entry(svn_fs_root_t *root,
+ const char *path,
+ const char *contents,
+ apr_pool_t *pool)
+{
+ svn_stream_t *rstream;
+ svn_stringbuf_t *rstring;
+ svn_node_kind_t kind;
+ svn_boolean_t is_dir, is_file;
+
+ /* Verify that node types are reported consistently. */
+ SVN_ERR(svn_fs_check_path(&kind, root, path, pool));
+ SVN_ERR(svn_fs_is_dir(&is_dir, root, path, pool));
+ SVN_ERR(svn_fs_is_file(&is_file, root, path, pool));
+
+ SVN_TEST_ASSERT(!is_dir || kind == svn_node_dir);
+ SVN_TEST_ASSERT(!is_file || kind == svn_node_file);
+ SVN_TEST_ASSERT(is_dir || is_file);
+
+ /* Verify that this is the expected type of node: a file must come
+    with expected CONTENTS, a directory must not. */
+ if ((!is_dir && !contents) || (is_dir && contents))
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "node '%s' in tree was of unexpected node type",
+ path);
+
+ /* Verify that the contents are as expected (files only).  CONTENTS
+    is known non-NULL here because of the check above. */
+ if (! is_dir)
+ {
+ svn_stringbuf_t *expected = svn_stringbuf_create(contents, pool);
+
+ /* File lengths. */
+ svn_filesize_t length;
+ SVN_ERR(svn_fs_file_length(&length, root, path, pool));
+ SVN_TEST_ASSERT(expected->len == length);
+
+ /* Text contents. */
+ SVN_ERR(svn_fs_file_contents(&rstream, root, path, pool));
+ SVN_ERR(svn_test__stream_to_string(&rstring, rstream, pool));
+ if (! svn_stringbuf_compare(rstring, expected))
+ return svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "node '%s' in tree had unexpected contents",
+ path);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+
+/* Given a transaction or revision root (ROOT), check to see if the
+   tree that grows from that root has all the path entries, and only
+   those entries, passed in the array ENTRIES (which is an array of
+   NUM_ENTRIES svn_test__tree_entry_t's) */
+svn_error_t *
+svn_test__validate_tree(svn_fs_root_t *root,
+ svn_test__tree_entry_t *entries,
+ int num_entries,
+ apr_pool_t *pool)
+{
+ apr_hash_t *tree_entries, *expected_entries;
+ apr_pool_t *subpool = svn_pool_create(pool);
+ apr_pool_t *iterpool = svn_pool_create(pool);
+ svn_stringbuf_t *extra_entries = NULL;
+ svn_stringbuf_t *missing_entries = NULL;
+ svn_stringbuf_t *corrupt_entries = NULL;
+ apr_hash_index_t *hi;
+ int i;
+
+ /* There should be no entry with this name. */
+ const char *na_name = "es-vee-en";
+
+ /* Create our master hash for storing the entries */
+ tree_entries = apr_hash_make(subpool);
+
+ /* Recursively get the whole tree.  NOTE: after the pool clear
+    below, only the *keys* of TREE_ENTRIES (allocated in SUBPOOL) are
+    valid; the dirent values were allocated in ITERPOOL and must not
+    be dereferenced from here on. */
+ SVN_ERR(get_dir_entries(tree_entries, root, "", iterpool));
+ svn_pool_clear(iterpool);
+
+ /* Create a hash for storing our expected entries */
+ expected_entries = apr_hash_make(subpool);
+
+ /* Copy our array of expected entries into a hash. */
+ for (i = 0; i < num_entries; i++)
+ apr_hash_set(expected_entries, entries[i].path,
+ APR_HASH_KEY_STRING, &(entries[i]));
+
+ /* For each entry in our EXPECTED_ENTRIES hash, try to find that
+ entry in the TREE_ENTRIES hash given us by the FS.  If we find
+ that object, remove it from the TREE_ENTRIES.  If we don't find
+ it, there's a problem to report! */
+ for (hi = apr_hash_first(subpool, expected_entries);
+ hi;
+ hi = apr_hash_next(hi))
+ {
+ const void *key;
+ apr_ssize_t keylen;
+ void *val;
+ svn_test__tree_entry_t *entry;
+
+ svn_pool_clear(iterpool);
+ apr_hash_this(hi, &key, &keylen, &val);
+ entry = val;
+
+ /* Verify that the entry exists in our full list of entries. */
+ val = apr_hash_get(tree_entries, key, keylen);
+ if (val)
+ {
+ svn_error_t *err;
+
+ if ((err = validate_tree_entry(root, entry->path,
+ entry->contents, iterpool)))
+ {
+ /* If we don't have a corrupt entries string, make one. */
+ if (! corrupt_entries)
+ corrupt_entries = svn_stringbuf_create_empty(subpool);
+
+ /* Append this entry name to the list of corrupt entries. */
+ svn_stringbuf_appendcstr(corrupt_entries, " ");
+ svn_stringbuf_appendbytes(corrupt_entries, (const char *)key,
+ keylen);
+ svn_stringbuf_appendcstr(corrupt_entries, "\n");
+ svn_error_clear(err);
+ }
+
+ apr_hash_set(tree_entries, key, keylen, NULL);
+ }
+ else
+ {
+ /* If we don't have a missing entries string, make one. */
+ if (! missing_entries)
+ missing_entries = svn_stringbuf_create_empty(subpool);
+
+ /* Append this entry name to the list of missing entries. */
+ svn_stringbuf_appendcstr(missing_entries, " ");
+ svn_stringbuf_appendbytes(missing_entries, (const char *)key,
+ keylen);
+ svn_stringbuf_appendcstr(missing_entries, "\n");
+ }
+ }
+
+ /* Any entries still left in TREE_ENTRIES are extra ones that are
+ not expected to be present.  Assemble a string with their names. */
+ for (hi = apr_hash_first(subpool, tree_entries);
+ hi;
+ hi = apr_hash_next(hi))
+ {
+ const void *key;
+ apr_ssize_t keylen;
+
+ apr_hash_this(hi, &key, &keylen, NULL);
+
+ /* If we don't have an extra entries string, make one. */
+ if (! extra_entries)
+ extra_entries = svn_stringbuf_create_empty(subpool);
+
+ /* Append this entry name to the list of extra entries. */
+ svn_stringbuf_appendcstr(extra_entries, " ");
+ svn_stringbuf_appendbytes(extra_entries, (const char *)key, keylen);
+ svn_stringbuf_appendcstr(extra_entries, "\n");
+ }
+
+ /* Test that non-existent paths will not be found.
+ * Skip this test if somebody sneakily added NA_NAME. */
+ if (!svn_hash_gets(expected_entries, na_name))
+ {
+ svn_node_kind_t kind;
+ svn_boolean_t is_dir, is_file;
+
+ /* Verify that the node is reported as "n/a". */
+ SVN_ERR(svn_fs_check_path(&kind, root, na_name, subpool));
+ SVN_ERR(svn_fs_is_dir(&is_dir, root, na_name, subpool));
+ SVN_ERR(svn_fs_is_file(&is_file, root, na_name, subpool));
+
+ SVN_TEST_ASSERT(kind == svn_node_none);
+ SVN_TEST_ASSERT(!is_file);
+ SVN_TEST_ASSERT(!is_dir);
+ }
+
+ if (missing_entries || extra_entries || corrupt_entries)
+ {
+ /* Build the error before destroying the pools:
+    svn_error_createf() formats the message into the error's own
+    pool, so the entry lists (allocated in SUBPOOL) are no longer
+    needed afterwards.  Previously this path returned without
+    destroying ITERPOOL and SUBPOOL, leaking them into POOL. */
+ svn_error_t *err = svn_error_createf
+ (SVN_ERR_FS_GENERAL, NULL,
+ "Repository tree does not look as expected.\n"
+ "Corrupt entries:\n%s"
+ "Missing entries:\n%s"
+ "Extra entries:\n%s",
+ corrupt_entries ? corrupt_entries->data : "",
+ missing_entries ? missing_entries->data : "",
+ extra_entries ? extra_entries->data : "");
+
+ svn_pool_destroy(iterpool);
+ svn_pool_destroy(subpool);
+ return err;
+ }
+
+ svn_pool_destroy(iterpool);
+ svn_pool_destroy(subpool);
+ return SVN_NO_ERROR;
+}
+
+
+/* Verify that the changed-paths reported for ROOT are exactly the
+   keys of EXPECTED (values are not checked).  Errors out on the first
+   path found in one set but not the other. */
+svn_error_t *
+svn_test__validate_changes(svn_fs_root_t *root,
+ apr_hash_t *expected,
+ apr_pool_t *pool)
+{
+ svn_fs_path_change_iterator_t *iter;
+ apr_hash_t *actual;
+ apr_hash_index_t *hi;
+ svn_fs_path_change3_t *change;
+
+ SVN_ERR(svn_fs_paths_changed3(&iter, root, pool, pool));
+ SVN_ERR(svn_fs_path_change_get(&change, iter));
+
+ /* We collect all changes b/c this is the easiest way to check for an
+ exact match against EXPECTED. */
+ actual = apr_hash_make(pool);
+ while (change)
+ {
+ /* CHANGE is only valid until the next iterator call, so copy the
+    path into POOL before advancing. */
+ const char *path = apr_pstrmemdup(pool, change->path.data,
+ change->path.len);
+ /* No duplicates! */
+ SVN_TEST_ASSERT(!apr_hash_get(actual, path, change->path.len));
+ apr_hash_set(actual, path, change->path.len, path);
+
+ SVN_ERR(svn_fs_path_change_get(&change, iter));
+ }
+
+#if 0
+ /* Print ACTUAL and EXPECTED. */
+ {
+ int i;
+ for (i=0, hi = apr_hash_first(pool, expected); hi; hi = apr_hash_next(hi))
+ SVN_DBG(("expected[%d] = '%s'\n", i++, apr_hash_this_key(hi)));
+ for (i=0, hi = apr_hash_first(pool, actual); hi; hi = apr_hash_next(hi))
+ SVN_DBG(("actual[%d] = '%s'\n", i++, apr_hash_this_key(hi)));
+ }
+#endif
+
+ /* Every expected path must have been reported... */
+ for (hi = apr_hash_first(pool, expected); hi; hi = apr_hash_next(hi))
+ if (NULL == svn_hash_gets(actual, apr_hash_this_key(hi)))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Path '%s' missing from actual changed-paths",
+ (const char *)apr_hash_this_key(hi));
+
+ /* ... and nothing beyond the expected paths may appear. */
+ for (hi = apr_hash_first(pool, actual); hi; hi = apr_hash_next(hi))
+ if (NULL == svn_hash_gets(expected, apr_hash_this_key(hi)))
+ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
+ "Path '%s' missing from expected changed-paths",
+ (const char *)apr_hash_this_key(hi));
+
+ return SVN_NO_ERROR;
+}
+
+/* Execute the NUM_EDITS commands of SCRIPT against TXN_ROOT.  See
+   svn_test__txn_script_command_t for the command vocabulary; commands
+   other than 'a', 'c', 'd', 'e' are silently ignored. */
+svn_error_t *
+svn_test__txn_script_exec(svn_fs_root_t *txn_root,
+ svn_test__txn_script_command_t *script,
+ int num_edits,
+ apr_pool_t *pool)
+{
+ int i;
+ apr_pool_t *iterpool = svn_pool_create(pool);
+
+ /* Run through the list of edits, making the appropriate edit on
+ that entry in the TXN_ROOT. */
+ for (i = 0; i < num_edits; i++)
+ {
+ const char *path = script[i].path;
+ const char *param1 = script[i].param1;
+ int cmd = script[i].cmd;
+ /* A NULL PARAM1 marks a directory operation (no file contents). */
+ svn_boolean_t is_dir = (param1 == 0);
+
+ svn_pool_clear(iterpool);
+ switch (cmd)
+ {
+ case 'a':
+ /* Add: a directory when PARAM1 is NULL, otherwise a file
+    whose contents are PARAM1. */
+ if (is_dir)
+ {
+ SVN_ERR(svn_fs_make_dir(txn_root, path, iterpool));
+ }
+ else
+ {
+ SVN_ERR(svn_fs_make_file(txn_root, path, iterpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, path,
+ param1, iterpool));
+ }
+ break;
+
+ case 'c':
+ /* Copy: PATH in the youngest revision becomes PARAM1 in the
+    transaction. */
+ {
+ svn_revnum_t youngest;
+ svn_fs_root_t *rev_root;
+ svn_fs_t *fs = svn_fs_root_fs(txn_root);
+
+ SVN_ERR(svn_fs_youngest_rev(&youngest, fs, iterpool));
+ SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest, iterpool));
+ SVN_ERR(svn_fs_copy(rev_root, path, txn_root, param1, iterpool));
+ }
+ break;
+
+ case 'd':
+ /* Delete PATH. */
+ SVN_ERR(svn_fs_delete(txn_root, path, iterpool));
+ break;
+
+ case 'e':
+ /* Edit: replace the file's contents with PARAM1. */
+ if (! is_dir)
+ {
+ SVN_ERR(svn_test__set_file_contents(txn_root, path,
+ param1, iterpool));
+ }
+ break;
+
+ default:
+ break;
+ }
+ }
+
+ svn_pool_destroy(iterpool);
+ return SVN_NO_ERROR;
+}
+
+
+/* The canonical Greek Tree used throughout the test suite: 20 real
+   entries plus a terminating {NULL, NULL} sentinel (hence the declared
+   size of 21).  A NULL contents field marks a directory. */
+const struct svn_test__tree_entry_t svn_test__greek_tree_nodes[21] = {
+ { "iota", "This is the file 'iota'.\n" },
+ { "A", NULL },
+ { "A/mu", "This is the file 'mu'.\n" },
+ { "A/B", NULL },
+ { "A/B/lambda", "This is the file 'lambda'.\n" },
+ { "A/B/E", NULL },
+ { "A/B/E/alpha", "This is the file 'alpha'.\n" },
+ { "A/B/E/beta", "This is the file 'beta'.\n" },
+ { "A/B/F", NULL },
+ { "A/C", NULL },
+ { "A/D", NULL },
+ { "A/D/gamma", "This is the file 'gamma'.\n" },
+ { "A/D/G", NULL },
+ { "A/D/G/pi", "This is the file 'pi'.\n" },
+ { "A/D/G/rho", "This is the file 'rho'.\n" },
+ { "A/D/G/tau", "This is the file 'tau'.\n" },
+ { "A/D/H", NULL },
+ { "A/D/H/chi", "This is the file 'chi'.\n" },
+ { "A/D/H/psi", "This is the file 'psi'.\n" },
+ { "A/D/H/omega", "This is the file 'omega'.\n" },
+ { NULL, NULL },
+};
+
+/* Verify that every *file* of the Greek Tree under ROOT has its
+   expected contents.  Directory entries (NULL contents) are skipped;
+   extra nodes in the tree are not detected here -- use
+   svn_test__validate_tree() for an exact match. */
+svn_error_t *
+svn_test__check_greek_tree(svn_fs_root_t *root,
+ apr_pool_t *pool)
+{
+ svn_stream_t *rstream;
+ svn_stringbuf_t *rstring;
+ svn_stringbuf_t *content;
+ const struct svn_test__tree_entry_t *node;
+ apr_pool_t *iterpool = svn_pool_create(pool);
+
+ /* Loop through the list of files, checking for matching content. */
+ for (node = svn_test__greek_tree_nodes; node->path; node++)
+ {
+ if (node->contents)
+ {
+ svn_pool_clear(iterpool);
+
+ SVN_ERR(svn_fs_file_contents(&rstream, root, node->path, iterpool));
+ SVN_ERR(svn_test__stream_to_string(&rstring, rstream, iterpool));
+ content = svn_stringbuf_create(node->contents, iterpool);
+ if (! svn_stringbuf_compare(rstring, content))
+ return svn_error_createf(SVN_ERR_FS_GENERAL, NULL,
+ "data read != data written in file '%s'.",
+ node->path);
+ }
+ }
+
+ svn_pool_destroy(iterpool);
+ return SVN_NO_ERROR;
+}
+
+/* Create the Greek Tree under TXN_ROOT at directory ROOT_DIR (which
+   the caller must already have created; "" means the root).  The
+   transaction is not committed. */
+svn_error_t *
+svn_test__create_greek_tree_at(svn_fs_root_t *txn_root,
+ const char *root_dir,
+ apr_pool_t *pool)
+{
+ const struct svn_test__tree_entry_t *node;
+ apr_pool_t *iterpool = svn_pool_create(pool);
+
+ for (node = svn_test__greek_tree_nodes; node->path; node++)
+ {
+ const char *path;
+ svn_pool_clear(iterpool);
+
+ path = svn_relpath_join(root_dir, node->path, iterpool);
+
+ /* NULL contents marks a directory entry. */
+ if (node->contents)
+ {
+ SVN_ERR(svn_fs_make_file(txn_root, path, iterpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, path, node->contents,
+ iterpool));
+ }
+ else
+ {
+ SVN_ERR(svn_fs_make_dir(txn_root, path, iterpool));
+ }
+ }
+
+ svn_pool_destroy(iterpool);
+ return SVN_NO_ERROR;
+}
+
+/* Convenience wrapper: create the Greek Tree at the root of TXN_ROOT. */
+svn_error_t *
+svn_test__create_greek_tree(svn_fs_root_t *txn_root,
+ apr_pool_t *pool)
+{
+ return svn_test__create_greek_tree_at(txn_root, "", pool);
+}
+
+/* Create a repository named TEST_NAME containing eight revisions that
+   exercise blame/mergeinfo: r1 trunk/tags/branches skeleton, r2 Greek
+   tree on trunk, r3/r5 trunk edits to A/mu, r4 branch copy, r6 branch
+   edit, r7 merge trunk->branch, r8 merge branch->trunk.  Each commit
+   uses a distinct author so blame output is distinguishable. */
+svn_error_t *
+svn_test__create_blame_repository(svn_repos_t **out_repos,
+ const char *test_name,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ svn_repos_t *repos;
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *txn_root, *revision_root;
+ svn_revnum_t youngest_rev = 0;
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ /* Create a filesystem and repository. */
+ SVN_ERR(svn_test__create_repos(&repos, test_name,
+ opts, pool));
+ *out_repos = repos;
+
+ fs = svn_repos_fs(repos);
+
+ /* Revision 1:  Add trunk, tags, branches. */
+ SVN_ERR(svn_repos_fs_begin_txn_for_commit(&txn, repos, youngest_rev,
+ "initial", "log msg", subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "trunk", subpool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "tags", subpool));
+ SVN_ERR(svn_fs_make_dir(txn_root, "branches", subpool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+
+ /* Revision 2:  Add the Greek tree on the trunk. */
+ SVN_ERR(svn_repos_fs_begin_txn_for_commit(&txn, repos, youngest_rev,
+ "initial", "log msg", subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__create_greek_tree_at(txn_root, "trunk", subpool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+
+ /* Revision 3:  Tweak trunk/A/mu. */
+ SVN_ERR(svn_repos_fs_begin_txn_for_commit(&txn, repos, youngest_rev,
+ "user-trunk", "log msg", subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "trunk/A/mu",
+ "A\nB\nC\nD\nE\nF\nG\nH\nI", subpool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+
+ /* Revision 4:  Copy trunk to branches/1.0.x. */
+ SVN_ERR(svn_repos_fs_begin_txn_for_commit(&txn, repos, youngest_rev,
+ "copy", "log msg", subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_fs_revision_root(&revision_root, fs, youngest_rev, subpool));
+ SVN_ERR(svn_fs_copy(revision_root, "trunk",
+ txn_root, "branches/1.0.x",
+ subpool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+
+ /* Revision 5:  Tweak trunk/A/mu. */
+ SVN_ERR(svn_repos_fs_begin_txn_for_commit(&txn, repos, youngest_rev,
+ "user-trunk", "log msg", subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "trunk/A/mu",
+ "A\nB\nC -- trunk edit\nD\nE\nF\nG\nH\nI",
+ subpool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+
+ /* Revision 6:  Tweak branches/1.0.x/A/mu. */
+ SVN_ERR(svn_repos_fs_begin_txn_for_commit(&txn, repos, youngest_rev,
+ "user-branch", "log msg", subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "branches/1.0.x/A/mu",
+ "A\nB\nC\nD -- branch edit\nE\nF\nG\nH\nI",
+ subpool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+
+ /* Revision 7:  Merge trunk to branch. */
+ SVN_ERR(svn_repos_fs_begin_txn_for_commit(&txn, repos, youngest_rev,
+ "user-merge1", "log msg", subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "branches/1.0.x/A/mu",
+ "A\nB\nC -- trunk edit\nD -- branch edit"
+ "\nE\nF\nG\nH\nI", subpool));
+ SVN_ERR(svn_fs_change_node_prop(txn_root, "/branches/1.0.x", "svn:mergeinfo",
+ svn_string_create("/trunk:4-6", subpool),
+ subpool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+ svn_pool_clear(subpool);
+
+ /* Revision 8:  Merge branch to trunk. */
+ SVN_ERR(svn_repos_fs_begin_txn_for_commit(&txn, repos, youngest_rev,
+ "user-merge2", "log msg", subpool));
+ SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool));
+ SVN_ERR(svn_test__set_file_contents(txn_root, "trunk/A/mu",
+ "A\nB\nC -- trunk edit\nD -- branch edit\n"
+ "E\nF\nG\nH\nI", subpool));
+ SVN_ERR(svn_fs_change_node_prop(txn_root, "/trunk", "svn:mergeinfo",
+ svn_string_create("/branches/1.0.x:4-7", subpool),
+ subpool));
+ SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, subpool));
+ SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev));
+
+ svn_pool_destroy(subpool);
+
+ return SVN_NO_ERROR;
+}
diff --git a/subversion/tests/svn_test_fs.h b/subversion/tests/svn_test_fs.h
new file mode 100644
index 0000000..26cd26e
--- /dev/null
+++ b/subversion/tests/svn_test_fs.h
@@ -0,0 +1,203 @@
+/* svn_test_fs.h --- test helpers for the filesystem
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#ifndef SVN_TEST_FS_H
+#define SVN_TEST_FS_H
+
+#include <apr_pools.h>
+#include "svn_error.h"
+#include "svn_fs.h"
+#include "svn_repos.h"
+#include "svn_delta.h"
+#include "svn_test.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+/*-------------------------------------------------------------------*/
+
+/** Helper routines for filesystem functionality. **/
+
+
+/* Set *FS_P to a fresh, unopened FS object, with the right warning
+ handling function set. */
+svn_error_t *
+svn_test__fs_new(svn_fs_t **fs_p, apr_pool_t *pool);
+
+
+/* Create a filesystem which is always of type "bdb" in a subdir NAME
+ and return a new FS object which points to it.  (Ignores any
+ fs-type declaration in OPTS.) */
+svn_error_t *
+svn_test__create_bdb_fs(svn_fs_t **fs_p,
+ const char *name,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool);
+
+
+/* Create a filesystem based on OPTS in a subdir NAME and return a new
+ FS object which points to it. Override the default test filesystem
+ config with values from FS_CONFIG. */
+svn_error_t *
+svn_test__create_fs2(svn_fs_t **fs_p,
+ const char *name,
+ const svn_test_opts_t *opts,
+ apr_hash_t *fs_config,
+ apr_pool_t *pool);
+
+/* The same as svn_test__create_fs2() but with FS_CONFIG set to NULL. */
+svn_error_t *
+svn_test__create_fs(svn_fs_t **fs_p,
+ const char *name,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool);
+
+
+/* Create a repository with a filesystem based on OPTS in a subdir NAME
+ and return a new REPOS object which points to it. */
+svn_error_t *
+svn_test__create_repos(svn_repos_t **repos_p,
+ const char *name,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool);
+
+/* Create a repository with a filesystem based on OPTS in a subdir NAME.
+ Optionally return the new REPOS object, the directory it was created
+ in, and/or the URL of the repository. */
+svn_error_t *
+svn_test__create_repos2(svn_repos_t **repos_p,
+ const char **repos_url,
+ const char **repos_dirent,
+ const char *name,
+ const svn_test_opts_t *opts,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool);
+
+
+/* Read all data from a generic read STREAM, and return it in STRING.
+ Allocate the svn_stringbuf_t in POOL. (All data in STRING will be
+ dup'ed from STREAM using POOL too.) */
+svn_error_t *
+svn_test__stream_to_string(svn_stringbuf_t **string,
+ svn_stream_t *stream,
+ apr_pool_t *pool);
+
+
+/* Set the contents of file in PATH under ROOT to CONTENTS. */
+svn_error_t *
+svn_test__set_file_contents(svn_fs_root_t *root,
+ const char *path,
+ const char *contents,
+ apr_pool_t *pool);
+
+
+/* Get the contents of file in PATH under ROOT, and copy them into
+ STR. */
+svn_error_t *
+svn_test__get_file_contents(svn_fs_root_t *root,
+ const char *path,
+ svn_stringbuf_t **str,
+ apr_pool_t *pool);
+
+
+
+/* The Helper Functions to End All Helper Functions */
+
+/* Given a transaction or revision root (ROOT), check to see if the
+ tree that grows from that root has all the path entries, and only
+ those entries, passed in the array ENTRIES (which is an array of
+ NUM_ENTRIES svn_test__tree_entry_t's). */
+svn_error_t *
+svn_test__validate_tree(svn_fs_root_t *root,
+ svn_test__tree_entry_t *entries,
+ int num_entries,
+ apr_pool_t *pool);
+
+/* Verify that svn_fs_paths_changed3(ROOT) returns a hash with exactly
+ the same keys as EXPECTED_KEYS. Values are not currently verified.
+ */
+svn_error_t *
+svn_test__validate_changes(svn_fs_root_t *root,
+ apr_hash_t *expected_keys,
+ apr_pool_t *pool);
+
+/* Structure for describing script-ish commands to perform on a
+ transaction using svn_test__txn_script_exec(). */
+typedef struct svn_test__txn_script_command_t
+{
+ /* command:
+
+ 'a' -- add (PARAM1 is file contents, or NULL for directories)
+ 'c' -- copy (PARAM1 is target path, copy source is youngest rev)
+ 'd' -- delete
+ 'e' -- edit (PARAM1 is new file contents)
+ */
+ int cmd;
+ const char *path; /* path to resource in the filesystem */
+ const char *param1; /* command parameter (see above) */
+}
+svn_test__txn_script_command_t;
+
+
+/* Execute a "script" SCRIPT on items under TXN_ROOT. */
+svn_error_t *
+svn_test__txn_script_exec(svn_fs_root_t *txn_root,
+ svn_test__txn_script_command_t *script,
+ int num_edits,
+ apr_pool_t *pool);
+
+/* Verify that the tree that exists under ROOT is exactly the Greek
+ Tree. */
+svn_error_t *
+svn_test__check_greek_tree(svn_fs_root_t *root,
+ apr_pool_t *pool);
+
+
+/* Create the Greek Tree under TXN_ROOT. See ./greek-tree.txt. */
+svn_error_t *
+svn_test__create_greek_tree(svn_fs_root_t *txn_root,
+ apr_pool_t *pool);
+
+/* Create the Greek Tree under TXN_ROOT at dir ROOT_DIR.
+ * ROOT_DIR should be created by the caller.
+ *
+ * Note: this function will not commit the transaction. */
+svn_error_t *
+svn_test__create_greek_tree_at(svn_fs_root_t *txn_root,
+ const char *root_dir,
+ apr_pool_t *pool);
+
+/* Create a new repository with a greek tree, trunk, branch and some
+ merges between them. */
+svn_error_t *
+svn_test__create_blame_repository(svn_repos_t **out_repos,
+ const char *test_name,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool);
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+#endif /* SVN_TEST_FS_H */
diff --git a/subversion/tests/svn_test_main.c b/subversion/tests/svn_test_main.c
new file mode 100644
index 0000000..c3537d3
--- /dev/null
+++ b/subversion/tests/svn_test_main.c
@@ -0,0 +1,1125 @@
+/*
+ * svn_test_main.c: shared main() & friends for SVN test-suite programs
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+
+
+#include <string.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <setjmp.h>
+#ifdef WIN32
+#include <crtdbg.h>
+#endif
+
+#include <apr_pools.h>
+#include <apr_general.h>
+#include <apr_signal.h>
+#include <apr_env.h>
+
+#include "svn_cmdline.h"
+#include "svn_opt.h"
+#include "svn_pools.h"
+#include "svn_error.h"
+#include "svn_test.h"
+#include "svn_io.h"
+#include "svn_path.h"
+#include "svn_ctype.h"
+#include "svn_utf.h"
+#include "svn_version.h"
+
+#include "private/svn_cmdline_private.h"
+#include "private/svn_atomic.h"
+#include "private/svn_mutex.h"
+#include "private/svn_sqlite.h"
+
+#include "svn_private_config.h"
+
+#if APR_HAS_THREADS
+# include <apr_thread_proc.h>
+#endif
+
+/* Some Subversion test programs may want to parse options in the
+ argument list, so we remember it here. */
+extern int test_argc;
+extern const char **test_argv;
+int test_argc;
+const char **test_argv;
+
+/* Many tests write to disk. Instead of writing to the current
+ directory, they should use this path as the root of the test data
+ area. */
+static const char *data_path;
+
+/* Test option: Print more output */
+static svn_boolean_t verbose_mode = FALSE;
+
+/* Test option: Print only unexpected results */
+static svn_boolean_t quiet_mode = FALSE;
+
+/* Test option: Remove test directories after success */
+static svn_boolean_t cleanup_mode = FALSE;
+
+/* Test option: Allow segfaults */
+static svn_boolean_t allow_segfaults = FALSE;
+
+/* Test option: Limit testing to a given mode (i.e. XFail, Skip,
+ Pass, All). */
+static enum svn_test_mode_t mode_filter = svn_test_all;
+
+/* Test option: Allow concurrent execution of tests */
+static svn_boolean_t parallel = FALSE;
+
+/* Option parsing enums and structures */
+enum test_options_e {
+ help_opt = SVN_OPT_FIRST_LONGOPT_ID,
+ cleanup_opt,
+ fstype_opt,
+ list_opt,
+ verbose_opt,
+ quiet_opt,
+ config_opt,
+ server_minor_version_opt,
+ allow_segfault_opt,
+ srcdir_opt,
+ reposdir_opt,
+ reposurl_opt,
+ repostemplate_opt,
+ memcached_server_opt,
+ mode_filter_opt,
+ sqlite_log_opt,
+ parallel_opt,
+ fsfs_version_opt
+};
+
+static const apr_getopt_option_t cl_options[] =
+{
+ {"help", help_opt, 0,
+ N_("display this help")},
+ {"cleanup", cleanup_opt, 0,
+ N_("remove test directories after success")},
+ {"config-file", config_opt, 1,
+ N_("specify test config file ARG")},
+ {"fs-type", fstype_opt, 1,
+ N_("specify a filesystem backend type ARG")},
+ {"fsfs-version", fsfs_version_opt, 1,
+ N_("specify the FSFS version ARG")},
+ {"list", list_opt, 0,
+ N_("lists all the tests with their short description")},
+ {"mode-filter", mode_filter_opt, 1,
+ N_("only run/list tests with expected mode ARG = PASS, "
+ "XFAIL, SKIP, or ALL (default)")},
+ {"verbose", verbose_opt, 0,
+ N_("print extra information")},
+ {"server-minor-version", server_minor_version_opt, 1,
+ N_("set the minor version for the server ('3', '4', "
+ "'5', or '6')")},
+ {"quiet", quiet_opt, 0,
+ N_("print only unexpected results")},
+ {"allow-segfaults", allow_segfault_opt, 0,
+ N_("don't trap seg faults (useful for debugging)")},
+ {"srcdir", srcdir_opt, 1,
+ N_("directory which contains test's C source files")},
+ {"repos-dir", reposdir_opt, 1,
+ N_("directory to create repositories in")},
+ {"repos-url", reposurl_opt, 1,
+ N_("the url to access reposdir as")},
+ {"repos-template",repostemplate_opt, 1,
+ N_("the repository to use as template")},
+ {"memcached-server", memcached_server_opt, 1,
+ N_("the memcached server to use")},
+ {"sqlite-logging", sqlite_log_opt, 0,
+ N_("enable SQLite logging")},
+ {"parallel", parallel_opt, 0,
+ N_("allow concurrent execution of tests")},
+ {0, 0, 0, 0}
+};
+
+
+/* ================================================================= */
+/* Stuff for cleanup processing */
+
+/* When non-zero, don't remove test directories */
+static svn_boolean_t skip_cleanup = FALSE;
+
+/* All cleanup actions are registered as cleanups on the cleanup_pool,
+ * which may be thread-specific. */
+#if APR_HAS_THREADS
+/* The thread-local data key for the cleanup pool. */
+static apr_threadkey_t *cleanup_pool_key = NULL;
+
+/* No-op destructor for apr_threadkey_private_create(). */
+static void null_threadkey_dtor(void *stuff) {}
+
+/* Set the thread-specific cleanup pool. */
+static void set_cleanup_pool(apr_pool_t *pool)
+{
+ apr_status_t status = apr_threadkey_private_set(pool, cleanup_pool_key);
+ if (status)
+ {
+ printf("apr_threadkey_private_set() failed with code %ld.\n",
+ (long)status);
+ exit(1);
+ }
+}
+
+/* Get the thread-specific cleanup pool. */
+static apr_pool_t *get_cleanup_pool(void)
+{
+ void *data;
+ apr_status_t status = apr_threadkey_private_get(&data, cleanup_pool_key);
+ if (status)
+ {
+ printf("apr_threadkey_private_get() failed with code %ld.\n",
+ (long)status);
+ exit(1);
+ }
+ return data;
+}
+
+# define cleanup_pool (get_cleanup_pool())
+# define HAVE_PER_THREAD_CLEANUP
+#else
+static apr_pool_t *cleanup_pool = NULL;
+# define set_cleanup_pool(p) (cleanup_pool = (p))
+#endif
+
+/* Used by test_thread to serialize access to stdout. */
+static svn_mutex__t *log_mutex = NULL;
+
+static apr_status_t
+cleanup_rmtree(void *data)
+{
+ if (!skip_cleanup)
+ {
+ apr_pool_t *pool = svn_pool_create(NULL);
+ const char *path = data;
+
+ /* Ignore errors here. */
+ svn_error_t *err = svn_io_remove_dir2(path, FALSE, NULL, NULL, pool);
+ svn_error_clear(err);
+ if (verbose_mode)
+ {
+ if (err)
+ printf("FAILED CLEANUP: %s\n", path);
+ else
+ printf("CLEANUP: %s\n", path);
+ }
+ svn_pool_destroy(pool);
+ }
+ return APR_SUCCESS;
+}
+
+
+
+void
+svn_test_add_dir_cleanup(const char *path)
+{
+ if (cleanup_mode)
+ {
+ const char *abspath;
+ svn_error_t *err;
+
+ /* All cleanup functions use the *same* pool (not subpools of it).
+ Thus, we need to synchronize. */
+ err = svn_mutex__lock(log_mutex);
+ if (err)
+ {
+ if (verbose_mode)
+ printf("FAILED svn_mutex__lock in svn_test_add_dir_cleanup.\n");
+ svn_error_clear(err);
+ return;
+ }
+
+ err = svn_path_get_absolute(&abspath, path, cleanup_pool);
+ svn_error_clear(err);
+ if (!err)
+ apr_pool_cleanup_register(cleanup_pool, abspath, cleanup_rmtree,
+ apr_pool_cleanup_null);
+ else if (verbose_mode)
+ printf("FAILED ABSPATH: %s\n", path);
+
+ err = svn_mutex__unlock(log_mutex, NULL);
+ if (err)
+ {
+ if (verbose_mode)
+ printf("FAILED svn_mutex__unlock in svn_test_add_dir_cleanup.\n");
+ svn_error_clear(err);
+ }
+ }
+}
+
+
+/* ================================================================= */
+/* Quite a few tests use random numbers. */
+
+apr_uint32_t
+svn_test_rand(apr_uint32_t *seed)
+{
+ *seed = (*seed * 1103515245UL + 12345UL) & 0xffffffffUL;
+ return *seed;
+}
+
+
+/* ================================================================= */
+
+
+/* Determine the array size of test_funcs[], the inelegant way. :) */
+static int
+get_array_size(struct svn_test_descriptor_t *test_funcs)
+{
+ int i;
+
+ for (i = 1; test_funcs[i].func2 || test_funcs[i].func_opts; i++)
+ {
+ }
+
+ return (i - 1);
+}
+
+/* Buffer used for setjmp/longjmp. */
+static jmp_buf jump_buffer;
+
+/* Our SIGSEGV handler, which jumps back into do_test_num(), which see for
+ more information. */
+static void
+crash_handler(int signum)
+{
+ longjmp(jump_buffer, 1);
+}
+
+/* Write the result of test number TEST_NUM to stdout. Pretty-print test
+ name and dots according to our test-suite spec, and return TRUE if there
+ has been a test failure.
+
+ The parameters are basically the internal state of do_test_num() and
+ test_thread(). */
+/* */
+static svn_boolean_t
+log_results(const char *progname,
+ int test_num,
+ svn_boolean_t msg_only,
+ svn_boolean_t run_this_test,
+ svn_boolean_t skip,
+ svn_boolean_t xfail,
+ svn_boolean_t wimp,
+ svn_error_t *err,
+ const char *msg,
+ const struct svn_test_descriptor_t *desc)
+{
+ svn_boolean_t test_failed;
+
+ if (err && err->apr_err == SVN_ERR_TEST_SKIPPED)
+ {
+ svn_error_clear(err);
+ err = SVN_NO_ERROR;
+ skip = TRUE;
+ xfail = FALSE; /* Or all XFail tests reporting SKIP would be failing */
+ }
+
+ /* Failure means unexpected results -- FAIL or XPASS. */
+ test_failed = (!wimp && ((err != SVN_NO_ERROR) != (xfail != 0)));
+
+ /* If we got an error, print it out. */
+ if (err)
+ {
+ svn_handle_error2(err, stdout, FALSE, "svn_tests: ");
+ svn_error_clear(err);
+ }
+
+ if (msg_only)
+ {
+ const svn_boolean_t otoh = !!desc->predicate.description;
+
+ if (run_this_test)
+ printf(" %3d %-5s %s%s%s%s%s%s\n",
+ test_num,
+ (xfail ? "XFAIL" : (skip ? "SKIP" : "")),
+ msg ? msg : "(test did not provide name)",
+ (wimp && verbose_mode) ? " [[" : "",
+ (wimp && verbose_mode) ? desc->wip : "",
+ (wimp && verbose_mode) ? "]]" : "",
+ (otoh ? " / " : ""),
+ (otoh ? desc->predicate.description : ""));
+ }
+ else if (run_this_test && ((! quiet_mode) || test_failed))
+ {
+ printf("%s %s %d: %s%s%s%s\n",
+ (err
+ ? (xfail ? "XFAIL:" : "FAIL: ")
+ : (xfail ? "XPASS:" : (skip ? "SKIP: " : "PASS: "))),
+ progname,
+ test_num,
+ msg ? msg : "(test did not provide name)",
+ wimp ? " [[WIMP: " : "",
+ wimp ? desc->wip : "",
+ wimp ? "]]" : "");
+ }
+
+ if (msg)
+ {
+ size_t len = strlen(msg);
+ if (len > 50)
+ printf("WARNING: Test docstring exceeds 50 characters\n");
+ if (msg[len - 1] == '.')
+ printf("WARNING: Test docstring ends in a period (.)\n");
+ if (svn_ctype_isupper(msg[0]))
+ printf("WARNING: Test docstring is capitalized\n");
+ }
+ if (desc->msg == NULL)
+ printf("WARNING: New-style test descriptor is missing a docstring.\n");
+
+ fflush(stdout);
+
+ return test_failed;
+}
+
+/* Execute a test number TEST_NUM. Pretty-print test name and dots
+ according to our test-suite spec, and return the result code.
+ If HEADER_MSG and *HEADER_MSG are not NULL, print *HEADER_MSG prior
+ to pretty-printing the test information, then set *HEADER_MSG to NULL. */
+static svn_boolean_t
+do_test_num(const char *progname,
+ int test_num,
+ struct svn_test_descriptor_t *test_funcs,
+ svn_boolean_t msg_only,
+ svn_test_opts_t *opts,
+ const char **header_msg,
+ apr_pool_t *pool)
+{
+ svn_boolean_t skip, xfail, wimp;
+ svn_error_t *err;
+ const char *msg = NULL; /* the message this individual test prints out */
+ const struct svn_test_descriptor_t *desc;
+ const int array_size = get_array_size(test_funcs);
+ svn_boolean_t run_this_test; /* This test's mode matches DESC->MODE. */
+ enum svn_test_mode_t test_mode;
+ volatile int adjusted_num = test_num; /* volatile for setjmp */
+
+ /* This allows './some-test -- -1' to run the last test. */
+ if (adjusted_num < 0)
+ adjusted_num += array_size + 1;
+
+ /* Check our array bounds! */
+ if ((adjusted_num > array_size) || (adjusted_num <= 0))
+ {
+ if (header_msg && *header_msg)
+ printf("%s", *header_msg);
+ printf("FAIL: %s: THERE IS NO TEST NUMBER %2d\n", progname, adjusted_num);
+ skip_cleanup = TRUE;
+ return TRUE; /* BAIL, this test number doesn't exist. */
+ }
+
+ desc = &test_funcs[adjusted_num];
+ /* Check the test predicate. */
+ if (desc->predicate.func
+ && desc->predicate.func(opts, desc->predicate.value, pool))
+ test_mode = desc->predicate.alternate_mode;
+ else
+ test_mode = desc->mode;
+
+ skip = test_mode == svn_test_skip;
+ xfail = test_mode == svn_test_xfail;
+ wimp = xfail && desc->wip;
+ msg = desc->msg;
+ run_this_test = mode_filter == svn_test_all || mode_filter == test_mode;
+
+ if (run_this_test && header_msg && *header_msg)
+ {
+ printf("%s", *header_msg);
+ *header_msg = NULL;
+ }
+
+ if (!allow_segfaults)
+ {
+ /* Catch a crashing test, so we don't interrupt the rest of 'em. */
+ apr_signal(SIGSEGV, crash_handler);
+ }
+
+ /* We use setjmp/longjmp to recover from the crash. setjmp() essentially
+ establishes a rollback point, and longjmp() goes back to that point.
+ When we invoke longjmp(), it instructs setjmp() to return non-zero,
+ so we don't end up in an infinite loop.
+
+ If we've got non-zero from setjmp(), we know we've crashed. */
+ if (setjmp(jump_buffer) == 0)
+ {
+ /* Do test */
+ if (msg_only || skip || !run_this_test)
+ err = NULL; /* pass */
+ else if (desc->func2)
+ err = (*desc->func2)(pool);
+ else
+ err = (*desc->func_opts)(opts, pool);
+ }
+ else
+ err = svn_error_create(SVN_ERR_TEST_FAILED, NULL,
+ "Test crashed "
+ "(run in debugger with '--allow-segfaults')");
+
+ if (!allow_segfaults)
+ {
+ /* Now back to your regularly scheduled program... */
+ apr_signal(SIGSEGV, SIG_DFL);
+ }
+
+ /* Failure means unexpected results -- FAIL or XPASS. */
+ skip_cleanup = log_results(progname, adjusted_num, msg_only, run_this_test,
+ skip, xfail, wimp, err, msg, desc);
+
+ return skip_cleanup;
+}
+
+#if APR_HAS_THREADS
+
+/* Per-test parameters used by test_thread */
+typedef struct test_params_t
+{
+ /* Name of the application */
+ const char *progname;
+
+ /* Total number of tests to execute */
+ svn_atomic_t test_count;
+
+ /* Global test options as provided by main() */
+ svn_test_opts_t *opts;
+
+ /* Reference to the global failure flag. Set this if any test failed. */
+ svn_atomic_t got_error;
+
+ /* Test to execute next. */
+ svn_atomic_t test_num;
+
+ /* Test functions array. */
+ struct svn_test_descriptor_t *test_funcs;
+} test_params_t;
+
+/* Thread function similar to do_test_num() but with fewer options. We do
+ catch segfaults. All parameters are given as a test_params_t in DATA.
+ */
+static void * APR_THREAD_FUNC
+test_thread(apr_thread_t *thread, void *data)
+{
+ svn_boolean_t skip, xfail, wimp;
+ svn_error_t *err;
+ const struct svn_test_descriptor_t *desc;
+ svn_boolean_t run_this_test; /* This test's mode matches DESC->MODE. */
+ enum svn_test_mode_t test_mode;
+ test_params_t *params = data;
+ svn_atomic_t test_num;
+ apr_pool_t *pool;
+ apr_pool_t *thread_root
+ = apr_allocator_owner_get(svn_pool_create_allocator(FALSE));
+
+#ifdef HAVE_PER_THREAD_CLEANUP
+ set_cleanup_pool(svn_pool_create(thread_root));
+#endif
+
+ pool = svn_pool_create(thread_root);
+
+ for (test_num = svn_atomic_inc(&params->test_num);
+ test_num <= params->test_count;
+ test_num = svn_atomic_inc(&params->test_num))
+ {
+ svn_pool_clear(pool);
+#ifdef HAVE_PER_THREAD_CLEANUP
+ svn_pool_clear(cleanup_pool); /* after clearing pool */
+#endif
+
+ desc = &params->test_funcs[test_num];
+ /* Check the test predicate. */
+ if (desc->predicate.func
+ && desc->predicate.func(params->opts, desc->predicate.value, pool))
+ test_mode = desc->predicate.alternate_mode;
+ else
+ test_mode = desc->mode;
+
+ skip = test_mode == svn_test_skip;
+ xfail = test_mode == svn_test_xfail;
+ wimp = xfail && desc->wip;
+ run_this_test = mode_filter == svn_test_all
+ || mode_filter == test_mode;
+
+ /* Do test */
+ if (skip || !run_this_test)
+ err = NULL; /* pass */
+ else if (desc->func2)
+ err = (*desc->func2)(pool);
+ else
+ err = (*desc->func_opts)(params->opts, pool);
+
+ /* Write results to console */
+ svn_error_clear(svn_mutex__lock(log_mutex));
+ if (log_results(params->progname, test_num, FALSE, run_this_test,
+ skip, xfail, wimp, err, desc->msg, desc))
+ svn_atomic_set(&params->got_error, TRUE);
+ svn_error_clear(svn_mutex__unlock(log_mutex, NULL));
+ }
+
+ svn_pool_clear(pool); /* Make sure this is cleared before cleanup_pool */
+
+ /* Release all test memory. Possibly includes cleanup_pool */
+ svn_pool_destroy(thread_root);
+
+ /* End thread explicitly to prevent APR_INCOMPLETE return codes in
+ apr_thread_join(). */
+ apr_thread_exit(thread, 0);
+ return NULL;
+}
+
+/* Log an error with message MSG if the APR status of EXPR is not 0.
+ */
+#define CHECK_STATUS(expr,msg) \
+ do { \
+ apr_status_t rv = (expr); \
+ if (rv) \
+ { \
+ svn_error_t *svn_err__temp = svn_error_wrap_apr(rv, msg); \
+ svn_handle_error2(svn_err__temp, stdout, FALSE, "svn_tests: "); \
+ svn_error_clear(svn_err__temp); \
+ } \
+ } while (0);
+
+/* Execute all ARRAY_SIZE tests concurrently using MAX_THREADS threads.
+ Pass PROGNAME and OPTS to the individual tests. Return TRUE if at least
+ one of the tests failed. Allocate all data in POOL.
+
+ Note that cleanups are delayed until all tests have been completed.
+ */
+static svn_boolean_t
+do_tests_concurrently(const char *progname,
+ struct svn_test_descriptor_t *test_funcs,
+ int array_size,
+ int max_threads,
+ svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ int i;
+ apr_thread_t **threads;
+
+ /* Prepare thread parameters. */
+ test_params_t params;
+ params.got_error = FALSE;
+ params.opts = opts;
+ params.progname = progname;
+ params.test_num = 1;
+ params.test_funcs = test_funcs;
+ params.test_count = array_size;
+
+ /* Start all threads. */
+ threads = apr_pcalloc(pool, max_threads * sizeof(*threads));
+ for (i = 0; i < max_threads; ++i)
+ {
+ CHECK_STATUS(apr_thread_create(&threads[i], NULL, test_thread, &params,
+ pool),
+ "creating test thread failed.\n");
+ }
+
+ /* Wait for all tasks (tests) to complete. */
+ for (i = 0; i < max_threads; ++i)
+ {
+ apr_status_t result = 0;
+ CHECK_STATUS(apr_thread_join(&result, threads[i]),
+ "Waiting for test thread to finish failed.");
+ CHECK_STATUS(result,
+ "Test thread returned an error.");
+ }
+
+ return params.got_error != FALSE;
+}
+
+#endif
+
+static void help(const char *progname, apr_pool_t *pool)
+{
+ int i;
+
+ svn_error_clear(svn_cmdline_fprintf(stdout, pool,
+ _("usage: %s [options] [test-numbers]\n"
+ "\n"
+ "Valid options:\n"),
+ progname));
+ for (i = 0; cl_options[i].name && cl_options[i].optch; i++)
+ {
+ const char *optstr;
+
+ svn_opt_format_option(&optstr, cl_options + i, TRUE, pool);
+ svn_error_clear(svn_cmdline_fprintf(stdout, pool, " %s\n", optstr));
+ }
+ svn_error_clear(svn_cmdline_fprintf(stdout, pool, "\n"));
+}
+
+static svn_error_t *init_test_data(const char *argv0, apr_pool_t *pool)
+{
+ const char *temp_path;
+ const char *base_name;
+
+ /* Convert the program path to an absolute path. */
+ SVN_ERR(svn_utf_cstring_to_utf8(&temp_path, argv0, pool));
+ temp_path = svn_dirent_internal_style(temp_path, pool);
+ SVN_ERR(svn_dirent_get_absolute(&temp_path, temp_path, pool));
+ SVN_ERR_ASSERT(!svn_dirent_is_root(temp_path, strlen(temp_path)));
+
+ /* Extract the interesting bits of the path. */
+ temp_path = svn_dirent_dirname(temp_path, pool);
+ base_name = svn_dirent_basename(temp_path, pool);
+ if (0 == strcmp(base_name, ".libs"))
+ {
+ /* This is a libtoolized binary, skip the .libs directory. */
+ temp_path = svn_dirent_dirname(temp_path, pool);
+ base_name = svn_dirent_basename(temp_path, pool);
+ }
+ temp_path = svn_dirent_dirname(temp_path, pool);
+
+ /* temp_path should now point to the root of the test
+ builddir. Construct the path to the transient dir. Note that we
+ put the path inside the cmdline/svn-test-work area. This is
+ because trying to get the cmdline tests to use a different work
+ area is unprintable; so we put the C test transient dir in the
+ cmdline tests area, as the lesser of evils ... */
+ temp_path = svn_dirent_join_many(pool, temp_path,
+ "cmdline", "svn-test-work",
+ base_name, SVN_VA_NULL);
+
+ /* Finally, create the transient directory. */
+ SVN_ERR(svn_io_make_dir_recursively(temp_path, pool));
+
+ data_path = temp_path;
+ return SVN_NO_ERROR;
+}
+
+const char *
+svn_test_data_path(const char *base_name, apr_pool_t *result_pool)
+{
+ return svn_dirent_join(data_path, base_name, result_pool);
+}
+
+svn_error_t *
+svn_test_get_srcdir(const char **srcdir,
+ const svn_test_opts_t *opts,
+ apr_pool_t *pool)
+{
+ const char *cwd;
+
+ if (opts->srcdir)
+ {
+ *srcdir = opts->srcdir;
+ return SVN_NO_ERROR;
+ }
+
+ fprintf(stderr, "WARNING: missing '--srcdir' option");
+ SVN_ERR(svn_dirent_get_absolute(&cwd, ".", pool));
+ fprintf(stderr, ", assuming '%s'\n", cwd);
+ *srcdir = cwd;
+
+ return SVN_NO_ERROR;
+}
+
+svn_error_t *
+svn_test__init_auth_baton(svn_auth_baton_t **ab,
+ apr_pool_t *result_pool)
+{
+ svn_config_t *cfg_config;
+
+ SVN_ERR(svn_config_create2(&cfg_config, FALSE, FALSE, result_pool));
+
+ /* Disable the crypto backends that might not be entirely
+ threadsafe and/or compatible with running headless.
+
+ The windows system is just our own files, but then with user-key
+ encrypted data inside. */
+ svn_config_set(cfg_config,
+ SVN_CONFIG_SECTION_AUTH,
+ SVN_CONFIG_OPTION_PASSWORD_STORES,
+ "windows-cryptoapi");
+
+ SVN_ERR(svn_cmdline_create_auth_baton2(ab,
+ TRUE /* non_interactive */,
+ "jrandom", "rayjandom",
+ NULL,
+ TRUE /* no_auth_cache */,
+ TRUE /* trust_server_cert_unknown_ca */,
+ FALSE, FALSE, FALSE, FALSE,
+ cfg_config, NULL, NULL, result_pool));
+
+ return SVN_NO_ERROR;
+}
+
+svn_error_t *
+svn_test_make_sandbox_dir(const char **sb_dir_p,
+ const char *sb_name,
+ apr_pool_t *pool)
+{
+ const char *sb_dir;
+
+ sb_dir = svn_test_data_path(sb_name, pool);
+ SVN_ERR(svn_io_remove_dir2(sb_dir, TRUE, NULL, NULL, pool));
+ SVN_ERR(svn_io_make_dir_recursively(sb_dir, pool));
+ svn_test_add_dir_cleanup(sb_dir);
+
+ *sb_dir_p = sb_dir;
+
+ return SVN_NO_ERROR;
+}
+
+/* Standard svn test program */
+int
+svn_test_main(int argc, const char *argv[], int max_threads,
+ struct svn_test_descriptor_t *test_funcs)
+{
+ int i;
+ svn_boolean_t got_error = FALSE;
+ apr_pool_t *pool, *test_pool;
+ svn_boolean_t ran_a_test = FALSE;
+ svn_boolean_t list_mode = FALSE;
+ int opt_id;
+ apr_status_t apr_err;
+ apr_getopt_t *os;
+ svn_error_t *err;
+ char errmsg[200];
+ /* How many tests are there? */
+ int array_size = get_array_size(test_funcs);
+
+ svn_test_opts_t opts = { NULL };
+
+ opts.fs_type = DEFAULT_FS_TYPE;
+
+ /* Initialize APR (Apache pools) */
+ if (apr_initialize() != APR_SUCCESS)
+ {
+ printf("apr_initialize() failed.\n");
+ exit(1);
+ }
+
+ /* set up the global pool. Use a separate allocator to limit memory
+ * usage but make it thread-safe to allow for multi-threaded tests.
+ */
+ pool = apr_allocator_owner_get(svn_pool_create_allocator(TRUE));
+ err = svn_mutex__init(&log_mutex, TRUE, pool);
+ if (err)
+ {
+ svn_handle_error2(err, stderr, TRUE, "svn_tests: ");
+ svn_error_clear(err);
+ }
+
+ /* Set up the thread-local storage key for the cleanup pool. */
+#ifdef HAVE_PER_THREAD_CLEANUP
+ apr_err = apr_threadkey_private_create(&cleanup_pool_key,
+ null_threadkey_dtor,
+ pool);
+ if (apr_err)
+ {
+ printf("apr_threadkey_private_create() failed with code %ld.\n",
+ (long)apr_err);
+ exit(1);
+ }
+#endif /* HAVE_PER_THREAD_CLEANUP */
+
+ /* Remember the command line */
+ test_argc = argc;
+ test_argv = argv;
+
+ err = init_test_data(argv[0], pool);
+ if (err)
+ {
+ svn_handle_error2(err, stderr, TRUE, "svn_tests: ");
+ svn_error_clear(err);
+ }
+
+ err = svn_cmdline__getopt_init(&os, argc, argv, pool);
+ if (err)
+ {
+ svn_handle_error2(err, stderr, TRUE, "svn_tests: ");
+ svn_error_clear(err);
+ }
+
+
+ os->interleave = TRUE; /* Let options and arguments be interleaved */
+
+ /* Strip off any leading path components from the program name. */
+ opts.prog_name = svn_dirent_internal_style(argv[0], pool);
+ opts.prog_name = svn_dirent_basename(opts.prog_name, NULL);
+
+#ifdef WIN32
+ /* Abuse cast in strstr() to remove .exe extension.
+ Value is allocated in pool by svn_dirent_internal_style() */
+ {
+ char *exe_ext = strstr(opts.prog_name, ".exe");
+
+ if (exe_ext)
+ *exe_ext = '\0';
+ }
+
+#if _MSC_VER >= 1400
+ /* ### This should work for VC++ 2002 (=1300) and later */
+ /* Show the abort message on STDERR instead of a dialog to allow
+ scripts (e.g. our testsuite) to continue after an abort without
+ user intervention. Allow overriding for easier debugging. */
+ if (!getenv("SVN_CMDLINE_USE_DIALOG_FOR_ABORT"))
+ {
+ /* In release mode: Redirect abort() errors to stderr */
+ _set_error_mode(_OUT_TO_STDERR);
+
+ /* In _DEBUG mode: Redirect all debug output (e.g. assert()) to stderr.
+ (Ignored in release builds) */
+ _CrtSetReportFile( _CRT_ASSERT, _CRTDBG_FILE_STDERR);
+ _CrtSetReportMode(_CRT_WARN, _CRTDBG_MODE_FILE | _CRTDBG_MODE_DEBUG);
+ _CrtSetReportMode(_CRT_ERROR, _CRTDBG_MODE_FILE | _CRTDBG_MODE_DEBUG);
+ _CrtSetReportMode(_CRT_ASSERT, _CRTDBG_MODE_FILE | _CRTDBG_MODE_DEBUG);
+ }
+#endif /* _MSC_VER >= 1400 */
+#endif
+
+ if (err)
+ return svn_cmdline_handle_exit_error(err, pool, opts.prog_name);
+
+ /* For efficient UTF8 handling (e.g. used by our file I/O routines). */
+ svn_utf_initialize2(FALSE, pool);
+
+ while (1)
+ {
+ const char *opt_arg;
+
+ /* Parse the next option. */
+ apr_err = apr_getopt_long(os, cl_options, &opt_id, &opt_arg);
+ if (APR_STATUS_IS_EOF(apr_err))
+ break;
+ else if (apr_err && (apr_err != APR_BADCH))
+ {
+ /* Ignore invalid option error to allow passing arbitrary options */
+ fprintf(stderr, "apr_getopt_long failed : [%d] %s\n",
+ apr_err, apr_strerror(apr_err, errmsg, sizeof(errmsg)));
+ exit(1);
+ }
+
+ switch (opt_id) {
+ case help_opt:
+ help(opts.prog_name, pool);
+ exit(0);
+ case cleanup_opt:
+ cleanup_mode = TRUE;
+ break;
+ case config_opt:
+ opts.config_file = apr_pstrdup(pool, opt_arg);
+ break;
+ case fstype_opt:
+ opts.fs_type = apr_pstrdup(pool, opt_arg);
+ break;
+ case srcdir_opt:
+ SVN_INT_ERR(svn_utf_cstring_to_utf8(&opts.srcdir, opt_arg, pool));
+ opts.srcdir = svn_dirent_internal_style(opts.srcdir, pool);
+ break;
+ case reposdir_opt:
+ SVN_INT_ERR(svn_utf_cstring_to_utf8(&opts.repos_dir, opt_arg, pool));
+ opts.repos_dir = svn_dirent_internal_style(opts.repos_dir, pool);
+ break;
+ case reposurl_opt:
+ SVN_INT_ERR(svn_utf_cstring_to_utf8(&opts.repos_url, opt_arg, pool));
+ opts.repos_url = svn_uri_canonicalize(opts.repos_url, pool);
+ break;
+ case repostemplate_opt:
+ SVN_INT_ERR(svn_utf_cstring_to_utf8(&opts.repos_template, opt_arg,
+ pool));
+ opts.repos_template = svn_dirent_internal_style(opts.repos_template,
+ pool);
+ break;
+ case memcached_server_opt:
+ SVN_INT_ERR(svn_utf_cstring_to_utf8(&opts.memcached_server, opt_arg,
+ pool));
+ break;
+ case list_opt:
+ list_mode = TRUE;
+ break;
+ case mode_filter_opt:
+ if (svn_cstring_casecmp(opt_arg, "PASS") == 0)
+ mode_filter = svn_test_pass;
+ else if (svn_cstring_casecmp(opt_arg, "XFAIL") == 0)
+ mode_filter = svn_test_xfail;
+ else if (svn_cstring_casecmp(opt_arg, "SKIP") == 0)
+ mode_filter = svn_test_skip;
+ else if (svn_cstring_casecmp(opt_arg, "ALL") == 0)
+ mode_filter = svn_test_all;
+ else
+ {
+ fprintf(stderr, "FAIL: Invalid --mode-filter option. Try ");
+ fprintf(stderr, " PASS, XFAIL, SKIP or ALL.\n");
+ exit(1);
+ }
+ break;
+ case verbose_opt:
+ verbose_mode = TRUE;
+ break;
+ case quiet_opt:
+ quiet_mode = TRUE;
+ break;
+ case allow_segfault_opt:
+ allow_segfaults = TRUE;
+ break;
+ case server_minor_version_opt:
+ {
+ char *end;
+ opts.server_minor_version = (int) strtol(opt_arg, &end, 10);
+ if (end == opt_arg || *end != '\0')
+ {
+ fprintf(stderr, "FAIL: Non-numeric minor version given\n");
+ exit(1);
+ }
+ if ((opts.server_minor_version < 3)
+ || (opts.server_minor_version > SVN_VER_MINOR))
+ {
+ fprintf(stderr, "FAIL: Invalid minor version given\n");
+ exit(1);
+ }
+ break;
+ }
+ case sqlite_log_opt:
+ svn_sqlite__dbg_enable_errorlog();
+ break;
+#if APR_HAS_THREADS
+ case parallel_opt:
+ parallel = TRUE;
+ break;
+#endif
+ }
+ }
+ opts.verbose = verbose_mode;
+
+ /* Disable sleeping for timestamps, to speed up the tests. */
+ apr_env_set(
+ "SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_SLEEP_FOR_TIMESTAMPS",
+ "yes", pool);
+
+ /* You can't be both quiet and verbose. */
+ if (quiet_mode && verbose_mode)
+ {
+ fprintf(stderr, "FAIL: --verbose and --quiet are mutually exclusive\n");
+ exit(1);
+ }
+
+ /* Create an iteration pool for the tests */
+ set_cleanup_pool(svn_pool_create(pool));
+ test_pool = svn_pool_create(pool);
+
+ if (!allow_segfaults)
+ svn_error_set_malfunction_handler(svn_error_raise_on_malfunction);
+
+ if (argc >= 2) /* notice command-line arguments */
+ {
+ if (! strcmp(argv[1], "list") || list_mode)
+ {
+ const char *header_msg;
+ ran_a_test = TRUE;
+
+ /* run all tests with MSG_ONLY set to TRUE */
+ header_msg = "Test # Mode Test Description\n"
+ "------ ----- ----------------\n";
+ for (i = 1; i <= array_size; i++)
+ {
+ if (do_test_num(opts.prog_name, i, test_funcs,
+ TRUE, &opts, &header_msg, test_pool))
+ got_error = TRUE;
+
+ /* Clear the per-function pool */
+ svn_pool_clear(test_pool);
+ svn_pool_clear(cleanup_pool);
+ }
+ }
+ else
+ {
+ for (i = 1; i < argc; i++)
+ {
+ if (svn_ctype_isdigit(argv[i][0]) || argv[i][0] == '-')
+ {
+ int test_num = atoi(argv[i]);
+ if (test_num == 0)
+ /* A --option argument, most likely. */
+ continue;
+
+ ran_a_test = TRUE;
+ if (do_test_num(opts.prog_name, test_num, test_funcs,
+ FALSE, &opts, NULL, test_pool))
+ got_error = TRUE;
+
+ /* Clear the per-function pool */
+ svn_pool_clear(test_pool);
+ svn_pool_clear(cleanup_pool);
+ }
+ }
+ }
+ }
+
+ if (! ran_a_test)
+ {
+ /* just run all tests */
+ if (max_threads < 1)
+ max_threads = array_size;
+
+ if (max_threads == 1 || !parallel)
+ {
+ for (i = 1; i <= array_size; i++)
+ {
+ if (do_test_num(opts.prog_name, i, test_funcs,
+ FALSE, &opts, NULL, test_pool))
+ got_error = TRUE;
+
+ /* Clear the per-function pool */
+ svn_pool_clear(test_pool);
+ svn_pool_clear(cleanup_pool);
+ }
+ }
+#if APR_HAS_THREADS
+ else
+ {
+ got_error = do_tests_concurrently(opts.prog_name, test_funcs,
+ array_size, max_threads,
+ &opts, test_pool);
+
+ /* Execute all cleanups */
+ svn_pool_clear(test_pool);
+ svn_pool_clear(cleanup_pool);
+ }
+#endif
+ }
+
+ /* Clean up APR */
+ svn_pool_destroy(pool); /* takes test_pool with it */
+ apr_terminate();
+
+ return got_error;
+}
+
+
+svn_boolean_t
+svn_test__fs_type_is(const svn_test_opts_t *opts,
+ const char *predicate_value,
+ apr_pool_t *pool)
+{
+ return (0 == strcmp(predicate_value, opts->fs_type));
+}
+
+svn_boolean_t
+svn_test__fs_type_not(const svn_test_opts_t *opts,
+ const char *predicate_value,
+ apr_pool_t *pool)
+{
+ return (0 != strcmp(predicate_value, opts->fs_type));
+}
diff --git a/subversion/tests/templates/empty-fsfs-v1.zip b/subversion/tests/templates/empty-fsfs-v1.zip
new file mode 100644
index 0000000..7293b31
--- /dev/null
+++ b/subversion/tests/templates/empty-fsfs-v1.zip
Binary files differ
diff --git a/subversion/tests/templates/empty-fsfs-v2.zip b/subversion/tests/templates/empty-fsfs-v2.zip
new file mode 100644
index 0000000..b2a477e
--- /dev/null
+++ b/subversion/tests/templates/empty-fsfs-v2.zip
Binary files differ
diff --git a/subversion/tests/templates/empty-fsfs-v3.zip b/subversion/tests/templates/empty-fsfs-v3.zip
new file mode 100644
index 0000000..4edcab2
--- /dev/null
+++ b/subversion/tests/templates/empty-fsfs-v3.zip
Binary files differ
diff --git a/subversion/tests/templates/empty-fsfs-v4.zip b/subversion/tests/templates/empty-fsfs-v4.zip
new file mode 100644
index 0000000..deca429
--- /dev/null
+++ b/subversion/tests/templates/empty-fsfs-v4.zip
Binary files differ
diff --git a/subversion/tests/templates/empty-fsfs-v6.zip b/subversion/tests/templates/empty-fsfs-v6.zip
new file mode 100644
index 0000000..4fce7ca
--- /dev/null
+++ b/subversion/tests/templates/empty-fsfs-v6.zip
Binary files differ
diff --git a/subversion/tests/templates/empty-fsfs-v7.zip b/subversion/tests/templates/empty-fsfs-v7.zip
new file mode 100644
index 0000000..11b79f9
--- /dev/null
+++ b/subversion/tests/templates/empty-fsfs-v7.zip
Binary files differ
diff --git a/subversion/tests/templates/greek-fsfs-v1.zip b/subversion/tests/templates/greek-fsfs-v1.zip
new file mode 100644
index 0000000..1ae7438
--- /dev/null
+++ b/subversion/tests/templates/greek-fsfs-v1.zip
Binary files differ
diff --git a/subversion/tests/templates/greek-fsfs-v2.zip b/subversion/tests/templates/greek-fsfs-v2.zip
new file mode 100644
index 0000000..e42edb7
--- /dev/null
+++ b/subversion/tests/templates/greek-fsfs-v2.zip
Binary files differ
diff --git a/subversion/tests/templates/greek-fsfs-v3.zip b/subversion/tests/templates/greek-fsfs-v3.zip
new file mode 100644
index 0000000..1cb3690
--- /dev/null
+++ b/subversion/tests/templates/greek-fsfs-v3.zip
Binary files differ
diff --git a/subversion/tests/templates/greek-fsfs-v4.zip b/subversion/tests/templates/greek-fsfs-v4.zip
new file mode 100644
index 0000000..22011f9
--- /dev/null
+++ b/subversion/tests/templates/greek-fsfs-v4.zip
Binary files differ
diff --git a/subversion/tests/templates/greek-fsfs-v6.zip b/subversion/tests/templates/greek-fsfs-v6.zip
new file mode 100644
index 0000000..3dc2c3e
--- /dev/null
+++ b/subversion/tests/templates/greek-fsfs-v6.zip
Binary files differ
diff --git a/subversion/tests/templates/greek-fsfs-v7.zip b/subversion/tests/templates/greek-fsfs-v7.zip
new file mode 100644
index 0000000..5b7ebd0
--- /dev/null
+++ b/subversion/tests/templates/greek-fsfs-v7.zip
Binary files differ
diff --git a/subversion/tests/templates/greek.dump b/subversion/tests/templates/greek.dump
new file mode 100644
index 0000000..b70c811
--- /dev/null
+++ b/subversion/tests/templates/greek.dump
@@ -0,0 +1,260 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 77e48e13-c942-4450-8676-1d60a12bd220
+
+Revision-number: 0
+Prop-content-length: 56
+Content-length: 56
+
+K 8
+svn:date
+V 27
+2014-08-22T10:58:13.847732Z
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 129
+Content-length: 129
+
+K 10
+svn:author
+V 7
+jrandom
+K 8
+svn:date
+V 27
+2014-08-22T11:16:26.921067Z
+K 7
+svn:log
+V 27
+Log message for revision 1.
+PROPS-END
+
+Node-path: A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: d1fa4a3ced98961674a441930a51f2d3
+Text-content-sha1: b347d1da69df9a6a70433ceeaa0d46c8483e8c03
+Content-length: 36
+
+PROPS-END
+This is the file 'alpha'.
+
+
+Node-path: A/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-content-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+Content-length: 35
+
+PROPS-END
+This is the file 'beta'.
+
+
+Node-path: A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 27
+Text-content-md5: 911c7a8d869b8c1e566f57da54d889c6
+Text-content-sha1: 784a9298366863da2b65ebf82b4e1123755a2421
+Content-length: 37
+
+PROPS-END
+This is the file 'lambda'.
+
+
+Node-path: A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: A/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: A/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: A/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: A/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: A/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: 412138bd677d64cd1c32fafbffe6245d
+Text-content-sha1: 74b75d7f2e1a0292f17d5a57c570bd89783f5d1c
+Content-length: 36
+
+PROPS-END
+This is the file 'gamma'.
+
+
+Node-path: A/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Content-length: 33
+
+PROPS-END
+This is the file 'mu'.
+
+
+Node-path: iota
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-content-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+Content-length: 35
+
+PROPS-END
+This is the file 'iota'.
+
+
diff --git a/subversion/tests/tests.conf b/subversion/tests/tests.conf
new file mode 100644
index 0000000..3418d5d
--- /dev/null
+++ b/subversion/tests/tests.conf
@@ -0,0 +1,36 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+### This config file configures some aspects of the Subversion test
+### suite. Pass --config-file FILENAME to test programs if running
+### them manually; "make check" passes this file in automatically.
+
+### Currently, it is used for two purposes: it is used to configure
+### memcached for direct svn_cache/memcached tests in
+### libsvn_subr/cache-test; and it is copied into new FSFS
+### repositories as fsfs.conf (to configure their use of memcached as
+### well).
+
+[memcached-servers]
+### Run memcached servers and enter lines like the following (the key
+### is ignored):
+# key = 127.0.0.1:11211
+
+[caches]
+### In the test suite, we should make FSFS cache failures into actual
+### test failures:
+fail-stop = true